1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59     They should be processed from last to first if the stack and args grow
60     in opposite directions, but only if we have push insns.  */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
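/* For example, on a typical machine where STACK_GROWS_DOWNWARD is defined
   but ARGS_GROW_DOWNWARD is not, the test above holds and arguments are
   pushed from last to first.  */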
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
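/* With PRE_DEC, a push of X in mode M is emitted roughly as
   (set (mem:M (pre_dec (reg sp))) X); with PRE_INC the stack pointer is
   bumped upward instead.  */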
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
114 /* This structure is used by store_by_pieces to describe the clear to
115 be performed. */
117 struct store_by_pieces
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
126 void *constfundata;
127 int reverse;
130 static rtx enqueue_insn (rtx, rtx);
131 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
132 unsigned int);
133 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
134 struct move_by_pieces *);
135 static bool block_move_libcall_safe_for_call_parm (void);
136 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
137 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
138 static tree emit_block_move_libcall_fn (int);
139 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
140 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
141 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
142 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
143 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
144 struct store_by_pieces *);
145 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
146 static rtx clear_storage_via_libcall (rtx, rtx);
147 static tree clear_storage_libcall_fn (int);
148 static rtx compress_float_constant (rtx, rtx);
149 static rtx get_subtarget (rtx);
150 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
151 HOST_WIDE_INT, enum machine_mode,
152 tree, tree, int, int);
153 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
154 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
155 tree, enum machine_mode, int, tree, int);
156 static rtx var_rtx (tree);
158 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
159 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
161 static int is_aligning_offset (tree, tree);
162 static rtx expand_increment (tree, int, int);
163 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
164 enum expand_modifier);
165 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
166 #ifdef PUSH_ROUNDING
167 static void emit_single_push_insn (enum machine_mode, rtx, tree);
168 #endif
169 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
170 static rtx const_vector_from_tree (tree);
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
176 static char direct_load[NUM_MACHINE_MODES];
177 static char direct_store[NUM_MACHINE_MODES];
179 /* Record for each mode whether we can float-extend from memory. */
181 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
183 /* This macro is used to determine whether move_by_pieces should be called
184 to perform a structure copy. */
185 #ifndef MOVE_BY_PIECES_P
186 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
188 #endif
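/* As a worked example: on a hypothetical target with MOVE_RATIO == 4 whose
   widest piece is a 4-byte word, a 12-byte copy costs 3 move insns and is
   done by pieces, while a 16-byte copy costs 4 and instead falls through to
   a block-move pattern or a libcall.  */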
190 /* This macro is used to determine whether clear_by_pieces should be
191 called to clear storage. */
192 #ifndef CLEAR_BY_PIECES_P
193 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
195 #endif
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memset" storage with byte values other than zero, or
199 to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
202 #endif
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movstr_optab[NUM_MACHINE_MODES];
207 /* This array records the insn_code of insns to perform block clears. */
208 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
210 /* These arrays record the insn_code of two different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
215 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
217 #ifndef SLOW_UNALIGNED_ACCESS
218 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
219 #endif
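/* That is, unless the target provides its own definition, unaligned
   accesses are assumed slow exactly when STRICT_ALIGNMENT is nonzero.  */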
221 /* This is run once per compilation to set up which modes can be used
222 directly in memory and to initialize the block move optab. */
224 void
225 init_expr_once (void)
227 rtx insn, pat;
228 enum machine_mode mode;
229 int num_clobbers;
230 rtx mem, mem1;
231 rtx reg;
233 /* Try indexing by frame ptr and try by stack ptr.
234 It is known that on the Convex the stack ptr isn't a valid index.
235 With luck, one or the other is valid on any machine. */
236 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
237 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
239 /* A scratch register we can modify in-place below to avoid
240 useless RTL allocations. */
241 reg = gen_rtx_REG (VOIDmode, -1);
243 insn = rtx_alloc (INSN);
244 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
245 PATTERN (insn) = pat;
247 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
248 mode = (enum machine_mode) ((int) mode + 1))
250 int regno;
252 direct_load[(int) mode] = direct_store[(int) mode] = 0;
253 PUT_MODE (mem, mode);
254 PUT_MODE (mem1, mode);
255 PUT_MODE (reg, mode);
257 /* See if there is some register that can be used in this mode and
258 directly loaded or stored from memory. */
260 if (mode != VOIDmode && mode != BLKmode)
261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
262 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
263 regno++)
265 if (! HARD_REGNO_MODE_OK (regno, mode))
266 continue;
268 REGNO (reg) = regno;
270 SET_SRC (pat) = mem;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = mem1;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem1;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
292 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
294 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
295 mode = GET_MODE_WIDER_MODE (mode))
297 enum machine_mode srcmode;
298 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
299 srcmode = GET_MODE_WIDER_MODE (srcmode))
301 enum insn_code ic;
303 ic = can_extend_p (mode, srcmode, 0);
304 if (ic == CODE_FOR_nothing)
305 continue;
307 PUT_MODE (mem, srcmode);
309 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
310 float_extend_from_mem[mode][srcmode] = true;
315 /* This is run at the start of compiling a function. */
317 void
318 init_expr (void)
320 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
323 /* Small sanity check that the queue is empty at the end of a function. */
325 void
326 finish_expr_for_function (void)
328 if (pending_chain)
329 abort ();
332 /* Manage the queue of increment instructions to be output
333 for POSTINCREMENT_EXPR expressions, etc. */
335 /* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
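/* For example, when a POSTINCREMENT_EXPR is expanded, the increment insn
   can be enqueued here; protect_from_queue then supplies the pre-increment
   value of the variable to any use of it, and emit_queue later emits the
   deferred increment.  */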
342 static rtx
343 enqueue_insn (rtx var, rtx body)
345 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
346 body, pending_chain);
347 return pending_chain;
350 /* Use protect_from_queue to convert a QUEUED expression
351 into something that you can put immediately into an instruction.
352 If the queued incrementation has not happened yet,
353 protect_from_queue returns the variable itself.
354 If the incrementation has happened, protect_from_queue returns a temp
355 that contains a copy of the old value of the variable.
357 Any time an rtx which might possibly be a QUEUED is to be put
358 into an instruction, it must be passed through protect_from_queue first.
359 QUEUED expressions are not meaningful in instructions.
361 Do not pass a value through protect_from_queue and then hold
362 on to it for a while before putting it in an instruction!
363 If the queue is flushed in between, incorrect code will result. */
366 protect_from_queue (rtx x, int modify)
368 RTX_CODE code = GET_CODE (x);
370 #if 0 /* A QUEUED can hang around after the queue is forced out. */
371 /* Shortcut for most common case. */
372 if (pending_chain == 0)
373 return x;
374 #endif
376 if (code != QUEUED)
378 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
379 use of autoincrement. Make a copy of the contents of the memory
380 location rather than a copy of the address, but not if the value is
381 of mode BLKmode. Don't modify X in place since it might be
382 shared. */
383 if (code == MEM && GET_MODE (x) != BLKmode
384 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
386 rtx y = XEXP (x, 0);
387 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
389 if (QUEUED_INSN (y))
391 rtx temp = gen_reg_rtx (GET_MODE (x));
393 emit_insn_before (gen_move_insn (temp, new),
394 QUEUED_INSN (y));
395 return temp;
398 /* Copy the address into a pseudo, so that the returned value
399 remains correct across calls to emit_queue. */
400 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
403 /* Otherwise, recursively protect the subexpressions of all
404 the kinds of rtx's that can contain a QUEUED. */
405 if (code == MEM)
407 rtx tem = protect_from_queue (XEXP (x, 0), 0);
408 if (tem != XEXP (x, 0))
410 x = copy_rtx (x);
411 XEXP (x, 0) = tem;
414 else if (code == PLUS || code == MULT)
416 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
417 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
418 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 x = copy_rtx (x);
421 XEXP (x, 0) = new0;
422 XEXP (x, 1) = new1;
425 return x;
427 /* If the increment has not happened, use the variable itself. Copy it
428 into a new pseudo so that the value remains correct across calls to
429 emit_queue. */
430 if (QUEUED_INSN (x) == 0)
431 return copy_to_reg (QUEUED_VAR (x));
432 /* If the increment has happened and a pre-increment copy exists,
433 use that copy. */
434 if (QUEUED_COPY (x) != 0)
435 return QUEUED_COPY (x);
436 /* The increment has happened but we haven't set up a pre-increment copy.
437 Set one up now, and use it. */
438 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
439 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
440 QUEUED_INSN (x));
441 return QUEUED_COPY (x);
444 /* Return nonzero if X contains a QUEUED expression:
445 if it contains anything that will be altered by a queued increment.
446 We handle only combinations of MEM, PLUS, MINUS and MULT operators
447 since memory addresses generally contain only those. */
450 queued_subexp_p (rtx x)
452 enum rtx_code code = GET_CODE (x);
453 switch (code)
455 case QUEUED:
456 return 1;
457 case MEM:
458 return queued_subexp_p (XEXP (x, 0));
459 case MULT:
460 case PLUS:
461 case MINUS:
462 return (queued_subexp_p (XEXP (x, 0))
463 || queued_subexp_p (XEXP (x, 1)));
464 default:
465 return 0;
469 /* Retrieve a mark on the queue. */
471 static rtx
472 mark_queue (void)
474 return pending_chain;
477 /* Perform all the pending incrementations that have been enqueued
478 after MARK was retrieved. If MARK is null, perform all the
479 pending incrementations. */
481 static void
482 emit_insns_enqueued_after_mark (rtx mark)
484 rtx p;
486 /* The marked incrementation may have been emitted in the meantime
487 through a call to emit_queue. In this case, the mark is not valid
488 anymore so do nothing. */
489 if (mark && ! QUEUED_BODY (mark))
490 return;
492 while ((p = pending_chain) != mark)
494 rtx body = QUEUED_BODY (p);
496 switch (GET_CODE (body))
498 case INSN:
499 case JUMP_INSN:
500 case CALL_INSN:
501 case CODE_LABEL:
502 case BARRIER:
503 case NOTE:
504 QUEUED_INSN (p) = body;
505 emit_insn (body);
506 break;
508 #ifdef ENABLE_CHECKING
509 case SEQUENCE:
510 abort ();
511 break;
512 #endif
514 default:
515 QUEUED_INSN (p) = emit_insn (body);
516 break;
519 QUEUED_BODY (p) = 0;
520 pending_chain = QUEUED_NEXT (p);
524 /* Perform all the pending incrementations. */
526 void
527 emit_queue (void)
529 emit_insns_enqueued_after_mark (NULL_RTX);
532 /* Copy data from FROM to TO, where the machine modes are not the same.
533 Both modes may be integer, or both may be floating.
534 UNSIGNEDP should be nonzero if FROM is an unsigned type.
535 This causes zero-extension instead of sign-extension. */
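/* For example, with FROM an SImode register holding 0xffffffff and TO a
   DImode register, UNSIGNEDP == 0 yields 0xffffffffffffffff (sign
   extension) while UNSIGNEDP != 0 yields 0x00000000ffffffff (zero
   extension).  */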
537 void
538 convert_move (rtx to, rtx from, int unsignedp)
540 enum machine_mode to_mode = GET_MODE (to);
541 enum machine_mode from_mode = GET_MODE (from);
542 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
543 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
544 enum insn_code code;
545 rtx libcall;
547 /* rtx code for making an equivalent value. */
548 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
549 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
551 to = protect_from_queue (to, 1);
552 from = protect_from_queue (from, 0);
554 if (to_real != from_real)
555 abort ();
557 /* If the source and destination are already the same, then there's
558 nothing to do. */
559 if (to == from)
560 return;
562 /* If FROM is a SUBREG that indicates that we have already done at least
563 the required extension, strip it. We don't handle such SUBREGs as
564 TO here. */
566 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
567 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
568 >= GET_MODE_SIZE (to_mode))
569 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
570 from = gen_lowpart (to_mode, from), from_mode = to_mode;
572 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
573 abort ();
575 if (to_mode == from_mode
576 || (from_mode == VOIDmode && CONSTANT_P (from)))
578 emit_move_insn (to, from);
579 return;
582 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
584 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
585 abort ();
587 if (VECTOR_MODE_P (to_mode))
588 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
589 else
590 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
592 emit_move_insn (to, from);
593 return;
596 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
598 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
599 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
600 return;
603 if (to_real)
605 rtx value, insns;
606 convert_optab tab;
608 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
609 tab = sext_optab;
610 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
611 tab = trunc_optab;
612 else
613 abort ();
615 /* Try converting directly if the insn is supported. */
617 code = tab->handlers[to_mode][from_mode].insn_code;
618 if (code != CODE_FOR_nothing)
620 emit_unop_insn (code, to, from,
621 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
622 return;
625 /* Otherwise use a libcall. */
626 libcall = tab->handlers[to_mode][from_mode].libfunc;
628 if (!libcall)
629 /* This conversion is not implemented yet. */
630 abort ();
632 start_sequence ();
633 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
634 1, from, from_mode);
635 insns = get_insns ();
636 end_sequence ();
637 emit_libcall_block (insns, to, value,
638 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
639 from)
640 : gen_rtx_FLOAT_EXTEND (to_mode, from));
641 return;
644 /* Handle pointer conversion. */ /* SPEE 900220. */
645 /* Targets are expected to provide conversion insns between PxImode and
646 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
647 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
649 enum machine_mode full_mode
650 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
652 if (trunc_optab->handlers[to_mode][full_mode].insn_code
653 == CODE_FOR_nothing)
654 abort ();
656 if (full_mode != from_mode)
657 from = convert_to_mode (full_mode, from, unsignedp);
658 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
659 to, from, UNKNOWN);
660 return;
662 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
664 enum machine_mode full_mode
665 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
667 if (sext_optab->handlers[full_mode][from_mode].insn_code
668 == CODE_FOR_nothing)
669 abort ();
671 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
672 to, from, UNKNOWN);
673 if (to_mode == full_mode)
674 return;
676 /* else proceed to integer conversions below. */
677 from_mode = full_mode;
680 /* Now both modes are integers. */
682 /* Handle expanding beyond a word. */
683 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
684 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
686 rtx insns;
687 rtx lowpart;
688 rtx fill_value;
689 rtx lowfrom;
690 int i;
691 enum machine_mode lowpart_mode;
692 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
694 /* Try converting directly if the insn is supported. */
695 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
696 != CODE_FOR_nothing)
698 /* If FROM is a SUBREG, put it into a register. Do this
699 so that we always generate the same set of insns for
700 better cse'ing; if an intermediate assignment occurred,
701 we won't be doing the operation directly on the SUBREG. */
702 if (optimize > 0 && GET_CODE (from) == SUBREG)
703 from = force_reg (from_mode, from);
704 emit_unop_insn (code, to, from, equiv_code);
705 return;
707 /* Next, try converting via full word. */
708 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
709 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
710 != CODE_FOR_nothing))
712 if (REG_P (to))
714 if (reg_overlap_mentioned_p (to, from))
715 from = force_reg (from_mode, from);
716 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
718 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
719 emit_unop_insn (code, to,
720 gen_lowpart (word_mode, to), equiv_code);
721 return;
724 /* No special multiword conversion insn; do it by hand. */
725 start_sequence ();
727 /* Since we will turn this into a no conflict block, we must ensure
728 that the source does not overlap the target. */
730 if (reg_overlap_mentioned_p (to, from))
731 from = force_reg (from_mode, from);
733 /* Get a copy of FROM widened to a word, if necessary. */
734 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
735 lowpart_mode = word_mode;
736 else
737 lowpart_mode = from_mode;
739 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
741 lowpart = gen_lowpart (lowpart_mode, to);
742 emit_move_insn (lowpart, lowfrom);
744 /* Compute the value to put in each remaining word. */
745 if (unsignedp)
746 fill_value = const0_rtx;
747 else
749 #ifdef HAVE_slt
750 if (HAVE_slt
751 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
752 && STORE_FLAG_VALUE == -1)
754 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
755 lowpart_mode, 0);
756 fill_value = gen_reg_rtx (word_mode);
757 emit_insn (gen_slt (fill_value));
759 else
760 #endif
762 fill_value
763 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
764 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
765 NULL_RTX, 0);
766 fill_value = convert_to_mode (word_mode, fill_value, 1);
770 /* Fill the remaining words. */
771 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
773 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
774 rtx subword = operand_subword (to, index, 1, to_mode);
776 if (subword == 0)
777 abort ();
779 if (fill_value != subword)
780 emit_move_insn (subword, fill_value);
783 insns = get_insns ();
784 end_sequence ();
786 emit_no_conflict_block (insns, to, from, NULL_RTX,
787 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
788 return;
791 /* Truncating multi-word to a word or less. */
792 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
793 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
795 if (!((GET_CODE (from) == MEM
796 && ! MEM_VOLATILE_P (from)
797 && direct_load[(int) to_mode]
798 && ! mode_dependent_address_p (XEXP (from, 0)))
799 || REG_P (from)
800 || GET_CODE (from) == SUBREG))
801 from = force_reg (from_mode, from);
802 convert_move (to, gen_lowpart (word_mode, from), 0);
803 return;
806 /* Now follow all the conversions between integers
807 no more than a word long. */
809 /* For truncation, usually we can just refer to FROM in a narrower mode. */
810 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
811 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
812 GET_MODE_BITSIZE (from_mode)))
814 if (!((GET_CODE (from) == MEM
815 && ! MEM_VOLATILE_P (from)
816 && direct_load[(int) to_mode]
817 && ! mode_dependent_address_p (XEXP (from, 0)))
818 || REG_P (from)
819 || GET_CODE (from) == SUBREG))
820 from = force_reg (from_mode, from);
821 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
822 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
823 from = copy_to_reg (from);
824 emit_move_insn (to, gen_lowpart (to_mode, from));
825 return;
828 /* Handle extension. */
829 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
831 /* Convert directly if that works. */
832 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
833 != CODE_FOR_nothing)
835 if (flag_force_mem)
836 from = force_not_mem (from);
838 emit_unop_insn (code, to, from, equiv_code);
839 return;
841 else
843 enum machine_mode intermediate;
844 rtx tmp;
845 tree shift_amount;
847 /* Search for a mode to convert via. */
848 for (intermediate = from_mode; intermediate != VOIDmode;
849 intermediate = GET_MODE_WIDER_MODE (intermediate))
850 if (((can_extend_p (to_mode, intermediate, unsignedp)
851 != CODE_FOR_nothing)
852 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
853 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
854 GET_MODE_BITSIZE (intermediate))))
855 && (can_extend_p (intermediate, from_mode, unsignedp)
856 != CODE_FOR_nothing))
858 convert_move (to, convert_to_mode (intermediate, from,
859 unsignedp), unsignedp);
860 return;
863 /* No suitable intermediate mode.
864 Generate what we need with shifts. */
865 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
866 - GET_MODE_BITSIZE (from_mode), 0);
867 from = gen_lowpart (to_mode, force_reg (from_mode, from));
868 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
869 to, unsignedp);
870 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
871 to, unsignedp);
872 if (tmp != to)
873 emit_move_insn (to, tmp);
874 return;
878 /* Support special truncate insns for certain modes. */
879 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
881 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
882 to, from, UNKNOWN);
883 return;
886 /* Handle truncation of volatile memrefs, and so on;
887 the things that couldn't be truncated directly,
888 and for which there was no special instruction.
890 ??? Code above formerly short-circuited this, for most integer
891 mode pairs, with a force_reg in from_mode followed by a recursive
892 call to this routine. Appears always to have been wrong. */
893 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
895 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
896 emit_move_insn (to, temp);
897 return;
900 /* Mode combination is not recognized. */
901 abort ();
904 /* Return an rtx for a value that would result
905 from converting X to mode MODE.
906 Both X and MODE may be floating, or both integer.
907 UNSIGNEDP is nonzero if X is an unsigned value.
908 This can be done by referring to a part of X in place
909 or by copying to a new temporary with conversion.
911 This function *must not* call protect_from_queue
912 except when putting X into an insn (in which case convert_move does it). */
915 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
917 return convert_modes (mode, VOIDmode, x, unsignedp);
920 /* Return an rtx for a value that would result
921 from converting X from mode OLDMODE to mode MODE.
922 Both modes may be floating, or both integer.
923 UNSIGNEDP is nonzero if X is an unsigned value.
925 This can be done by referring to a part of X in place
926 or by copying to a new temporary with conversion.
928 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
930 This function *must not* call protect_from_queue
931 except when putting X into an insn (in which case convert_move does it). */
934 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
936 rtx temp;
938 /* If FROM is a SUBREG that indicates that we have already done at least
939 the required extension, strip it. */
941 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
942 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
943 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
944 x = gen_lowpart (mode, x);
946 if (GET_MODE (x) != VOIDmode)
947 oldmode = GET_MODE (x);
949 if (mode == oldmode)
950 return x;
952 /* There is one case that we must handle specially: If we are converting
953 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
954 we are to interpret the constant as unsigned, gen_lowpart will do
955    the wrong thing if the constant appears negative.  What we want to do is
956 make the high-order word of the constant zero, not all ones. */
958 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
959 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
960 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
962 HOST_WIDE_INT val = INTVAL (x);
964 if (oldmode != VOIDmode
965 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
967 int width = GET_MODE_BITSIZE (oldmode);
969 /* We need to zero extend VAL. */
970 val &= ((HOST_WIDE_INT) 1 << width) - 1;
973 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
976 /* We can do this with a gen_lowpart if both desired and current modes
977 are integer, and this is either a constant integer, a register, or a
978 non-volatile MEM. Except for the constant case where MODE is no
979 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
981 if ((GET_CODE (x) == CONST_INT
982 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
983 || (GET_MODE_CLASS (mode) == MODE_INT
984 && GET_MODE_CLASS (oldmode) == MODE_INT
985 && (GET_CODE (x) == CONST_DOUBLE
986 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
987 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
988 && direct_load[(int) mode])
989 || (REG_P (x)
990 && (! HARD_REGISTER_P (x)
991 || HARD_REGNO_MODE_OK (REGNO (x), mode))
992 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
993 GET_MODE_BITSIZE (GET_MODE (x)))))))))
995 /* ?? If we don't know OLDMODE, we have to assume here that
996 X does not need sign- or zero-extension. This may not be
997 the case, but it's the best we can do. */
998 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
999 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1001 HOST_WIDE_INT val = INTVAL (x);
1002 int width = GET_MODE_BITSIZE (oldmode);
1004 /* We must sign or zero-extend in this case. Start by
1005 zero-extending, then sign extend if we need to. */
1006 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1007 if (! unsignedp
1008 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1009 val |= (HOST_WIDE_INT) (-1) << width;
1011 return gen_int_mode (val, mode);
1014 return gen_lowpart (mode, x);
1017   /* Converting from an integer constant into MODE is always equivalent to a
1018      subreg operation.  */
1019 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1021 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1022 abort ();
1023 return simplify_gen_subreg (mode, x, oldmode, 0);
1026 temp = gen_reg_rtx (mode);
1027 convert_move (temp, x, unsignedp);
1028 return temp;
1031 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1032 store efficiently. Due to internal GCC limitations, this is
1033 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1034 for an immediate constant. */
1036 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
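/* On a host with a 64-bit HOST_WIDE_INT, for instance, this works out to
   MIN (MOVE_MAX_PIECES, 16), i.e. at most 16 bytes per constant piece.  */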
1038 /* Determine whether the LEN bytes can be moved by using several move
1039 instructions. Return nonzero if a call to move_by_pieces should
1040 succeed. */
1043 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1044 unsigned int align ATTRIBUTE_UNUSED)
1046 return MOVE_BY_PIECES_P (len, align);
1049 /* Generate several move instructions to copy LEN bytes from block FROM to
1050 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1051 and TO through protect_from_queue before calling.
1053 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1054 used to push FROM to the stack.
1056 ALIGN is maximum stack alignment we can assume.
1058    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
1059    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1060    stpcpy.  */
1063 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1064 unsigned int align, int endp)
1066 struct move_by_pieces data;
1067 rtx to_addr, from_addr = XEXP (from, 0);
1068 unsigned int max_size = MOVE_MAX_PIECES + 1;
1069 enum machine_mode mode = VOIDmode, tmode;
1070 enum insn_code icode;
1072 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1074 data.offset = 0;
1075 data.from_addr = from_addr;
1076 if (to)
1078 to_addr = XEXP (to, 0);
1079 data.to = to;
1080 data.autinc_to
1081 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1082 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1083 data.reverse
1084 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1086 else
1088 to_addr = NULL_RTX;
1089 data.to = NULL_RTX;
1090 data.autinc_to = 1;
1091 #ifdef STACK_GROWS_DOWNWARD
1092 data.reverse = 1;
1093 #else
1094 data.reverse = 0;
1095 #endif
1097 data.to_addr = to_addr;
1098 data.from = from;
1099 data.autinc_from
1100 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1101 || GET_CODE (from_addr) == POST_INC
1102 || GET_CODE (from_addr) == POST_DEC);
1104 data.explicit_inc_from = 0;
1105 data.explicit_inc_to = 0;
1106 if (data.reverse) data.offset = len;
1107 data.len = len;
1109 /* If copying requires more than two move insns,
1110 copy addresses to registers (to make displacements shorter)
1111 and use post-increment if available. */
1112 if (!(data.autinc_from && data.autinc_to)
1113 && move_by_pieces_ninsns (len, align) > 2)
1115 /* Find the mode of the largest move... */
1116 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1117 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1118 if (GET_MODE_SIZE (tmode) < max_size)
1119 mode = tmode;
1121 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1123 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1124 data.autinc_from = 1;
1125 data.explicit_inc_from = -1;
1127 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1129 data.from_addr = copy_addr_to_reg (from_addr);
1130 data.autinc_from = 1;
1131 data.explicit_inc_from = 1;
1133 if (!data.autinc_from && CONSTANT_P (from_addr))
1134 data.from_addr = copy_addr_to_reg (from_addr);
1135 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1137 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1138 data.autinc_to = 1;
1139 data.explicit_inc_to = -1;
1141 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1143 data.to_addr = copy_addr_to_reg (to_addr);
1144 data.autinc_to = 1;
1145 data.explicit_inc_to = 1;
1147 if (!data.autinc_to && CONSTANT_P (to_addr))
1148 data.to_addr = copy_addr_to_reg (to_addr);
1151 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1152 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1153 align = MOVE_MAX * BITS_PER_UNIT;
1155 /* First move what we can in the largest integer mode, then go to
1156 successively smaller modes. */
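  /* For example, assuming sufficiently aligned operands on a 32-bit target,
     a 7-byte copy is typically emitted as one SImode move, one HImode move
     and one QImode move.  */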
1158 while (max_size > 1)
1160 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1161 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1162 if (GET_MODE_SIZE (tmode) < max_size)
1163 mode = tmode;
1165 if (mode == VOIDmode)
1166 break;
1168 icode = mov_optab->handlers[(int) mode].insn_code;
1169 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1170 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1172 max_size = GET_MODE_SIZE (mode);
1175 /* The code above should have handled everything. */
1176 if (data.len > 0)
1177 abort ();
1179 if (endp)
1181 rtx to1;
1183 if (data.reverse)
1184 abort ();
1185 if (data.autinc_to)
1187 if (endp == 2)
1189 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1190 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1191 else
1192 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1193 -1));
1195 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1196 data.offset);
1198 else
1200 if (endp == 2)
1201 --data.offset;
1202 to1 = adjust_address (data.to, QImode, data.offset);
1204 return to1;
1206 else
1207 return data.to;
1210 /* Return number of insns required to move L bytes by pieces.
1211 ALIGN (in bits) is maximum alignment we can assume. */
1213 static unsigned HOST_WIDE_INT
1214 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1216 unsigned HOST_WIDE_INT n_insns = 0;
1217 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1219 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1220 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1221 align = MOVE_MAX * BITS_PER_UNIT;
1223 while (max_size > 1)
1225 enum machine_mode mode = VOIDmode, tmode;
1226 enum insn_code icode;
1228 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1229 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1230 if (GET_MODE_SIZE (tmode) < max_size)
1231 mode = tmode;
1233 if (mode == VOIDmode)
1234 break;
1236 icode = mov_optab->handlers[(int) mode].insn_code;
1237 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1238 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1240 max_size = GET_MODE_SIZE (mode);
1243 if (l)
1244 abort ();
1245 return n_insns;
1248 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1249 with move instructions for mode MODE. GENFUN is the gen_... function
1250 to make a move insn for that mode. DATA has all the other info. */
1252 static void
1253 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1254 struct move_by_pieces *data)
1256 unsigned int size = GET_MODE_SIZE (mode);
1257 rtx to1 = NULL_RTX, from1;
1259 while (data->len >= size)
1261 if (data->reverse)
1262 data->offset -= size;
1264 if (data->to)
1266 if (data->autinc_to)
1267 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1268 data->offset);
1269 else
1270 to1 = adjust_address (data->to, mode, data->offset);
1273 if (data->autinc_from)
1274 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1275 data->offset);
1276 else
1277 from1 = adjust_address (data->from, mode, data->offset);
1279 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1280 emit_insn (gen_add2_insn (data->to_addr,
1281 GEN_INT (-(HOST_WIDE_INT)size)));
1282 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1283 emit_insn (gen_add2_insn (data->from_addr,
1284 GEN_INT (-(HOST_WIDE_INT)size)));
1286 if (data->to)
1287 emit_insn ((*genfun) (to1, from1));
1288 else
1290 #ifdef PUSH_ROUNDING
1291 emit_single_push_insn (mode, from1, NULL);
1292 #else
1293 abort ();
1294 #endif
1297 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1298 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1299 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1300 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1302 if (! data->reverse)
1303 data->offset += size;
1305 data->len -= size;
1309 /* Emit code to move a block Y to a block X. This may be done with
1310 string-move instructions, with multiple scalar move instructions,
1311 or with a library call.
1313 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1314 SIZE is an rtx that says how long they are.
1315 ALIGN is the maximum alignment we can assume they have.
1316 METHOD describes what kind of copy this is, and what mechanisms may be used.
1318 Return the address of the new block, if memcpy is called and returns it,
1319 0 otherwise. */
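/* A typical call is emit_block_move (dst, src, GEN_INT (len),
   BLOCK_OP_NORMAL), where DST and SRC are BLKmode MEM rtx's.  */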
1322 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1324 bool may_use_call;
1325 rtx retval = 0;
1326 unsigned int align;
1328 switch (method)
1330 case BLOCK_OP_NORMAL:
1331 may_use_call = true;
1332 break;
1334 case BLOCK_OP_CALL_PARM:
1335 may_use_call = block_move_libcall_safe_for_call_parm ();
1337 /* Make inhibit_defer_pop nonzero around the library call
1338 to force it to pop the arguments right away. */
1339 NO_DEFER_POP;
1340 break;
1342 case BLOCK_OP_NO_LIBCALL:
1343 may_use_call = false;
1344 break;
1346 default:
1347 abort ();
1350 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1352 if (GET_MODE (x) != BLKmode)
1353 abort ();
1354 if (GET_MODE (y) != BLKmode)
1355 abort ();
1357 x = protect_from_queue (x, 1);
1358 y = protect_from_queue (y, 0);
1359 size = protect_from_queue (size, 0);
1361 if (GET_CODE (x) != MEM)
1362 abort ();
1363 if (GET_CODE (y) != MEM)
1364 abort ();
1365 if (size == 0)
1366 abort ();
1368 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1369 can be incorrect is coming from __builtin_memcpy. */
1370 if (GET_CODE (size) == CONST_INT)
1372 if (INTVAL (size) == 0)
1373 return 0;
1375 x = shallow_copy_rtx (x);
1376 y = shallow_copy_rtx (y);
1377 set_mem_size (x, size);
1378 set_mem_size (y, size);
1381 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1382 move_by_pieces (x, y, INTVAL (size), align, 0);
1383 else if (emit_block_move_via_movstr (x, y, size, align))
1385 else if (may_use_call)
1386 retval = emit_block_move_via_libcall (x, y, size);
1387 else
1388 emit_block_move_via_loop (x, y, size, align);
1390 if (method == BLOCK_OP_CALL_PARM)
1391 OK_DEFER_POP;
1393 return retval;
1396 /* A subroutine of emit_block_move. Returns true if calling the
1397 block move libcall will not clobber any parameters which may have
1398 already been placed on the stack. */
1400 static bool
1401 block_move_libcall_safe_for_call_parm (void)
1403 /* If arguments are pushed on the stack, then they're safe. */
1404 if (PUSH_ARGS)
1405 return true;
1407 /* If registers go on the stack anyway, any argument is sure to clobber
1408 an outgoing argument. */
1409 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1411 tree fn = emit_block_move_libcall_fn (false);
1412 (void) fn;
1413 if (REG_PARM_STACK_SPACE (fn) != 0)
1414 return false;
1416 #endif
1418 /* If any argument goes in memory, then it might clobber an outgoing
1419 argument. */
1421 CUMULATIVE_ARGS args_so_far;
1422 tree fn, arg;
1424 fn = emit_block_move_libcall_fn (false);
1425 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1427 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1428 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1430 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1431 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1432 if (!tmp || !REG_P (tmp))
1433 return false;
1434 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1435 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1436 NULL_TREE, 1))
1437 return false;
1438 #endif
1439 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1442 return true;
1445 /* A subroutine of emit_block_move. Expand a movstr pattern;
1446 return true if successful. */
1448 static bool
1449 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1451 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1452 int save_volatile_ok = volatile_ok;
1453 enum machine_mode mode;
1455 /* Since this is a move insn, we don't care about volatility. */
1456 volatile_ok = 1;
1458 /* Try the most limited insn first, because there's no point
1459 including more than one in the machine description unless
1460 the more limited one has some advantage. */
1462 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1463 mode = GET_MODE_WIDER_MODE (mode))
1465 enum insn_code code = movstr_optab[(int) mode];
1466 insn_operand_predicate_fn pred;
1468 if (code != CODE_FOR_nothing
1469        /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1470 here because if SIZE is less than the mode mask, as it is
1471 returned by the macro, it will definitely be less than the
1472 actual mode mask. */
1473 && ((GET_CODE (size) == CONST_INT
1474 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1475 <= (GET_MODE_MASK (mode) >> 1)))
1476 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1477 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1478 || (*pred) (x, BLKmode))
1479 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1480 || (*pred) (y, BLKmode))
1481 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1482 || (*pred) (opalign, VOIDmode)))
1484 rtx op2;
1485 rtx last = get_last_insn ();
1486 rtx pat;
1488 op2 = convert_to_mode (mode, size, 1);
1489 pred = insn_data[(int) code].operand[2].predicate;
1490 if (pred != 0 && ! (*pred) (op2, mode))
1491 op2 = copy_to_mode_reg (mode, op2);
1493 /* ??? When called via emit_block_move_for_call, it'd be
1494 nice if there were some way to inform the backend, so
1495 that it doesn't fail the expansion because it thinks
1496 emitting the libcall would be more efficient. */
1498 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1499 if (pat)
1501 emit_insn (pat);
1502 volatile_ok = save_volatile_ok;
1503 return true;
1505 else
1506 delete_insns_since (last);
1510 volatile_ok = save_volatile_ok;
1511 return false;
1514 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1515 Return the return value from memcpy, 0 otherwise. */
1517 static rtx
1518 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1520 rtx dst_addr, src_addr;
1521 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1522 enum machine_mode size_mode;
1523 rtx retval;
1525 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1527 It is unsafe to save the value generated by protect_from_queue and reuse
1528 it later. Consider what happens if emit_queue is called before the
1529 return value from protect_from_queue is used.
1531 Expansion of the CALL_EXPR below will call emit_queue before we are
1532 finished emitting RTL for argument setup. So if we are not careful we
1533 could get the wrong value for an argument.
1535 To avoid this problem we go ahead and emit code to copy the addresses of
1536 DST and SRC and SIZE into new pseudos. We can then place those new
1537 pseudos into an RTL_EXPR and use them later, even after a call to
1538 emit_queue.
1540 Note this is not strictly needed for library calls since they do not call
1541 emit_queue before loading their arguments. However, we may need to have
1542 library calls call emit_queue in the future since failing to do so could
1543 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1544 arguments in registers. */
1546 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1547 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1549 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1550 src_addr = convert_memory_address (ptr_mode, src_addr);
1552 dst_tree = make_tree (ptr_type_node, dst_addr);
1553 src_tree = make_tree (ptr_type_node, src_addr);
1555 if (TARGET_MEM_FUNCTIONS)
1556 size_mode = TYPE_MODE (sizetype);
1557 else
1558 size_mode = TYPE_MODE (unsigned_type_node);
1560 size = convert_to_mode (size_mode, size, 1);
1561 size = copy_to_mode_reg (size_mode, size);
1563 /* It is incorrect to use the libcall calling conventions to call
1564 memcpy in this context. This could be a user call to memcpy and
1565 the user may wish to examine the return value from memcpy. For
1566 targets where libcalls and normal calls have different conventions
1567 for returning pointers, we could end up generating incorrect code.
1569 For convenience, we generate the call to bcopy this way as well. */
1571 if (TARGET_MEM_FUNCTIONS)
1572 size_tree = make_tree (sizetype, size);
1573 else
1574 size_tree = make_tree (unsigned_type_node, size);
1576 fn = emit_block_move_libcall_fn (true);
1577 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1578 if (TARGET_MEM_FUNCTIONS)
1580 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1581 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1583 else
1585 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1586 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1589 /* Now we have to build up the CALL_EXPR itself. */
1590 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1591 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1592 call_expr, arg_list, NULL_TREE);
1594 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1596 /* If we are initializing a readonly value, show the above call clobbered
1597 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1598      the delay slot scheduler might overlook conflicts and make bad
1599 decisions. */
1600 if (RTX_UNCHANGING_P (dst))
1601 add_function_usage_to
1602 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1603 gen_rtx_CLOBBER (VOIDmode, dst),
1604 NULL_RTX));
1606 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1609 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1610 for the function we use for block copies. The first time FOR_CALL
1611 is true, we call assemble_external. */
1613 static GTY(()) tree block_move_fn;
1615 void
1616 init_block_move_fn (const char *asmspec)
1618 if (!block_move_fn)
1620 tree args, fn;
1622 if (TARGET_MEM_FUNCTIONS)
1624 fn = get_identifier ("memcpy");
1625 args = build_function_type_list (ptr_type_node, ptr_type_node,
1626 const_ptr_type_node, sizetype,
1627 NULL_TREE);
1629 else
1631 fn = get_identifier ("bcopy");
1632 args = build_function_type_list (void_type_node, const_ptr_type_node,
1633 ptr_type_node, unsigned_type_node,
1634 NULL_TREE);
1637 fn = build_decl (FUNCTION_DECL, fn, args);
1638 DECL_EXTERNAL (fn) = 1;
1639 TREE_PUBLIC (fn) = 1;
1640 DECL_ARTIFICIAL (fn) = 1;
1641 TREE_NOTHROW (fn) = 1;
1643 block_move_fn = fn;
1646 if (asmspec)
1648 SET_DECL_RTL (block_move_fn, NULL_RTX);
1649 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1653 static tree
1654 emit_block_move_libcall_fn (int for_call)
1656 static bool emitted_extern;
1658 if (!block_move_fn)
1659 init_block_move_fn (NULL);
1661 if (for_call && !emitted_extern)
1663 emitted_extern = true;
1664 make_decl_rtl (block_move_fn, NULL);
1665 assemble_external (block_move_fn);
1668 return block_move_fn;
1671 /* A subroutine of emit_block_move. Copy the data via an explicit
1672 loop. This is used only when libcalls are forbidden. */
1673 /* ??? It'd be nice to copy in hunks larger than QImode. */
1675 static void
1676 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1677 unsigned int align ATTRIBUTE_UNUSED)
1679 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1680 enum machine_mode iter_mode;
1682 iter_mode = GET_MODE (size);
1683 if (iter_mode == VOIDmode)
1684 iter_mode = word_mode;
1686 top_label = gen_label_rtx ();
1687 cmp_label = gen_label_rtx ();
1688 iter = gen_reg_rtx (iter_mode);
1690 emit_move_insn (iter, const0_rtx);
1692 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1693 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1694 do_pending_stack_adjust ();
1696 emit_jump (cmp_label);
1697 emit_label (top_label);
1699 tmp = convert_modes (Pmode, iter_mode, iter, true);
1700 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1701 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1702 x = change_address (x, QImode, x_addr);
1703 y = change_address (y, QImode, y_addr);
1705 emit_move_insn (x, y);
1707 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1708 true, OPTAB_LIB_WIDEN);
1709 if (tmp != iter)
1710 emit_move_insn (iter, tmp);
1712 emit_label (cmp_label);
1714 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1715 true, top_label);
1718 /* Copy all or part of a value X into registers starting at REGNO.
1719 The number of registers to be filled is NREGS. */
1721 void
1722 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1724 int i;
1725 #ifdef HAVE_load_multiple
1726 rtx pat;
1727 rtx last;
1728 #endif
1730 if (nregs == 0)
1731 return;
1733 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1734 x = validize_mem (force_const_mem (mode, x));
1736 /* See if the machine can do this with a load multiple insn. */
1737 #ifdef HAVE_load_multiple
1738 if (HAVE_load_multiple)
1740 last = get_last_insn ();
1741 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1742 GEN_INT (nregs));
1743 if (pat)
1745 emit_insn (pat);
1746 return;
1748 else
1749 delete_insns_since (last);
1751 #endif
1753 for (i = 0; i < nregs; i++)
1754 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1755 operand_subword_force (x, i, mode));
1758 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1759 The number of registers to be filled is NREGS. */
1761 void
1762 move_block_from_reg (int regno, rtx x, int nregs)
1764 int i;
1766 if (nregs == 0)
1767 return;
1769 /* See if the machine can do this with a store multiple insn. */
1770 #ifdef HAVE_store_multiple
1771 if (HAVE_store_multiple)
1773 rtx last = get_last_insn ();
1774 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1775 GEN_INT (nregs));
1776 if (pat)
1778 emit_insn (pat);
1779 return;
1781 else
1782 delete_insns_since (last);
1784 #endif
1786 for (i = 0; i < nregs; i++)
1788 rtx tem = operand_subword (x, i, 1, BLKmode);
1790 if (tem == 0)
1791 abort ();
1793 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1797 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1798 ORIG, where ORIG is a non-consecutive group of registers represented by
1799 a PARALLEL. The clone is identical to the original except in that the
1800 original set of registers is replaced by a new set of pseudo registers.
1801 The new set has the same modes as the original set. */
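/* The PARALLEL built here (and consumed by emit_group_load and
   emit_group_store below) has roughly the form
   (parallel [(expr_list (reg:M1 R1) (const_int 0))
              (expr_list (reg:M2 R2) (const_int OFFSET)) ...])
   where each CONST_INT gives the byte offset of that register's piece
   within the block; the first slot may instead be null when the value
   lives partly on the stack.  */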
1804 gen_group_rtx (rtx orig)
1806 int i, length;
1807 rtx *tmps;
1809 if (GET_CODE (orig) != PARALLEL)
1810 abort ();
1812 length = XVECLEN (orig, 0);
1813 tmps = alloca (sizeof (rtx) * length);
1815 /* Skip a NULL entry in first slot. */
1816 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1818 if (i)
1819 tmps[0] = 0;
1821 for (; i < length; i++)
1823 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1824 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1826 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1829 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1832 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1833 where DST is non-consecutive registers represented by a PARALLEL.
1834 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1835 if not known. */
1837 void
1838 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1840 rtx *tmps, src;
1841 int start, i;
1843 if (GET_CODE (dst) != PARALLEL)
1844 abort ();
1846 /* Check for a NULL entry, used to indicate that the parameter goes
1847 both on the stack and in registers. */
1848 if (XEXP (XVECEXP (dst, 0, 0), 0))
1849 start = 0;
1850 else
1851 start = 1;
1853 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1855 /* Process the pieces. */
1856 for (i = start; i < XVECLEN (dst, 0); i++)
1858 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1859 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1860 unsigned int bytelen = GET_MODE_SIZE (mode);
1861 int shift = 0;
1863 /* Handle trailing fragments that run over the size of the struct. */
1864 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1866 /* Arrange to shift the fragment to where it belongs.
1867 extract_bit_field loads to the lsb of the reg. */
1868 if (
1869 #ifdef BLOCK_REG_PADDING
1870 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1871 == (BYTES_BIG_ENDIAN ? upward : downward)
1872 #else
1873 BYTES_BIG_ENDIAN
1874 #endif
1876 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1877 bytelen = ssize - bytepos;
1878 if (bytelen <= 0)
1879 abort ();
1882 /* If we won't be loading directly from memory, protect the real source
1883 from strange tricks we might play; but make sure that the source can
1884 be loaded directly into the destination. */
1885 src = orig_src;
1886 if (GET_CODE (orig_src) != MEM
1887 && (!CONSTANT_P (orig_src)
1888 || (GET_MODE (orig_src) != mode
1889 && GET_MODE (orig_src) != VOIDmode)))
1891 if (GET_MODE (orig_src) == VOIDmode)
1892 src = gen_reg_rtx (mode);
1893 else
1894 src = gen_reg_rtx (GET_MODE (orig_src));
1896 emit_move_insn (src, orig_src);
1899 /* Optimize the access just a bit. */
1900 if (GET_CODE (src) == MEM
1901 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1902 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1903 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1904 && bytelen == GET_MODE_SIZE (mode))
1906 tmps[i] = gen_reg_rtx (mode);
1907 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1909 else if (GET_CODE (src) == CONCAT)
1911 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1912 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1914 if ((bytepos == 0 && bytelen == slen0)
1915 || (bytepos != 0 && bytepos + bytelen <= slen))
1917 /* The following assumes that the concatenated objects all
1918 have the same size. In this case, a simple calculation
1919 can be used to determine the object and the bit field
1920 to be extracted. */
1921 tmps[i] = XEXP (src, bytepos / slen0);
1922 if (! CONSTANT_P (tmps[i])
1923 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1924 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1925 (bytepos % slen0) * BITS_PER_UNIT,
1926 1, NULL_RTX, mode, mode, ssize);
1928 else if (bytepos == 0)
1930 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1931 emit_move_insn (mem, src);
1932 tmps[i] = adjust_address (mem, mode, 0);
1934 else
1935 abort ();
1937 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1938 SIMD register, which is currently broken. While we get GCC
1939 to emit proper RTL for these cases, let's dump to memory. */
1940 else if (VECTOR_MODE_P (GET_MODE (dst))
1941 && REG_P (src))
1943 int slen = GET_MODE_SIZE (GET_MODE (src));
1944 rtx mem;
1946 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1947 emit_move_insn (mem, src);
1948 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1950 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1951 && XVECLEN (dst, 0) > 1)
1952 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1953 else if (CONSTANT_P (src)
1954 || (REG_P (src) && GET_MODE (src) == mode))
1955 tmps[i] = src;
1956 else
1957 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1958 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1959 mode, mode, ssize);
1961 if (shift)
1962 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1963 tmps[i], 0, OPTAB_WIDEN);
1966 emit_queue ();
1968 /* Copy the extracted pieces into the proper (probable) hard regs. */
1969 for (i = start; i < XVECLEN (dst, 0); i++)
1970 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
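/* A rough sketch of a call to the routine above: loading a 12-byte
   structure that the ABI splits across two registers might look like

     emit_group_load (dst, src_mem, type, 12);

   where DST is the PARALLEL describing the registers and byte offsets.
   Each piece is pulled out with a plain move when SRC is suitably
   aligned memory, and with extract_bit_field otherwise; a trailing
   fragment shorter than a full piece is shifted into the position the
   ABI padding rules expect before the final register copies.  */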
1973 /* Emit code to move a block SRC to block DST, where SRC and DST are
1974 non-consecutive groups of registers, each represented by a PARALLEL. */
1976 void
1977 emit_group_move (rtx dst, rtx src)
1979 int i;
1981 if (GET_CODE (src) != PARALLEL
1982 || GET_CODE (dst) != PARALLEL
1983 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1984 abort ();
1986 /* Skip first entry if NULL. */
1987 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1988 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1989 XEXP (XVECEXP (src, 0, i), 0));
1992 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1993 where SRC is non-consecutive registers represented by a PARALLEL.
1994 SSIZE represents the total size of block ORIG_DST, or -1 if not
1995 known. */
1997 void
1998 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2000 rtx *tmps, dst;
2001 int start, i;
2003 if (GET_CODE (src) != PARALLEL)
2004 abort ();
2006 /* Check for a NULL entry, used to indicate that the parameter goes
2007 both on the stack and in registers. */
2008 if (XEXP (XVECEXP (src, 0, 0), 0))
2009 start = 0;
2010 else
2011 start = 1;
2013 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2015 /* Copy the (probable) hard regs into pseudos. */
2016 for (i = start; i < XVECLEN (src, 0); i++)
2018 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2019 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2020 emit_move_insn (tmps[i], reg);
2022 emit_queue ();
2024 /* If we won't be storing directly into memory, protect the real destination
2025 from strange tricks we might play. */
2026 dst = orig_dst;
2027 if (GET_CODE (dst) == PARALLEL)
2029 rtx temp;
2031 /* We can get a PARALLEL dst if there is a conditional expression in
2032 a return statement. In that case, the dst and src are the same,
2033 so no action is necessary. */
2034 if (rtx_equal_p (dst, src))
2035 return;
2037 /* It is unclear if we can ever reach here, but we may as well handle
2038 it. Allocate a temporary, and split this into a store/load to/from
2039 the temporary. */
2041 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2042 emit_group_store (temp, src, type, ssize);
2043 emit_group_load (dst, temp, type, ssize);
2044 return;
2046 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2048 dst = gen_reg_rtx (GET_MODE (orig_dst));
2049 /* Make life a bit easier for combine. */
2050 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2053 /* Process the pieces. */
2054 for (i = start; i < XVECLEN (src, 0); i++)
2056 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2057 enum machine_mode mode = GET_MODE (tmps[i]);
2058 unsigned int bytelen = GET_MODE_SIZE (mode);
2059 rtx dest = dst;
2061 /* Handle trailing fragments that run over the size of the struct. */
2062 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2064 /* store_bit_field always takes its value from the lsb.
2065 Move the fragment to the lsb if it's not already there. */
2066 if (
2067 #ifdef BLOCK_REG_PADDING
2068 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2069 == (BYTES_BIG_ENDIAN ? upward : downward)
2070 #else
2071 BYTES_BIG_ENDIAN
2072 #endif
2075 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2076 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2077 tmps[i], 0, OPTAB_WIDEN);
2079 bytelen = ssize - bytepos;
2082 if (GET_CODE (dst) == CONCAT)
2084 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 dest = XEXP (dst, 0);
2086 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2088 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2089 dest = XEXP (dst, 1);
2091 else if (bytepos == 0 && XVECLEN (src, 0))
2093 dest = assign_stack_temp (GET_MODE (dest),
2094 GET_MODE_SIZE (GET_MODE (dest)), 0);
2095 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2096 tmps[i]);
2097 dst = dest;
2098 break;
2100 else
2101 abort ();
2104 /* Optimize the access just a bit. */
2105 if (GET_CODE (dest) == MEM
2106 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2107 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2108 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2109 && bytelen == GET_MODE_SIZE (mode))
2110 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2111 else
2112 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2113 mode, tmps[i], ssize);
2116 emit_queue ();
2118 /* Copy from the pseudo into the (probable) hard reg. */
2119 if (orig_dst != dst)
2120 emit_move_insn (orig_dst, dst);
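/* Roughly the mirror image of emit_group_load: the register pieces are
   first copied into pseudos, then written into ORIG_DST either with
   plain moves (for aligned MEM pieces) or with store_bit_field, and a
   trailing fragment is first shifted down (arithmetic right) so its
   bytes sit in the least significant bits before being stored.  A
   typical caller passes the PARALLEL that describes how a function
   result or argument is split across registers.  */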
2123 /* Generate code to copy a BLKmode object of TYPE out of a
2124 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2125 is null, a stack temporary is created. TGTBLK is returned.
2127 The purpose of this routine is to handle functions that return
2128 BLKmode structures in registers. Some machines (the PA for example)
2129 want to return all small structures in registers regardless of the
2130 structure's alignment. */
2133 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2140 if (tgtblk == 0)
2142 tgtblk = assign_temp (build_qualified_type (type,
2143 (TYPE_QUALS (type)
2144 | TYPE_QUAL_CONST)),
2145 0, 1, 1);
2146 preserve_temp_slots (tgtblk);
2149 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2150 into a new pseudo which is a full word. */
2152 if (GET_MODE (srcreg) != BLKmode
2153 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2154 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2156 /* If the structure doesn't take up a whole number of words, see whether
2157 SRCREG is padded on the left or on the right. If it's on the left,
2158 set PADDING_CORRECTION to the number of bits to skip.
2160 In most ABIs, the structure will be returned at the least significant end of
2161 the register, which translates to right padding on little-endian
2162 targets and left padding on big-endian targets. The opposite
2163 holds if the structure is returned at the most significant
2164 end of the register. */
2165 if (bytes % UNITS_PER_WORD != 0
2166 && (targetm.calls.return_in_msb (type)
2167 ? !BYTES_BIG_ENDIAN
2168 : BYTES_BIG_ENDIAN))
2169 padding_correction
2170 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2172 /* Copy the structure BITSIZE bits at a time.
2174 We could probably emit more efficient code for machines which do not use
2175 strict alignment, but it doesn't seem worth the effort at the current
2176 time. */
2177 for (bitpos = 0, xbitpos = padding_correction;
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2181 /* We need a new source operand each time xbitpos is on a
2182 word boundary and when xbitpos == padding_correction
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
2185 || xbitpos == padding_correction)
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2187 GET_MODE (srcreg));
2189 /* We need a new destination operand each time bitpos is on
2190 a word boundary. */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2194 /* Use xbitpos for the source extraction (right justified) and
2195 bitpos for the destination store (left justified). */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, word_mode, word_mode,
2200 BITS_PER_WORD),
2201 BITS_PER_WORD);
2204 return tgtblk;
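/* For example, a 6-byte struct returned in a single 64-bit register
   would be copied out here one BITSIZE chunk at a time: each iteration
   extracts a chunk from the current source word with extract_bit_field
   and deposits it into the target block with store_bit_field, while
   PADDING_CORRECTION accounts for the unused bits of the final word
   (left padding on big-endian targets under the usual return-in-lsb
   convention, as described above).  */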
2207 /* Add a USE expression for REG to the (possibly empty) list pointed
2208 to by CALL_FUSAGE. REG must denote a hard register. */
2210 void
2211 use_reg (rtx *call_fusage, rtx reg)
2213 if (!REG_P (reg)
2214 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2215 abort ();
2217 *call_fusage
2218 = gen_rtx_EXPR_LIST (VOIDmode,
2219 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2222 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2223 starting at REGNO. All of these registers must be hard registers. */
2225 void
2226 use_regs (rtx *call_fusage, int regno, int nregs)
2228 int i;
2230 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2231 abort ();
2233 for (i = 0; i < nregs; i++)
2234 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2237 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2238 PARALLEL REGS. This is for calls that pass values in multiple
2239 non-contiguous locations. The Irix 6 ABI has examples of this. */
2241 void
2242 use_group_regs (rtx *call_fusage, rtx regs)
2244 int i;
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
2253 if (reg != 0 && REG_P (reg))
2254 use_reg (call_fusage, reg);
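/* A call site would typically do something like

     rtx call_fusage = NULL_RTX;
     use_group_regs (&call_fusage, parallel_arg);

   so that every hard register in the group is recorded as used by the
   call; entries that are NULL or a MEM (the stack parts) are simply
   skipped.  */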
2259 /* Determine whether the LEN bytes generated by CONSTFUN can be
2260 stored to memory using several move instructions. CONSTFUNDATA is
2261 a pointer which will be passed as argument in every CONSTFUN call.
2262 ALIGN is maximum alignment we can assume. Return nonzero if a
2263 call to store_by_pieces should succeed. */
2266 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2267 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2268 void *constfundata, unsigned int align)
2270 unsigned HOST_WIDE_INT max_size, l;
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
2274 int reverse;
2275 rtx cst;
2277 if (len == 0)
2278 return 1;
2280 if (! STORE_BY_PIECES_P (len, align))
2281 return 0;
2283 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2284 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2285 align = MOVE_MAX * BITS_PER_UNIT;
2287 /* We would first store what we can in the largest integer mode, then go to
2288 successively smaller modes. */
2290 for (reverse = 0;
2291 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2292 reverse++)
2294 l = len;
2295 mode = VOIDmode;
2296 max_size = STORE_MAX_PIECES + 1;
2297 while (max_size > 1)
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2302 mode = tmode;
2304 if (mode == VOIDmode)
2305 break;
2307 icode = mov_optab->handlers[(int) mode].insn_code;
2308 if (icode != CODE_FOR_nothing
2309 && align >= GET_MODE_ALIGNMENT (mode))
2311 unsigned int size = GET_MODE_SIZE (mode);
2313 while (l >= size)
2315 if (reverse)
2316 offset -= size;
2318 cst = (*constfun) (constfundata, offset, mode);
2319 if (!LEGITIMATE_CONSTANT_P (cst))
2320 return 0;
2322 if (!reverse)
2323 offset += size;
2325 l -= size;
2329 max_size = GET_MODE_SIZE (mode);
2332 /* The code above should have handled everything. */
2333 if (l != 0)
2334 abort ();
2337 return 1;
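/* A sketch of the CONSTFUN interface assumed above: the callback is
   handed (CONSTFUNDATA, OFFSET, MODE) and must return an rtx constant
   of mode MODE holding the bytes that belong at OFFSET, for instance a
   hypothetical

     static rtx repeat_byte_fn (void *data, HOST_WIDE_INT offset,
                                enum machine_mode mode);

   that expands a single fill byte to MODE's width.  can_store_by_pieces
   performs the same largest-to-smallest mode walk as store_by_pieces but
   emits no insns; it only verifies that each constant the callback
   produces satisfies LEGITIMATE_CONSTANT_P.  */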
2340 /* Generate several move instructions to store LEN bytes generated by
2341 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2342 pointer which will be passed as argument in every CONSTFUN call.
2343 ALIGN is maximum alignment we can assume.
2344 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2345 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2346 stpcpy. */
2349 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2350 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2351 void *constfundata, unsigned int align, int endp)
2353 struct store_by_pieces data;
2355 if (len == 0)
2357 if (endp == 2)
2358 abort ();
2359 return to;
2362 if (! STORE_BY_PIECES_P (len, align))
2363 abort ();
2364 to = protect_from_queue (to, 1);
2365 data.constfun = constfun;
2366 data.constfundata = constfundata;
2367 data.len = len;
2368 data.to = to;
2369 store_by_pieces_1 (&data, align);
2370 if (endp)
2372 rtx to1;
2374 if (data.reverse)
2375 abort ();
2376 if (data.autinc_to)
2378 if (endp == 2)
2380 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2381 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2382 else
2383 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2384 -1));
2386 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2387 data.offset);
2389 else
2391 if (endp == 2)
2392 --data.offset;
2393 to1 = adjust_address (data.to, QImode, data.offset);
2395 return to1;
2397 else
2398 return data.to;
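/* A hypothetical use, with CONSTFUN/CONSTFUNDATA as sketched above:

     rtx end = store_by_pieces (dest_mem, 16, repeat_byte_fn, &fill_byte,
                                MEM_ALIGN (dest_mem), 1);

   stores 16 bytes and, because ENDP is 1, returns a MEM addressing the
   first byte past the stored block (the mempcpy convention).  With ENDP
   of 0 the original TO is returned, and with 2 the address of the last
   stored byte (the stpcpy convention).  */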
2401 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling. ALIGN is maximum alignment we can assume. */
2405 static void
2406 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2408 struct store_by_pieces data;
2410 if (len == 0)
2411 return;
2413 data.constfun = clear_by_pieces_1;
2414 data.constfundata = NULL;
2415 data.len = len;
2416 data.to = to;
2417 store_by_pieces_1 (&data, align);
2420 /* Callback routine for clear_by_pieces.
2421 Return const0_rtx unconditionally. */
2423 static rtx
2424 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2425 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2426 enum machine_mode mode ATTRIBUTE_UNUSED)
2428 return const0_rtx;
2431 /* Subroutine of clear_by_pieces and store_by_pieces.
2432 Generate several move instructions to store LEN bytes of block TO. (A MEM
2433 rtx with BLKmode). The caller must pass TO through protect_from_queue
2434 before calling. ALIGN is maximum alignment we can assume. */
2436 static void
2437 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2438 unsigned int align ATTRIBUTE_UNUSED)
2440 rtx to_addr = XEXP (data->to, 0);
2441 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2442 enum machine_mode mode = VOIDmode, tmode;
2443 enum insn_code icode;
2445 data->offset = 0;
2446 data->to_addr = to_addr;
2447 data->autinc_to
2448 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2449 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2451 data->explicit_inc_to = 0;
2452 data->reverse
2453 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2454 if (data->reverse)
2455 data->offset = data->len;
2457 /* If storing requires more than two move insns,
2458 copy addresses to registers (to make displacements shorter)
2459 and use post-increment if available. */
2460 if (!data->autinc_to
2461 && move_by_pieces_ninsns (data->len, align) > 2)
2463 /* Determine the main mode we'll be using. */
2464 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2465 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2466 if (GET_MODE_SIZE (tmode) < max_size)
2467 mode = tmode;
2469 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2471 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2472 data->autinc_to = 1;
2473 data->explicit_inc_to = -1;
2476 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2477 && ! data->autinc_to)
2479 data->to_addr = copy_addr_to_reg (to_addr);
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = 1;
2484 if (!data->autinc_to && CONSTANT_P (to_addr))
2485 data->to_addr = copy_addr_to_reg (to_addr);
2488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2490 align = MOVE_MAX * BITS_PER_UNIT;
2492 /* First store what we can in the largest integer mode, then go to
2493 successively smaller modes. */
2495 while (max_size > 1)
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) < max_size)
2500 mode = tmode;
2502 if (mode == VOIDmode)
2503 break;
2505 icode = mov_optab->handlers[(int) mode].insn_code;
2506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2507 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2509 max_size = GET_MODE_SIZE (mode);
2512 /* The code above should have handled everything. */
2513 if (data->len != 0)
2514 abort ();
2517 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2521 static void
2522 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
2525 unsigned int size = GET_MODE_SIZE (mode);
2526 rtx to1, cst;
2528 while (data->len >= size)
2530 if (data->reverse)
2531 data->offset -= size;
2533 if (data->autinc_to)
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2535 data->offset);
2536 else
2537 to1 = adjust_address (data->to, mode, data->offset);
2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2549 if (! data->reverse)
2550 data->offset += size;
2552 data->len -= size;
2556 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2557 its length in bytes. */
2560 clear_storage (rtx object, rtx size)
2562 rtx retval = 0;
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
2568 if (GET_MODE (object) != BLKmode
2569 && GET_CODE (size) == CONST_INT
2570 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2572 else
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2577 if (size == const0_rtx)
2579 else if (GET_CODE (size) == CONST_INT
2580 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2581 clear_by_pieces (object, INTVAL (size), align);
2582 else if (clear_storage_via_clrstr (object, size, align))
2584 else
2585 retval = clear_storage_via_libcall (object, size);
2588 return retval;
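/* In outline, clear_storage picks the cheapest available strategy: a
   single move of CONST0_RTX for non-BLKmode objects of exactly their
   mode's size, an inline clear_by_pieces loop when CLEAR_BY_PIECES_P
   approves the constant size, a machine clrstr pattern if one matches,
   and finally a library call to memset (or bzero).  Only the libcall
   path can produce a nonzero return value.  */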
2591 /* A subroutine of clear_storage. Expand a clrstr pattern;
2592 return true if successful. */
2594 static bool
2595 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2597 /* Try the most limited insn first, because there's no point
2598 including more than one in the machine description unless
2599 the more limited one has some advantage. */
2601 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2602 enum machine_mode mode;
2604 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2605 mode = GET_MODE_WIDER_MODE (mode))
2607 enum insn_code code = clrstr_optab[(int) mode];
2608 insn_operand_predicate_fn pred;
2610 if (code != CODE_FOR_nothing
2611 /* We don't need MODE to be narrower than
2612 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2613 the mode mask, as it is returned by the macro, it will
2614 definitely be less than the actual mode mask. */
2615 && ((GET_CODE (size) == CONST_INT
2616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2617 <= (GET_MODE_MASK (mode) >> 1)))
2618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2619 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2620 || (*pred) (object, BLKmode))
2621 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2622 || (*pred) (opalign, VOIDmode)))
2624 rtx op1;
2625 rtx last = get_last_insn ();
2626 rtx pat;
2628 op1 = convert_to_mode (mode, size, 1);
2629 pred = insn_data[(int) code].operand[1].predicate;
2630 if (pred != 0 && ! (*pred) (op1, mode))
2631 op1 = copy_to_mode_reg (mode, op1);
2633 pat = GEN_FCN ((int) code) (object, op1, opalign);
2634 if (pat)
2636 emit_insn (pat);
2637 return true;
2639 else
2640 delete_insns_since (last);
2644 return false;
2647 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2648 Return the return value of memset, 0 otherwise. */
2650 static rtx
2651 clear_storage_via_libcall (rtx object, rtx size)
2653 tree call_expr, arg_list, fn, object_tree, size_tree;
2654 enum machine_mode size_mode;
2655 rtx retval;
2657 /* OBJECT or SIZE may have been passed through protect_from_queue.
2659 It is unsafe to save the value generated by protect_from_queue
2660 and reuse it later. Consider what happens if emit_queue is
2661 called before the return value from protect_from_queue is used.
2663 Expansion of the CALL_EXPR below will call emit_queue before
2664 we are finished emitting RTL for argument setup. So if we are
2665 not careful we could get the wrong value for an argument.
2667 To avoid this problem we go ahead and emit code to copy OBJECT
2668 and SIZE into new pseudos. We can then place those new pseudos
2669 into an RTL_EXPR and use them later, even after a call to
2670 emit_queue.
2672 Note this is not strictly needed for library calls since they
2673 do not call emit_queue before loading their arguments. However,
2674 we may need to have library calls call emit_queue in the future
2675 since failing to do so could cause problems for targets which
2676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2678 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2680 if (TARGET_MEM_FUNCTIONS)
2681 size_mode = TYPE_MODE (sizetype);
2682 else
2683 size_mode = TYPE_MODE (unsigned_type_node);
2684 size = convert_to_mode (size_mode, size, 1);
2685 size = copy_to_mode_reg (size_mode, size);
2687 /* It is incorrect to use the libcall calling conventions to call
2688 memset in this context. This could be a user call to memset and
2689 the user may wish to examine the return value from memset. For
2690 targets where libcalls and normal calls have different conventions
2691 for returning pointers, we could end up generating incorrect code.
2693 For convenience, we generate the call to bzero this way as well. */
2695 object_tree = make_tree (ptr_type_node, object);
2696 if (TARGET_MEM_FUNCTIONS)
2697 size_tree = make_tree (sizetype, size);
2698 else
2699 size_tree = make_tree (unsigned_type_node, size);
2701 fn = clear_storage_libcall_fn (true);
2702 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2703 if (TARGET_MEM_FUNCTIONS)
2704 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2705 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2709 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2710 call_expr, arg_list, NULL_TREE);
2712 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2714 /* If we are initializing a readonly value, show the above call
2715 clobbered it. Otherwise, a load from it may erroneously be
2716 hoisted from a loop. */
2717 if (RTX_UNCHANGING_P (object))
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2720 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2723 /* A subroutine of clear_storage_via_libcall. Create the tree node
2724 for the function we use for block clears. The first time FOR_CALL
2725 is true, we call assemble_external. */
2727 static GTY(()) tree block_clear_fn;
2729 void
2730 init_block_clear_fn (const char *asmspec)
2732 if (!block_clear_fn)
2734 tree fn, args;
2736 if (TARGET_MEM_FUNCTIONS)
2738 fn = get_identifier ("memset");
2739 args = build_function_type_list (ptr_type_node, ptr_type_node,
2740 integer_type_node, sizetype,
2741 NULL_TREE);
2743 else
2745 fn = get_identifier ("bzero");
2746 args = build_function_type_list (void_type_node, ptr_type_node,
2747 unsigned_type_node, NULL_TREE);
2750 fn = build_decl (FUNCTION_DECL, fn, args);
2751 DECL_EXTERNAL (fn) = 1;
2752 TREE_PUBLIC (fn) = 1;
2753 DECL_ARTIFICIAL (fn) = 1;
2754 TREE_NOTHROW (fn) = 1;
2756 block_clear_fn = fn;
2759 if (asmspec)
2761 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2762 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2766 static tree
2767 clear_storage_libcall_fn (int for_call)
2769 static bool emitted_extern;
2771 if (!block_clear_fn)
2772 init_block_clear_fn (NULL);
2774 if (for_call && !emitted_extern)
2776 emitted_extern = true;
2777 make_decl_rtl (block_clear_fn, NULL);
2778 assemble_external (block_clear_fn);
2781 return block_clear_fn;
2784 /* Generate code to copy Y into X.
2785 Both Y and X must have the same mode, except that
2786 Y can be a constant with VOIDmode.
2787 This mode cannot be BLKmode; use emit_block_move for that.
2789 Return the last instruction emitted. */
2792 emit_move_insn (rtx x, rtx y)
2794 enum machine_mode mode = GET_MODE (x);
2795 rtx y_cst = NULL_RTX;
2796 rtx last_insn, set;
2798 x = protect_from_queue (x, 1);
2799 y = protect_from_queue (y, 0);
2801 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2802 abort ();
2804 if (CONSTANT_P (y))
2806 if (optimize
2807 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2808 && (last_insn = compress_float_constant (x, y)))
2809 return last_insn;
2811 y_cst = y;
2813 if (!LEGITIMATE_CONSTANT_P (y))
2815 y = force_const_mem (mode, y);
2817 /* If the target's cannot_force_const_mem prevented the spill,
2818 assume that the target's move expanders will also take care
2819 of the non-legitimate constant. */
2820 if (!y)
2821 y = y_cst;
2825 /* If X or Y are memory references, verify that their addresses are valid
2826 for the machine. */
2827 if (GET_CODE (x) == MEM
2828 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2829 && ! push_operand (x, GET_MODE (x)))
2830 || (flag_force_addr
2831 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2832 x = validize_mem (x);
2834 if (GET_CODE (y) == MEM
2835 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2836 || (flag_force_addr
2837 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2838 y = validize_mem (y);
2840 if (mode == BLKmode)
2841 abort ();
2843 last_insn = emit_move_insn_1 (x, y);
2845 if (y_cst && REG_P (x)
2846 && (set = single_set (last_insn)) != NULL_RTX
2847 && SET_DEST (set) == x
2848 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2849 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2851 return last_insn;
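/* A typical caller does no more than

     emit_move_insn (target_reg, source_rtx);

   after ensuring both sides share a mode (or the source is a VOIDmode
   constant).  Non-legitimate constants are spilled to the constant pool
   here when the target allows it, and when the destination is a register
   a REG_EQUAL note recording the original constant is attached, which
   later optimization passes can exploit.  */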
2854 /* Low level part of emit_move_insn.
2855 Called just like emit_move_insn, but assumes X and Y
2856 are basically valid. */
2859 emit_move_insn_1 (rtx x, rtx y)
2861 enum machine_mode mode = GET_MODE (x);
2862 enum machine_mode submode;
2863 enum mode_class class = GET_MODE_CLASS (mode);
2865 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2866 abort ();
2868 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2869 return
2870 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2872 /* Expand complex moves by moving real part and imag part, if possible. */
2873 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2874 && BLKmode != (submode = GET_MODE_INNER (mode))
2875 && (mov_optab->handlers[(int) submode].insn_code
2876 != CODE_FOR_nothing))
2878 /* Don't split destination if it is a stack push. */
2879 int stack = push_operand (x, GET_MODE (x));
2881 #ifdef PUSH_ROUNDING
2882 /* In case we output to the stack, but the size is smaller than the
2883 machine can push exactly, we need to use move instructions. */
2884 if (stack
2885 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2886 != GET_MODE_SIZE (submode)))
2888 rtx temp;
2889 HOST_WIDE_INT offset1, offset2;
2891 /* Do not use anti_adjust_stack, since we don't want to update
2892 stack_pointer_delta. */
2893 temp = expand_binop (Pmode,
2894 #ifdef STACK_GROWS_DOWNWARD
2895 sub_optab,
2896 #else
2897 add_optab,
2898 #endif
2899 stack_pointer_rtx,
2900 GEN_INT
2901 (PUSH_ROUNDING
2902 (GET_MODE_SIZE (GET_MODE (x)))),
2903 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2905 if (temp != stack_pointer_rtx)
2906 emit_move_insn (stack_pointer_rtx, temp);
2908 #ifdef STACK_GROWS_DOWNWARD
2909 offset1 = 0;
2910 offset2 = GET_MODE_SIZE (submode);
2911 #else
2912 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2913 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2914 + GET_MODE_SIZE (submode));
2915 #endif
2917 emit_move_insn (change_address (x, submode,
2918 gen_rtx_PLUS (Pmode,
2919 stack_pointer_rtx,
2920 GEN_INT (offset1))),
2921 gen_realpart (submode, y));
2922 emit_move_insn (change_address (x, submode,
2923 gen_rtx_PLUS (Pmode,
2924 stack_pointer_rtx,
2925 GEN_INT (offset2))),
2926 gen_imagpart (submode, y));
2928 else
2929 #endif
2930 /* If this is a stack, push the highpart first, so it
2931 will be in the argument order.
2933 In that case, change_address is used only to convert
2934 the mode, not to change the address. */
2935 if (stack)
2937 /* Note that the real part always precedes the imag part in memory
2938 regardless of machine's endianness. */
2939 #ifdef STACK_GROWS_DOWNWARD
2940 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2941 gen_imagpart (submode, y));
2942 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2943 gen_realpart (submode, y));
2944 #else
2945 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2946 gen_realpart (submode, y));
2947 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2948 gen_imagpart (submode, y));
2949 #endif
2951 else
2953 rtx realpart_x, realpart_y;
2954 rtx imagpart_x, imagpart_y;
2956 /* If this is a complex value with each part being smaller than a
2957 word, the usual calling sequence will likely pack the pieces into
2958 a single register. Unfortunately, SUBREG of hard registers only
2959 deals in terms of words, so we have a problem converting input
2960 arguments to the CONCAT of two registers that is used elsewhere
2961 for complex values. If this is before reload, we can copy it into
2962 memory and reload. FIXME, we should see about using extract and
2963 insert on integer registers, but complex short and complex char
2964 variables should be rarely used. */
2965 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2966 && (reload_in_progress | reload_completed) == 0)
2968 int packed_dest_p
2969 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2970 int packed_src_p
2971 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2973 if (packed_dest_p || packed_src_p)
2975 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2976 ? MODE_FLOAT : MODE_INT);
2978 enum machine_mode reg_mode
2979 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2981 if (reg_mode != BLKmode)
2983 rtx mem = assign_stack_temp (reg_mode,
2984 GET_MODE_SIZE (mode), 0);
2985 rtx cmem = adjust_address (mem, mode, 0);
2987 if (packed_dest_p)
2989 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2991 emit_move_insn_1 (cmem, y);
2992 return emit_move_insn_1 (sreg, mem);
2994 else
2996 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2998 emit_move_insn_1 (mem, sreg);
2999 return emit_move_insn_1 (x, cmem);
3005 realpart_x = gen_realpart (submode, x);
3006 realpart_y = gen_realpart (submode, y);
3007 imagpart_x = gen_imagpart (submode, x);
3008 imagpart_y = gen_imagpart (submode, y);
3010 /* Show the output dies here. This is necessary for SUBREGs
3011 of pseudos since we cannot track their lifetimes correctly;
3012 hard regs shouldn't appear here except as return values.
3013 We never want to emit such a clobber after reload. */
3014 if (x != y
3015 && ! (reload_in_progress || reload_completed)
3016 && (GET_CODE (realpart_x) == SUBREG
3017 || GET_CODE (imagpart_x) == SUBREG))
3018 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3020 emit_move_insn (realpart_x, realpart_y);
3021 emit_move_insn (imagpart_x, imagpart_y);
3024 return get_last_insn ();
3027 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3028 find a mode to do it in. If we have a movcc, use it. Otherwise,
3029 find the MODE_INT mode of the same width. */
3030 else if (GET_MODE_CLASS (mode) == MODE_CC
3031 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3033 enum insn_code insn_code;
3034 enum machine_mode tmode = VOIDmode;
3035 rtx x1 = x, y1 = y;
3037 if (mode != CCmode
3038 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3039 tmode = CCmode;
3040 else
3041 for (tmode = QImode; tmode != VOIDmode;
3042 tmode = GET_MODE_WIDER_MODE (tmode))
3043 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3044 break;
3046 if (tmode == VOIDmode)
3047 abort ();
3049 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3050 may call change_address which is not appropriate if we were
3051 called when a reload was in progress. We don't have to worry
3052 about changing the address since the size in bytes is supposed to
3053 be the same. Copy the MEM to change the mode and move any
3054 substitutions from the old MEM to the new one. */
3056 if (reload_in_progress)
3058 x = gen_lowpart_common (tmode, x1);
3059 if (x == 0 && GET_CODE (x1) == MEM)
3061 x = adjust_address_nv (x1, tmode, 0);
3062 copy_replacements (x1, x);
3065 y = gen_lowpart_common (tmode, y1);
3066 if (y == 0 && GET_CODE (y1) == MEM)
3068 y = adjust_address_nv (y1, tmode, 0);
3069 copy_replacements (y1, y);
3072 else
3074 x = gen_lowpart (tmode, x);
3075 y = gen_lowpart (tmode, y);
3078 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3079 return emit_insn (GEN_FCN (insn_code) (x, y));
3082 /* Try using a move pattern for the corresponding integer mode. This is
3083 only safe when simplify_subreg can convert MODE constants into integer
3084 constants. At present, it can only do this reliably if the value
3085 fits within a HOST_WIDE_INT. */
3086 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3087 && (submode = int_mode_for_mode (mode)) != BLKmode
3088 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3089 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3090 (simplify_gen_subreg (submode, x, mode, 0),
3091 simplify_gen_subreg (submode, y, mode, 0)));
3093 /* This will handle any multi-word or full-word mode that lacks a move_insn
3094 pattern. However, you will get better code if you define such patterns,
3095 even if they must turn into multiple assembler instructions. */
3096 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3098 rtx last_insn = 0;
3099 rtx seq, inner;
3100 int need_clobber;
3101 int i;
3103 #ifdef PUSH_ROUNDING
3105 /* If X is a push on the stack, do the push now and replace
3106 X with a reference to the stack pointer. */
3107 if (push_operand (x, GET_MODE (x)))
3109 rtx temp;
3110 enum rtx_code code;
3112 /* Do not use anti_adjust_stack, since we don't want to update
3113 stack_pointer_delta. */
3114 temp = expand_binop (Pmode,
3115 #ifdef STACK_GROWS_DOWNWARD
3116 sub_optab,
3117 #else
3118 add_optab,
3119 #endif
3120 stack_pointer_rtx,
3121 GEN_INT
3122 (PUSH_ROUNDING
3123 (GET_MODE_SIZE (GET_MODE (x)))),
3124 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3126 if (temp != stack_pointer_rtx)
3127 emit_move_insn (stack_pointer_rtx, temp);
3129 code = GET_CODE (XEXP (x, 0));
3131 /* Just hope that small offsets off SP are OK. */
3132 if (code == POST_INC)
3133 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3134 GEN_INT (-((HOST_WIDE_INT)
3135 GET_MODE_SIZE (GET_MODE (x)))));
3136 else if (code == POST_DEC)
3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3138 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3139 else
3140 temp = stack_pointer_rtx;
3142 x = change_address (x, VOIDmode, temp);
3144 #endif
3146 /* If we are in reload, see if either operand is a MEM whose address
3147 is scheduled for replacement. */
3148 if (reload_in_progress && GET_CODE (x) == MEM
3149 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3150 x = replace_equiv_address_nv (x, inner);
3151 if (reload_in_progress && GET_CODE (y) == MEM
3152 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3153 y = replace_equiv_address_nv (y, inner);
3155 start_sequence ();
3157 need_clobber = 0;
3158 for (i = 0;
3159 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3160 i++)
3162 rtx xpart = operand_subword (x, i, 1, mode);
3163 rtx ypart = operand_subword (y, i, 1, mode);
3165 /* If we can't get a part of Y, put Y into memory if it is a
3166 constant. Otherwise, force it into a register. If we still
3167 can't get a part of Y, abort. */
3168 if (ypart == 0 && CONSTANT_P (y))
3170 y = force_const_mem (mode, y);
3171 ypart = operand_subword (y, i, 1, mode);
3173 else if (ypart == 0)
3174 ypart = operand_subword_force (y, i, mode);
3176 if (xpart == 0 || ypart == 0)
3177 abort ();
3179 need_clobber |= (GET_CODE (xpart) == SUBREG);
3181 last_insn = emit_move_insn (xpart, ypart);
3184 seq = get_insns ();
3185 end_sequence ();
3187 /* Show the output dies here. This is necessary for SUBREGs
3188 of pseudos since we cannot track their lifetimes correctly;
3189 hard regs shouldn't appear here except as return values.
3190 We never want to emit such a clobber after reload. */
3191 if (x != y
3192 && ! (reload_in_progress || reload_completed)
3193 && need_clobber != 0)
3194 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3196 emit_insn (seq);
3198 return last_insn;
3200 else
3201 abort ();
3204 /* If Y is representable exactly in a narrower mode, and the target can
3205 perform the extension directly from constant or memory, then emit the
3206 move as an extension. */
3208 static rtx
3209 compress_float_constant (rtx x, rtx y)
3211 enum machine_mode dstmode = GET_MODE (x);
3212 enum machine_mode orig_srcmode = GET_MODE (y);
3213 enum machine_mode srcmode;
3214 REAL_VALUE_TYPE r;
3216 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3218 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3219 srcmode != orig_srcmode;
3220 srcmode = GET_MODE_WIDER_MODE (srcmode))
3222 enum insn_code ic;
3223 rtx trunc_y, last_insn;
3225 /* Skip if the target can't extend this way. */
3226 ic = can_extend_p (dstmode, srcmode, 0);
3227 if (ic == CODE_FOR_nothing)
3228 continue;
3230 /* Skip if the narrowed value isn't exact. */
3231 if (! exact_real_truncate (srcmode, &r))
3232 continue;
3234 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3236 if (LEGITIMATE_CONSTANT_P (trunc_y))
3238 /* Skip if the target needs extra instructions to perform
3239 the extension. */
3240 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3241 continue;
3243 else if (float_extend_from_mem[dstmode][srcmode])
3244 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3245 else
3246 continue;
3248 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3249 last_insn = get_last_insn ();
3251 if (REG_P (x))
3252 set_unique_reg_note (last_insn, REG_EQUAL, y);
3254 return last_insn;
3257 return NULL_RTX;
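/* For example, moving the DFmode constant 1.0 into a register on a
   target that can float_extend from SFmode would, under the tests above,
   be emitted as an SFmode constant load extended to DFmode, provided the
   truncation to SFmode is exact and the extender accepts the operand;
   otherwise NULL_RTX is returned and the caller falls back to the
   ordinary move.  */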
3260 /* Pushing data onto the stack. */
3262 /* Push a block of length SIZE (perhaps variable)
3263 and return an rtx to address the beginning of the block.
3264 Note that it is not possible for the value returned to be a QUEUED.
3265 The value may be virtual_outgoing_args_rtx.
3267 EXTRA is the number of bytes of padding to push in addition to SIZE.
3268 BELOW nonzero means this padding comes at low addresses;
3269 otherwise, the padding comes at high addresses. */
3272 push_block (rtx size, int extra, int below)
3274 rtx temp;
3276 size = convert_modes (Pmode, ptr_mode, size, 1);
3277 if (CONSTANT_P (size))
3278 anti_adjust_stack (plus_constant (size, extra));
3279 else if (REG_P (size) && extra == 0)
3280 anti_adjust_stack (size);
3281 else
3283 temp = copy_to_mode_reg (Pmode, size);
3284 if (extra != 0)
3285 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3286 temp, 0, OPTAB_LIB_WIDEN);
3287 anti_adjust_stack (temp);
3290 #ifndef STACK_GROWS_DOWNWARD
3291 if (0)
3292 #else
3293 if (1)
3294 #endif
3296 temp = virtual_outgoing_args_rtx;
3297 if (extra != 0 && below)
3298 temp = plus_constant (temp, extra);
3300 else
3302 if (GET_CODE (size) == CONST_INT)
3303 temp = plus_constant (virtual_outgoing_args_rtx,
3304 -INTVAL (size) - (below ? 0 : extra));
3305 else if (extra != 0 && !below)
3306 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3307 negate_rtx (Pmode, plus_constant (size, extra)));
3308 else
3309 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3310 negate_rtx (Pmode, size));
3313 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
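/* For instance, push_block (GEN_INT (32), 0, 0) adjusts the stack by 32
   bytes and returns an address (on a downward-growing stack, simply
   virtual_outgoing_args_rtx) from which the caller can build a BLKmode
   MEM covering the newly allocated block.  */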
3316 #ifdef PUSH_ROUNDING
3318 /* Emit single push insn. */
3320 static void
3321 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3323 rtx dest_addr;
3324 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3325 rtx dest;
3326 enum insn_code icode;
3327 insn_operand_predicate_fn pred;
3329 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3330 /* If there is a push pattern, use it. Otherwise try the old way of
3331 throwing a MEM representing the push operation to the move expander. */
3332 icode = push_optab->handlers[(int) mode].insn_code;
3333 if (icode != CODE_FOR_nothing)
3335 if (((pred = insn_data[(int) icode].operand[0].predicate)
3336 && !((*pred) (x, mode))))
3337 x = force_reg (mode, x);
3338 emit_insn (GEN_FCN (icode) (x));
3339 return;
3341 if (GET_MODE_SIZE (mode) == rounded_size)
3342 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3343 /* If we are to pad downward, adjust the stack pointer first and
3344 then store X into the stack location using an offset. This is
3345 because emit_move_insn does not know how to pad; it does not have
3346 access to type. */
3347 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3349 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3350 HOST_WIDE_INT offset;
3352 emit_move_insn (stack_pointer_rtx,
3353 expand_binop (Pmode,
3354 #ifdef STACK_GROWS_DOWNWARD
3355 sub_optab,
3356 #else
3357 add_optab,
3358 #endif
3359 stack_pointer_rtx,
3360 GEN_INT (rounded_size),
3361 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3363 offset = (HOST_WIDE_INT) padding_size;
3364 #ifdef STACK_GROWS_DOWNWARD
3365 if (STACK_PUSH_CODE == POST_DEC)
3366 /* We have already decremented the stack pointer, so get the
3367 previous value. */
3368 offset += (HOST_WIDE_INT) rounded_size;
3369 #else
3370 if (STACK_PUSH_CODE == POST_INC)
3371 /* We have already incremented the stack pointer, so get the
3372 previous value. */
3373 offset -= (HOST_WIDE_INT) rounded_size;
3374 #endif
3375 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3377 else
3379 #ifdef STACK_GROWS_DOWNWARD
3380 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3382 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3383 #else
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3386 GEN_INT (rounded_size));
3387 #endif
3388 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3391 dest = gen_rtx_MEM (mode, dest_addr);
3393 if (type != 0)
3395 set_mem_attributes (dest, type, 1);
3397 if (flag_optimize_sibling_calls)
3398 /* Function incoming arguments may overlap with sibling call
3399 outgoing arguments and we cannot allow reordering of reads
3400 from function arguments with stores to outgoing arguments
3401 of sibling calls. */
3402 set_mem_alias_set (dest, 0);
3404 emit_move_insn (dest, x);
3406 #endif
3408 /* Generate code to push X onto the stack, assuming it has mode MODE and
3409 type TYPE.
3410 MODE is redundant except when X is a CONST_INT (since they don't
3411 carry mode info).
3412 SIZE is an rtx for the size of data to be copied (in bytes),
3413 needed only if X is BLKmode.
3415 ALIGN (in bits) is maximum alignment we can assume.
3417 If PARTIAL and REG are both nonzero, then copy that many of the first
3418 words of X into registers starting with REG, and push the rest of X.
3419 The amount of space pushed is decreased by PARTIAL words,
3420 rounded *down* to a multiple of PARM_BOUNDARY.
3421 REG must be a hard register in this case.
3422 If REG is zero but PARTIAL is not, take all other actions for an
3423 argument partially in registers, but do not actually load any
3424 registers.
3426 EXTRA is the amount in bytes of extra space to leave next to this arg.
3427 This is ignored if an argument block has already been allocated.
3429 On a machine that lacks real push insns, ARGS_ADDR is the address of
3430 the bottom of the argument block for this call. We use indexing off there
3431 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3432 argument block has not been preallocated.
3434 ARGS_SO_FAR is the size of args previously pushed for this call.
3436 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3437 for arguments passed in registers. If nonzero, it will be the number
3438 of bytes required. */
3440 void
3441 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3442 unsigned int align, int partial, rtx reg, int extra,
3443 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3444 rtx alignment_pad)
3446 rtx xinner;
3447 enum direction stack_direction
3448 #ifdef STACK_GROWS_DOWNWARD
3449 = downward;
3450 #else
3451 = upward;
3452 #endif
3454 /* Decide where to pad the argument: `downward' for below,
3455 `upward' for above, or `none' for don't pad it.
3456 Default is below for small data on big-endian machines; else above. */
3457 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3459 /* Invert direction if stack is post-decrement.
3460 FIXME: why? */
3461 if (STACK_PUSH_CODE == POST_DEC)
3462 if (where_pad != none)
3463 where_pad = (where_pad == downward ? upward : downward);
3465 xinner = x = protect_from_queue (x, 0);
3467 if (mode == BLKmode)
3469 /* Copy a block into the stack, entirely or partially. */
3471 rtx temp;
3472 int used = partial * UNITS_PER_WORD;
3473 int offset;
3474 int skip;
3476 if (reg && GET_CODE (reg) == PARALLEL)
3478 /* Use the size of the elt to compute offset. */
3479 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3480 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3481 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3483 else
3484 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3486 if (size == 0)
3487 abort ();
3489 used -= offset;
3491 /* USED is now the # of bytes we need not copy to the stack
3492 because registers will take care of them. */
3494 if (partial != 0)
3495 xinner = adjust_address (xinner, BLKmode, used);
3497 /* If the partial register-part of the arg counts in its stack size,
3498 skip the part of stack space corresponding to the registers.
3499 Otherwise, start copying to the beginning of the stack space,
3500 by setting SKIP to 0. */
3501 skip = (reg_parm_stack_space == 0) ? 0 : used;
3503 #ifdef PUSH_ROUNDING
3504 /* Do it with several push insns if that doesn't take lots of insns
3505 and if there is no difficulty with push insns that skip bytes
3506 on the stack for alignment purposes. */
3507 if (args_addr == 0
3508 && PUSH_ARGS
3509 && GET_CODE (size) == CONST_INT
3510 && skip == 0
3511 && MEM_ALIGN (xinner) >= align
3512 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3513 /* Here we avoid the case of a structure whose weak alignment
3514 forces many pushes of a small amount of data,
3515 and such small pushes do rounding that causes trouble. */
3516 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3517 || align >= BIGGEST_ALIGNMENT
3518 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3519 == (align / BITS_PER_UNIT)))
3520 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3522 /* Push padding now if padding above and stack grows down,
3523 or if padding below and stack grows up.
3524 But if space already allocated, this has already been done. */
3525 if (extra && args_addr == 0
3526 && where_pad != none && where_pad != stack_direction)
3527 anti_adjust_stack (GEN_INT (extra));
3529 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3531 else
3532 #endif /* PUSH_ROUNDING */
3534 rtx target;
3536 /* Otherwise make space on the stack and copy the data
3537 to the address of that space. */
3539 /* Deduct words put into registers from the size we must copy. */
3540 if (partial != 0)
3542 if (GET_CODE (size) == CONST_INT)
3543 size = GEN_INT (INTVAL (size) - used);
3544 else
3545 size = expand_binop (GET_MODE (size), sub_optab, size,
3546 GEN_INT (used), NULL_RTX, 0,
3547 OPTAB_LIB_WIDEN);
3550 /* Get the address of the stack space.
3551 In this case, we do not deal with EXTRA separately.
3552 A single stack adjust will do. */
3553 if (! args_addr)
3555 temp = push_block (size, extra, where_pad == downward);
3556 extra = 0;
3558 else if (GET_CODE (args_so_far) == CONST_INT)
3559 temp = memory_address (BLKmode,
3560 plus_constant (args_addr,
3561 skip + INTVAL (args_so_far)));
3562 else
3563 temp = memory_address (BLKmode,
3564 plus_constant (gen_rtx_PLUS (Pmode,
3565 args_addr,
3566 args_so_far),
3567 skip));
3569 if (!ACCUMULATE_OUTGOING_ARGS)
3571 /* If the source is referenced relative to the stack pointer,
3572 copy it to another register to stabilize it. We do not need
3573 to do this if we know that we won't be changing sp. */
3575 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3576 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3577 temp = copy_to_reg (temp);
3580 target = gen_rtx_MEM (BLKmode, temp);
3582 if (type != 0)
3584 set_mem_attributes (target, type, 1);
3585 /* Function incoming arguments may overlap with sibling call
3586 outgoing arguments and we cannot allow reordering of reads
3587 from function arguments with stores to outgoing arguments
3588 of sibling calls. */
3589 set_mem_alias_set (target, 0);
3592 /* ALIGN may well be better aligned than TYPE, e.g. due to
3593 PARM_BOUNDARY. Assume the caller isn't lying. */
3594 set_mem_align (target, align);
3596 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3599 else if (partial > 0)
3601 /* Scalar partly in registers. */
3603 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3604 int i;
3605 int not_stack;
3606 /* # words of start of argument
3607 that we must make space for but need not store. */
3608 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3609 int args_offset = INTVAL (args_so_far);
3610 int skip;
3612 /* Push padding now if padding above and stack grows down,
3613 or if padding below and stack grows up.
3614 But if space already allocated, this has already been done. */
3615 if (extra && args_addr == 0
3616 && where_pad != none && where_pad != stack_direction)
3617 anti_adjust_stack (GEN_INT (extra));
3619 /* If we make space by pushing it, we might as well push
3620 the real data. Otherwise, we can leave OFFSET nonzero
3621 and leave the space uninitialized. */
3622 if (args_addr == 0)
3623 offset = 0;
3625 /* Now NOT_STACK gets the number of words that we don't need to
3626 allocate on the stack. */
3627 not_stack = partial - offset;
3629 /* If the partial register-part of the arg counts in its stack size,
3630 skip the part of stack space corresponding to the registers.
3631 Otherwise, start copying to the beginning of the stack space,
3632 by setting SKIP to 0. */
3633 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3635 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3636 x = validize_mem (force_const_mem (mode, x));
3638 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3639 SUBREGs of such registers are not allowed. */
3640 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3641 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3642 x = copy_to_reg (x);
3644 /* Loop over all the words allocated on the stack for this arg. */
3645 /* We can do it by words, because any scalar bigger than a word
3646 has a size a multiple of a word. */
3647 #ifndef PUSH_ARGS_REVERSED
3648 for (i = not_stack; i < size; i++)
3649 #else
3650 for (i = size - 1; i >= not_stack; i--)
3651 #endif
3652 if (i >= not_stack + offset)
3653 emit_push_insn (operand_subword_force (x, i, mode),
3654 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3655 0, args_addr,
3656 GEN_INT (args_offset + ((i - not_stack + skip)
3657 * UNITS_PER_WORD)),
3658 reg_parm_stack_space, alignment_pad);
3660 else
3662 rtx addr;
3663 rtx dest;
3665 /* Push padding now if padding above and stack grows down,
3666 or if padding below and stack grows up.
3667 But if space already allocated, this has already been done. */
3668 if (extra && args_addr == 0
3669 && where_pad != none && where_pad != stack_direction)
3670 anti_adjust_stack (GEN_INT (extra));
3672 #ifdef PUSH_ROUNDING
3673 if (args_addr == 0 && PUSH_ARGS)
3674 emit_single_push_insn (mode, x, type);
3675 else
3676 #endif
3678 if (GET_CODE (args_so_far) == CONST_INT)
3679 addr
3680 = memory_address (mode,
3681 plus_constant (args_addr,
3682 INTVAL (args_so_far)));
3683 else
3684 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3685 args_so_far));
3686 dest = gen_rtx_MEM (mode, addr);
3687 if (type != 0)
3689 set_mem_attributes (dest, type, 1);
3690 /* Function incoming arguments may overlap with sibling call
3691 outgoing arguments and we cannot allow reordering of reads
3692 from function arguments with stores to outgoing arguments
3693 of sibling calls. */
3694 set_mem_alias_set (dest, 0);
3697 emit_move_insn (dest, x);
3701 /* If part should go in registers, copy that part
3702 into the appropriate registers. Do this now, at the end,
3703 since mem-to-mem copies above may do function calls. */
3704 if (partial > 0 && reg != 0)
3706 /* Handle calls that pass values in multiple non-contiguous locations.
3707 The Irix 6 ABI has examples of this. */
3708 if (GET_CODE (reg) == PARALLEL)
3709 emit_group_load (reg, x, type, -1);
3710 else
3711 move_block_to_reg (REGNO (reg), x, partial, mode);
3714 if (extra && args_addr == 0 && where_pad == stack_direction)
3715 anti_adjust_stack (GEN_INT (extra));
3717 if (alignment_pad && args_addr == 0)
3718 anti_adjust_stack (alignment_pad);
3721 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3722 operations. */
3724 static rtx
3725 get_subtarget (rtx x)
3727 return ((x == 0
3728 /* Only registers can be subtargets. */
3729 || !REG_P (x)
3730 /* If the register is readonly, it can't be set more than once. */
3731 || RTX_UNCHANGING_P (x)
3732 /* Don't use hard regs to avoid extending their life. */
3733 || REGNO (x) < FIRST_PSEUDO_REGISTER
3734 /* Avoid subtargets inside loops,
3735 since they hide some invariant expressions. */
3736 || preserve_subexpressions_p ())
3737 ? 0 : x);
3740 /* Expand an assignment that stores the value of FROM into TO.
3741 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3742 (This may contain a QUEUED rtx;
3743 if the value is constant, this rtx is a constant.)
3744 Otherwise, the returned value is NULL_RTX. */
3746 rtx
3747 expand_assignment (tree to, tree from, int want_value)
3749 rtx to_rtx = 0;
3750 rtx result;
3752 /* Don't crash if the lhs of the assignment was erroneous. */
3754 if (TREE_CODE (to) == ERROR_MARK)
3756 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3757 return want_value ? result : NULL_RTX;
3760 /* Assignment of a structure component needs special treatment
3761 if the structure component's rtx is not simply a MEM.
3762 Assignment of an array element at a constant index, and assignment of
3763 an array element in an unaligned packed structure field, has the same
3764 problem. */
3766 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3767 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3768 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3770 enum machine_mode mode1;
3771 HOST_WIDE_INT bitsize, bitpos;
3772 rtx orig_to_rtx;
3773 tree offset;
3774 int unsignedp;
3775 int volatilep = 0;
3776 tree tem;
3778 push_temp_slots ();
3779 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3780 &unsignedp, &volatilep);
3782 /* If we are going to use store_bit_field and extract_bit_field,
3783 make sure to_rtx will be safe for multiple use. */
3785 if (mode1 == VOIDmode && want_value)
3786 tem = stabilize_reference (tem);
3788 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3790 if (offset != 0)
3792 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3794 if (GET_CODE (to_rtx) != MEM)
3795 abort ();
3797 #ifdef POINTERS_EXTEND_UNSIGNED
3798 if (GET_MODE (offset_rtx) != Pmode)
3799 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3800 #else
3801 if (GET_MODE (offset_rtx) != ptr_mode)
3802 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3803 #endif
3805 /* A constant address in TO_RTX can have VOIDmode; we must not try
3806 to call force_reg in that case, so avoid it. */
3807 if (GET_CODE (to_rtx) == MEM
3808 && GET_MODE (to_rtx) == BLKmode
3809 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3810 && bitsize > 0
3811 && (bitpos % bitsize) == 0
3812 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3813 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3815 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3816 bitpos = 0;
3819 to_rtx = offset_address (to_rtx, offset_rtx,
3820 highest_pow2_factor_for_target (to,
3821 offset));
3824 if (GET_CODE (to_rtx) == MEM)
3826 /* If the field is at offset zero, we could have been given the
3827 DECL_RTX of the parent struct. Don't munge it. */
3828 to_rtx = shallow_copy_rtx (to_rtx);
3830 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3833 /* Deal with volatile and readonly fields. The former is only done
3834 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3835 if (volatilep && GET_CODE (to_rtx) == MEM)
3837 if (to_rtx == orig_to_rtx)
3838 to_rtx = copy_rtx (to_rtx);
3839 MEM_VOLATILE_P (to_rtx) = 1;
3842 if (TREE_CODE (to) == COMPONENT_REF
3843 && TREE_READONLY (TREE_OPERAND (to, 1))
3844 /* We can't assert that a MEM won't be set more than once
3845 if the component is not addressable because another
3846 non-addressable component may be referenced by the same MEM. */
3847 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3849 if (to_rtx == orig_to_rtx)
3850 to_rtx = copy_rtx (to_rtx);
3851 RTX_UNCHANGING_P (to_rtx) = 1;
3854 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3856 if (to_rtx == orig_to_rtx)
3857 to_rtx = copy_rtx (to_rtx);
3858 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3861 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3862 (want_value
3863 /* Spurious cast for HPUX compiler. */
3864 ? ((enum machine_mode)
3865 TYPE_MODE (TREE_TYPE (to)))
3866 : VOIDmode),
3867 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3869 preserve_temp_slots (result);
3870 free_temp_slots ();
3871 pop_temp_slots ();
3873 /* If the value is meaningful, convert RESULT to the proper mode.
3874 Otherwise, return nothing. */
3875 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3876 TYPE_MODE (TREE_TYPE (from)),
3877 result,
3878 TYPE_UNSIGNED (TREE_TYPE (to)))
3879 : NULL_RTX);
3882 /* If the rhs is a function call and its value is not an aggregate,
3883 call the function before we start to compute the lhs.
3884 This is needed for correct code for cases such as
3885 val = setjmp (buf) on machines where reference to val
3886 requires loading up part of an address in a separate insn.
3888 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG
3889 since it might be a promoted variable where the zero- or sign-extension
3890 needs to be done. Handling this in the normal way is safe because no
3891 computation is done before the call. */
3892 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3893 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3894 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3895 && REG_P (DECL_RTL (to))))
3897 rtx value;
3899 push_temp_slots ();
3900 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3901 if (to_rtx == 0)
3902 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3904 /* Handle calls that return values in multiple non-contiguous locations.
3905 The Irix 6 ABI has examples of this. */
3906 if (GET_CODE (to_rtx) == PARALLEL)
3907 emit_group_load (to_rtx, value, TREE_TYPE (from),
3908 int_size_in_bytes (TREE_TYPE (from)));
3909 else if (GET_MODE (to_rtx) == BLKmode)
3910 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3911 else
3913 if (POINTER_TYPE_P (TREE_TYPE (to)))
3914 value = convert_memory_address (GET_MODE (to_rtx), value);
3915 emit_move_insn (to_rtx, value);
3917 preserve_temp_slots (to_rtx);
3918 free_temp_slots ();
3919 pop_temp_slots ();
3920 return want_value ? to_rtx : NULL_RTX;
3923 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3924 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3926 if (to_rtx == 0)
3927 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3929 /* Don't move directly into a return register. */
3930 if (TREE_CODE (to) == RESULT_DECL
3931 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3933 rtx temp;
3935 push_temp_slots ();
3936 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3938 if (GET_CODE (to_rtx) == PARALLEL)
3939 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3940 int_size_in_bytes (TREE_TYPE (from)));
3941 else
3942 emit_move_insn (to_rtx, temp);
3944 preserve_temp_slots (to_rtx);
3945 free_temp_slots ();
3946 pop_temp_slots ();
3947 return want_value ? to_rtx : NULL_RTX;
3950 /* In case we are returning the contents of an object which overlaps
3951 the place the value is being stored, use a safe function when copying
3952 a value through a pointer into a structure value return block. */
3953 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3954 && current_function_returns_struct
3955 && !current_function_returns_pcc_struct)
3957 rtx from_rtx, size;
3959 push_temp_slots ();
3960 size = expr_size (from);
3961 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3963 if (TARGET_MEM_FUNCTIONS)
3964 emit_library_call (memmove_libfunc, LCT_NORMAL,
3965 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3966 XEXP (from_rtx, 0), Pmode,
3967 convert_to_mode (TYPE_MODE (sizetype),
3968 size, TYPE_UNSIGNED (sizetype)),
3969 TYPE_MODE (sizetype));
3970 else
3971 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3972 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3973 XEXP (to_rtx, 0), Pmode,
3974 convert_to_mode (TYPE_MODE (integer_type_node),
3975 size,
3976 TYPE_UNSIGNED (integer_type_node)),
3977 TYPE_MODE (integer_type_node));
3979 preserve_temp_slots (to_rtx);
3980 free_temp_slots ();
3981 pop_temp_slots ();
3982 return want_value ? to_rtx : NULL_RTX;
3985 /* Compute FROM and store the value in the rtx we got. */
3987 push_temp_slots ();
3988 result = store_expr (from, to_rtx, want_value);
3989 preserve_temp_slots (result);
3990 free_temp_slots ();
3991 pop_temp_slots ();
3992 return want_value ? result : NULL_RTX;
3995 /* Generate code for computing expression EXP,
3996 and storing the value into TARGET.
3997 TARGET may contain a QUEUED rtx.
3999 If WANT_VALUE & 1 is nonzero, return a copy of the value
4000 not in TARGET, so that we can be sure to use the proper
4001 value in a containing expression even if TARGET has something
4002 else stored in it. If possible, we copy the value through a pseudo
4003 and return that pseudo. Or, if the value is constant, we try to
4004 return the constant. In some cases, we return a pseudo
4005 copied *from* TARGET.
4007 If the mode is BLKmode then we may return TARGET itself.
4008 It turns out that in BLKmode it doesn't cause a problem,
4009 because C has no operators that could combine two different
4010 assignments into the same BLKmode object with different values
4011 with no sequence point. Will other languages need this to
4012 be more thorough?
4014 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4015 to catch quickly any cases where the caller uses the value
4016 and fails to set WANT_VALUE.
4018 If WANT_VALUE & 2 is set, this is a store into a call param on the
4019 stack, and block moves may need to be treated specially. */
4021 rtx
4022 store_expr (tree exp, rtx target, int want_value)
4024 rtx temp;
4025 rtx alt_rtl = NULL_RTX;
4026 rtx mark = mark_queue ();
4027 int dont_return_target = 0;
4028 int dont_store_target = 0;
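/* When DONT_RETURN_TARGET is set, the value handed back to the caller is
   TEMP rather than TARGET; when DONT_STORE_TARGET is set, TEMP already
   denotes the same location as TARGET and the final store is skipped.  */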
4030 if (VOID_TYPE_P (TREE_TYPE (exp)))
4032 /* C++ can generate ?: expressions with a throw expression in one
4033 branch and an rvalue in the other. Here, we resolve attempts to
4034 store the throw expression's nonexistent result. */
4035 if (want_value)
4036 abort ();
4037 expand_expr (exp, const0_rtx, VOIDmode, 0);
4038 return NULL_RTX;
4040 if (TREE_CODE (exp) == COMPOUND_EXPR)
4042 /* Perform first part of compound expression, then assign from second
4043 part. */
4044 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4045 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4046 emit_queue ();
4047 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4049 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4051 /* For conditional expression, get safe form of the target. Then
4052 test the condition, doing the appropriate assignment on either
4053 side. This avoids the creation of unnecessary temporaries.
4054 For non-BLKmode, it is more efficient not to do this. */
4056 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4058 emit_queue ();
4059 target = protect_from_queue (target, 1);
4061 do_pending_stack_adjust ();
4062 NO_DEFER_POP;
4063 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4064 start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4066 end_cleanup_deferral ();
4067 emit_queue ();
4068 emit_jump_insn (gen_jump (lab2));
4069 emit_barrier ();
4070 emit_label (lab1);
4071 start_cleanup_deferral ();
4072 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4073 end_cleanup_deferral ();
4074 emit_queue ();
4075 emit_label (lab2);
4076 OK_DEFER_POP;
4078 return want_value & 1 ? target : NULL_RTX;
4080 else if (queued_subexp_p (target))
4081 /* If target contains a postincrement, let's not risk
4082 using it as the place to generate the rhs. */
4084 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4086 /* Expand EXP into a new pseudo. */
4087 temp = gen_reg_rtx (GET_MODE (target));
4088 temp = expand_expr (exp, temp, GET_MODE (target),
4089 (want_value & 2
4090 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4092 else
4093 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4094 (want_value & 2
4095 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4097 /* If target is volatile, ANSI requires accessing the value
4098 *from* the target, if it is accessed. So make that happen.
4099 In no case return the target itself. */
4100 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4101 dont_return_target = 1;
4103 else if ((want_value & 1) != 0
4104 && GET_CODE (target) == MEM
4105 && ! MEM_VOLATILE_P (target)
4106 && GET_MODE (target) != BLKmode)
4107 /* If target is in memory and caller wants value in a register instead,
4108 arrange that. Pass TARGET as target for expand_expr so that,
4109 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4110 We know expand_expr will not use the target in that case.
4111 Don't do this if TARGET is volatile because we are supposed
4112 to write it and then read it. */
4114 temp = expand_expr (exp, target, GET_MODE (target),
4115 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4116 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4118 /* If TEMP is already in the desired TARGET, only copy it from
4119 memory and don't store it there again. */
4120 if (temp == target
4121 || (rtx_equal_p (temp, target)
4122 && ! side_effects_p (temp) && ! side_effects_p (target)))
4123 dont_store_target = 1;
4124 temp = copy_to_reg (temp);
4126 dont_return_target = 1;
4128 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4129 /* If this is a scalar in a register that is stored in a wider mode
4130 than the declared mode, compute the result into its declared mode
4131 and then convert to the wider mode. Our value is the computed
4132 expression. */
4134 rtx inner_target = 0;
4136 /* If we don't want a value, we can do the conversion inside EXP,
4137 which will often result in some optimizations. Do the conversion
4138 in two steps: first change the signedness, if needed, then
4139 the extend. But don't do this if the type of EXP is a subtype
4140 of something else since then the conversion might involve
4141 more than just converting modes. */
4142 if ((want_value & 1) == 0
4143 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4144 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4146 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4147 != SUBREG_PROMOTED_UNSIGNED_P (target))
4148 exp = convert
4149 (lang_hooks.types.signed_or_unsigned_type
4150 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4152 exp = convert (lang_hooks.types.type_for_mode
4153 (GET_MODE (SUBREG_REG (target)),
4154 SUBREG_PROMOTED_UNSIGNED_P (target)),
4155 exp);
4157 inner_target = SUBREG_REG (target);
4160 temp = expand_expr (exp, inner_target, VOIDmode,
4161 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4163 /* If TEMP is a MEM and we want a result value, make the access
4164 now so it gets done only once. Strictly speaking, this is
4165 only necessary if the MEM is volatile, or if the address
4166 overlaps TARGET. But not performing the load twice also
4167 reduces the amount of rtl we generate and then have to CSE. */
4168 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4169 temp = copy_to_reg (temp);
4171 /* If TEMP is a VOIDmode constant, use convert_modes to make
4172 sure that we properly convert it. */
4173 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4175 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4176 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4177 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4178 GET_MODE (target), temp,
4179 SUBREG_PROMOTED_UNSIGNED_P (target));
4182 convert_move (SUBREG_REG (target), temp,
4183 SUBREG_PROMOTED_UNSIGNED_P (target));
4185 /* If we promoted a constant, change the mode back down to match
4186 target. Otherwise, the caller might get confused by a result whose
4187 mode is larger than expected. */
4189 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4191 if (GET_MODE (temp) != VOIDmode)
4193 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4194 SUBREG_PROMOTED_VAR_P (temp) = 1;
4195 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4196 SUBREG_PROMOTED_UNSIGNED_P (target));
4198 else
4199 temp = convert_modes (GET_MODE (target),
4200 GET_MODE (SUBREG_REG (target)),
4201 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4204 return want_value & 1 ? temp : NULL_RTX;
4206 else
4208 temp = expand_expr_real (exp, target, GET_MODE (target),
4209 (want_value & 2
4210 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4211 &alt_rtl);
4212 /* Return TARGET if it's a specified hardware register.
4213 If TARGET is a volatile mem ref, either return TARGET
4214 or return a reg copied *from* TARGET; ANSI requires this.
4216 Otherwise, if TEMP is not TARGET, return TEMP
4217 if it is constant (for efficiency),
4218 or if we really want the correct value. */
4219 if (!(target && REG_P (target)
4220 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4221 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4222 && ! rtx_equal_p (temp, target)
4223 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4224 dont_return_target = 1;
4227 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4228 the same as that of TARGET, adjust the constant. This is needed, for
4229 example, in case it is a CONST_DOUBLE and we want only a word-sized
4230 value. */
4231 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4232 && TREE_CODE (exp) != ERROR_MARK
4233 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4234 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4235 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4237 /* If value was not generated in the target, store it there.
4238 Convert the value to TARGET's type first if necessary and emit the
4239 pending incrementations that have been queued when expanding EXP.
4240 Note that we cannot emit the whole queue blindly because this will
4241 effectively disable the POST_INC optimization later.
4243 If TEMP and TARGET compare equal according to rtx_equal_p, but
4244 one or both of them are volatile memory refs, we have to distinguish
4245 two cases:
4246 - expand_expr has used TARGET. In this case, we must not generate
4247 another copy. This can be detected by TARGET being equal according
4248 to == .
4249 - expand_expr has not used TARGET - that means that the source just
4250 happens to have the same RTX form. Since temp will have been created
4251 by expand_expr, it will compare unequal according to == .
4252 We must generate a copy in this case, to reach the correct number
4253 of volatile memory references. */
4255 if ((! rtx_equal_p (temp, target)
4256 || (temp != target && (side_effects_p (temp)
4257 || side_effects_p (target))))
4258 && TREE_CODE (exp) != ERROR_MARK
4259 && ! dont_store_target
4260 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4261 but TARGET is not a valid memory reference, TEMP will differ
4262 from TARGET although it is really the same location. */
4263 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4264 /* If there's nothing to copy, don't bother. Don't call expr_size
4265 unless necessary, because some front-ends' (C++) expr_size hook
4266 aborts on objects that are not supposed to be bit-copied or
4267 bit-initialized. */
4268 && expr_size (exp) != const0_rtx)
4270 emit_insns_enqueued_after_mark (mark);
4271 target = protect_from_queue (target, 1);
4272 temp = protect_from_queue (temp, 0);
4273 if (GET_MODE (temp) != GET_MODE (target)
4274 && GET_MODE (temp) != VOIDmode)
4276 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4277 if (dont_return_target)
4279 /* In this case, we will return TEMP,
4280 so make sure it has the proper mode.
4281 But don't forget to store the value into TARGET. */
4282 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4283 emit_move_insn (target, temp);
4285 else
4286 convert_move (target, temp, unsignedp);
4289 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4291 /* Handle copying a string constant into an array. The string
4292 constant may be shorter than the array. So copy just the string's
4293 actual length, and clear the rest. First get the size of the data
4294 type of the string, which is actually the size of the target. */
4295 rtx size = expr_size (exp);
4297 if (GET_CODE (size) == CONST_INT
4298 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4299 emit_block_move (target, temp, size,
4300 (want_value & 2
4301 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4302 else
4304 /* Compute the size of the data to copy from the string. */
4305 tree copy_size
4306 = size_binop (MIN_EXPR,
4307 make_tree (sizetype, size),
4308 size_int (TREE_STRING_LENGTH (exp)));
4309 rtx copy_size_rtx
4310 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4311 (want_value & 2
4312 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4313 rtx label = 0;
4315 /* Copy that much. */
4316 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4317 TYPE_UNSIGNED (sizetype));
4318 emit_block_move (target, temp, copy_size_rtx,
4319 (want_value & 2
4320 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4322 /* Figure out how much is left in TARGET that we have to clear.
4323 Do all calculations in ptr_mode. */
4324 if (GET_CODE (copy_size_rtx) == CONST_INT)
4326 size = plus_constant (size, -INTVAL (copy_size_rtx));
4327 target = adjust_address (target, BLKmode,
4328 INTVAL (copy_size_rtx));
4330 else
4332 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4333 copy_size_rtx, NULL_RTX, 0,
4334 OPTAB_LIB_WIDEN);
4336 #ifdef POINTERS_EXTEND_UNSIGNED
4337 if (GET_MODE (copy_size_rtx) != Pmode)
4338 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4339 TYPE_UNSIGNED (sizetype));
4340 #endif
4342 target = offset_address (target, copy_size_rtx,
4343 highest_pow2_factor (copy_size));
4344 label = gen_label_rtx ();
4345 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4346 GET_MODE (size), 0, label);
4349 if (size != const0_rtx)
4350 clear_storage (target, size);
4352 if (label)
4353 emit_label (label);
4356 /* Handle calls that return values in multiple non-contiguous locations.
4357 The Irix 6 ABI has examples of this. */
4358 else if (GET_CODE (target) == PARALLEL)
4359 emit_group_load (target, temp, TREE_TYPE (exp),
4360 int_size_in_bytes (TREE_TYPE (exp)));
4361 else if (GET_MODE (temp) == BLKmode)
4362 emit_block_move (target, temp, expr_size (exp),
4363 (want_value & 2
4364 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4365 else
4367 temp = force_operand (temp, target);
4368 if (temp != target)
4369 emit_move_insn (target, temp);
4373 /* If we don't want a value, return NULL_RTX. */
4374 if ((want_value & 1) == 0)
4375 return NULL_RTX;
4377 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4378 ??? The latter test doesn't seem to make sense. */
4379 else if (dont_return_target && GET_CODE (temp) != MEM)
4380 return temp;
4382 /* Return TARGET itself if it is a hard register. */
4383 else if ((want_value & 1) != 0
4384 && GET_MODE (target) != BLKmode
4385 && ! (REG_P (target)
4386 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4387 return copy_to_reg (target);
4389 else
4390 return target;
4393 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4394 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4395 are set to non-constant values and place it in *P_NC_ELTS. */
4397 static void
4398 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4399 HOST_WIDE_INT *p_nc_elts)
4401 HOST_WIDE_INT nz_elts, nc_elts;
4402 tree list;
4404 nz_elts = 0;
4405 nc_elts = 0;
4407 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4409 tree value = TREE_VALUE (list);
4410 tree purpose = TREE_PURPOSE (list);
4411 HOST_WIDE_INT mult;
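/* MULT is the number of scalar slots covered by this constructor element;
   a RANGE_EXPR index covers HI - LO + 1 of them, anything else covers one.  */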
4413 mult = 1;
4414 if (TREE_CODE (purpose) == RANGE_EXPR)
4416 tree lo_index = TREE_OPERAND (purpose, 0);
4417 tree hi_index = TREE_OPERAND (purpose, 1);
4419 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4420 mult = (tree_low_cst (hi_index, 1)
4421 - tree_low_cst (lo_index, 1) + 1);
4424 switch (TREE_CODE (value))
4426 case CONSTRUCTOR:
4428 HOST_WIDE_INT nz = 0, nc = 0;
4429 categorize_ctor_elements_1 (value, &nz, &nc);
4430 nz_elts += mult * nz;
4431 nc_elts += mult * nc;
4433 break;
4435 case INTEGER_CST:
4436 case REAL_CST:
4437 if (!initializer_zerop (value))
4438 nz_elts += mult;
4439 break;
4440 case COMPLEX_CST:
4441 if (!initializer_zerop (TREE_REALPART (value)))
4442 nz_elts += mult;
4443 if (!initializer_zerop (TREE_IMAGPART (value)))
4444 nz_elts += mult;
4445 break;
4446 case VECTOR_CST:
4448 tree v;
4449 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4450 if (!initializer_zerop (TREE_VALUE (v)))
4451 nz_elts += mult;
4453 break;
4455 default:
4456 nz_elts += mult;
4457 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4458 nc_elts += mult;
4459 break;
4463 *p_nz_elts += nz_elts;
4464 *p_nc_elts += nc_elts;
4467 void
4468 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4469 HOST_WIDE_INT *p_nc_elts)
4471 *p_nz_elts = 0;
4472 *p_nc_elts = 0;
4473 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4476 /* Count the number of scalars in TYPE. Return -1 on overflow or
4477 if TYPE is variable-sized. */
4479 HOST_WIDE_INT
4480 count_type_elements (tree type)
4482 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4483 switch (TREE_CODE (type))
4485 case ARRAY_TYPE:
4487 tree telts = array_type_nelts (type);
4488 if (telts && host_integerp (telts, 1))
4490 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4491 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4492 if (n == 0)
4493 return 0;
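/* Only return the product when it cannot overflow; otherwise fall
   through to the return of -1 below.  */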
4494 if (max / n > m)
4495 return n * m;
4497 return -1;
4500 case RECORD_TYPE:
4502 HOST_WIDE_INT n = 0, t;
4503 tree f;
4505 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4506 if (TREE_CODE (f) == FIELD_DECL)
4508 t = count_type_elements (TREE_TYPE (f));
4509 if (t < 0)
4510 return -1;
4511 n += t;
4514 return n;
4517 case UNION_TYPE:
4518 case QUAL_UNION_TYPE:
4520 /* Ho hum. How in the world do we guess here? Clearly it isn't
4521 right to count the fields. Guess based on the number of words. */
4522 HOST_WIDE_INT n = int_size_in_bytes (type);
4523 if (n < 0)
4524 return -1;
4525 return n / UNITS_PER_WORD;
4528 case COMPLEX_TYPE:
4529 return 2;
4531 case VECTOR_TYPE:
4532 /* ??? This is broken. We should encode the vector width in the tree. */
4533 return GET_MODE_NUNITS (TYPE_MODE (type));
4535 case INTEGER_TYPE:
4536 case REAL_TYPE:
4537 case ENUMERAL_TYPE:
4538 case BOOLEAN_TYPE:
4539 case CHAR_TYPE:
4540 case POINTER_TYPE:
4541 case OFFSET_TYPE:
4542 case REFERENCE_TYPE:
4543 return 1;
4545 case VOID_TYPE:
4546 case METHOD_TYPE:
4547 case FILE_TYPE:
4548 case SET_TYPE:
4549 case FUNCTION_TYPE:
4550 case LANG_TYPE:
4551 default:
4552 abort ();
4556 /* Return 1 if EXP contains mostly (3/4) zeros. */
4558 static int
4559 mostly_zeros_p (tree exp)
4561 if (TREE_CODE (exp) == CONSTRUCTOR)
4564 HOST_WIDE_INT nz_elts, nc_elts, elts;
4566 /* If there are no ranges of true bits, it is all zero. */
4567 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4568 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4570 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4571 elts = count_type_elements (TREE_TYPE (exp));
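/* count_type_elements returns -1 for variable-sized types; the comparison
   below is then false, so such a constructor is conservatively treated as
   not mostly zeros.  */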
4573 return nz_elts < elts / 4;
4576 return initializer_zerop (exp);
4579 /* Helper function for store_constructor.
4580 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4581 TYPE is the type of the CONSTRUCTOR, not the element type.
4582 CLEARED is as for store_constructor.
4583 ALIAS_SET is the alias set to use for any stores.
4585 This provides a recursive shortcut back to store_constructor when it isn't
4586 necessary to go through store_field. This is so that we can pass through
4587 the cleared field to let store_constructor know that we may not have to
4588 clear a substructure if the outer structure has already been cleared. */
4590 static void
4591 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4592 HOST_WIDE_INT bitpos, enum machine_mode mode,
4593 tree exp, tree type, int cleared, int alias_set)
4595 if (TREE_CODE (exp) == CONSTRUCTOR
4596 /* We can only call store_constructor recursively if the size and
4597 bit position are on a byte boundary. */
4598 && bitpos % BITS_PER_UNIT == 0
4599 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4600 /* If we have a nonzero bitpos for a register target, then we just
4601 let store_field do the bitfield handling. This is unlikely to
4602 generate unnecessary clear instructions anyway. */
4603 && (bitpos == 0 || GET_CODE (target) == MEM))
4605 if (GET_CODE (target) == MEM)
4606 target
4607 = adjust_address (target,
4608 GET_MODE (target) == BLKmode
4609 || 0 != (bitpos
4610 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4611 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4614 /* Update the alias set, if required. */
4615 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4616 && MEM_ALIAS_SET (target) != 0)
4618 target = copy_rtx (target);
4619 set_mem_alias_set (target, alias_set);
4622 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4624 else
4625 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4626 alias_set);
4629 /* Store the value of constructor EXP into the rtx TARGET.
4630 TARGET is either a REG or a MEM; we know it cannot conflict, since
4631 safe_from_p has been called.
4632 CLEARED is true if TARGET is known to have been zero'd.
4633 SIZE is the number of bytes of TARGET we are allowed to modify: this
4634 may not be the same as the size of EXP if we are assigning to a field
4635 which has been packed to exclude padding bits. */
4637 static void
4638 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4640 tree type = TREE_TYPE (exp);
4641 #ifdef WORD_REGISTER_OPERATIONS
4642 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4643 #endif
4645 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4646 || TREE_CODE (type) == QUAL_UNION_TYPE)
4648 tree elt;
4650 /* If size is zero or the target is already cleared, do nothing. */
4651 if (size == 0 || cleared)
4652 cleared = 1;
4653 /* We either clear the aggregate or indicate the value is dead. */
4654 else if ((TREE_CODE (type) == UNION_TYPE
4655 || TREE_CODE (type) == QUAL_UNION_TYPE)
4656 && ! CONSTRUCTOR_ELTS (exp))
4657 /* If the constructor is empty, clear the union. */
4659 clear_storage (target, expr_size (exp));
4660 cleared = 1;
4663 /* If we are building a static constructor into a register,
4664 set the initial value as zero so we can fold the value into
4665 a constant. But if more than one register is involved,
4666 this probably loses. */
4667 else if (REG_P (target) && TREE_STATIC (exp)
4668 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4670 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4671 cleared = 1;
4674 /* If the constructor has fewer fields than the structure
4675 or if we are initializing the structure to mostly zeros,
4676 clear the whole structure first. Don't do this if TARGET is a
4677 register whose mode size isn't equal to SIZE since clear_storage
4678 can't handle this case. */
4679 else if (size > 0
4680 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4681 || mostly_zeros_p (exp))
4682 && (!REG_P (target)
4683 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4684 == size)))
4686 rtx xtarget = target;
4688 if (readonly_fields_p (type))
4690 xtarget = copy_rtx (xtarget);
4691 RTX_UNCHANGING_P (xtarget) = 1;
4694 clear_storage (xtarget, GEN_INT (size));
4695 cleared = 1;
4698 if (! cleared)
4699 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4701 /* Store each element of the constructor into
4702 the corresponding field of TARGET. */
4704 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4706 tree field = TREE_PURPOSE (elt);
4707 tree value = TREE_VALUE (elt);
4708 enum machine_mode mode;
4709 HOST_WIDE_INT bitsize;
4710 HOST_WIDE_INT bitpos = 0;
4711 tree offset;
4712 rtx to_rtx = target;
4714 /* Just ignore missing fields.
4715 We cleared the whole structure, above,
4716 if any fields are missing. */
4717 if (field == 0)
4718 continue;
4720 if (cleared && initializer_zerop (value))
4721 continue;
4723 if (host_integerp (DECL_SIZE (field), 1))
4724 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4725 else
4726 bitsize = -1;
4728 mode = DECL_MODE (field);
4729 if (DECL_BIT_FIELD (field))
4730 mode = VOIDmode;
4732 offset = DECL_FIELD_OFFSET (field);
4733 if (host_integerp (offset, 0)
4734 && host_integerp (bit_position (field), 0))
4736 bitpos = int_bit_position (field);
4737 offset = 0;
4739 else
4740 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4742 if (offset)
4744 rtx offset_rtx;
4746 offset
4747 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4748 make_tree (TREE_TYPE (exp),
4749 target));
4751 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4752 if (GET_CODE (to_rtx) != MEM)
4753 abort ();
4755 #ifdef POINTERS_EXTEND_UNSIGNED
4756 if (GET_MODE (offset_rtx) != Pmode)
4757 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4758 #else
4759 if (GET_MODE (offset_rtx) != ptr_mode)
4760 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4761 #endif
4763 to_rtx = offset_address (to_rtx, offset_rtx,
4764 highest_pow2_factor (offset));
4767 if (TREE_READONLY (field))
4769 if (GET_CODE (to_rtx) == MEM)
4770 to_rtx = copy_rtx (to_rtx);
4772 RTX_UNCHANGING_P (to_rtx) = 1;
4775 #ifdef WORD_REGISTER_OPERATIONS
4776 /* If this initializes a field that is smaller than a word, at the
4777 start of a word, try to widen it to a full word.
4778 This special case allows us to output C++ member function
4779 initializations in a form that the optimizers can understand. */
4780 if (REG_P (target)
4781 && bitsize < BITS_PER_WORD
4782 && bitpos % BITS_PER_WORD == 0
4783 && GET_MODE_CLASS (mode) == MODE_INT
4784 && TREE_CODE (value) == INTEGER_CST
4785 && exp_size >= 0
4786 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4788 tree type = TREE_TYPE (value);
4790 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4792 type = lang_hooks.types.type_for_size
4793 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4794 value = convert (type, value);
4797 if (BYTES_BIG_ENDIAN)
4798 value
4799 = fold (build (LSHIFT_EXPR, type, value,
4800 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4801 bitsize = BITS_PER_WORD;
4802 mode = word_mode;
4804 #endif
4806 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4807 && DECL_NONADDRESSABLE_P (field))
4809 to_rtx = copy_rtx (to_rtx);
4810 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4813 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4814 value, type, cleared,
4815 get_alias_set (TREE_TYPE (field)));
4818 else if (TREE_CODE (type) == ARRAY_TYPE
4819 || TREE_CODE (type) == VECTOR_TYPE)
4821 tree elt;
4822 int i;
4823 int need_to_clear;
4824 tree domain;
4825 tree elttype = TREE_TYPE (type);
4826 int const_bounds_p;
4827 HOST_WIDE_INT minelt = 0;
4828 HOST_WIDE_INT maxelt = 0;
4829 int icode = 0;
4830 rtx *vector = NULL;
4831 int elt_size = 0;
4832 unsigned n_elts = 0;
4834 if (TREE_CODE (type) == ARRAY_TYPE)
4835 domain = TYPE_DOMAIN (type);
4836 else
4837 /* Vectors do not have domains; look up the domain of
4838 the array embedded in the debug representation type.
4839 FIXME Would probably be more efficient to treat vectors
4840 separately from arrays. */
4842 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4843 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4844 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4846 enum machine_mode mode = GET_MODE (target);
4848 icode = (int) vec_init_optab->handlers[mode].insn_code;
4849 if (icode != CODE_FOR_nothing)
4851 unsigned int i;
4853 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4854 n_elts = (GET_MODE_SIZE (mode) / elt_size);
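/* One rtx slot per vector element; prefill with zeros so that elements
   the constructor does not mention default to zero.  */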
4855 vector = alloca (n_elts * sizeof (rtx));
4856 for (i = 0; i < n_elts; i++)
4857 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4862 const_bounds_p = (TYPE_MIN_VALUE (domain)
4863 && TYPE_MAX_VALUE (domain)
4864 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4865 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4867 /* If we have constant bounds for the range of the type, get them. */
4868 if (const_bounds_p)
4870 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4871 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4874 /* If the constructor has fewer elements than the array,
4875 clear the whole array first. Similarly if this is
4876 a static constructor of a non-BLKmode object. */
4877 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4878 need_to_clear = 1;
4879 else
4881 HOST_WIDE_INT count = 0, zero_count = 0;
4882 need_to_clear = ! const_bounds_p;
4884 /* This loop is a more accurate version of the loop in
4885 mostly_zeros_p (it handles RANGE_EXPR in an index).
4886 It is also needed to check for missing elements. */
4887 for (elt = CONSTRUCTOR_ELTS (exp);
4888 elt != NULL_TREE && ! need_to_clear;
4889 elt = TREE_CHAIN (elt))
4891 tree index = TREE_PURPOSE (elt);
4892 HOST_WIDE_INT this_node_count;
4894 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4896 tree lo_index = TREE_OPERAND (index, 0);
4897 tree hi_index = TREE_OPERAND (index, 1);
4899 if (! host_integerp (lo_index, 1)
4900 || ! host_integerp (hi_index, 1))
4902 need_to_clear = 1;
4903 break;
4906 this_node_count = (tree_low_cst (hi_index, 1)
4907 - tree_low_cst (lo_index, 1) + 1);
4909 else
4910 this_node_count = 1;
4912 count += this_node_count;
4913 if (mostly_zeros_p (TREE_VALUE (elt)))
4914 zero_count += this_node_count;
4917 /* Clear the entire array first if there are any missing elements,
4918 or if the incidence of zero elements is >= 75%. */
4919 if (! need_to_clear
4920 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4921 need_to_clear = 1;
4924 if (need_to_clear && size > 0 && !vector)
4926 if (! cleared)
4928 if (REG_P (target))
4929 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4930 else
4931 clear_storage (target, GEN_INT (size));
4933 cleared = 1;
4935 else if (REG_P (target))
4936 /* Inform later passes that the old value is dead. */
4937 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4939 /* Store each element of the constructor into
4940 the corresponding element of TARGET, determined
4941 by counting the elements. */
4942 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4943 elt;
4944 elt = TREE_CHAIN (elt), i++)
4946 enum machine_mode mode;
4947 HOST_WIDE_INT bitsize;
4948 HOST_WIDE_INT bitpos;
4949 int unsignedp;
4950 tree value = TREE_VALUE (elt);
4951 tree index = TREE_PURPOSE (elt);
4952 rtx xtarget = target;
4954 if (cleared && initializer_zerop (value))
4955 continue;
4957 unsignedp = TYPE_UNSIGNED (elttype);
4958 mode = TYPE_MODE (elttype);
4959 if (mode == BLKmode)
4960 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4961 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4962 : -1);
4963 else
4964 bitsize = GET_MODE_BITSIZE (mode);
4966 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4968 tree lo_index = TREE_OPERAND (index, 0);
4969 tree hi_index = TREE_OPERAND (index, 1);
4970 rtx index_r, pos_rtx;
4971 HOST_WIDE_INT lo, hi, count;
4972 tree position;
4974 if (vector)
4975 abort ();
4977 /* If the range is constant and "small", unroll the loop. */
4978 if (const_bounds_p
4979 && host_integerp (lo_index, 0)
4980 && host_integerp (hi_index, 0)
4981 && (lo = tree_low_cst (lo_index, 0),
4982 hi = tree_low_cst (hi_index, 0),
4983 count = hi - lo + 1,
4984 (GET_CODE (target) != MEM
4985 || count <= 2
4986 || (host_integerp (TYPE_SIZE (elttype), 1)
4987 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4988 <= 40 * 8)))))
4990 lo -= minelt; hi -= minelt;
4991 for (; lo <= hi; lo++)
4993 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4995 if (GET_CODE (target) == MEM
4996 && !MEM_KEEP_ALIAS_SET_P (target)
4997 && TREE_CODE (type) == ARRAY_TYPE
4998 && TYPE_NONALIASED_COMPONENT (type))
5000 target = copy_rtx (target);
5001 MEM_KEEP_ALIAS_SET_P (target) = 1;
5004 store_constructor_field
5005 (target, bitsize, bitpos, mode, value, type, cleared,
5006 get_alias_set (elttype));
5009 else
5011 rtx loop_start = gen_label_rtx ();
5012 rtx loop_end = gen_label_rtx ();
5013 tree exit_cond;
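/* The range is not a small constant one, so emit a real loop: INDEX runs
   from LO_INDEX to HI_INDEX, storing VALUE into the selected element on
   each iteration.  */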
5015 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5016 unsignedp = TYPE_UNSIGNED (domain);
5018 index = build_decl (VAR_DECL, NULL_TREE, domain);
5020 index_r
5021 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5022 &unsignedp, 0));
5023 SET_DECL_RTL (index, index_r);
5024 if (TREE_CODE (value) == SAVE_EXPR
5025 && SAVE_EXPR_RTL (value) == 0)
5027 /* Make sure value gets expanded once before the
5028 loop. */
5029 expand_expr (value, const0_rtx, VOIDmode, 0);
5030 emit_queue ();
5032 store_expr (lo_index, index_r, 0);
5034 /* Build the head of the loop. */
5035 do_pending_stack_adjust ();
5036 emit_queue ();
5037 emit_label (loop_start);
5039 /* Assign value to element index. */
5040 position
5041 = convert (ssizetype,
5042 fold (build (MINUS_EXPR, TREE_TYPE (index),
5043 index, TYPE_MIN_VALUE (domain))));
5044 position = size_binop (MULT_EXPR, position,
5045 convert (ssizetype,
5046 TYPE_SIZE_UNIT (elttype)));
5048 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5049 xtarget = offset_address (target, pos_rtx,
5050 highest_pow2_factor (position));
5051 xtarget = adjust_address (xtarget, mode, 0);
5052 if (TREE_CODE (value) == CONSTRUCTOR)
5053 store_constructor (value, xtarget, cleared,
5054 bitsize / BITS_PER_UNIT);
5055 else
5056 store_expr (value, xtarget, 0);
5058 /* Generate a conditional jump to exit the loop. */
5059 exit_cond = build (LT_EXPR, integer_type_node,
5060 index, hi_index);
5061 jumpif (exit_cond, loop_end);
5063 /* Update the loop counter, and jump to the head of
5064 the loop. */
5065 expand_increment (build (PREINCREMENT_EXPR,
5066 TREE_TYPE (index),
5067 index, integer_one_node), 0, 0);
5068 emit_jump (loop_start);
5070 /* Build the end of the loop. */
5071 emit_label (loop_end);
5074 else if ((index != 0 && ! host_integerp (index, 0))
5075 || ! host_integerp (TYPE_SIZE (elttype), 1))
5077 tree position;
5079 if (vector)
5080 abort ();
5082 if (index == 0)
5083 index = ssize_int (i);
5085 if (minelt)
5086 index = convert (ssizetype,
5087 fold (build (MINUS_EXPR, TREE_TYPE (index),
5088 index, TYPE_MIN_VALUE (domain))));
5090 position = size_binop (MULT_EXPR, index,
5091 convert (ssizetype,
5092 TYPE_SIZE_UNIT (elttype)));
5093 xtarget = offset_address (target,
5094 expand_expr (position, 0, VOIDmode, 0),
5095 highest_pow2_factor (position));
5096 xtarget = adjust_address (xtarget, mode, 0);
5097 store_expr (value, xtarget, 0);
5099 else if (vector)
5101 int pos;
5103 if (index != 0)
5104 pos = tree_low_cst (index, 0) - minelt;
5105 else
5106 pos = i;
5107 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5109 else
5111 if (index != 0)
5112 bitpos = ((tree_low_cst (index, 0) - minelt)
5113 * tree_low_cst (TYPE_SIZE (elttype), 1));
5114 else
5115 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5117 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5118 && TREE_CODE (type) == ARRAY_TYPE
5119 && TYPE_NONALIASED_COMPONENT (type))
5121 target = copy_rtx (target);
5122 MEM_KEEP_ALIAS_SET_P (target) = 1;
5124 store_constructor_field (target, bitsize, bitpos, mode, value,
5125 type, cleared, get_alias_set (elttype));
5128 if (vector)
5130 emit_insn (GEN_FCN (icode) (target,
5131 gen_rtx_PARALLEL (GET_MODE (target),
5132 gen_rtvec_v (n_elts, vector))));
5136 /* Set constructor assignments. */
5137 else if (TREE_CODE (type) == SET_TYPE)
5139 tree elt = CONSTRUCTOR_ELTS (exp);
5140 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5141 tree domain = TYPE_DOMAIN (type);
5142 tree domain_min, domain_max, bitlength;
5144 /* The default implementation strategy is to extract the constant
5145 parts of the constructor, use that to initialize the target,
5146 and then "or" in whatever non-constant ranges we need in addition.
5148 If a large set is all zero or all ones, it is
5149 probably better to set it using memset (if available) or bzero.
5150 Also, if a large set has just a single range, it may also be
5151 better to first clear the whole set (using
5152 bzero/memset), and then set the bits we want. */
5154 /* Check for all zeros. */
5155 if (elt == NULL_TREE && size > 0)
5157 if (!cleared)
5158 clear_storage (target, GEN_INT (size));
5159 return;
5162 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5163 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5164 bitlength = size_binop (PLUS_EXPR,
5165 size_diffop (domain_max, domain_min),
5166 ssize_int (1));
5168 nbits = tree_low_cst (bitlength, 1);
5170 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5171 are "complicated" (more than one range), initialize (the
5172 constant parts) by copying from a constant. */
5173 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5174 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5176 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5177 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5178 char *bit_buffer = alloca (nbits);
5179 HOST_WIDE_INT word = 0;
5180 unsigned int bit_pos = 0;
5181 unsigned int ibit = 0;
5182 unsigned int offset = 0; /* In bytes from beginning of set. */
5184 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
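/* Pack the constant bits into words of SET_WORD_SIZE bits, storing each
   word as it fills up (or when the last bit has been handled).  */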
5185 for (;;)
5187 if (bit_buffer[ibit])
5189 if (BYTES_BIG_ENDIAN)
5190 word |= (1 << (set_word_size - 1 - bit_pos));
5191 else
5192 word |= 1 << bit_pos;
5195 bit_pos++; ibit++;
5196 if (bit_pos >= set_word_size || ibit == nbits)
5198 if (word != 0 || ! cleared)
5200 rtx datum = gen_int_mode (word, mode);
5201 rtx to_rtx;
5203 /* The assumption here is that it is safe to use
5204 XEXP if the set is multi-word, but not if
5205 it's single-word. */
5206 if (GET_CODE (target) == MEM)
5207 to_rtx = adjust_address (target, mode, offset);
5208 else if (offset == 0)
5209 to_rtx = target;
5210 else
5211 abort ();
5212 emit_move_insn (to_rtx, datum);
5215 if (ibit == nbits)
5216 break;
5217 word = 0;
5218 bit_pos = 0;
5219 offset += set_word_size / BITS_PER_UNIT;
5223 else if (!cleared)
5224 /* Don't bother clearing storage if the set is all ones. */
5225 if (TREE_CHAIN (elt) != NULL_TREE
5226 || (TREE_PURPOSE (elt) == NULL_TREE
5227 ? nbits != 1
5228 : ( ! host_integerp (TREE_VALUE (elt), 0)
5229 || ! host_integerp (TREE_PURPOSE (elt), 0)
5230 || (tree_low_cst (TREE_VALUE (elt), 0)
5231 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5232 != (HOST_WIDE_INT) nbits))))
5233 clear_storage (target, expr_size (exp));
5235 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5237 /* Start of range of element or NULL. */
5238 tree startbit = TREE_PURPOSE (elt);
5239 /* End of range of element, or element value. */
5240 tree endbit = TREE_VALUE (elt);
5241 HOST_WIDE_INT startb, endb;
5242 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5244 bitlength_rtx = expand_expr (bitlength,
5245 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5247 /* Handle non-range tuple element like [ expr ]. */
5248 if (startbit == NULL_TREE)
5250 startbit = save_expr (endbit);
5251 endbit = startbit;
5254 startbit = convert (sizetype, startbit);
5255 endbit = convert (sizetype, endbit);
5256 if (! integer_zerop (domain_min))
5258 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5259 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5261 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5262 EXPAND_CONST_ADDRESS);
5263 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5264 EXPAND_CONST_ADDRESS);
5266 if (REG_P (target))
5268 targetx
5269 = assign_temp
5270 ((build_qualified_type (lang_hooks.types.type_for_mode
5271 (GET_MODE (target), 0),
5272 TYPE_QUAL_CONST)),
5273 0, 1, 1);
5274 emit_move_insn (targetx, target);
5277 else if (GET_CODE (target) == MEM)
5278 targetx = target;
5279 else
5280 abort ();
5282 /* Optimization: If startbit and endbit are constants divisible
5283 by BITS_PER_UNIT, call memset instead. */
5284 if (TARGET_MEM_FUNCTIONS
5285 && TREE_CODE (startbit) == INTEGER_CST
5286 && TREE_CODE (endbit) == INTEGER_CST
5287 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5288 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5290 emit_library_call (memset_libfunc, LCT_NORMAL,
5291 VOIDmode, 3,
5292 plus_constant (XEXP (targetx, 0),
5293 startb / BITS_PER_UNIT),
5294 Pmode,
5295 constm1_rtx, TYPE_MODE (integer_type_node),
5296 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5297 TYPE_MODE (sizetype));
5299 else
5300 emit_library_call (setbits_libfunc, LCT_NORMAL,
5301 VOIDmode, 4, XEXP (targetx, 0),
5302 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5303 startbit_rtx, TYPE_MODE (sizetype),
5304 endbit_rtx, TYPE_MODE (sizetype));
5306 if (REG_P (target))
5307 emit_move_insn (target, targetx);
5311 else
5312 abort ();
5315 /* Store the value of EXP (an expression tree)
5316 into a subfield of TARGET which has mode MODE and occupies
5317 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5318 If MODE is VOIDmode, it means that we are storing into a bit-field.
5320 If VALUE_MODE is VOIDmode, return nothing in particular.
5321 UNSIGNEDP is not used in this case.
5323 Otherwise, return an rtx for the value stored. This rtx
5324 has mode VALUE_MODE if that is convenient to do.
5325 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5327 TYPE is the type of the underlying object,
5329 ALIAS_SET is the alias set for the destination. This value will
5330 (in general) be different from that for TARGET, since TARGET is a
5331 reference to the containing structure. */
5333 static rtx
5334 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5335 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5336 int unsignedp, tree type, int alias_set)
5338 HOST_WIDE_INT width_mask = 0;
5340 if (TREE_CODE (exp) == ERROR_MARK)
5341 return const0_rtx;
5343 /* If we have nothing to store, do nothing unless the expression has
5344 side-effects. */
5345 if (bitsize == 0)
5346 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5347 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5348 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
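/* WIDTH_MASK has the low BITSIZE bits set; it is used at the end to
   recover the stored value without re-reading the bit-field.  */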
5350 /* If we are storing into an unaligned field of an aligned union that is
5351 in a register, we may have the mode of TARGET being an integer mode but
5352 MODE == BLKmode. In that case, get an aligned object whose size and
5353 alignment are the same as TARGET and store TARGET into it (we can avoid
5354 the store if the field being stored is the entire width of TARGET). Then
5355 call ourselves recursively to store the field into a BLKmode version of
5356 that object. Finally, load from the object into TARGET. This is not
5357 very efficient in general, but should only be slightly more expensive
5358 than the otherwise-required unaligned accesses. Perhaps this can be
5359 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5360 twice, once with emit_move_insn and once via store_field. */
5362 if (mode == BLKmode
5363 && (REG_P (target) || GET_CODE (target) == SUBREG))
5365 rtx object = assign_temp (type, 0, 1, 1);
5366 rtx blk_object = adjust_address (object, BLKmode, 0);
5368 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5369 emit_move_insn (object, target);
5371 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5372 alias_set);
5374 emit_move_insn (target, object);
5376 /* We want to return the BLKmode version of the data. */
5377 return blk_object;
5380 if (GET_CODE (target) == CONCAT)
5382 /* We're storing into a struct containing a single __complex. */
5384 if (bitpos != 0)
5385 abort ();
5386 return store_expr (exp, target, value_mode != VOIDmode);
5389 /* If the structure is in a register or if the component
5390 is a bit field, we cannot use addressing to access it.
5391 Use bit-field techniques or SUBREG to store in it. */
5393 if (mode == VOIDmode
5394 || (mode != BLKmode && ! direct_store[(int) mode]
5395 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5396 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5397 || REG_P (target)
5398 || GET_CODE (target) == SUBREG
5399 /* If the field isn't aligned enough to store as an ordinary memref,
5400 store it as a bit field. */
5401 || (mode != BLKmode
5402 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5403 || bitpos % GET_MODE_ALIGNMENT (mode))
5404 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5405 || (bitpos % BITS_PER_UNIT != 0)))
5406 /* If the RHS and field are a constant size and the size of the
5407 RHS isn't the same size as the bitfield, we must use bitfield
5408 operations. */
5409 || (bitsize >= 0
5410 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5411 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5413 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5415 /* If BITSIZE is narrower than the size of the type of EXP
5416 we will be narrowing TEMP. Normally, what's wanted are the
5417 low-order bits. However, if EXP's type is a record and this is a
5418 big-endian machine, we want the upper BITSIZE bits. */
5419 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5420 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5421 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5422 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5423 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5424 - bitsize),
5425 NULL_RTX, 1);
5427 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5428 MODE. */
5429 if (mode != VOIDmode && mode != BLKmode
5430 && mode != TYPE_MODE (TREE_TYPE (exp)))
5431 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5433 /* If the modes of TARGET and TEMP are both BLKmode, both
5434 must be in memory and BITPOS must be aligned on a byte
5435 boundary. If so, we simply do a block copy. */
5436 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5438 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5439 || bitpos % BITS_PER_UNIT != 0)
5440 abort ();
5442 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5443 emit_block_move (target, temp,
5444 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5445 / BITS_PER_UNIT),
5446 BLOCK_OP_NORMAL);
5448 return value_mode == VOIDmode ? const0_rtx : target;
5451 /* Store the value in the bitfield. */
5452 store_bit_field (target, bitsize, bitpos, mode, temp,
5453 int_size_in_bytes (type));
5455 if (value_mode != VOIDmode)
5457 /* The caller wants an rtx for the value.
5458 If possible, avoid refetching from the bitfield itself. */
5459 if (width_mask != 0
5460 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5462 tree count;
5463 enum machine_mode tmode;
5465 tmode = GET_MODE (temp);
5466 if (tmode == VOIDmode)
5467 tmode = value_mode;
5469 if (unsignedp)
5470 return expand_and (tmode, temp,
5471 gen_int_mode (width_mask, tmode),
5472 NULL_RTX);
5474 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5475 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5476 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5479 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5480 NULL_RTX, value_mode, VOIDmode,
5481 int_size_in_bytes (type));
5483 return const0_rtx;
5485 else
5487 rtx addr = XEXP (target, 0);
5488 rtx to_rtx = target;
5490 /* If a value is wanted, it must be the lhs;
5491 so make the address stable for multiple use. */
5493 if (value_mode != VOIDmode && !REG_P (addr)
5494 && ! CONSTANT_ADDRESS_P (addr)
5495 /* A frame-pointer reference is already stable. */
5496 && ! (GET_CODE (addr) == PLUS
5497 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5498 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5499 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5500 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5502 /* Now build a reference to just the desired component. */
5504 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5506 if (to_rtx == target)
5507 to_rtx = copy_rtx (to_rtx);
5509 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5510 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5511 set_mem_alias_set (to_rtx, alias_set);
5513 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5517 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5518 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5519 codes and find the ultimate containing object, which we return.
5521 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5522 bit position, and *PUNSIGNEDP to the signedness of the field.
5523 If the position of the field is variable, we store a tree
5524 giving the variable offset (in units) in *POFFSET.
5525 This offset is in addition to the bit position.
5526 If the position is not variable, we store 0 in *POFFSET.
5528 If any of the extraction expressions is volatile,
5529 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5531 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5532 is a mode that can be used to access the field. In that case, *PBITSIZE
5533 is redundant.
5535 If the field describes a variable-sized object, *PMODE is set to
5536 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5537 this case, but the address of the object can be found. */
5539 tree
5540 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5541 HOST_WIDE_INT *pbitpos, tree *poffset,
5542 enum machine_mode *pmode, int *punsignedp,
5543 int *pvolatilep)
5545 tree size_tree = 0;
5546 enum machine_mode mode = VOIDmode;
5547 tree offset = size_zero_node;
5548 tree bit_offset = bitsize_zero_node;
5549 tree tem;
5551 /* First get the mode, signedness, and size. We do this from just the
5552 outermost expression. */
5553 if (TREE_CODE (exp) == COMPONENT_REF)
5555 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5556 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5557 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5559 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5561 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5563 size_tree = TREE_OPERAND (exp, 1);
5564 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5566 else
5568 mode = TYPE_MODE (TREE_TYPE (exp));
5569 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5571 if (mode == BLKmode)
5572 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5573 else
5574 *pbitsize = GET_MODE_BITSIZE (mode);
5577 if (size_tree != 0)
5579 if (! host_integerp (size_tree, 1))
5580 mode = BLKmode, *pbitsize = -1;
5581 else
5582 *pbitsize = tree_low_cst (size_tree, 1);
5585 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5586 and find the ultimate containing object. */
5587 while (1)
5589 if (TREE_CODE (exp) == BIT_FIELD_REF)
5590 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5591 else if (TREE_CODE (exp) == COMPONENT_REF)
5593 tree field = TREE_OPERAND (exp, 1);
5594 tree this_offset = component_ref_field_offset (exp);
5596 /* If this field hasn't been filled in yet, don't go
5597 past it. This should only happen when folding expressions
5598 made during type construction. */
5599 if (this_offset == 0)
5600 break;
5602 offset = size_binop (PLUS_EXPR, offset, this_offset);
5603 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5604 DECL_FIELD_BIT_OFFSET (field));
5606 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5609 else if (TREE_CODE (exp) == ARRAY_REF
5610 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5612 tree index = TREE_OPERAND (exp, 1);
5613 tree low_bound = array_ref_low_bound (exp);
5614 tree unit_size = array_ref_element_size (exp);
5616 /* We assume all arrays have sizes that are a multiple of a byte.
5617 First subtract the lower bound, if any, in the type of the
5618 index, then convert to sizetype and multiply by the size of the
5619 array element. */
5620 if (! integer_zerop (low_bound))
5621 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5622 index, low_bound));
5624 offset = size_binop (PLUS_EXPR, offset,
5625 size_binop (MULT_EXPR,
5626 convert (sizetype, index),
5627 unit_size));
5630 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5631 conversions that don't change the mode, and all view conversions
5632 except those that need to "step up" the alignment. */
5633 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5634 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5635 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5636 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5637 && STRICT_ALIGNMENT
5638 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5639 < BIGGEST_ALIGNMENT)
5640 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5641 || TYPE_ALIGN_OK (TREE_TYPE
5642 (TREE_OPERAND (exp, 0))))))
5643 && ! ((TREE_CODE (exp) == NOP_EXPR
5644 || TREE_CODE (exp) == CONVERT_EXPR)
5645 && (TYPE_MODE (TREE_TYPE (exp))
5646 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5647 break;
5649 /* If any reference in the chain is volatile, the effect is volatile. */
5650 if (TREE_THIS_VOLATILE (exp))
5651 *pvolatilep = 1;
5653 exp = TREE_OPERAND (exp, 0);
5656 /* If OFFSET is constant, see if we can return the whole thing as a
5657 constant bit position. Otherwise, split it up. */
5658 if (host_integerp (offset, 0)
5659 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5660 bitsize_unit_node))
5661 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5662 && host_integerp (tem, 0))
5663 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5664 else
5665 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5667 *pmode = mode;
5668 return exp;
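/* Usage sketch (REF stands for some COMPONENT_REF/ARRAY_REF tree built
   elsewhere): a typical caller recovers the containing object and the
   position of the accessed piece like this:

       HOST_WIDE_INT bitsize, bitpos;
       tree offset;
       enum machine_mode mode1;
       int unsignedp, volatilep = 0;
       tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep);

   If OFFSET comes back zero, BITPOS alone locates the piece within
   BASE; otherwise there is an additional variable byte offset that has
   to be expanded separately (see normal_inner_ref further below).  */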
5671 /* Return a tree of sizetype representing the size, in bytes, of the element
5672 of EXP, an ARRAY_REF. */
5674 tree
5675 array_ref_element_size (tree exp)
5677 tree aligned_size = TREE_OPERAND (exp, 3);
5678 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5680 /* If a size was specified in the ARRAY_REF, it's the size measured
5681 in alignment units of the element type. So multiply by that value. */
5682 if (aligned_size)
5683 return size_binop (MULT_EXPR, aligned_size,
5684 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5686 /* Otherwise, take the size from that of the element type. Substitute
5687 any PLACEHOLDER_EXPR that we have. */
5688 else
5689 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
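/* Worked example with hypothetical numbers: if the element type is
   aligned to 32 bits, TYPE_ALIGN / BITS_PER_UNIT is 4, so an operand 3
   of 5 denotes elements of 5 * 4 = 20 bytes; with no operand 3 the
   size is simply TYPE_SIZE_UNIT of the element type.  */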
5692 /* Return a tree representing the lower bound of the array mentioned in
5693 EXP, an ARRAY_REF. */
5695 tree
5696 array_ref_low_bound (tree exp)
5698 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5700 /* If a lower bound is specified in EXP, use it. */
5701 if (TREE_OPERAND (exp, 2))
5702 return TREE_OPERAND (exp, 2);
5704 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5705 substituting for a PLACEHOLDER_EXPR as needed. */
5706 if (domain_type && TYPE_MIN_VALUE (domain_type))
5707 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5709 /* Otherwise, return a zero of the appropriate type. */
5710 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5713 /* Return a tree representing the offset, in bytes, of the field referenced
5714 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5716 tree
5717 component_ref_field_offset (tree exp)
5719 tree aligned_offset = TREE_OPERAND (exp, 2);
5720 tree field = TREE_OPERAND (exp, 1);
5722 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5723 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5724 value. */
5725 if (aligned_offset)
5726 return size_binop (MULT_EXPR, aligned_offset,
5727 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5729 /* Otherwise, take the offset from that of the field. Substitute
5730 any PLACEHOLDER_EXPR that we have. */
5731 else
5732 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
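/* Worked example with hypothetical numbers: if DECL_OFFSET_ALIGN is 64
   bits, an aligned offset of 3 in operand 2 denotes a byte offset of
   3 * (64 / 8) = 24; the remaining bit position within that aligned
   chunk comes from DECL_FIELD_BIT_OFFSET, as get_inner_reference does
   above.  */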
5735 /* Return 1 if T is an expression that get_inner_reference handles. */
5737 int
5738 handled_component_p (tree t)
5740 switch (TREE_CODE (t))
5742 case BIT_FIELD_REF:
5743 case COMPONENT_REF:
5744 case ARRAY_REF:
5745 case ARRAY_RANGE_REF:
5746 case NON_LVALUE_EXPR:
5747 case VIEW_CONVERT_EXPR:
5748 return 1;
5750 /* ??? Sure they are handled, but get_inner_reference may return
5751 a different PBITSIZE, depending upon whether the expression is
5752 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5753 case NOP_EXPR:
5754 case CONVERT_EXPR:
5755 return (TYPE_MODE (TREE_TYPE (t))
5756 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5758 default:
5759 return 0;
5763 /* Given an rtx VALUE that may contain additions and multiplications, return
5764 an equivalent value that just refers to a register, memory, or constant.
5765 This is done by generating instructions to perform the arithmetic and
5766 returning a pseudo-register containing the value.
5768 The returned value may be a REG, SUBREG, MEM or constant. */
5770 rtx
5771 force_operand (rtx value, rtx target)
5773 rtx op1, op2;
5774 /* Use subtarget as the target for operand 0 of a binary operation. */
5775 rtx subtarget = get_subtarget (target);
5776 enum rtx_code code = GET_CODE (value);
5778 /* Check for subreg applied to an expression produced by loop optimizer. */
5779 if (code == SUBREG
5780 && !REG_P (SUBREG_REG (value))
5781 && GET_CODE (SUBREG_REG (value)) != MEM)
5783 value = simplify_gen_subreg (GET_MODE (value),
5784 force_reg (GET_MODE (SUBREG_REG (value)),
5785 force_operand (SUBREG_REG (value),
5786 NULL_RTX)),
5787 GET_MODE (SUBREG_REG (value)),
5788 SUBREG_BYTE (value));
5789 code = GET_CODE (value);
5792 /* Check for a PIC address load. */
5793 if ((code == PLUS || code == MINUS)
5794 && XEXP (value, 0) == pic_offset_table_rtx
5795 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5796 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5797 || GET_CODE (XEXP (value, 1)) == CONST))
5799 if (!subtarget)
5800 subtarget = gen_reg_rtx (GET_MODE (value));
5801 emit_move_insn (subtarget, value);
5802 return subtarget;
5805 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5807 if (!target)
5808 target = gen_reg_rtx (GET_MODE (value));
5809 convert_move (target, force_operand (XEXP (value, 0), NULL),
5810 code == ZERO_EXTEND);
5811 return target;
5814 if (ARITHMETIC_P (value))
5816 op2 = XEXP (value, 1);
5817 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5818 subtarget = 0;
5819 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5821 code = PLUS;
5822 op2 = negate_rtx (GET_MODE (value), op2);
5825 /* Check for an addition with OP2 a constant integer and our first
5826 operand a PLUS of a virtual register and something else. In that
5827 case, we want to emit the sum of the virtual register and the
5828 constant first and then add the other value. This allows virtual
5829 register instantiation to simply modify the constant rather than
5830 creating another one around this addition. */
5831 if (code == PLUS && GET_CODE (op2) == CONST_INT
5832 && GET_CODE (XEXP (value, 0)) == PLUS
5833 && REG_P (XEXP (XEXP (value, 0), 0))
5834 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5835 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5837 rtx temp = expand_simple_binop (GET_MODE (value), code,
5838 XEXP (XEXP (value, 0), 0), op2,
5839 subtarget, 0, OPTAB_LIB_WIDEN);
5840 return expand_simple_binop (GET_MODE (value), code, temp,
5841 force_operand (XEXP (XEXP (value,
5842 0), 1), 0),
5843 target, 0, OPTAB_LIB_WIDEN);
5846 op1 = force_operand (XEXP (value, 0), subtarget);
5847 op2 = force_operand (op2, NULL_RTX);
5848 switch (code)
5850 case MULT:
5851 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5852 case DIV:
5853 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5854 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5855 target, 1, OPTAB_LIB_WIDEN);
5856 else
5857 return expand_divmod (0,
5858 FLOAT_MODE_P (GET_MODE (value))
5859 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5860 GET_MODE (value), op1, op2, target, 0);
5861 break;
5862 case MOD:
5863 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5864 target, 0);
5865 break;
5866 case UDIV:
5867 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5868 target, 1);
5869 break;
5870 case UMOD:
5871 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5872 target, 1);
5873 break;
5874 case ASHIFTRT:
5875 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5876 target, 0, OPTAB_LIB_WIDEN);
5877 break;
5878 default:
5879 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5880 target, 1, OPTAB_LIB_WIDEN);
5883 if (UNARY_P (value))
5885 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5886 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5889 #ifdef INSN_SCHEDULING
5890 /* On machines that have insn scheduling, we want all memory references to be
5891 explicit, so we need to deal with such paradoxical SUBREGs. */
5892 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5893 && (GET_MODE_SIZE (GET_MODE (value))
5894 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5895 value
5896 = simplify_gen_subreg (GET_MODE (value),
5897 force_reg (GET_MODE (SUBREG_REG (value)),
5898 force_operand (SUBREG_REG (value),
5899 NULL_RTX)),
5900 GET_MODE (SUBREG_REG (value)),
5901 SUBREG_BYTE (value));
5902 #endif
5904 return value;
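/* Usage sketch (BASE_REG and INDEX_REG are hypothetical pseudos):

       rtx addr = gen_rtx_PLUS (Pmode,
                                gen_rtx_MULT (Pmode, index_reg,
                                              GEN_INT (4)),
                                base_reg);
       rtx flat = force_operand (addr, NULL_RTX);

   emits the multiply and add as real insns and leaves FLAT as a REG
   (or MEM/constant) holding base_reg + index_reg * 4, suitable for use
   as an ordinary operand.  */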
5907 /* Subroutine of expand_expr: return nonzero iff there is no way that
5908 EXP can reference X, which is being modified. TOP_P is nonzero if this
5909 call is going to be used to determine whether we need a temporary
5910 for EXP, as opposed to a recursive call to this function.
5912 It is always safe for this routine to return zero since it merely
5913 searches for optimization opportunities. */
5915 static int
5916 safe_from_p (rtx x, tree exp, int top_p)
5918 rtx exp_rtl = 0;
5919 int i, nops;
5920 static tree save_expr_list;
5922 if (x == 0
5923 /* If EXP has varying size, we MUST use a target since we currently
5924 have no way of allocating temporaries of variable size
5925 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5926 So we assume here that something at a higher level has prevented a
5927 clash. This is somewhat bogus, but the best we can do. Only
5928 do this when X is BLKmode and when we are at the top level. */
5929 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5930 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5931 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5932 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5933 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5934 != INTEGER_CST)
5935 && GET_MODE (x) == BLKmode)
5936 /* If X is in the outgoing argument area, it is always safe. */
5937 || (GET_CODE (x) == MEM
5938 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5939 || (GET_CODE (XEXP (x, 0)) == PLUS
5940 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5941 return 1;
5943 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5944 find the underlying pseudo. */
5945 if (GET_CODE (x) == SUBREG)
5947 x = SUBREG_REG (x);
5948 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5949 return 0;
5952 /* A SAVE_EXPR might appear many times in the expression passed to the
5953 top-level safe_from_p call, and if it has a complex subexpression,
5954 examining it multiple times could result in a combinatorial explosion.
5955 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5956 with optimization took about 28 minutes to compile -- even though it was
5957 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5958 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5959 we have processed. Note that the only test of top_p was above. */
5961 if (top_p)
5963 int rtn;
5964 tree t;
5966 save_expr_list = 0;
5968 rtn = safe_from_p (x, exp, 0);
5970 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5971 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5973 return rtn;
5976 /* Now look at our tree code and possibly recurse. */
5977 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5979 case 'd':
5980 exp_rtl = DECL_RTL_IF_SET (exp);
5981 break;
5983 case 'c':
5984 return 1;
5986 case 'x':
5987 if (TREE_CODE (exp) == TREE_LIST)
5989 while (1)
5991 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5992 return 0;
5993 exp = TREE_CHAIN (exp);
5994 if (!exp)
5995 return 1;
5996 if (TREE_CODE (exp) != TREE_LIST)
5997 return safe_from_p (x, exp, 0);
6000 else if (TREE_CODE (exp) == ERROR_MARK)
6001 return 1; /* An already-visited SAVE_EXPR? */
6002 else
6003 return 0;
6005 case 's':
6006 /* The only case we look at here is the DECL_INITIAL inside a
6007 DECL_EXPR. */
6008 return (TREE_CODE (exp) != DECL_EXPR
6009 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6010 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6011 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6013 case '2':
6014 case '<':
6015 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6016 return 0;
6017 /* Fall through. */
6019 case '1':
6020 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6022 case 'e':
6023 case 'r':
6024 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6025 the expression. If it is set, we conflict iff we are that rtx or
6026 both are in memory. Otherwise, we check all operands of the
6027 expression recursively. */
6029 switch (TREE_CODE (exp))
6031 case ADDR_EXPR:
6032 /* If the operand is static or we are static, we can't conflict.
6033 Likewise if we don't conflict with the operand at all. */
6034 if (staticp (TREE_OPERAND (exp, 0))
6035 || TREE_STATIC (exp)
6036 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6037 return 1;
6039 /* Otherwise, the only way this can conflict is if we are taking
6040 the address of a DECL whose address is part of X, which is
6041 very rare. */
6042 exp = TREE_OPERAND (exp, 0);
6043 if (DECL_P (exp))
6045 if (!DECL_RTL_SET_P (exp)
6046 || GET_CODE (DECL_RTL (exp)) != MEM)
6047 return 0;
6048 else
6049 exp_rtl = XEXP (DECL_RTL (exp), 0);
6051 break;
6053 case INDIRECT_REF:
6054 if (GET_CODE (x) == MEM
6055 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6056 get_alias_set (exp)))
6057 return 0;
6058 break;
6060 case CALL_EXPR:
6061 /* Assume that the call will clobber all hard registers and
6062 all of memory. */
6063 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6064 || GET_CODE (x) == MEM)
6065 return 0;
6066 break;
6068 case RTL_EXPR:
6069 /* If a sequence exists, we would have to scan every instruction
6070 in the sequence to see if it was safe. This is probably not
6071 worthwhile. */
6072 if (RTL_EXPR_SEQUENCE (exp))
6073 return 0;
6075 exp_rtl = RTL_EXPR_RTL (exp);
6076 break;
6078 case WITH_CLEANUP_EXPR:
6079 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6080 break;
6082 case CLEANUP_POINT_EXPR:
6083 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6085 case SAVE_EXPR:
6086 exp_rtl = SAVE_EXPR_RTL (exp);
6087 if (exp_rtl)
6088 break;
6090 /* If we've already scanned this, don't do it again. Otherwise,
6091 show we've scanned it and record for clearing the flag if we're
6092 going on. */
6093 if (TREE_PRIVATE (exp))
6094 return 1;
6096 TREE_PRIVATE (exp) = 1;
6097 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6099 TREE_PRIVATE (exp) = 0;
6100 return 0;
6103 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6104 return 1;
6106 case BIND_EXPR:
6107 /* The only operand we look at is operand 1. The rest aren't
6108 part of the expression. */
6109 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6111 default:
6112 break;
6115 /* If we have an rtx, we do not need to scan our operands. */
6116 if (exp_rtl)
6117 break;
6119 nops = first_rtl_op (TREE_CODE (exp));
6120 for (i = 0; i < nops; i++)
6121 if (TREE_OPERAND (exp, i) != 0
6122 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6123 return 0;
6125 /* If this is a language-specific tree code, it may require
6126 special handling. */
6127 if ((unsigned int) TREE_CODE (exp)
6128 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6129 && !lang_hooks.safe_from_p (x, exp))
6130 return 0;
6133 /* If we have an rtl, find any enclosed object. Then see if we conflict
6134 with it. */
6135 if (exp_rtl)
6137 if (GET_CODE (exp_rtl) == SUBREG)
6139 exp_rtl = SUBREG_REG (exp_rtl);
6140 if (REG_P (exp_rtl)
6141 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6142 return 0;
6145 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6146 are memory and they conflict. */
6147 return ! (rtx_equal_p (x, exp_rtl)
6148 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6149 && true_dependence (exp_rtl, VOIDmode, x,
6150 rtx_addr_varies_p)));
6153 /* If we reach here, it is safe. */
6154 return 1;
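/* Usage sketch: a caller that wants to reuse TARGET while another
   operand is still to be evaluated checks it first, e.g. (as
   expand_operands below does):

       if (! safe_from_p (target, exp1, 1))
         target = 0;

   i.e. if EXP1 might reference TARGET, fall back to letting
   expand_expr choose a fresh temporary.  */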
6157 /* Subroutine of expand_expr: return rtx if EXP is a
6158 variable or parameter; else return 0. */
6160 static rtx
6161 var_rtx (tree exp)
6163 STRIP_NOPS (exp);
6164 switch (TREE_CODE (exp))
6166 case PARM_DECL:
6167 case VAR_DECL:
6168 return DECL_RTL (exp);
6169 default:
6170 return 0;
6174 /* Return the highest power of two that EXP is known to be a multiple of.
6175 This is used in updating alignment of MEMs in array references. */
6177 static unsigned HOST_WIDE_INT
6178 highest_pow2_factor (tree exp)
6180 unsigned HOST_WIDE_INT c0, c1;
6182 switch (TREE_CODE (exp))
6184 case INTEGER_CST:
6185 /* We can find the lowest bit that's a one. If the low
6186 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6187 We need to handle this case since we can find it in a COND_EXPR,
6188 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6189 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6190 later ICE. */
6191 if (TREE_CONSTANT_OVERFLOW (exp))
6192 return BIGGEST_ALIGNMENT;
6193 else
6195 /* Note: tree_low_cst is intentionally not used here;
6196 we don't care about the upper bits. */
6197 c0 = TREE_INT_CST_LOW (exp);
6198 c0 &= -c0;
6199 return c0 ? c0 : BIGGEST_ALIGNMENT;
6201 break;
6203 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6204 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6205 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6206 return MIN (c0, c1);
6208 case MULT_EXPR:
6209 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6210 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6211 return c0 * c1;
6213 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6214 case CEIL_DIV_EXPR:
6215 if (integer_pow2p (TREE_OPERAND (exp, 1))
6216 && host_integerp (TREE_OPERAND (exp, 1), 1))
6218 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6219 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6220 return MAX (1, c0 / c1);
6222 break;
6224 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6225 case SAVE_EXPR:
6226 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6228 case COMPOUND_EXPR:
6229 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6231 case COND_EXPR:
6232 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6233 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6234 return MIN (c0, c1);
6236 default:
6237 break;
6240 return 1;
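/* Worked example (hypothetical offset tree): for i * 12 + 8 the
   variable i contributes 1 and the constant 12 contributes 4, so the
   MULT_EXPR yields 1 * 4 = 4; the constant 8 contributes 8; the
   PLUS_EXPR then takes MIN (4, 8) = 4, so the whole offset is known
   to be a multiple of 4.  */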
6243 /* Similar, except that the alignment requirements of TARGET are
6244 taken into account. Assume it is at least as aligned as its
6245 type, unless it is a COMPONENT_REF in which case the layout of
6246 the structure gives the alignment. */
6248 static unsigned HOST_WIDE_INT
6249 highest_pow2_factor_for_target (tree target, tree exp)
6251 unsigned HOST_WIDE_INT target_align, factor;
6253 factor = highest_pow2_factor (exp);
6254 if (TREE_CODE (target) == COMPONENT_REF)
6255 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6256 else
6257 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6258 return MAX (factor, target_align);
6261 /* Expands variable VAR. */
6263 void
6264 expand_var (tree var)
6266 if (DECL_EXTERNAL (var))
6267 return;
6269 if (TREE_STATIC (var))
6270 /* If this is an inlined copy of a static local variable,
6271 look up the original decl. */
6272 var = DECL_ORIGIN (var);
6274 if (TREE_STATIC (var)
6275 ? !TREE_ASM_WRITTEN (var)
6276 : !DECL_RTL_SET_P (var))
6278 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6280 /* Prepare a mem & address for the decl. */
6281 rtx x;
6283 if (TREE_STATIC (var))
6284 abort ();
6286 x = gen_rtx_MEM (DECL_MODE (var),
6287 gen_reg_rtx (Pmode));
6289 set_mem_attributes (x, var, 1);
6290 SET_DECL_RTL (var, x);
6292 else if (lang_hooks.expand_decl (var))
6293 /* OK. */;
6294 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6295 expand_decl (var);
6296 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6297 rest_of_decl_compilation (var, NULL, 0, 0);
6298 else if (TREE_CODE (var) == TYPE_DECL
6299 || TREE_CODE (var) == CONST_DECL
6300 || TREE_CODE (var) == FUNCTION_DECL
6301 || TREE_CODE (var) == LABEL_DECL)
6302 /* No expansion needed. */;
6303 else
6304 abort ();
6308 /* Expands declarations of variables in list VARS. */
6310 static void
6311 expand_vars (tree vars)
6313 for (; vars; vars = TREE_CHAIN (vars))
6315 tree var = vars;
6317 if (DECL_EXTERNAL (var))
6318 continue;
6320 expand_var (var);
6321 expand_decl_init (var);
6325 /* Subroutine of expand_expr. Expand the two operands of a binary
6326 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6327 The value may be stored in TARGET if TARGET is nonzero. The
6328 MODIFIER argument is as documented by expand_expr. */
6330 static void
6331 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6332 enum expand_modifier modifier)
6334 if (! safe_from_p (target, exp1, 1))
6335 target = 0;
6336 if (operand_equal_p (exp0, exp1, 0))
6338 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6339 *op1 = copy_rtx (*op0);
6341 else
6343 /* If we need to preserve evaluation order, copy exp0 into its own
6344 temporary variable so that it can't be clobbered by exp1. */
6345 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6346 exp0 = save_expr (exp0);
6347 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6348 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
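/* Usage sketch: a binary operator case typically does

       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                        subtarget, &op0, &op1, EXPAND_NORMAL);

   and then feeds OP0 and OP1 to expand_binop or a similar routine.  */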
6353 /* expand_expr: generate code for computing expression EXP.
6354 An rtx for the computed value is returned. The value is never null.
6355 In the case of a void EXP, const0_rtx is returned.
6357 The value may be stored in TARGET if TARGET is nonzero.
6358 TARGET is just a suggestion; callers must assume that
6359 the rtx returned may not be the same as TARGET.
6361 If TARGET is CONST0_RTX, it means that the value will be ignored.
6363 If TMODE is not VOIDmode, it suggests generating the
6364 result in mode TMODE. But this is done only when convenient.
6365 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6366 TMODE is just a suggestion; callers must assume that
6367 the rtx returned may not have mode TMODE.
6369 Note that TARGET may have neither TMODE nor MODE. In that case, it
6370 probably will not be used.
6372 If MODIFIER is EXPAND_SUM then when EXP is an addition
6373 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6374 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6375 products as above, or REG or MEM, or constant.
6376 Ordinarily in such cases we would output mul or add instructions
6377 and then return a pseudo reg containing the sum.
6379 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6380 it also marks a label as absolutely required (it can't be dead).
6381 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6382 This is used for outputting expressions used in initializers.
6384 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6385 with a constant address even if that address is not normally legitimate.
6386 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6388 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6389 a call parameter. Such targets require special care as we haven't yet
6390 marked TARGET so that it's safe from being trashed by libcalls. We
6391 don't want to use TARGET for anything but the final result;
6392 intermediate values must go elsewhere. Additionally, calls to
6393 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6395 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6396 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6397 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6398 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6399 recursively. */
6401 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6402 enum expand_modifier, rtx *);
6404 rtx
6405 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6406 enum expand_modifier modifier, rtx *alt_rtl)
6408 int rn = -1;
6409 rtx ret, last = NULL;
6411 /* Handle ERROR_MARK before anybody tries to access its type. */
6412 if (TREE_CODE (exp) == ERROR_MARK
6413 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6415 ret = CONST0_RTX (tmode);
6416 return ret ? ret : const0_rtx;
6419 if (flag_non_call_exceptions)
6421 rn = lookup_stmt_eh_region (exp);
6422 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6423 if (rn >= 0)
6424 last = get_last_insn ();
6427 /* If this is an expression of some kind and it has an associated line
6428 number, then emit the line number before expanding the expression.
6430 We need to save and restore the file and line information so that
6431 errors discovered during expansion are emitted with the right
6432 information. It would be better if the diagnostic routines
6433 used the file/line information embedded in the tree nodes rather
6434 than globals. */
6435 if (cfun && EXPR_HAS_LOCATION (exp))
6437 location_t saved_location = input_location;
6438 input_location = EXPR_LOCATION (exp);
6439 emit_line_note (input_location);
6441 /* Record where the insns produced belong. */
6442 if (cfun->dont_emit_block_notes)
6443 record_block_change (TREE_BLOCK (exp));
6445 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6447 input_location = saved_location;
6449 else
6451 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6454 /* If using non-call exceptions, mark all insns that may trap.
6455 expand_call() will mark CALL_INSNs before we get to this code,
6456 but it doesn't handle libcalls, and these may trap. */
6457 if (rn >= 0)
6459 rtx insn;
6460 for (insn = next_real_insn (last); insn;
6461 insn = next_real_insn (insn))
6463 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6464 /* If we want exceptions for non-call insns, any
6465 may_trap_p instruction may throw. */
6466 && GET_CODE (PATTERN (insn)) != CLOBBER
6467 && GET_CODE (PATTERN (insn)) != USE
6468 && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6470 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6471 REG_NOTES (insn));
6476 return ret;
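/* Usage sketch (EXP is a hypothetical tree to be expanded): most
   callers go through expand_expr, documented above, e.g.

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   which asks for no particular target and no particular mode; VAL may
   then be any REG, SUBREG, MEM or constant carrying the value.  */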
6479 static rtx
6480 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6481 enum expand_modifier modifier, rtx *alt_rtl)
6483 rtx op0, op1, temp;
6484 tree type = TREE_TYPE (exp);
6485 int unsignedp;
6486 enum machine_mode mode;
6487 enum tree_code code = TREE_CODE (exp);
6488 optab this_optab;
6489 rtx subtarget, original_target;
6490 int ignore;
6491 tree context;
6493 mode = TYPE_MODE (type);
6494 unsignedp = TYPE_UNSIGNED (type);
6496 /* Use subtarget as the target for operand 0 of a binary operation. */
6497 subtarget = get_subtarget (target);
6498 original_target = target;
6499 ignore = (target == const0_rtx
6500 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6501 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6502 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6503 && TREE_CODE (type) == VOID_TYPE));
6505 /* If we are going to ignore this result, we need only do something
6506 if there is a side-effect somewhere in the expression. If there
6507 is, short-circuit the most common cases here. Note that we must
6508 not call expand_expr with anything but const0_rtx in case this
6509 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6511 if (ignore)
6513 if (! TREE_SIDE_EFFECTS (exp))
6514 return const0_rtx;
6516 /* Ensure we reference a volatile object even if value is ignored, but
6517 don't do this if all we are doing is taking its address. */
6518 if (TREE_THIS_VOLATILE (exp)
6519 && TREE_CODE (exp) != FUNCTION_DECL
6520 && mode != VOIDmode && mode != BLKmode
6521 && modifier != EXPAND_CONST_ADDRESS)
6523 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6524 if (GET_CODE (temp) == MEM)
6525 temp = copy_to_reg (temp);
6526 return const0_rtx;
6529 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6530 || code == INDIRECT_REF || code == BUFFER_REF)
6531 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6532 modifier);
6534 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6535 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6537 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6538 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6539 return const0_rtx;
6541 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6542 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6543 /* If the second operand has no side effects, just evaluate
6544 the first. */
6545 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6546 modifier);
6547 else if (code == BIT_FIELD_REF)
6549 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6550 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6551 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6552 return const0_rtx;
6555 target = 0;
6558 /* If we will do cse, generate all results into pseudo registers
6559 since 1) that allows cse to find more things
6560 and 2) otherwise cse could produce an insn the machine
6561 cannot support. An exception is a CONSTRUCTOR into a multi-word
6562 MEM: that's much more likely to be most efficient into the MEM.
6563 Another is a CALL_EXPR which must return in memory. */
6565 if (! cse_not_expected && mode != BLKmode && target
6566 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6567 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6568 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6569 target = 0;
6571 switch (code)
6573 case LABEL_DECL:
6575 tree function = decl_function_context (exp);
6577 temp = label_rtx (exp);
6578 temp = gen_rtx_LABEL_REF (Pmode, temp);
6580 if (function != current_function_decl
6581 && function != 0)
6582 LABEL_REF_NONLOCAL_P (temp) = 1;
6584 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6585 return temp;
6588 case PARM_DECL:
6589 if (!DECL_RTL_SET_P (exp))
6591 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6592 return CONST0_RTX (mode);
6595 /* ... fall through ... */
6597 case VAR_DECL:
6598 /* If a static var's type was incomplete when the decl was written,
6599 but the type is complete now, lay out the decl now. */
6600 if (DECL_SIZE (exp) == 0
6601 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6602 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6603 layout_decl (exp, 0);
6605 /* ... fall through ... */
6607 case FUNCTION_DECL:
6608 case RESULT_DECL:
6609 if (DECL_RTL (exp) == 0)
6610 abort ();
6612 /* Ensure the variable is marked as used even if it doesn't go through
6613 a parser. If it hasn't been used yet, write out an external
6614 definition. */
6615 if (! TREE_USED (exp))
6617 assemble_external (exp);
6618 TREE_USED (exp) = 1;
6621 /* Show we haven't gotten RTL for this yet. */
6622 temp = 0;
6624 /* Handle variables inherited from containing functions. */
6625 context = decl_function_context (exp);
6627 if (context != 0 && context != current_function_decl
6628 /* If var is static, we don't need a static chain to access it. */
6629 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6630 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6632 rtx addr;
6634 /* Mark as non-local and addressable. */
6635 DECL_NONLOCAL (exp) = 1;
6636 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6637 abort ();
6638 lang_hooks.mark_addressable (exp);
6639 if (GET_CODE (DECL_RTL (exp)) != MEM)
6640 abort ();
6641 addr = XEXP (DECL_RTL (exp), 0);
6642 if (GET_CODE (addr) == MEM)
6643 addr
6644 = replace_equiv_address (addr,
6645 fix_lexical_addr (XEXP (addr, 0), exp));
6646 else
6647 addr = fix_lexical_addr (addr, exp);
6649 temp = replace_equiv_address (DECL_RTL (exp), addr);
6652 /* This is the case of an array whose size is to be determined
6653 from its initializer, while the initializer is still being parsed.
6654 See expand_decl. */
6656 else if (GET_CODE (DECL_RTL (exp)) == MEM
6657 && REG_P (XEXP (DECL_RTL (exp), 0)))
6658 temp = validize_mem (DECL_RTL (exp));
6660 /* If DECL_RTL is memory, we are in the normal case and either
6661 the address is not valid or it is not a register and -fforce-addr
6662 is specified, get the address into a register. */
6664 else if (GET_CODE (DECL_RTL (exp)) == MEM
6665 && modifier != EXPAND_CONST_ADDRESS
6666 && modifier != EXPAND_SUM
6667 && modifier != EXPAND_INITIALIZER
6668 && (! memory_address_p (DECL_MODE (exp),
6669 XEXP (DECL_RTL (exp), 0))
6670 || (flag_force_addr
6671 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6673 if (alt_rtl)
6674 *alt_rtl = DECL_RTL (exp);
6675 temp = replace_equiv_address (DECL_RTL (exp),
6676 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6679 /* If we got something, return it. But first, set the alignment
6680 if the address is a register. */
6681 if (temp != 0)
6683 if (GET_CODE (temp) == MEM && REG_P (XEXP (temp, 0)))
6684 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6686 return temp;
6689 /* If the mode of DECL_RTL does not match that of the decl, it
6690 must be a promoted value. We return a SUBREG of the wanted mode,
6691 but mark it so that we know that it was already extended. */
6693 if (REG_P (DECL_RTL (exp))
6694 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6696 /* Get the signedness used for this variable. Ensure we get the
6697 same mode we got when the variable was declared. */
6698 if (GET_MODE (DECL_RTL (exp))
6699 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6700 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6701 abort ();
6703 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6704 SUBREG_PROMOTED_VAR_P (temp) = 1;
6705 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6706 return temp;
6709 return DECL_RTL (exp);
6711 case INTEGER_CST:
6712 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6713 TREE_INT_CST_HIGH (exp), mode);
6715 /* ??? If overflow is set, fold will have done an incomplete job,
6716 which can result in (plus xx (const_int 0)), which can get
6717 simplified by validate_replace_rtx during virtual register
6718 instantiation, which can result in unrecognizable insns.
6719 Avoid this by forcing all overflows into registers. */
6720 if (TREE_CONSTANT_OVERFLOW (exp)
6721 && modifier != EXPAND_INITIALIZER)
6722 temp = force_reg (mode, temp);
6724 return temp;
6726 case VECTOR_CST:
6727 return const_vector_from_tree (exp);
6729 case CONST_DECL:
6730 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6732 case REAL_CST:
6733 /* If optimized, generate immediate CONST_DOUBLE
6734 which will be turned into memory by reload if necessary.
6736 We used to force a register so that loop.c could see it. But
6737 this does not allow gen_* patterns to perform optimizations with
6738 the constants. It also produces two insns in cases like "x = 1.0;".
6739 On most machines, floating-point constants are not permitted in
6740 many insns, so we'd end up copying it to a register in any case.
6742 Now, we do the copying in expand_binop, if appropriate. */
6743 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6744 TYPE_MODE (TREE_TYPE (exp)));
6746 case COMPLEX_CST:
6747 /* Handle evaluating a complex constant in a CONCAT target. */
6748 if (original_target && GET_CODE (original_target) == CONCAT)
6750 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6751 rtx rtarg, itarg;
6753 rtarg = XEXP (original_target, 0);
6754 itarg = XEXP (original_target, 1);
6756 /* Move the real and imaginary parts separately. */
6757 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6758 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6760 if (op0 != rtarg)
6761 emit_move_insn (rtarg, op0);
6762 if (op1 != itarg)
6763 emit_move_insn (itarg, op1);
6765 return original_target;
6768 /* ... fall through ... */
6770 case STRING_CST:
6771 temp = output_constant_def (exp, 1);
6773 /* temp contains a constant address.
6774 On RISC machines where a constant address isn't valid,
6775 make some insns to get that address into a register. */
6776 if (modifier != EXPAND_CONST_ADDRESS
6777 && modifier != EXPAND_INITIALIZER
6778 && modifier != EXPAND_SUM
6779 && (! memory_address_p (mode, XEXP (temp, 0))
6780 || flag_force_addr))
6781 return replace_equiv_address (temp,
6782 copy_rtx (XEXP (temp, 0)));
6783 return temp;
6785 case SAVE_EXPR:
6786 context = decl_function_context (exp);
6788 /* If this SAVE_EXPR was at global context, assume we are an
6789 initialization function and move it into our context. */
6790 if (context == 0)
6791 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6793 if (context == current_function_decl)
6794 context = 0;
6796 /* If this is non-local, handle it. */
6797 if (context)
6799 /* The following call just exists to abort if the context is
6800 not of a containing function. */
6801 find_function_data (context);
6803 temp = SAVE_EXPR_RTL (exp);
6804 if (temp && REG_P (temp))
6806 put_var_into_stack (exp, /*rescan=*/true);
6807 temp = SAVE_EXPR_RTL (exp);
6809 if (temp == 0 || GET_CODE (temp) != MEM)
6810 abort ();
6811 return
6812 replace_equiv_address (temp,
6813 fix_lexical_addr (XEXP (temp, 0), exp));
6815 if (SAVE_EXPR_RTL (exp) == 0)
6817 if (mode == VOIDmode)
6818 temp = const0_rtx;
6819 else
6820 temp = assign_temp (build_qualified_type (type,
6821 (TYPE_QUALS (type)
6822 | TYPE_QUAL_CONST)),
6823 3, 0, 0);
6825 SAVE_EXPR_RTL (exp) = temp;
6826 if (!optimize && REG_P (temp))
6827 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6828 save_expr_regs);
6830 /* If the mode of TEMP does not match that of the expression, it
6831 must be a promoted value. We pass store_expr a SUBREG of the
6832 wanted mode but mark it so that we know that it was already
6833 extended. */
6835 if (REG_P (temp) && GET_MODE (temp) != mode)
6837 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6838 promote_mode (type, mode, &unsignedp, 0);
6839 SUBREG_PROMOTED_VAR_P (temp) = 1;
6840 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6843 if (temp == const0_rtx)
6844 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6845 else
6846 store_expr (TREE_OPERAND (exp, 0), temp,
6847 modifier == EXPAND_STACK_PARM ? 2 : 0);
6849 TREE_USED (exp) = 1;
6852 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6853 must be a promoted value. We return a SUBREG of the wanted mode,
6854 but mark it so that we know that it was already extended. */
6856 if (REG_P (SAVE_EXPR_RTL (exp))
6857 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6859 /* Compute the signedness and make the proper SUBREG. */
6860 promote_mode (type, mode, &unsignedp, 0);
6861 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6862 SUBREG_PROMOTED_VAR_P (temp) = 1;
6863 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6864 return temp;
6867 return SAVE_EXPR_RTL (exp);
6869 case UNSAVE_EXPR:
6871 rtx temp;
6872 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6873 TREE_OPERAND (exp, 0)
6874 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6875 return temp;
6878 case GOTO_EXPR:
6879 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6880 expand_goto (TREE_OPERAND (exp, 0));
6881 else
6882 expand_computed_goto (TREE_OPERAND (exp, 0));
6883 return const0_rtx;
6885 /* These are lowered during gimplification, so we should never ever
6886 see them here. */
6887 case LOOP_EXPR:
6888 case EXIT_EXPR:
6889 abort ();
6891 case LABELED_BLOCK_EXPR:
6892 if (LABELED_BLOCK_BODY (exp))
6893 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6894 /* Should perhaps use expand_label, but this is simpler and safer. */
6895 do_pending_stack_adjust ();
6896 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6897 return const0_rtx;
6899 case EXIT_BLOCK_EXPR:
6900 if (EXIT_BLOCK_RETURN (exp))
6901 sorry ("returned value in block_exit_expr");
6902 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6903 return const0_rtx;
6905 case BIND_EXPR:
6907 tree block = BIND_EXPR_BLOCK (exp);
6908 int mark_ends;
6910 if (TREE_CODE (BIND_EXPR_BODY (exp)) != RTL_EXPR)
6912 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6913 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6914 mark_ends = (block != NULL_TREE);
6915 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6917 else
6919 /* If we're not in functions-as-trees mode, we've already emitted
6920 those notes into our RTL_EXPR, so we just want to splice our BLOCK
6921 into the enclosing one. */
6922 mark_ends = 0;
6924 /* Need to open a binding contour here because
6925 if there are any cleanups they must be contained here. */
6926 expand_start_bindings_and_block (2, NULL_TREE);
6928 /* Mark the corresponding BLOCK for output in its proper place. */
6929 if (block)
6931 if (TREE_USED (block))
6932 abort ();
6933 lang_hooks.decls.insert_block (block);
6937 /* If VARS have not yet been expanded, expand them now. */
6938 expand_vars (BIND_EXPR_VARS (exp));
6940 /* TARGET was clobbered early in this function. The correct
6941 indicator of whether or not we need the value of this
6942 expression is the IGNORE variable. */
6943 temp = expand_expr (BIND_EXPR_BODY (exp),
6944 ignore ? const0_rtx : target,
6945 tmode, modifier);
6947 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
6949 return temp;
6952 case RTL_EXPR:
6953 if (RTL_EXPR_SEQUENCE (exp))
6955 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6956 abort ();
6957 emit_insn (RTL_EXPR_SEQUENCE (exp));
6958 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6960 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6961 free_temps_for_rtl_expr (exp);
6962 if (alt_rtl)
6963 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6964 return RTL_EXPR_RTL (exp);
6966 case CONSTRUCTOR:
6967 /* If we don't need the result, just ensure we evaluate any
6968 subexpressions. */
6969 if (ignore)
6971 tree elt;
6973 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6974 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6976 return const0_rtx;
6979 /* All elts simple constants => refer to a constant in memory. But
6980 if this is a non-BLKmode mode, let it store a field at a time
6981 since that should make a CONST_INT or CONST_DOUBLE when we
6982 fold. Likewise, if we have a target we can use, it is best to
6983 store directly into the target unless the type is large enough
6984 that memcpy will be used. If we are making an initializer and
6985 all operands are constant, put it in memory as well.
6987 FIXME: Avoid trying to fill vector constructors piece-meal.
6988 Output them with output_constant_def below unless we're sure
6989 they're zeros. This should go away when vector initializers
6990 are treated like VECTOR_CST instead of arrays.
6992 else if ((TREE_STATIC (exp)
6993 && ((mode == BLKmode
6994 && ! (target != 0 && safe_from_p (target, exp, 1)))
6995 || TREE_ADDRESSABLE (exp)
6996 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6997 && (! MOVE_BY_PIECES_P
6998 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6999 TYPE_ALIGN (type)))
7000 && ! mostly_zeros_p (exp))))
7001 || ((modifier == EXPAND_INITIALIZER
7002 || modifier == EXPAND_CONST_ADDRESS)
7003 && TREE_CONSTANT (exp)))
7005 rtx constructor = output_constant_def (exp, 1);
7007 if (modifier != EXPAND_CONST_ADDRESS
7008 && modifier != EXPAND_INITIALIZER
7009 && modifier != EXPAND_SUM)
7010 constructor = validize_mem (constructor);
7012 return constructor;
7014 else
7016 /* Handle calls that pass values in multiple non-contiguous
7017 locations. The Irix 6 ABI has examples of this. */
7018 if (target == 0 || ! safe_from_p (target, exp, 1)
7019 || GET_CODE (target) == PARALLEL
7020 || modifier == EXPAND_STACK_PARM)
7021 target
7022 = assign_temp (build_qualified_type (type,
7023 (TYPE_QUALS (type)
7024 | (TREE_READONLY (exp)
7025 * TYPE_QUAL_CONST))),
7026 0, TREE_ADDRESSABLE (exp), 1);
7028 store_constructor (exp, target, 0, int_expr_size (exp));
7029 return target;
7032 case INDIRECT_REF:
7034 tree exp1 = TREE_OPERAND (exp, 0);
7036 if (modifier != EXPAND_WRITE)
7038 tree t;
7040 t = fold_read_from_constant_string (exp);
7041 if (t)
7042 return expand_expr (t, target, tmode, modifier);
7045 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7046 op0 = memory_address (mode, op0);
7047 temp = gen_rtx_MEM (mode, op0);
7048 set_mem_attributes (temp, exp, 0);
7050 /* If we are writing to this object and its type is a record with
7051 readonly fields, we must mark it as readonly so it will
7052 conflict with readonly references to those fields. */
7053 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7054 RTX_UNCHANGING_P (temp) = 1;
7056 return temp;
7059 case ARRAY_REF:
7061 #ifdef ENABLE_CHECKING
7062 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7063 abort ();
7064 #endif
7067 tree array = TREE_OPERAND (exp, 0);
7068 tree low_bound = array_ref_low_bound (exp);
7069 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7070 HOST_WIDE_INT i;
7072 /* Optimize the special-case of a zero lower bound.
7074 We convert the low_bound to sizetype to avoid some problems
7075 with constant folding. (E.g. suppose the lower bound is 1,
7076 and its mode is QI. Without the conversion, (ARRAY
7077 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7078 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7080 if (! integer_zerop (low_bound))
7081 index = size_diffop (index, convert (sizetype, low_bound));
7083 /* Fold an expression like: "foo"[2].
7084 This is not done in fold so it won't happen inside &.
7085 Don't fold if this is for wide characters since it's too
7086 difficult to do correctly and this is a very rare case. */
7088 if (modifier != EXPAND_CONST_ADDRESS
7089 && modifier != EXPAND_INITIALIZER
7090 && modifier != EXPAND_MEMORY)
7092 tree t = fold_read_from_constant_string (exp);
7094 if (t)
7095 return expand_expr (t, target, tmode, modifier);
7098 /* If this is a constant index into a constant array,
7099 just get the value from the array. Handle both the cases when
7100 we have an explicit constructor and when our operand is a variable
7101 that was declared const. */
7103 if (modifier != EXPAND_CONST_ADDRESS
7104 && modifier != EXPAND_INITIALIZER
7105 && modifier != EXPAND_MEMORY
7106 && TREE_CODE (array) == CONSTRUCTOR
7107 && ! TREE_SIDE_EFFECTS (array)
7108 && TREE_CODE (index) == INTEGER_CST
7109 && 0 > compare_tree_int (index,
7110 list_length (CONSTRUCTOR_ELTS
7111 (TREE_OPERAND (exp, 0)))))
7113 tree elem;
7115 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7116 i = TREE_INT_CST_LOW (index);
7117 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7120 if (elem)
7121 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7122 modifier);
7125 else if (optimize >= 1
7126 && modifier != EXPAND_CONST_ADDRESS
7127 && modifier != EXPAND_INITIALIZER
7128 && modifier != EXPAND_MEMORY
7129 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7130 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7131 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7132 && targetm.binds_local_p (array))
7134 if (TREE_CODE (index) == INTEGER_CST)
7136 tree init = DECL_INITIAL (array);
7138 if (TREE_CODE (init) == CONSTRUCTOR)
7140 tree elem;
7142 for (elem = CONSTRUCTOR_ELTS (init);
7143 (elem
7144 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7145 elem = TREE_CHAIN (elem))
7148 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7149 return expand_expr (fold (TREE_VALUE (elem)), target,
7150 tmode, modifier);
7152 else if (TREE_CODE (init) == STRING_CST
7153 && 0 > compare_tree_int (index,
7154 TREE_STRING_LENGTH (init)))
7156 tree type = TREE_TYPE (TREE_TYPE (init));
7157 enum machine_mode mode = TYPE_MODE (type);
7159 if (GET_MODE_CLASS (mode) == MODE_INT
7160 && GET_MODE_SIZE (mode) == 1)
7161 return gen_int_mode (TREE_STRING_POINTER (init)
7162 [TREE_INT_CST_LOW (index)], mode);
7167 goto normal_inner_ref;
7169 case COMPONENT_REF:
7170 /* If the operand is a CONSTRUCTOR, we can just extract the
7171 appropriate field if it is present. */
7172 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7174 tree elt;
7176 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7177 elt = TREE_CHAIN (elt))
7178 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7179 /* We can normally use the value of the field in the
7180 CONSTRUCTOR. However, if this is a bitfield in
7181 an integral mode that we can fit in a HOST_WIDE_INT,
7182 we must mask only the number of bits in the bitfield,
7183 since this is done implicitly by the constructor. If
7184 the bitfield does not meet either of those conditions,
7185 we can't do this optimization. */
7186 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7187 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7188 == MODE_INT)
7189 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7190 <= HOST_BITS_PER_WIDE_INT))))
7192 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7193 && modifier == EXPAND_STACK_PARM)
7194 target = 0;
7195 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7196 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7198 HOST_WIDE_INT bitsize
7199 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7200 enum machine_mode imode
7201 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7203 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7205 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7206 op0 = expand_and (imode, op0, op1, target);
7208 else
7210 tree count
7211 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7214 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7215 target, 0);
7216 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7217 target, 0);
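/* Worked example (assuming a hypothetical 32-bit SImode container):
   for a signed 3-bit bitfield, COUNT is 32 - 3 = 29; shifting left by
   29 and then right by 29 (arithmetically, since the final argument
   is 0) sign-extends the low 3 bits into the full word.  */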
7221 return op0;
7224 goto normal_inner_ref;
7226 case BIT_FIELD_REF:
7227 case ARRAY_RANGE_REF:
7228 normal_inner_ref:
7230 enum machine_mode mode1;
7231 HOST_WIDE_INT bitsize, bitpos;
7232 tree offset;
7233 int volatilep = 0;
7234 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7235 &mode1, &unsignedp, &volatilep);
7236 rtx orig_op0;
7238 /* If we got back the original object, something is wrong. Perhaps
7239 we are evaluating an expression too early. In any event, don't
7240 infinitely recurse. */
7241 if (tem == exp)
7242 abort ();
7244 /* If TEM's type is a union of variable size, pass TARGET to the inner
7245 computation, since it will need a temporary and TARGET is known
7246 to suffice for that. This occurs in unchecked conversion in Ada. */
7248 orig_op0 = op0
7249 = expand_expr (tem,
7250 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7251 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7252 != INTEGER_CST)
7253 && modifier != EXPAND_STACK_PARM
7254 ? target : NULL_RTX),
7255 VOIDmode,
7256 (modifier == EXPAND_INITIALIZER
7257 || modifier == EXPAND_CONST_ADDRESS
7258 || modifier == EXPAND_STACK_PARM)
7259 ? modifier : EXPAND_NORMAL);
7261 /* If this is a constant, put it into a register if it is a
7262 legitimate constant and OFFSET is 0 and memory if it isn't. */
7263 if (CONSTANT_P (op0))
7265 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7266 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7267 && offset == 0)
7268 op0 = force_reg (mode, op0);
7269 else
7270 op0 = validize_mem (force_const_mem (mode, op0));
7273 /* Otherwise, if this object is not in memory and we either have an
7274 offset or a BLKmode result, put it there. This case can't occur in
7275 C, but can in Ada if we have unchecked conversion of an expression
7276 from a scalar type to an array or record type or for an
7277 ARRAY_RANGE_REF whose type is BLKmode. */
7278 else if (GET_CODE (op0) != MEM
7279 && (offset != 0
7280 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7282 /* If the operand is a SAVE_EXPR, we can deal with this by
7283 forcing the SAVE_EXPR into memory. */
7284 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7286 put_var_into_stack (TREE_OPERAND (exp, 0),
7287 /*rescan=*/true);
7288 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7290 else
7292 tree nt
7293 = build_qualified_type (TREE_TYPE (tem),
7294 (TYPE_QUALS (TREE_TYPE (tem))
7295 | TYPE_QUAL_CONST));
7296 rtx memloc = assign_temp (nt, 1, 1, 1);
7298 emit_move_insn (memloc, op0);
7299 op0 = memloc;
7303 if (offset != 0)
7305 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7306 EXPAND_SUM);
7308 if (GET_CODE (op0) != MEM)
7309 abort ();
7311 #ifdef POINTERS_EXTEND_UNSIGNED
7312 if (GET_MODE (offset_rtx) != Pmode)
7313 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7314 #else
7315 if (GET_MODE (offset_rtx) != ptr_mode)
7316 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7317 #endif
7319 if (GET_MODE (op0) == BLKmode
7320 /* A constant address in OP0 can have VOIDmode; we must
7321 not try to call force_reg in that case. */
7322 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7323 && bitsize != 0
7324 && (bitpos % bitsize) == 0
7325 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7326 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7328 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7329 bitpos = 0;
7332 op0 = offset_address (op0, offset_rtx,
7333 highest_pow2_factor (offset));
7336 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7337 record its alignment as BIGGEST_ALIGNMENT. */
7338 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7339 && is_aligning_offset (offset, tem))
7340 set_mem_align (op0, BIGGEST_ALIGNMENT);
7342 /* Don't forget about volatility even if this is a bitfield. */
7343 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7345 if (op0 == orig_op0)
7346 op0 = copy_rtx (op0);
7348 MEM_VOLATILE_P (op0) = 1;
7351 /* The following code doesn't handle CONCAT.
7352 Assume only bitpos == 0 can be used for CONCAT, due to
7353 one-element arrays having the same mode as their element. */
7354 if (GET_CODE (op0) == CONCAT)
7356 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7357 abort ();
7358 return op0;
7361 /* In cases where an aligned union has an unaligned object
7362 as a field, we might be extracting a BLKmode value from
7363 an integer-mode (e.g., SImode) object. Handle this case
7364 by doing the extract into an object as wide as the field
7365 (which we know to be the width of a basic mode), then
7366 storing into memory, and changing the mode to BLKmode. */
7367 if (mode1 == VOIDmode
7368 || REG_P (op0) || GET_CODE (op0) == SUBREG
7369 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7370 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7371 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7372 && modifier != EXPAND_CONST_ADDRESS
7373 && modifier != EXPAND_INITIALIZER)
7374 /* If the field isn't aligned enough to fetch as a memref,
7375 fetch it as a bit field. */
7376 || (mode1 != BLKmode
7377 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7378 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7379 || (GET_CODE (op0) == MEM
7380 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7381 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7382 && ((modifier == EXPAND_CONST_ADDRESS
7383 || modifier == EXPAND_INITIALIZER)
7384 ? STRICT_ALIGNMENT
7385 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7386 || (bitpos % BITS_PER_UNIT != 0)))
7387 /* If the type and the field are a constant size and the
7388 size of the type isn't the same size as the bitfield,
7389 we must use bitfield operations. */
7390 || (bitsize >= 0
7391 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7392 == INTEGER_CST)
7393 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7394 bitsize)))
7396 enum machine_mode ext_mode = mode;
7398 if (ext_mode == BLKmode
7399 && ! (target != 0 && GET_CODE (op0) == MEM
7400 && GET_CODE (target) == MEM
7401 && bitpos % BITS_PER_UNIT == 0))
7402 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7404 if (ext_mode == BLKmode)
7406 if (target == 0)
7407 target = assign_temp (type, 0, 1, 1);
7409 if (bitsize == 0)
7410 return target;
7412 /* In this case, BITPOS must start at a byte boundary and
7413 TARGET, if specified, must be a MEM. */
7414 if (GET_CODE (op0) != MEM
7415 || (target != 0 && GET_CODE (target) != MEM)
7416 || bitpos % BITS_PER_UNIT != 0)
7417 abort ();
7419 emit_block_move (target,
7420 adjust_address (op0, VOIDmode,
7421 bitpos / BITS_PER_UNIT),
7422 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7423 / BITS_PER_UNIT),
7424 (modifier == EXPAND_STACK_PARM
7425 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7427 return target;
7430 op0 = validize_mem (op0);
7432 if (GET_CODE (op0) == MEM && REG_P (XEXP (op0, 0)))
7433 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7435 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7436 (modifier == EXPAND_STACK_PARM
7437 ? NULL_RTX : target),
7438 ext_mode, ext_mode,
7439 int_size_in_bytes (TREE_TYPE (tem)));
7441 /* If the result is a record type and BITSIZE is narrower than
7442 the mode of OP0, an integral mode, and this is a big endian
7443 machine, we must put the field into the high-order bits. */
7444 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7445 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7446 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7447 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7448 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7449 - bitsize),
7450 op0, 1);
7452 /* If the result type is BLKmode, store the data into a temporary
7453 of the appropriate type, but with the mode corresponding to the
7454 mode for the data we have (op0's mode). It's tempting to make
7455 this a constant type, since we know it's only being stored once,
7456 but that can cause problems if we are taking the address of this
7457 COMPONENT_REF because the MEM of any reference via that address
7458 will have flags corresponding to the type, which will not
7459 necessarily be constant. */
7460 if (mode == BLKmode)
7462 rtx new
7463 = assign_stack_temp_for_type
7464 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7466 emit_move_insn (new, op0);
7467 op0 = copy_rtx (new);
7468 PUT_MODE (op0, BLKmode);
7469 set_mem_attributes (op0, exp, 1);
7472 return op0;
7475 /* If the result is BLKmode, use that to access the object
7476 now as well. */
7477 if (mode == BLKmode)
7478 mode1 = BLKmode;
7480 /* Get a reference to just this component. */
7481 if (modifier == EXPAND_CONST_ADDRESS
7482 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7483 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7484 else
7485 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7487 if (op0 == orig_op0)
7488 op0 = copy_rtx (op0);
7490 set_mem_attributes (op0, exp, 0);
7491 if (REG_P (XEXP (op0, 0)))
7492 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7494 MEM_VOLATILE_P (op0) |= volatilep;
7495 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7496 || modifier == EXPAND_CONST_ADDRESS
7497 || modifier == EXPAND_INITIALIZER)
7498 return op0;
7499 else if (target == 0)
7500 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7502 convert_move (target, op0, unsignedp);
7503 return target;
7506 case OBJ_TYPE_REF:
7507 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7509 /* Intended for a reference to a buffer of a file-object in Pascal.
7510 But it's not certain that a special tree code will really be
7511 necessary for these. INDIRECT_REF might work for them. */
7512 case BUFFER_REF:
7513 abort ();
7515 case IN_EXPR:
7517 /* Pascal set IN expression.
7519 Algorithm:
7520 rlo = set_low - (set_low%bits_per_word);
7521 the_word = set [ (index - rlo)/bits_per_word ];
7522 bit_index = index % bits_per_word;
7523 bitmask = 1 << bit_index;
7524 return !!(the_word & bitmask); */
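/* A minimal source-level sketch of the membership test described above,
   assuming a set stored as an array of bytes (names like `set', `set_low'
   and `index' are hypothetical; this is illustration, not compiler code):

     unsigned char set[SET_BYTES];
     int rlo = set_low - (set_low % CHAR_BIT);
     unsigned char the_word = set[(index - rlo) / CHAR_BIT];
     int in_set = (the_word >> (index % CHAR_BIT)) & 1;

   The RTL emitted below performs the same shift-and-mask, preceded by
   range checks that branch to the out-of-range label.  */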
7526 tree set = TREE_OPERAND (exp, 0);
7527 tree index = TREE_OPERAND (exp, 1);
7528 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7529 tree set_type = TREE_TYPE (set);
7530 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7531 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7532 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7533 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7534 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7535 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7536 rtx setaddr = XEXP (setval, 0);
7537 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7538 rtx rlow;
7539 rtx diff, quo, rem, addr, bit, result;
7541 /* If domain is empty, answer is no. Likewise if index is constant
7542 and out of bounds. */
7543 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7544 && TREE_CODE (set_low_bound) == INTEGER_CST
7545 && tree_int_cst_lt (set_high_bound, set_low_bound))
7546 || (TREE_CODE (index) == INTEGER_CST
7547 && TREE_CODE (set_low_bound) == INTEGER_CST
7548 && tree_int_cst_lt (index, set_low_bound))
7549 || (TREE_CODE (set_high_bound) == INTEGER_CST
7550 && TREE_CODE (index) == INTEGER_CST
7551 && tree_int_cst_lt (set_high_bound, index))))
7552 return const0_rtx;
7554 if (target == 0)
7555 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7557 /* If we get here, we have to generate the code for both cases
7558 (in range and out of range). */
7560 op0 = gen_label_rtx ();
7561 op1 = gen_label_rtx ();
7563 if (! (GET_CODE (index_val) == CONST_INT
7564 && GET_CODE (lo_r) == CONST_INT))
7565 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7566 GET_MODE (index_val), iunsignedp, op1);
7568 if (! (GET_CODE (index_val) == CONST_INT
7569 && GET_CODE (hi_r) == CONST_INT))
7570 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7571 GET_MODE (index_val), iunsignedp, op1);
7573 /* Calculate the element number of bit zero in the first word
7574 of the set. */
7575 if (GET_CODE (lo_r) == CONST_INT)
7576 rlow = GEN_INT (INTVAL (lo_r)
7577 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7578 else
7579 rlow = expand_binop (index_mode, and_optab, lo_r,
7580 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7581 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7583 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7584 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7586 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7587 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7588 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7589 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7591 addr = memory_address (byte_mode,
7592 expand_binop (index_mode, add_optab, diff,
7593 setaddr, NULL_RTX, iunsignedp,
7594 OPTAB_LIB_WIDEN));
7596 /* Extract the bit we want to examine. */
7597 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7598 gen_rtx_MEM (byte_mode, addr),
7599 make_tree (TREE_TYPE (index), rem),
7600 NULL_RTX, 1);
7601 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7602 GET_MODE (target) == byte_mode ? target : 0,
7603 1, OPTAB_LIB_WIDEN);
7605 if (result != target)
7606 convert_move (target, result, 1);
7608 /* Output the code to handle the out-of-range case. */
7609 emit_jump (op0);
7610 emit_label (op1);
7611 emit_move_insn (target, const0_rtx);
7612 emit_label (op0);
7613 return target;
7616 case WITH_CLEANUP_EXPR:
7617 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7619 WITH_CLEANUP_EXPR_RTL (exp)
7620 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7621 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7622 CLEANUP_EH_ONLY (exp));
7624 /* That's it for this cleanup. */
7625 TREE_OPERAND (exp, 1) = 0;
7627 return WITH_CLEANUP_EXPR_RTL (exp);
7629 case CLEANUP_POINT_EXPR:
7631 /* Start a new binding layer that will keep track of all cleanup
7632 actions to be performed. */
7633 expand_start_bindings (2);
7635 target_temp_slot_level = temp_slot_level;
7637 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7638 /* If we're going to use this value, load it up now. */
7639 if (! ignore)
7640 op0 = force_not_mem (op0);
7641 preserve_temp_slots (op0);
7642 expand_end_bindings (NULL_TREE, 0, 0);
7644 return op0;
7646 case CALL_EXPR:
7647 /* Check for a built-in function. */
7648 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7649 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7650 == FUNCTION_DECL)
7651 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7653 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7654 == BUILT_IN_FRONTEND)
7655 return lang_hooks.expand_expr (exp, original_target,
7656 tmode, modifier,
7657 alt_rtl);
7658 else
7659 return expand_builtin (exp, target, subtarget, tmode, ignore);
7662 return expand_call (exp, target, ignore);
7664 case NON_LVALUE_EXPR:
7665 case NOP_EXPR:
7666 case CONVERT_EXPR:
7667 case REFERENCE_EXPR:
7668 if (TREE_OPERAND (exp, 0) == error_mark_node)
7669 return const0_rtx;
7671 if (TREE_CODE (type) == UNION_TYPE)
7673 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7675 /* If both input and output are BLKmode, this conversion isn't doing
7676 anything except possibly changing memory attribute. */
7677 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7679 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7680 modifier);
7682 result = copy_rtx (result);
7683 set_mem_attributes (result, exp, 0);
7684 return result;
7687 if (target == 0)
7689 if (TYPE_MODE (type) != BLKmode)
7690 target = gen_reg_rtx (TYPE_MODE (type));
7691 else
7692 target = assign_temp (type, 0, 1, 1);
7695 if (GET_CODE (target) == MEM)
7696 /* Store data into beginning of memory target. */
7697 store_expr (TREE_OPERAND (exp, 0),
7698 adjust_address (target, TYPE_MODE (valtype), 0),
7699 modifier == EXPAND_STACK_PARM ? 2 : 0);
7701 else if (REG_P (target))
7702 /* Store this field into a union of the proper type. */
7703 store_field (target,
7704 MIN ((int_size_in_bytes (TREE_TYPE
7705 (TREE_OPERAND (exp, 0)))
7706 * BITS_PER_UNIT),
7707 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7708 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7709 VOIDmode, 0, type, 0);
7710 else
7711 abort ();
7713 /* Return the entire union. */
7714 return target;
7717 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7719 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7720 modifier);
7722 /* If the signedness of the conversion differs and OP0 is
7723 a promoted SUBREG, clear that indication since we now
7724 have to do the proper extension. */
7725 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7726 && GET_CODE (op0) == SUBREG)
7727 SUBREG_PROMOTED_VAR_P (op0) = 0;
7729 return op0;
7732 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7733 if (GET_MODE (op0) == mode)
7734 return op0;
7736 /* If OP0 is a constant, just convert it into the proper mode. */
7737 if (CONSTANT_P (op0))
7739 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7740 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7742 if (modifier == EXPAND_INITIALIZER)
7743 return simplify_gen_subreg (mode, op0, inner_mode,
7744 subreg_lowpart_offset (mode,
7745 inner_mode));
7746 else
7747 return convert_modes (mode, inner_mode, op0,
7748 TYPE_UNSIGNED (inner_type));
7751 if (modifier == EXPAND_INITIALIZER)
7752 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7754 if (target == 0)
7755 return
7756 convert_to_mode (mode, op0,
7757 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7758 else
7759 convert_move (target, op0,
7760 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7761 return target;
7763 case VIEW_CONVERT_EXPR:
7764 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7766 /* If the input and output modes are both the same, we are done.
7767 Otherwise, if neither mode is BLKmode and both are integral and within
7768 a word, we can use gen_lowpart. If neither is true, make sure the
7769 operand is in memory and convert the MEM to the new mode. */
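/* Illustrative sketch (hypothetical example, not compiler code): a
   VIEW_CONVERT_EXPR reinterprets the same bits in a new type, much as

     unsigned int bits;
     float f = *(float *) &bits;

   views the same bytes as a float.  Integral results that fit within a
   word go through gen_lowpart; anything else is forced into memory and
   re-read with the new mode.  */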
7770 if (TYPE_MODE (type) == GET_MODE (op0))
7772 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7773 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7774 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7775 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7776 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7777 op0 = gen_lowpart (TYPE_MODE (type), op0);
7778 else if (GET_CODE (op0) != MEM)
7780 /* If the operand is not a MEM, force it into memory. Since we
7781 are going to be changing the mode of the MEM, don't call
7782 force_const_mem for constants because we don't allow pool
7783 constants to change mode. */
7784 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7786 if (TREE_ADDRESSABLE (exp))
7787 abort ();
7789 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7790 target
7791 = assign_stack_temp_for_type
7792 (TYPE_MODE (inner_type),
7793 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7795 emit_move_insn (target, op0);
7796 op0 = target;
7799 /* At this point, OP0 is in the correct mode. If the output type is such
7800 that the operand is known to be aligned, indicate that it is.
7801 Otherwise, we need only be concerned about alignment for non-BLKmode
7802 results. */
7803 if (GET_CODE (op0) == MEM)
7805 op0 = copy_rtx (op0);
7807 if (TYPE_ALIGN_OK (type))
7808 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7809 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7810 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7812 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7813 HOST_WIDE_INT temp_size
7814 = MAX (int_size_in_bytes (inner_type),
7815 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7816 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7817 temp_size, 0, type);
7818 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7820 if (TREE_ADDRESSABLE (exp))
7821 abort ();
7823 if (GET_MODE (op0) == BLKmode)
7824 emit_block_move (new_with_op0_mode, op0,
7825 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7826 (modifier == EXPAND_STACK_PARM
7827 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7828 else
7829 emit_move_insn (new_with_op0_mode, op0);
7831 op0 = new;
7834 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7837 return op0;
7839 case PLUS_EXPR:
7840 this_optab = ! unsignedp && flag_trapv
7841 && (GET_MODE_CLASS (mode) == MODE_INT)
7842 ? addv_optab : add_optab;
7844 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7845 something else, make sure we add the register to the constant and
7846 then to the other thing. This case can occur during strength
7847 reduction and doing it this way will produce better code if the
7848 frame pointer or argument pointer is eliminated.
7850 fold-const.c will ensure that the constant is always in the inner
7851 PLUS_EXPR, so the only case we need to do anything about is if
7852 sp, ap, or fp is our second argument, in which case we must swap
7853 the innermost first argument and our second argument. */
7855 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7856 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7857 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7858 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7859 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7860 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7862 tree t = TREE_OPERAND (exp, 1);
7864 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7865 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7868 /* If the result is to be ptr_mode and we are adding an integer to
7869 something, we might be forming a constant. So try to use
7870 plus_constant. If it produces a sum and we can't accept it,
7871 use force_operand. This allows P = &ARR[const] to generate
7872 efficient code on machines where a SYMBOL_REF is not a valid
7873 address.
7875 If this is an EXPAND_SUM call, always return the sum. */
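/* Illustrative sketch (hypothetical names, not compiler code): the case
   this has in mind is constant address arithmetic such as

     static int arr[10];
     int *p = &arr[3];

   where the PLUS_EXPR adds a constant offset to &arr.  Folding with
   plus_constant lets the sum become a single (symbol_ref + offset)
   address instead of a run-time addition, which matters on machines
   where a bare SYMBOL_REF is not a valid address.  */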
7876 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7877 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7879 if (modifier == EXPAND_STACK_PARM)
7880 target = 0;
7881 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7882 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7883 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7885 rtx constant_part;
7887 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7888 EXPAND_SUM);
7889 /* Use immed_double_const to ensure that the constant is
7890 truncated according to the mode of OP1, then sign extended
7891 to a HOST_WIDE_INT. Using the constant directly can result
7892 in non-canonical RTL in a 64x32 cross compile. */
7893 constant_part
7894 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7895 (HOST_WIDE_INT) 0,
7896 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7897 op1 = plus_constant (op1, INTVAL (constant_part));
7898 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7899 op1 = force_operand (op1, target);
7900 return op1;
7903 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7904 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7905 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7907 rtx constant_part;
7909 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7910 (modifier == EXPAND_INITIALIZER
7911 ? EXPAND_INITIALIZER : EXPAND_SUM));
7912 if (! CONSTANT_P (op0))
7914 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7915 VOIDmode, modifier);
7916 /* Return a PLUS if modifier says it's OK. */
7917 if (modifier == EXPAND_SUM
7918 || modifier == EXPAND_INITIALIZER)
7919 return simplify_gen_binary (PLUS, mode, op0, op1);
7920 goto binop2;
7922 /* Use immed_double_const to ensure that the constant is
7923 truncated according to the mode of OP1, then sign extended
7924 to a HOST_WIDE_INT. Using the constant directly can result
7925 in non-canonical RTL in a 64x32 cross compile. */
7926 constant_part
7927 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7928 (HOST_WIDE_INT) 0,
7929 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7930 op0 = plus_constant (op0, INTVAL (constant_part));
7931 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7932 op0 = force_operand (op0, target);
7933 return op0;
7937 /* No sense saving up arithmetic to be done
7938 if it's all in the wrong mode to form part of an address.
7939 And force_operand won't know whether to sign-extend or
7940 zero-extend. */
7941 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7942 || mode != ptr_mode)
7944 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7945 subtarget, &op0, &op1, 0);
7946 if (op0 == const0_rtx)
7947 return op1;
7948 if (op1 == const0_rtx)
7949 return op0;
7950 goto binop2;
7953 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7954 subtarget, &op0, &op1, modifier);
7955 return simplify_gen_binary (PLUS, mode, op0, op1);
7957 case MINUS_EXPR:
7958 /* For initializers, we are allowed to return a MINUS of two
7959 symbolic constants. Here we handle all cases when both operands
7960 are constant. */
7961 /* Handle difference of two symbolic constants,
7962 for the sake of an initializer. */
7963 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7964 && really_constant_p (TREE_OPERAND (exp, 0))
7965 && really_constant_p (TREE_OPERAND (exp, 1)))
7967 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7968 NULL_RTX, &op0, &op1, modifier);
7970 /* If the last operand is a CONST_INT, use plus_constant of
7971 the negated constant. Else make the MINUS. */
7972 if (GET_CODE (op1) == CONST_INT)
7973 return plus_constant (op0, - INTVAL (op1));
7974 else
7975 return gen_rtx_MINUS (mode, op0, op1);
7978 this_optab = ! unsignedp && flag_trapv
7979 && (GET_MODE_CLASS(mode) == MODE_INT)
7980 ? subv_optab : sub_optab;
7982 /* No sense saving up arithmetic to be done
7983 if it's all in the wrong mode to form part of an address.
7984 And force_operand won't know whether to sign-extend or
7985 zero-extend. */
7986 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7987 || mode != ptr_mode)
7988 goto binop;
7990 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7991 subtarget, &op0, &op1, modifier);
7993 /* Convert A - const to A + (-const). */
7994 if (GET_CODE (op1) == CONST_INT)
7996 op1 = negate_rtx (mode, op1);
7997 return simplify_gen_binary (PLUS, mode, op0, op1);
8000 goto binop2;
8002 case MULT_EXPR:
8003 /* If first operand is constant, swap them.
8004 Thus the following special case checks need only
8005 check the second operand. */
8006 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8008 tree t1 = TREE_OPERAND (exp, 0);
8009 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8010 TREE_OPERAND (exp, 1) = t1;
8013 /* Attempt to return something suitable for generating an
8014 indexed address, for machines that support that. */
8016 if (modifier == EXPAND_SUM && mode == ptr_mode
8017 && host_integerp (TREE_OPERAND (exp, 1), 0))
8019 tree exp1 = TREE_OPERAND (exp, 1);
8021 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8022 EXPAND_SUM);
8024 if (!REG_P (op0))
8025 op0 = force_operand (op0, NULL_RTX);
8026 if (!REG_P (op0))
8027 op0 = copy_to_mode_reg (mode, op0);
8029 return gen_rtx_MULT (mode, op0,
8030 gen_int_mode (tree_low_cst (exp1, 0),
8031 TYPE_MODE (TREE_TYPE (exp1))));
8034 if (modifier == EXPAND_STACK_PARM)
8035 target = 0;
8037 /* Check for multiplying things that have been extended
8038 from a narrower type. If this machine supports multiplying
8039 in that narrower type with a result in the desired type,
8040 do it that way, and avoid the explicit type-conversion. */
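/* Illustrative sketch (not compiler code): the pattern recognized here
   is a multiply of operands widened from a narrower type, e.g.

     short a, b;
     int c = a * b;   (really (int) a * (int) b after promotion)

   If the target provides a widening multiply for short -> int, it is
   used directly instead of extending both operands and doing a
   full-width multiply.  */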
8041 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8042 && TREE_CODE (type) == INTEGER_TYPE
8043 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8044 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8045 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8046 && int_fits_type_p (TREE_OPERAND (exp, 1),
8047 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8048 /* Don't use a widening multiply if a shift will do. */
8049 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8050 > HOST_BITS_PER_WIDE_INT)
8051 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8052 ||
8053 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8054 && (TYPE_PRECISION (TREE_TYPE
8055 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8056 == TYPE_PRECISION (TREE_TYPE
8057 (TREE_OPERAND
8058 (TREE_OPERAND (exp, 0), 0))))
8059 /* If both operands are extended, they must either both
8060 be zero-extended or both be sign-extended. */
8061 && (TYPE_UNSIGNED (TREE_TYPE
8062 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8063 == TYPE_UNSIGNED (TREE_TYPE
8064 (TREE_OPERAND
8065 (TREE_OPERAND (exp, 0), 0)))))))
8067 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8068 enum machine_mode innermode = TYPE_MODE (op0type);
8069 bool zextend_p = TYPE_UNSIGNED (op0type);
8070 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8071 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8073 if (mode == GET_MODE_WIDER_MODE (innermode))
8075 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8077 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8078 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8079 TREE_OPERAND (exp, 1),
8080 NULL_RTX, &op0, &op1, 0);
8081 else
8082 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8083 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8084 NULL_RTX, &op0, &op1, 0);
8085 goto binop2;
8087 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8088 && innermode == word_mode)
8090 rtx htem, hipart;
8091 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8092 NULL_RTX, VOIDmode, 0);
8093 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8094 op1 = convert_modes (innermode, mode,
8095 expand_expr (TREE_OPERAND (exp, 1),
8096 NULL_RTX, VOIDmode, 0),
8097 unsignedp);
8098 else
8099 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8100 NULL_RTX, VOIDmode, 0);
8101 temp = expand_binop (mode, other_optab, op0, op1, target,
8102 unsignedp, OPTAB_LIB_WIDEN);
8103 hipart = gen_highpart (innermode, temp);
8104 htem = expand_mult_highpart_adjust (innermode, hipart,
8105 op0, op1, hipart,
8106 zextend_p);
8107 if (htem != hipart)
8108 emit_move_insn (hipart, htem);
8109 return temp;
8113 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8114 subtarget, &op0, &op1, 0);
8115 return expand_mult (mode, op0, op1, target, unsignedp);
8117 case TRUNC_DIV_EXPR:
8118 case FLOOR_DIV_EXPR:
8119 case CEIL_DIV_EXPR:
8120 case ROUND_DIV_EXPR:
8121 case EXACT_DIV_EXPR:
8122 if (modifier == EXPAND_STACK_PARM)
8123 target = 0;
8124 /* Possible optimization: compute the dividend with EXPAND_SUM
8125 then if the divisor is constant can optimize the case
8126 where some terms of the dividend have coeffs divisible by it. */
8127 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8128 subtarget, &op0, &op1, 0);
8129 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8131 case RDIV_EXPR:
8132 /* Emit a/b as a*(1/b). Later, CSE may manage to share the reciprocal,
8133 saving an expensive divide. If not, combine will rebuild the original
8134 computation. */
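/* Illustrative sketch (hypothetical names, not compiler code): with
   -funsafe-math-optimizations, repeated divisions by the same value,

     double x1 = a / d;
     double x2 = b / d;

   both become multiplications by (1/d), so CSE can compute the
   reciprocal once.  If nothing ends up shared, combine is expected to
   rebuild the plain divisions.  */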
8135 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8136 && TREE_CODE (type) == REAL_TYPE
8137 && !real_onep (TREE_OPERAND (exp, 0)))
8138 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8139 build (RDIV_EXPR, type,
8140 build_real (type, dconst1),
8141 TREE_OPERAND (exp, 1))),
8142 target, tmode, modifier);
8143 this_optab = sdiv_optab;
8144 goto binop;
8146 case TRUNC_MOD_EXPR:
8147 case FLOOR_MOD_EXPR:
8148 case CEIL_MOD_EXPR:
8149 case ROUND_MOD_EXPR:
8150 if (modifier == EXPAND_STACK_PARM)
8151 target = 0;
8152 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8153 subtarget, &op0, &op1, 0);
8154 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8156 case FIX_ROUND_EXPR:
8157 case FIX_FLOOR_EXPR:
8158 case FIX_CEIL_EXPR:
8159 abort (); /* Not used for C. */
8161 case FIX_TRUNC_EXPR:
8162 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8163 if (target == 0 || modifier == EXPAND_STACK_PARM)
8164 target = gen_reg_rtx (mode);
8165 expand_fix (target, op0, unsignedp);
8166 return target;
8168 case FLOAT_EXPR:
8169 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8170 if (target == 0 || modifier == EXPAND_STACK_PARM)
8171 target = gen_reg_rtx (mode);
8172 /* expand_float can't figure out what to do if FROM has VOIDmode.
8173 So give it the correct mode. With -O, cse will optimize this. */
8174 if (GET_MODE (op0) == VOIDmode)
8175 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8176 op0);
8177 expand_float (target, op0,
8178 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8179 return target;
8181 case NEGATE_EXPR:
8182 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8183 if (modifier == EXPAND_STACK_PARM)
8184 target = 0;
8185 temp = expand_unop (mode,
8186 ! unsignedp && flag_trapv
8187 && (GET_MODE_CLASS(mode) == MODE_INT)
8188 ? negv_optab : neg_optab, op0, target, 0);
8189 if (temp == 0)
8190 abort ();
8191 return temp;
8193 case ABS_EXPR:
8194 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8195 if (modifier == EXPAND_STACK_PARM)
8196 target = 0;
8198 /* ABS_EXPR is not valid for complex arguments. */
8199 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8200 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8201 abort ();
8203 /* Unsigned abs is simply the operand. Testing here means we don't
8204 risk generating incorrect code below. */
8205 if (TYPE_UNSIGNED (type))
8206 return op0;
8208 return expand_abs (mode, op0, target, unsignedp,
8209 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8211 case MAX_EXPR:
8212 case MIN_EXPR:
8213 target = original_target;
8214 if (target == 0
8215 || modifier == EXPAND_STACK_PARM
8216 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8217 || GET_MODE (target) != mode
8218 || (REG_P (target)
8219 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8220 target = gen_reg_rtx (mode);
8221 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8222 target, &op0, &op1, 0);
8224 /* First try to do it with a special MIN or MAX instruction.
8225 If that does not win, use a conditional jump to select the proper
8226 value. */
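/* Illustrative sketch (not compiler code): the branchy fallback emitted
   below is essentially, for MAX,

     t = a;
     if (! (t >= b))
       t = b;

   with the comparison done word by word when the mode is too wide for
   a direct compare.  */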
8227 this_optab = (unsignedp
8228 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8229 : (code == MIN_EXPR ? smin_optab : smax_optab));
8231 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8232 OPTAB_WIDEN);
8233 if (temp != 0)
8234 return temp;
8236 /* At this point, a MEM target is no longer useful; we will get better
8237 code without it. */
8239 if (GET_CODE (target) == MEM)
8240 target = gen_reg_rtx (mode);
8242 /* If op1 was placed in target, swap op0 and op1. */
8243 if (target != op0 && target == op1)
8245 rtx tem = op0;
8246 op0 = op1;
8247 op1 = tem;
8250 if (target != op0)
8251 emit_move_insn (target, op0);
8253 op0 = gen_label_rtx ();
8255 /* If this mode is an integer too wide to compare properly,
8256 compare word by word. Rely on cse to optimize constant cases. */
8257 if (GET_MODE_CLASS (mode) == MODE_INT
8258 && ! can_compare_p (GE, mode, ccp_jump))
8260 if (code == MAX_EXPR)
8261 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8262 NULL_RTX, op0);
8263 else
8264 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8265 NULL_RTX, op0);
8267 else
8269 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8270 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8272 emit_move_insn (target, op1);
8273 emit_label (op0);
8274 return target;
8276 case BIT_NOT_EXPR:
8277 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8278 if (modifier == EXPAND_STACK_PARM)
8279 target = 0;
8280 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8281 if (temp == 0)
8282 abort ();
8283 return temp;
8285 /* ??? Can optimize bitwise operations with one arg constant.
8286 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8287 and (a bitwise1 b) bitwise2 b (etc)
8288 but that is probably not worthwhile. */
8290 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8291 boolean values when we want in all cases to compute both of them. In
8292 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8293 as actual zero-or-1 values and then bitwise anding. In cases where
8294 there cannot be any side effects, better code would be made by
8295 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8296 how to recognize those cases. */
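/* Illustrative sketch (not compiler code): the distinction is between

     r = a && b;                  (TRUTH_ANDIF_EXPR: b skipped when !a)
     r = (a != 0) & (b != 0);     (TRUTH_AND_EXPR: both always evaluated)

   TRUTH_AND_EXPR is expanded as a plain bitwise AND of two zero-or-one
   values, which is why it shares the and_optab path below.  */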
8298 case TRUTH_AND_EXPR:
8299 case BIT_AND_EXPR:
8300 this_optab = and_optab;
8301 goto binop;
8303 case TRUTH_OR_EXPR:
8304 case BIT_IOR_EXPR:
8305 this_optab = ior_optab;
8306 goto binop;
8308 case TRUTH_XOR_EXPR:
8309 case BIT_XOR_EXPR:
8310 this_optab = xor_optab;
8311 goto binop;
8313 case LSHIFT_EXPR:
8314 case RSHIFT_EXPR:
8315 case LROTATE_EXPR:
8316 case RROTATE_EXPR:
8317 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8318 subtarget = 0;
8319 if (modifier == EXPAND_STACK_PARM)
8320 target = 0;
8321 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8322 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8323 unsignedp);
8325 /* Could determine the answer when only additive constants differ. Also,
8326 the addition of one can be handled by changing the condition. */
8327 case LT_EXPR:
8328 case LE_EXPR:
8329 case GT_EXPR:
8330 case GE_EXPR:
8331 case EQ_EXPR:
8332 case NE_EXPR:
8333 case UNORDERED_EXPR:
8334 case ORDERED_EXPR:
8335 case UNLT_EXPR:
8336 case UNLE_EXPR:
8337 case UNGT_EXPR:
8338 case UNGE_EXPR:
8339 case UNEQ_EXPR:
8340 case LTGT_EXPR:
8341 temp = do_store_flag (exp,
8342 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8343 tmode != VOIDmode ? tmode : mode, 0);
8344 if (temp != 0)
8345 return temp;
8347 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
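/* Illustrative sketch (not compiler code): when no store-flag
   instruction applies, r = (foo != 0) is emitted roughly as

     r = foo;
     if (r != 0)
       r = 1;

   reusing the register that already holds foo where possible.  */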
8348 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8349 && original_target
8350 && REG_P (original_target)
8351 && (GET_MODE (original_target)
8352 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8354 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8355 VOIDmode, 0);
8357 /* If temp is constant, we can just compute the result. */
8358 if (GET_CODE (temp) == CONST_INT)
8360 if (INTVAL (temp) != 0)
8361 emit_move_insn (target, const1_rtx);
8362 else
8363 emit_move_insn (target, const0_rtx);
8365 return target;
8368 if (temp != original_target)
8370 enum machine_mode mode1 = GET_MODE (temp);
8371 if (mode1 == VOIDmode)
8372 mode1 = tmode != VOIDmode ? tmode : mode;
8374 temp = copy_to_mode_reg (mode1, temp);
8377 op1 = gen_label_rtx ();
8378 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8379 GET_MODE (temp), unsignedp, op1);
8380 emit_move_insn (temp, const1_rtx);
8381 emit_label (op1);
8382 return temp;
8385 /* If no set-flag instruction, must generate a conditional
8386 store into a temporary variable. Drop through
8387 and handle this like && and ||. */
8389 case TRUTH_ANDIF_EXPR:
8390 case TRUTH_ORIF_EXPR:
8391 if (! ignore
8392 && (target == 0
8393 || modifier == EXPAND_STACK_PARM
8394 || ! safe_from_p (target, exp, 1)
8395 /* Make sure we don't have a hard reg (such as function's return
8396 value) live across basic blocks, if not optimizing. */
8397 || (!optimize && REG_P (target)
8398 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8399 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8401 if (target)
8402 emit_clr_insn (target);
8404 op1 = gen_label_rtx ();
8405 jumpifnot (exp, op1);
8407 if (target)
8408 emit_0_to_1_insn (target);
8410 emit_label (op1);
8411 return ignore ? const0_rtx : target;
8413 case TRUTH_NOT_EXPR:
8414 if (modifier == EXPAND_STACK_PARM)
8415 target = 0;
8416 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8417 /* The parser is careful to generate TRUTH_NOT_EXPR
8418 only with operands that are always zero or one. */
8419 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8420 target, 1, OPTAB_LIB_WIDEN);
8421 if (temp == 0)
8422 abort ();
8423 return temp;
8425 case COMPOUND_EXPR:
8426 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8427 emit_queue ();
8428 return expand_expr_real (TREE_OPERAND (exp, 1),
8429 (ignore ? const0_rtx : target),
8430 VOIDmode, modifier, alt_rtl);
8432 case STATEMENT_LIST:
8434 tree_stmt_iterator iter;
8436 if (!ignore)
8437 abort ();
8439 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8440 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8442 return const0_rtx;
8444 case COND_EXPR:
8445 /* If it's void, we don't need to worry about computing a value. */
8446 if (VOID_TYPE_P (TREE_TYPE (exp)))
8448 tree pred = TREE_OPERAND (exp, 0);
8449 tree then_ = TREE_OPERAND (exp, 1);
8450 tree else_ = TREE_OPERAND (exp, 2);
8452 /* If we do not have any pending cleanups or stack_levels
8453 to restore, and at least one arm of the COND_EXPR is a
8454 GOTO_EXPR to a local label, then we can emit more efficient
8455 code by using jumpif/jumpifnot instead of the 'if' machinery. */
8456 if (! optimize
8457 || containing_blocks_have_cleanups_or_stack_level ())
8459 else if (TREE_CODE (then_) == GOTO_EXPR
8460 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8462 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8463 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8465 else if (TREE_CODE (else_) == GOTO_EXPR
8466 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8468 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8469 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8472 /* Just use the 'if' machinery. */
8473 expand_start_cond (pred, 0);
8474 start_cleanup_deferral ();
8475 expand_expr (then_, const0_rtx, VOIDmode, 0);
8477 exp = else_;
8479 /* Iterate over 'else if's instead of recursing. */
8480 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8482 expand_start_else ();
8483 if (EXPR_HAS_LOCATION (exp))
8485 emit_line_note (EXPR_LOCATION (exp));
8486 if (cfun->dont_emit_block_notes)
8487 record_block_change (TREE_BLOCK (exp));
8489 expand_elseif (TREE_OPERAND (exp, 0));
8490 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8492 /* Don't emit the jump and label if there's no 'else' clause. */
8493 if (TREE_SIDE_EFFECTS (exp))
8495 expand_start_else ();
8496 expand_expr (exp, const0_rtx, VOIDmode, 0);
8498 end_cleanup_deferral ();
8499 expand_end_cond ();
8500 return const0_rtx;
8503 /* If we would have a "singleton" (see below) were it not for a
8504 conversion in each arm, bring that conversion back out. */
8505 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8506 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8507 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8508 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8510 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8511 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8513 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8514 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8515 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8516 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8517 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8518 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8519 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8520 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8521 return expand_expr (build1 (NOP_EXPR, type,
8522 build (COND_EXPR, TREE_TYPE (iftrue),
8523 TREE_OPERAND (exp, 0),
8524 iftrue, iffalse)),
8525 target, tmode, modifier);
8529 /* Note that COND_EXPRs whose type is a structure or union
8530 are required to be constructed to contain assignments of
8531 a temporary variable, so that we can evaluate them here
8532 for side effect only. If type is void, we must do likewise. */
8534 /* If an arm of the branch requires a cleanup,
8535 only that cleanup is performed. */
8537 tree singleton = 0;
8538 tree binary_op = 0, unary_op = 0;
8540 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8541 convert it to our mode, if necessary. */
8542 if (integer_onep (TREE_OPERAND (exp, 1))
8543 && integer_zerop (TREE_OPERAND (exp, 2))
8544 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8546 if (ignore)
8548 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8549 modifier);
8550 return const0_rtx;
8553 if (modifier == EXPAND_STACK_PARM)
8554 target = 0;
8555 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8556 if (GET_MODE (op0) == mode)
8557 return op0;
8559 if (target == 0)
8560 target = gen_reg_rtx (mode);
8561 convert_move (target, op0, unsignedp);
8562 return target;
8565 /* Check for X ? A + B : A. If we have this, we can copy A to the
8566 output and conditionally add B. Similarly for unary operations.
8567 Don't do this if X has side-effects because those side effects
8568 might affect A or B and the "?" operation is a sequence point in
8569 ANSI. (operand_equal_p tests for side effects.) */
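/* Illustrative sketch (not compiler code): the "singleton" shapes
   matched below are of the form

     r = x ? a + b : a;       (copy a, then conditionally add b)
     r = x ? -a : a;          (copy a, then conditionally negate it)

   so A is evaluated once and only the extra operation is guarded by
   the branch, provided X has no side effects.  */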
8571 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8572 && operand_equal_p (TREE_OPERAND (exp, 2),
8573 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8574 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8575 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8576 && operand_equal_p (TREE_OPERAND (exp, 1),
8577 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8578 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8579 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8580 && operand_equal_p (TREE_OPERAND (exp, 2),
8581 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8582 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8583 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8584 && operand_equal_p (TREE_OPERAND (exp, 1),
8585 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8586 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8588 /* If we are not to produce a result, we have no target. Otherwise,
8589 if a target was specified use it; it will not be used as an
8590 intermediate target unless it is safe. If no target, use a
8591 temporary. */
8593 if (ignore)
8594 temp = 0;
8595 else if (modifier == EXPAND_STACK_PARM)
8596 temp = assign_temp (type, 0, 0, 1);
8597 else if (original_target
8598 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8599 || (singleton && REG_P (original_target)
8600 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8601 && original_target == var_rtx (singleton)))
8602 && GET_MODE (original_target) == mode
8603 #ifdef HAVE_conditional_move
8604 && (! can_conditionally_move_p (mode)
8605 || REG_P (original_target)
8606 || TREE_ADDRESSABLE (type))
8607 #endif
8608 && (GET_CODE (original_target) != MEM
8609 || TREE_ADDRESSABLE (type)))
8610 temp = original_target;
8611 else if (TREE_ADDRESSABLE (type))
8612 abort ();
8613 else
8614 temp = assign_temp (type, 0, 0, 1);
8616 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8617 do the test of X as a store-flag operation, do this as
8618 A + ((X != 0) << log C). Similarly for other simple binary
8619 operators. Only do for C == 1 if BRANCH_COST is low. */
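/* Illustrative sketch (not compiler code): for a constant power of two
   C this rewrites

     r = x ? a + 4 : a;

   into the branch-free form

     r = a + ((x != 0) << 2);

   using do_store_flag for the (x != 0) part when the target has a
   usable set-flag instruction.  */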
8620 if (temp && singleton && binary_op
8621 && (TREE_CODE (binary_op) == PLUS_EXPR
8622 || TREE_CODE (binary_op) == MINUS_EXPR
8623 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8624 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8625 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8626 : integer_onep (TREE_OPERAND (binary_op, 1)))
8627 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8629 rtx result;
8630 tree cond;
8631 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8632 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8633 ? addv_optab : add_optab)
8634 : TREE_CODE (binary_op) == MINUS_EXPR
8635 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8636 ? subv_optab : sub_optab)
8637 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8638 : xor_optab);
8640 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8641 if (singleton == TREE_OPERAND (exp, 1))
8642 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8643 else
8644 cond = TREE_OPERAND (exp, 0);
8646 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8647 ? temp : NULL_RTX),
8648 mode, BRANCH_COST <= 1);
8650 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8651 result = expand_shift (LSHIFT_EXPR, mode, result,
8652 build_int_2 (tree_log2
8653 (TREE_OPERAND
8654 (binary_op, 1)),
8655 0),
8656 (safe_from_p (temp, singleton, 1)
8657 ? temp : NULL_RTX), 0);
8659 if (result)
8661 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8662 return expand_binop (mode, boptab, op1, result, temp,
8663 unsignedp, OPTAB_LIB_WIDEN);
8667 do_pending_stack_adjust ();
8668 NO_DEFER_POP;
8669 op0 = gen_label_rtx ();
8671 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8673 if (temp != 0)
8675 /* If the target conflicts with the other operand of the
8676 binary op, we can't use it. Also, we can't use the target
8677 if it is a hard register, because evaluating the condition
8678 might clobber it. */
8679 if ((binary_op
8680 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8681 || (REG_P (temp)
8682 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8683 temp = gen_reg_rtx (mode);
8684 store_expr (singleton, temp,
8685 modifier == EXPAND_STACK_PARM ? 2 : 0);
8687 else
8688 expand_expr (singleton,
8689 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8690 if (singleton == TREE_OPERAND (exp, 1))
8691 jumpif (TREE_OPERAND (exp, 0), op0);
8692 else
8693 jumpifnot (TREE_OPERAND (exp, 0), op0);
8695 start_cleanup_deferral ();
8696 if (binary_op && temp == 0)
8697 /* Just touch the other operand. */
8698 expand_expr (TREE_OPERAND (binary_op, 1),
8699 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8700 else if (binary_op)
8701 store_expr (build (TREE_CODE (binary_op), type,
8702 make_tree (type, temp),
8703 TREE_OPERAND (binary_op, 1)),
8704 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8705 else
8706 store_expr (build1 (TREE_CODE (unary_op), type,
8707 make_tree (type, temp)),
8708 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8709 op1 = op0;
8711 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8712 comparison operator. If we have one of these cases, set the
8713 output to A, branch on A (cse will merge these two references),
8714 then set the output to FOO. */
8715 else if (temp
8716 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8717 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8718 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8719 TREE_OPERAND (exp, 1), 0)
8720 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8721 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8722 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8724 if (REG_P (temp)
8725 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8726 temp = gen_reg_rtx (mode);
8727 store_expr (TREE_OPERAND (exp, 1), temp,
8728 modifier == EXPAND_STACK_PARM ? 2 : 0);
8729 jumpif (TREE_OPERAND (exp, 0), op0);
8731 start_cleanup_deferral ();
8732 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8733 store_expr (TREE_OPERAND (exp, 2), temp,
8734 modifier == EXPAND_STACK_PARM ? 2 : 0);
8735 else
8736 expand_expr (TREE_OPERAND (exp, 2),
8737 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8738 op1 = op0;
8740 else if (temp
8741 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8742 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8743 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8744 TREE_OPERAND (exp, 2), 0)
8745 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8746 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8747 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8749 if (REG_P (temp)
8750 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8751 temp = gen_reg_rtx (mode);
8752 store_expr (TREE_OPERAND (exp, 2), temp,
8753 modifier == EXPAND_STACK_PARM ? 2 : 0);
8754 jumpifnot (TREE_OPERAND (exp, 0), op0);
8756 start_cleanup_deferral ();
8757 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8758 store_expr (TREE_OPERAND (exp, 1), temp,
8759 modifier == EXPAND_STACK_PARM ? 2 : 0);
8760 else
8761 expand_expr (TREE_OPERAND (exp, 1),
8762 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8763 op1 = op0;
8765 else
8767 op1 = gen_label_rtx ();
8768 jumpifnot (TREE_OPERAND (exp, 0), op0);
8770 start_cleanup_deferral ();
8772 /* One branch of the cond can be void, if it never returns. For
8773 example A ? throw : E */
8774 if (temp != 0
8775 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8776 store_expr (TREE_OPERAND (exp, 1), temp,
8777 modifier == EXPAND_STACK_PARM ? 2 : 0);
8778 else
8779 expand_expr (TREE_OPERAND (exp, 1),
8780 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8781 end_cleanup_deferral ();
8782 emit_queue ();
8783 emit_jump_insn (gen_jump (op1));
8784 emit_barrier ();
8785 emit_label (op0);
8786 start_cleanup_deferral ();
8787 if (temp != 0
8788 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8789 store_expr (TREE_OPERAND (exp, 2), temp,
8790 modifier == EXPAND_STACK_PARM ? 2 : 0);
8791 else
8792 expand_expr (TREE_OPERAND (exp, 2),
8793 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8796 end_cleanup_deferral ();
8798 emit_queue ();
8799 emit_label (op1);
8800 OK_DEFER_POP;
8802 return temp;
8805 case TARGET_EXPR:
8807 /* Something needs to be initialized, but we didn't know
8808 where that thing was when building the tree. For example,
8809 it could be the return value of a function, a parameter that is
8810 passed to a function on the stack, or a temporary
8811 variable which must be passed by reference.
8813 We guarantee that the expression will either be constructed
8814 or copied into our original target. */
8816 tree slot = TREE_OPERAND (exp, 0);
8817 tree cleanups = NULL_TREE;
8818 tree exp1;
8820 if (TREE_CODE (slot) != VAR_DECL)
8821 abort ();
8823 if (! ignore)
8824 target = original_target;
8826 /* Set this here so that if we get a target that refers to a
8827 register variable that's already been used, put_reg_into_stack
8828 knows that it should fix up those uses. */
8829 TREE_USED (slot) = 1;
8831 if (target == 0)
8833 if (DECL_RTL_SET_P (slot))
8835 target = DECL_RTL (slot);
8836 /* If we have already expanded the slot, don't do
8837 it again. (mrs) */
8838 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8839 return target;
8841 else
8843 target = assign_temp (type, 2, 0, 1);
8844 SET_DECL_RTL (slot, target);
8845 if (TREE_ADDRESSABLE (slot))
8846 put_var_into_stack (slot, /*rescan=*/false);
8848 /* Since SLOT is not known to the called function
8849 to belong to its stack frame, we must build an explicit
8850 cleanup. This case occurs when we must build up a reference
8851 to pass the reference as an argument. In this case,
8852 it is very likely that such a reference need not be
8853 built here. */
8855 if (TREE_OPERAND (exp, 2) == 0)
8856 TREE_OPERAND (exp, 2)
8857 = lang_hooks.maybe_build_cleanup (slot);
8858 cleanups = TREE_OPERAND (exp, 2);
8861 else
8863 /* This case does occur when expanding a parameter which
8864 needs to be constructed on the stack. The target
8865 is the actual stack address that we want to initialize.
8866 The function we call will perform the cleanup in this case. */
8868 /* If we have already assigned it space, use that space,
8869 not the target that we were passed in, as our target
8870 parameter is only a hint. */
8871 if (DECL_RTL_SET_P (slot))
8873 target = DECL_RTL (slot);
8874 /* If we have already expanded the slot, don't do
8875 it again. (mrs) */
8876 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8877 return target;
8879 else
8881 SET_DECL_RTL (slot, target);
8882 /* If we must have an addressable slot, then make sure that
8883 the RTL that we just stored in slot is OK. */
8884 if (TREE_ADDRESSABLE (slot))
8885 put_var_into_stack (slot, /*rescan=*/true);
8889 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8890 /* Mark it as expanded. */
8891 TREE_OPERAND (exp, 1) = NULL_TREE;
8893 if (VOID_TYPE_P (TREE_TYPE (exp1)))
8894 /* If the initializer is void, just expand it; it will initialize
8895 the object directly. */
8896 expand_expr (exp1, const0_rtx, VOIDmode, 0);
8897 else
8898 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8900 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8902 return target;
8905 case INIT_EXPR:
8907 tree lhs = TREE_OPERAND (exp, 0);
8908 tree rhs = TREE_OPERAND (exp, 1);
8910 temp = expand_assignment (lhs, rhs, ! ignore);
8911 return temp;
8914 case MODIFY_EXPR:
8916 /* If lhs is complex, expand calls in rhs before computing it.
8917 That's so we don't compute a pointer and save it over a
8918 call. If lhs is simple, compute it first so we can give it
8919 as a target if the rhs is just a call. This avoids an
8920 extra temp and copy, and prevents a partial subsumption
8921 which makes bad code. Actually we could treat
8922 component_ref's of vars like vars. */
8924 tree lhs = TREE_OPERAND (exp, 0);
8925 tree rhs = TREE_OPERAND (exp, 1);
8927 temp = 0;
8929 /* Check for |= or &= of a bitfield of size one into another bitfield
8930 of size 1. In this case, (unless we need the result of the
8931 assignment) we can do this more efficiently with a
8932 test followed by an assignment, if necessary.
8934 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8935 things change so we do, this code should be enhanced to
8936 support it. */
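/* Illustrative sketch (hypothetical struct, not compiler code): for
   one-bit fields,

     s.flag |= t.flag;

   is emitted as a test and a conditional store,

     if (t.flag)
       s.flag = 1;

   (and the &= form conditionally stores 0), avoiding a read-modify-write
   of the destination bit-field when the result is not needed.  */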
8937 if (ignore
8938 && TREE_CODE (lhs) == COMPONENT_REF
8939 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8940 || TREE_CODE (rhs) == BIT_AND_EXPR)
8941 && TREE_OPERAND (rhs, 0) == lhs
8942 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8943 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8944 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8946 rtx label = gen_label_rtx ();
8948 do_jump (TREE_OPERAND (rhs, 1),
8949 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8950 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8951 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8952 (TREE_CODE (rhs) == BIT_IOR_EXPR
8953 ? integer_one_node
8954 : integer_zero_node)),
8955 0);
8956 do_pending_stack_adjust ();
8957 emit_label (label);
8958 return const0_rtx;
8961 temp = expand_assignment (lhs, rhs, ! ignore);
8963 return temp;
8966 case RETURN_EXPR:
8967 if (!TREE_OPERAND (exp, 0))
8968 expand_null_return ();
8969 else
8970 expand_return (TREE_OPERAND (exp, 0));
8971 return const0_rtx;
8973 case PREINCREMENT_EXPR:
8974 case PREDECREMENT_EXPR:
8975 return expand_increment (exp, 0, ignore);
8977 case POSTINCREMENT_EXPR:
8978 case POSTDECREMENT_EXPR:
8979 /* Faster to treat as pre-increment if result is not used. */
8980 return expand_increment (exp, ! ignore, ignore);
8982 case ADDR_EXPR:
8983 if (modifier == EXPAND_STACK_PARM)
8984 target = 0;
8985 /* If we are taking the address of something erroneous, just
8986 return a zero. */
8987 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8988 return const0_rtx;
8989 /* If we are taking the address of a constant and are at the
8990 top level, we have to use output_constant_def since we can't
8991 call force_const_mem at top level. */
8992 else if (cfun == 0
8993 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8994 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8995 == 'c')))
8996 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8997 else
8999 /* We make sure to pass const0_rtx down if we came in with
9000 ignore set, to avoid doing the cleanups twice for something. */
9001 op0 = expand_expr (TREE_OPERAND (exp, 0),
9002 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9003 (modifier == EXPAND_INITIALIZER
9004 ? modifier : EXPAND_CONST_ADDRESS));
9006 /* If we are going to ignore the result, OP0 will have been set
9007 to const0_rtx, so just return it. Don't get confused and
9008 think we are taking the address of the constant. */
9009 if (ignore)
9010 return op0;
9012 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9013 clever and return a REG when given a MEM. */
9014 op0 = protect_from_queue (op0, 1);
9016 /* We would like the object in memory. If it is a constant, we can
9017 have it be statically allocated into memory. For a non-constant,
9018 we need to allocate some memory and store the value into it. */
9020 if (CONSTANT_P (op0))
9021 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9022 op0);
9023 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
9024 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9025 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9027 /* If the operand is a SAVE_EXPR, we can deal with this by
9028 forcing the SAVE_EXPR into memory. */
9029 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9031 put_var_into_stack (TREE_OPERAND (exp, 0),
9032 /*rescan=*/true);
9033 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9035 else
9037 /* If this object is in a register, it can't be BLKmode. */
9038 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9039 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9041 if (GET_CODE (op0) == PARALLEL)
9042 /* Handle calls that pass values in multiple
9043 non-contiguous locations. The Irix 6 ABI has examples
9044 of this. */
9045 emit_group_store (memloc, op0, inner_type,
9046 int_size_in_bytes (inner_type));
9047 else
9048 emit_move_insn (memloc, op0);
9050 op0 = memloc;
9054 if (GET_CODE (op0) != MEM)
9055 abort ();
9057 mark_temp_addr_taken (op0);
9058 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9060 op0 = XEXP (op0, 0);
9061 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9062 op0 = convert_memory_address (ptr_mode, op0);
9063 return op0;
9066 /* If OP0 is not aligned at least as much as the type requires, we
9067 need to make a temporary, copy OP0 to it, and take the address of
9068 the temporary. We want to use the alignment of the type, not of
9069 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9070 the test for BLKmode means that can't happen. The test for
9071 BLKmode is because we never make mis-aligned MEMs with
9072 non-BLKmode.
9074 We don't need to do this at all if the machine doesn't have
9075 strict alignment. */
9076 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9077 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9078 > MEM_ALIGN (op0))
9079 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9081 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9082 rtx new;
9084 if (TYPE_ALIGN_OK (inner_type))
9085 abort ();
9087 if (TREE_ADDRESSABLE (inner_type))
9089 /* We can't make a bitwise copy of this object, so fail. */
9090 error ("cannot take the address of an unaligned member");
9091 return const0_rtx;
9094 new = assign_stack_temp_for_type
9095 (TYPE_MODE (inner_type),
9096 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9097 : int_size_in_bytes (inner_type),
9098 1, build_qualified_type (inner_type,
9099 (TYPE_QUALS (inner_type)
9100 | TYPE_QUAL_CONST)));
9102 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9103 (modifier == EXPAND_STACK_PARM
9104 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9106 op0 = new;
9109 op0 = force_operand (XEXP (op0, 0), target);
9112 if (flag_force_addr
9113 && !REG_P (op0)
9114 && modifier != EXPAND_CONST_ADDRESS
9115 && modifier != EXPAND_INITIALIZER
9116 && modifier != EXPAND_SUM)
9117 op0 = force_reg (Pmode, op0);
9119 if (REG_P (op0)
9120 && ! REG_USERVAR_P (op0))
9121 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9123 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9124 op0 = convert_memory_address (ptr_mode, op0);
9126 return op0;
9128 case ENTRY_VALUE_EXPR:
9129 abort ();
9131 /* COMPLEX type for Extended Pascal & Fortran */
9132 case COMPLEX_EXPR:
9134 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9135 rtx insns;
9137 /* Get the rtx code of the operands. */
9138 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9139 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9141 if (! target)
9142 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9144 start_sequence ();
9146 /* Move the real (op0) and imaginary (op1) parts to their location. */
9147 emit_move_insn (gen_realpart (mode, target), op0);
9148 emit_move_insn (gen_imagpart (mode, target), op1);
9150 insns = get_insns ();
9151 end_sequence ();
9153 /* Complex construction should appear as a single unit. */
9154 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9155 each with a separate pseudo as destination.
9156 It's not correct for flow to treat them as a unit. */
9157 if (GET_CODE (target) != CONCAT)
9158 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9159 else
9160 emit_insn (insns);
9162 return target;
9165 case REALPART_EXPR:
9166 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9167 return gen_realpart (mode, op0);
9169 case IMAGPART_EXPR:
9170 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9171 return gen_imagpart (mode, op0);
9173 case CONJ_EXPR:
9175 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9176 rtx imag_t;
9177 rtx insns;
9179 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9181 if (! target)
9182 target = gen_reg_rtx (mode);
9184 start_sequence ();
9186 /* Store the realpart and the negated imagpart to target. */
9187 emit_move_insn (gen_realpart (partmode, target),
9188 gen_realpart (partmode, op0));
9190 imag_t = gen_imagpart (partmode, target);
9191 temp = expand_unop (partmode,
9192 ! unsignedp && flag_trapv
9193 && (GET_MODE_CLASS(partmode) == MODE_INT)
9194 ? negv_optab : neg_optab,
9195 gen_imagpart (partmode, op0), imag_t, 0);
9196 if (temp != imag_t)
9197 emit_move_insn (imag_t, temp);
9199 insns = get_insns ();
9200 end_sequence ();
9202 /* Conjugate should appear as a single unit.
9203 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9204 each with a separate pseudo as destination.
9205 It's not correct for flow to treat them as a unit. */
9206 if (GET_CODE (target) != CONCAT)
9207 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9208 else
9209 emit_insn (insns);
9211 return target;
9214 case RESX_EXPR:
9215 expand_resx_expr (exp);
9216 return const0_rtx;
9218 case TRY_CATCH_EXPR:
9220 tree handler = TREE_OPERAND (exp, 1);
9222 expand_eh_region_start ();
9223 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9224 expand_eh_handler (handler);
9226 return op0;
9229 case CATCH_EXPR:
9230 expand_start_catch (CATCH_TYPES (exp));
9231 expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
9232 expand_end_catch ();
9233 return const0_rtx;
9235 case EH_FILTER_EXPR:
9236 /* Should have been handled in expand_eh_handler. */
9237 abort ();
9239 case TRY_FINALLY_EXPR:
9241 tree try_block = TREE_OPERAND (exp, 0);
9242 tree finally_block = TREE_OPERAND (exp, 1);
9244 if ((!optimize && lang_protect_cleanup_actions == NULL)
9245 || unsafe_for_reeval (finally_block) > 1)
9247 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9248 is not sufficient, so we cannot expand the block twice.
9249 So we play games with GOTO_SUBROUTINE_EXPR to let us
9250 expand the thing only once. */
9251 /* When not optimizing, we go ahead with this form since
9252 (1) user breakpoints operate more predictably without
9253 code duplication, and
9254 (2) we're not running any of the global optimizers
9255 that would explode in time/space with the highly
9256 connected CFG created by the indirect branching. */
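/* Roughly, the code emitted below has the shape:
     <try block>
     return_link = &&resume; goto finally; resume:   (emitted by the cleanup)
     goto done;
   finally: <finally block>; goto *return_link;
   done:
   so the finally block is emitted only once and is reached indirectly. */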
9258 rtx finally_label = gen_label_rtx ();
9259 rtx done_label = gen_label_rtx ();
9260 rtx return_link = gen_reg_rtx (Pmode);
9261 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9262 (tree) finally_label, (tree) return_link);
9263 TREE_SIDE_EFFECTS (cleanup) = 1;
9265 /* Start a new binding layer that will keep track of all cleanup
9266 actions to be performed. */
9267 expand_start_bindings (2);
9268 target_temp_slot_level = temp_slot_level;
9270 expand_decl_cleanup (NULL_TREE, cleanup);
9271 op0 = expand_expr (try_block, target, tmode, modifier);
9273 preserve_temp_slots (op0);
9274 expand_end_bindings (NULL_TREE, 0, 0);
9275 emit_jump (done_label);
9276 emit_label (finally_label);
9277 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9278 emit_indirect_jump (return_link);
9279 emit_label (done_label);
9281 else
9283 expand_start_bindings (2);
9284 target_temp_slot_level = temp_slot_level;
9286 expand_decl_cleanup (NULL_TREE, finally_block);
9287 op0 = expand_expr (try_block, target, tmode, modifier);
9289 preserve_temp_slots (op0);
9290 expand_end_bindings (NULL_TREE, 0, 0);
9293 return op0;
9296 case GOTO_SUBROUTINE_EXPR:
9298 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9299 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9300 rtx return_address = gen_label_rtx ();
9301 emit_move_insn (return_link,
9302 gen_rtx_LABEL_REF (Pmode, return_address));
9303 emit_jump (subr);
9304 emit_label (return_address);
9305 return const0_rtx;
9308 case VA_ARG_EXPR:
9309 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9311 case EXC_PTR_EXPR:
9312 return get_exception_pointer (cfun);
9314 case FILTER_EXPR:
9315 return get_exception_filter (cfun);
9317 case FDESC_EXPR:
9318 /* Function descriptors are not valid except as
9319 initialization constants, and should not be expanded. */
9320 abort ();
9322 case SWITCH_EXPR:
9323 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
9324 "switch");
9325 if (SWITCH_BODY (exp))
9326 expand_expr_stmt (SWITCH_BODY (exp));
9327 if (SWITCH_LABELS (exp))
9329 tree duplicate = 0;
9330 tree vec = SWITCH_LABELS (exp);
9331 size_t i, n = TREE_VEC_LENGTH (vec);
9333 for (i = 0; i < n; ++i)
9335 tree elt = TREE_VEC_ELT (vec, i);
9336 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
9337 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
9338 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
9340 tree case_low = CASE_LOW (elt);
9341 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
9342 if (case_low && case_high)
9344 /* Case label is less than minimum for type. */
9345 if ((tree_int_cst_compare (case_low, min_value) < 0)
9346 && (tree_int_cst_compare (case_high, min_value) < 0))
9348 warning ("case label value %d is less than minimum value for type",
9349 (int) TREE_INT_CST_LOW (case_low));
9350 continue;
9353 /* Case value is greater than maximum for type. */
9354 if ((tree_int_cst_compare (case_low, max_value) > 0)
9355 && (tree_int_cst_compare (case_high, max_value) > 0))
9357 warning ("case label value %d exceeds maximum value for type",
9358 (int) TREE_INT_CST_LOW (case_high));
9359 continue;
9362 /* Saturate lower case label value to minimum. */
9363 if ((tree_int_cst_compare (case_high, min_value) >= 0)
9364 && (tree_int_cst_compare (case_low, min_value) < 0))
9366 warning ("lower value %d in case label range less than minimum value for type",
9367 (int) TREE_INT_CST_LOW (case_low));
9368 case_low = min_value;
9371 /* Saturate upper case label value to maximum. */
9372 if ((tree_int_cst_compare (case_low, max_value) <= 0)
9373 && (tree_int_cst_compare (case_high, max_value) > 0))
9375 warning ("upper value %d in case label range exceeds maximum value for type",
9376 (int) TREE_INT_CST_LOW (case_high));
9377 case_high = max_value;
9381 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
9382 if (duplicate)
9383 abort ();
9386 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
9387 return const0_rtx;
9389 case LABEL_EXPR:
9390 expand_label (TREE_OPERAND (exp, 0));
9391 return const0_rtx;
9393 case CASE_LABEL_EXPR:
9395 tree duplicate = 0;
9396 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
9397 &duplicate, false);
9398 if (duplicate)
9399 abort ();
9400 return const0_rtx;
9403 case ASM_EXPR:
9404 expand_asm_expr (exp);
9405 return const0_rtx;
9407 default:
9408 return lang_hooks.expand_expr (exp, original_target, tmode,
9409 modifier, alt_rtl);
9412 /* Here to do an ordinary binary operator, generating an instruction
9413 from the optab already placed in `this_optab'. */
9414 binop:
9415 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9416 subtarget, &op0, &op1, 0);
9417 binop2:
9418 if (modifier == EXPAND_STACK_PARM)
9419 target = 0;
9420 temp = expand_binop (mode, this_optab, op0, op1, target,
9421 unsignedp, OPTAB_LIB_WIDEN);
9422 if (temp == 0)
9423 abort ();
9424 return temp;
9427 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9428 when applied to the address of EXP produces an address known to be
9429 aligned more than BIGGEST_ALIGNMENT. */
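/* For example, an OFFSET of the form ((- (long) &EXP) & (ALIGN - 1)),
   with ALIGN a power of 2; adding such an offset to &EXP rounds the
   address up to an ALIGN-byte boundary. */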
9431 static int
9432 is_aligning_offset (tree offset, tree exp)
9434 /* Strip off any conversions. */
9435 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9436 || TREE_CODE (offset) == NOP_EXPR
9437 || TREE_CODE (offset) == CONVERT_EXPR)
9438 offset = TREE_OPERAND (offset, 0);
9440 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9441 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9442 if (TREE_CODE (offset) != BIT_AND_EXPR
9443 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9444 || compare_tree_int (TREE_OPERAND (offset, 1),
9445 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9446 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9447 return 0;
9449 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9450 It must be NEGATE_EXPR. Then strip any more conversions. */
9451 offset = TREE_OPERAND (offset, 0);
9452 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9453 || TREE_CODE (offset) == NOP_EXPR
9454 || TREE_CODE (offset) == CONVERT_EXPR)
9455 offset = TREE_OPERAND (offset, 0);
9457 if (TREE_CODE (offset) != NEGATE_EXPR)
9458 return 0;
9460 offset = TREE_OPERAND (offset, 0);
9461 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9462 || TREE_CODE (offset) == NOP_EXPR
9463 || TREE_CODE (offset) == CONVERT_EXPR)
9464 offset = TREE_OPERAND (offset, 0);
9466 /* This must now be the address of EXP. */
9467 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9470 /* Return the tree node if an ARG corresponds to a string constant or zero
9471 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9472 in bytes within the string that ARG is accessing. The type of the
9473 offset will be `sizetype'. */
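/* For example, for ARG corresponding to &"hello"[2] or to "hello" + 2,
   this returns the STRING_CST "hello" and sets *PTR_OFFSET to 2. */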
9475 tree
9476 string_constant (tree arg, tree *ptr_offset)
9478 STRIP_NOPS (arg);
9480 if (TREE_CODE (arg) == ADDR_EXPR
9481 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9483 *ptr_offset = size_zero_node;
9484 return TREE_OPERAND (arg, 0);
9486 if (TREE_CODE (arg) == ADDR_EXPR
9487 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9488 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9490 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9491 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9493 else if (TREE_CODE (arg) == PLUS_EXPR)
9495 tree arg0 = TREE_OPERAND (arg, 0);
9496 tree arg1 = TREE_OPERAND (arg, 1);
9498 STRIP_NOPS (arg0);
9499 STRIP_NOPS (arg1);
9501 if (TREE_CODE (arg0) == ADDR_EXPR
9502 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9504 *ptr_offset = convert (sizetype, arg1);
9505 return TREE_OPERAND (arg0, 0);
9507 else if (TREE_CODE (arg1) == ADDR_EXPR
9508 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9510 *ptr_offset = convert (sizetype, arg0);
9511 return TREE_OPERAND (arg1, 0);
9515 return 0;
9518 /* Expand code for a post- or pre- increment or decrement
9519 and return the RTX for the result.
9520 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9522 static rtx
9523 expand_increment (tree exp, int post, int ignore)
9525 rtx op0, op1;
9526 rtx temp, value;
9527 tree incremented = TREE_OPERAND (exp, 0);
9528 optab this_optab = add_optab;
9529 int icode;
9530 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9531 int op0_is_copy = 0;
9532 int single_insn = 0;
9533 /* 1 means we can't store into OP0 directly,
9534 because it is a subreg narrower than a word,
9535 and we don't dare clobber the rest of the word. */
9536 int bad_subreg = 0;
9538 /* Stabilize any component ref that might need to be
9539 evaluated more than once below. */
9540 if (!post
9541 || TREE_CODE (incremented) == BIT_FIELD_REF
9542 || (TREE_CODE (incremented) == COMPONENT_REF
9543 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9544 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9545 incremented = stabilize_reference (incremented);
9546 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9547 ones into save exprs so that they don't accidentally get evaluated
9548 more than once by the code below. */
9549 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9550 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9551 incremented = save_expr (incremented);
9553 /* Compute the operands as RTX.
9554 Note whether OP0 is the actual lvalue or a copy of it:
9555 I believe it is a copy iff it is a register or subreg
9556 and insns were generated in computing it. */
9558 temp = get_last_insn ();
9559 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9561 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9562 in place but instead must do sign- or zero-extension during assignment,
9563 so we copy it into a new register and let the code below use it as
9564 a copy.
9566 Note that we can safely modify this SUBREG since it is known not to be
9567 shared (it was made by the expand_expr call above). */
9569 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9571 if (post)
9572 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9573 else
9574 bad_subreg = 1;
9576 else if (GET_CODE (op0) == SUBREG
9577 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9579 /* We cannot increment this SUBREG in place. If we are
9580 post-incrementing, get a copy of the old value. Otherwise,
9581 just mark that we cannot increment in place. */
9582 if (post)
9583 op0 = copy_to_reg (op0);
9584 else
9585 bad_subreg = 1;
9588 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
9589 && temp != get_last_insn ());
9590 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9592 /* Decide whether incrementing or decrementing. */
9593 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9594 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9595 this_optab = sub_optab;
9597 /* Convert decrement by a constant into a negative increment. */
9598 if (this_optab == sub_optab
9599 && GET_CODE (op1) == CONST_INT)
9601 op1 = GEN_INT (-INTVAL (op1));
9602 this_optab = add_optab;
9605 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9606 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9608 /* For a preincrement, see if we can do this with a single instruction. */
9609 if (!post)
9611 icode = (int) this_optab->handlers[(int) mode].insn_code;
9612 if (icode != (int) CODE_FOR_nothing
9613 /* Make sure that OP0 is valid for operands 0 and 1
9614 of the insn we want to queue. */
9615 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9616 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9617 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9618 single_insn = 1;
9621 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9622 then we cannot just increment OP0. We must therefore contrive to
9623 increment the original value. Then, for postincrement, we can return
9624 OP0 since it is a copy of the old value. For preincrement, expand here
9625 unless we can do it with a single insn.
9627 Likewise if storing directly into OP0 would clobber high bits
9628 we need to preserve (bad_subreg). */
9629 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9631 /* This is the easiest way to increment the value wherever it is.
9632 Problems with multiple evaluation of INCREMENTED are prevented
9633 because either (1) it is a component_ref or preincrement,
9634 in which case it was stabilized above, or (2) it is an array_ref
9635 with constant index in an array in a register, which is
9636 safe to reevaluate. */
9637 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9638 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9639 ? MINUS_EXPR : PLUS_EXPR),
9640 TREE_TYPE (exp),
9641 incremented,
9642 TREE_OPERAND (exp, 1));
9644 while (TREE_CODE (incremented) == NOP_EXPR
9645 || TREE_CODE (incremented) == CONVERT_EXPR)
9647 newexp = convert (TREE_TYPE (incremented), newexp);
9648 incremented = TREE_OPERAND (incremented, 0);
9651 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9652 return post ? op0 : temp;
9655 if (post)
9657 /* We have a true reference to the value in OP0.
9658 If there is an insn to add or subtract in this mode, queue it.
9659 Queuing the increment insn avoids the register shuffling
9660 that often results if we must increment now and first save
9661 the old value for subsequent use. */
9663 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9664 op0 = stabilize (op0);
9665 #endif
9667 icode = (int) this_optab->handlers[(int) mode].insn_code;
9668 if (icode != (int) CODE_FOR_nothing
9669 /* Make sure that OP0 is valid for operands 0 and 1
9670 of the insn we want to queue. */
9671 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9672 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9674 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9675 op1 = force_reg (mode, op1);
9677 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9679 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9681 rtx addr = (general_operand (XEXP (op0, 0), mode)
9682 ? force_reg (Pmode, XEXP (op0, 0))
9683 : copy_to_reg (XEXP (op0, 0)));
9684 rtx temp, result;
9686 op0 = replace_equiv_address (op0, addr);
9687 temp = force_reg (GET_MODE (op0), op0);
9688 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9689 op1 = force_reg (mode, op1);
9691 /* The increment queue is LIFO, thus we have to `queue'
9692 the instructions in reverse order. */
9693 enqueue_insn (op0, gen_move_insn (op0, temp));
9694 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9695 return result;
9699 /* Preincrement, or we can't increment with one simple insn. */
9700 if (post)
9701 /* Save a copy of the value before inc or dec, to return it later. */
9702 temp = value = copy_to_reg (op0);
9703 else
9704 /* Arrange to return the incremented value. */
9705 /* Copy the rtx because expand_binop will protect from the queue,
9706 and the results of that would be invalid for us to return
9707 if our caller does emit_queue before using our result. */
9708 temp = copy_rtx (value = op0);
9710 /* Increment however we can. */
9711 op1 = expand_binop (mode, this_optab, value, op1, op0,
9712 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9714 /* Make sure the value is stored into OP0. */
9715 if (op1 != op0)
9716 emit_move_insn (op0, op1);
9718 return temp;
9721 /* Generate code to calculate EXP using a store-flag instruction
9722 and return an rtx for the result. EXP is either a comparison
9723 or a TRUTH_NOT_EXPR whose operand is a comparison.
9725 If TARGET is nonzero, store the result there if convenient.
9727 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9728 cheap.
9730 Return zero if there is no suitable set-flag instruction
9731 available on this machine.
9733 Once expand_expr has been called on the arguments of the comparison,
9734 we are committed to doing the store flag, since it is not safe to
9735 re-evaluate the expression. We emit the store-flag insn by calling
9736 emit_store_flag, but only expand the arguments if we have a reason
9737 to believe that emit_store_flag will be successful. If we think that
9738 it will, but it isn't, we have to simulate the store-flag with a
9739 set/jump/set sequence. */
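/* The set/jump/set fallback emitted at the end of this function has
   roughly the shape (for the non-inverted case):
     TARGET = 1; if (OP0 <cond> OP1) goto label; TARGET = 0; label:  */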
9741 static rtx
9742 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9744 enum rtx_code code;
9745 tree arg0, arg1, type;
9746 tree tem;
9747 enum machine_mode operand_mode;
9748 int invert = 0;
9749 int unsignedp;
9750 rtx op0, op1;
9751 enum insn_code icode;
9752 rtx subtarget = target;
9753 rtx result, label;
9755 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9756 result at the end. We can't simply invert the test since it would
9757 have already been inverted if it were valid. This case occurs for
9758 some floating-point comparisons. */
9760 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9761 invert = 1, exp = TREE_OPERAND (exp, 0);
9763 arg0 = TREE_OPERAND (exp, 0);
9764 arg1 = TREE_OPERAND (exp, 1);
9766 /* Don't crash if the comparison was erroneous. */
9767 if (arg0 == error_mark_node || arg1 == error_mark_node)
9768 return const0_rtx;
9770 type = TREE_TYPE (arg0);
9771 operand_mode = TYPE_MODE (type);
9772 unsignedp = TYPE_UNSIGNED (type);
9774 /* We won't bother with BLKmode store-flag operations because it would mean
9775 passing a lot of information to emit_store_flag. */
9776 if (operand_mode == BLKmode)
9777 return 0;
9779 /* We won't bother with store-flag operations involving function pointers
9780 when function pointers must be canonicalized before comparisons. */
9781 #ifdef HAVE_canonicalize_funcptr_for_compare
9782 if (HAVE_canonicalize_funcptr_for_compare
9783 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9784 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9785 == FUNCTION_TYPE))
9786 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9787 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9788 == FUNCTION_TYPE))))
9789 return 0;
9790 #endif
9792 STRIP_NOPS (arg0);
9793 STRIP_NOPS (arg1);
9795 /* Get the rtx comparison code to use. We know that EXP is a comparison
9796 operation of some type. Some comparisons against 1 and -1 can be
9797 converted to comparisons with zero. Do so here so that the tests
9798 below will be aware that we have a comparison with zero. These
9799 tests will not catch constants in the first operand, but constants
9800 are rarely passed as the first operand. */
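/* For example, X < 1 becomes X <= 0, and, for signed operands,
   X > -1 becomes X >= 0. */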
9802 switch (TREE_CODE (exp))
9804 case EQ_EXPR:
9805 code = EQ;
9806 break;
9807 case NE_EXPR:
9808 code = NE;
9809 break;
9810 case LT_EXPR:
9811 if (integer_onep (arg1))
9812 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9813 else
9814 code = unsignedp ? LTU : LT;
9815 break;
9816 case LE_EXPR:
9817 if (! unsignedp && integer_all_onesp (arg1))
9818 arg1 = integer_zero_node, code = LT;
9819 else
9820 code = unsignedp ? LEU : LE;
9821 break;
9822 case GT_EXPR:
9823 if (! unsignedp && integer_all_onesp (arg1))
9824 arg1 = integer_zero_node, code = GE;
9825 else
9826 code = unsignedp ? GTU : GT;
9827 break;
9828 case GE_EXPR:
9829 if (integer_onep (arg1))
9830 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9831 else
9832 code = unsignedp ? GEU : GE;
9833 break;
9835 case UNORDERED_EXPR:
9836 code = UNORDERED;
9837 break;
9838 case ORDERED_EXPR:
9839 code = ORDERED;
9840 break;
9841 case UNLT_EXPR:
9842 code = UNLT;
9843 break;
9844 case UNLE_EXPR:
9845 code = UNLE;
9846 break;
9847 case UNGT_EXPR:
9848 code = UNGT;
9849 break;
9850 case UNGE_EXPR:
9851 code = UNGE;
9852 break;
9853 case UNEQ_EXPR:
9854 code = UNEQ;
9855 break;
9856 case LTGT_EXPR:
9857 code = LTGT;
9858 break;
9860 default:
9861 abort ();
9864 /* Put a constant second. */
9865 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9867 tem = arg0; arg0 = arg1; arg1 = tem;
9868 code = swap_condition (code);
9871 /* If this is an equality or inequality test of a single bit, we can
9872 do this by shifting the bit being tested to the low-order bit and
9873 masking the result with the constant 1. If the condition was EQ,
9874 we xor it with 1. This does not require an scc insn and is faster
9875 than an scc insn even if we have it.
9877 The code to make this transformation was moved into fold_single_bit_test,
9878 so we just call into the folder and expand its result. */
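/* For example, (X & 4) != 0 is expanded as (X >> 2) & 1, and
   (X & 4) == 0 as ((X >> 2) & 1) ^ 1. */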
9880 if ((code == NE || code == EQ)
9881 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9882 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9884 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9885 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9886 arg0, arg1, type),
9887 target, VOIDmode, EXPAND_NORMAL);
9890 /* Now see if we are likely to be able to do this. Return if not. */
9891 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9892 return 0;
9894 icode = setcc_gen_code[(int) code];
9895 if (icode == CODE_FOR_nothing
9896 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9898 /* We can only do this if it is one of the special cases that
9899 can be handled without an scc insn. */
9900 if ((code == LT && integer_zerop (arg1))
9901 || (! only_cheap && code == GE && integer_zerop (arg1)))
9902 ;
9903 else if (BRANCH_COST >= 0
9904 && ! only_cheap && (code == NE || code == EQ)
9905 && TREE_CODE (type) != REAL_TYPE
9906 && ((abs_optab->handlers[(int) operand_mode].insn_code
9907 != CODE_FOR_nothing)
9908 || (ffs_optab->handlers[(int) operand_mode].insn_code
9909 != CODE_FOR_nothing)))
9911 else
9912 return 0;
9915 if (! get_subtarget (target)
9916 || GET_MODE (subtarget) != operand_mode)
9917 subtarget = 0;
9919 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9921 if (target == 0)
9922 target = gen_reg_rtx (mode);
9924 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9925 because, if emit_store_flag does anything, it will succeed and
9926 OP0 and OP1 will not be used subsequently. */
9928 result = emit_store_flag (target, code,
9929 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9930 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9931 operand_mode, unsignedp, 1);
9933 if (result)
9935 if (invert)
9936 result = expand_binop (mode, xor_optab, result, const1_rtx,
9937 result, 0, OPTAB_LIB_WIDEN);
9938 return result;
9941 /* If this failed, we have to do this with set/compare/jump/set code. */
9942 if (!REG_P (target)
9943 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9944 target = gen_reg_rtx (GET_MODE (target));
9946 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9947 result = compare_from_rtx (op0, op1, code, unsignedp,
9948 operand_mode, NULL_RTX);
9949 if (GET_CODE (result) == CONST_INT)
9950 return (((result == const0_rtx && ! invert)
9951 || (result != const0_rtx && invert))
9952 ? const0_rtx : const1_rtx);
9954 /* The code of RESULT may not match CODE if compare_from_rtx
9955 decided to swap its operands and reverse the original code.
9957 We know that compare_from_rtx returns either a CONST_INT or
9958 a new comparison code, so it is safe to just extract the
9959 code from RESULT. */
9960 code = GET_CODE (result);
9962 label = gen_label_rtx ();
9963 if (bcc_gen_fctn[(int) code] == 0)
9964 abort ();
9966 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9967 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9968 emit_label (label);
9970 return target;
9974 /* Stubs in case we haven't got a casesi insn. */
9975 #ifndef HAVE_casesi
9976 # define HAVE_casesi 0
9977 # define gen_casesi(a, b, c, d, e) (0)
9978 # define CODE_FOR_casesi CODE_FOR_nothing
9979 #endif
9981 /* If the machine does not have a case insn that compares the bounds,
9982 this means extra overhead for dispatch tables, which raises the
9983 threshold for using them. */
9984 #ifndef CASE_VALUES_THRESHOLD
9985 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9986 #endif /* CASE_VALUES_THRESHOLD */
9988 unsigned int
9989 case_values_threshold (void)
9991 return CASE_VALUES_THRESHOLD;
9994 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9995 0 otherwise (i.e. if there is no casesi instruction). */
9996 int
9997 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9998 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
10000 enum machine_mode index_mode = SImode;
10001 int index_bits = GET_MODE_BITSIZE (index_mode);
10002 rtx op1, op2, index;
10003 enum machine_mode op_mode;
10005 if (! HAVE_casesi)
10006 return 0;
10008 /* Convert the index to SImode. */
10009 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10011 enum machine_mode omode = TYPE_MODE (index_type);
10012 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10014 /* We must handle the endpoints in the original mode. */
10015 index_expr = build (MINUS_EXPR, index_type,
10016 index_expr, minval);
10017 minval = integer_zero_node;
10018 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10019 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10020 omode, 1, default_label);
10021 /* Now we can safely truncate. */
10022 index = convert_to_mode (index_mode, index, 0);
10024 else
10026 if (TYPE_MODE (index_type) != index_mode)
10028 index_expr = convert (lang_hooks.types.type_for_size
10029 (index_bits, 0), index_expr);
10030 index_type = TREE_TYPE (index_expr);
10033 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10035 emit_queue ();
10036 index = protect_from_queue (index, 0);
10037 do_pending_stack_adjust ();
10039 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10040 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10041 (index, op_mode))
10042 index = copy_to_mode_reg (op_mode, index);
10044 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10046 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10047 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10048 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10049 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10050 (op1, op_mode))
10051 op1 = copy_to_mode_reg (op_mode, op1);
10053 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10055 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10056 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10057 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10058 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10059 (op2, op_mode))
10060 op2 = copy_to_mode_reg (op_mode, op2);
10062 emit_jump_insn (gen_casesi (index, op1, op2,
10063 table_label, default_label));
10064 return 1;
10067 /* Attempt to generate a tablejump instruction; same concept. */
10068 #ifndef HAVE_tablejump
10069 #define HAVE_tablejump 0
10070 #define gen_tablejump(x, y) (0)
10071 #endif
10073 /* Subroutine of the next function.
10075 INDEX is the value being switched on, with the lowest value
10076 in the table already subtracted.
10077 MODE is its expected mode (needed if INDEX is constant).
10078 RANGE is the length of the jump table.
10079 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10081 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10082 index value is out of range. */
10084 static void
10085 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10086 rtx default_label)
10088 rtx temp, vector;
10090 if (INTVAL (range) > cfun->max_jumptable_ents)
10091 cfun->max_jumptable_ents = INTVAL (range);
10093 /* Do an unsigned comparison (in the proper mode) between the index
10094 expression and the value which represents the length of the range.
10095 Since we just finished subtracting the lower bound of the range
10096 from the index expression, this comparison allows us to simultaneously
10097 check that the original index expression value is both greater than
10098 or equal to the minimum value of the range and less than or equal to
10099 the maximum value of the range. */
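/* For example, with case values 5 .. 10 (so RANGE here is 5), INDEX is
   X - 5, and the single unsigned test (unsigned) (X - 5) > 5 sends both
   X < 5 and X > 10 to DEFAULT_LABEL. */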
10101 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10102 default_label);
10104 /* If index is in range, it must fit in Pmode.
10105 Convert to Pmode so we can index with it. */
10106 if (mode != Pmode)
10107 index = convert_to_mode (Pmode, index, 1);
10109 /* Don't let a MEM slip through, because then INDEX that comes
10110 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10111 and break_out_memory_refs will go to work on it and mess it up. */
10112 #ifdef PIC_CASE_VECTOR_ADDRESS
10113 if (flag_pic && !REG_P (index))
10114 index = copy_to_mode_reg (Pmode, index);
10115 #endif
10117 /* If flag_force_addr were to affect this address
10118 it could interfere with the tricky assumptions made
10119 about addresses that contain label-refs,
10120 which may be valid only very near the tablejump itself. */
10121 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10122 GET_MODE_SIZE, because this indicates how large insns are. The other
10123 uses should all be Pmode, because they are addresses. This code
10124 could fail if addresses and insns are not the same size. */
10125 index = gen_rtx_PLUS (Pmode,
10126 gen_rtx_MULT (Pmode, index,
10127 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10128 gen_rtx_LABEL_REF (Pmode, table_label));
10129 #ifdef PIC_CASE_VECTOR_ADDRESS
10130 if (flag_pic)
10131 index = PIC_CASE_VECTOR_ADDRESS (index);
10132 else
10133 #endif
10134 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10135 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10136 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10137 RTX_UNCHANGING_P (vector) = 1;
10138 MEM_NOTRAP_P (vector) = 1;
10139 convert_move (temp, vector, 0);
10141 emit_jump_insn (gen_tablejump (temp, table_label));
10143 /* If we are generating PIC code or if the table is PC-relative, the
10144 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10145 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10146 emit_barrier ();
10149 int
10150 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10151 rtx table_label, rtx default_label)
10153 rtx index;
10155 if (! HAVE_tablejump)
10156 return 0;
10158 index_expr = fold (build (MINUS_EXPR, index_type,
10159 convert (index_type, index_expr),
10160 convert (index_type, minval)));
10161 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10162 emit_queue ();
10163 index = protect_from_queue (index, 0);
10164 do_pending_stack_adjust ();
10166 do_tablejump (index, TYPE_MODE (index_type),
10167 convert_modes (TYPE_MODE (index_type),
10168 TYPE_MODE (TREE_TYPE (range)),
10169 expand_expr (range, NULL_RTX,
10170 VOIDmode, 0),
10171 TYPE_UNSIGNED (TREE_TYPE (range))),
10172 table_label, default_label);
10173 return 1;
10176 /* Nonzero if the mode is a valid vector mode for this architecture.
10177 This returns nonzero even if there is no hardware support for the
10178 vector mode, but we can emulate with narrower modes. */
10180 int
10181 vector_mode_valid_p (enum machine_mode mode)
10183 enum mode_class class = GET_MODE_CLASS (mode);
10184 enum machine_mode innermode;
10186 /* Doh! What's going on? */
10187 if (class != MODE_VECTOR_INT
10188 && class != MODE_VECTOR_FLOAT)
10189 return 0;
10191 /* Hardware support. Woo hoo! */
10192 if (VECTOR_MODE_SUPPORTED_P (mode))
10193 return 1;
10195 innermode = GET_MODE_INNER (mode);
10197 /* We should probably return 1 if requesting V4DI and we have no DI,
10198 but do have V2DI, but this is probably very unlikely. */
10200 /* If we have support for the inner mode, we can safely emulate it.
10201 We may not have V2DI, but we can emulate with a pair of DIs. */
10202 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10205 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10206 static rtx
10207 const_vector_from_tree (tree exp)
10209 rtvec v;
10210 int units, i;
10211 tree link, elt;
10212 enum machine_mode inner, mode;
10214 mode = TYPE_MODE (TREE_TYPE (exp));
10216 if (initializer_zerop (exp))
10217 return CONST0_RTX (mode);
10219 units = GET_MODE_NUNITS (mode);
10220 inner = GET_MODE_INNER (mode);
10222 v = rtvec_alloc (units);
10224 link = TREE_VECTOR_CST_ELTS (exp);
10225 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10227 elt = TREE_VALUE (link);
10229 if (TREE_CODE (elt) == REAL_CST)
10230 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10231 inner);
10232 else
10233 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10234 TREE_INT_CST_HIGH (elt),
10235 inner);
10238 /* Initialize remaining elements to 0. */
10239 for (; i < units; ++i)
10240 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10242 return gen_rtx_raw_CONST_VECTOR (mode, v);
10244 #include "gt-expr.h"