[official-gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
122 static rtx enqueue_insn (rtx, rtx);
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, enum machine_mode, int, tree, int);
148 static rtx var_rtx (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
151 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
153 static int is_aligning_offset (tree, tree);
154 static rtx expand_increment (tree, int, int);
155 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
156 enum expand_modifier);
157 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
158 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
159 #ifdef PUSH_ROUNDING
160 static void emit_single_push_insn (enum machine_mode, rtx, tree);
161 #endif
162 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
163 static rtx const_vector_from_tree (tree);
165 /* Record for each mode whether we can move a register directly to or
166 from an object of that mode in memory. If we can't, we won't try
167 to use that mode directly when accessing a field of that mode. */
169 static char direct_load[NUM_MACHINE_MODES];
170 static char direct_store[NUM_MACHINE_MODES];
172 /* Record for each mode whether we can float-extend from memory. */
174 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
176 /* This macro is used to determine whether move_by_pieces should be called
177 to perform a structure copy. */
178 #ifndef MOVE_BY_PIECES_P
179 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
180 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
181 #endif
183 /* This macro is used to determine whether clear_by_pieces should be
184 called to clear storage. */
185 #ifndef CLEAR_BY_PIECES_P
186 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
188 #endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
195 #endif
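/* A sketch of how a target header might override these defaults
   (hypothetical cutoff values, not part of this file):

     #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
       ((SIZE) <= 16 && (ALIGN) >= 32)  */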
197 /* This array records the insn_code of insns to perform block moves. */
198 enum insn_code movmem_optab[NUM_MACHINE_MODES];
200 /* This array records the insn_code of insns to perform block clears. */
201 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
203 /* These arrays record the insn_code of two different kinds of insns
204 to perform block compares. */
205 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
208 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
210 #ifndef SLOW_UNALIGNED_ACCESS
211 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
212 #endif
214 /* This is run once per compilation to set up which modes can be used
215 directly in memory and to initialize the block move optab. */
217 void
218 init_expr_once (void)
220 rtx insn, pat;
221 enum machine_mode mode;
222 int num_clobbers;
223 rtx mem, mem1;
224 rtx reg;
226 /* Try indexing by frame ptr and try by stack ptr.
227 It is known that on the Convex the stack ptr isn't a valid index.
228 With luck, one or the other is valid on any machine. */
229 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
230 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
232 /* A scratch register we can modify in-place below to avoid
233 useless RTL allocations. */
234 reg = gen_rtx_REG (VOIDmode, -1);
236 insn = rtx_alloc (INSN);
237 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
238 PATTERN (insn) = pat;
240 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
241 mode = (enum machine_mode) ((int) mode + 1))
243 int regno;
245 direct_load[(int) mode] = direct_store[(int) mode] = 0;
246 PUT_MODE (mem, mode);
247 PUT_MODE (mem1, mode);
248 PUT_MODE (reg, mode);
250 /* See if there is some register that can be used in this mode and
251 directly loaded or stored from memory. */
253 if (mode != VOIDmode && mode != BLKmode)
254 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
255 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
256 regno++)
258 if (! HARD_REGNO_MODE_OK (regno, mode))
259 continue;
261 REGNO (reg) = regno;
263 SET_SRC (pat) = mem;
264 SET_DEST (pat) = reg;
265 if (recog (pat, insn, &num_clobbers) >= 0)
266 direct_load[(int) mode] = 1;
268 SET_SRC (pat) = mem1;
269 SET_DEST (pat) = reg;
270 if (recog (pat, insn, &num_clobbers) >= 0)
271 direct_load[(int) mode] = 1;
273 SET_SRC (pat) = reg;
274 SET_DEST (pat) = mem;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_store[(int) mode] = 1;
278 SET_SRC (pat) = reg;
279 SET_DEST (pat) = mem1;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_store[(int) mode] = 1;
285 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
287 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
288 mode = GET_MODE_WIDER_MODE (mode))
290 enum machine_mode srcmode;
291 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
292 srcmode = GET_MODE_WIDER_MODE (srcmode))
294 enum insn_code ic;
296 ic = can_extend_p (mode, srcmode, 0);
297 if (ic == CODE_FOR_nothing)
298 continue;
300 PUT_MODE (mem, srcmode);
302 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
303 float_extend_from_mem[mode][srcmode] = true;
308 /* This is run at the start of compiling a function. */
310 void
311 init_expr (void)
313 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
316 /* Small sanity check that the queue is empty at the end of a function. */
318 void
319 finish_expr_for_function (void)
321 if (pending_chain)
322 abort ();
325 /* Manage the queue of increment instructions to be output
326 for POSTINCREMENT_EXPR expressions, etc. */
328 /* Queue up to increment (or change) VAR later. BODY says how:
329 BODY should be the same thing you would pass to emit_insn
330 to increment right away. It will go to emit_insn later on.
332 The value is a QUEUED expression to be used in place of VAR
333 where you want to guarantee the pre-incrementation value of VAR. */
335 static rtx
336 enqueue_insn (rtx var, rtx body)
338 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
339 body, pending_chain);
340 return pending_chain;
343 /* Use protect_from_queue to convert a QUEUED expression
344 into something that you can put immediately into an instruction.
345 If the queued incrementation has not happened yet,
346 protect_from_queue returns the variable itself.
347 If the incrementation has happened, protect_from_queue returns a temp
348 that contains a copy of the old value of the variable.
350 Any time an rtx which might possibly be a QUEUED is to be put
351 into an instruction, it must be passed through protect_from_queue first.
352 QUEUED expressions are not meaningful in instructions.
354 Do not pass a value through protect_from_queue and then hold
355 on to it for a while before putting it in an instruction!
356 If the queue is flushed in between, incorrect code will result. */
358 rtx
359 protect_from_queue (rtx x, int modify)
361 RTX_CODE code = GET_CODE (x);
363 #if 0 /* A QUEUED can hang around after the queue is forced out. */
364 /* Shortcut for most common case. */
365 if (pending_chain == 0)
366 return x;
367 #endif
369 if (code != QUEUED)
371 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
372 use of autoincrement. Make a copy of the contents of the memory
373 location rather than a copy of the address, but not if the value is
374 of mode BLKmode. Don't modify X in place since it might be
375 shared. */
376 if (code == MEM && GET_MODE (x) != BLKmode
377 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
379 rtx y = XEXP (x, 0);
380 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
382 if (QUEUED_INSN (y))
384 rtx temp = gen_reg_rtx (GET_MODE (x));
386 emit_insn_before (gen_move_insn (temp, new),
387 QUEUED_INSN (y));
388 return temp;
391 /* Copy the address into a pseudo, so that the returned value
392 remains correct across calls to emit_queue. */
393 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
396 /* Otherwise, recursively protect the subexpressions of all
397 the kinds of rtx's that can contain a QUEUED. */
398 if (code == MEM)
400 rtx tem = protect_from_queue (XEXP (x, 0), 0);
401 if (tem != XEXP (x, 0))
403 x = copy_rtx (x);
404 XEXP (x, 0) = tem;
407 else if (code == PLUS || code == MULT)
409 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
410 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
411 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
413 x = copy_rtx (x);
414 XEXP (x, 0) = new0;
415 XEXP (x, 1) = new1;
418 return x;
420 /* If the increment has not happened, use the variable itself. Copy it
421 into a new pseudo so that the value remains correct across calls to
422 emit_queue. */
423 if (QUEUED_INSN (x) == 0)
424 return copy_to_reg (QUEUED_VAR (x));
425 /* If the increment has happened and a pre-increment copy exists,
426 use that copy. */
427 if (QUEUED_COPY (x) != 0)
428 return QUEUED_COPY (x);
429 /* The increment has happened but we haven't set up a pre-increment copy.
430 Set one up now, and use it. */
431 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
432 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
433 QUEUED_INSN (x));
434 return QUEUED_COPY (x);
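/* Illustrative sketch only; EXP and TARGET are hypothetical names.  A
   typical expansion sequence protects an operand before placing it in an
   insn, then flushes the queue so pending increments are emitted:

     rtx op = expand_expr (exp, NULL_RTX, VOIDmode, 0);
     op = protect_from_queue (op, 0);
     emit_move_insn (target, op);
     emit_queue ();  */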
437 /* Return nonzero if X contains a QUEUED expression:
438 if it contains anything that will be altered by a queued increment.
439 We handle only combinations of MEM, PLUS, MINUS and MULT operators
440 since memory addresses generally contain only those. */
442 int
443 queued_subexp_p (rtx x)
445 enum rtx_code code = GET_CODE (x);
446 switch (code)
448 case QUEUED:
449 return 1;
450 case MEM:
451 return queued_subexp_p (XEXP (x, 0));
452 case MULT:
453 case PLUS:
454 case MINUS:
455 return (queued_subexp_p (XEXP (x, 0))
456 || queued_subexp_p (XEXP (x, 1)));
457 default:
458 return 0;
462 /* Retrieve a mark on the queue. */
464 static rtx
465 mark_queue (void)
467 return pending_chain;
470 /* Perform all the pending incrementations that have been enqueued
471 after MARK was retrieved. If MARK is null, perform all the
472 pending incrementations. */
474 static void
475 emit_insns_enqueued_after_mark (rtx mark)
477 rtx p;
479 /* The marked incrementation may have been emitted in the meantime
480 through a call to emit_queue. In this case, the mark is not valid
481 anymore so do nothing. */
482 if (mark && ! QUEUED_BODY (mark))
483 return;
485 while ((p = pending_chain) != mark)
487 rtx body = QUEUED_BODY (p);
489 switch (GET_CODE (body))
491 case INSN:
492 case JUMP_INSN:
493 case CALL_INSN:
494 case CODE_LABEL:
495 case BARRIER:
496 case NOTE:
497 QUEUED_INSN (p) = body;
498 emit_insn (body);
499 break;
501 #ifdef ENABLE_CHECKING
502 case SEQUENCE:
503 abort ();
504 break;
505 #endif
507 default:
508 QUEUED_INSN (p) = emit_insn (body);
509 break;
512 QUEUED_BODY (p) = 0;
513 pending_chain = QUEUED_NEXT (p);
517 /* Perform all the pending incrementations. */
519 void
520 emit_queue (void)
522 emit_insns_enqueued_after_mark (NULL_RTX);
525 /* Copy data from FROM to TO, where the machine modes are not the same.
526 Both modes may be integer, or both may be floating.
527 UNSIGNEDP should be nonzero if FROM is an unsigned type.
528 This causes zero-extension instead of sign-extension. */
530 void
531 convert_move (rtx to, rtx from, int unsignedp)
533 enum machine_mode to_mode = GET_MODE (to);
534 enum machine_mode from_mode = GET_MODE (from);
535 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
536 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
537 enum insn_code code;
538 rtx libcall;
540 /* rtx code for making an equivalent value. */
541 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
542 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
544 to = protect_from_queue (to, 1);
545 from = protect_from_queue (from, 0);
547 if (to_real != from_real)
548 abort ();
550 /* If the source and destination are already the same, then there's
551 nothing to do. */
552 if (to == from)
553 return;
555 /* If FROM is a SUBREG that indicates that we have already done at least
556 the required extension, strip it. We don't handle such SUBREGs as
557 TO here. */
559 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
560 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
561 >= GET_MODE_SIZE (to_mode))
562 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
563 from = gen_lowpart (to_mode, from), from_mode = to_mode;
565 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
566 abort ();
568 if (to_mode == from_mode
569 || (from_mode == VOIDmode && CONSTANT_P (from)))
571 emit_move_insn (to, from);
572 return;
575 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
577 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
578 abort ();
580 if (VECTOR_MODE_P (to_mode))
581 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
582 else
583 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
585 emit_move_insn (to, from);
586 return;
589 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
591 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
592 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
593 return;
596 if (to_real)
598 rtx value, insns;
599 convert_optab tab;
601 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
602 tab = sext_optab;
603 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
604 tab = trunc_optab;
605 else
606 abort ();
608 /* Try converting directly if the insn is supported. */
610 code = tab->handlers[to_mode][from_mode].insn_code;
611 if (code != CODE_FOR_nothing)
613 emit_unop_insn (code, to, from,
614 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
615 return;
618 /* Otherwise use a libcall. */
619 libcall = tab->handlers[to_mode][from_mode].libfunc;
621 if (!libcall)
622 /* This conversion is not implemented yet. */
623 abort ();
625 start_sequence ();
626 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
627 1, from, from_mode);
628 insns = get_insns ();
629 end_sequence ();
630 emit_libcall_block (insns, to, value,
631 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
632 from)
633 : gen_rtx_FLOAT_EXTEND (to_mode, from));
634 return;
637 /* Handle pointer conversion. */ /* SPEE 900220. */
638 /* Targets are expected to provide conversion insns between PxImode and
639 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
640 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
642 enum machine_mode full_mode
643 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
645 if (trunc_optab->handlers[to_mode][full_mode].insn_code
646 == CODE_FOR_nothing)
647 abort ();
649 if (full_mode != from_mode)
650 from = convert_to_mode (full_mode, from, unsignedp);
651 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
652 to, from, UNKNOWN);
653 return;
655 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
657 enum machine_mode full_mode
658 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
660 if (sext_optab->handlers[full_mode][from_mode].insn_code
661 == CODE_FOR_nothing)
662 abort ();
664 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
665 to, from, UNKNOWN);
666 if (to_mode == full_mode)
667 return;
669 /* else proceed to integer conversions below. */
670 from_mode = full_mode;
673 /* Now both modes are integers. */
675 /* Handle expanding beyond a word. */
676 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
677 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
679 rtx insns;
680 rtx lowpart;
681 rtx fill_value;
682 rtx lowfrom;
683 int i;
684 enum machine_mode lowpart_mode;
685 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
687 /* Try converting directly if the insn is supported. */
688 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
689 != CODE_FOR_nothing)
691 /* If FROM is a SUBREG, put it into a register. Do this
692 so that we always generate the same set of insns for
693 better cse'ing; if an intermediate assignment occurred,
694 we won't be doing the operation directly on the SUBREG. */
695 if (optimize > 0 && GET_CODE (from) == SUBREG)
696 from = force_reg (from_mode, from);
697 emit_unop_insn (code, to, from, equiv_code);
698 return;
700 /* Next, try converting via full word. */
701 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
702 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
703 != CODE_FOR_nothing))
705 if (REG_P (to))
707 if (reg_overlap_mentioned_p (to, from))
708 from = force_reg (from_mode, from);
709 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
711 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
712 emit_unop_insn (code, to,
713 gen_lowpart (word_mode, to), equiv_code);
714 return;
717 /* No special multiword conversion insn; do it by hand. */
718 start_sequence ();
720 /* Since we will turn this into a no conflict block, we must ensure
721 that the source does not overlap the target. */
723 if (reg_overlap_mentioned_p (to, from))
724 from = force_reg (from_mode, from);
726 /* Get a copy of FROM widened to a word, if necessary. */
727 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
728 lowpart_mode = word_mode;
729 else
730 lowpart_mode = from_mode;
732 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
734 lowpart = gen_lowpart (lowpart_mode, to);
735 emit_move_insn (lowpart, lowfrom);
737 /* Compute the value to put in each remaining word. */
738 if (unsignedp)
739 fill_value = const0_rtx;
740 else
742 #ifdef HAVE_slt
743 if (HAVE_slt
744 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
745 && STORE_FLAG_VALUE == -1)
747 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
748 lowpart_mode, 0);
749 fill_value = gen_reg_rtx (word_mode);
750 emit_insn (gen_slt (fill_value));
752 else
753 #endif
755 fill_value
756 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
757 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
758 NULL_RTX, 0);
759 fill_value = convert_to_mode (word_mode, fill_value, 1);
763 /* Fill the remaining words. */
764 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
766 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
767 rtx subword = operand_subword (to, index, 1, to_mode);
769 if (subword == 0)
770 abort ();
772 if (fill_value != subword)
773 emit_move_insn (subword, fill_value);
776 insns = get_insns ();
777 end_sequence ();
779 emit_no_conflict_block (insns, to, from, NULL_RTX,
780 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
781 return;
784 /* Truncating multi-word to a word or less. */
785 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
786 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
788 if (!((MEM_P (from)
789 && ! MEM_VOLATILE_P (from)
790 && direct_load[(int) to_mode]
791 && ! mode_dependent_address_p (XEXP (from, 0)))
792 || REG_P (from)
793 || GET_CODE (from) == SUBREG))
794 from = force_reg (from_mode, from);
795 convert_move (to, gen_lowpart (word_mode, from), 0);
796 return;
799 /* Now follow all the conversions between integers
800 no more than a word long. */
802 /* For truncation, usually we can just refer to FROM in a narrower mode. */
803 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
804 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
805 GET_MODE_BITSIZE (from_mode)))
807 if (!((MEM_P (from)
808 && ! MEM_VOLATILE_P (from)
809 && direct_load[(int) to_mode]
810 && ! mode_dependent_address_p (XEXP (from, 0)))
811 || REG_P (from)
812 || GET_CODE (from) == SUBREG))
813 from = force_reg (from_mode, from);
814 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
815 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
816 from = copy_to_reg (from);
817 emit_move_insn (to, gen_lowpart (to_mode, from));
818 return;
821 /* Handle extension. */
822 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
824 /* Convert directly if that works. */
825 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
826 != CODE_FOR_nothing)
828 if (flag_force_mem)
829 from = force_not_mem (from);
831 emit_unop_insn (code, to, from, equiv_code);
832 return;
834 else
836 enum machine_mode intermediate;
837 rtx tmp;
838 tree shift_amount;
840 /* Search for a mode to convert via. */
841 for (intermediate = from_mode; intermediate != VOIDmode;
842 intermediate = GET_MODE_WIDER_MODE (intermediate))
843 if (((can_extend_p (to_mode, intermediate, unsignedp)
844 != CODE_FOR_nothing)
845 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
846 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
847 GET_MODE_BITSIZE (intermediate))))
848 && (can_extend_p (intermediate, from_mode, unsignedp)
849 != CODE_FOR_nothing))
851 convert_move (to, convert_to_mode (intermediate, from,
852 unsignedp), unsignedp);
853 return;
856 /* No suitable intermediate mode.
857 Generate what we need with shifts. */
858 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
859 - GET_MODE_BITSIZE (from_mode), 0);
860 from = gen_lowpart (to_mode, force_reg (from_mode, from));
861 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
862 to, unsignedp);
863 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
864 to, unsignedp);
865 if (tmp != to)
866 emit_move_insn (to, tmp);
867 return;
871 /* Support special truncate insns for certain modes. */
872 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
874 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
875 to, from, UNKNOWN);
876 return;
879 /* Handle truncation of volatile memrefs, and so on;
880 the things that couldn't be truncated directly,
881 and for which there was no special instruction.
883 ??? Code above formerly short-circuited this, for most integer
884 mode pairs, with a force_reg in from_mode followed by a recursive
885 call to this routine. Appears always to have been wrong. */
886 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
888 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
889 emit_move_insn (to, temp);
890 return;
893 /* Mode combination is not recognized. */
894 abort ();
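/* Illustrative sketch only; the pseudos are hypothetical.  Widening a
   32-bit value into a 64-bit register with zero-extension:

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   Passing UNSIGNEDP == 0 would sign-extend instead.  */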
897 /* Return an rtx for a value that would result
898 from converting X to mode MODE.
899 Both X and MODE may be floating, or both integer.
900 UNSIGNEDP is nonzero if X is an unsigned value.
901 This can be done by referring to a part of X in place
902 or by copying to a new temporary with conversion.
904 This function *must not* call protect_from_queue
905 except when putting X into an insn (in which case convert_move does it). */
907 rtx
908 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
910 return convert_modes (mode, VOIDmode, x, unsignedp);
913 /* Return an rtx for a value that would result
914 from converting X from mode OLDMODE to mode MODE.
915 Both modes may be floating, or both integer.
916 UNSIGNEDP is nonzero if X is an unsigned value.
918 This can be done by referring to a part of X in place
919 or by copying to a new temporary with conversion.
921 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
923 This function *must not* call protect_from_queue
924 except when putting X into an insn (in which case convert_move does it). */
926 rtx
927 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
929 rtx temp;
931 /* If FROM is a SUBREG that indicates that we have already done at least
932 the required extension, strip it. */
934 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
935 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
936 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
937 x = gen_lowpart (mode, x);
939 if (GET_MODE (x) != VOIDmode)
940 oldmode = GET_MODE (x);
942 if (mode == oldmode)
943 return x;
945 /* There is one case that we must handle specially: If we are converting
946 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
947 we are to interpret the constant as unsigned, gen_lowpart will do
948 the wrong thing if the constant appears negative. What we want to do is
949 make the high-order word of the constant zero, not all ones. */
951 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
952 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
953 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
955 HOST_WIDE_INT val = INTVAL (x);
957 if (oldmode != VOIDmode
958 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
960 int width = GET_MODE_BITSIZE (oldmode);
962 /* We need to zero extend VAL. */
963 val &= ((HOST_WIDE_INT) 1 << width) - 1;
966 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
969 /* We can do this with a gen_lowpart if both desired and current modes
970 are integer, and this is either a constant integer, a register, or a
971 non-volatile MEM. Except for the constant case where MODE is no
972 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
974 if ((GET_CODE (x) == CONST_INT
975 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
976 || (GET_MODE_CLASS (mode) == MODE_INT
977 && GET_MODE_CLASS (oldmode) == MODE_INT
978 && (GET_CODE (x) == CONST_DOUBLE
979 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
980 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
981 && direct_load[(int) mode])
982 || (REG_P (x)
983 && (! HARD_REGISTER_P (x)
984 || HARD_REGNO_MODE_OK (REGNO (x), mode))
985 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
986 GET_MODE_BITSIZE (GET_MODE (x)))))))))
988 /* ?? If we don't know OLDMODE, we have to assume here that
989 X does not need sign- or zero-extension. This may not be
990 the case, but it's the best we can do. */
991 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
992 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
994 HOST_WIDE_INT val = INTVAL (x);
995 int width = GET_MODE_BITSIZE (oldmode);
997 /* We must sign or zero-extend in this case. Start by
998 zero-extending, then sign extend if we need to. */
999 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1000 if (! unsignedp
1001 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1002 val |= (HOST_WIDE_INT) (-1) << width;
1004 return gen_int_mode (val, mode);
1007 return gen_lowpart (mode, x);
1010 /* Converting an integer constant into a vector mode is always
 1011 equivalent to a subreg operation. */
1012 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1014 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1015 abort ();
1016 return simplify_gen_subreg (mode, x, oldmode, 0);
1019 temp = gen_reg_rtx (mode);
1020 convert_move (temp, x, unsignedp);
1021 return temp;
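/* Illustrative sketch only; X stands for a hypothetical SImode rtx.
   Unlike convert_move, these routines return a (possibly new) rtx rather
   than storing into an existing destination:

     rtx wide = convert_to_mode (DImode, x, 1);
     rtx narrow = convert_modes (QImode, SImode, x, 0);  */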
1024 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1025 store efficiently. Due to internal GCC limitations, this is
1026 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1027 for an immediate constant. */
1029 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1031 /* Determine whether the LEN bytes can be moved by using several move
1032 instructions. Return nonzero if a call to move_by_pieces should
1033 succeed. */
1035 int
1036 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1037 unsigned int align ATTRIBUTE_UNUSED)
1039 return MOVE_BY_PIECES_P (len, align);
1042 /* Generate several move instructions to copy LEN bytes from block FROM to
1043 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1044 and TO through protect_from_queue before calling.
1046 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1047 used to push FROM to the stack.
1049 ALIGN is maximum stack alignment we can assume.
1051 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
 1052 mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
 1053 stpcpy. */
1055 rtx
1056 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1057 unsigned int align, int endp)
1059 struct move_by_pieces data;
1060 rtx to_addr, from_addr = XEXP (from, 0);
1061 unsigned int max_size = MOVE_MAX_PIECES + 1;
1062 enum machine_mode mode = VOIDmode, tmode;
1063 enum insn_code icode;
1065 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1067 data.offset = 0;
1068 data.from_addr = from_addr;
1069 if (to)
1071 to_addr = XEXP (to, 0);
1072 data.to = to;
1073 data.autinc_to
1074 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1075 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1076 data.reverse
1077 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1079 else
1081 to_addr = NULL_RTX;
1082 data.to = NULL_RTX;
1083 data.autinc_to = 1;
1084 #ifdef STACK_GROWS_DOWNWARD
1085 data.reverse = 1;
1086 #else
1087 data.reverse = 0;
1088 #endif
1090 data.to_addr = to_addr;
1091 data.from = from;
1092 data.autinc_from
1093 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1094 || GET_CODE (from_addr) == POST_INC
1095 || GET_CODE (from_addr) == POST_DEC);
1097 data.explicit_inc_from = 0;
1098 data.explicit_inc_to = 0;
1099 if (data.reverse) data.offset = len;
1100 data.len = len;
1102 /* If copying requires more than two move insns,
1103 copy addresses to registers (to make displacements shorter)
1104 and use post-increment if available. */
1105 if (!(data.autinc_from && data.autinc_to)
1106 && move_by_pieces_ninsns (len, align) > 2)
1108 /* Find the mode of the largest move... */
1109 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1110 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1111 if (GET_MODE_SIZE (tmode) < max_size)
1112 mode = tmode;
1114 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1116 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1117 data.autinc_from = 1;
1118 data.explicit_inc_from = -1;
1120 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1122 data.from_addr = copy_addr_to_reg (from_addr);
1123 data.autinc_from = 1;
1124 data.explicit_inc_from = 1;
1126 if (!data.autinc_from && CONSTANT_P (from_addr))
1127 data.from_addr = copy_addr_to_reg (from_addr);
1128 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1130 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1131 data.autinc_to = 1;
1132 data.explicit_inc_to = -1;
1134 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1136 data.to_addr = copy_addr_to_reg (to_addr);
1137 data.autinc_to = 1;
1138 data.explicit_inc_to = 1;
1140 if (!data.autinc_to && CONSTANT_P (to_addr))
1141 data.to_addr = copy_addr_to_reg (to_addr);
1144 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1145 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1146 align = MOVE_MAX * BITS_PER_UNIT;
1148 /* First move what we can in the largest integer mode, then go to
1149 successively smaller modes. */
1151 while (max_size > 1)
1153 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1154 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1155 if (GET_MODE_SIZE (tmode) < max_size)
1156 mode = tmode;
1158 if (mode == VOIDmode)
1159 break;
1161 icode = mov_optab->handlers[(int) mode].insn_code;
1162 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1163 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1165 max_size = GET_MODE_SIZE (mode);
1168 /* The code above should have handled everything. */
1169 if (data.len > 0)
1170 abort ();
1172 if (endp)
1174 rtx to1;
1176 if (data.reverse)
1177 abort ();
1178 if (data.autinc_to)
1180 if (endp == 2)
1182 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1183 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1184 else
1185 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1186 -1));
1188 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1189 data.offset);
1191 else
1193 if (endp == 2)
1194 --data.offset;
1195 to1 = adjust_address (data.to, QImode, data.offset);
1197 return to1;
1199 else
1200 return data.to;
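/* Illustrative sketch only; DEST, SRC, SIZE and ALIGN stand for
   hypothetical values where SIZE is a compile-time constant.  This mirrors
   the way emit_block_move, below, dispatches to move_by_pieces:

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (dest, src, INTVAL (size), align, 0);  */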
1203 /* Return number of insns required to move L bytes by pieces.
1204 ALIGN (in bits) is maximum alignment we can assume. */
1206 static unsigned HOST_WIDE_INT
1207 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1209 unsigned HOST_WIDE_INT n_insns = 0;
1210 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1212 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1213 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1214 align = MOVE_MAX * BITS_PER_UNIT;
1216 while (max_size > 1)
1218 enum machine_mode mode = VOIDmode, tmode;
1219 enum insn_code icode;
1221 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1222 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1223 if (GET_MODE_SIZE (tmode) < max_size)
1224 mode = tmode;
1226 if (mode == VOIDmode)
1227 break;
1229 icode = mov_optab->handlers[(int) mode].insn_code;
1230 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1231 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1233 max_size = GET_MODE_SIZE (mode);
1236 if (l)
1237 abort ();
1238 return n_insns;
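/* A worked example (a sketch assuming a 32-bit target where MOVE_MAX is 4
   and the operands are 32-bit aligned): moving 7 bytes is counted as one
   SImode move, one HImode move and one QImode move, so
   move_by_pieces_ninsns (7, 32) returns 3.  */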
1241 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1242 with move instructions for mode MODE. GENFUN is the gen_... function
1243 to make a move insn for that mode. DATA has all the other info. */
1245 static void
1246 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1247 struct move_by_pieces *data)
1249 unsigned int size = GET_MODE_SIZE (mode);
1250 rtx to1 = NULL_RTX, from1;
1252 while (data->len >= size)
1254 if (data->reverse)
1255 data->offset -= size;
1257 if (data->to)
1259 if (data->autinc_to)
1260 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1261 data->offset);
1262 else
1263 to1 = adjust_address (data->to, mode, data->offset);
1266 if (data->autinc_from)
1267 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1268 data->offset);
1269 else
1270 from1 = adjust_address (data->from, mode, data->offset);
1272 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1273 emit_insn (gen_add2_insn (data->to_addr,
1274 GEN_INT (-(HOST_WIDE_INT)size)));
1275 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1276 emit_insn (gen_add2_insn (data->from_addr,
1277 GEN_INT (-(HOST_WIDE_INT)size)));
1279 if (data->to)
1280 emit_insn ((*genfun) (to1, from1));
1281 else
1283 #ifdef PUSH_ROUNDING
1284 emit_single_push_insn (mode, from1, NULL);
1285 #else
1286 abort ();
1287 #endif
1290 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1291 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1292 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1293 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1295 if (! data->reverse)
1296 data->offset += size;
1298 data->len -= size;
1302 /* Emit code to move a block Y to a block X. This may be done with
1303 string-move instructions, with multiple scalar move instructions,
1304 or with a library call.
1306 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1307 SIZE is an rtx that says how long they are.
1308 ALIGN is the maximum alignment we can assume they have.
1309 METHOD describes what kind of copy this is, and what mechanisms may be used.
1311 Return the address of the new block, if memcpy is called and returns it,
1312 0 otherwise. */
1314 rtx
1315 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1317 bool may_use_call;
1318 rtx retval = 0;
1319 unsigned int align;
1321 switch (method)
1323 case BLOCK_OP_NORMAL:
1324 may_use_call = true;
1325 break;
1327 case BLOCK_OP_CALL_PARM:
1328 may_use_call = block_move_libcall_safe_for_call_parm ();
1330 /* Make inhibit_defer_pop nonzero around the library call
1331 to force it to pop the arguments right away. */
1332 NO_DEFER_POP;
1333 break;
1335 case BLOCK_OP_NO_LIBCALL:
1336 may_use_call = false;
1337 break;
1339 default:
1340 abort ();
1343 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1345 x = protect_from_queue (x, 1);
1346 y = protect_from_queue (y, 0);
1347 size = protect_from_queue (size, 0);
1349 if (!MEM_P (x))
1350 abort ();
1351 if (!MEM_P (y))
1352 abort ();
1353 if (size == 0)
1354 abort ();
1356 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1357 block copy is more efficient for other large modes, e.g. DCmode. */
1358 x = adjust_address (x, BLKmode, 0);
1359 y = adjust_address (y, BLKmode, 0);
1361 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1362 can be incorrect is coming from __builtin_memcpy. */
1363 if (GET_CODE (size) == CONST_INT)
1365 if (INTVAL (size) == 0)
1366 return 0;
1368 x = shallow_copy_rtx (x);
1369 y = shallow_copy_rtx (y);
1370 set_mem_size (x, size);
1371 set_mem_size (y, size);
1374 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1375 move_by_pieces (x, y, INTVAL (size), align, 0);
1376 else if (emit_block_move_via_movmem (x, y, size, align))
1378 else if (may_use_call)
1379 retval = emit_block_move_via_libcall (x, y, size);
1380 else
1381 emit_block_move_via_loop (x, y, size, align);
1383 if (method == BLOCK_OP_CALL_PARM)
1384 OK_DEFER_POP;
1386 return retval;
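/* Illustrative sketch only; DST_ADDR, SRC_ADDR and N are hypothetical.
   Copying N bytes between two BLKmode references:

     rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
     rtx src = gen_rtx_MEM (BLKmode, src_addr);
     emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);  */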
1389 /* A subroutine of emit_block_move. Returns true if calling the
1390 block move libcall will not clobber any parameters which may have
1391 already been placed on the stack. */
1393 static bool
1394 block_move_libcall_safe_for_call_parm (void)
1396 /* If arguments are pushed on the stack, then they're safe. */
1397 if (PUSH_ARGS)
1398 return true;
1400 /* If registers go on the stack anyway, any argument is sure to clobber
1401 an outgoing argument. */
1402 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1404 tree fn = emit_block_move_libcall_fn (false);
1405 (void) fn;
1406 if (REG_PARM_STACK_SPACE (fn) != 0)
1407 return false;
1409 #endif
1411 /* If any argument goes in memory, then it might clobber an outgoing
1412 argument. */
1414 CUMULATIVE_ARGS args_so_far;
1415 tree fn, arg;
1417 fn = emit_block_move_libcall_fn (false);
1418 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1420 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1421 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1423 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1424 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1425 if (!tmp || !REG_P (tmp))
1426 return false;
1427 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1428 NULL_TREE, 1))
1429 return false;
1430 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1433 return true;
1436 /* A subroutine of emit_block_move. Expand a movmem pattern;
1437 return true if successful. */
1439 static bool
1440 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1442 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1443 int save_volatile_ok = volatile_ok;
1444 enum machine_mode mode;
1446 /* Since this is a move insn, we don't care about volatility. */
1447 volatile_ok = 1;
1449 /* Try the most limited insn first, because there's no point
1450 including more than one in the machine description unless
1451 the more limited one has some advantage. */
1453 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1454 mode = GET_MODE_WIDER_MODE (mode))
1456 enum insn_code code = movmem_optab[(int) mode];
1457 insn_operand_predicate_fn pred;
1459 if (code != CODE_FOR_nothing
1460 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1461 here because if SIZE is less than the mode mask, as it is
1462 returned by the macro, it will definitely be less than the
1463 actual mode mask. */
1464 && ((GET_CODE (size) == CONST_INT
1465 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1466 <= (GET_MODE_MASK (mode) >> 1)))
1467 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1468 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1469 || (*pred) (x, BLKmode))
1470 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1471 || (*pred) (y, BLKmode))
1472 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1473 || (*pred) (opalign, VOIDmode)))
1475 rtx op2;
1476 rtx last = get_last_insn ();
1477 rtx pat;
1479 op2 = convert_to_mode (mode, size, 1);
1480 pred = insn_data[(int) code].operand[2].predicate;
1481 if (pred != 0 && ! (*pred) (op2, mode))
1482 op2 = copy_to_mode_reg (mode, op2);
1484 /* ??? When called via emit_block_move_for_call, it'd be
1485 nice if there were some way to inform the backend, so
1486 that it doesn't fail the expansion because it thinks
1487 emitting the libcall would be more efficient. */
1489 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1490 if (pat)
1492 emit_insn (pat);
1493 volatile_ok = save_volatile_ok;
1494 return true;
1496 else
1497 delete_insns_since (last);
1501 volatile_ok = save_volatile_ok;
1502 return false;
1505 /* A subroutine of emit_block_move. Expand a call to memcpy.
1506 Return the return value from memcpy, 0 otherwise. */
1508 static rtx
1509 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1511 rtx dst_addr, src_addr;
1512 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1513 enum machine_mode size_mode;
1514 rtx retval;
1516 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1518 It is unsafe to save the value generated by protect_from_queue and reuse
1519 it later. Consider what happens if emit_queue is called before the
1520 return value from protect_from_queue is used.
1522 Expansion of the CALL_EXPR below will call emit_queue before we are
1523 finished emitting RTL for argument setup. So if we are not careful we
1524 could get the wrong value for an argument.
1526 To avoid this problem we go ahead and emit code to copy the addresses of
1527 DST and SRC and SIZE into new pseudos.
1529 Note this is not strictly needed for library calls since they do not call
1530 emit_queue before loading their arguments. However, we may need to have
1531 library calls call emit_queue in the future since failing to do so could
1532 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1533 arguments in registers. */
1535 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1536 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1538 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1539 src_addr = convert_memory_address (ptr_mode, src_addr);
1541 dst_tree = make_tree (ptr_type_node, dst_addr);
1542 src_tree = make_tree (ptr_type_node, src_addr);
1544 size_mode = TYPE_MODE (sizetype);
1546 size = convert_to_mode (size_mode, size, 1);
1547 size = copy_to_mode_reg (size_mode, size);
1549 /* It is incorrect to use the libcall calling conventions to call
1550 memcpy in this context. This could be a user call to memcpy and
1551 the user may wish to examine the return value from memcpy. For
1552 targets where libcalls and normal calls have different conventions
1553 for returning pointers, we could end up generating incorrect code. */
1555 size_tree = make_tree (sizetype, size);
1557 fn = emit_block_move_libcall_fn (true);
1558 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1559 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1560 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1562 /* Now we have to build up the CALL_EXPR itself. */
1563 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1564 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1565 call_expr, arg_list, NULL_TREE);
1567 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1569 /* If we are initializing a readonly value, show the above call clobbered
1570 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1571 the delay slot scheduler might overlook conflicts and take nasty
1572 decisions. */
1573 if (RTX_UNCHANGING_P (dst))
1574 add_function_usage_to
1575 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1576 gen_rtx_CLOBBER (VOIDmode, dst),
1577 NULL_RTX));
1579 return retval;
1582 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1583 for the function we use for block copies. The first time FOR_CALL
1584 is true, we call assemble_external. */
1586 static GTY(()) tree block_move_fn;
1588 void
1589 init_block_move_fn (const char *asmspec)
1591 if (!block_move_fn)
1593 tree args, fn;
1595 fn = get_identifier ("memcpy");
1596 args = build_function_type_list (ptr_type_node, ptr_type_node,
1597 const_ptr_type_node, sizetype,
1598 NULL_TREE);
1600 fn = build_decl (FUNCTION_DECL, fn, args);
1601 DECL_EXTERNAL (fn) = 1;
1602 TREE_PUBLIC (fn) = 1;
1603 DECL_ARTIFICIAL (fn) = 1;
1604 TREE_NOTHROW (fn) = 1;
1606 block_move_fn = fn;
1609 if (asmspec)
1611 SET_DECL_RTL (block_move_fn, NULL_RTX);
1612 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
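/* Illustrative sketch only; the assembler name is hypothetical.  A target
   that provides its own memcpy entry point can redirect the block-move
   libcall:

     init_block_move_fn ("__target_memcpy");  */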
1616 static tree
1617 emit_block_move_libcall_fn (int for_call)
1619 static bool emitted_extern;
1621 if (!block_move_fn)
1622 init_block_move_fn (NULL);
1624 if (for_call && !emitted_extern)
1626 emitted_extern = true;
1627 make_decl_rtl (block_move_fn, NULL);
1628 assemble_external (block_move_fn);
1631 return block_move_fn;
1634 /* A subroutine of emit_block_move. Copy the data via an explicit
1635 loop. This is used only when libcalls are forbidden. */
1636 /* ??? It'd be nice to copy in hunks larger than QImode. */
1638 static void
1639 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1640 unsigned int align ATTRIBUTE_UNUSED)
1642 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1643 enum machine_mode iter_mode;
1645 iter_mode = GET_MODE (size);
1646 if (iter_mode == VOIDmode)
1647 iter_mode = word_mode;
1649 top_label = gen_label_rtx ();
1650 cmp_label = gen_label_rtx ();
1651 iter = gen_reg_rtx (iter_mode);
1653 emit_move_insn (iter, const0_rtx);
1655 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1656 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1657 do_pending_stack_adjust ();
1659 emit_jump (cmp_label);
1660 emit_label (top_label);
1662 tmp = convert_modes (Pmode, iter_mode, iter, true);
1663 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1664 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1665 x = change_address (x, QImode, x_addr);
1666 y = change_address (y, QImode, y_addr);
1668 emit_move_insn (x, y);
1670 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1671 true, OPTAB_LIB_WIDEN);
1672 if (tmp != iter)
1673 emit_move_insn (iter, tmp);
1675 emit_label (cmp_label);
1677 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1678 true, top_label);
1681 /* Copy all or part of a value X into registers starting at REGNO.
1682 The number of registers to be filled is NREGS. */
1684 void
1685 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1687 int i;
1688 #ifdef HAVE_load_multiple
1689 rtx pat;
1690 rtx last;
1691 #endif
1693 if (nregs == 0)
1694 return;
1696 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1697 x = validize_mem (force_const_mem (mode, x));
1699 /* See if the machine can do this with a load multiple insn. */
1700 #ifdef HAVE_load_multiple
1701 if (HAVE_load_multiple)
1703 last = get_last_insn ();
1704 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1705 GEN_INT (nregs));
1706 if (pat)
1708 emit_insn (pat);
1709 return;
1711 else
1712 delete_insns_since (last);
1714 #endif
1716 for (i = 0; i < nregs; i++)
1717 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1718 operand_subword_force (x, i, mode));
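/* Illustrative sketch only; X stands for a hypothetical two-word value.
   Loading it into consecutive hard registers starting at register 3:

     move_block_to_reg (3, x, 2, GET_MODE (x));

   move_block_from_reg, below, performs the inverse copy.  */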
1721 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1722 The number of registers to be filled is NREGS. */
1724 void
1725 move_block_from_reg (int regno, rtx x, int nregs)
1727 int i;
1729 if (nregs == 0)
1730 return;
1732 /* See if the machine can do this with a store multiple insn. */
1733 #ifdef HAVE_store_multiple
1734 if (HAVE_store_multiple)
1736 rtx last = get_last_insn ();
1737 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1738 GEN_INT (nregs));
1739 if (pat)
1741 emit_insn (pat);
1742 return;
1744 else
1745 delete_insns_since (last);
1747 #endif
1749 for (i = 0; i < nregs; i++)
1751 rtx tem = operand_subword (x, i, 1, BLKmode);
1753 if (tem == 0)
1754 abort ();
1756 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1760 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1761 ORIG, where ORIG is a non-consecutive group of registers represented by
1762 a PARALLEL. The clone is identical to the original except in that the
1763 original set of registers is replaced by a new set of pseudo registers.
1764 The new set has the same modes as the original set. */
1766 rtx
1767 gen_group_rtx (rtx orig)
1769 int i, length;
1770 rtx *tmps;
1772 if (GET_CODE (orig) != PARALLEL)
1773 abort ();
1775 length = XVECLEN (orig, 0);
1776 tmps = alloca (sizeof (rtx) * length);
1778 /* Skip a NULL entry in first slot. */
1779 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1781 if (i)
1782 tmps[0] = 0;
1784 for (; i < length; i++)
1786 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1787 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1789 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1792 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1795 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1796 where DST is non-consecutive registers represented by a PARALLEL.
1797 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1798 if not known. */
1800 void
1801 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1803 rtx *tmps, src;
1804 int start, i;
1806 if (GET_CODE (dst) != PARALLEL)
1807 abort ();
1809 /* Check for a NULL entry, used to indicate that the parameter goes
1810 both on the stack and in registers. */
1811 if (XEXP (XVECEXP (dst, 0, 0), 0))
1812 start = 0;
1813 else
1814 start = 1;
1816 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1818 /* Process the pieces. */
1819 for (i = start; i < XVECLEN (dst, 0); i++)
1821 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1822 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1823 unsigned int bytelen = GET_MODE_SIZE (mode);
1824 int shift = 0;
1826 /* Handle trailing fragments that run over the size of the struct. */
1827 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1829 /* Arrange to shift the fragment to where it belongs.
1830 extract_bit_field loads to the lsb of the reg. */
1831 if (
1832 #ifdef BLOCK_REG_PADDING
1833 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1834 == (BYTES_BIG_ENDIAN ? upward : downward)
1835 #else
1836 BYTES_BIG_ENDIAN
1837 #endif
1839 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1840 bytelen = ssize - bytepos;
1841 if (bytelen <= 0)
1842 abort ();
1845 /* If we won't be loading directly from memory, protect the real source
1846 from strange tricks we might play; but make sure that the source can
1847 be loaded directly into the destination. */
1848 src = orig_src;
1849 if (!MEM_P (orig_src)
1850 && (!CONSTANT_P (orig_src)
1851 || (GET_MODE (orig_src) != mode
1852 && GET_MODE (orig_src) != VOIDmode)))
1854 if (GET_MODE (orig_src) == VOIDmode)
1855 src = gen_reg_rtx (mode);
1856 else
1857 src = gen_reg_rtx (GET_MODE (orig_src));
1859 emit_move_insn (src, orig_src);
1862 /* Optimize the access just a bit. */
1863 if (MEM_P (src)
1864 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1865 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1866 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1867 && bytelen == GET_MODE_SIZE (mode))
1869 tmps[i] = gen_reg_rtx (mode);
1870 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1872 else if (GET_CODE (src) == CONCAT)
1874 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1875 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1877 if ((bytepos == 0 && bytelen == slen0)
1878 || (bytepos != 0 && bytepos + bytelen <= slen))
1880 /* The following assumes that the concatenated objects all
1881 have the same size. In this case, a simple calculation
1882 can be used to determine the object and the bit field
1883 to be extracted. */
1884 tmps[i] = XEXP (src, bytepos / slen0);
1885 if (! CONSTANT_P (tmps[i])
1886 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1887 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1888 (bytepos % slen0) * BITS_PER_UNIT,
1889 1, NULL_RTX, mode, mode);
1891 else if (bytepos == 0)
1893 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1894 emit_move_insn (mem, src);
1895 tmps[i] = adjust_address (mem, mode, 0);
1897 else
1898 abort ();
1900 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1901 SIMD register, which is currently broken. While we get GCC
1902 to emit proper RTL for these cases, let's dump to memory. */
1903 else if (VECTOR_MODE_P (GET_MODE (dst))
1904 && REG_P (src))
1906 int slen = GET_MODE_SIZE (GET_MODE (src));
1907 rtx mem;
1909 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1910 emit_move_insn (mem, src);
1911 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1913 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1914 && XVECLEN (dst, 0) > 1)
1915 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1916 else if (CONSTANT_P (src)
1917 || (REG_P (src) && GET_MODE (src) == mode))
1918 tmps[i] = src;
1919 else
1920 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1921 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1922 mode, mode);
1924 if (shift)
1925 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1926 build_int_2 (shift, 0), tmps[i], 0);
1929 emit_queue ();
1931 /* Copy the extracted pieces into the proper (probable) hard regs. */
1932 for (i = start; i < XVECLEN (dst, 0); i++)
1933 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1936 /* Emit code to move a block SRC to block DST, where SRC and DST are
1937 non-consecutive groups of registers, each represented by a PARALLEL. */
1939 void
1940 emit_group_move (rtx dst, rtx src)
1942 int i;
1944 if (GET_CODE (src) != PARALLEL
1945 || GET_CODE (dst) != PARALLEL
1946 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1947 abort ();
1949 /* Skip first entry if NULL. */
1950 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1951 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1952 XEXP (XVECEXP (src, 0, i), 0));
1955 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1956 where SRC is non-consecutive registers represented by a PARALLEL.
1957 SSIZE represents the total size of block ORIG_DST, or -1 if not
1958 known. */
1960 void
1961 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1963 rtx *tmps, dst;
1964 int start, i;
1966 if (GET_CODE (src) != PARALLEL)
1967 abort ();
1969 /* Check for a NULL entry, used to indicate that the parameter goes
1970 both on the stack and in registers. */
1971 if (XEXP (XVECEXP (src, 0, 0), 0))
1972 start = 0;
1973 else
1974 start = 1;
1976 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1978 /* Copy the (probable) hard regs into pseudos. */
1979 for (i = start; i < XVECLEN (src, 0); i++)
1981 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1982 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1983 emit_move_insn (tmps[i], reg);
1985 emit_queue ();
1987 /* If we won't be storing directly into memory, protect the real destination
1988 from strange tricks we might play. */
1989 dst = orig_dst;
1990 if (GET_CODE (dst) == PARALLEL)
1992 rtx temp;
1994 /* We can get a PARALLEL dst if there is a conditional expression in
1995 a return statement. In that case, the dst and src are the same,
1996 so no action is necessary. */
1997 if (rtx_equal_p (dst, src))
1998 return;
2000 /* It is unclear if we can ever reach here, but we may as well handle
2001 it. Allocate a temporary, and split this into a store/load to/from
2002 the temporary. */
2004 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2005 emit_group_store (temp, src, type, ssize);
2006 emit_group_load (dst, temp, type, ssize);
2007 return;
2009 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2011 dst = gen_reg_rtx (GET_MODE (orig_dst));
2012 /* Make life a bit easier for combine. */
2013 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2016 /* Process the pieces. */
2017 for (i = start; i < XVECLEN (src, 0); i++)
2019 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2020 enum machine_mode mode = GET_MODE (tmps[i]);
2021 unsigned int bytelen = GET_MODE_SIZE (mode);
2022 rtx dest = dst;
2024 /* Handle trailing fragments that run over the size of the struct. */
2025 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2027 /* store_bit_field always takes its value from the lsb.
2028 Move the fragment to the lsb if it's not already there. */
2029 if (
2030 #ifdef BLOCK_REG_PADDING
2031 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2032 == (BYTES_BIG_ENDIAN ? upward : downward)
2033 #else
2034 BYTES_BIG_ENDIAN
2035 #endif
2038 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2039 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2040 build_int_2 (shift, 0), tmps[i], 0);
2042 bytelen = ssize - bytepos;
2045 if (GET_CODE (dst) == CONCAT)
2047 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2048 dest = XEXP (dst, 0);
2049 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2051 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2052 dest = XEXP (dst, 1);
2054 else if (bytepos == 0 && XVECLEN (src, 0))
2056 dest = assign_stack_temp (GET_MODE (dest),
2057 GET_MODE_SIZE (GET_MODE (dest)), 0);
2058 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2059 tmps[i]);
2060 dst = dest;
2061 break;
2063 else
2064 abort ();
2067 /* Optimize the access just a bit. */
2068 if (MEM_P (dest)
2069 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2070 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2071 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2072 && bytelen == GET_MODE_SIZE (mode))
2073 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2074 else
2075 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2076 mode, tmps[i]);
2079 emit_queue ();
2081 /* Copy from the pseudo into the (probable) hard reg. */
2082 if (orig_dst != dst)
2083 emit_move_insn (orig_dst, dst);
2086 /* Generate code to copy a BLKmode object of TYPE out of a
2087 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2088 is null, a stack temporary is created. TGTBLK is returned.
2090 The purpose of this routine is to handle functions that return
2091 BLKmode structures in registers. Some machines (the PA for example)
2092 want to return all small structures in registers regardless of the
2093 structure's alignment. */
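/* Worked example (illustrative): on a 32-bit target, a 6-byte struct
   leaves bytes % UNITS_PER_WORD == 2, so when the padding ends up on
   the left the code below sets
     padding_correction = 32 - 2 * 8 = 16
   and the extraction loop skips those 16 padding bits of the first
   source word before copying.  */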
2095 rtx
2096 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2098 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2099 rtx src = NULL, dst = NULL;
2100 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2101 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2103 if (tgtblk == 0)
2105 tgtblk = assign_temp (build_qualified_type (type,
2106 (TYPE_QUALS (type)
2107 | TYPE_QUAL_CONST)),
2108 0, 1, 1);
2109 preserve_temp_slots (tgtblk);
2112 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2113 into a new pseudo which is a full word. */
2115 if (GET_MODE (srcreg) != BLKmode
2116 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2117 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2119 /* If the structure doesn't take up a whole number of words, see whether
2120 SRCREG is padded on the left or on the right. If it's on the left,
2121 set PADDING_CORRECTION to the number of bits to skip.
2123 In most ABIs, the structure will be returned at the least significant end of
2124 the register, which translates to right padding on little-endian
2125 targets and left padding on big-endian targets. The opposite
2126 holds if the structure is returned at the most significant
2127 end of the register. */
2128 if (bytes % UNITS_PER_WORD != 0
2129 && (targetm.calls.return_in_msb (type)
2130 ? !BYTES_BIG_ENDIAN
2131 : BYTES_BIG_ENDIAN))
2132 padding_correction
2133 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2135 /* Copy the structure BITSIZE bits at a time.
2137 We could probably emit more efficient code for machines which do not use
2138 strict alignment, but it doesn't seem worth the effort at the current
2139 time. */
2140 for (bitpos = 0, xbitpos = padding_correction;
2141 bitpos < bytes * BITS_PER_UNIT;
2142 bitpos += bitsize, xbitpos += bitsize)
2144 /* We need a new source operand each time xbitpos is on a
2145 word boundary and when xbitpos == padding_correction
2146 (the first time through). */
2147 if (xbitpos % BITS_PER_WORD == 0
2148 || xbitpos == padding_correction)
2149 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2150 GET_MODE (srcreg));
2152 /* We need a new destination operand each time bitpos is on
2153 a word boundary. */
2154 if (bitpos % BITS_PER_WORD == 0)
2155 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2157 /* Use xbitpos for the source extraction (right justified) and
2158 bitpos for the destination store (left justified). */
2159 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2160 extract_bit_field (src, bitsize,
2161 xbitpos % BITS_PER_WORD, 1,
2162 NULL_RTX, word_mode, word_mode));
2165 return tgtblk;
2168 /* Add a USE expression for REG to the (possibly empty) list pointed
2169 to by CALL_FUSAGE. REG must denote a hard register. */
2171 void
2172 use_reg (rtx *call_fusage, rtx reg)
2174 if (!REG_P (reg)
2175 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2176 abort ();
2178 *call_fusage
2179 = gen_rtx_EXPR_LIST (VOIDmode,
2180 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2183 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2184 starting at REGNO. All of these registers must be hard registers. */
2186 void
2187 use_regs (rtx *call_fusage, int regno, int nregs)
2189 int i;
2191 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2192 abort ();
2194 for (i = 0; i < nregs; i++)
2195 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2198 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2199 PARALLEL REGS. This is for calls that pass values in multiple
2200 non-contiguous locations. The Irix 6 ABI has examples of this. */
2202 void
2203 use_group_regs (rtx *call_fusage, rtx regs)
2205 int i;
2207 for (i = 0; i < XVECLEN (regs, 0); i++)
2209 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2211 /* A NULL entry means the parameter goes both on the stack and in
2212 registers. This can also be a MEM for targets that pass values
2213 partially on the stack and partially in registers. */
2214 if (reg != 0 && REG_P (reg))
2215 use_reg (call_fusage, reg);
2220 /* Determine whether the LEN bytes generated by CONSTFUN can be
2221 stored to memory using several move instructions. CONSTFUNDATA is
2222 a pointer which will be passed as argument in every CONSTFUN call.
2223 ALIGN is maximum alignment we can assume. Return nonzero if a
2224 call to store_by_pieces should succeed. */
2226 int
2227 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2228 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2229 void *constfundata, unsigned int align)
2231 unsigned HOST_WIDE_INT max_size, l;
2232 HOST_WIDE_INT offset = 0;
2233 enum machine_mode mode, tmode;
2234 enum insn_code icode;
2235 int reverse;
2236 rtx cst;
2238 if (len == 0)
2239 return 1;
2241 if (! STORE_BY_PIECES_P (len, align))
2242 return 0;
2244 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2245 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2246 align = MOVE_MAX * BITS_PER_UNIT;
2248 /* We would first store what we can in the largest integer mode, then go to
2249 successively smaller modes. */
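/* For instance (illustrative): with 4-byte words and sufficient
   alignment, len == 7 is checked as one SImode store, then one HImode
   store, then one QImode store (4 + 2 + 1 bytes), mirroring what
   store_by_pieces_1 will actually emit.  */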
2251 for (reverse = 0;
2252 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2253 reverse++)
2255 l = len;
2256 mode = VOIDmode;
2257 max_size = STORE_MAX_PIECES + 1;
2258 while (max_size > 1)
2260 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2262 if (GET_MODE_SIZE (tmode) < max_size)
2263 mode = tmode;
2265 if (mode == VOIDmode)
2266 break;
2268 icode = mov_optab->handlers[(int) mode].insn_code;
2269 if (icode != CODE_FOR_nothing
2270 && align >= GET_MODE_ALIGNMENT (mode))
2272 unsigned int size = GET_MODE_SIZE (mode);
2274 while (l >= size)
2276 if (reverse)
2277 offset -= size;
2279 cst = (*constfun) (constfundata, offset, mode);
2280 if (!LEGITIMATE_CONSTANT_P (cst))
2281 return 0;
2283 if (!reverse)
2284 offset += size;
2286 l -= size;
2290 max_size = GET_MODE_SIZE (mode);
2293 /* The code above should have handled everything. */
2294 if (l != 0)
2295 abort ();
2298 return 1;
2301 /* Generate several move instructions to store LEN bytes generated by
2302 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2303 pointer which will be passed as argument in every CONSTFUN call.
2304 ALIGN is maximum alignment we can assume.
2305 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2306 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2307 stpcpy. */
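/* Usage sketch (hypothetical caller; `my_constfun' and `my_data' are
   made-up names for illustration):

     if (can_store_by_pieces (len, my_constfun, my_data, align))
       dest = store_by_pieces (dest_mem, len, my_constfun, my_data,
                               align, 0);

   where my_constfun (my_data, offset, mode) must return a legitimate
   constant of MODE holding the bytes at OFFSET.  This is the pattern
   the string/memory builtins use when expanding calls such as strcpy
   with a constant source.  */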
2309 rtx
2310 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2311 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2312 void *constfundata, unsigned int align, int endp)
2314 struct store_by_pieces data;
2316 if (len == 0)
2318 if (endp == 2)
2319 abort ();
2320 return to;
2323 if (! STORE_BY_PIECES_P (len, align))
2324 abort ();
2325 to = protect_from_queue (to, 1);
2326 data.constfun = constfun;
2327 data.constfundata = constfundata;
2328 data.len = len;
2329 data.to = to;
2330 store_by_pieces_1 (&data, align);
2331 if (endp)
2333 rtx to1;
2335 if (data.reverse)
2336 abort ();
2337 if (data.autinc_to)
2339 if (endp == 2)
2341 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2342 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2343 else
2344 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2345 -1));
2347 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2348 data.offset);
2350 else
2352 if (endp == 2)
2353 --data.offset;
2354 to1 = adjust_address (data.to, QImode, data.offset);
2356 return to1;
2358 else
2359 return data.to;
2362 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2363 rtx with BLKmode). The caller must pass TO through protect_from_queue
2364 before calling. ALIGN is maximum alignment we can assume. */
2366 static void
2367 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2369 struct store_by_pieces data;
2371 if (len == 0)
2372 return;
2374 data.constfun = clear_by_pieces_1;
2375 data.constfundata = NULL;
2376 data.len = len;
2377 data.to = to;
2378 store_by_pieces_1 (&data, align);
2381 /* Callback routine for clear_by_pieces.
2382 Return const0_rtx unconditionally. */
2384 static rtx
2385 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2386 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2387 enum machine_mode mode ATTRIBUTE_UNUSED)
2389 return const0_rtx;
2392 /* Subroutine of clear_by_pieces and store_by_pieces.
2393 Generate several move instructions to store LEN bytes of block TO. (A MEM
2394 rtx with BLKmode). The caller must pass TO through protect_from_queue
2395 before calling. ALIGN is maximum alignment we can assume. */
2397 static void
2398 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2399 unsigned int align ATTRIBUTE_UNUSED)
2401 rtx to_addr = XEXP (data->to, 0);
2402 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2403 enum machine_mode mode = VOIDmode, tmode;
2404 enum insn_code icode;
2406 data->offset = 0;
2407 data->to_addr = to_addr;
2408 data->autinc_to
2409 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2410 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2412 data->explicit_inc_to = 0;
2413 data->reverse
2414 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2415 if (data->reverse)
2416 data->offset = data->len;
2418 /* If storing requires more than two move insns,
2419 copy addresses to registers (to make displacements shorter)
2420 and use post-increment if available. */
2421 if (!data->autinc_to
2422 && move_by_pieces_ninsns (data->len, align) > 2)
2424 /* Determine the main mode we'll be using. */
2425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2426 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2427 if (GET_MODE_SIZE (tmode) < max_size)
2428 mode = tmode;
2430 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2432 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2433 data->autinc_to = 1;
2434 data->explicit_inc_to = -1;
2437 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2438 && ! data->autinc_to)
2440 data->to_addr = copy_addr_to_reg (to_addr);
2441 data->autinc_to = 1;
2442 data->explicit_inc_to = 1;
2445 if (!data->autinc_to && CONSTANT_P (to_addr))
2446 data->to_addr = copy_addr_to_reg (to_addr);
2449 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2450 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2451 align = MOVE_MAX * BITS_PER_UNIT;
2453 /* First store what we can in the largest integer mode, then go to
2454 successively smaller modes. */
2456 while (max_size > 1)
2458 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2459 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2460 if (GET_MODE_SIZE (tmode) < max_size)
2461 mode = tmode;
2463 if (mode == VOIDmode)
2464 break;
2466 icode = mov_optab->handlers[(int) mode].insn_code;
2467 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2468 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2470 max_size = GET_MODE_SIZE (mode);
2473 /* The code above should have handled everything. */
2474 if (data->len != 0)
2475 abort ();
2478 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2479 with move instructions for mode MODE. GENFUN is the gen_... function
2480 to make a move insn for that mode. DATA has all the other info. */
2482 static void
2483 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2484 struct store_by_pieces *data)
2486 unsigned int size = GET_MODE_SIZE (mode);
2487 rtx to1, cst;
2489 while (data->len >= size)
2491 if (data->reverse)
2492 data->offset -= size;
2494 if (data->autinc_to)
2495 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2496 data->offset);
2497 else
2498 to1 = adjust_address (data->to, mode, data->offset);
2500 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2501 emit_insn (gen_add2_insn (data->to_addr,
2502 GEN_INT (-(HOST_WIDE_INT) size)));
2504 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2505 emit_insn ((*genfun) (to1, cst));
2507 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2508 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2510 if (! data->reverse)
2511 data->offset += size;
2513 data->len -= size;
2517 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2518 its length in bytes. */
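/* For instance (illustrative; `target_mem' is a stand-in name):
   clearing a 24-byte BLKmode temporary,

     clear_storage (target_mem, GEN_INT (24));

   is expanded, in order of preference, as a sequence of zero stores
   by pieces, a clrmem machine pattern, or a call to memset, as the
   body below shows.  */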
2520 rtx
2521 clear_storage (rtx object, rtx size)
2523 rtx retval = 0;
2524 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2525 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2527 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2528 just move a zero. Otherwise, do this a piece at a time. */
2529 if (GET_MODE (object) != BLKmode
2530 && GET_CODE (size) == CONST_INT
2531 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2532 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2533 else
2535 object = protect_from_queue (object, 1);
2536 size = protect_from_queue (size, 0);
2538 if (size == const0_rtx)
2540 else if (GET_CODE (size) == CONST_INT
2541 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2542 clear_by_pieces (object, INTVAL (size), align);
2543 else if (clear_storage_via_clrmem (object, size, align))
2545 else
2546 retval = clear_storage_via_libcall (object, size);
2549 return retval;
2552 /* A subroutine of clear_storage. Expand a clrmem pattern;
2553 return true if successful. */
2555 static bool
2556 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2558 /* Try the most limited insn first, because there's no point
2559 including more than one in the machine description unless
2560 the more limited one has some advantage. */
2562 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2563 enum machine_mode mode;
2565 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2566 mode = GET_MODE_WIDER_MODE (mode))
2568 enum insn_code code = clrmem_optab[(int) mode];
2569 insn_operand_predicate_fn pred;
2571 if (code != CODE_FOR_nothing
2572 /* We don't need MODE to be narrower than
2573 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2574 the mode mask, as it is returned by the macro, it will
2575 definitely be less than the actual mode mask. */
2576 && ((GET_CODE (size) == CONST_INT
2577 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2578 <= (GET_MODE_MASK (mode) >> 1)))
2579 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2580 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2581 || (*pred) (object, BLKmode))
2582 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2583 || (*pred) (opalign, VOIDmode)))
2585 rtx op1;
2586 rtx last = get_last_insn ();
2587 rtx pat;
2589 op1 = convert_to_mode (mode, size, 1);
2590 pred = insn_data[(int) code].operand[1].predicate;
2591 if (pred != 0 && ! (*pred) (op1, mode))
2592 op1 = copy_to_mode_reg (mode, op1);
2594 pat = GEN_FCN ((int) code) (object, op1, opalign);
2595 if (pat)
2597 emit_insn (pat);
2598 return true;
2600 else
2601 delete_insns_since (last);
2605 return false;
2608 /* A subroutine of clear_storage. Expand a call to memset.
2609 Return the return value of memset, 0 otherwise. */
2611 static rtx
2612 clear_storage_via_libcall (rtx object, rtx size)
2614 tree call_expr, arg_list, fn, object_tree, size_tree;
2615 enum machine_mode size_mode;
2616 rtx retval;
2618 /* OBJECT or SIZE may have been passed through protect_from_queue.
2620 It is unsafe to save the value generated by protect_from_queue
2621 and reuse it later. Consider what happens if emit_queue is
2622 called before the return value from protect_from_queue is used.
2624 Expansion of the CALL_EXPR below will call emit_queue before
2625 we are finished emitting RTL for argument setup. So if we are
2626 not careful we could get the wrong value for an argument.
2628 To avoid this problem we go ahead and emit code to copy OBJECT
2629 and SIZE into new pseudos.
2631 Note this is not strictly needed for library calls since they
2632 do not call emit_queue before loading their arguments. However,
2633 we may need to have library calls call emit_queue in the future
2634 since failing to do so could cause problems for targets which
2635 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2637 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2639 size_mode = TYPE_MODE (sizetype);
2640 size = convert_to_mode (size_mode, size, 1);
2641 size = copy_to_mode_reg (size_mode, size);
2643 /* It is incorrect to use the libcall calling conventions to call
2644 memset in this context. This could be a user call to memset and
2645 the user may wish to examine the return value from memset. For
2646 targets where libcalls and normal calls have different conventions
2647 for returning pointers, we could end up generating incorrect code. */
2649 object_tree = make_tree (ptr_type_node, object);
2650 size_tree = make_tree (sizetype, size);
2652 fn = clear_storage_libcall_fn (true);
2653 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2654 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2655 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2657 /* Now we have to build up the CALL_EXPR itself. */
2658 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2659 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2660 call_expr, arg_list, NULL_TREE);
2662 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2664 /* If we are initializing a readonly value, show the above call
2665 clobbered it. Otherwise, a load from it may erroneously be
2666 hoisted from a loop. */
2667 if (RTX_UNCHANGING_P (object))
2668 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2670 return retval;
2673 /* A subroutine of clear_storage_via_libcall. Create the tree node
2674 for the function we use for block clears. The first time FOR_CALL
2675 is true, we call assemble_external. */
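/* Descriptive note: the declaration built below has the standard
   prototype void *memset (void *, int, size_t), so the call emitted by
   clear_storage_via_libcall uses the normal calling conventions rather
   than the libcall ones, as explained above.  */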
2677 static GTY(()) tree block_clear_fn;
2679 void
2680 init_block_clear_fn (const char *asmspec)
2682 if (!block_clear_fn)
2684 tree fn, args;
2686 fn = get_identifier ("memset");
2687 args = build_function_type_list (ptr_type_node, ptr_type_node,
2688 integer_type_node, sizetype,
2689 NULL_TREE);
2691 fn = build_decl (FUNCTION_DECL, fn, args);
2692 DECL_EXTERNAL (fn) = 1;
2693 TREE_PUBLIC (fn) = 1;
2694 DECL_ARTIFICIAL (fn) = 1;
2695 TREE_NOTHROW (fn) = 1;
2697 block_clear_fn = fn;
2700 if (asmspec)
2702 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2703 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2707 static tree
2708 clear_storage_libcall_fn (int for_call)
2710 static bool emitted_extern;
2712 if (!block_clear_fn)
2713 init_block_clear_fn (NULL);
2715 if (for_call && !emitted_extern)
2717 emitted_extern = true;
2718 make_decl_rtl (block_clear_fn, NULL);
2719 assemble_external (block_clear_fn);
2722 return block_clear_fn;
2725 /* Generate code to copy Y into X.
2726 Both Y and X must have the same mode, except that
2727 Y can be a constant with VOIDmode.
2728 This mode cannot be BLKmode; use emit_block_move for that.
2730 Return the last instruction emitted. */
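/* Typical use (illustrative):

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   is valid because the CONST_INT has VOIDmode and takes its mode from
   the destination, as described above.  */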
2732 rtx
2733 emit_move_insn (rtx x, rtx y)
2735 enum machine_mode mode = GET_MODE (x);
2736 rtx y_cst = NULL_RTX;
2737 rtx last_insn, set;
2739 x = protect_from_queue (x, 1);
2740 y = protect_from_queue (y, 0);
2742 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2743 abort ();
2745 if (CONSTANT_P (y))
2747 if (optimize
2748 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2749 && (last_insn = compress_float_constant (x, y)))
2750 return last_insn;
2752 y_cst = y;
2754 if (!LEGITIMATE_CONSTANT_P (y))
2756 y = force_const_mem (mode, y);
2758 /* If the target's cannot_force_const_mem prevented the spill,
2759 assume that the target's move expanders will also take care
2760 of the non-legitimate constant. */
2761 if (!y)
2762 y = y_cst;
2766 /* If X or Y are memory references, verify that their addresses are valid
2767 for the machine. */
2768 if (MEM_P (x)
2769 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2770 && ! push_operand (x, GET_MODE (x)))
2771 || (flag_force_addr
2772 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2773 x = validize_mem (x);
2775 if (MEM_P (y)
2776 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2777 || (flag_force_addr
2778 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2779 y = validize_mem (y);
2781 if (mode == BLKmode)
2782 abort ();
2784 last_insn = emit_move_insn_1 (x, y);
2786 if (y_cst && REG_P (x)
2787 && (set = single_set (last_insn)) != NULL_RTX
2788 && SET_DEST (set) == x
2789 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2790 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2792 return last_insn;
2795 /* Low level part of emit_move_insn.
2796 Called just like emit_move_insn, but assumes X and Y
2797 are basically valid. */
2799 rtx
2800 emit_move_insn_1 (rtx x, rtx y)
2802 enum machine_mode mode = GET_MODE (x);
2803 enum machine_mode submode;
2804 enum mode_class class = GET_MODE_CLASS (mode);
2806 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2807 abort ();
2809 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2810 return
2811 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2813 /* Expand complex moves by moving real part and imag part, if possible. */
2814 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2815 && BLKmode != (submode = GET_MODE_INNER (mode))
2816 && (mov_optab->handlers[(int) submode].insn_code
2817 != CODE_FOR_nothing))
2819 /* Don't split destination if it is a stack push. */
2820 int stack = push_operand (x, GET_MODE (x));
2822 #ifdef PUSH_ROUNDING
2823 /* In case we output to the stack, but the size is smaller than the
2824 machine can push exactly, we need to use move instructions. */
2825 if (stack
2826 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2827 != GET_MODE_SIZE (submode)))
2829 rtx temp;
2830 HOST_WIDE_INT offset1, offset2;
2832 /* Do not use anti_adjust_stack, since we don't want to update
2833 stack_pointer_delta. */
2834 temp = expand_binop (Pmode,
2835 #ifdef STACK_GROWS_DOWNWARD
2836 sub_optab,
2837 #else
2838 add_optab,
2839 #endif
2840 stack_pointer_rtx,
2841 GEN_INT
2842 (PUSH_ROUNDING
2843 (GET_MODE_SIZE (GET_MODE (x)))),
2844 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2846 if (temp != stack_pointer_rtx)
2847 emit_move_insn (stack_pointer_rtx, temp);
2849 #ifdef STACK_GROWS_DOWNWARD
2850 offset1 = 0;
2851 offset2 = GET_MODE_SIZE (submode);
2852 #else
2853 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2854 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2855 + GET_MODE_SIZE (submode));
2856 #endif
2858 emit_move_insn (change_address (x, submode,
2859 gen_rtx_PLUS (Pmode,
2860 stack_pointer_rtx,
2861 GEN_INT (offset1))),
2862 gen_realpart (submode, y));
2863 emit_move_insn (change_address (x, submode,
2864 gen_rtx_PLUS (Pmode,
2865 stack_pointer_rtx,
2866 GEN_INT (offset2))),
2867 gen_imagpart (submode, y));
2869 else
2870 #endif
2871 /* If this is a stack push, push the highpart first, so it
2872 will be in the argument order.
2874 In that case, change_address is used only to convert
2875 the mode, not to change the address. */
2876 if (stack)
2878 /* Note that the real part always precedes the imag part in memory
2879 regardless of machine's endianness. */
2880 #ifdef STACK_GROWS_DOWNWARD
2881 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2882 gen_imagpart (submode, y));
2883 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2884 gen_realpart (submode, y));
2885 #else
2886 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2887 gen_realpart (submode, y));
2888 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2889 gen_imagpart (submode, y));
2890 #endif
2892 else
2894 rtx realpart_x, realpart_y;
2895 rtx imagpart_x, imagpart_y;
2897 /* If this is a complex value with each part being smaller than a
2898 word, the usual calling sequence will likely pack the pieces into
2899 a single register. Unfortunately, SUBREG of hard registers only
2900 deals in terms of words, so we have a problem converting input
2901 arguments to the CONCAT of two registers that is used elsewhere
2902 for complex values. If this is before reload, we can copy it into
2903 memory and reload. FIXME, we should see about using extract and
2904 insert on integer registers, but complex short and complex char
2905 variables should be rarely used. */
2906 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2907 && (reload_in_progress | reload_completed) == 0)
2909 int packed_dest_p
2910 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2911 int packed_src_p
2912 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2914 if (packed_dest_p || packed_src_p)
2916 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2917 ? MODE_FLOAT : MODE_INT);
2919 enum machine_mode reg_mode
2920 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2922 if (reg_mode != BLKmode)
2924 rtx mem = assign_stack_temp (reg_mode,
2925 GET_MODE_SIZE (mode), 0);
2926 rtx cmem = adjust_address (mem, mode, 0);
2928 if (packed_dest_p)
2930 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2932 emit_move_insn_1 (cmem, y);
2933 return emit_move_insn_1 (sreg, mem);
2935 else
2937 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2939 emit_move_insn_1 (mem, sreg);
2940 return emit_move_insn_1 (x, cmem);
2946 realpart_x = gen_realpart (submode, x);
2947 realpart_y = gen_realpart (submode, y);
2948 imagpart_x = gen_imagpart (submode, x);
2949 imagpart_y = gen_imagpart (submode, y);
2951 /* Show the output dies here. This is necessary for SUBREGs
2952 of pseudos since we cannot track their lifetimes correctly;
2953 hard regs shouldn't appear here except as return values.
2954 We never want to emit such a clobber after reload. */
2955 if (x != y
2956 && ! (reload_in_progress || reload_completed)
2957 && (GET_CODE (realpart_x) == SUBREG
2958 || GET_CODE (imagpart_x) == SUBREG))
2959 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2961 emit_move_insn (realpart_x, realpart_y);
2962 emit_move_insn (imagpart_x, imagpart_y);
2965 return get_last_insn ();
2968 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2969 find a mode to do it in. If we have a movcc, use it. Otherwise,
2970 find the MODE_INT mode of the same width. */
2971 else if (GET_MODE_CLASS (mode) == MODE_CC
2972 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2974 enum insn_code insn_code;
2975 enum machine_mode tmode = VOIDmode;
2976 rtx x1 = x, y1 = y;
2978 if (mode != CCmode
2979 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2980 tmode = CCmode;
2981 else
2982 for (tmode = QImode; tmode != VOIDmode;
2983 tmode = GET_MODE_WIDER_MODE (tmode))
2984 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2985 break;
2987 if (tmode == VOIDmode)
2988 abort ();
2990 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2991 may call change_address which is not appropriate if we were
2992 called when a reload was in progress. We don't have to worry
2993 about changing the address since the size in bytes is supposed to
2994 be the same. Copy the MEM to change the mode and move any
2995 substitutions from the old MEM to the new one. */
2997 if (reload_in_progress)
2999 x = gen_lowpart_common (tmode, x1);
3000 if (x == 0 && MEM_P (x1))
3002 x = adjust_address_nv (x1, tmode, 0);
3003 copy_replacements (x1, x);
3006 y = gen_lowpart_common (tmode, y1);
3007 if (y == 0 && MEM_P (y1))
3009 y = adjust_address_nv (y1, tmode, 0);
3010 copy_replacements (y1, y);
3013 else
3015 x = gen_lowpart (tmode, x);
3016 y = gen_lowpart (tmode, y);
3019 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3020 return emit_insn (GEN_FCN (insn_code) (x, y));
3023 /* Try using a move pattern for the corresponding integer mode. This is
3024 only safe when simplify_subreg can convert MODE constants into integer
3025 constants. At present, it can only do this reliably if the value
3026 fits within a HOST_WIDE_INT. */
3027 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3028 && (submode = int_mode_for_mode (mode)) != BLKmode
3029 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3030 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3031 (simplify_gen_subreg (submode, x, mode, 0),
3032 simplify_gen_subreg (submode, y, mode, 0)));
3034 /* This will handle any multi-word or full-word mode that lacks a move_insn
3035 pattern. However, you will get better code if you define such patterns,
3036 even if they must turn into multiple assembler instructions. */
3037 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3039 rtx last_insn = 0;
3040 rtx seq, inner;
3041 int need_clobber;
3042 int i;
3044 #ifdef PUSH_ROUNDING
3046 /* If X is a push on the stack, do the push now and replace
3047 X with a reference to the stack pointer. */
3048 if (push_operand (x, GET_MODE (x)))
3050 rtx temp;
3051 enum rtx_code code;
3053 /* Do not use anti_adjust_stack, since we don't want to update
3054 stack_pointer_delta. */
3055 temp = expand_binop (Pmode,
3056 #ifdef STACK_GROWS_DOWNWARD
3057 sub_optab,
3058 #else
3059 add_optab,
3060 #endif
3061 stack_pointer_rtx,
3062 GEN_INT
3063 (PUSH_ROUNDING
3064 (GET_MODE_SIZE (GET_MODE (x)))),
3065 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3067 if (temp != stack_pointer_rtx)
3068 emit_move_insn (stack_pointer_rtx, temp);
3070 code = GET_CODE (XEXP (x, 0));
3072 /* Just hope that small offsets off SP are OK. */
3073 if (code == POST_INC)
3074 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3075 GEN_INT (-((HOST_WIDE_INT)
3076 GET_MODE_SIZE (GET_MODE (x)))));
3077 else if (code == POST_DEC)
3078 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3079 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3080 else
3081 temp = stack_pointer_rtx;
3083 x = change_address (x, VOIDmode, temp);
3085 #endif
3087 /* If we are in reload, see if either operand is a MEM whose address
3088 is scheduled for replacement. */
3089 if (reload_in_progress && MEM_P (x)
3090 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3091 x = replace_equiv_address_nv (x, inner);
3092 if (reload_in_progress && MEM_P (y)
3093 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3094 y = replace_equiv_address_nv (y, inner);
3096 start_sequence ();
3098 need_clobber = 0;
3099 for (i = 0;
3100 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3101 i++)
3103 rtx xpart = operand_subword (x, i, 1, mode);
3104 rtx ypart = operand_subword (y, i, 1, mode);
3106 /* If we can't get a part of Y, put Y into memory if it is a
3107 constant. Otherwise, force it into a register. If we still
3108 can't get a part of Y, abort. */
3109 if (ypart == 0 && CONSTANT_P (y))
3111 y = force_const_mem (mode, y);
3112 ypart = operand_subword (y, i, 1, mode);
3114 else if (ypart == 0)
3115 ypart = operand_subword_force (y, i, mode);
3117 if (xpart == 0 || ypart == 0)
3118 abort ();
3120 need_clobber |= (GET_CODE (xpart) == SUBREG);
3122 last_insn = emit_move_insn (xpart, ypart);
3125 seq = get_insns ();
3126 end_sequence ();
3128 /* Show the output dies here. This is necessary for SUBREGs
3129 of pseudos since we cannot track their lifetimes correctly;
3130 hard regs shouldn't appear here except as return values.
3131 We never want to emit such a clobber after reload. */
3132 if (x != y
3133 && ! (reload_in_progress || reload_completed)
3134 && need_clobber != 0)
3135 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3137 emit_insn (seq);
3139 return last_insn;
3141 else
3142 abort ();
3145 /* If Y is representable exactly in a narrower mode, and the target can
3146 perform the extension directly from constant or memory, then emit the
3147 move as an extension. */
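/* Example (illustrative): if X is a DFmode register and Y is the
   DFmode constant 1.0, the value truncates exactly to SFmode, so a
   target that can extend directly from an SFmode memory constant gets
   a float_extend of the narrower constant instead of a load of the
   full DFmode constant from the constant pool.  */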
3149 static rtx
3150 compress_float_constant (rtx x, rtx y)
3152 enum machine_mode dstmode = GET_MODE (x);
3153 enum machine_mode orig_srcmode = GET_MODE (y);
3154 enum machine_mode srcmode;
3155 REAL_VALUE_TYPE r;
3157 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3159 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3160 srcmode != orig_srcmode;
3161 srcmode = GET_MODE_WIDER_MODE (srcmode))
3163 enum insn_code ic;
3164 rtx trunc_y, last_insn;
3166 /* Skip if the target can't extend this way. */
3167 ic = can_extend_p (dstmode, srcmode, 0);
3168 if (ic == CODE_FOR_nothing)
3169 continue;
3171 /* Skip if the narrowed value isn't exact. */
3172 if (! exact_real_truncate (srcmode, &r))
3173 continue;
3175 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3177 if (LEGITIMATE_CONSTANT_P (trunc_y))
3179 /* Skip if the target needs extra instructions to perform
3180 the extension. */
3181 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3182 continue;
3184 else if (float_extend_from_mem[dstmode][srcmode])
3185 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3186 else
3187 continue;
3189 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3190 last_insn = get_last_insn ();
3192 if (REG_P (x))
3193 set_unique_reg_note (last_insn, REG_EQUAL, y);
3195 return last_insn;
3198 return NULL_RTX;
3201 /* Pushing data onto the stack. */
3203 /* Push a block of length SIZE (perhaps variable)
3204 and return an rtx to address the beginning of the block.
3205 Note that it is not possible for the value returned to be a QUEUED.
3206 The value may be virtual_outgoing_args_rtx.
3208 EXTRA is the number of bytes of padding to push in addition to SIZE.
3209 BELOW nonzero means this padding comes at low addresses;
3210 otherwise, the padding comes at high addresses. */
3212 rtx
3213 push_block (rtx size, int extra, int below)
3215 rtx temp;
3217 size = convert_modes (Pmode, ptr_mode, size, 1);
3218 if (CONSTANT_P (size))
3219 anti_adjust_stack (plus_constant (size, extra));
3220 else if (REG_P (size) && extra == 0)
3221 anti_adjust_stack (size);
3222 else
3224 temp = copy_to_mode_reg (Pmode, size);
3225 if (extra != 0)
3226 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3227 temp, 0, OPTAB_LIB_WIDEN);
3228 anti_adjust_stack (temp);
3231 #ifndef STACK_GROWS_DOWNWARD
3232 if (0)
3233 #else
3234 if (1)
3235 #endif
3237 temp = virtual_outgoing_args_rtx;
3238 if (extra != 0 && below)
3239 temp = plus_constant (temp, extra);
3241 else
3243 if (GET_CODE (size) == CONST_INT)
3244 temp = plus_constant (virtual_outgoing_args_rtx,
3245 -INTVAL (size) - (below ? 0 : extra));
3246 else if (extra != 0 && !below)
3247 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3248 negate_rtx (Pmode, plus_constant (size, extra)));
3249 else
3250 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3251 negate_rtx (Pmode, size));
3254 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3257 #ifdef PUSH_ROUNDING
3259 /* Emit single push insn. */
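/* Descriptive note: in the common case where the mode needs no padding
   and the target has no push pattern, the code below stores X through
   an address of the form (STACK_PUSH_CODE (reg sp)), e.g.
   (pre_dec (reg sp)) when the stack grows downward; the remaining
   branches handle explicit push patterns and downward-padded
   arguments.  */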
3261 static void
3262 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3264 rtx dest_addr;
3265 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3266 rtx dest;
3267 enum insn_code icode;
3268 insn_operand_predicate_fn pred;
3270 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3271 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3272 a MEM representing the push operation to the move expander. */
3273 icode = push_optab->handlers[(int) mode].insn_code;
3274 if (icode != CODE_FOR_nothing)
3276 if (((pred = insn_data[(int) icode].operand[0].predicate)
3277 && !((*pred) (x, mode))))
3278 x = force_reg (mode, x);
3279 emit_insn (GEN_FCN (icode) (x));
3280 return;
3282 if (GET_MODE_SIZE (mode) == rounded_size)
3283 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3284 /* If we are to pad downward, adjust the stack pointer first and
3285 then store X into the stack location using an offset. This is
3286 because emit_move_insn does not know how to pad; it does not have
3287 access to type. */
3288 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3290 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3291 HOST_WIDE_INT offset;
3293 emit_move_insn (stack_pointer_rtx,
3294 expand_binop (Pmode,
3295 #ifdef STACK_GROWS_DOWNWARD
3296 sub_optab,
3297 #else
3298 add_optab,
3299 #endif
3300 stack_pointer_rtx,
3301 GEN_INT (rounded_size),
3302 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3304 offset = (HOST_WIDE_INT) padding_size;
3305 #ifdef STACK_GROWS_DOWNWARD
3306 if (STACK_PUSH_CODE == POST_DEC)
3307 /* We have already decremented the stack pointer, so get the
3308 previous value. */
3309 offset += (HOST_WIDE_INT) rounded_size;
3310 #else
3311 if (STACK_PUSH_CODE == POST_INC)
3312 /* We have already incremented the stack pointer, so get the
3313 previous value. */
3314 offset -= (HOST_WIDE_INT) rounded_size;
3315 #endif
3316 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3318 else
3320 #ifdef STACK_GROWS_DOWNWARD
3321 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3322 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3323 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3324 #else
3325 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3326 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3327 GEN_INT (rounded_size));
3328 #endif
3329 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3332 dest = gen_rtx_MEM (mode, dest_addr);
3334 if (type != 0)
3336 set_mem_attributes (dest, type, 1);
3338 if (flag_optimize_sibling_calls)
3339 /* Function incoming arguments may overlap with sibling call
3340 outgoing arguments and we cannot allow reordering of reads
3341 from function arguments with stores to outgoing arguments
3342 of sibling calls. */
3343 set_mem_alias_set (dest, 0);
3345 emit_move_insn (dest, x);
3347 #endif
3349 /* Generate code to push X onto the stack, assuming it has mode MODE and
3350 type TYPE.
3351 MODE is redundant except when X is a CONST_INT (since they don't
3352 carry mode info).
3353 SIZE is an rtx for the size of data to be copied (in bytes),
3354 needed only if X is BLKmode.
3356 ALIGN (in bits) is maximum alignment we can assume.
3358 If PARTIAL and REG are both nonzero, then copy that many of the first
3359 words of X into registers starting with REG, and push the rest of X.
3360 The amount of space pushed is decreased by PARTIAL words,
3361 rounded *down* to a multiple of PARM_BOUNDARY.
3362 REG must be a hard register in this case.
3363 If REG is zero but PARTIAL is not, take all other actions for an
3364 argument partially in registers, but do not actually load any
3365 registers.
3367 EXTRA is the amount in bytes of extra space to leave next to this arg.
3368 This is ignored if an argument block has already been allocated.
3370 On a machine that lacks real push insns, ARGS_ADDR is the address of
3371 the bottom of the argument block for this call. We use indexing off there
3372 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3373 argument block has not been preallocated.
3375 ARGS_SO_FAR is the size of args previously pushed for this call.
3377 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3378 for arguments passed in registers. If nonzero, it will be the number
3379 of bytes required. */
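/* Worked example (illustrative, assuming 4-byte words and 32-bit
   PARM_BOUNDARY): pushing a 10-byte BLKmode argument with PARTIAL == 1
   makes USED == 4, so the first word travels in REG and only the
   remaining 6 bytes are copied to the stack; SKIP then decides, based
   on REG_PARM_STACK_SPACE, whether those bytes land at offset 4 or at
   offset 0 of the argument's stack slot.  */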
3381 void
3382 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3383 unsigned int align, int partial, rtx reg, int extra,
3384 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3385 rtx alignment_pad)
3387 rtx xinner;
3388 enum direction stack_direction
3389 #ifdef STACK_GROWS_DOWNWARD
3390 = downward;
3391 #else
3392 = upward;
3393 #endif
3395 /* Decide where to pad the argument: `downward' for below,
3396 `upward' for above, or `none' for don't pad it.
3397 Default is below for small data on big-endian machines; else above. */
3398 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3400 /* Invert direction if stack is post-decrement.
3401 FIXME: why? */
3402 if (STACK_PUSH_CODE == POST_DEC)
3403 if (where_pad != none)
3404 where_pad = (where_pad == downward ? upward : downward);
3406 xinner = x = protect_from_queue (x, 0);
3408 if (mode == BLKmode)
3410 /* Copy a block into the stack, entirely or partially. */
3412 rtx temp;
3413 int used = partial * UNITS_PER_WORD;
3414 int offset;
3415 int skip;
3417 if (reg && GET_CODE (reg) == PARALLEL)
3419 /* Use the size of the elt to compute offset. */
3420 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3421 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3422 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3424 else
3425 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3427 if (size == 0)
3428 abort ();
3430 used -= offset;
3432 /* USED is now the # of bytes we need not copy to the stack
3433 because registers will take care of them. */
3435 if (partial != 0)
3436 xinner = adjust_address (xinner, BLKmode, used);
3438 /* If the partial register-part of the arg counts in its stack size,
3439 skip the part of stack space corresponding to the registers.
3440 Otherwise, start copying to the beginning of the stack space,
3441 by setting SKIP to 0. */
3442 skip = (reg_parm_stack_space == 0) ? 0 : used;
3444 #ifdef PUSH_ROUNDING
3445 /* Do it with several push insns if that doesn't take lots of insns
3446 and if there is no difficulty with push insns that skip bytes
3447 on the stack for alignment purposes. */
3448 if (args_addr == 0
3449 && PUSH_ARGS
3450 && GET_CODE (size) == CONST_INT
3451 && skip == 0
3452 && MEM_ALIGN (xinner) >= align
3453 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3454 /* Here we avoid the case of a structure whose weak alignment
3455 forces many pushes of a small amount of data,
3456 and such small pushes do rounding that causes trouble. */
3457 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3458 || align >= BIGGEST_ALIGNMENT
3459 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3460 == (align / BITS_PER_UNIT)))
3461 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3463 /* Push padding now if padding above and stack grows down,
3464 or if padding below and stack grows up.
3465 But if space already allocated, this has already been done. */
3466 if (extra && args_addr == 0
3467 && where_pad != none && where_pad != stack_direction)
3468 anti_adjust_stack (GEN_INT (extra));
3470 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3472 else
3473 #endif /* PUSH_ROUNDING */
3475 rtx target;
3477 /* Otherwise make space on the stack and copy the data
3478 to the address of that space. */
3480 /* Deduct words put into registers from the size we must copy. */
3481 if (partial != 0)
3483 if (GET_CODE (size) == CONST_INT)
3484 size = GEN_INT (INTVAL (size) - used);
3485 else
3486 size = expand_binop (GET_MODE (size), sub_optab, size,
3487 GEN_INT (used), NULL_RTX, 0,
3488 OPTAB_LIB_WIDEN);
3491 /* Get the address of the stack space.
3492 In this case, we do not deal with EXTRA separately.
3493 A single stack adjust will do. */
3494 if (! args_addr)
3496 temp = push_block (size, extra, where_pad == downward);
3497 extra = 0;
3499 else if (GET_CODE (args_so_far) == CONST_INT)
3500 temp = memory_address (BLKmode,
3501 plus_constant (args_addr,
3502 skip + INTVAL (args_so_far)));
3503 else
3504 temp = memory_address (BLKmode,
3505 plus_constant (gen_rtx_PLUS (Pmode,
3506 args_addr,
3507 args_so_far),
3508 skip));
3510 if (!ACCUMULATE_OUTGOING_ARGS)
3512 /* If the source is referenced relative to the stack pointer,
3513 copy it to another register to stabilize it. We do not need
3514 to do this if we know that we won't be changing sp. */
3516 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3517 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3518 temp = copy_to_reg (temp);
3521 target = gen_rtx_MEM (BLKmode, temp);
3523 if (type != 0)
3525 set_mem_attributes (target, type, 1);
3526 /* Function incoming arguments may overlap with sibling call
3527 outgoing arguments and we cannot allow reordering of reads
3528 from function arguments with stores to outgoing arguments
3529 of sibling calls. */
3530 set_mem_alias_set (target, 0);
3533 /* ALIGN may well be better aligned than TYPE, e.g. due to
3534 PARM_BOUNDARY. Assume the caller isn't lying. */
3535 set_mem_align (target, align);
3537 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3540 else if (partial > 0)
3542 /* Scalar partly in registers. */
3544 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3545 int i;
3546 int not_stack;
3547 /* # words of start of argument
3548 that we must make space for but need not store. */
3549 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3550 int args_offset = INTVAL (args_so_far);
3551 int skip;
3553 /* Push padding now if padding above and stack grows down,
3554 or if padding below and stack grows up.
3555 But if space already allocated, this has already been done. */
3556 if (extra && args_addr == 0
3557 && where_pad != none && where_pad != stack_direction)
3558 anti_adjust_stack (GEN_INT (extra));
3560 /* If we make space by pushing it, we might as well push
3561 the real data. Otherwise, we can leave OFFSET nonzero
3562 and leave the space uninitialized. */
3563 if (args_addr == 0)
3564 offset = 0;
3566 /* Now NOT_STACK gets the number of words that we don't need to
3567 allocate on the stack. */
3568 not_stack = partial - offset;
3570 /* If the partial register-part of the arg counts in its stack size,
3571 skip the part of stack space corresponding to the registers.
3572 Otherwise, start copying to the beginning of the stack space,
3573 by setting SKIP to 0. */
3574 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3576 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3577 x = validize_mem (force_const_mem (mode, x));
3579 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3580 SUBREGs of such registers are not allowed. */
3581 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3582 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3583 x = copy_to_reg (x);
3585 /* Loop over all the words allocated on the stack for this arg. */
3586 /* We can do it by words, because any scalar bigger than a word
3587 has a size a multiple of a word. */
3588 #ifndef PUSH_ARGS_REVERSED
3589 for (i = not_stack; i < size; i++)
3590 #else
3591 for (i = size - 1; i >= not_stack; i--)
3592 #endif
3593 if (i >= not_stack + offset)
3594 emit_push_insn (operand_subword_force (x, i, mode),
3595 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3596 0, args_addr,
3597 GEN_INT (args_offset + ((i - not_stack + skip)
3598 * UNITS_PER_WORD)),
3599 reg_parm_stack_space, alignment_pad);
3601 else
3603 rtx addr;
3604 rtx dest;
3606 /* Push padding now if padding above and stack grows down,
3607 or if padding below and stack grows up.
3608 But if space already allocated, this has already been done. */
3609 if (extra && args_addr == 0
3610 && where_pad != none && where_pad != stack_direction)
3611 anti_adjust_stack (GEN_INT (extra));
3613 #ifdef PUSH_ROUNDING
3614 if (args_addr == 0 && PUSH_ARGS)
3615 emit_single_push_insn (mode, x, type);
3616 else
3617 #endif
3619 if (GET_CODE (args_so_far) == CONST_INT)
3620 addr
3621 = memory_address (mode,
3622 plus_constant (args_addr,
3623 INTVAL (args_so_far)));
3624 else
3625 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3626 args_so_far));
3627 dest = gen_rtx_MEM (mode, addr);
3628 if (type != 0)
3630 set_mem_attributes (dest, type, 1);
3631 /* Function incoming arguments may overlap with sibling call
3632 outgoing arguments and we cannot allow reordering of reads
3633 from function arguments with stores to outgoing arguments
3634 of sibling calls. */
3635 set_mem_alias_set (dest, 0);
3638 emit_move_insn (dest, x);
3642 /* If part should go in registers, copy that part
3643 into the appropriate registers. Do this now, at the end,
3644 since mem-to-mem copies above may do function calls. */
3645 if (partial > 0 && reg != 0)
3647 /* Handle calls that pass values in multiple non-contiguous locations.
3648 The Irix 6 ABI has examples of this. */
3649 if (GET_CODE (reg) == PARALLEL)
3650 emit_group_load (reg, x, type, -1);
3651 else
3652 move_block_to_reg (REGNO (reg), x, partial, mode);
3655 if (extra && args_addr == 0 && where_pad == stack_direction)
3656 anti_adjust_stack (GEN_INT (extra));
3658 if (alignment_pad && args_addr == 0)
3659 anti_adjust_stack (alignment_pad);
3662 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3663 operations. */
3665 static rtx
3666 get_subtarget (rtx x)
3668 return ((x == 0
3669 /* Only registers can be subtargets. */
3670 || !REG_P (x)
3671 /* If the register is readonly, it can't be set more than once. */
3672 || RTX_UNCHANGING_P (x)
3673 /* Don't use hard regs to avoid extending their life. */
3674 || REGNO (x) < FIRST_PSEUDO_REGISTER
3675 /* Avoid subtargets inside loops,
3676 since they hide some invariant expressions. */
3677 || preserve_subexpressions_p ())
3678 ? 0 : x);
3681 /* Expand an assignment that stores the value of FROM into TO.
3682 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3683 (This may contain a QUEUED rtx;
3684 if the value is constant, this rtx is a constant.)
3685 Otherwise, the returned value is NULL_RTX. */
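/* For instance (illustrative): expanding `x.f = v' where f is a
   bit-field takes the COMPONENT_REF branch below; get_inner_reference
   recovers the containing object together with BITSIZE and BITPOS, and
   the store is then done with the bit-field machinery (cf. the
   store_bit_field reference below) instead of a plain move.  */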
3687 rtx
3688 expand_assignment (tree to, tree from, int want_value)
3690 rtx to_rtx = 0;
3691 rtx result;
3693 /* Don't crash if the lhs of the assignment was erroneous. */
3695 if (TREE_CODE (to) == ERROR_MARK)
3697 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3698 return want_value ? result : NULL_RTX;
3701 /* Assignment of a structure component needs special treatment
3702 if the structure component's rtx is not simply a MEM.
3703 Assignment of an array element at a constant index, and assignment of
3704 an array element in an unaligned packed structure field, has the same
3705 problem. */
3707 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3708 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3709 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3711 enum machine_mode mode1;
3712 HOST_WIDE_INT bitsize, bitpos;
3713 rtx orig_to_rtx;
3714 tree offset;
3715 int unsignedp;
3716 int volatilep = 0;
3717 tree tem;
3719 push_temp_slots ();
3720 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3721 &unsignedp, &volatilep);
3723 /* If we are going to use store_bit_field and extract_bit_field,
3724 make sure to_rtx will be safe for multiple use. */
3726 if (mode1 == VOIDmode && want_value)
3727 tem = stabilize_reference (tem);
3729 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3731 if (offset != 0)
3733 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3735 if (!MEM_P (to_rtx))
3736 abort ();
3738 #ifdef POINTERS_EXTEND_UNSIGNED
3739 if (GET_MODE (offset_rtx) != Pmode)
3740 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3741 #else
3742 if (GET_MODE (offset_rtx) != ptr_mode)
3743 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3744 #endif
3746 /* A constant address in TO_RTX can have VOIDmode; we must not try
3747 to call force_reg for that case. Avoid that case. */
3748 if (MEM_P (to_rtx)
3749 && GET_MODE (to_rtx) == BLKmode
3750 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3751 && bitsize > 0
3752 && (bitpos % bitsize) == 0
3753 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3754 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3756 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3757 bitpos = 0;
3760 to_rtx = offset_address (to_rtx, offset_rtx,
3761 highest_pow2_factor_for_target (to,
3762 offset));
3765 if (MEM_P (to_rtx))
3767 /* If the field is at offset zero, we could have been given the
3768 DECL_RTX of the parent struct. Don't munge it. */
3769 to_rtx = shallow_copy_rtx (to_rtx);
3771 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3774 /* Deal with volatile and readonly fields. The former is only done
3775 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3776 if (volatilep && MEM_P (to_rtx))
3778 if (to_rtx == orig_to_rtx)
3779 to_rtx = copy_rtx (to_rtx);
3780 MEM_VOLATILE_P (to_rtx) = 1;
3783 if (TREE_CODE (to) == COMPONENT_REF
3784 && TREE_READONLY (TREE_OPERAND (to, 1))
3785 /* We can't assert that a MEM won't be set more than once
3786 if the component is not addressable because another
3787 non-addressable component may be referenced by the same MEM. */
3788 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3790 if (to_rtx == orig_to_rtx)
3791 to_rtx = copy_rtx (to_rtx);
3792 RTX_UNCHANGING_P (to_rtx) = 1;
3795 if (MEM_P (to_rtx) && ! can_address_p (to))
3797 if (to_rtx == orig_to_rtx)
3798 to_rtx = copy_rtx (to_rtx);
3799 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3802 /* Disabled temporarily. GET_MODE (to_rtx) is often not the right
3803 mode. */
3804 while (0 && mode1 == VOIDmode && !want_value
3805 && bitpos + bitsize <= BITS_PER_WORD
3806 && bitsize < BITS_PER_WORD
3807 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3808 && !TREE_SIDE_EFFECTS (to)
3809 && !TREE_THIS_VOLATILE (to))
3811 tree src, op0, op1;
3812 rtx value;
3813 HOST_WIDE_INT count = bitpos;
3814 optab binop;
3816 src = from;
3817 STRIP_NOPS (src);
3818 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3819 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3820 break;
3822 op0 = TREE_OPERAND (src, 0);
3823 op1 = TREE_OPERAND (src, 1);
3824 STRIP_NOPS (op0);
3826 if (! operand_equal_p (to, op0, 0))
3827 break;
3829 if (BYTES_BIG_ENDIAN)
3830 count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;
3832 /* Special case some bitfield op= exp. */
3833 switch (TREE_CODE (src))
3835 case PLUS_EXPR:
3836 case MINUS_EXPR:
3837 if (count <= 0)
3838 break;
3840 /* For now, just optimize the case of the topmost bitfield
3841 where we don't need to do any masking and also
3842 1 bit bitfields where xor can be used.
3843 We might win by one instruction for the other bitfields
3844 too if insv/extv instructions aren't used, so that
3845 can be added later. */
3846 if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
3847 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3848 break;
3849 value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
3850 value = protect_from_queue (value, 0);
3851 to_rtx = protect_from_queue (to_rtx, 1);
3852 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3853 if (bitsize == 1
3854 && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
3856 value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
3857 NULL_RTX);
3858 binop = xor_optab;
3860 value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
3861 value, build_int_2 (count, 0),
3862 NULL_RTX, 1);
3863 result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
3864 value, to_rtx, 1, OPTAB_WIDEN);
3865 if (result != to_rtx)
3866 emit_move_insn (to_rtx, result);
3867 free_temp_slots ();
3868 pop_temp_slots ();
3869 return NULL_RTX;
3870 default:
3871 break;
3874 break;
3877 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3878 (want_value
3879 /* Spurious cast for HPUX compiler. */
3880 ? ((enum machine_mode)
3881 TYPE_MODE (TREE_TYPE (to)))
3882 : VOIDmode),
3883 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3885 preserve_temp_slots (result);
3886 free_temp_slots ();
3887 pop_temp_slots ();
3889 /* If the value is meaningful, convert RESULT to the proper mode.
3890 Otherwise, return nothing. */
3891 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3892 TYPE_MODE (TREE_TYPE (from)),
3893 result,
3894 TYPE_UNSIGNED (TREE_TYPE (to)))
3895 : NULL_RTX);
3898 /* If the rhs is a function call and its value is not an aggregate,
3899 call the function before we start to compute the lhs.
3900 This is needed for correct code for cases such as
3901 val = setjmp (buf) on machines where reference to val
3902 requires loading up part of an address in a separate insn.
3904 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3905 since it might be a promoted variable where the zero- or sign- extension
3906 needs to be done. Handling this in the normal way is safe because no
3907 computation is done before the call. */
3908 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3909 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3910 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3911 && REG_P (DECL_RTL (to))))
3913 rtx value;
3915 push_temp_slots ();
3916 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3917 if (to_rtx == 0)
3918 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3920 /* Handle calls that return values in multiple non-contiguous locations.
3921 The Irix 6 ABI has examples of this. */
3922 if (GET_CODE (to_rtx) == PARALLEL)
3923 emit_group_load (to_rtx, value, TREE_TYPE (from),
3924 int_size_in_bytes (TREE_TYPE (from)));
3925 else if (GET_MODE (to_rtx) == BLKmode)
3926 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3927 else
3929 if (POINTER_TYPE_P (TREE_TYPE (to)))
3930 value = convert_memory_address (GET_MODE (to_rtx), value);
3931 emit_move_insn (to_rtx, value);
3933 preserve_temp_slots (to_rtx);
3934 free_temp_slots ();
3935 pop_temp_slots ();
3936 return want_value ? to_rtx : NULL_RTX;
3939 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3940 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3942 if (to_rtx == 0)
3943 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3945 /* Don't move directly into a return register. */
3946 if (TREE_CODE (to) == RESULT_DECL
3947 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3949 rtx temp;
3951 push_temp_slots ();
3952 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3954 if (GET_CODE (to_rtx) == PARALLEL)
3955 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3956 int_size_in_bytes (TREE_TYPE (from)));
3957 else
3958 emit_move_insn (to_rtx, temp);
3960 preserve_temp_slots (to_rtx);
3961 free_temp_slots ();
3962 pop_temp_slots ();
3963 return want_value ? to_rtx : NULL_RTX;
3966 /* In case we are returning the contents of an object which overlaps
3967 the place the value is being stored, use a safe function when copying
3968 a value through a pointer into a structure value return block. */
3969 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3970 && current_function_returns_struct
3971 && !current_function_returns_pcc_struct)
3973 rtx from_rtx, size;
3975 push_temp_slots ();
3976 size = expr_size (from);
3977 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3979 emit_library_call (memmove_libfunc, LCT_NORMAL,
3980 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3981 XEXP (from_rtx, 0), Pmode,
3982 convert_to_mode (TYPE_MODE (sizetype),
3983 size, TYPE_UNSIGNED (sizetype)),
3984 TYPE_MODE (sizetype));
3986 preserve_temp_slots (to_rtx);
3987 free_temp_slots ();
3988 pop_temp_slots ();
3989 return want_value ? to_rtx : NULL_RTX;
3992 /* Compute FROM and store the value in the rtx we got. */
3994 push_temp_slots ();
3995 result = store_expr (from, to_rtx, want_value);
3996 preserve_temp_slots (result);
3997 free_temp_slots ();
3998 pop_temp_slots ();
3999 return want_value ? result : NULL_RTX;
4002 /* Generate code for computing expression EXP,
4003 and storing the value into TARGET.
4004 TARGET may contain a QUEUED rtx.
4006 If WANT_VALUE & 1 is nonzero, return a copy of the value
4007 not in TARGET, so that we can be sure to use the proper
4008 value in a containing expression even if TARGET has something
4009 else stored in it. If possible, we copy the value through a pseudo
4010 and return that pseudo. Or, if the value is constant, we try to
4011 return the constant. In some cases, we return a pseudo
4012 copied *from* TARGET.
4014 If the mode is BLKmode then we may return TARGET itself.
4015 It turns out that in BLKmode it doesn't cause a problem,
4016 because C has no operators that could combine two different
4017 assignments into the same BLKmode object with different values
4018 with no sequence point. Will other languages need this to
4019 be more thorough?
4021 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4022 to catch quickly any cases where the caller uses the value
4023 and fails to set WANT_VALUE.
4025 If WANT_VALUE & 2 is set, this is a store into a call param on the
4026 stack, and block moves may need to be treated specially. */
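/* In other words, WANT_VALUE is a pair of bit flags:
     store_expr (exp, target, 0)  -- just store, return NULL_RTX;
     store_expr (exp, target, 1)  -- store and return an rtx usable as the
				     value of the enclosing expression;
     store_expr (exp, target, 2)  -- store of an outgoing call argument on
				     the stack, no value wanted;
     store_expr (exp, target, 3)  -- both of the above.  */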
4029 store_expr (tree exp, rtx target, int want_value)
4031 rtx temp;
4032 rtx alt_rtl = NULL_RTX;
4033 rtx mark = mark_queue ();
4034 int dont_return_target = 0;
4035 int dont_store_target = 0;
4037 if (VOID_TYPE_P (TREE_TYPE (exp)))
4039 /* C++ can generate ?: expressions with a throw expression in one
4040 branch and an rvalue in the other. Here, we resolve attempts to
4041 store the throw expression's nonexistent result. */
4042 if (want_value)
4043 abort ();
4044 expand_expr (exp, const0_rtx, VOIDmode, 0);
4045 return NULL_RTX;
4047 if (TREE_CODE (exp) == COMPOUND_EXPR)
4049 /* Perform first part of compound expression, then assign from second
4050 part. */
4051 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4052 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4053 emit_queue ();
4054 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4056 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4058 /* For conditional expression, get safe form of the target. Then
4059 test the condition, doing the appropriate assignment on either
4060 side. This avoids the creation of unnecessary temporaries.
4061 For non-BLKmode, it is more efficient not to do this. */
4063 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4065 emit_queue ();
4066 target = protect_from_queue (target, 1);
4068 do_pending_stack_adjust ();
4069 NO_DEFER_POP;
4070 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4071 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4072 emit_queue ();
4073 emit_jump_insn (gen_jump (lab2));
4074 emit_barrier ();
4075 emit_label (lab1);
4076 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4077 emit_queue ();
4078 emit_label (lab2);
4079 OK_DEFER_POP;
4081 return want_value & 1 ? target : NULL_RTX;
4083 else if (queued_subexp_p (target))
4084 /* If target contains a postincrement, let's not risk
4085 using it as the place to generate the rhs. */
4087 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4089 /* Expand EXP into a new pseudo. */
4090 temp = gen_reg_rtx (GET_MODE (target));
4091 temp = expand_expr (exp, temp, GET_MODE (target),
4092 (want_value & 2
4093 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4095 else
4096 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4097 (want_value & 2
4098 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4100 /* If target is volatile, ANSI requires accessing the value
4101 *from* the target, if it is accessed. So make that happen.
4102 In no case return the target itself. */
4103 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4104 dont_return_target = 1;
4106 else if ((want_value & 1) != 0
4107 && MEM_P (target)
4108 && ! MEM_VOLATILE_P (target)
4109 && GET_MODE (target) != BLKmode)
4110 /* If target is in memory and caller wants value in a register instead,
4111 arrange that. Pass TARGET as target for expand_expr so that,
4112 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4113 We know expand_expr will not use the target in that case.
4114 Don't do this if TARGET is volatile because we are supposed
4115 to write it and then read it. */
4117 temp = expand_expr (exp, target, GET_MODE (target),
4118 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4119 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4121 /* If TEMP is already in the desired TARGET, only copy it from
4122 memory and don't store it there again. */
4123 if (temp == target
4124 || (rtx_equal_p (temp, target)
4125 && ! side_effects_p (temp) && ! side_effects_p (target)))
4126 dont_store_target = 1;
4127 temp = copy_to_reg (temp);
4129 dont_return_target = 1;
4131 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4132 /* If this is a scalar in a register that is stored in a wider mode
4133 than the declared mode, compute the result into its declared mode
4134 and then convert to the wider mode. Our value is the computed
4135 expression. */
4137 rtx inner_target = 0;
4139 /* If we don't want a value, we can do the conversion inside EXP,
4140 which will often result in some optimizations. Do the conversion
4141 in two steps: first change the signedness, if needed, then
4142 the extend. But don't do this if the type of EXP is a subtype
4143 of something else since then the conversion might involve
4144 more than just converting modes. */
4145 if ((want_value & 1) == 0
4146 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4147 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4149 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4150 != SUBREG_PROMOTED_UNSIGNED_P (target))
4151 exp = convert
4152 (lang_hooks.types.signed_or_unsigned_type
4153 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4155 exp = convert (lang_hooks.types.type_for_mode
4156 (GET_MODE (SUBREG_REG (target)),
4157 SUBREG_PROMOTED_UNSIGNED_P (target)),
4158 exp);
4160 inner_target = SUBREG_REG (target);
4163 temp = expand_expr (exp, inner_target, VOIDmode,
4164 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4166 /* If TEMP is a MEM and we want a result value, make the access
4167 now so it gets done only once. Strictly speaking, this is
4168 only necessary if the MEM is volatile, or if the address
4169 overlaps TARGET. But not performing the load twice also
4170 reduces the amount of rtl we generate and then have to CSE. */
4171 if (MEM_P (temp) && (want_value & 1) != 0)
4172 temp = copy_to_reg (temp);
4174 /* If TEMP is a VOIDmode constant, use convert_modes to make
4175 sure that we properly convert it. */
4176 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4178 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4179 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4180 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4181 GET_MODE (target), temp,
4182 SUBREG_PROMOTED_UNSIGNED_P (target));
4185 convert_move (SUBREG_REG (target), temp,
4186 SUBREG_PROMOTED_UNSIGNED_P (target));
4188 /* If we promoted a constant, change the mode back down to match
4189 target. Otherwise, the caller might get confused by a result whose
4190 mode is larger than expected. */
4192 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4194 if (GET_MODE (temp) != VOIDmode)
4196 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4197 SUBREG_PROMOTED_VAR_P (temp) = 1;
4198 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4199 SUBREG_PROMOTED_UNSIGNED_P (target));
4201 else
4202 temp = convert_modes (GET_MODE (target),
4203 GET_MODE (SUBREG_REG (target)),
4204 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4207 return want_value & 1 ? temp : NULL_RTX;
4209 else
4211 temp = expand_expr_real (exp, target, GET_MODE (target),
4212 (want_value & 2
4213 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4214 &alt_rtl);
4215 /* Return TARGET if it's a specified hardware register.
4216 If TARGET is a volatile mem ref, either return TARGET
4217 or return a reg copied *from* TARGET; ANSI requires this.
4219 Otherwise, if TEMP is not TARGET, return TEMP
4220 if it is constant (for efficiency),
4221 or if we really want the correct value. */
4222 if (!(target && REG_P (target)
4223 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4224 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4225 && ! rtx_equal_p (temp, target)
4226 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4227 dont_return_target = 1;
4230 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4231 the same as that of TARGET, adjust the constant. This is needed, for
4232 example, in case it is a CONST_DOUBLE and we want only a word-sized
4233 value. */
4234 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4235 && TREE_CODE (exp) != ERROR_MARK
4236 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4237 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4238 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4240 /* If value was not generated in the target, store it there.
4241 Convert the value to TARGET's type first if necessary and emit the
4242 pending incrementations that have been queued when expanding EXP.
4243 Note that we cannot emit the whole queue blindly because this will
4244 effectively disable the POST_INC optimization later.
4246 If TEMP and TARGET compare equal according to rtx_equal_p, but
4247 one or both of them are volatile memory refs, we have to distinguish
4248 two cases:
4249 - expand_expr has used TARGET. In this case, we must not generate
4250 another copy. This can be detected by TARGET being equal according
4251 to == .
4252 - expand_expr has not used TARGET - that means that the source just
4253 happens to have the same RTX form. Since temp will have been created
4254 by expand_expr, it will compare unequal according to == .
4255 We must generate a copy in this case, to reach the correct number
4256 of volatile memory references. */
4258 if ((! rtx_equal_p (temp, target)
4259 || (temp != target && (side_effects_p (temp)
4260 || side_effects_p (target))))
4261 && TREE_CODE (exp) != ERROR_MARK
4262 && ! dont_store_target
4263 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4264 but TARGET is not a valid memory reference, TEMP will differ
4265 from TARGET although it is really the same location. */
4266 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4267 /* If there's nothing to copy, don't bother. Don't call expr_size
4268 unless necessary, because the expr_size hook of some front ends (C++)
4269 aborts on objects that are not supposed to be bit-copied or
4270 bit-initialized. */
4271 && expr_size (exp) != const0_rtx)
4273 emit_insns_enqueued_after_mark (mark);
4274 target = protect_from_queue (target, 1);
4275 temp = protect_from_queue (temp, 0);
4276 if (GET_MODE (temp) != GET_MODE (target)
4277 && GET_MODE (temp) != VOIDmode)
4279 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4280 if (dont_return_target)
4282 /* In this case, we will return TEMP,
4283 so make sure it has the proper mode.
4284 But don't forget to store the value into TARGET. */
4285 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4286 emit_move_insn (target, temp);
4288 else
4289 convert_move (target, temp, unsignedp);
4292 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4294 /* Handle copying a string constant into an array. The string
4295 constant may be shorter than the array. So copy just the string's
4296 actual length, and clear the rest. First get the size of the data
4297 type of the string, which is actually the size of the target. */
4298 rtx size = expr_size (exp);
4300 if (GET_CODE (size) == CONST_INT
4301 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4302 emit_block_move (target, temp, size,
4303 (want_value & 2
4304 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4305 else
4307 /* Compute the size of the data to copy from the string. */
4308 tree copy_size
4309 = size_binop (MIN_EXPR,
4310 make_tree (sizetype, size),
4311 size_int (TREE_STRING_LENGTH (exp)));
4312 rtx copy_size_rtx
4313 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4314 (want_value & 2
4315 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4316 rtx label = 0;
4318 /* Copy that much. */
4319 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4320 TYPE_UNSIGNED (sizetype));
4321 emit_block_move (target, temp, copy_size_rtx,
4322 (want_value & 2
4323 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4325 /* Figure out how much is left in TARGET that we have to clear.
4326 Do all calculations in ptr_mode. */
4327 if (GET_CODE (copy_size_rtx) == CONST_INT)
4329 size = plus_constant (size, -INTVAL (copy_size_rtx));
4330 target = adjust_address (target, BLKmode,
4331 INTVAL (copy_size_rtx));
4333 else
4335 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4336 copy_size_rtx, NULL_RTX, 0,
4337 OPTAB_LIB_WIDEN);
4339 #ifdef POINTERS_EXTEND_UNSIGNED
4340 if (GET_MODE (copy_size_rtx) != Pmode)
4341 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4342 TYPE_UNSIGNED (sizetype));
4343 #endif
4345 target = offset_address (target, copy_size_rtx,
4346 highest_pow2_factor (copy_size));
4347 label = gen_label_rtx ();
4348 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4349 GET_MODE (size), 0, label);
4352 if (size != const0_rtx)
4353 clear_storage (target, size);
4355 if (label)
4356 emit_label (label);
4359 /* Handle calls that return values in multiple non-contiguous locations.
4360 The Irix 6 ABI has examples of this. */
4361 else if (GET_CODE (target) == PARALLEL)
4362 emit_group_load (target, temp, TREE_TYPE (exp),
4363 int_size_in_bytes (TREE_TYPE (exp)));
4364 else if (GET_MODE (temp) == BLKmode)
4365 emit_block_move (target, temp, expr_size (exp),
4366 (want_value & 2
4367 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4368 else
4370 temp = force_operand (temp, target);
4371 if (temp != target)
4372 emit_move_insn (target, temp);
4376 /* If we don't want a value, return NULL_RTX. */
4377 if ((want_value & 1) == 0)
4378 return NULL_RTX;
4380 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4381 ??? The latter test doesn't seem to make sense. */
4382 else if (dont_return_target && !MEM_P (temp))
4383 return temp;
4385 /* Return TARGET itself if it is a hard register. */
4386 else if ((want_value & 1) != 0
4387 && GET_MODE (target) != BLKmode
4388 && ! (REG_P (target)
4389 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4390 return copy_to_reg (target);
4392 else
4393 return target;
4396 /* Examine CTOR.  Count how many scalar fields are set to nonzero
4397 values and place that count in *P_NZ_ELTS.  Count how many scalar
4398 fields are set to non-constant values and place that count in *P_NC_ELTS. */
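/* For instance, given the C initializer { 0, 3, f (), 0 }, the 3 and the
   call are counted as nonzero (*P_NZ_ELTS == 2), and the call, which is not
   a valid constant initializer, is also counted as non-constant
   (*P_NC_ELTS == 1); the zero elements contribute to neither count.  */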
4400 static void
4401 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4402 HOST_WIDE_INT *p_nc_elts)
4404 HOST_WIDE_INT nz_elts, nc_elts;
4405 tree list;
4407 nz_elts = 0;
4408 nc_elts = 0;
4410 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4412 tree value = TREE_VALUE (list);
4413 tree purpose = TREE_PURPOSE (list);
4414 HOST_WIDE_INT mult;
4416 mult = 1;
4417 if (TREE_CODE (purpose) == RANGE_EXPR)
4419 tree lo_index = TREE_OPERAND (purpose, 0);
4420 tree hi_index = TREE_OPERAND (purpose, 1);
4422 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4423 mult = (tree_low_cst (hi_index, 1)
4424 - tree_low_cst (lo_index, 1) + 1);
4427 switch (TREE_CODE (value))
4429 case CONSTRUCTOR:
4431 HOST_WIDE_INT nz = 0, nc = 0;
4432 categorize_ctor_elements_1 (value, &nz, &nc);
4433 nz_elts += mult * nz;
4434 nc_elts += mult * nc;
4436 break;
4438 case INTEGER_CST:
4439 case REAL_CST:
4440 if (!initializer_zerop (value))
4441 nz_elts += mult;
4442 break;
4443 case COMPLEX_CST:
4444 if (!initializer_zerop (TREE_REALPART (value)))
4445 nz_elts += mult;
4446 if (!initializer_zerop (TREE_IMAGPART (value)))
4447 nz_elts += mult;
4448 break;
4449 case VECTOR_CST:
4451 tree v;
4452 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4453 if (!initializer_zerop (TREE_VALUE (v)))
4454 nz_elts += mult;
4456 break;
4458 default:
4459 nz_elts += mult;
4460 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4461 nc_elts += mult;
4462 break;
4466 *p_nz_elts += nz_elts;
4467 *p_nc_elts += nc_elts;
4470 void
4471 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4472 HOST_WIDE_INT *p_nc_elts)
4474 *p_nz_elts = 0;
4475 *p_nc_elts = 0;
4476 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4479 /* Count the number of scalars in TYPE.  Return -1 if the count
4480 overflows or if TYPE is variable-sized. */
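/* For example, for
     struct s { int a; double d[4]; };
   this returns 1 + 4 * 1 == 5.  Unions are only estimated, by their size
   in words, and complex types count as 2.  (Illustrative example only.)  */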
4482 HOST_WIDE_INT
4483 count_type_elements (tree type)
4485 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4486 switch (TREE_CODE (type))
4488 case ARRAY_TYPE:
4490 tree telts = array_type_nelts (type);
4491 if (telts && host_integerp (telts, 1))
4493 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4494 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4495 if (n == 0)
4496 return 0;
4497 else if (max / n > m)
4498 return n * m;
4500 return -1;
4503 case RECORD_TYPE:
4505 HOST_WIDE_INT n = 0, t;
4506 tree f;
4508 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4509 if (TREE_CODE (f) == FIELD_DECL)
4511 t = count_type_elements (TREE_TYPE (f));
4512 if (t < 0)
4513 return -1;
4514 n += t;
4517 return n;
4520 case UNION_TYPE:
4521 case QUAL_UNION_TYPE:
4523 /* Ho hum. How in the world do we guess here? Clearly it isn't
4524 right to count the fields. Guess based on the number of words. */
4525 HOST_WIDE_INT n = int_size_in_bytes (type);
4526 if (n < 0)
4527 return -1;
4528 return n / UNITS_PER_WORD;
4531 case COMPLEX_TYPE:
4532 return 2;
4534 case VECTOR_TYPE:
4535 /* ??? This is broken.  We should encode the vector width in the tree. */
4536 return GET_MODE_NUNITS (TYPE_MODE (type));
4538 case INTEGER_TYPE:
4539 case REAL_TYPE:
4540 case ENUMERAL_TYPE:
4541 case BOOLEAN_TYPE:
4542 case CHAR_TYPE:
4543 case POINTER_TYPE:
4544 case OFFSET_TYPE:
4545 case REFERENCE_TYPE:
4546 return 1;
4548 case VOID_TYPE:
4549 case METHOD_TYPE:
4550 case FILE_TYPE:
4551 case SET_TYPE:
4552 case FUNCTION_TYPE:
4553 case LANG_TYPE:
4554 default:
4555 abort ();
4559 /* Return 1 if EXP contains mostly (3/4) zeros. */
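/* E.g. an aggregate initializer with eight scalar elements of which only
   one is nonzero gives nz_elts == 1 < 8 / 4, so it counts as mostly zero
   and store_constructor will prefer to clear the whole object first.  */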
4562 mostly_zeros_p (tree exp)
4564 if (TREE_CODE (exp) == CONSTRUCTOR)
4567 HOST_WIDE_INT nz_elts, nc_elts, elts;
4569 /* If there are no ranges of true bits, it is all zero. */
4570 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4571 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4573 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4574 elts = count_type_elements (TREE_TYPE (exp));
4576 return nz_elts < elts / 4;
4579 return initializer_zerop (exp);
4582 /* Helper function for store_constructor.
4583 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4584 TYPE is the type of the CONSTRUCTOR, not the element type.
4585 CLEARED is as for store_constructor.
4586 ALIAS_SET is the alias set to use for any stores.
4588 This provides a recursive shortcut back to store_constructor when it isn't
4589 necessary to go through store_field. This is so that we can pass through
4590 the cleared field to let store_constructor know that we may not have to
4591 clear a substructure if the outer structure has already been cleared. */
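/* For instance, with
     struct pt { int x, y; } v[4] = { { 1, 0 } };
   the whole of V is cleared first (the initializer is mostly zeros and has
   missing elements), so the recursive store_constructor call for the
   element CONSTRUCTOR sees CLEARED set and can skip storing its zero
   field again.  (Illustrative example only.)  */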
4593 static void
4594 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4595 HOST_WIDE_INT bitpos, enum machine_mode mode,
4596 tree exp, tree type, int cleared, int alias_set)
4598 if (TREE_CODE (exp) == CONSTRUCTOR
4599 /* We can only call store_constructor recursively if the size and
4600 bit position are on a byte boundary. */
4601 && bitpos % BITS_PER_UNIT == 0
4602 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4603 /* If we have a nonzero bitpos for a register target, then we just
4604 let store_field do the bitfield handling. This is unlikely to
4605 generate unnecessary clear instructions anyway. */
4606 && (bitpos == 0 || MEM_P (target)))
4608 if (MEM_P (target))
4609 target
4610 = adjust_address (target,
4611 GET_MODE (target) == BLKmode
4612 || 0 != (bitpos
4613 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4614 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4617 /* Update the alias set, if required. */
4618 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4619 && MEM_ALIAS_SET (target) != 0)
4621 target = copy_rtx (target);
4622 set_mem_alias_set (target, alias_set);
4625 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4627 else
4628 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4629 alias_set);
4632 /* Store the value of constructor EXP into the rtx TARGET.
4633 TARGET is either a REG or a MEM; we know it cannot conflict, since
4634 safe_from_p has been called.
4635 CLEARED is true if TARGET is known to have been zero'd.
4636 SIZE is the number of bytes of TARGET we are allowed to modify: this
4637 may not be the same as the size of EXP if we are assigning to a field
4638 which has been packed to exclude padding bits. */
4640 static void
4641 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4643 tree type = TREE_TYPE (exp);
4644 #ifdef WORD_REGISTER_OPERATIONS
4645 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4646 #endif
4648 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4649 || TREE_CODE (type) == QUAL_UNION_TYPE)
4651 tree elt;
4653 /* If size is zero or the target is already cleared, do nothing. */
4654 if (size == 0 || cleared)
4655 cleared = 1;
4656 /* We either clear the aggregate or indicate the value is dead. */
4657 else if ((TREE_CODE (type) == UNION_TYPE
4658 || TREE_CODE (type) == QUAL_UNION_TYPE)
4659 && ! CONSTRUCTOR_ELTS (exp))
4660 /* If the constructor is empty, clear the union. */
4662 clear_storage (target, expr_size (exp));
4663 cleared = 1;
4666 /* If we are building a static constructor into a register,
4667 set the initial value as zero so we can fold the value into
4668 a constant. But if more than one register is involved,
4669 this probably loses. */
4670 else if (REG_P (target) && TREE_STATIC (exp)
4671 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4673 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4674 cleared = 1;
4677 /* If the constructor has fewer fields than the structure
4678 or if we are initializing the structure to mostly zeros,
4679 clear the whole structure first. Don't do this if TARGET is a
4680 register whose mode size isn't equal to SIZE since clear_storage
4681 can't handle this case. */
4682 else if (size > 0
4683 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4684 || mostly_zeros_p (exp))
4685 && (!REG_P (target)
4686 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4687 == size)))
4689 rtx xtarget = target;
4691 if (readonly_fields_p (type))
4693 xtarget = copy_rtx (xtarget);
4694 RTX_UNCHANGING_P (xtarget) = 1;
4697 clear_storage (xtarget, GEN_INT (size));
4698 cleared = 1;
4701 if (! cleared)
4702 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4704 /* Store each element of the constructor into
4705 the corresponding field of TARGET. */
4707 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4709 tree field = TREE_PURPOSE (elt);
4710 tree value = TREE_VALUE (elt);
4711 enum machine_mode mode;
4712 HOST_WIDE_INT bitsize;
4713 HOST_WIDE_INT bitpos = 0;
4714 tree offset;
4715 rtx to_rtx = target;
4717 /* Just ignore missing fields.
4718 We cleared the whole structure, above,
4719 if any fields are missing. */
4720 if (field == 0)
4721 continue;
4723 if (cleared && initializer_zerop (value))
4724 continue;
4726 if (host_integerp (DECL_SIZE (field), 1))
4727 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4728 else
4729 bitsize = -1;
4731 mode = DECL_MODE (field);
4732 if (DECL_BIT_FIELD (field))
4733 mode = VOIDmode;
4735 offset = DECL_FIELD_OFFSET (field);
4736 if (host_integerp (offset, 0)
4737 && host_integerp (bit_position (field), 0))
4739 bitpos = int_bit_position (field);
4740 offset = 0;
4742 else
4743 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4745 if (offset)
4747 rtx offset_rtx;
4749 offset
4750 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4751 make_tree (TREE_TYPE (exp),
4752 target));
4754 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4755 if (!MEM_P (to_rtx))
4756 abort ();
4758 #ifdef POINTERS_EXTEND_UNSIGNED
4759 if (GET_MODE (offset_rtx) != Pmode)
4760 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4761 #else
4762 if (GET_MODE (offset_rtx) != ptr_mode)
4763 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4764 #endif
4766 to_rtx = offset_address (to_rtx, offset_rtx,
4767 highest_pow2_factor (offset));
4770 if (TREE_READONLY (field))
4772 if (MEM_P (to_rtx))
4773 to_rtx = copy_rtx (to_rtx);
4775 RTX_UNCHANGING_P (to_rtx) = 1;
4778 #ifdef WORD_REGISTER_OPERATIONS
4779 /* If this initializes a field that is smaller than a word, at the
4780 start of a word, try to widen it to a full word.
4781 This special case allows us to output C++ member function
4782 initializations in a form that the optimizers can understand. */
4783 if (REG_P (target)
4784 && bitsize < BITS_PER_WORD
4785 && bitpos % BITS_PER_WORD == 0
4786 && GET_MODE_CLASS (mode) == MODE_INT
4787 && TREE_CODE (value) == INTEGER_CST
4788 && exp_size >= 0
4789 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4791 tree type = TREE_TYPE (value);
4793 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4795 type = lang_hooks.types.type_for_size
4796 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4797 value = convert (type, value);
4800 if (BYTES_BIG_ENDIAN)
4801 value
4802 = fold (build (LSHIFT_EXPR, type, value,
4803 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4804 bitsize = BITS_PER_WORD;
4805 mode = word_mode;
4807 #endif
4809 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4810 && DECL_NONADDRESSABLE_P (field))
4812 to_rtx = copy_rtx (to_rtx);
4813 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4816 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4817 value, type, cleared,
4818 get_alias_set (TREE_TYPE (field)));
4821 else if (TREE_CODE (type) == ARRAY_TYPE
4822 || TREE_CODE (type) == VECTOR_TYPE)
4824 tree elt;
4825 int i;
4826 int need_to_clear;
4827 tree domain;
4828 tree elttype = TREE_TYPE (type);
4829 int const_bounds_p;
4830 HOST_WIDE_INT minelt = 0;
4831 HOST_WIDE_INT maxelt = 0;
4832 int icode = 0;
4833 rtx *vector = NULL;
4834 int elt_size = 0;
4835 unsigned n_elts = 0;
4837 if (TREE_CODE (type) == ARRAY_TYPE)
4838 domain = TYPE_DOMAIN (type);
4839 else
4840 /* Vectors do not have domains; look up the domain of
4841 the array embedded in the debug representation type.
4842 FIXME Would probably be more efficient to treat vectors
4843 separately from arrays. */
4845 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4846 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4847 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4849 enum machine_mode mode = GET_MODE (target);
4851 icode = (int) vec_init_optab->handlers[mode].insn_code;
4852 if (icode != CODE_FOR_nothing)
4854 unsigned int i;
4856 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4857 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4858 vector = alloca (n_elts);
4859 for (i = 0; i < n_elts; i++)
4860 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4865 const_bounds_p = (TYPE_MIN_VALUE (domain)
4866 && TYPE_MAX_VALUE (domain)
4867 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4868 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4870 /* If we have constant bounds for the range of the type, get them. */
4871 if (const_bounds_p)
4873 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4874 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4877 /* If the constructor has fewer elements than the array,
4878 clear the whole array first. Similarly if this is
4879 a static constructor of a non-BLKmode object. */
4880 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4881 need_to_clear = 1;
4882 else
4884 HOST_WIDE_INT count = 0, zero_count = 0;
4885 need_to_clear = ! const_bounds_p;
4887 /* This loop is a more accurate version of the loop in
4888 mostly_zeros_p (it handles RANGE_EXPR in an index).
4889 It is also needed to check for missing elements. */
4890 for (elt = CONSTRUCTOR_ELTS (exp);
4891 elt != NULL_TREE && ! need_to_clear;
4892 elt = TREE_CHAIN (elt))
4894 tree index = TREE_PURPOSE (elt);
4895 HOST_WIDE_INT this_node_count;
4897 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4899 tree lo_index = TREE_OPERAND (index, 0);
4900 tree hi_index = TREE_OPERAND (index, 1);
4902 if (! host_integerp (lo_index, 1)
4903 || ! host_integerp (hi_index, 1))
4905 need_to_clear = 1;
4906 break;
4909 this_node_count = (tree_low_cst (hi_index, 1)
4910 - tree_low_cst (lo_index, 1) + 1);
4912 else
4913 this_node_count = 1;
4915 count += this_node_count;
4916 if (mostly_zeros_p (TREE_VALUE (elt)))
4917 zero_count += this_node_count;
4920 /* Clear the entire array first if there are any missing elements,
4921 or if the incidence of zero elements is >= 75%. */
4922 if (! need_to_clear
4923 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4924 need_to_clear = 1;
4927 if (need_to_clear && size > 0 && !vector)
4929 if (! cleared)
4931 if (REG_P (target))
4932 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4933 else
4934 clear_storage (target, GEN_INT (size));
4936 cleared = 1;
4938 else if (REG_P (target))
4939 /* Inform later passes that the old value is dead. */
4940 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4942 /* Store each element of the constructor into
4943 the corresponding element of TARGET, determined
4944 by counting the elements. */
4945 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4946 elt;
4947 elt = TREE_CHAIN (elt), i++)
4949 enum machine_mode mode;
4950 HOST_WIDE_INT bitsize;
4951 HOST_WIDE_INT bitpos;
4952 int unsignedp;
4953 tree value = TREE_VALUE (elt);
4954 tree index = TREE_PURPOSE (elt);
4955 rtx xtarget = target;
4957 if (cleared && initializer_zerop (value))
4958 continue;
4960 unsignedp = TYPE_UNSIGNED (elttype);
4961 mode = TYPE_MODE (elttype);
4962 if (mode == BLKmode)
4963 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4964 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4965 : -1);
4966 else
4967 bitsize = GET_MODE_BITSIZE (mode);
4969 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4971 tree lo_index = TREE_OPERAND (index, 0);
4972 tree hi_index = TREE_OPERAND (index, 1);
4973 rtx index_r, pos_rtx;
4974 HOST_WIDE_INT lo, hi, count;
4975 tree position;
4977 if (vector)
4978 abort ();
4980 /* If the range is constant and "small", unroll the loop. */
4981 if (const_bounds_p
4982 && host_integerp (lo_index, 0)
4983 && host_integerp (hi_index, 0)
4984 && (lo = tree_low_cst (lo_index, 0),
4985 hi = tree_low_cst (hi_index, 0),
4986 count = hi - lo + 1,
4987 (!MEM_P (target)
4988 || count <= 2
4989 || (host_integerp (TYPE_SIZE (elttype), 1)
4990 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4991 <= 40 * 8)))))
4993 lo -= minelt; hi -= minelt;
4994 for (; lo <= hi; lo++)
4996 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4998 if (MEM_P (target)
4999 && !MEM_KEEP_ALIAS_SET_P (target)
5000 && TREE_CODE (type) == ARRAY_TYPE
5001 && TYPE_NONALIASED_COMPONENT (type))
5003 target = copy_rtx (target);
5004 MEM_KEEP_ALIAS_SET_P (target) = 1;
5007 store_constructor_field
5008 (target, bitsize, bitpos, mode, value, type, cleared,
5009 get_alias_set (elttype));
5012 else
5014 rtx loop_start = gen_label_rtx ();
5015 rtx loop_end = gen_label_rtx ();
5016 tree exit_cond;
5018 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5019 unsignedp = TYPE_UNSIGNED (domain);
5021 index = build_decl (VAR_DECL, NULL_TREE, domain);
5023 index_r
5024 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5025 &unsignedp, 0));
5026 SET_DECL_RTL (index, index_r);
5027 store_expr (lo_index, index_r, 0);
5029 /* Build the head of the loop. */
5030 do_pending_stack_adjust ();
5031 emit_queue ();
5032 emit_label (loop_start);
5034 /* Assign value to element index. */
5035 position
5036 = convert (ssizetype,
5037 fold (build (MINUS_EXPR, TREE_TYPE (index),
5038 index, TYPE_MIN_VALUE (domain))));
5039 position = size_binop (MULT_EXPR, position,
5040 convert (ssizetype,
5041 TYPE_SIZE_UNIT (elttype)));
5043 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5044 xtarget = offset_address (target, pos_rtx,
5045 highest_pow2_factor (position));
5046 xtarget = adjust_address (xtarget, mode, 0);
5047 if (TREE_CODE (value) == CONSTRUCTOR)
5048 store_constructor (value, xtarget, cleared,
5049 bitsize / BITS_PER_UNIT);
5050 else
5051 store_expr (value, xtarget, 0);
5053 /* Generate a conditional jump to exit the loop. */
5054 exit_cond = build (LT_EXPR, integer_type_node,
5055 index, hi_index);
5056 jumpif (exit_cond, loop_end);
5058 /* Update the loop counter, and jump to the head of
5059 the loop. */
5060 expand_increment (build (PREINCREMENT_EXPR,
5061 TREE_TYPE (index),
5062 index, integer_one_node), 0, 0);
5063 emit_jump (loop_start);
5065 /* Build the end of the loop. */
5066 emit_label (loop_end);
5069 else if ((index != 0 && ! host_integerp (index, 0))
5070 || ! host_integerp (TYPE_SIZE (elttype), 1))
5072 tree position;
5074 if (vector)
5075 abort ();
5077 if (index == 0)
5078 index = ssize_int (1);
5080 if (minelt)
5081 index = convert (ssizetype,
5082 fold (build (MINUS_EXPR, index,
5083 TYPE_MIN_VALUE (domain))));
5085 position = size_binop (MULT_EXPR, index,
5086 convert (ssizetype,
5087 TYPE_SIZE_UNIT (elttype)));
5088 xtarget = offset_address (target,
5089 expand_expr (position, 0, VOIDmode, 0),
5090 highest_pow2_factor (position));
5091 xtarget = adjust_address (xtarget, mode, 0);
5092 store_expr (value, xtarget, 0);
5094 else if (vector)
5096 int pos;
5098 if (index != 0)
5099 pos = tree_low_cst (index, 0) - minelt;
5100 else
5101 pos = i;
5102 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5104 else
5106 if (index != 0)
5107 bitpos = ((tree_low_cst (index, 0) - minelt)
5108 * tree_low_cst (TYPE_SIZE (elttype), 1));
5109 else
5110 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5112 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5113 && TREE_CODE (type) == ARRAY_TYPE
5114 && TYPE_NONALIASED_COMPONENT (type))
5116 target = copy_rtx (target);
5117 MEM_KEEP_ALIAS_SET_P (target) = 1;
5119 store_constructor_field (target, bitsize, bitpos, mode, value,
5120 type, cleared, get_alias_set (elttype));
5123 if (vector)
5125 emit_insn (GEN_FCN (icode) (target,
5126 gen_rtx_PARALLEL (GET_MODE (target),
5127 gen_rtvec_v (n_elts, vector))));
5131 /* Set constructor assignments. */
5132 else if (TREE_CODE (type) == SET_TYPE)
5134 tree elt = CONSTRUCTOR_ELTS (exp);
5135 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5136 tree domain = TYPE_DOMAIN (type);
5137 tree domain_min, domain_max, bitlength;
5139 /* The default implementation strategy is to extract the constant
5140 parts of the constructor, use that to initialize the target,
5141 and then "or" in whatever non-constant ranges we need in addition.
5143 If a large set is all zero or all ones, it is
5144 probably better to set it using memset.
5145 Also, if a large set has just a single range, it may be
5146 better to first clear the whole set (using memset),
5147 and then set the bits we want. */
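/* A Pascal-style set constructor such as [1, 3..5] over a domain of 0..31
   is thus stored as a 32-bit bitmap: the constant part is typically
   assembled a word at a time below, while ranges that were not folded into
   the constant part are set afterwards, through memset when their bounds
   are constant and byte-aligned, or through setbits_libfunc otherwise.
   (Illustrative example only.)  */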
5149 /* Check for all zeros. */
5150 if (elt == NULL_TREE && size > 0)
5152 if (!cleared)
5153 clear_storage (target, GEN_INT (size));
5154 return;
5157 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5158 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5159 bitlength = size_binop (PLUS_EXPR,
5160 size_diffop (domain_max, domain_min),
5161 ssize_int (1));
5163 nbits = tree_low_cst (bitlength, 1);
5165 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5166 are "complicated" (more than one range), initialize (the
5167 constant parts) by copying from a constant. */
5168 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5169 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5171 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5172 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5173 char *bit_buffer = alloca (nbits);
5174 HOST_WIDE_INT word = 0;
5175 unsigned int bit_pos = 0;
5176 unsigned int ibit = 0;
5177 unsigned int offset = 0; /* In bytes from beginning of set. */
5179 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5180 for (;;)
5182 if (bit_buffer[ibit])
5184 if (BYTES_BIG_ENDIAN)
5185 word |= (1 << (set_word_size - 1 - bit_pos));
5186 else
5187 word |= 1 << bit_pos;
5190 bit_pos++; ibit++;
5191 if (bit_pos >= set_word_size || ibit == nbits)
5193 if (word != 0 || ! cleared)
5195 rtx datum = gen_int_mode (word, mode);
5196 rtx to_rtx;
5198 /* The assumption here is that it is safe to use
5199 XEXP if the set is multi-word, but not if
5200 it's single-word. */
5201 if (MEM_P (target))
5202 to_rtx = adjust_address (target, mode, offset);
5203 else if (offset == 0)
5204 to_rtx = target;
5205 else
5206 abort ();
5207 emit_move_insn (to_rtx, datum);
5210 if (ibit == nbits)
5211 break;
5212 word = 0;
5213 bit_pos = 0;
5214 offset += set_word_size / BITS_PER_UNIT;
5218 else if (!cleared)
5219 /* Don't bother clearing storage if the set is all ones. */
5220 if (TREE_CHAIN (elt) != NULL_TREE
5221 || (TREE_PURPOSE (elt) == NULL_TREE
5222 ? nbits != 1
5223 : ( ! host_integerp (TREE_VALUE (elt), 0)
5224 || ! host_integerp (TREE_PURPOSE (elt), 0)
5225 || (tree_low_cst (TREE_VALUE (elt), 0)
5226 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5227 != (HOST_WIDE_INT) nbits))))
5228 clear_storage (target, expr_size (exp));
5230 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5232 /* Start of range of element or NULL. */
5233 tree startbit = TREE_PURPOSE (elt);
5234 /* End of range of element, or element value. */
5235 tree endbit = TREE_VALUE (elt);
5236 HOST_WIDE_INT startb, endb;
5237 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5239 bitlength_rtx = expand_expr (bitlength,
5240 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5242 /* Handle non-range tuple element like [ expr ]. */
5243 if (startbit == NULL_TREE)
5245 startbit = save_expr (endbit);
5246 endbit = startbit;
5249 startbit = convert (sizetype, startbit);
5250 endbit = convert (sizetype, endbit);
5251 if (! integer_zerop (domain_min))
5253 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5254 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5256 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5257 EXPAND_CONST_ADDRESS);
5258 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5259 EXPAND_CONST_ADDRESS);
5261 if (REG_P (target))
5263 targetx
5264 = assign_temp
5265 ((build_qualified_type (lang_hooks.types.type_for_mode
5266 (GET_MODE (target), 0),
5267 TYPE_QUAL_CONST)),
5268 0, 1, 1);
5269 emit_move_insn (targetx, target);
5272 else if (MEM_P (target))
5273 targetx = target;
5274 else
5275 abort ();
5277 /* Optimization: If startbit and endbit are constants divisible
5278 by BITS_PER_UNIT, call memset instead. */
5279 if (TREE_CODE (startbit) == INTEGER_CST
5280 && TREE_CODE (endbit) == INTEGER_CST
5281 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5282 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5284 emit_library_call (memset_libfunc, LCT_NORMAL,
5285 VOIDmode, 3,
5286 plus_constant (XEXP (targetx, 0),
5287 startb / BITS_PER_UNIT),
5288 Pmode,
5289 constm1_rtx, TYPE_MODE (integer_type_node),
5290 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5291 TYPE_MODE (sizetype));
5293 else
5294 emit_library_call (setbits_libfunc, LCT_NORMAL,
5295 VOIDmode, 4, XEXP (targetx, 0),
5296 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5297 startbit_rtx, TYPE_MODE (sizetype),
5298 endbit_rtx, TYPE_MODE (sizetype));
5300 if (REG_P (target))
5301 emit_move_insn (target, targetx);
5305 else
5306 abort ();
5309 /* Store the value of EXP (an expression tree)
5310 into a subfield of TARGET which has mode MODE and occupies
5311 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5312 If MODE is VOIDmode, it means that we are storing into a bit-field.
5314 If VALUE_MODE is VOIDmode, return nothing in particular.
5315 UNSIGNEDP is not used in this case.
5317 Otherwise, return an rtx for the value stored. This rtx
5318 has mode VALUE_MODE if that is convenient to do.
5319 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5321 TYPE is the type of the underlying object.
5323 ALIAS_SET is the alias set for the destination. This value will
5324 (in general) be different from that for TARGET, since TARGET is a
5325 reference to the containing structure. */
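/* For a bit-field member such as
     struct s { unsigned int flags : 3; };
   a store to s.flags arrives here with MODE == VOIDmode, BITSIZE == 3 and
   BITPOS giving the field's position, and is routed through
   store_bit_field below; suitably aligned fields with a real machine mode
   take the cheaper adjust_address path instead.  (Illustrative example
   only.)  */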
5327 static rtx
5328 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5329 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5330 int unsignedp, tree type, int alias_set)
5332 HOST_WIDE_INT width_mask = 0;
5334 if (TREE_CODE (exp) == ERROR_MARK)
5335 return const0_rtx;
5337 /* If we have nothing to store, do nothing unless the expression has
5338 side-effects. */
5339 if (bitsize == 0)
5340 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5341 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5342 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5344 /* If we are storing into an unaligned field of an aligned union that is
5345 in a register, we may have the mode of TARGET being an integer mode but
5346 MODE == BLKmode. In that case, get an aligned object whose size and
5347 alignment are the same as TARGET and store TARGET into it (we can avoid
5348 the store if the field being stored is the entire width of TARGET). Then
5349 call ourselves recursively to store the field into a BLKmode version of
5350 that object. Finally, load from the object into TARGET. This is not
5351 very efficient in general, but should only be slightly more expensive
5352 than the otherwise-required unaligned accesses. Perhaps this can be
5353 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5354 twice, once with emit_move_insn and once via store_field. */
5356 if (mode == BLKmode
5357 && (REG_P (target) || GET_CODE (target) == SUBREG))
5359 rtx object = assign_temp (type, 0, 1, 1);
5360 rtx blk_object = adjust_address (object, BLKmode, 0);
5362 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5363 emit_move_insn (object, target);
5365 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5366 alias_set);
5368 emit_move_insn (target, object);
5370 /* We want to return the BLKmode version of the data. */
5371 return blk_object;
5374 if (GET_CODE (target) == CONCAT)
5376 /* We're storing into a struct containing a single __complex. */
5378 if (bitpos != 0)
5379 abort ();
5380 return store_expr (exp, target, value_mode != VOIDmode);
5383 /* If the structure is in a register or if the component
5384 is a bit field, we cannot use addressing to access it.
5385 Use bit-field techniques or SUBREG to store in it. */
5387 if (mode == VOIDmode
5388 || (mode != BLKmode && ! direct_store[(int) mode]
5389 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5390 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5391 || REG_P (target)
5392 || GET_CODE (target) == SUBREG
5393 /* If the field isn't aligned enough to store as an ordinary memref,
5394 store it as a bit field. */
5395 || (mode != BLKmode
5396 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5397 || bitpos % GET_MODE_ALIGNMENT (mode))
5398 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5399 || (bitpos % BITS_PER_UNIT != 0)))
5400 /* If the RHS and field are a constant size and the size of the
5401 RHS isn't the same size as the bitfield, we must use bitfield
5402 operations. */
5403 || (bitsize >= 0
5404 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5405 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5407 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5409 /* If BITSIZE is narrower than the size of the type of EXP
5410 we will be narrowing TEMP. Normally, what's wanted are the
5411 low-order bits. However, if EXP's type is a record and this is a
5412 big-endian machine, we want the upper BITSIZE bits. */
5413 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5414 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5415 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5416 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5417 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5418 - bitsize),
5419 NULL_RTX, 1);
5421 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5422 MODE. */
5423 if (mode != VOIDmode && mode != BLKmode
5424 && mode != TYPE_MODE (TREE_TYPE (exp)))
5425 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5427 /* If the modes of TARGET and TEMP are both BLKmode, both
5428 must be in memory and BITPOS must be aligned on a byte
5429 boundary. If so, we simply do a block copy. */
5430 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5432 if (!MEM_P (target) || !MEM_P (temp)
5433 || bitpos % BITS_PER_UNIT != 0)
5434 abort ();
5436 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5437 emit_block_move (target, temp,
5438 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5439 / BITS_PER_UNIT),
5440 BLOCK_OP_NORMAL);
5442 return value_mode == VOIDmode ? const0_rtx : target;
5445 /* Store the value in the bitfield. */
5446 store_bit_field (target, bitsize, bitpos, mode, temp);
5448 if (value_mode != VOIDmode)
5450 /* The caller wants an rtx for the value.
5451 If possible, avoid refetching from the bitfield itself. */
5452 if (width_mask != 0
5453 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5455 tree count;
5456 enum machine_mode tmode;
5458 tmode = GET_MODE (temp);
5459 if (tmode == VOIDmode)
5460 tmode = value_mode;
5462 if (unsignedp)
5463 return expand_and (tmode, temp,
5464 gen_int_mode (width_mask, tmode),
5465 NULL_RTX);
5467 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5468 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5469 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5472 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5473 NULL_RTX, value_mode, VOIDmode);
5475 return const0_rtx;
5477 else
5479 rtx addr = XEXP (target, 0);
5480 rtx to_rtx = target;
5482 /* If a value is wanted, it must be the lhs;
5483 so make the address stable for multiple use. */
5485 if (value_mode != VOIDmode && !REG_P (addr)
5486 && ! CONSTANT_ADDRESS_P (addr)
5487 /* A frame-pointer reference is already stable. */
5488 && ! (GET_CODE (addr) == PLUS
5489 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5490 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5491 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5492 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5494 /* Now build a reference to just the desired component. */
5496 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5498 if (to_rtx == target)
5499 to_rtx = copy_rtx (to_rtx);
5501 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5502 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5503 set_mem_alias_set (to_rtx, alias_set);
5505 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5509 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5510 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5511 codes and find the ultimate containing object, which we return.
5513 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5514 bit position, and *PUNSIGNEDP to the signedness of the field.
5515 If the position of the field is variable, we store a tree
5516 giving the variable offset (in units) in *POFFSET.
5517 This offset is in addition to the bit position.
5518 If the position is not variable, we store 0 in *POFFSET.
5520 If any of the extraction expressions is volatile,
5521 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5523 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5524 is a mode that can be used to access the field. In that case, *PBITSIZE
5525 is redundant.
5527 If the field describes a variable-sized object, *PMODE is set to
5528 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5529 this case, but the address of the object can be found. */
5531 tree
5532 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5533 HOST_WIDE_INT *pbitpos, tree *poffset,
5534 enum machine_mode *pmode, int *punsignedp,
5535 int *pvolatilep)
5537 tree size_tree = 0;
5538 enum machine_mode mode = VOIDmode;
5539 tree offset = size_zero_node;
5540 tree bit_offset = bitsize_zero_node;
5541 tree tem;
5543 /* First get the mode, signedness, and size. We do this from just the
5544 outermost expression. */
5545 if (TREE_CODE (exp) == COMPONENT_REF)
5547 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5548 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5549 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5551 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5553 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5555 size_tree = TREE_OPERAND (exp, 1);
5556 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5558 else
5560 mode = TYPE_MODE (TREE_TYPE (exp));
5561 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5563 if (mode == BLKmode)
5564 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5565 else
5566 *pbitsize = GET_MODE_BITSIZE (mode);
5569 if (size_tree != 0)
5571 if (! host_integerp (size_tree, 1))
5572 mode = BLKmode, *pbitsize = -1;
5573 else
5574 *pbitsize = tree_low_cst (size_tree, 1);
5577 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5578 and find the ultimate containing object. */
5579 while (1)
5581 if (TREE_CODE (exp) == BIT_FIELD_REF)
5582 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5583 else if (TREE_CODE (exp) == COMPONENT_REF)
5585 tree field = TREE_OPERAND (exp, 1);
5586 tree this_offset = component_ref_field_offset (exp);
5588 /* If this field hasn't been filled in yet, don't go
5589 past it. This should only happen when folding expressions
5590 made during type construction. */
5591 if (this_offset == 0)
5592 break;
5594 offset = size_binop (PLUS_EXPR, offset, this_offset);
5595 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5596 DECL_FIELD_BIT_OFFSET (field));
5598 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5601 else if (TREE_CODE (exp) == ARRAY_REF
5602 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5604 tree index = TREE_OPERAND (exp, 1);
5605 tree low_bound = array_ref_low_bound (exp);
5606 tree unit_size = array_ref_element_size (exp);
5608 /* We assume all arrays have sizes that are a multiple of a byte.
5609 First subtract the lower bound, if any, in the type of the
5610 index, then convert to sizetype and multiply by the size of the
5611 array element. */
5612 if (! integer_zerop (low_bound))
5613 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5614 index, low_bound));
5616 offset = size_binop (PLUS_EXPR, offset,
5617 size_binop (MULT_EXPR,
5618 convert (sizetype, index),
5619 unit_size));
5622 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5623 conversions that don't change the mode, and all view conversions
5624 except those that need to "step up" the alignment. */
5625 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5626 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5627 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5628 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5629 && STRICT_ALIGNMENT
5630 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5631 < BIGGEST_ALIGNMENT)
5632 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5633 || TYPE_ALIGN_OK (TREE_TYPE
5634 (TREE_OPERAND (exp, 0))))))
5635 && ! ((TREE_CODE (exp) == NOP_EXPR
5636 || TREE_CODE (exp) == CONVERT_EXPR)
5637 && (TYPE_MODE (TREE_TYPE (exp))
5638 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5639 break;
5641 /* If any reference in the chain is volatile, the effect is volatile. */
5642 if (TREE_THIS_VOLATILE (exp))
5643 *pvolatilep = 1;
5645 exp = TREE_OPERAND (exp, 0);
5648 /* If OFFSET is constant, see if we can return the whole thing as a
5649 constant bit position. Otherwise, split it up. */
5650 if (host_integerp (offset, 0)
5651 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5652 bitsize_unit_node))
5653 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5654 && host_integerp (tem, 0))
5655 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5656 else
5657 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5659 *pmode = mode;
5660 return exp;
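/* Illustrative sketch, not part of the compiler proper: a caller that
   wants to decompose a reference such as S.F, where F is a 5-bit
   bit-field starting at bit 3 of S, typically writes something like

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);

   after which BASE is S, BITSIZE is 5, BITPOS is 3, OFFSET is 0 because
   the position is constant, and MODE1 is VOIDmode because F is a
   bit-field.  REF and the other local names are hypothetical.  */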
5663 /* Return a tree of sizetype representing the size, in bytes, of the element
5664 of EXP, an ARRAY_REF. */
5666 tree
5667 array_ref_element_size (tree exp)
5669 tree aligned_size = TREE_OPERAND (exp, 3);
5670 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5672 /* If a size was specified in the ARRAY_REF, it's the size measured
5673 in alignment units of the element type. So multiply by that value. */
5674 if (aligned_size)
5675 return size_binop (MULT_EXPR, aligned_size,
5676 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5678 /* Otherwise, take the size from that of the element type. Substitute
5679 any PLACEHOLDER_EXPR that we have. */
5680 else
5681 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
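/* Illustrative sketch (hypothetical ARRAY_REF named AREF): for an array
   of 4-byte integers with no explicit size operand,

     tree sz = array_ref_element_size (aref);

   yields the sizetype constant 4.  When operand 3 is present it is
   counted in units of TYPE_ALIGN (element type) / BITS_PER_UNIT, which
   is why the multiplication above rescales it back to bytes.  */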
5684 /* Return a tree representing the lower bound of the array mentioned in
5685 EXP, an ARRAY_REF. */
5687 tree
5688 array_ref_low_bound (tree exp)
5690 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5692 /* If a lower bound is specified in EXP, use it. */
5693 if (TREE_OPERAND (exp, 2))
5694 return TREE_OPERAND (exp, 2);
5696 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5697 substituting for a PLACEHOLDER_EXPR as needed. */
5698 if (domain_type && TYPE_MIN_VALUE (domain_type))
5699 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5701 /* Otherwise, return a zero of the appropriate type. */
5702 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
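/* Illustrative sketch: for a C array declared as int a[10] the domain
   minimum is 0, so

     tree lb = array_ref_low_bound (aref);

   returns integer zero, while for an Ada or Fortran array with bounds
   1..N it returns the tree for 1.  AREF is a hypothetical ARRAY_REF.  */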
5705 /* Return a tree representing the upper bound of the array mentioned in
5706 EXP, an ARRAY_REF. */
5708 tree
5709 array_ref_up_bound (tree exp)
5711 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5713 /* If there is a domain type and it has an upper bound, use it, substituting
5714 for a PLACEHOLDER_EXPR as needed. */
5715 if (domain_type && TYPE_MAX_VALUE (domain_type))
5716 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5718 /* Otherwise fail. */
5719 return NULL_TREE;
5722 /* Return a tree representing the offset, in bytes, of the field referenced
5723 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5725 tree
5726 component_ref_field_offset (tree exp)
5728 tree aligned_offset = TREE_OPERAND (exp, 2);
5729 tree field = TREE_OPERAND (exp, 1);
5731 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5732 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5733 value. */
5734 if (aligned_offset)
5735 return size_binop (MULT_EXPR, aligned_offset,
5736 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5738 /* Otherwise, take the offset from that of the field. Substitute
5739 any PLACEHOLDER_EXPR that we have. */
5740 else
5741 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
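/* Illustrative sketch: for a COMPONENT_REF naming field B of
   struct { int a; int b; } laid out in the usual way,

     tree off = component_ref_field_offset (cref);

   returns the sizetype constant 4; any remaining bit offset from
   DECL_FIELD_BIT_OFFSET is reported separately, e.g. by
   get_inner_reference.  CREF is a hypothetical COMPONENT_REF.  */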
5744 /* Return 1 if T is an expression that get_inner_reference handles. */
5746 int
5747 handled_component_p (tree t)
5749 switch (TREE_CODE (t))
5751 case BIT_FIELD_REF:
5752 case COMPONENT_REF:
5753 case ARRAY_REF:
5754 case ARRAY_RANGE_REF:
5755 case NON_LVALUE_EXPR:
5756 case VIEW_CONVERT_EXPR:
5757 return 1;
5759 /* ??? Sure they are handled, but get_inner_reference may return
5760 a different PBITSIZE, depending upon whether the expression is
5761 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5762 case NOP_EXPR:
5763 case CONVERT_EXPR:
5764 return (TYPE_MODE (TREE_TYPE (t))
5765 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5767 default:
5768 return 0;
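/* Illustrative sketch: callers commonly use handled_component_p to
   strip a reference down to its base object before (or instead of)
   calling get_inner_reference, e.g.

     while (handled_component_p (t))
       t = TREE_OPERAND (t, 0);

   where T is a hypothetical reference tree; this peels COMPONENT_REFs,
   ARRAY_REFs and the other codes accepted above.  */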
5772 /* Given an rtx VALUE that may contain additions and multiplications, return
5773 an equivalent value that just refers to a register, memory, or constant.
5774 This is done by generating instructions to perform the arithmetic and
5775 returning a pseudo-register containing the value.
5777 The returned value may be a REG, SUBREG, MEM or constant. */
5779 rtx
5780 force_operand (rtx value, rtx target)
5782 rtx op1, op2;
5783 /* Use subtarget as the target for operand 0 of a binary operation. */
5784 rtx subtarget = get_subtarget (target);
5785 enum rtx_code code = GET_CODE (value);
5787 /* Check for subreg applied to an expression produced by loop optimizer. */
5788 if (code == SUBREG
5789 && !REG_P (SUBREG_REG (value))
5790 && !MEM_P (SUBREG_REG (value)))
5792 value = simplify_gen_subreg (GET_MODE (value),
5793 force_reg (GET_MODE (SUBREG_REG (value)),
5794 force_operand (SUBREG_REG (value),
5795 NULL_RTX)),
5796 GET_MODE (SUBREG_REG (value)),
5797 SUBREG_BYTE (value));
5798 code = GET_CODE (value);
5801 /* Check for a PIC address load. */
5802 if ((code == PLUS || code == MINUS)
5803 && XEXP (value, 0) == pic_offset_table_rtx
5804 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5805 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5806 || GET_CODE (XEXP (value, 1)) == CONST))
5808 if (!subtarget)
5809 subtarget = gen_reg_rtx (GET_MODE (value));
5810 emit_move_insn (subtarget, value);
5811 return subtarget;
5814 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5816 if (!target)
5817 target = gen_reg_rtx (GET_MODE (value));
5818 convert_move (target, force_operand (XEXP (value, 0), NULL),
5819 code == ZERO_EXTEND);
5820 return target;
5823 if (ARITHMETIC_P (value))
5825 op2 = XEXP (value, 1);
5826 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5827 subtarget = 0;
5828 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5830 code = PLUS;
5831 op2 = negate_rtx (GET_MODE (value), op2);
5834 /* Check for an addition with OP2 a constant integer and our first
5835 operand a PLUS of a virtual register and something else. In that
5836 case, we want to emit the sum of the virtual register and the
5837 constant first and then add the other value. This allows virtual
5838 register instantiation to simply modify the constant rather than
5839 creating another one around this addition. */
5840 if (code == PLUS && GET_CODE (op2) == CONST_INT
5841 && GET_CODE (XEXP (value, 0)) == PLUS
5842 && REG_P (XEXP (XEXP (value, 0), 0))
5843 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5844 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5846 rtx temp = expand_simple_binop (GET_MODE (value), code,
5847 XEXP (XEXP (value, 0), 0), op2,
5848 subtarget, 0, OPTAB_LIB_WIDEN);
5849 return expand_simple_binop (GET_MODE (value), code, temp,
5850 force_operand (XEXP (XEXP (value,
5851 0), 1), 0),
5852 target, 0, OPTAB_LIB_WIDEN);
5855 op1 = force_operand (XEXP (value, 0), subtarget);
5856 op2 = force_operand (op2, NULL_RTX);
5857 switch (code)
5859 case MULT:
5860 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5861 case DIV:
5862 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5863 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5864 target, 1, OPTAB_LIB_WIDEN);
5865 else
5866 return expand_divmod (0,
5867 FLOAT_MODE_P (GET_MODE (value))
5868 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5869 GET_MODE (value), op1, op2, target, 0);
5870 break;
5871 case MOD:
5872 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5873 target, 0);
5874 break;
5875 case UDIV:
5876 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5877 target, 1);
5878 break;
5879 case UMOD:
5880 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5881 target, 1);
5882 break;
5883 case ASHIFTRT:
5884 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5885 target, 0, OPTAB_LIB_WIDEN);
5886 break;
5887 default:
5888 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5889 target, 1, OPTAB_LIB_WIDEN);
5892 if (UNARY_P (value))
5894 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5895 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5898 #ifdef INSN_SCHEDULING
5899 /* On machines that have insn scheduling, we want all memory references to be
5900 explicit, so we need to deal with such paradoxical SUBREGs. */
5901 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5902 && (GET_MODE_SIZE (GET_MODE (value))
5903 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5904 value
5905 = simplify_gen_subreg (GET_MODE (value),
5906 force_reg (GET_MODE (SUBREG_REG (value)),
5907 force_operand (SUBREG_REG (value),
5908 NULL_RTX)),
5909 GET_MODE (SUBREG_REG (value)),
5910 SUBREG_BYTE (value));
5911 #endif
5913 return value;
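/* Illustrative sketch: given an address-like rtx such as
   (plus (mult (reg) (const_int 4)) (const_int 8)), a hypothetical
   caller can write

     rtx op = force_operand (value, NULL_RTX);

   and receives a pseudo register (or a simpler REG, MEM or constant)
   holding the computed value, with the multiply and add emitted as
   ordinary insns.  */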
5916 /* Subroutine of expand_expr: return nonzero iff there is no way that
5917 EXP can reference X, which is being modified. TOP_P is nonzero if this
5918 call is going to be used to determine whether we need a temporary
5919 for EXP, as opposed to a recursive call to this function.
5921 It is always safe for this routine to return zero since it merely
5922 searches for optimization opportunities. */
5924 static int
5925 safe_from_p (rtx x, tree exp, int top_p)
5927 rtx exp_rtl = 0;
5928 int i, nops;
5930 if (x == 0
5931 /* If EXP has varying size, we MUST use a target since we currently
5932 have no way of allocating temporaries of variable size
5933 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5934 So we assume here that something at a higher level has prevented a
5935 clash. This is somewhat bogus, but the best we can do. Only
5936 do this when X is BLKmode and when we are at the top level. */
5937 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5938 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5939 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5940 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5941 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5942 != INTEGER_CST)
5943 && GET_MODE (x) == BLKmode)
5944 /* If X is in the outgoing argument area, it is always safe. */
5945 || (MEM_P (x)
5946 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5947 || (GET_CODE (XEXP (x, 0)) == PLUS
5948 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5949 return 1;
5951 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5952 find the underlying pseudo. */
5953 if (GET_CODE (x) == SUBREG)
5955 x = SUBREG_REG (x);
5956 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5957 return 0;
5960 /* Now look at our tree code and possibly recurse. */
5961 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5963 case 'd':
5964 exp_rtl = DECL_RTL_IF_SET (exp);
5965 break;
5967 case 'c':
5968 return 1;
5970 case 'x':
5971 if (TREE_CODE (exp) == TREE_LIST)
5973 while (1)
5975 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5976 return 0;
5977 exp = TREE_CHAIN (exp);
5978 if (!exp)
5979 return 1;
5980 if (TREE_CODE (exp) != TREE_LIST)
5981 return safe_from_p (x, exp, 0);
5984 else if (TREE_CODE (exp) == ERROR_MARK)
5985 return 1; /* An already-visited SAVE_EXPR? */
5986 else
5987 return 0;
5989 case 's':
5990 /* The only case we look at here is the DECL_INITIAL inside a
5991 DECL_EXPR. */
5992 return (TREE_CODE (exp) != DECL_EXPR
5993 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5994 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5995 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5997 case '2':
5998 case '<':
5999 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6000 return 0;
6001 /* Fall through. */
6003 case '1':
6004 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6006 case 'e':
6007 case 'r':
6008 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6009 the expression. If it is set, we conflict iff we are that rtx or
6010 both are in memory. Otherwise, we check all operands of the
6011 expression recursively. */
6013 switch (TREE_CODE (exp))
6015 case ADDR_EXPR:
6016 /* If the operand is static or we are static, we can't conflict.
6017 Likewise if we don't conflict with the operand at all. */
6018 if (staticp (TREE_OPERAND (exp, 0))
6019 || TREE_STATIC (exp)
6020 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6021 return 1;
6023 /* Otherwise, the only way this can conflict is if we are taking
6024 the address of a DECL whose address is part of X, which is
6025 very rare. */
6026 exp = TREE_OPERAND (exp, 0);
6027 if (DECL_P (exp))
6029 if (!DECL_RTL_SET_P (exp)
6030 || !MEM_P (DECL_RTL (exp)))
6031 return 0;
6032 else
6033 exp_rtl = XEXP (DECL_RTL (exp), 0);
6035 break;
6037 case INDIRECT_REF:
6038 if (MEM_P (x)
6039 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6040 get_alias_set (exp)))
6041 return 0;
6042 break;
6044 case CALL_EXPR:
6045 /* Assume that the call will clobber all hard registers and
6046 all of memory. */
6047 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6048 || MEM_P (x))
6049 return 0;
6050 break;
6052 case WITH_CLEANUP_EXPR:
6053 case CLEANUP_POINT_EXPR:
6054 /* Lowered by gimplify.c. */
6055 abort ();
6057 case SAVE_EXPR:
6058 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6060 case BIND_EXPR:
6061 /* The only operand we look at is operand 1. The rest aren't
6062 part of the expression. */
6063 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6065 default:
6066 break;
6069 /* If we have an rtx, we do not need to scan our operands. */
6070 if (exp_rtl)
6071 break;
6073 nops = first_rtl_op (TREE_CODE (exp));
6074 for (i = 0; i < nops; i++)
6075 if (TREE_OPERAND (exp, i) != 0
6076 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6077 return 0;
6079 /* If this is a language-specific tree code, it may require
6080 special handling. */
6081 if ((unsigned int) TREE_CODE (exp)
6082 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6083 && !lang_hooks.safe_from_p (x, exp))
6084 return 0;
6087 /* If we have an rtl, find any enclosed object. Then see if we conflict
6088 with it. */
6089 if (exp_rtl)
6091 if (GET_CODE (exp_rtl) == SUBREG)
6093 exp_rtl = SUBREG_REG (exp_rtl);
6094 if (REG_P (exp_rtl)
6095 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6096 return 0;
6099 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6100 are memory and they conflict. */
6101 return ! (rtx_equal_p (x, exp_rtl)
6102 || (MEM_P (x) && MEM_P (exp_rtl)
6103 && true_dependence (exp_rtl, VOIDmode, x,
6104 rtx_addr_varies_p)));
6107 /* If we reach here, it is safe. */
6108 return 1;
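/* Illustrative sketch: the usual pattern is to clear TARGET before
   reusing it as scratch space if the other operand might read it,
   exactly as expand_operands below does:

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   A return value of zero is always conservative, so callers lose only
   an optimization opportunity, never correctness.  */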
6111 /* Subroutine of expand_expr: return rtx if EXP is a
6112 variable or parameter; else return 0. */
6114 static rtx
6115 var_rtx (tree exp)
6117 STRIP_NOPS (exp);
6118 switch (TREE_CODE (exp))
6120 case PARM_DECL:
6121 case VAR_DECL:
6122 return DECL_RTL (exp);
6123 default:
6124 return 0;
6128 /* Return the highest power of two that EXP is known to be a multiple of.
6129 This is used in updating alignment of MEMs in array references. */
6131 static unsigned HOST_WIDE_INT
6132 highest_pow2_factor (tree exp)
6134 unsigned HOST_WIDE_INT c0, c1;
6136 switch (TREE_CODE (exp))
6138 case INTEGER_CST:
6139 /* We can find the lowest bit that's a one. If the low
6140 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6141 We need to handle this case since we can find it in a COND_EXPR,
6142 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6143 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6144 later ICE. */
6145 if (TREE_CONSTANT_OVERFLOW (exp))
6146 return BIGGEST_ALIGNMENT;
6147 else
6149 /* Note: tree_low_cst is intentionally not used here,
6150 since we don't care about the upper bits. */
6151 c0 = TREE_INT_CST_LOW (exp);
6152 c0 &= -c0;
6153 return c0 ? c0 : BIGGEST_ALIGNMENT;
6155 break;
6157 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6158 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6159 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6160 return MIN (c0, c1);
6162 case MULT_EXPR:
6163 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6164 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6165 return c0 * c1;
6167 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6168 case CEIL_DIV_EXPR:
6169 if (integer_pow2p (TREE_OPERAND (exp, 1))
6170 && host_integerp (TREE_OPERAND (exp, 1), 1))
6172 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6173 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6174 return MAX (1, c0 / c1);
6176 break;
6178 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6179 case SAVE_EXPR:
6180 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6182 case COMPOUND_EXPR:
6183 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6185 case COND_EXPR:
6186 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6187 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6188 return MIN (c0, c1);
6190 default:
6191 break;
6194 return 1;
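/* Illustrative worked example for the cases above, using the
   hypothetical offset expression I * 4 + 16:

     highest_pow2_factor (I * 4)      == 1 * 4 == 4
     highest_pow2_factor (16)         == 16
     highest_pow2_factor (I * 4 + 16) == MIN (4, 16) == 4

   so a MEM addressed with that offset may be assumed 4-byte aligned.  */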
6197 /* Similar, except that the alignment requirements of TARGET are
6198 taken into account. Assume it is at least as aligned as its
6199 type, unless it is a COMPONENT_REF in which case the layout of
6200 the structure gives the alignment. */
6202 static unsigned HOST_WIDE_INT
6203 highest_pow2_factor_for_target (tree target, tree exp)
6205 unsigned HOST_WIDE_INT target_align, factor;
6207 factor = highest_pow2_factor (exp);
6208 if (TREE_CODE (target) == COMPONENT_REF)
6209 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6210 else
6211 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6212 return MAX (factor, target_align);
6215 /* Expands variable VAR. */
6217 void
6218 expand_var (tree var)
6220 if (DECL_EXTERNAL (var))
6221 return;
6223 if (TREE_STATIC (var))
6224 /* If this is an inlined copy of a static local variable,
6225 look up the original decl. */
6226 var = DECL_ORIGIN (var);
6228 if (TREE_STATIC (var)
6229 ? !TREE_ASM_WRITTEN (var)
6230 : !DECL_RTL_SET_P (var))
6232 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6234 /* Prepare a mem & address for the decl. */
6235 rtx x;
6237 if (TREE_STATIC (var))
6238 abort ();
6240 x = gen_rtx_MEM (DECL_MODE (var),
6241 gen_reg_rtx (Pmode));
6243 set_mem_attributes (x, var, 1);
6244 SET_DECL_RTL (var, x);
6246 else if (lang_hooks.expand_decl (var))
6247 /* OK. */;
6248 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6249 expand_decl (var);
6250 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6251 rest_of_decl_compilation (var, NULL, 0, 0);
6252 else if (TREE_CODE (var) == TYPE_DECL
6253 || TREE_CODE (var) == CONST_DECL
6254 || TREE_CODE (var) == FUNCTION_DECL
6255 || TREE_CODE (var) == LABEL_DECL)
6256 /* No expansion needed. */;
6257 else
6258 abort ();
6262 /* Expands declarations of variables in list VARS. */
6264 static void
6265 expand_vars (tree vars)
6267 for (; vars; vars = TREE_CHAIN (vars))
6269 tree var = vars;
6271 if (DECL_EXTERNAL (var))
6272 continue;
6274 expand_var (var);
6275 expand_decl_init (var);
6279 /* Subroutine of expand_expr. Expand the two operands of a binary
6280 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6281 The value may be stored in TARGET if TARGET is nonzero. The
6282 MODIFIER argument is as documented by expand_expr. */
6284 static void
6285 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6286 enum expand_modifier modifier)
6288 if (! safe_from_p (target, exp1, 1))
6289 target = 0;
6290 if (operand_equal_p (exp0, exp1, 0))
6292 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6293 *op1 = copy_rtx (*op0);
6295 else
6297 /* If we need to preserve evaluation order, copy exp0 into its own
6298 temporary variable so that it can't be clobbered by exp1. */
6299 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6300 exp0 = save_expr (exp0);
6301 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6302 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
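/* Illustrative sketch: for a hypothetical binary expression A + B in
   which B has side effects and flag_evaluation_order is set,

     expand_operands (a, b, target, &op0, &op1, EXPAND_NORMAL);

   first wraps A in a SAVE_EXPR so that expanding B cannot clobber it,
   and hands back the two operand rtxes in OP0 and OP1.  */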
6307 /* expand_expr: generate code for computing expression EXP.
6308 An rtx for the computed value is returned. The value is never null.
6309 In the case of a void EXP, const0_rtx is returned.
6311 The value may be stored in TARGET if TARGET is nonzero.
6312 TARGET is just a suggestion; callers must assume that
6313 the rtx returned may not be the same as TARGET.
6315 If TARGET is CONST0_RTX, it means that the value will be ignored.
6317 If TMODE is not VOIDmode, it suggests generating the
6318 result in mode TMODE. But this is done only when convenient.
6319 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6320 TMODE is just a suggestion; callers must assume that
6321 the rtx returned may not have mode TMODE.
6323 Note that TARGET may have neither TMODE nor MODE. In that case, it
6324 probably will not be used.
6326 If MODIFIER is EXPAND_SUM then when EXP is an addition
6327 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6328 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6329 products as above, or REG or MEM, or constant.
6330 Ordinarily in such cases we would output mul or add instructions
6331 and then return a pseudo reg containing the sum.
6333 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6334 it also marks a label as absolutely required (it can't be dead).
6335 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6336 This is used for outputting expressions used in initializers.
6338 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6339 with a constant address even if that address is not normally legitimate.
6340 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6342 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6343 a call parameter. Such targets require special care as we haven't yet
6344 marked TARGET so that it's safe from being trashed by libcalls. We
6345 don't want to use TARGET for anything but the final result;
6346 intermediate values must go elsewhere. Additionally, calls to
6347 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6349 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6350 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6351 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6352 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6353 recursively. */
6355 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6356 enum expand_modifier, rtx *);
6358 rtx
6359 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6360 enum expand_modifier modifier, rtx *alt_rtl)
6362 int rn = -1;
6363 rtx ret, last = NULL;
6365 /* Handle ERROR_MARK before anybody tries to access its type. */
6366 if (TREE_CODE (exp) == ERROR_MARK
6367 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6369 ret = CONST0_RTX (tmode);
6370 return ret ? ret : const0_rtx;
6373 if (flag_non_call_exceptions)
6375 rn = lookup_stmt_eh_region (exp);
6376 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6377 if (rn >= 0)
6378 last = get_last_insn ();
6381 /* If this is an expression of some kind and it has an associated line
6382 number, then emit the line number before expanding the expression.
6384 We need to save and restore the file and line information so that
6385 errors discovered during expansion are emitted with the right
6386 information. It would be better if the diagnostic routines
6387 used the file/line information embedded in the tree nodes rather
6388 than globals. */
6389 if (cfun && EXPR_HAS_LOCATION (exp))
6391 location_t saved_location = input_location;
6392 input_location = EXPR_LOCATION (exp);
6393 emit_line_note (input_location);
6395 /* Record where the insns produced belong. */
6396 record_block_change (TREE_BLOCK (exp));
6398 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6400 input_location = saved_location;
6402 else
6404 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6407 /* If using non-call exceptions, mark all insns that may trap.
6408 expand_call() will mark CALL_INSNs before we get to this code,
6409 but it doesn't handle libcalls, and these may trap. */
6410 if (rn >= 0)
6412 rtx insn;
6413 for (insn = next_real_insn (last); insn;
6414 insn = next_real_insn (insn))
6416 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6417 /* If we want exceptions for non-call insns, any
6418 may_trap_p instruction may throw. */
6419 && GET_CODE (PATTERN (insn)) != CLOBBER
6420 && GET_CODE (PATTERN (insn)) != USE
6421 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6423 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6424 REG_NOTES (insn));
6429 return ret;
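/* Illustrative sketch: most of the compiler reaches this code through
   the expand_expr wrapper, e.g.

     rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   which passes a null ALT_RTL down to expand_expr_real.  TARGET and
   TMODE are only hints, so the caller must be prepared for X to have
   some other mode or location.  */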
6432 static rtx
6433 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6434 enum expand_modifier modifier, rtx *alt_rtl)
6436 rtx op0, op1, temp;
6437 tree type = TREE_TYPE (exp);
6438 int unsignedp;
6439 enum machine_mode mode;
6440 enum tree_code code = TREE_CODE (exp);
6441 optab this_optab;
6442 rtx subtarget, original_target;
6443 int ignore;
6444 tree context;
6445 bool reduce_bit_field = false;
6446 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6447 ? reduce_to_bit_field_precision ((expr), \
6448 target, \
6449 type) \
6450 : (expr))
6452 mode = TYPE_MODE (type);
6453 unsignedp = TYPE_UNSIGNED (type);
6454 if (lang_hooks.reduce_bit_field_operations
6455 && TREE_CODE (type) == INTEGER_TYPE
6456 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6458 /* An operation in what may be a bit-field type needs the
6459 result to be reduced to the precision of the bit-field type,
6460 which is narrower than that of the type's mode. */
6461 reduce_bit_field = true;
6462 if (modifier == EXPAND_STACK_PARM)
6463 target = 0;
6466 /* Use subtarget as the target for operand 0 of a binary operation. */
6467 subtarget = get_subtarget (target);
6468 original_target = target;
6469 ignore = (target == const0_rtx
6470 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6471 || code == CONVERT_EXPR || code == COND_EXPR
6472 || code == VIEW_CONVERT_EXPR)
6473 && TREE_CODE (type) == VOID_TYPE));
6475 /* If we are going to ignore this result, we need only do something
6476 if there is a side-effect somewhere in the expression. If there
6477 is, short-circuit the most common cases here. Note that we must
6478 not call expand_expr with anything but const0_rtx in case this
6479 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6481 if (ignore)
6483 if (! TREE_SIDE_EFFECTS (exp))
6484 return const0_rtx;
6486 /* Ensure we reference a volatile object even if value is ignored, but
6487 don't do this if all we are doing is taking its address. */
6488 if (TREE_THIS_VOLATILE (exp)
6489 && TREE_CODE (exp) != FUNCTION_DECL
6490 && mode != VOIDmode && mode != BLKmode
6491 && modifier != EXPAND_CONST_ADDRESS)
6493 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6494 if (MEM_P (temp))
6495 temp = copy_to_reg (temp);
6496 return const0_rtx;
6499 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6500 || code == INDIRECT_REF)
6501 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6502 modifier);
6504 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6505 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6507 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6508 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6509 return const0_rtx;
6511 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6512 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6513 /* If the second operand has no side effects, just evaluate
6514 the first. */
6515 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6516 modifier);
6517 else if (code == BIT_FIELD_REF)
6519 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6520 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6521 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6522 return const0_rtx;
6525 target = 0;
6528 /* If we will do cse, generate all results into pseudo registers
6529 since 1) that allows cse to find more things
6530 and 2) otherwise cse could produce an insn the machine
6531 cannot support. An exception is a CONSTRUCTOR into a multi-word
6532 MEM: that's much more likely to be most efficient into the MEM.
6533 Another is a CALL_EXPR which must return in memory. */
6535 if (! cse_not_expected && mode != BLKmode && target
6536 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6537 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6538 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6539 target = 0;
6541 switch (code)
6543 case LABEL_DECL:
6545 tree function = decl_function_context (exp);
6547 temp = label_rtx (exp);
6548 temp = gen_rtx_LABEL_REF (Pmode, temp);
6550 if (function != current_function_decl
6551 && function != 0)
6552 LABEL_REF_NONLOCAL_P (temp) = 1;
6554 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6555 return temp;
6558 case PARM_DECL:
6559 if (!DECL_RTL_SET_P (exp))
6561 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6562 return CONST0_RTX (mode);
6565 /* ... fall through ... */
6567 case VAR_DECL:
6568 /* If a static var's type was incomplete when the decl was written,
6569 but the type is complete now, lay out the decl now. */
6570 if (DECL_SIZE (exp) == 0
6571 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6572 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6573 layout_decl (exp, 0);
6575 /* ... fall through ... */
6577 case FUNCTION_DECL:
6578 case RESULT_DECL:
6579 if (DECL_RTL (exp) == 0)
6580 abort ();
6582 /* Ensure the variable is marked as used even if it doesn't go through
6583 a parser. If it hasn't been used yet, write out an external
6584 definition. */
6585 if (! TREE_USED (exp))
6587 assemble_external (exp);
6588 TREE_USED (exp) = 1;
6591 /* Show we haven't gotten RTL for this yet. */
6592 temp = 0;
6594 /* Handle variables inherited from containing functions. */
6595 context = decl_function_context (exp);
6597 if (context != 0 && context != current_function_decl
6598 /* If var is static, we don't need a static chain to access it. */
6599 && ! (MEM_P (DECL_RTL (exp))
6600 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6602 rtx addr;
6604 /* Mark as non-local and addressable. */
6605 DECL_NONLOCAL (exp) = 1;
6606 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6607 abort ();
6608 lang_hooks.mark_addressable (exp);
6609 if (!MEM_P (DECL_RTL (exp)))
6610 abort ();
6611 addr = XEXP (DECL_RTL (exp), 0);
6612 if (MEM_P (addr))
6613 addr
6614 = replace_equiv_address (addr,
6615 fix_lexical_addr (XEXP (addr, 0), exp));
6616 else
6617 addr = fix_lexical_addr (addr, exp);
6619 temp = replace_equiv_address (DECL_RTL (exp), addr);
6622 /* This is the case of an array whose size is to be determined
6623 from its initializer, while the initializer is still being parsed.
6624 See expand_decl. */
6626 else if (MEM_P (DECL_RTL (exp))
6627 && REG_P (XEXP (DECL_RTL (exp), 0)))
6628 temp = validize_mem (DECL_RTL (exp));
6630 /* If DECL_RTL is memory, we are in the normal case and either
6631 the address is not valid or it is not a register and -fforce-addr
6632 is specified, get the address into a register. */
6634 else if (MEM_P (DECL_RTL (exp))
6635 && modifier != EXPAND_CONST_ADDRESS
6636 && modifier != EXPAND_SUM
6637 && modifier != EXPAND_INITIALIZER
6638 && (! memory_address_p (DECL_MODE (exp),
6639 XEXP (DECL_RTL (exp), 0))
6640 || (flag_force_addr
6641 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6643 if (alt_rtl)
6644 *alt_rtl = DECL_RTL (exp);
6645 temp = replace_equiv_address (DECL_RTL (exp),
6646 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6649 /* If we got something, return it. But first, set the alignment
6650 if the address is a register. */
6651 if (temp != 0)
6653 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6654 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6656 return temp;
6659 /* If the mode of DECL_RTL does not match that of the decl, it
6660 must be a promoted value. We return a SUBREG of the wanted mode,
6661 but mark it so that we know that it was already extended. */
6663 if (REG_P (DECL_RTL (exp))
6664 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6666 /* Get the signedness used for this variable. Ensure we get the
6667 same mode we got when the variable was declared. */
6668 if (GET_MODE (DECL_RTL (exp))
6669 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6670 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6671 abort ();
6673 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6674 SUBREG_PROMOTED_VAR_P (temp) = 1;
6675 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6676 return temp;
6679 return DECL_RTL (exp);
6681 case INTEGER_CST:
6682 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6683 TREE_INT_CST_HIGH (exp), mode);
6685 /* ??? If overflow is set, fold will have done an incomplete job,
6686 which can result in (plus xx (const_int 0)), which can get
6687 simplified by validate_replace_rtx during virtual register
6688 instantiation, which can result in unrecognizable insns.
6689 Avoid this by forcing all overflows into registers. */
6690 if (TREE_CONSTANT_OVERFLOW (exp)
6691 && modifier != EXPAND_INITIALIZER)
6692 temp = force_reg (mode, temp);
6694 return temp;
6696 case VECTOR_CST:
6697 return const_vector_from_tree (exp);
6699 case CONST_DECL:
6700 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6702 case REAL_CST:
6703 /* If optimized, generate immediate CONST_DOUBLE
6704 which will be turned into memory by reload if necessary.
6706 We used to force a register so that loop.c could see it. But
6707 this does not allow gen_* patterns to perform optimizations with
6708 the constants. It also produces two insns in cases like "x = 1.0;".
6709 On most machines, floating-point constants are not permitted in
6710 many insns, so we'd end up copying it to a register in any case.
6712 Now, we do the copying in expand_binop, if appropriate. */
6713 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6714 TYPE_MODE (TREE_TYPE (exp)));
6716 case COMPLEX_CST:
6717 /* Handle evaluating a complex constant in a CONCAT target. */
6718 if (original_target && GET_CODE (original_target) == CONCAT)
6720 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6721 rtx rtarg, itarg;
6723 rtarg = XEXP (original_target, 0);
6724 itarg = XEXP (original_target, 1);
6726 /* Move the real and imaginary parts separately. */
6727 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6728 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6730 if (op0 != rtarg)
6731 emit_move_insn (rtarg, op0);
6732 if (op1 != itarg)
6733 emit_move_insn (itarg, op1);
6735 return original_target;
6738 /* ... fall through ... */
6740 case STRING_CST:
6741 temp = output_constant_def (exp, 1);
6743 /* temp contains a constant address.
6744 On RISC machines where a constant address isn't valid,
6745 make some insns to get that address into a register. */
6746 if (modifier != EXPAND_CONST_ADDRESS
6747 && modifier != EXPAND_INITIALIZER
6748 && modifier != EXPAND_SUM
6749 && (! memory_address_p (mode, XEXP (temp, 0))
6750 || flag_force_addr))
6751 return replace_equiv_address (temp,
6752 copy_rtx (XEXP (temp, 0)));
6753 return temp;
6755 case SAVE_EXPR:
6757 tree val = TREE_OPERAND (exp, 0);
6758 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6760 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6762 /* We can indeed still hit this case, typically via builtin
6763 expanders calling save_expr immediately before expanding
6764 something. Assume this means that we only have to deal
6765 with non-BLKmode values. */
6766 if (GET_MODE (ret) == BLKmode)
6767 abort ();
6769 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6770 DECL_ARTIFICIAL (val) = 1;
6771 TREE_OPERAND (exp, 0) = val;
6773 if (!CONSTANT_P (ret))
6774 ret = copy_to_reg (ret);
6775 SET_DECL_RTL (val, ret);
6778 return ret;
6781 case UNSAVE_EXPR:
6783 rtx temp;
6784 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6785 TREE_OPERAND (exp, 0)
6786 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6787 return temp;
6790 case GOTO_EXPR:
6791 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6792 expand_goto (TREE_OPERAND (exp, 0));
6793 else
6794 expand_computed_goto (TREE_OPERAND (exp, 0));
6795 return const0_rtx;
6797 /* These are lowered during gimplification, so we should never ever
6798 see them here. */
6799 case LOOP_EXPR:
6800 case EXIT_EXPR:
6801 abort ();
6803 case LABELED_BLOCK_EXPR:
6804 if (LABELED_BLOCK_BODY (exp))
6805 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6806 /* Should perhaps use expand_label, but this is simpler and safer. */
6807 do_pending_stack_adjust ();
6808 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6809 return const0_rtx;
6811 case EXIT_BLOCK_EXPR:
6812 if (EXIT_BLOCK_RETURN (exp))
6813 sorry ("returned value in block_exit_expr");
6814 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6815 return const0_rtx;
6817 case BIND_EXPR:
6819 tree block = BIND_EXPR_BLOCK (exp);
6820 int mark_ends;
6822 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6823 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6824 mark_ends = (block != NULL_TREE);
6825 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6827 /* If VARS have not yet been expanded, expand them now. */
6828 expand_vars (BIND_EXPR_VARS (exp));
6830 /* TARGET was clobbered early in this function. The correct
6831 indicator of whether or not we need the value of this
6832 expression is the IGNORE variable. */
6833 temp = expand_expr (BIND_EXPR_BODY (exp),
6834 ignore ? const0_rtx : target,
6835 tmode, modifier);
6837 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
6839 return temp;
6842 case CONSTRUCTOR:
6843 /* If we don't need the result, just ensure we evaluate any
6844 subexpressions. */
6845 if (ignore)
6847 tree elt;
6849 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6850 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6852 return const0_rtx;
6855 /* All elts simple constants => refer to a constant in memory. But
6856 if this is a non-BLKmode mode, let it store a field at a time
6857 since that should make a CONST_INT or CONST_DOUBLE when we
6858 fold. Likewise, if we have a target we can use, it is best to
6859 store directly into the target unless the type is large enough
6860 that memcpy will be used. If we are making an initializer and
6861 all operands are constant, put it in memory as well.
6863 FIXME: Avoid trying to fill vector constructors piece-meal.
6864 Output them with output_constant_def below unless we're sure
6865 they're zeros. This should go away when vector initializers
6866 are treated like VECTOR_CST instead of arrays.
6867 */
6868 else if ((TREE_STATIC (exp)
6869 && ((mode == BLKmode
6870 && ! (target != 0 && safe_from_p (target, exp, 1)))
6871 || TREE_ADDRESSABLE (exp)
6872 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6873 && (! MOVE_BY_PIECES_P
6874 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6875 TYPE_ALIGN (type)))
6876 && ! mostly_zeros_p (exp))))
6877 || ((modifier == EXPAND_INITIALIZER
6878 || modifier == EXPAND_CONST_ADDRESS)
6879 && TREE_CONSTANT (exp)))
6881 rtx constructor = output_constant_def (exp, 1);
6883 if (modifier != EXPAND_CONST_ADDRESS
6884 && modifier != EXPAND_INITIALIZER
6885 && modifier != EXPAND_SUM)
6886 constructor = validize_mem (constructor);
6888 return constructor;
6890 else
6892 /* Handle calls that pass values in multiple non-contiguous
6893 locations. The Irix 6 ABI has examples of this. */
6894 if (target == 0 || ! safe_from_p (target, exp, 1)
6895 || GET_CODE (target) == PARALLEL
6896 || modifier == EXPAND_STACK_PARM)
6897 target
6898 = assign_temp (build_qualified_type (type,
6899 (TYPE_QUALS (type)
6900 | (TREE_READONLY (exp)
6901 * TYPE_QUAL_CONST))),
6902 0, TREE_ADDRESSABLE (exp), 1);
6904 store_constructor (exp, target, 0, int_expr_size (exp));
6905 return target;
6908 case INDIRECT_REF:
6910 tree exp1 = TREE_OPERAND (exp, 0);
6912 if (modifier != EXPAND_WRITE)
6914 tree t;
6916 t = fold_read_from_constant_string (exp);
6917 if (t)
6918 return expand_expr (t, target, tmode, modifier);
6921 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6922 op0 = memory_address (mode, op0);
6923 temp = gen_rtx_MEM (mode, op0);
6924 set_mem_attributes (temp, exp, 0);
6926 /* If we are writing to this object and its type is a record with
6927 readonly fields, we must mark it as readonly so it will
6928 conflict with readonly references to those fields. */
6929 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6930 RTX_UNCHANGING_P (temp) = 1;
6932 return temp;
6935 case ARRAY_REF:
6937 #ifdef ENABLE_CHECKING
6938 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6939 abort ();
6940 #endif
6943 tree array = TREE_OPERAND (exp, 0);
6944 tree low_bound = array_ref_low_bound (exp);
6945 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6946 HOST_WIDE_INT i;
6948 /* Optimize the special case of a zero lower bound.
6950 We convert the low_bound to sizetype to avoid some problems
6951 with constant folding. (E.g. suppose the lower bound is 1,
6952 and its mode is QI. Without the conversion, (ARRAY
6953 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6954 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6956 if (! integer_zerop (low_bound))
6957 index = size_diffop (index, convert (sizetype, low_bound));
6959 /* Fold an expression like: "foo"[2].
6960 This is not done in fold so it won't happen inside &.
6961 Don't fold if this is for wide characters since it's too
6962 difficult to do correctly and this is a very rare case. */
6964 if (modifier != EXPAND_CONST_ADDRESS
6965 && modifier != EXPAND_INITIALIZER
6966 && modifier != EXPAND_MEMORY)
6968 tree t = fold_read_from_constant_string (exp);
6970 if (t)
6971 return expand_expr (t, target, tmode, modifier);
6974 /* If this is a constant index into a constant array,
6975 just get the value from the array. Handle both the cases when
6976 we have an explicit constructor and when our operand is a variable
6977 that was declared const. */
6979 if (modifier != EXPAND_CONST_ADDRESS
6980 && modifier != EXPAND_INITIALIZER
6981 && modifier != EXPAND_MEMORY
6982 && TREE_CODE (array) == CONSTRUCTOR
6983 && ! TREE_SIDE_EFFECTS (array)
6984 && TREE_CODE (index) == INTEGER_CST
6985 && 0 > compare_tree_int (index,
6986 list_length (CONSTRUCTOR_ELTS
6987 (TREE_OPERAND (exp, 0)))))
6989 tree elem;
6991 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6992 i = TREE_INT_CST_LOW (index);
6993 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6996 if (elem)
6997 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6998 modifier);
7001 else if (optimize >= 1
7002 && modifier != EXPAND_CONST_ADDRESS
7003 && modifier != EXPAND_INITIALIZER
7004 && modifier != EXPAND_MEMORY
7005 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7006 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7007 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7008 && targetm.binds_local_p (array))
7010 if (TREE_CODE (index) == INTEGER_CST)
7012 tree init = DECL_INITIAL (array);
7014 if (TREE_CODE (init) == CONSTRUCTOR)
7016 tree elem;
7018 for (elem = CONSTRUCTOR_ELTS (init);
7019 (elem
7020 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7021 elem = TREE_CHAIN (elem))
7024 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7025 return expand_expr (fold (TREE_VALUE (elem)), target,
7026 tmode, modifier);
7028 else if (TREE_CODE (init) == STRING_CST
7029 && 0 > compare_tree_int (index,
7030 TREE_STRING_LENGTH (init)))
7032 tree type = TREE_TYPE (TREE_TYPE (init));
7033 enum machine_mode mode = TYPE_MODE (type);
7035 if (GET_MODE_CLASS (mode) == MODE_INT
7036 && GET_MODE_SIZE (mode) == 1)
7037 return gen_int_mode (TREE_STRING_POINTER (init)
7038 [TREE_INT_CST_LOW (index)], mode);
7043 goto normal_inner_ref;
7045 case COMPONENT_REF:
7046 /* If the operand is a CONSTRUCTOR, we can just extract the
7047 appropriate field if it is present. */
7048 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7050 tree elt;
7052 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7053 elt = TREE_CHAIN (elt))
7054 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7055 /* We can normally use the value of the field in the
7056 CONSTRUCTOR. However, if this is a bitfield in
7057 an integral mode that we can fit in a HOST_WIDE_INT,
7058 we must mask only the number of bits in the bitfield,
7059 since this is done implicitly by the constructor. If
7060 the bitfield does not meet either of those conditions,
7061 we can't do this optimization. */
7062 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7063 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7064 == MODE_INT)
7065 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7066 <= HOST_BITS_PER_WIDE_INT))))
7068 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7069 && modifier == EXPAND_STACK_PARM)
7070 target = 0;
7071 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7072 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7074 HOST_WIDE_INT bitsize
7075 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7076 enum machine_mode imode
7077 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7079 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7081 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7082 op0 = expand_and (imode, op0, op1, target);
7084 else
7086 tree count
7087 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7090 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7091 target, 0);
7092 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7093 target, 0);
7097 return op0;
7100 goto normal_inner_ref;
7102 case BIT_FIELD_REF:
7103 case ARRAY_RANGE_REF:
7104 normal_inner_ref:
7106 enum machine_mode mode1;
7107 HOST_WIDE_INT bitsize, bitpos;
7108 tree offset;
7109 int volatilep = 0;
7110 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7111 &mode1, &unsignedp, &volatilep);
7112 rtx orig_op0;
7114 /* If we got back the original object, something is wrong. Perhaps
7115 we are evaluating an expression too early. In any event, don't
7116 infinitely recurse. */
7117 if (tem == exp)
7118 abort ();
7120 /* If TEM's type is a union of variable size, pass TARGET to the inner
7121 computation, since it will need a temporary and TARGET is known
7122 to be safe to use for that. This occurs in unchecked conversion in Ada. */
7124 orig_op0 = op0
7125 = expand_expr (tem,
7126 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7127 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7128 != INTEGER_CST)
7129 && modifier != EXPAND_STACK_PARM
7130 ? target : NULL_RTX),
7131 VOIDmode,
7132 (modifier == EXPAND_INITIALIZER
7133 || modifier == EXPAND_CONST_ADDRESS
7134 || modifier == EXPAND_STACK_PARM)
7135 ? modifier : EXPAND_NORMAL);
7137 /* If this is a constant, put it into a register if it is a
7138 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7139 if (CONSTANT_P (op0))
7141 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7142 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7143 && offset == 0)
7144 op0 = force_reg (mode, op0);
7145 else
7146 op0 = validize_mem (force_const_mem (mode, op0));
7149 /* Otherwise, if this object is not in memory and we either have an
7150 offset or a BLKmode result, put it there. This case can't occur in
7151 C, but can in Ada if we have unchecked conversion of an expression
7152 from a scalar type to an array or record type or for an
7153 ARRAY_RANGE_REF whose type is BLKmode. */
7154 else if (!MEM_P (op0)
7155 && (offset != 0
7156 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7158 tree nt = build_qualified_type (TREE_TYPE (tem),
7159 (TYPE_QUALS (TREE_TYPE (tem))
7160 | TYPE_QUAL_CONST));
7161 rtx memloc = assign_temp (nt, 1, 1, 1);
7163 emit_move_insn (memloc, op0);
7164 op0 = memloc;
7167 if (offset != 0)
7169 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7170 EXPAND_SUM);
7172 if (!MEM_P (op0))
7173 abort ();
7175 #ifdef POINTERS_EXTEND_UNSIGNED
7176 if (GET_MODE (offset_rtx) != Pmode)
7177 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7178 #else
7179 if (GET_MODE (offset_rtx) != ptr_mode)
7180 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7181 #endif
7183 if (GET_MODE (op0) == BLKmode
7184 /* A constant address in OP0 can have VOIDmode; we must
7185 not try to call force_reg in that case. */
7186 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7187 && bitsize != 0
7188 && (bitpos % bitsize) == 0
7189 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7190 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7192 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7193 bitpos = 0;
7196 op0 = offset_address (op0, offset_rtx,
7197 highest_pow2_factor (offset));
7200 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7201 record its alignment as BIGGEST_ALIGNMENT. */
7202 if (MEM_P (op0) && bitpos == 0 && offset != 0
7203 && is_aligning_offset (offset, tem))
7204 set_mem_align (op0, BIGGEST_ALIGNMENT);
7206 /* Don't forget about volatility even if this is a bitfield. */
7207 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7209 if (op0 == orig_op0)
7210 op0 = copy_rtx (op0);
7212 MEM_VOLATILE_P (op0) = 1;
7215 /* The following code doesn't handle CONCAT.
7216 Assume only bitpos == 0 can be used for CONCAT, due to
7217 one-element arrays having the same mode as their element. */
7218 if (GET_CODE (op0) == CONCAT)
7220 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7221 abort ();
7222 return op0;
7225 /* In cases where an aligned union has an unaligned object
7226 as a field, we might be extracting a BLKmode value from
7227 an integer-mode (e.g., SImode) object. Handle this case
7228 by doing the extract into an object as wide as the field
7229 (which we know to be the width of a basic mode), then
7230 storing into memory, and changing the mode to BLKmode. */
7231 if (mode1 == VOIDmode
7232 || REG_P (op0) || GET_CODE (op0) == SUBREG
7233 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7234 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7235 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7236 && modifier != EXPAND_CONST_ADDRESS
7237 && modifier != EXPAND_INITIALIZER)
7238 /* If the field isn't aligned enough to fetch as a memref,
7239 fetch it as a bit field. */
7240 || (mode1 != BLKmode
7241 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7242 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7243 || (MEM_P (op0)
7244 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7245 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7246 && ((modifier == EXPAND_CONST_ADDRESS
7247 || modifier == EXPAND_INITIALIZER)
7248 ? STRICT_ALIGNMENT
7249 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7250 || (bitpos % BITS_PER_UNIT != 0)))
7251 /* If the type and the field have a constant size and the
7252 size of the type isn't the same size as the bitfield,
7253 we must use bitfield operations. */
7254 || (bitsize >= 0
7255 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7256 == INTEGER_CST)
7257 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7258 bitsize)))
7260 enum machine_mode ext_mode = mode;
7262 if (ext_mode == BLKmode
7263 && ! (target != 0 && MEM_P (op0)
7264 && MEM_P (target)
7265 && bitpos % BITS_PER_UNIT == 0))
7266 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7268 if (ext_mode == BLKmode)
7270 if (target == 0)
7271 target = assign_temp (type, 0, 1, 1);
7273 if (bitsize == 0)
7274 return target;
7276 /* In this case, BITPOS must start at a byte boundary and
7277 TARGET, if specified, must be a MEM. */
7278 if (!MEM_P (op0)
7279 || (target != 0 && !MEM_P (target))
7280 || bitpos % BITS_PER_UNIT != 0)
7281 abort ();
7283 emit_block_move (target,
7284 adjust_address (op0, VOIDmode,
7285 bitpos / BITS_PER_UNIT),
7286 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7287 / BITS_PER_UNIT),
7288 (modifier == EXPAND_STACK_PARM
7289 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7291 return target;
7294 op0 = validize_mem (op0);
7296 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7297 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7299 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7300 (modifier == EXPAND_STACK_PARM
7301 ? NULL_RTX : target),
7302 ext_mode, ext_mode);
7304 /* If the result is a record type and BITSIZE is narrower than
7305 the mode of OP0, an integral mode, and this is a big endian
7306 machine, we must put the field into the high-order bits. */
7307 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7308 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7309 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7310 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7311 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7312 - bitsize),
7313 op0, 1);
7315 /* If the result type is BLKmode, store the data into a temporary
7316 of the appropriate type, but with the mode corresponding to the
7317 mode for the data we have (op0's mode). It's tempting to make
7318 this a constant type, since we know it's only being stored once,
7319 but that can cause problems if we are taking the address of this
7320 COMPONENT_REF because the MEM of any reference via that address
7321 will have flags corresponding to the type, which will not
7322 necessarily be constant. */
7323 if (mode == BLKmode)
7325 rtx new
7326 = assign_stack_temp_for_type
7327 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7329 emit_move_insn (new, op0);
7330 op0 = copy_rtx (new);
7331 PUT_MODE (op0, BLKmode);
7332 set_mem_attributes (op0, exp, 1);
7335 return op0;
7338 /* If the result is BLKmode, use that to access the object
7339 now as well. */
7340 if (mode == BLKmode)
7341 mode1 = BLKmode;
7343 /* Get a reference to just this component. */
7344 if (modifier == EXPAND_CONST_ADDRESS
7345 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7346 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7347 else
7348 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7350 if (op0 == orig_op0)
7351 op0 = copy_rtx (op0);
7353 set_mem_attributes (op0, exp, 0);
7354 if (REG_P (XEXP (op0, 0)))
7355 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7357 MEM_VOLATILE_P (op0) |= volatilep;
7358 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7359 || modifier == EXPAND_CONST_ADDRESS
7360 || modifier == EXPAND_INITIALIZER)
7361 return op0;
7362 else if (target == 0)
7363 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7365 convert_move (target, op0, unsignedp);
7366 return target;
7369 case OBJ_TYPE_REF:
7370 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7372 case CALL_EXPR:
7373 /* Check for a built-in function. */
7374 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7375 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7376 == FUNCTION_DECL)
7377 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7379 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7380 == BUILT_IN_FRONTEND)
7381 return lang_hooks.expand_expr (exp, original_target,
7382 tmode, modifier,
7383 alt_rtl);
7384 else
7385 return expand_builtin (exp, target, subtarget, tmode, ignore);
7388 return expand_call (exp, target, ignore);
7390 case NON_LVALUE_EXPR:
7391 case NOP_EXPR:
7392 case CONVERT_EXPR:
7393 if (TREE_OPERAND (exp, 0) == error_mark_node)
7394 return const0_rtx;
7396 if (TREE_CODE (type) == UNION_TYPE)
7398 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7400 /* If both input and output are BLKmode, this conversion isn't doing
7401 anything except possibly changing memory attributes. */
7402 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7404 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7405 modifier);
7407 result = copy_rtx (result);
7408 set_mem_attributes (result, exp, 0);
7409 return result;
7412 if (target == 0)
7414 if (TYPE_MODE (type) != BLKmode)
7415 target = gen_reg_rtx (TYPE_MODE (type));
7416 else
7417 target = assign_temp (type, 0, 1, 1);
7420 if (MEM_P (target))
7421 /* Store data into beginning of memory target. */
7422 store_expr (TREE_OPERAND (exp, 0),
7423 adjust_address (target, TYPE_MODE (valtype), 0),
7424 modifier == EXPAND_STACK_PARM ? 2 : 0);
7426 else if (REG_P (target))
7427 /* Store this field into a union of the proper type. */
7428 store_field (target,
7429 MIN ((int_size_in_bytes (TREE_TYPE
7430 (TREE_OPERAND (exp, 0)))
7431 * BITS_PER_UNIT),
7432 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7433 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7434 VOIDmode, 0, type, 0);
7435 else
7436 abort ();
7438 /* Return the entire union. */
7439 return target;
7442 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7444 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7445 modifier);
7447 /* If the signedness of the conversion differs and OP0 is
7448 a promoted SUBREG, clear that indication since we now
7449 have to do the proper extension. */
7450 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7451 && GET_CODE (op0) == SUBREG)
7452 SUBREG_PROMOTED_VAR_P (op0) = 0;
7454 return REDUCE_BIT_FIELD (op0);
7457 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7458 op0 = REDUCE_BIT_FIELD (op0);
7459 if (GET_MODE (op0) == mode)
7460 return op0;
7462 /* If OP0 is a constant, just convert it into the proper mode. */
7463 if (CONSTANT_P (op0))
7465 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7466 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7468 if (modifier == EXPAND_INITIALIZER)
7469 return simplify_gen_subreg (mode, op0, inner_mode,
7470 subreg_lowpart_offset (mode,
7471 inner_mode));
7472 else
7473 return convert_modes (mode, inner_mode, op0,
7474 TYPE_UNSIGNED (inner_type));
7477 if (modifier == EXPAND_INITIALIZER)
7478 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7480 if (target == 0)
7481 return
7482 convert_to_mode (mode, op0,
7483 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7484 else
7485 convert_move (target, op0,
7486 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7487 return target;
7489 case VIEW_CONVERT_EXPR:
7490 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7492 /* If the input and output modes are both the same, we are done.
7493 Otherwise, if neither mode is BLKmode and both are integral and within
7494 a word, we can use gen_lowpart. If neither is true, make sure the
7495 operand is in memory and convert the MEM to the new mode. */
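/* E.g. reinterpreting an int as an unsigned int leaves OP0 untouched
   (same mode); reinterpreting an int as a same-sized float cannot use
   gen_lowpart here (the modes are not both MODE_INT), so OP0 is forced
   into memory if necessary and re-read there in the new mode.  */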
7496 if (TYPE_MODE (type) == GET_MODE (op0))
7498 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7499 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7500 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7501 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7502 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7503 op0 = gen_lowpart (TYPE_MODE (type), op0);
7504 else if (!MEM_P (op0))
7506 /* If the operand is not a MEM, force it into memory. Since we
7507 are going to be changing the mode of the MEM, don't call
7508 force_const_mem for constants because we don't allow pool
7509 constants to change mode. */
7510 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7512 if (TREE_ADDRESSABLE (exp))
7513 abort ();
7515 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7516 target
7517 = assign_stack_temp_for_type
7518 (TYPE_MODE (inner_type),
7519 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7521 emit_move_insn (target, op0);
7522 op0 = target;
7525 /* At this point, OP0 is in the correct mode. If the output type is such
7526 that the operand is known to be aligned, indicate that it is.
7527 Otherwise, we need only be concerned about alignment for non-BLKmode
7528 results. */
7529 if (MEM_P (op0))
7531 op0 = copy_rtx (op0);
7533 if (TYPE_ALIGN_OK (type))
7534 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7535 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7536 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7538 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7539 HOST_WIDE_INT temp_size
7540 = MAX (int_size_in_bytes (inner_type),
7541 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7542 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7543 temp_size, 0, type);
7544 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7546 if (TREE_ADDRESSABLE (exp))
7547 abort ();
7549 if (GET_MODE (op0) == BLKmode)
7550 emit_block_move (new_with_op0_mode, op0,
7551 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7552 (modifier == EXPAND_STACK_PARM
7553 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7554 else
7555 emit_move_insn (new_with_op0_mode, op0);
7557 op0 = new;
7560 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7563 return op0;
7565 case PLUS_EXPR:
7566 this_optab = ! unsignedp && flag_trapv
7567 && (GET_MODE_CLASS (mode) == MODE_INT)
7568 ? addv_optab : add_optab;
7570 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7571 something else, make sure we add the register to the constant and
7572 then to the other thing. This case can occur during strength
7573 reduction and doing it this way will produce better code if the
7574 frame pointer or argument pointer is eliminated.
7576 fold-const.c will ensure that the constant is always in the inner
7577 PLUS_EXPR, so the only case we need to do anything about is if
7578 sp, ap, or fp is our second argument, in which case we must swap
7579 the innermost first argument and our second argument. */
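/* For example, (x + 4) + fp is rearranged here into (fp + 4) + x, so
   that fp + 4 can later collapse into a single address once the frame
   or argument pointer is eliminated.  */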
7581 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7582 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7583 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7584 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7585 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7586 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7588 tree t = TREE_OPERAND (exp, 1);
7590 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7591 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7594 /* If the result is to be ptr_mode and we are adding an integer to
7595 something, we might be forming a constant. So try to use
7596 plus_constant. If it produces a sum and we can't accept it,
7597 use force_operand. This allows P = &ARR[const] to generate
7598 efficient code on machines where a SYMBOL_REF is not a valid
7599 address.
7601 If this is an EXPAND_SUM call, always return the sum. */
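/* E.g. for p = &arr[10], one operand expands to (symbol_ref arr) and
   the other is a constant byte offset, so plus_constant can fold the
   sum into a single (const (plus (symbol_ref arr) (const_int 40)))
   (for 4-byte elements) instead of emitting a run-time addition.  */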
7602 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7603 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7605 if (modifier == EXPAND_STACK_PARM)
7606 target = 0;
7607 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7608 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7609 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7611 rtx constant_part;
7613 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7614 EXPAND_SUM);
7615 /* Use immed_double_const to ensure that the constant is
7616 truncated according to the mode of OP1, then sign extended
7617 to a HOST_WIDE_INT. Using the constant directly can result
7618 in non-canonical RTL in a 64x32 cross compile. */
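/* Concretely: on a 64-bit host targeting a 32-bit machine, the tree
   constant 0xfffffff0 must become (const_int -16) for SImode; using
   the low word directly would produce a CONST_INT that is not
   sign-extended and hence not canonical.  */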
7619 constant_part
7620 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7621 (HOST_WIDE_INT) 0,
7622 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7623 op1 = plus_constant (op1, INTVAL (constant_part));
7624 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7625 op1 = force_operand (op1, target);
7626 return REDUCE_BIT_FIELD (op1);
7629 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7630 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7631 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7633 rtx constant_part;
7635 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7636 (modifier == EXPAND_INITIALIZER
7637 ? EXPAND_INITIALIZER : EXPAND_SUM));
7638 if (! CONSTANT_P (op0))
7640 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7641 VOIDmode, modifier);
7642 /* Return a PLUS if modifier says it's OK. */
7643 if (modifier == EXPAND_SUM
7644 || modifier == EXPAND_INITIALIZER)
7645 return simplify_gen_binary (PLUS, mode, op0, op1);
7646 goto binop2;
7648 /* Use immed_double_const to ensure that the constant is
7649 truncated according to the mode of OP1, then sign extended
7650 to a HOST_WIDE_INT. Using the constant directly can result
7651 in non-canonical RTL in a 64x32 cross compile. */
7652 constant_part
7653 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7654 (HOST_WIDE_INT) 0,
7655 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7656 op0 = plus_constant (op0, INTVAL (constant_part));
7657 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7658 op0 = force_operand (op0, target);
7659 return REDUCE_BIT_FIELD (op0);
7663 /* No sense saving up arithmetic to be done
7664 if it's all in the wrong mode to form part of an address.
7665 And force_operand won't know whether to sign-extend or
7666 zero-extend. */
7667 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7668 || mode != ptr_mode)
7670 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7671 subtarget, &op0, &op1, 0);
7672 if (op0 == const0_rtx)
7673 return op1;
7674 if (op1 == const0_rtx)
7675 return op0;
7676 goto binop2;
7679 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7680 subtarget, &op0, &op1, modifier);
7681 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7683 case MINUS_EXPR:
7684 /* For initializers, we are allowed to return a MINUS of two
7685 symbolic constants. Here we handle all cases when both operands
7686 are constant. */
7687 /* Handle difference of two symbolic constants,
7688 for the sake of an initializer. */
7689 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7690 && really_constant_p (TREE_OPERAND (exp, 0))
7691 && really_constant_p (TREE_OPERAND (exp, 1)))
7693 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7694 NULL_RTX, &op0, &op1, modifier);
7696 /* If the last operand is a CONST_INT, use plus_constant of
7697 the negated constant. Else make the MINUS. */
7698 if (GET_CODE (op1) == CONST_INT)
7699 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7700 else
7701 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7704 this_optab = ! unsignedp && flag_trapv
7705 && (GET_MODE_CLASS(mode) == MODE_INT)
7706 ? subv_optab : sub_optab;
7708 /* No sense saving up arithmetic to be done
7709 if it's all in the wrong mode to form part of an address.
7710 And force_operand won't know whether to sign-extend or
7711 zero-extend. */
7712 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7713 || mode != ptr_mode)
7714 goto binop;
7716 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7717 subtarget, &op0, &op1, modifier);
7719 /* Convert A - const to A + (-const). */
7720 if (GET_CODE (op1) == CONST_INT)
7722 op1 = negate_rtx (mode, op1);
7723 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7726 goto binop2;
7728 case MULT_EXPR:
7729 /* If first operand is constant, swap them.
7730 Thus the following special case checks need only
7731 check the second operand. */
7732 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7734 tree t1 = TREE_OPERAND (exp, 0);
7735 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7736 TREE_OPERAND (exp, 1) = t1;
7739 /* Attempt to return something suitable for generating an
7740 indexed address, for machines that support that. */
7742 if (modifier == EXPAND_SUM && mode == ptr_mode
7743 && host_integerp (TREE_OPERAND (exp, 1), 0))
7745 tree exp1 = TREE_OPERAND (exp, 1);
7747 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7748 EXPAND_SUM);
7750 if (!REG_P (op0))
7751 op0 = force_operand (op0, NULL_RTX);
7752 if (!REG_P (op0))
7753 op0 = copy_to_mode_reg (mode, op0);
7755 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7756 gen_int_mode (tree_low_cst (exp1, 0),
7757 TYPE_MODE (TREE_TYPE (exp1)))));
7760 if (modifier == EXPAND_STACK_PARM)
7761 target = 0;
7763 /* Check for multiplying things that have been extended
7764 from a narrower type. If this machine supports multiplying
7765 in that narrower type with a result in the desired type,
7766 do it that way, and avoid the explicit type-conversion. */
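/* For instance, (int) s1 * (int) s2 with s1 and s2 of type short can
   use a HImode x HImode -> SImode widening multiply (smul_widen_optab)
   directly, rather than sign-extending both operands to SImode and
   doing a full SImode multiply.  */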
7767 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7768 && TREE_CODE (type) == INTEGER_TYPE
7769 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7770 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7771 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7772 && int_fits_type_p (TREE_OPERAND (exp, 1),
7773 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7774 /* Don't use a widening multiply if a shift will do. */
7775 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7776 > HOST_BITS_PER_WIDE_INT)
7777 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7779 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7780 && (TYPE_PRECISION (TREE_TYPE
7781 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7782 == TYPE_PRECISION (TREE_TYPE
7783 (TREE_OPERAND
7784 (TREE_OPERAND (exp, 0), 0))))
7785 /* If both operands are extended, they must either both
7786 be zero-extended or both be sign-extended. */
7787 && (TYPE_UNSIGNED (TREE_TYPE
7788 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7789 == TYPE_UNSIGNED (TREE_TYPE
7790 (TREE_OPERAND
7791 (TREE_OPERAND (exp, 0), 0)))))))
7793 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7794 enum machine_mode innermode = TYPE_MODE (op0type);
7795 bool zextend_p = TYPE_UNSIGNED (op0type);
7796 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7797 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7799 if (mode == GET_MODE_WIDER_MODE (innermode))
7801 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7803 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7804 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7805 TREE_OPERAND (exp, 1),
7806 NULL_RTX, &op0, &op1, 0);
7807 else
7808 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7809 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7810 NULL_RTX, &op0, &op1, 0);
7811 goto binop2;
7813 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7814 && innermode == word_mode)
7816 rtx htem, hipart;
7817 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7818 NULL_RTX, VOIDmode, 0);
7819 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7820 op1 = convert_modes (innermode, mode,
7821 expand_expr (TREE_OPERAND (exp, 1),
7822 NULL_RTX, VOIDmode, 0),
7823 unsignedp);
7824 else
7825 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7826 NULL_RTX, VOIDmode, 0);
7827 temp = expand_binop (mode, other_optab, op0, op1, target,
7828 unsignedp, OPTAB_LIB_WIDEN);
7829 hipart = gen_highpart (innermode, temp);
7830 htem = expand_mult_highpart_adjust (innermode, hipart,
7831 op0, op1, hipart,
7832 zextend_p);
7833 if (htem != hipart)
7834 emit_move_insn (hipart, htem);
7835 return REDUCE_BIT_FIELD (temp);
7839 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7840 subtarget, &op0, &op1, 0);
7841 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7843 case TRUNC_DIV_EXPR:
7844 case FLOOR_DIV_EXPR:
7845 case CEIL_DIV_EXPR:
7846 case ROUND_DIV_EXPR:
7847 case EXACT_DIV_EXPR:
7848 if (modifier == EXPAND_STACK_PARM)
7849 target = 0;
7850 /* Possible optimization: compute the dividend with EXPAND_SUM
7851 then if the divisor is constant can optimize the case
7852 where some terms of the dividend have coeffs divisible by it. */
7853 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7854 subtarget, &op0, &op1, 0);
7855 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7857 case RDIV_EXPR:
7858 /* Emit a/b as a*(1/b). Later CSE may manage to share the reciprocal,
7859 saving an expensive divide. If not, combine will rebuild the original
7860 computation. */
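/* So x / b + y / b becomes x * (1/b) + y * (1/b), letting CSE compute
   1/b once: two divides are traded for one divide and two multiplies.  */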
7861 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7862 && TREE_CODE (type) == REAL_TYPE
7863 && !real_onep (TREE_OPERAND (exp, 0)))
7864 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7865 build (RDIV_EXPR, type,
7866 build_real (type, dconst1),
7867 TREE_OPERAND (exp, 1))),
7868 target, tmode, modifier);
7869 this_optab = sdiv_optab;
7870 goto binop;
7872 case TRUNC_MOD_EXPR:
7873 case FLOOR_MOD_EXPR:
7874 case CEIL_MOD_EXPR:
7875 case ROUND_MOD_EXPR:
7876 if (modifier == EXPAND_STACK_PARM)
7877 target = 0;
7878 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7879 subtarget, &op0, &op1, 0);
7880 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7882 case FIX_ROUND_EXPR:
7883 case FIX_FLOOR_EXPR:
7884 case FIX_CEIL_EXPR:
7885 abort (); /* Not used for C. */
7887 case FIX_TRUNC_EXPR:
7888 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7889 if (target == 0 || modifier == EXPAND_STACK_PARM)
7890 target = gen_reg_rtx (mode);
7891 expand_fix (target, op0, unsignedp);
7892 return target;
7894 case FLOAT_EXPR:
7895 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7896 if (target == 0 || modifier == EXPAND_STACK_PARM)
7897 target = gen_reg_rtx (mode);
7898 /* expand_float can't figure out what to do if FROM has VOIDmode.
7899 So give it the correct mode. With -O, cse will optimize this. */
7900 if (GET_MODE (op0) == VOIDmode)
7901 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7902 op0);
7903 expand_float (target, op0,
7904 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7905 return target;
7907 case NEGATE_EXPR:
7908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7909 if (modifier == EXPAND_STACK_PARM)
7910 target = 0;
7911 temp = expand_unop (mode,
7912 ! unsignedp && flag_trapv
7913 && (GET_MODE_CLASS(mode) == MODE_INT)
7914 ? negv_optab : neg_optab, op0, target, 0);
7915 if (temp == 0)
7916 abort ();
7917 return REDUCE_BIT_FIELD (temp);
7919 case ABS_EXPR:
7920 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7921 if (modifier == EXPAND_STACK_PARM)
7922 target = 0;
7924 /* ABS_EXPR is not valid for complex arguments. */
7925 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7926 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7927 abort ();
7929 /* Unsigned abs is simply the operand. Testing here means we don't
7930 risk generating incorrect code below. */
7931 if (TYPE_UNSIGNED (type))
7932 return op0;
7934 return expand_abs (mode, op0, target, unsignedp,
7935 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7937 case MAX_EXPR:
7938 case MIN_EXPR:
7939 target = original_target;
7940 if (target == 0
7941 || modifier == EXPAND_STACK_PARM
7942 || (MEM_P (target) && MEM_VOLATILE_P (target))
7943 || GET_MODE (target) != mode
7944 || (REG_P (target)
7945 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7946 target = gen_reg_rtx (mode);
7947 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7948 target, &op0, &op1, 0);
7950 /* First try to do it with a special MIN or MAX instruction.
7951 If that does not win, use a conditional jump to select the proper
7952 value. */
7953 this_optab = (unsignedp
7954 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7955 : (code == MIN_EXPR ? smin_optab : smax_optab));
7957 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7958 OPTAB_WIDEN);
7959 if (temp != 0)
7960 return temp;
7962 /* At this point, a MEM target is no longer useful; we will get better
7963 code without it. */
7965 if (MEM_P (target))
7966 target = gen_reg_rtx (mode);
7968 /* If op1 was placed in target, swap op0 and op1. */
7969 if (target != op0 && target == op1)
7971 rtx tem = op0;
7972 op0 = op1;
7973 op1 = tem;
7976 if (target != op0)
7977 emit_move_insn (target, op0);
7979 op0 = gen_label_rtx ();
7981 /* If this mode is an integer too wide to compare properly,
7982 compare word by word. Rely on cse to optimize constant cases. */
7983 if (GET_MODE_CLASS (mode) == MODE_INT
7984 && ! can_compare_p (GE, mode, ccp_jump))
7986 if (code == MAX_EXPR)
7987 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7988 NULL_RTX, op0);
7989 else
7990 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7991 NULL_RTX, op0);
7993 else
7995 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7996 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7998 emit_move_insn (target, op1);
7999 emit_label (op0);
8000 return target;
8002 case BIT_NOT_EXPR:
8003 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8004 if (modifier == EXPAND_STACK_PARM)
8005 target = 0;
8006 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8007 if (temp == 0)
8008 abort ();
8009 return temp;
8011 /* ??? Can optimize bitwise operations with one arg constant.
8012 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8013 and (a bitwise1 b) bitwise2 b (etc)
8014 but that is probably not worth while. */
8016 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8017 boolean values when we want in all cases to compute both of them. In
8018 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8019 as actual zero-or-1 values and then bitwise anding. In cases where
8020 there cannot be any side effects, better code would be made by
8021 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8022 how to recognize those cases. */
8024 case TRUTH_AND_EXPR:
8025 case BIT_AND_EXPR:
8026 this_optab = and_optab;
8027 goto binop;
8029 case TRUTH_OR_EXPR:
8030 case BIT_IOR_EXPR:
8031 this_optab = ior_optab;
8032 goto binop;
8034 case TRUTH_XOR_EXPR:
8035 case BIT_XOR_EXPR:
8036 this_optab = xor_optab;
8037 goto binop;
8039 case LSHIFT_EXPR:
8040 case RSHIFT_EXPR:
8041 case LROTATE_EXPR:
8042 case RROTATE_EXPR:
8043 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8044 subtarget = 0;
8045 if (modifier == EXPAND_STACK_PARM)
8046 target = 0;
8047 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8048 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8049 unsignedp);
8051 /* Could determine the answer when only additive constants differ. Also,
8052 the addition of one can be handled by changing the condition. */
8053 case LT_EXPR:
8054 case LE_EXPR:
8055 case GT_EXPR:
8056 case GE_EXPR:
8057 case EQ_EXPR:
8058 case NE_EXPR:
8059 case UNORDERED_EXPR:
8060 case ORDERED_EXPR:
8061 case UNLT_EXPR:
8062 case UNLE_EXPR:
8063 case UNGT_EXPR:
8064 case UNGE_EXPR:
8065 case UNEQ_EXPR:
8066 case LTGT_EXPR:
8067 temp = do_store_flag (exp,
8068 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8069 tmode != VOIDmode ? tmode : mode, 0);
8070 if (temp != 0)
8071 return temp;
8073 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8074 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8075 && original_target
8076 && REG_P (original_target)
8077 && (GET_MODE (original_target)
8078 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8080 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8081 VOIDmode, 0);
8083 /* If temp is constant, we can just compute the result. */
8084 if (GET_CODE (temp) == CONST_INT)
8086 if (INTVAL (temp) != 0)
8087 emit_move_insn (target, const1_rtx);
8088 else
8089 emit_move_insn (target, const0_rtx);
8091 return target;
8094 if (temp != original_target)
8096 enum machine_mode mode1 = GET_MODE (temp);
8097 if (mode1 == VOIDmode)
8098 mode1 = tmode != VOIDmode ? tmode : mode;
8100 temp = copy_to_mode_reg (mode1, temp);
8103 op1 = gen_label_rtx ();
8104 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8105 GET_MODE (temp), unsignedp, op1);
8106 emit_move_insn (temp, const1_rtx);
8107 emit_label (op1);
8108 return temp;
8111 /* If no set-flag instruction, must generate a conditional
8112 store into a temporary variable. Drop through
8113 and handle this like && and ||. */
8115 case TRUTH_ANDIF_EXPR:
8116 case TRUTH_ORIF_EXPR:
8117 if (! ignore
8118 && (target == 0
8119 || modifier == EXPAND_STACK_PARM
8120 || ! safe_from_p (target, exp, 1)
8121 /* Make sure we don't have a hard reg (such as function's return
8122 value) live across basic blocks, if not optimizing. */
8123 || (!optimize && REG_P (target)
8124 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8125 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8127 if (target)
8128 emit_clr_insn (target);
8130 op1 = gen_label_rtx ();
8131 jumpifnot (exp, op1);
8133 if (target)
8134 emit_0_to_1_insn (target);
8136 emit_label (op1);
8137 return ignore ? const0_rtx : target;
8139 case TRUTH_NOT_EXPR:
8140 if (modifier == EXPAND_STACK_PARM)
8141 target = 0;
8142 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8143 /* The parser is careful to generate TRUTH_NOT_EXPR
8144 only with operands that are always zero or one. */
8145 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8146 target, 1, OPTAB_LIB_WIDEN);
8147 if (temp == 0)
8148 abort ();
8149 return temp;
8151 case COMPOUND_EXPR:
8152 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8153 emit_queue ();
8154 return expand_expr_real (TREE_OPERAND (exp, 1),
8155 (ignore ? const0_rtx : target),
8156 VOIDmode, modifier, alt_rtl);
8158 case STATEMENT_LIST:
8160 tree_stmt_iterator iter;
8162 if (!ignore)
8163 abort ();
8165 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8166 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8168 return const0_rtx;
8170 case COND_EXPR:
8171 /* If it's void, we don't need to worry about computing a value. */
8172 if (VOID_TYPE_P (TREE_TYPE (exp)))
8174 tree pred = TREE_OPERAND (exp, 0);
8175 tree then_ = TREE_OPERAND (exp, 1);
8176 tree else_ = TREE_OPERAND (exp, 2);
8178 if (TREE_CODE (then_) == GOTO_EXPR
8179 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8181 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8182 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8184 else if (TREE_CODE (else_) == GOTO_EXPR
8185 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8187 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8188 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8191 /* Just use the 'if' machinery. */
8192 expand_start_cond (pred, 0);
8193 expand_expr (then_, const0_rtx, VOIDmode, 0);
8195 exp = else_;
8197 /* Iterate over 'else if's instead of recursing. */
8198 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8200 expand_start_else ();
8201 if (EXPR_HAS_LOCATION (exp))
8203 emit_line_note (EXPR_LOCATION (exp));
8204 record_block_change (TREE_BLOCK (exp));
8206 expand_elseif (TREE_OPERAND (exp, 0));
8207 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8209 /* Don't emit the jump and label if there's no 'else' clause. */
8210 if (TREE_SIDE_EFFECTS (exp))
8212 expand_start_else ();
8213 expand_expr (exp, const0_rtx, VOIDmode, 0);
8215 expand_end_cond ();
8216 return const0_rtx;
8219 /* If we would have a "singleton" (see below) were it not for a
8220 conversion in each arm, bring that conversion back out. */
8221 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8222 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8223 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8224 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8226 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8227 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8229 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8230 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8231 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8232 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8233 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8234 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8235 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8236 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8237 return expand_expr (build1 (NOP_EXPR, type,
8238 build (COND_EXPR, TREE_TYPE (iftrue),
8239 TREE_OPERAND (exp, 0),
8240 iftrue, iffalse)),
8241 target, tmode, modifier);
8245 /* Note that COND_EXPRs whose type is a structure or union
8246 are required to be constructed to contain assignments of
8247 a temporary variable, so that we can evaluate them here
8248 for side effect only. If type is void, we must do likewise. */
8250 /* If an arm of the branch requires a cleanup,
8251 only that cleanup is performed. */
8253 tree singleton = 0;
8254 tree binary_op = 0, unary_op = 0;
8256 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8257 convert it to our mode, if necessary. */
8258 if (integer_onep (TREE_OPERAND (exp, 1))
8259 && integer_zerop (TREE_OPERAND (exp, 2))
8260 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8262 if (ignore)
8264 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8265 modifier);
8266 return const0_rtx;
8269 if (modifier == EXPAND_STACK_PARM)
8270 target = 0;
8271 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8272 if (GET_MODE (op0) == mode)
8273 return op0;
8275 if (target == 0)
8276 target = gen_reg_rtx (mode);
8277 convert_move (target, op0, unsignedp);
8278 return target;
8281 /* Check for X ? A + B : A. If we have this, we can copy A to the
8282 output and conditionally add B. Similarly for unary operations.
8283 Don't do this if X has side-effects because those side effects
8284 might affect A or B and the "?" operation is a sequence point in
8285 ANSI. (operand_equal_p tests for side effects.) */
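/* E.g. for r = x ? a + b : a, copy A into the output first and then
   add B only when X is true, instead of evaluating A separately on
   both arms.  */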
8287 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8288 && operand_equal_p (TREE_OPERAND (exp, 2),
8289 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8290 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8291 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8292 && operand_equal_p (TREE_OPERAND (exp, 1),
8293 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8294 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8295 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8296 && operand_equal_p (TREE_OPERAND (exp, 2),
8297 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8298 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8299 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8300 && operand_equal_p (TREE_OPERAND (exp, 1),
8301 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8302 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8304 /* If we are not to produce a result, we have no target. Otherwise,
8305 if a target was specified use it; it will not be used as an
8306 intermediate target unless it is safe. If no target, use a
8307 temporary. */
8309 if (ignore)
8310 temp = 0;
8311 else if (modifier == EXPAND_STACK_PARM)
8312 temp = assign_temp (type, 0, 0, 1);
8313 else if (original_target
8314 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8315 || (singleton && REG_P (original_target)
8316 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8317 && original_target == var_rtx (singleton)))
8318 && GET_MODE (original_target) == mode
8319 #ifdef HAVE_conditional_move
8320 && (! can_conditionally_move_p (mode)
8321 || REG_P (original_target)
8322 || TREE_ADDRESSABLE (type))
8323 #endif
8324 && (!MEM_P (original_target)
8325 || TREE_ADDRESSABLE (type)))
8326 temp = original_target;
8327 else if (TREE_ADDRESSABLE (type))
8328 abort ();
8329 else
8330 temp = assign_temp (type, 0, 0, 1);
8332 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8333 do the test of X as a store-flag operation, do this as
8334 A + ((X != 0) << log C). Similarly for other simple binary
8335 operators. Only do for C == 1 if BRANCH_COST is low. */
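/* E.g. x ? a + 8 : a becomes a + ((x != 0) << 3): a store-flag and a
   shift replace the conditional branch.  When BRANCH_COST is low only
   the C == 1 form, a + (x != 0), is handled this way.  */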
8336 if (temp && singleton && binary_op
8337 && (TREE_CODE (binary_op) == PLUS_EXPR
8338 || TREE_CODE (binary_op) == MINUS_EXPR
8339 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8340 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8341 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8342 : integer_onep (TREE_OPERAND (binary_op, 1)))
8343 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8345 rtx result;
8346 tree cond;
8347 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8348 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8349 ? addv_optab : add_optab)
8350 : TREE_CODE (binary_op) == MINUS_EXPR
8351 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8352 ? subv_optab : sub_optab)
8353 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8354 : xor_optab);
8356 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8357 if (singleton == TREE_OPERAND (exp, 1))
8358 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8359 else
8360 cond = TREE_OPERAND (exp, 0);
8362 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8363 ? temp : NULL_RTX),
8364 mode, BRANCH_COST <= 1);
8366 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8367 result = expand_shift (LSHIFT_EXPR, mode, result,
8368 build_int_2 (tree_log2
8369 (TREE_OPERAND
8370 (binary_op, 1)),
8372 (safe_from_p (temp, singleton, 1)
8373 ? temp : NULL_RTX), 0);
8375 if (result)
8377 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8378 return expand_binop (mode, boptab, op1, result, temp,
8379 unsignedp, OPTAB_LIB_WIDEN);
8383 do_pending_stack_adjust ();
8384 NO_DEFER_POP;
8385 op0 = gen_label_rtx ();
8387 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8389 if (temp != 0)
8391 /* If the target conflicts with the other operand of the
8392 binary op, we can't use it. Also, we can't use the target
8393 if it is a hard register, because evaluating the condition
8394 might clobber it. */
8395 if ((binary_op
8396 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8397 || (REG_P (temp)
8398 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8399 temp = gen_reg_rtx (mode);
8400 store_expr (singleton, temp,
8401 modifier == EXPAND_STACK_PARM ? 2 : 0);
8403 else
8404 expand_expr (singleton,
8405 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8406 if (singleton == TREE_OPERAND (exp, 1))
8407 jumpif (TREE_OPERAND (exp, 0), op0);
8408 else
8409 jumpifnot (TREE_OPERAND (exp, 0), op0);
8411 if (binary_op && temp == 0)
8412 /* Just touch the other operand. */
8413 expand_expr (TREE_OPERAND (binary_op, 1),
8414 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8415 else if (binary_op)
8416 store_expr (build (TREE_CODE (binary_op), type,
8417 make_tree (type, temp),
8418 TREE_OPERAND (binary_op, 1)),
8419 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8420 else
8421 store_expr (build1 (TREE_CODE (unary_op), type,
8422 make_tree (type, temp)),
8423 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8424 op1 = op0;
8426 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8427 comparison operator. If we have one of these cases, set the
8428 output to A, branch on A (cse will merge these two references),
8429 then set the output to FOO. */
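/* E.g. r = a > 0 ? a : b: store A into the output, branch on the
   comparison against zero (cse merges the two uses of A), and store B
   on the fall-through path.  */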
8430 else if (temp
8431 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8432 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8433 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8434 TREE_OPERAND (exp, 1), 0)
8435 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8436 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8437 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8439 if (REG_P (temp)
8440 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8441 temp = gen_reg_rtx (mode);
8442 store_expr (TREE_OPERAND (exp, 1), temp,
8443 modifier == EXPAND_STACK_PARM ? 2 : 0);
8444 jumpif (TREE_OPERAND (exp, 0), op0);
8446 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8447 store_expr (TREE_OPERAND (exp, 2), temp,
8448 modifier == EXPAND_STACK_PARM ? 2 : 0);
8449 else
8450 expand_expr (TREE_OPERAND (exp, 2),
8451 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8452 op1 = op0;
8454 else if (temp
8455 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8456 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8457 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8458 TREE_OPERAND (exp, 2), 0)
8459 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8460 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8461 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8463 if (REG_P (temp)
8464 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8465 temp = gen_reg_rtx (mode);
8466 store_expr (TREE_OPERAND (exp, 2), temp,
8467 modifier == EXPAND_STACK_PARM ? 2 : 0);
8468 jumpifnot (TREE_OPERAND (exp, 0), op0);
8470 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8471 store_expr (TREE_OPERAND (exp, 1), temp,
8472 modifier == EXPAND_STACK_PARM ? 2 : 0);
8473 else
8474 expand_expr (TREE_OPERAND (exp, 1),
8475 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8476 op1 = op0;
8478 else
8480 op1 = gen_label_rtx ();
8481 jumpifnot (TREE_OPERAND (exp, 0), op0);
8483 /* One branch of the cond can be void, if it never returns. For
8484 example A ? throw : E */
8485 if (temp != 0
8486 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8487 store_expr (TREE_OPERAND (exp, 1), temp,
8488 modifier == EXPAND_STACK_PARM ? 2 : 0);
8489 else
8490 expand_expr (TREE_OPERAND (exp, 1),
8491 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8492 emit_queue ();
8493 emit_jump_insn (gen_jump (op1));
8494 emit_barrier ();
8495 emit_label (op0);
8496 if (temp != 0
8497 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8498 store_expr (TREE_OPERAND (exp, 2), temp,
8499 modifier == EXPAND_STACK_PARM ? 2 : 0);
8500 else
8501 expand_expr (TREE_OPERAND (exp, 2),
8502 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8505 emit_queue ();
8506 emit_label (op1);
8507 OK_DEFER_POP;
8509 return temp;
8512 case INIT_EXPR:
8514 tree lhs = TREE_OPERAND (exp, 0);
8515 tree rhs = TREE_OPERAND (exp, 1);
8517 temp = expand_assignment (lhs, rhs, ! ignore);
8518 return temp;
8521 case MODIFY_EXPR:
8523 /* If lhs is complex, expand calls in rhs before computing it.
8524 That's so we don't compute a pointer and save it over a
8525 call. If lhs is simple, compute it first so we can give it
8526 as a target if the rhs is just a call. This avoids an
8527 extra temp and copy and that prevents a partial-subsumption
8528 which makes bad code. Actually we could treat
8529 component_ref's of vars like vars. */
8531 tree lhs = TREE_OPERAND (exp, 0);
8532 tree rhs = TREE_OPERAND (exp, 1);
8534 temp = 0;
8536 /* Check for |= or &= of a bitfield of size one into another bitfield
8537 of size 1. In this case, (unless we need the result of the
8538 assignment) we can do this more efficiently with a
8539 test followed by an assignment, if necessary.
8541 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8542 things change so we do, this code should be enhanced to
8543 support it. */
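/* E.g. for s.f |= t.f with both fields one bit wide and the result
   unused: test t.f and, only if it is set, store 1 into s.f, avoiding
   a full read-modify-write of s.f.  */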
8544 if (ignore
8545 && TREE_CODE (lhs) == COMPONENT_REF
8546 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8547 || TREE_CODE (rhs) == BIT_AND_EXPR)
8548 && TREE_OPERAND (rhs, 0) == lhs
8549 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8550 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8551 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8553 rtx label = gen_label_rtx ();
8555 do_jump (TREE_OPERAND (rhs, 1),
8556 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8557 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8558 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8559 (TREE_CODE (rhs) == BIT_IOR_EXPR
8560 ? integer_one_node
8561 : integer_zero_node)),
8563 do_pending_stack_adjust ();
8564 emit_label (label);
8565 return const0_rtx;
8568 temp = expand_assignment (lhs, rhs, ! ignore);
8570 return temp;
8573 case RETURN_EXPR:
8574 if (!TREE_OPERAND (exp, 0))
8575 expand_null_return ();
8576 else
8577 expand_return (TREE_OPERAND (exp, 0));
8578 return const0_rtx;
8580 case PREINCREMENT_EXPR:
8581 case PREDECREMENT_EXPR:
8582 return REDUCE_BIT_FIELD (expand_increment (exp, 0, ignore));
8584 case POSTINCREMENT_EXPR:
8585 case POSTDECREMENT_EXPR:
8586 /* Faster to treat as pre-increment if result is not used. */
8587 return REDUCE_BIT_FIELD (expand_increment (exp, ! ignore, ignore));
8589 case ADDR_EXPR:
8590 if (modifier == EXPAND_STACK_PARM)
8591 target = 0;
8592 /* If we are taking the address of something erroneous, just
8593 return a zero. */
8594 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8595 return const0_rtx;
8596 /* If we are taking the address of a constant and are at the
8597 top level, we have to use output_constant_def since we can't
8598 call force_const_mem at top level. */
8599 else if (cfun == 0
8600 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8601 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8602 == 'c')))
8603 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8604 else
8606 /* We make sure to pass const0_rtx down if we came in with
8607 ignore set, to avoid doing the cleanups twice for something. */
8608 op0 = expand_expr (TREE_OPERAND (exp, 0),
8609 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8610 (modifier == EXPAND_INITIALIZER
8611 ? modifier : EXPAND_CONST_ADDRESS));
8613 /* If we are going to ignore the result, OP0 will have been set
8614 to const0_rtx, so just return it. Don't get confused and
8615 think we are taking the address of the constant. */
8616 if (ignore)
8617 return op0;
8619 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8620 clever and return a REG when given a MEM. */
8621 op0 = protect_from_queue (op0, 1);
8623 /* We would like the object in memory. If it is a constant, we can
8624 have it be statically allocated into memory. For a non-constant,
8625 we need to allocate some memory and store the value into it. */
8627 if (CONSTANT_P (op0))
8628 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8629 op0);
8630 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8631 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8632 || GET_CODE (op0) == LO_SUM)
8634 /* If this object is in a register, it can't be BLKmode. */
8635 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8636 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8638 if (GET_CODE (op0) == PARALLEL)
8639 /* Handle calls that pass values in multiple
8640 non-contiguous locations. The Irix 6 ABI has examples
8641 of this. */
8642 emit_group_store (memloc, op0, inner_type,
8643 int_size_in_bytes (inner_type));
8644 else
8645 emit_move_insn (memloc, op0);
8647 op0 = memloc;
8650 if (!MEM_P (op0))
8651 abort ();
8653 mark_temp_addr_taken (op0);
8654 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8656 op0 = XEXP (op0, 0);
8657 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8658 op0 = convert_memory_address (ptr_mode, op0);
8659 return op0;
8662 /* If OP0 is not aligned at least as much as the type requires, we
8663 need to make a temporary, copy OP0 to it, and take the address of
8664 the temporary. We want to use the alignment of the type, not of
8665 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8666 the test for BLKmode means that can't happen. The test for
8667 BLKmode is because we never make mis-aligned MEMs with
8668 non-BLKmode.
8670 We don't need to do this at all if the machine doesn't have
8671 strict alignment. */
8672 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8673 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8674 > MEM_ALIGN (op0))
8675 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8677 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8678 rtx new;
8680 if (TYPE_ALIGN_OK (inner_type))
8681 abort ();
8683 if (TREE_ADDRESSABLE (inner_type))
8685 /* We can't make a bitwise copy of this object, so fail. */
8686 error ("cannot take the address of an unaligned member");
8687 return const0_rtx;
8690 new = assign_stack_temp_for_type
8691 (TYPE_MODE (inner_type),
8692 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8693 : int_size_in_bytes (inner_type),
8694 1, build_qualified_type (inner_type,
8695 (TYPE_QUALS (inner_type)
8696 | TYPE_QUAL_CONST)));
8698 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8699 (modifier == EXPAND_STACK_PARM
8700 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8702 op0 = new;
8705 op0 = force_operand (XEXP (op0, 0), target);
8708 if (flag_force_addr
8709 && !REG_P (op0)
8710 && modifier != EXPAND_CONST_ADDRESS
8711 && modifier != EXPAND_INITIALIZER
8712 && modifier != EXPAND_SUM)
8713 op0 = force_reg (Pmode, op0);
8715 if (REG_P (op0)
8716 && ! REG_USERVAR_P (op0))
8717 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8719 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8720 op0 = convert_memory_address (ptr_mode, op0);
8722 return op0;
8724 case ENTRY_VALUE_EXPR:
8725 abort ();
8727 /* COMPLEX type for Extended Pascal & Fortran */
8728 case COMPLEX_EXPR:
8730 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8731 rtx insns;
8733 /* Get the rtx code of the operands. */
8734 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8735 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8737 if (! target)
8738 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8740 start_sequence ();
8742 /* Move the real (op0) and imaginary (op1) parts to their location. */
8743 emit_move_insn (gen_realpart (mode, target), op0);
8744 emit_move_insn (gen_imagpart (mode, target), op1);
8746 insns = get_insns ();
8747 end_sequence ();
8749 /* Complex construction should appear as a single unit. */
8750 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8751 each with a separate pseudo as destination.
8752 It's not correct for flow to treat them as a unit. */
8753 if (GET_CODE (target) != CONCAT)
8754 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8755 else
8756 emit_insn (insns);
8758 return target;
8761 case REALPART_EXPR:
8762 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8763 return gen_realpart (mode, op0);
8765 case IMAGPART_EXPR:
8766 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8767 return gen_imagpart (mode, op0);
8769 case CONJ_EXPR:
8771 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8772 rtx imag_t;
8773 rtx insns;
8775 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8777 if (! target)
8778 target = gen_reg_rtx (mode);
8780 start_sequence ();
8782 /* Store the realpart and the negated imagpart to target. */
8783 emit_move_insn (gen_realpart (partmode, target),
8784 gen_realpart (partmode, op0));
8786 imag_t = gen_imagpart (partmode, target);
8787 temp = expand_unop (partmode,
8788 ! unsignedp && flag_trapv
8789 && (GET_MODE_CLASS(partmode) == MODE_INT)
8790 ? negv_optab : neg_optab,
8791 gen_imagpart (partmode, op0), imag_t, 0);
8792 if (temp != imag_t)
8793 emit_move_insn (imag_t, temp);
8795 insns = get_insns ();
8796 end_sequence ();
8798 /* Conjugate should appear as a single unit.
8799 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8800 each with a separate pseudo as destination.
8801 It's not correct for flow to treat them as a unit. */
8802 if (GET_CODE (target) != CONCAT)
8803 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8804 else
8805 emit_insn (insns);
8807 return target;
8810 case RESX_EXPR:
8811 expand_resx_expr (exp);
8812 return const0_rtx;
8814 case TRY_CATCH_EXPR:
8815 case CATCH_EXPR:
8816 case EH_FILTER_EXPR:
8817 case TRY_FINALLY_EXPR:
8818 /* Lowered by tree-eh.c. */
8819 abort ();
8821 case WITH_CLEANUP_EXPR:
8822 case CLEANUP_POINT_EXPR:
8823 case TARGET_EXPR:
8824 /* Lowered by gimplify.c. */
8825 abort ();
8827 case VA_ARG_EXPR:
8828 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8830 case EXC_PTR_EXPR:
8831 return get_exception_pointer (cfun);
8833 case FILTER_EXPR:
8834 return get_exception_filter (cfun);
8836 case FDESC_EXPR:
8837 /* Function descriptors are not valid except as
8838 initialization constants, and should not be expanded. */
8839 abort ();
8841 case SWITCH_EXPR:
8842 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
8843 "switch");
8844 if (SWITCH_BODY (exp))
8845 expand_expr_stmt (SWITCH_BODY (exp));
8846 if (SWITCH_LABELS (exp))
8848 tree duplicate = 0;
8849 tree vec = SWITCH_LABELS (exp);
8850 size_t i, n = TREE_VEC_LENGTH (vec);
8852 for (i = 0; i < n; ++i)
8854 tree elt = TREE_VEC_ELT (vec, i);
8855 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
8856 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
8857 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
8859 tree case_low = CASE_LOW (elt);
8860 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
8861 if (case_low && case_high)
8863 /* Case label is less than minimum for type. */
8864 if (TREE_CODE (min_value) == INTEGER_CST
8865 && tree_int_cst_compare (case_low, min_value) < 0
8866 && tree_int_cst_compare (case_high, min_value) < 0)
8868 warning ("case label value %d is less than minimum value for type",
8869 TREE_INT_CST (case_low));
8870 continue;
8873 /* Case value is greater than maximum for type. */
8874 if (TREE_CODE (max_value) == INTEGER_CST
8875 && tree_int_cst_compare (case_low, max_value) > 0
8876 && tree_int_cst_compare (case_high, max_value) > 0)
8878 warning ("case label value %d exceeds maximum value for type",
8879 TREE_INT_CST (case_high));
8880 continue;
8883 /* Saturate lower case label value to minimum. */
8884 if (TREE_CODE (min_value) == INTEGER_CST
8885 && tree_int_cst_compare (case_high, min_value) >= 0
8886 && tree_int_cst_compare (case_low, min_value) < 0)
8888 warning ("lower value %d in case label range less than minimum value for type",
8889 TREE_INT_CST (case_low));
8890 case_low = min_value;
8893 /* Saturate upper case label value to maximum. */
8894 if (TREE_CODE (max_value) == INTEGER_CST
8895 && tree_int_cst_compare (case_low, max_value) <= 0
8896 && tree_int_cst_compare (case_high, max_value) > 0)
8898 warning ("upper value %d in case label range exceeds maximum value for type",
8899 TREE_INT_CST (case_high));
8900 case_high = max_value;
8904 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
8905 if (duplicate)
8906 abort ();
8909 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
8910 return const0_rtx;
8912 case LABEL_EXPR:
8913 expand_label (TREE_OPERAND (exp, 0));
8914 return const0_rtx;
8916 case CASE_LABEL_EXPR:
8918 tree duplicate = 0;
8919 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
8920 &duplicate, false);
8921 if (duplicate)
8922 abort ();
8923 return const0_rtx;
8926 case ASM_EXPR:
8927 expand_asm_expr (exp);
8928 return const0_rtx;
8930 default:
8931 return lang_hooks.expand_expr (exp, original_target, tmode,
8932 modifier, alt_rtl);
8935 /* Here to do an ordinary binary operator, generating an instruction
8936 from the optab already placed in `this_optab'. */
8937 binop:
8938 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8939 subtarget, &op0, &op1, 0);
8940 binop2:
8941 if (modifier == EXPAND_STACK_PARM)
8942 target = 0;
8943 temp = expand_binop (mode, this_optab, op0, op1, target,
8944 unsignedp, OPTAB_LIB_WIDEN);
8945 if (temp == 0)
8946 abort ();
8947 return REDUCE_BIT_FIELD (temp);
8949 #undef REDUCE_BIT_FIELD
8951 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8952 signedness of TYPE), possibly returning the result in TARGET. */
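/* For an unsigned 3-bit type this masks EXP with 7; for a signed 3-bit
   type it shifts left and then arithmetic-shifts right by the mode
   bitsize minus 3, so the value comes back properly sign-extended.  */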
8953 static rtx
8954 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8956 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8957 if (target && GET_MODE (target) != GET_MODE (exp))
8958 target = 0;
8959 if (TYPE_UNSIGNED (type))
8961 rtx mask;
8962 if (prec < HOST_BITS_PER_WIDE_INT)
8963 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8964 GET_MODE (exp));
8965 else
8966 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8967 ((unsigned HOST_WIDE_INT) 1
8968 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8969 GET_MODE (exp));
8970 return expand_and (GET_MODE (exp), exp, mask, target);
8972 else
8974 tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8975 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8976 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8980 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8981 when applied to the address of EXP produces an address known to be
8982 aligned more than BIGGEST_ALIGNMENT. */
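/* For example, an OFFSET of the form (- (ptrdiff_t) &EXP) & 63: adding
   it to the address of EXP rounds that address up to a 64-byte
   boundary, which exceeds BIGGEST_ALIGNMENT on typical targets.  */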
8984 static int
8985 is_aligning_offset (tree offset, tree exp)
8987 /* Strip off any conversions. */
8988 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8989 || TREE_CODE (offset) == NOP_EXPR
8990 || TREE_CODE (offset) == CONVERT_EXPR)
8991 offset = TREE_OPERAND (offset, 0);
8993 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8994 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8995 if (TREE_CODE (offset) != BIT_AND_EXPR
8996 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8997 || compare_tree_int (TREE_OPERAND (offset, 1),
8998 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8999 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9000 return 0;
9002 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9003 It must be NEGATE_EXPR. Then strip any more conversions. */
9004 offset = TREE_OPERAND (offset, 0);
9005 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9006 || TREE_CODE (offset) == NOP_EXPR
9007 || TREE_CODE (offset) == CONVERT_EXPR)
9008 offset = TREE_OPERAND (offset, 0);
9010 if (TREE_CODE (offset) != NEGATE_EXPR)
9011 return 0;
9013 offset = TREE_OPERAND (offset, 0);
9014 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9015 || TREE_CODE (offset) == NOP_EXPR
9016 || TREE_CODE (offset) == CONVERT_EXPR)
9017 offset = TREE_OPERAND (offset, 0);
9019 /* This must now be the address of EXP. */
9020 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9023 /* Return the tree node if an ARG corresponds to a string constant or zero
9024 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9025 in bytes within the string that ARG is accessing. The type of the
9026 offset will be `sizetype'. */
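/* As a rough illustration (hypothetical trees): for ARG of the form
   &"hello"[2], the STRING_CST "hello" is returned and *PTR_OFFSET is set
   to 2; for a PLUS_EXPR whose first operand is the address of a
   STRING_CST, that STRING_CST is returned and *PTR_OFFSET is set to the
   second operand converted to sizetype.  */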
9028 tree
9029 string_constant (tree arg, tree *ptr_offset)
9031 STRIP_NOPS (arg);
9033 if (TREE_CODE (arg) == ADDR_EXPR
9034 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9036 *ptr_offset = size_zero_node;
9037 return TREE_OPERAND (arg, 0);
9039 if (TREE_CODE (arg) == ADDR_EXPR
9040 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9041 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9043 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9044 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9046 else if (TREE_CODE (arg) == PLUS_EXPR)
9048 tree arg0 = TREE_OPERAND (arg, 0);
9049 tree arg1 = TREE_OPERAND (arg, 1);
9051 STRIP_NOPS (arg0);
9052 STRIP_NOPS (arg1);
9054 if (TREE_CODE (arg0) == ADDR_EXPR
9055 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9057 *ptr_offset = convert (sizetype, arg1);
9058 return TREE_OPERAND (arg0, 0);
9060 else if (TREE_CODE (arg1) == ADDR_EXPR
9061 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9063 *ptr_offset = convert (sizetype, arg0);
9064 return TREE_OPERAND (arg1, 0);
9068 return 0;
9071 /* Expand code for a post- or pre- increment or decrement
9072 and return the RTX for the result.
9073 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9075 static rtx
9076 expand_increment (tree exp, int post, int ignore)
9078 rtx op0, op1;
9079 rtx temp, value;
9080 tree incremented = TREE_OPERAND (exp, 0);
9081 optab this_optab = add_optab;
9082 int icode;
9083 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9084 int op0_is_copy = 0;
9085 int single_insn = 0;
9086 /* 1 means we can't store into OP0 directly,
9087 because it is a subreg narrower than a word,
9088 and we don't dare clobber the rest of the word. */
9089 int bad_subreg = 0;
9091 /* Stabilize any component ref that might need to be
9092 evaluated more than once below. */
9093 if (!post
9094 || TREE_CODE (incremented) == BIT_FIELD_REF
9095 || (TREE_CODE (incremented) == COMPONENT_REF
9096 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9097 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9098 incremented = stabilize_reference (incremented);
9099 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9100 ones into save exprs so that they don't accidentally get evaluated
9101 more than once by the code below. */
9102 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9103 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9104 incremented = save_expr (incremented);
9106 /* Compute the operands as RTX.
9107 Note whether OP0 is the actual lvalue or a copy of it:
9108 I believe it is a copy iff it is a register or subreg
9109 and insns were generated in computing it. */
9111 temp = get_last_insn ();
9112 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9114 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9115 in place but instead must do sign- or zero-extension during assignment,
9116 so we copy it into a new register and let the code below use it as
9117 a copy.
9119 Note that we can safely modify this SUBREG since it is known not to be
9120 shared (it was made by the expand_expr call above). */
9122 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9124 if (post)
9125 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9126 else
9127 bad_subreg = 1;
9129 else if (GET_CODE (op0) == SUBREG
9130 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9132 /* We cannot increment this SUBREG in place. If we are
9133 post-incrementing, get a copy of the old value. Otherwise,
9134 just mark that we cannot increment in place. */
9135 if (post)
9136 op0 = copy_to_reg (op0);
9137 else
9138 bad_subreg = 1;
9141 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
9142 && temp != get_last_insn ());
9143 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9145 /* Decide whether incrementing or decrementing. */
9146 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9147 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9148 this_optab = sub_optab;
9150 /* Convert decrement by a constant into a negative increment. */
9151 if (this_optab == sub_optab
9152 && GET_CODE (op1) == CONST_INT)
9154 op1 = GEN_INT (-INTVAL (op1));
9155 this_optab = add_optab;
9158 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9159 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9161 /* For a preincrement, see if we can do this with a single instruction. */
9162 if (!post)
9164 icode = (int) this_optab->handlers[(int) mode].insn_code;
9165 if (icode != (int) CODE_FOR_nothing
9166 /* Make sure that OP0 is valid for operands 0 and 1
9167 of the insn we want to queue. */
9168 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9169 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9170 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9171 single_insn = 1;
9174 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9175 then we cannot just increment OP0. We must therefore contrive to
9176 increment the original value. Then, for postincrement, we can return
9177 OP0 since it is a copy of the old value. For preincrement, expand here
9178 unless we can do it with a single insn.
9180 Likewise if storing directly into OP0 would clobber high bits
9181 we need to preserve (bad_subreg). */
9182 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9184 /* This is the easiest way to increment the value wherever it is.
9185 Problems with multiple evaluation of INCREMENTED are prevented
9186 because either (1) it is a component_ref or preincrement,
9187 in which case it was stabilized above, or (2) it is an array_ref
9188 with constant index in an array in a register, which is
9189 safe to reevaluate. */
9190 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9191 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9192 ? MINUS_EXPR : PLUS_EXPR),
9193 TREE_TYPE (exp),
9194 incremented,
9195 TREE_OPERAND (exp, 1));
9197 while (TREE_CODE (incremented) == NOP_EXPR
9198 || TREE_CODE (incremented) == CONVERT_EXPR)
9200 newexp = convert (TREE_TYPE (incremented), newexp);
9201 incremented = TREE_OPERAND (incremented, 0);
9204 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9205 return post ? op0 : temp;
9208 if (post)
9210 /* We have a true reference to the value in OP0.
9211 If there is an insn to add or subtract in this mode, queue it.
9212 Queuing the increment insn avoids the register shuffling
9213 that often results if we must increment now and first save
9214 the old value for subsequent use. */
9216 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9217 op0 = stabilize (op0);
9218 #endif
9220 icode = (int) this_optab->handlers[(int) mode].insn_code;
9221 if (icode != (int) CODE_FOR_nothing
9222 /* Make sure that OP0 is valid for operands 0 and 1
9223 of the insn we want to queue. */
9224 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9225 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9227 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9228 op1 = force_reg (mode, op1);
9230 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9232 if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
9234 rtx addr = (general_operand (XEXP (op0, 0), mode)
9235 ? force_reg (Pmode, XEXP (op0, 0))
9236 : copy_to_reg (XEXP (op0, 0)));
9237 rtx temp, result;
9239 op0 = replace_equiv_address (op0, addr);
9240 temp = force_reg (GET_MODE (op0), op0);
9241 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9242 op1 = force_reg (mode, op1);
9244 /* The increment queue is LIFO, thus we have to `queue'
9245 the instructions in reverse order. */
9246 enqueue_insn (op0, gen_move_insn (op0, temp));
9247 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9248 return result;
9252 /* Preincrement, or we can't increment with one simple insn. */
9253 if (post)
9254 /* Save a copy of the value before inc or dec, to return it later. */
9255 temp = value = copy_to_reg (op0);
9256 else
9257 /* Arrange to return the incremented value. */
9258 /* Copy the rtx because expand_binop will protect from the queue,
9259 and the results of that would be invalid for us to return
9260 if our caller does emit_queue before using our result. */
9261 temp = copy_rtx (value = op0);
9263 /* Increment however we can. */
9264 op1 = expand_binop (mode, this_optab, value, op1, op0,
9265 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9267 /* Make sure the value is stored into OP0. */
9268 if (op1 != op0)
9269 emit_move_insn (op0, op1);
9271 return temp;
9274 /* Generate code to calculate EXP using a store-flag instruction
9275 and return an rtx for the result. EXP is either a comparison
9276 or a TRUTH_NOT_EXPR whose operand is a comparison.
9278 If TARGET is nonzero, store the result there if convenient.
9280 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9281 cheap.
9283 Return zero if there is no suitable set-flag instruction
9284 available on this machine.
9286 Once expand_expr has been called on the arguments of the comparison,
9287 we are committed to doing the store flag, since it is not safe to
9288 re-evaluate the expression. We emit the store-flag insn by calling
9289 emit_store_flag, but only expand the arguments if we have a reason
9290 to believe that emit_store_flag will be successful. If we think that
9291 it will, but it isn't, we have to simulate the store-flag with a
9292 set/jump/set sequence. */
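/* As a rough illustration (hypothetical target): for EXP of the form
   a < b on a machine with a suitable scc pattern, a single store-flag
   insn leaves 0 or 1 in the result; if emit_store_flag nevertheless
   fails after the operands were expanded, the fallback below emits a
   move of 1, a compare-and-branch over a move of 0, and a label.  */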
9294 static rtx
9295 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9297 enum rtx_code code;
9298 tree arg0, arg1, type;
9299 tree tem;
9300 enum machine_mode operand_mode;
9301 int invert = 0;
9302 int unsignedp;
9303 rtx op0, op1;
9304 enum insn_code icode;
9305 rtx subtarget = target;
9306 rtx result, label;
9308 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9309 result at the end. We can't simply invert the test since it would
9310 have already been inverted if it were valid. This case occurs for
9311 some floating-point comparisons. */
9313 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9314 invert = 1, exp = TREE_OPERAND (exp, 0);
9316 arg0 = TREE_OPERAND (exp, 0);
9317 arg1 = TREE_OPERAND (exp, 1);
9319 /* Don't crash if the comparison was erroneous. */
9320 if (arg0 == error_mark_node || arg1 == error_mark_node)
9321 return const0_rtx;
9323 type = TREE_TYPE (arg0);
9324 operand_mode = TYPE_MODE (type);
9325 unsignedp = TYPE_UNSIGNED (type);
9327 /* We won't bother with BLKmode store-flag operations because it would mean
9328 passing a lot of information to emit_store_flag. */
9329 if (operand_mode == BLKmode)
9330 return 0;
9332 /* We won't bother with store-flag operations involving function pointers
9333 when function pointers must be canonicalized before comparisons. */
9334 #ifdef HAVE_canonicalize_funcptr_for_compare
9335 if (HAVE_canonicalize_funcptr_for_compare
9336 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9337 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9338 == FUNCTION_TYPE))
9339 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9340 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9341 == FUNCTION_TYPE))))
9342 return 0;
9343 #endif
9345 STRIP_NOPS (arg0);
9346 STRIP_NOPS (arg1);
9348 /* Get the rtx comparison code to use. We know that EXP is a comparison
9349 operation of some type. Some comparisons against 1 and -1 can be
9350 converted to comparisons with zero. Do so here so that the tests
9351 below will be aware that we have a comparison with zero. These
9352 tests will not catch constants in the first operand, but constants
9353 are rarely passed as the first operand. */
9355 switch (TREE_CODE (exp))
9357 case EQ_EXPR:
9358 code = EQ;
9359 break;
9360 case NE_EXPR:
9361 code = NE;
9362 break;
9363 case LT_EXPR:
9364 if (integer_onep (arg1))
9365 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9366 else
9367 code = unsignedp ? LTU : LT;
9368 break;
9369 case LE_EXPR:
9370 if (! unsignedp && integer_all_onesp (arg1))
9371 arg1 = integer_zero_node, code = LT;
9372 else
9373 code = unsignedp ? LEU : LE;
9374 break;
9375 case GT_EXPR:
9376 if (! unsignedp && integer_all_onesp (arg1))
9377 arg1 = integer_zero_node, code = GE;
9378 else
9379 code = unsignedp ? GTU : GT;
9380 break;
9381 case GE_EXPR:
9382 if (integer_onep (arg1))
9383 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9384 else
9385 code = unsignedp ? GEU : GE;
9386 break;
9388 case UNORDERED_EXPR:
9389 code = UNORDERED;
9390 break;
9391 case ORDERED_EXPR:
9392 code = ORDERED;
9393 break;
9394 case UNLT_EXPR:
9395 code = UNLT;
9396 break;
9397 case UNLE_EXPR:
9398 code = UNLE;
9399 break;
9400 case UNGT_EXPR:
9401 code = UNGT;
9402 break;
9403 case UNGE_EXPR:
9404 code = UNGE;
9405 break;
9406 case UNEQ_EXPR:
9407 code = UNEQ;
9408 break;
9409 case LTGT_EXPR:
9410 code = LTGT;
9411 break;
9413 default:
9414 abort ();
9417 /* Put a constant second. */
9418 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9420 tem = arg0; arg0 = arg1; arg1 = tem;
9421 code = swap_condition (code);
9424 /* If this is an equality or inequality test of a single bit, we can
9425 do this by shifting the bit being tested to the low-order bit and
9426 masking the result with the constant 1. If the condition was EQ,
9427 we xor it with 1. This does not require an scc insn and is faster
9428 than an scc insn even if we have it.
9430 The code to make this transformation was moved into fold_single_bit_test,
9431 so we just call into the folder and expand its result. */
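/* As a rough illustration (hypothetical expression): (x & 8) != 0 is
   expanded as (x >> 3) & 1, and (x & 8) == 0 as ((x >> 3) & 1) ^ 1,
   with no scc instruction needed.  */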
9433 if ((code == NE || code == EQ)
9434 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9435 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9437 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9438 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9439 arg0, arg1, type),
9440 target, VOIDmode, EXPAND_NORMAL);
9443 /* Now see if we are likely to be able to do this. Return if not. */
9444 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9445 return 0;
9447 icode = setcc_gen_code[(int) code];
9448 if (icode == CODE_FOR_nothing
9449 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9451 /* We can only do this if it is one of the special cases that
9452 can be handled without an scc insn. */
9453 if ((code == LT && integer_zerop (arg1))
9454 || (! only_cheap && code == GE && integer_zerop (arg1)))
9456 else if (BRANCH_COST >= 0
9457 && ! only_cheap && (code == NE || code == EQ)
9458 && TREE_CODE (type) != REAL_TYPE
9459 && ((abs_optab->handlers[(int) operand_mode].insn_code
9460 != CODE_FOR_nothing)
9461 || (ffs_optab->handlers[(int) operand_mode].insn_code
9462 != CODE_FOR_nothing)))
9464 else
9465 return 0;
9468 if (! get_subtarget (target)
9469 || GET_MODE (subtarget) != operand_mode)
9470 subtarget = 0;
9472 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9474 if (target == 0)
9475 target = gen_reg_rtx (mode);
9477 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9478 because, if emit_store_flag does anything, it will succeed and
9479 OP0 and OP1 will not be used subsequently. */
9481 result = emit_store_flag (target, code,
9482 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9483 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9484 operand_mode, unsignedp, 1);
9486 if (result)
9488 if (invert)
9489 result = expand_binop (mode, xor_optab, result, const1_rtx,
9490 result, 0, OPTAB_LIB_WIDEN);
9491 return result;
9494 /* If this failed, we have to do this with set/compare/jump/set code. */
9495 if (!REG_P (target)
9496 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9497 target = gen_reg_rtx (GET_MODE (target));
9499 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9500 result = compare_from_rtx (op0, op1, code, unsignedp,
9501 operand_mode, NULL_RTX);
9502 if (GET_CODE (result) == CONST_INT)
9503 return (((result == const0_rtx && ! invert)
9504 || (result != const0_rtx && invert))
9505 ? const0_rtx : const1_rtx);
9507 /* The code of RESULT may not match CODE if compare_from_rtx
9508 decided to swap its operands and reverse the original code.
9510 We know that compare_from_rtx returns either a CONST_INT or
9511 a new comparison code, so it is safe to just extract the
9512 code from RESULT. */
9513 code = GET_CODE (result);
9515 label = gen_label_rtx ();
9516 if (bcc_gen_fctn[(int) code] == 0)
9517 abort ();
9519 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9520 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9521 emit_label (label);
9523 return target;
9527 /* Stubs in case we haven't got a casesi insn. */
9528 #ifndef HAVE_casesi
9529 # define HAVE_casesi 0
9530 # define gen_casesi(a, b, c, d, e) (0)
9531 # define CODE_FOR_casesi CODE_FOR_nothing
9532 #endif
9534 /* If the machine does not have a case insn that compares the bounds,
9535 this means extra overhead for dispatch tables, which raises the
9536 threshold for using them. */
9537 #ifndef CASE_VALUES_THRESHOLD
9538 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9539 #endif /* CASE_VALUES_THRESHOLD */
9541 unsigned int
9542 case_values_threshold (void)
9544 return CASE_VALUES_THRESHOLD;
9547 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9548 0 otherwise (i.e. if there is no casesi instruction). */
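/* As a rough illustration (hypothetical target): when switching on a
   DImode index with an SImode casesi pattern, the index is range-checked
   against RANGE in DImode first and only then truncated to SImode, so an
   out-of-range value cannot be mangled by the truncation before it
   reaches DEFAULT_LABEL.  */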
9550 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9551 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9553 enum machine_mode index_mode = SImode;
9554 int index_bits = GET_MODE_BITSIZE (index_mode);
9555 rtx op1, op2, index;
9556 enum machine_mode op_mode;
9558 if (! HAVE_casesi)
9559 return 0;
9561 /* Convert the index to SImode. */
9562 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9564 enum machine_mode omode = TYPE_MODE (index_type);
9565 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9567 /* We must handle the endpoints in the original mode. */
9568 index_expr = build (MINUS_EXPR, index_type,
9569 index_expr, minval);
9570 minval = integer_zero_node;
9571 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9572 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9573 omode, 1, default_label);
9574 /* Now we can safely truncate. */
9575 index = convert_to_mode (index_mode, index, 0);
9577 else
9579 if (TYPE_MODE (index_type) != index_mode)
9581 index_expr = convert (lang_hooks.types.type_for_size
9582 (index_bits, 0), index_expr);
9583 index_type = TREE_TYPE (index_expr);
9586 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9588 emit_queue ();
9589 index = protect_from_queue (index, 0);
9590 do_pending_stack_adjust ();
9592 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9593 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9594 (index, op_mode))
9595 index = copy_to_mode_reg (op_mode, index);
9597 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9599 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9600 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9601 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9602 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9603 (op1, op_mode))
9604 op1 = copy_to_mode_reg (op_mode, op1);
9606 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9608 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9609 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9610 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9611 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9612 (op2, op_mode))
9613 op2 = copy_to_mode_reg (op_mode, op2);
9615 emit_jump_insn (gen_casesi (index, op1, op2,
9616 table_label, default_label));
9617 return 1;
9620 /* Attempt to generate a tablejump instruction; same concept. */
9621 #ifndef HAVE_tablejump
9622 #define HAVE_tablejump 0
9623 #define gen_tablejump(x, y) (0)
9624 #endif
9626 /* Subroutine of the next function.
9628 INDEX is the value being switched on, with the lowest value
9629 in the table already subtracted.
9630 MODE is its expected mode (needed if INDEX is constant).
9631 RANGE is the length of the jump table.
9632 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9634 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9635 index value is out of range. */
9637 static void
9638 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9639 rtx default_label)
9641 rtx temp, vector;
9643 if (INTVAL (range) > cfun->max_jumptable_ents)
9644 cfun->max_jumptable_ents = INTVAL (range);
9646 /* Do an unsigned comparison (in the proper mode) between the index
9647 expression and the value which represents the length of the range.
9648 Since we just finished subtracting the lower bound of the range
9649 from the index expression, this comparison allows us to simultaneously
9650 check that the original index expression value is both greater than
9651 or equal to the minimum value of the range and less than or equal to
9652 the maximum value of the range. */
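/* As a rough illustration (hypothetical case values): for a switch over
   the values 10..20, RANGE is 10 and INDEX already has 10 subtracted, so
   a single unsigned INDEX > 10 test reaches DEFAULT_LABEL both when the
   original value was below 10 (the subtraction wraps around to a huge
   unsigned number) and when it was above 20.  */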
9654 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9655 default_label);
9657 /* If index is in range, it must fit in Pmode.
9658 Convert to Pmode so we can index with it. */
9659 if (mode != Pmode)
9660 index = convert_to_mode (Pmode, index, 1);
9662 /* Don't let a MEM slip through, because then INDEX that comes
9663 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9664 and break_out_memory_refs will go to work on it and mess it up. */
9665 #ifdef PIC_CASE_VECTOR_ADDRESS
9666 if (flag_pic && !REG_P (index))
9667 index = copy_to_mode_reg (Pmode, index);
9668 #endif
9670 /* If flag_force_addr were to affect this address
9671 it could interfere with the tricky assumptions made
9672 about addresses that contain label-refs,
9673 which may be valid only very near the tablejump itself. */
9674 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9675 GET_MODE_SIZE, because this indicates how large insns are. The other
9676 uses should all be Pmode, because they are addresses. This code
9677 could fail if addresses and insns are not the same size. */
9678 index = gen_rtx_PLUS (Pmode,
9679 gen_rtx_MULT (Pmode, index,
9680 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9681 gen_rtx_LABEL_REF (Pmode, table_label));
9682 #ifdef PIC_CASE_VECTOR_ADDRESS
9683 if (flag_pic)
9684 index = PIC_CASE_VECTOR_ADDRESS (index);
9685 else
9686 #endif
9687 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9688 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9689 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9690 RTX_UNCHANGING_P (vector) = 1;
9691 MEM_NOTRAP_P (vector) = 1;
9692 convert_move (temp, vector, 0);
9694 emit_jump_insn (gen_tablejump (temp, table_label));
9696 /* If we are generating PIC code or if the table is PC-relative, the
9697 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9698 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9699 emit_barrier ();
9703 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9704 rtx table_label, rtx default_label)
9706 rtx index;
9708 if (! HAVE_tablejump)
9709 return 0;
9711 index_expr = fold (build (MINUS_EXPR, index_type,
9712 convert (index_type, index_expr),
9713 convert (index_type, minval)));
9714 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9715 emit_queue ();
9716 index = protect_from_queue (index, 0);
9717 do_pending_stack_adjust ();
9719 do_tablejump (index, TYPE_MODE (index_type),
9720 convert_modes (TYPE_MODE (index_type),
9721 TYPE_MODE (TREE_TYPE (range)),
9722 expand_expr (range, NULL_RTX,
9723 VOIDmode, 0),
9724 TYPE_UNSIGNED (TREE_TYPE (range))),
9725 table_label, default_label);
9726 return 1;
9729 /* Nonzero if the mode is a valid vector mode for this architecture.
9730 This returns nonzero even if there is no hardware support for the
9731 vector mode, but we can emulate with narrower modes. */
9734 vector_mode_valid_p (enum machine_mode mode)
9736 enum mode_class class = GET_MODE_CLASS (mode);
9737 enum machine_mode innermode;
9739 /* Doh! What's going on? */
9740 if (class != MODE_VECTOR_INT
9741 && class != MODE_VECTOR_FLOAT)
9742 return 0;
9744 /* Hardware support. Woo hoo! */
9745 if (VECTOR_MODE_SUPPORTED_P (mode))
9746 return 1;
9748 innermode = GET_MODE_INNER (mode);
9750 /* We should probably return 1 if requesting V4DI when we have no DI
9751 but do have V2DI; however, that case is probably very unlikely. */
9753 /* If we have support for the inner mode, we can safely emulate it.
9754 We may not have V2DI, but we can emulate it with a pair of DIs. */
9755 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
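/* As a rough illustration (target-dependent): even on a target with no
   vector hardware, V4HImode is reported as valid here as long as HImode
   has a move pattern, since operations on such vectors can be emulated
   with the narrower mode.  */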
9758 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9759 static rtx
9760 const_vector_from_tree (tree exp)
9762 rtvec v;
9763 int units, i;
9764 tree link, elt;
9765 enum machine_mode inner, mode;
9767 mode = TYPE_MODE (TREE_TYPE (exp));
9769 if (initializer_zerop (exp))
9770 return CONST0_RTX (mode);
9772 units = GET_MODE_NUNITS (mode);
9773 inner = GET_MODE_INNER (mode);
9775 v = rtvec_alloc (units);
9777 link = TREE_VECTOR_CST_ELTS (exp);
9778 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9780 elt = TREE_VALUE (link);
9782 if (TREE_CODE (elt) == REAL_CST)
9783 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9784 inner);
9785 else
9786 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9787 TREE_INT_CST_HIGH (elt),
9788 inner);
9791 /* Initialize remaining elements to 0. */
9792 for (; i < units; ++i)
9793 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9795 return gen_rtx_raw_CONST_VECTOR (mode, v);
9797 #include "gt-expr.h"