1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
58 #ifdef PUSH_ROUNDING
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
66 #endif
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
80 #else
81 #define TARGET_MEM_FUNCTIONS 0
82 #endif
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
93 /* This structure is used by move_by_pieces to describe the move to
94 be performed. */
    95 struct move_by_pieces
    96 {
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 rtx from;
102 rtx from_addr;
103 int autinc_from;
104 int explicit_inc_from;
105 unsigned HOST_WIDE_INT len;
106 HOST_WIDE_INT offset;
   107   int reverse;
   108 };
110 /* This structure is used by store_by_pieces to describe the clear to
111 be performed. */
   113 struct store_by_pieces
   114 {
115 rtx to;
116 rtx to_addr;
117 int autinc_to;
118 int explicit_inc_to;
119 unsigned HOST_WIDE_INT len;
120 HOST_WIDE_INT offset;
121 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
122 void *constfundata;
   123   int reverse;
   124 };
126 static rtx enqueue_insn (rtx, rtx);
127 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
128 unsigned int);
129 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
130 struct move_by_pieces *);
131 static bool block_move_libcall_safe_for_call_parm (void);
132 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
133 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
134 static tree emit_block_move_libcall_fn (int);
135 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
136 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
137 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
138 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
139 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
140 struct store_by_pieces *);
141 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
142 static rtx clear_storage_via_libcall (rtx, rtx);
143 static tree clear_storage_libcall_fn (int);
144 static rtx compress_float_constant (rtx, rtx);
145 static rtx get_subtarget (rtx);
146 static int is_zeros_p (tree);
147 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
148 HOST_WIDE_INT, enum machine_mode,
149 tree, tree, int, int);
150 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
151 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
152 tree, enum machine_mode, int, tree, int);
153 static rtx var_rtx (tree);
155 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
156 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
158 static int is_aligning_offset (tree, tree);
159 static rtx expand_increment (tree, int, int);
160 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
161 enum expand_modifier);
162 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
163 #ifdef PUSH_ROUNDING
164 static void emit_single_push_insn (enum machine_mode, rtx, tree);
165 #endif
166 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
167 static rtx const_vector_from_tree (tree);
169 /* Record for each mode whether we can move a register directly to or
170 from an object of that mode in memory. If we can't, we won't try
171 to use that mode directly when accessing a field of that mode. */
173 static char direct_load[NUM_MACHINE_MODES];
174 static char direct_store[NUM_MACHINE_MODES];
176 /* Record for each mode whether we can float-extend from memory. */
178 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185 #endif
187 /* This macro is used to determine whether clear_by_pieces should be
188 called to clear storage. */
189 #ifndef CLEAR_BY_PIECES_P
190 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
192 #endif
194 /* This macro is used to determine whether store_by_pieces should be
195 called to "memset" storage with byte values other than zero, or
196 to "memcpy" storage when the source is a constant string. */
197 #ifndef STORE_BY_PIECES_P
198 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
199 #endif
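/* Illustrative sketch, not part of the original expr.c: how the
   *_BY_PIECES_P predicates above are typically consulted when choosing
   a block-copy strategy.  The helper name is hypothetical; the real
   decision logic lives in emit_block_move further down in this file.  */
#if 0
static int
example_choose_inline_copy (unsigned HOST_WIDE_INT size, unsigned int align)
{
  /* Copy inline with scalar moves only when the estimated insn count
     is below MOVE_RATIO; otherwise prefer a movstr pattern or a
     memcpy libcall.  */
  return MOVE_BY_PIECES_P (size, align);
}
#endif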
201 /* This array records the insn_code of insns to perform block moves. */
202 enum insn_code movstr_optab[NUM_MACHINE_MODES];
204 /* This array records the insn_code of insns to perform block clears. */
205 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
207 /* These arrays record the insn_code of two different kinds of insns
208 to perform block compares. */
209 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
210 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
212 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
213 struct file_stack *expr_wfl_stack;
215 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
217 #ifndef SLOW_UNALIGNED_ACCESS
218 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
219 #endif
221 /* This is run once per compilation to set up which modes can be used
222 directly in memory and to initialize the block move optab. */
224 void
225 init_expr_once (void)
227 rtx insn, pat;
228 enum machine_mode mode;
229 int num_clobbers;
230 rtx mem, mem1;
231 rtx reg;
233 /* Try indexing by frame ptr and try by stack ptr.
234 It is known that on the Convex the stack ptr isn't a valid index.
235 With luck, one or the other is valid on any machine. */
236 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
237 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
239 /* A scratch register we can modify in-place below to avoid
240 useless RTL allocations. */
241 reg = gen_rtx_REG (VOIDmode, -1);
243 insn = rtx_alloc (INSN);
244 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
245 PATTERN (insn) = pat;
247 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
248 mode = (enum machine_mode) ((int) mode + 1))
250 int regno;
252 direct_load[(int) mode] = direct_store[(int) mode] = 0;
253 PUT_MODE (mem, mode);
254 PUT_MODE (mem1, mode);
255 PUT_MODE (reg, mode);
257 /* See if there is some register that can be used in this mode and
258 directly loaded or stored from memory. */
260 if (mode != VOIDmode && mode != BLKmode)
261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
262 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
263 regno++)
265 if (! HARD_REGNO_MODE_OK (regno, mode))
266 continue;
268 REGNO (reg) = regno;
270 SET_SRC (pat) = mem;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = mem1;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem1;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
292 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
294 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
295 mode = GET_MODE_WIDER_MODE (mode))
297 enum machine_mode srcmode;
298 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
299 srcmode = GET_MODE_WIDER_MODE (srcmode))
301 enum insn_code ic;
303 ic = can_extend_p (mode, srcmode, 0);
304 if (ic == CODE_FOR_nothing)
305 continue;
307 PUT_MODE (mem, srcmode);
309 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
310 float_extend_from_mem[mode][srcmode] = true;
315 /* This is run at the start of compiling a function. */
317 void
318 init_expr (void)
320 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
323 /* Small sanity check that the queue is empty at the end of a function. */
325 void
326 finish_expr_for_function (void)
328 if (pending_chain)
329 abort ();
332 /* Manage the queue of increment instructions to be output
333 for POSTINCREMENT_EXPR expressions, etc. */
335 /* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
342 static rtx
343 enqueue_insn (rtx var, rtx body)
345 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
346 body, pending_chain);
347 return pending_chain;
350 /* Use protect_from_queue to convert a QUEUED expression
351 into something that you can put immediately into an instruction.
352 If the queued incrementation has not happened yet,
353 protect_from_queue returns the variable itself.
354 If the incrementation has happened, protect_from_queue returns a temp
355 that contains a copy of the old value of the variable.
357 Any time an rtx which might possibly be a QUEUED is to be put
358 into an instruction, it must be passed through protect_from_queue first.
359 QUEUED expressions are not meaningful in instructions.
361 Do not pass a value through protect_from_queue and then hold
362 on to it for a while before putting it in an instruction!
363 If the queue is flushed in between, incorrect code will result. */
366 protect_from_queue (rtx x, int modify)
368 RTX_CODE code = GET_CODE (x);
370 #if 0 /* A QUEUED can hang around after the queue is forced out. */
371 /* Shortcut for most common case. */
372 if (pending_chain == 0)
373 return x;
374 #endif
376 if (code != QUEUED)
378 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
379 use of autoincrement. Make a copy of the contents of the memory
380 location rather than a copy of the address, but not if the value is
381 of mode BLKmode. Don't modify X in place since it might be
382 shared. */
383 if (code == MEM && GET_MODE (x) != BLKmode
384 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
386 rtx y = XEXP (x, 0);
387 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
389 if (QUEUED_INSN (y))
391 rtx temp = gen_reg_rtx (GET_MODE (x));
393 emit_insn_before (gen_move_insn (temp, new),
394 QUEUED_INSN (y));
395 return temp;
398 /* Copy the address into a pseudo, so that the returned value
399 remains correct across calls to emit_queue. */
400 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
403 /* Otherwise, recursively protect the subexpressions of all
404 the kinds of rtx's that can contain a QUEUED. */
405 if (code == MEM)
407 rtx tem = protect_from_queue (XEXP (x, 0), 0);
408 if (tem != XEXP (x, 0))
410 x = copy_rtx (x);
411 XEXP (x, 0) = tem;
414 else if (code == PLUS || code == MULT)
416 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
417 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
418 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 x = copy_rtx (x);
421 XEXP (x, 0) = new0;
422 XEXP (x, 1) = new1;
425 return x;
427 /* If the increment has not happened, use the variable itself. Copy it
428 into a new pseudo so that the value remains correct across calls to
429 emit_queue. */
430 if (QUEUED_INSN (x) == 0)
431 return copy_to_reg (QUEUED_VAR (x));
432 /* If the increment has happened and a pre-increment copy exists,
433 use that copy. */
434 if (QUEUED_COPY (x) != 0)
435 return QUEUED_COPY (x);
436 /* The increment has happened but we haven't set up a pre-increment copy.
437 Set one up now, and use it. */
438 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
439 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
440 QUEUED_INSN (x));
441 return QUEUED_COPY (x);
444 /* Return nonzero if X contains a QUEUED expression:
445 if it contains anything that will be altered by a queued increment.
446 We handle only combinations of MEM, PLUS, MINUS and MULT operators
447 since memory addresses generally contain only those. */
450 queued_subexp_p (rtx x)
452 enum rtx_code code = GET_CODE (x);
453 switch (code)
455 case QUEUED:
456 return 1;
457 case MEM:
458 return queued_subexp_p (XEXP (x, 0));
459 case MULT:
460 case PLUS:
461 case MINUS:
462 return (queued_subexp_p (XEXP (x, 0))
463 || queued_subexp_p (XEXP (x, 1)));
464 default:
465 return 0;
469 /* Retrieve a mark on the queue. */
471 static rtx
472 mark_queue (void)
474 return pending_chain;
477 /* Perform all the pending incrementations that have been enqueued
478 after MARK was retrieved. If MARK is null, perform all the
479 pending incrementations. */
481 static void
482 emit_insns_enqueued_after_mark (rtx mark)
484 rtx p;
486 /* The marked incrementation may have been emitted in the meantime
487 through a call to emit_queue. In this case, the mark is not valid
488 anymore so do nothing. */
489 if (mark && ! QUEUED_BODY (mark))
490 return;
492 while ((p = pending_chain) != mark)
494 rtx body = QUEUED_BODY (p);
496 switch (GET_CODE (body))
498 case INSN:
499 case JUMP_INSN:
500 case CALL_INSN:
501 case CODE_LABEL:
502 case BARRIER:
503 case NOTE:
504 QUEUED_INSN (p) = body;
505 emit_insn (body);
506 break;
508 #ifdef ENABLE_CHECKING
509 case SEQUENCE:
510 abort ();
511 break;
512 #endif
514 default:
515 QUEUED_INSN (p) = emit_insn (body);
516 break;
519 QUEUED_BODY (p) = 0;
520 pending_chain = QUEUED_NEXT (p);
524 /* Perform all the pending incrementations. */
526 void
527 emit_queue (void)
529 emit_insns_enqueued_after_mark (NULL_RTX);
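/* Illustrative sketch, not part of the original expr.c: the calling
   discipline documented above for protect_from_queue and emit_queue.
   The function and variable names are hypothetical; the point is that a
   possibly-QUEUED rtx must be protected immediately before it is put
   into an insn, and the queue flushed afterwards.  */
#if 0
static void
example_use_of_queue (rtx dest, rtx src)
{
  /* Convert any QUEUED wrappers into something an insn may contain.  */
  dest = protect_from_queue (dest, 1);
  src = protect_from_queue (src, 0);

  emit_move_insn (dest, src);

  /* Perform all the pending incrementations that were enqueued.  */
  emit_queue ();
}
#endif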
532 /* Copy data from FROM to TO, where the machine modes are not the same.
533 Both modes may be integer, or both may be floating.
534 UNSIGNEDP should be nonzero if FROM is an unsigned type.
535 This causes zero-extension instead of sign-extension. */
537 void
538 convert_move (rtx to, rtx from, int unsignedp)
540 enum machine_mode to_mode = GET_MODE (to);
541 enum machine_mode from_mode = GET_MODE (from);
542 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
543 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
544 enum insn_code code;
545 rtx libcall;
547 /* rtx code for making an equivalent value. */
548 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
549 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
551 to = protect_from_queue (to, 1);
552 from = protect_from_queue (from, 0);
554 if (to_real != from_real)
555 abort ();
557 /* If FROM is a SUBREG that indicates that we have already done at least
558 the required extension, strip it. We don't handle such SUBREGs as
559 TO here. */
561 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
562 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
563 >= GET_MODE_SIZE (to_mode))
564 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
565 from = gen_lowpart (to_mode, from), from_mode = to_mode;
567 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
568 abort ();
570 if (to_mode == from_mode
571 || (from_mode == VOIDmode && CONSTANT_P (from)))
573 emit_move_insn (to, from);
574 return;
577 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
579 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
580 abort ();
582 if (VECTOR_MODE_P (to_mode))
583 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
584 else
585 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
587 emit_move_insn (to, from);
588 return;
591 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
593 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
594 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
595 return;
598 if (to_real)
600 rtx value, insns;
601 convert_optab tab;
603 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
604 tab = sext_optab;
605 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
606 tab = trunc_optab;
607 else
608 abort ();
610 /* Try converting directly if the insn is supported. */
612 code = tab->handlers[to_mode][from_mode].insn_code;
613 if (code != CODE_FOR_nothing)
615 emit_unop_insn (code, to, from,
616 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
617 return;
620 /* Otherwise use a libcall. */
621 libcall = tab->handlers[to_mode][from_mode].libfunc;
623 if (!libcall)
624 /* This conversion is not implemented yet. */
625 abort ();
627 start_sequence ();
628 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
629 1, from, from_mode);
630 insns = get_insns ();
631 end_sequence ();
632 emit_libcall_block (insns, to, value,
633 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
634 from)
635 : gen_rtx_FLOAT_EXTEND (to_mode, from));
636 return;
639 /* Handle pointer conversion. */ /* SPEE 900220. */
640 /* Targets are expected to provide conversion insns between PxImode and
641 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
642 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
644 enum machine_mode full_mode
645 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
647 if (trunc_optab->handlers[to_mode][full_mode].insn_code
648 == CODE_FOR_nothing)
649 abort ();
651 if (full_mode != from_mode)
652 from = convert_to_mode (full_mode, from, unsignedp);
653 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
654 to, from, UNKNOWN);
655 return;
657 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
659 enum machine_mode full_mode
660 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
662 if (sext_optab->handlers[full_mode][from_mode].insn_code
663 == CODE_FOR_nothing)
664 abort ();
666 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
667 to, from, UNKNOWN);
668 if (to_mode == full_mode)
669 return;
671 /* else proceed to integer conversions below. */
672 from_mode = full_mode;
675 /* Now both modes are integers. */
677 /* Handle expanding beyond a word. */
678 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
679 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
681 rtx insns;
682 rtx lowpart;
683 rtx fill_value;
684 rtx lowfrom;
685 int i;
686 enum machine_mode lowpart_mode;
687 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
689 /* Try converting directly if the insn is supported. */
690 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
691 != CODE_FOR_nothing)
693 /* If FROM is a SUBREG, put it into a register. Do this
694 so that we always generate the same set of insns for
695 better cse'ing; if an intermediate assignment occurred,
696 we won't be doing the operation directly on the SUBREG. */
697 if (optimize > 0 && GET_CODE (from) == SUBREG)
698 from = force_reg (from_mode, from);
699 emit_unop_insn (code, to, from, equiv_code);
700 return;
702 /* Next, try converting via full word. */
703 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
704 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
705 != CODE_FOR_nothing))
707 if (GET_CODE (to) == REG)
709 if (reg_overlap_mentioned_p (to, from))
710 from = force_reg (from_mode, from);
711 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
713 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
714 emit_unop_insn (code, to,
715 gen_lowpart (word_mode, to), equiv_code);
716 return;
719 /* No special multiword conversion insn; do it by hand. */
720 start_sequence ();
722 /* Since we will turn this into a no conflict block, we must ensure
723 that the source does not overlap the target. */
725 if (reg_overlap_mentioned_p (to, from))
726 from = force_reg (from_mode, from);
728 /* Get a copy of FROM widened to a word, if necessary. */
729 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
730 lowpart_mode = word_mode;
731 else
732 lowpart_mode = from_mode;
734 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
736 lowpart = gen_lowpart (lowpart_mode, to);
737 emit_move_insn (lowpart, lowfrom);
739 /* Compute the value to put in each remaining word. */
740 if (unsignedp)
741 fill_value = const0_rtx;
742 else
744 #ifdef HAVE_slt
745 if (HAVE_slt
746 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
747 && STORE_FLAG_VALUE == -1)
749 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
750 lowpart_mode, 0);
751 fill_value = gen_reg_rtx (word_mode);
752 emit_insn (gen_slt (fill_value));
754 else
755 #endif
757 fill_value
758 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
759 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
760 NULL_RTX, 0);
761 fill_value = convert_to_mode (word_mode, fill_value, 1);
765 /* Fill the remaining words. */
766 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
768 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
769 rtx subword = operand_subword (to, index, 1, to_mode);
771 if (subword == 0)
772 abort ();
774 if (fill_value != subword)
775 emit_move_insn (subword, fill_value);
778 insns = get_insns ();
779 end_sequence ();
781 emit_no_conflict_block (insns, to, from, NULL_RTX,
782 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
783 return;
786 /* Truncating multi-word to a word or less. */
787 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
788 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
790 if (!((GET_CODE (from) == MEM
791 && ! MEM_VOLATILE_P (from)
792 && direct_load[(int) to_mode]
793 && ! mode_dependent_address_p (XEXP (from, 0)))
794 || GET_CODE (from) == REG
795 || GET_CODE (from) == SUBREG))
796 from = force_reg (from_mode, from);
797 convert_move (to, gen_lowpart (word_mode, from), 0);
798 return;
801 /* Now follow all the conversions between integers
802 no more than a word long. */
804 /* For truncation, usually we can just refer to FROM in a narrower mode. */
805 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
806 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
807 GET_MODE_BITSIZE (from_mode)))
809 if (!((GET_CODE (from) == MEM
810 && ! MEM_VOLATILE_P (from)
811 && direct_load[(int) to_mode]
812 && ! mode_dependent_address_p (XEXP (from, 0)))
813 || GET_CODE (from) == REG
814 || GET_CODE (from) == SUBREG))
815 from = force_reg (from_mode, from);
816 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
817 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
818 from = copy_to_reg (from);
819 emit_move_insn (to, gen_lowpart (to_mode, from));
820 return;
823 /* Handle extension. */
824 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
826 /* Convert directly if that works. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
830 if (flag_force_mem)
831 from = force_not_mem (from);
833 emit_unop_insn (code, to, from, equiv_code);
834 return;
836 else
838 enum machine_mode intermediate;
839 rtx tmp;
840 tree shift_amount;
842 /* Search for a mode to convert via. */
843 for (intermediate = from_mode; intermediate != VOIDmode;
844 intermediate = GET_MODE_WIDER_MODE (intermediate))
845 if (((can_extend_p (to_mode, intermediate, unsignedp)
846 != CODE_FOR_nothing)
847 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
848 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
849 GET_MODE_BITSIZE (intermediate))))
850 && (can_extend_p (intermediate, from_mode, unsignedp)
851 != CODE_FOR_nothing))
853 convert_move (to, convert_to_mode (intermediate, from,
854 unsignedp), unsignedp);
855 return;
858 /* No suitable intermediate mode.
859 Generate what we need with shifts. */
860 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
861 - GET_MODE_BITSIZE (from_mode), 0);
862 from = gen_lowpart (to_mode, force_reg (from_mode, from));
863 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
864 to, unsignedp);
865 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
866 to, unsignedp);
867 if (tmp != to)
868 emit_move_insn (to, tmp);
869 return;
873 /* Support special truncate insns for certain modes. */
874 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
876 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
877 to, from, UNKNOWN);
878 return;
881 /* Handle truncation of volatile memrefs, and so on;
882 the things that couldn't be truncated directly,
883 and for which there was no special instruction.
885 ??? Code above formerly short-circuited this, for most integer
886 mode pairs, with a force_reg in from_mode followed by a recursive
887 call to this routine. Appears always to have been wrong. */
888 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
890 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
891 emit_move_insn (to, temp);
892 return;
895 /* Mode combination is not recognized. */
896 abort ();
899 /* Return an rtx for a value that would result
900 from converting X to mode MODE.
901 Both X and MODE may be floating, or both integer.
902 UNSIGNEDP is nonzero if X is an unsigned value.
903 This can be done by referring to a part of X in place
904 or by copying to a new temporary with conversion.
906 This function *must not* call protect_from_queue
907 except when putting X into an insn (in which case convert_move does it). */
910 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
912 return convert_modes (mode, VOIDmode, x, unsignedp);
915 /* Return an rtx for a value that would result
916 from converting X from mode OLDMODE to mode MODE.
917 Both modes may be floating, or both integer.
918 UNSIGNEDP is nonzero if X is an unsigned value.
920 This can be done by referring to a part of X in place
921 or by copying to a new temporary with conversion.
923 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
925 This function *must not* call protect_from_queue
926 except when putting X into an insn (in which case convert_move does it). */
929 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
931 rtx temp;
933 /* If FROM is a SUBREG that indicates that we have already done at least
934 the required extension, strip it. */
936 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
937 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
938 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
939 x = gen_lowpart (mode, x);
941 if (GET_MODE (x) != VOIDmode)
942 oldmode = GET_MODE (x);
944 if (mode == oldmode)
945 return x;
947 /* There is one case that we must handle specially: If we are converting
948 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
949 we are to interpret the constant as unsigned, gen_lowpart will do
   950        the wrong thing if the constant appears negative.  What we want to do is
951 make the high-order word of the constant zero, not all ones. */
953 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
954 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
955 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
957 HOST_WIDE_INT val = INTVAL (x);
959 if (oldmode != VOIDmode
960 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
962 int width = GET_MODE_BITSIZE (oldmode);
964 /* We need to zero extend VAL. */
965 val &= ((HOST_WIDE_INT) 1 << width) - 1;
968 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
971 /* We can do this with a gen_lowpart if both desired and current modes
972 are integer, and this is either a constant integer, a register, or a
973 non-volatile MEM. Except for the constant case where MODE is no
974 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
976 if ((GET_CODE (x) == CONST_INT
977 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
978 || (GET_MODE_CLASS (mode) == MODE_INT
979 && GET_MODE_CLASS (oldmode) == MODE_INT
980 && (GET_CODE (x) == CONST_DOUBLE
981 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
982 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
983 && direct_load[(int) mode])
984 || (GET_CODE (x) == REG
985 && (! HARD_REGISTER_P (x)
986 || HARD_REGNO_MODE_OK (REGNO (x), mode))
987 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
988 GET_MODE_BITSIZE (GET_MODE (x)))))))))
990 /* ?? If we don't know OLDMODE, we have to assume here that
991 X does not need sign- or zero-extension. This may not be
992 the case, but it's the best we can do. */
993 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
994 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
996 HOST_WIDE_INT val = INTVAL (x);
997 int width = GET_MODE_BITSIZE (oldmode);
999 /* We must sign or zero-extend in this case. Start by
1000 zero-extending, then sign extend if we need to. */
1001 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1002 if (! unsignedp
1003 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1004 val |= (HOST_WIDE_INT) (-1) << width;
1006 return gen_int_mode (val, mode);
1009 return gen_lowpart (mode, x);
  1012   /* Converting an integer constant into a vector mode is always equivalent to a
1013 subreg operation. */
1014 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1016 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1017 abort ();
1018 return simplify_gen_subreg (mode, x, oldmode, 0);
1021 temp = gen_reg_rtx (mode);
1022 convert_move (temp, x, unsignedp);
1023 return temp;
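/* Illustrative sketch, not part of the original expr.c: widening a
   value with the conversion routines above.  The function name and
   register are hypothetical.  */
#if 0
static rtx
example_zero_extend_to_si (rtx narrow)
{
  /* Ask for an SImode copy of NARROW, zero-extending because the third
     argument (UNSIGNEDP) is nonzero.  convert_to_mode may return NARROW
     itself, a lowpart reference, or a fresh pseudo.  */
  return convert_to_mode (SImode, narrow, 1);
}
#endif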
1026 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1027 store efficiently. Due to internal GCC limitations, this is
1028 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1029 for an immediate constant. */
1031 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1033 /* Determine whether the LEN bytes can be moved by using several move
1034 instructions. Return nonzero if a call to move_by_pieces should
1035 succeed. */
1038 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1039 unsigned int align ATTRIBUTE_UNUSED)
1041 return MOVE_BY_PIECES_P (len, align);
1044 /* Generate several move instructions to copy LEN bytes from block FROM to
1045 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1046 and TO through protect_from_queue before calling.
1048 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1049 used to push FROM to the stack.
1051 ALIGN is maximum stack alignment we can assume.
1053 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
  1054    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1055 stpcpy. */
1058 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1059 unsigned int align, int endp)
1061 struct move_by_pieces data;
1062 rtx to_addr, from_addr = XEXP (from, 0);
1063 unsigned int max_size = MOVE_MAX_PIECES + 1;
1064 enum machine_mode mode = VOIDmode, tmode;
1065 enum insn_code icode;
1067 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1069 data.offset = 0;
1070 data.from_addr = from_addr;
1071 if (to)
1073 to_addr = XEXP (to, 0);
1074 data.to = to;
1075 data.autinc_to
1076 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1077 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1078 data.reverse
1079 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1081 else
1083 to_addr = NULL_RTX;
1084 data.to = NULL_RTX;
1085 data.autinc_to = 1;
1086 #ifdef STACK_GROWS_DOWNWARD
1087 data.reverse = 1;
1088 #else
1089 data.reverse = 0;
1090 #endif
1092 data.to_addr = to_addr;
1093 data.from = from;
1094 data.autinc_from
1095 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1096 || GET_CODE (from_addr) == POST_INC
1097 || GET_CODE (from_addr) == POST_DEC);
1099 data.explicit_inc_from = 0;
1100 data.explicit_inc_to = 0;
1101 if (data.reverse) data.offset = len;
1102 data.len = len;
1104 /* If copying requires more than two move insns,
1105 copy addresses to registers (to make displacements shorter)
1106 and use post-increment if available. */
1107 if (!(data.autinc_from && data.autinc_to)
1108 && move_by_pieces_ninsns (len, align) > 2)
1110 /* Find the mode of the largest move... */
1111 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1112 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1113 if (GET_MODE_SIZE (tmode) < max_size)
1114 mode = tmode;
1116 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1118 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1119 data.autinc_from = 1;
1120 data.explicit_inc_from = -1;
1122 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1124 data.from_addr = copy_addr_to_reg (from_addr);
1125 data.autinc_from = 1;
1126 data.explicit_inc_from = 1;
1128 if (!data.autinc_from && CONSTANT_P (from_addr))
1129 data.from_addr = copy_addr_to_reg (from_addr);
1130 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1132 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1133 data.autinc_to = 1;
1134 data.explicit_inc_to = -1;
1136 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1138 data.to_addr = copy_addr_to_reg (to_addr);
1139 data.autinc_to = 1;
1140 data.explicit_inc_to = 1;
1142 if (!data.autinc_to && CONSTANT_P (to_addr))
1143 data.to_addr = copy_addr_to_reg (to_addr);
1146 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1147 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1148 align = MOVE_MAX * BITS_PER_UNIT;
1150 /* First move what we can in the largest integer mode, then go to
1151 successively smaller modes. */
1153 while (max_size > 1)
1155 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1156 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1157 if (GET_MODE_SIZE (tmode) < max_size)
1158 mode = tmode;
1160 if (mode == VOIDmode)
1161 break;
1163 icode = mov_optab->handlers[(int) mode].insn_code;
1164 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1165 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1167 max_size = GET_MODE_SIZE (mode);
1170 /* The code above should have handled everything. */
1171 if (data.len > 0)
1172 abort ();
1174 if (endp)
1176 rtx to1;
1178 if (data.reverse)
1179 abort ();
1180 if (data.autinc_to)
1182 if (endp == 2)
1184 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1185 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1186 else
1187 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1188 -1));
1190 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1191 data.offset);
1193 else
1195 if (endp == 2)
1196 --data.offset;
1197 to1 = adjust_address (data.to, QImode, data.offset);
1199 return to1;
1201 else
1202 return data.to;
1205 /* Return number of insns required to move L bytes by pieces.
1206 ALIGN (in bits) is maximum alignment we can assume. */
1208 static unsigned HOST_WIDE_INT
1209 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1211 unsigned HOST_WIDE_INT n_insns = 0;
1212 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1214 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1215 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1216 align = MOVE_MAX * BITS_PER_UNIT;
1218 while (max_size > 1)
1220 enum machine_mode mode = VOIDmode, tmode;
1221 enum insn_code icode;
1223 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1224 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1225 if (GET_MODE_SIZE (tmode) < max_size)
1226 mode = tmode;
1228 if (mode == VOIDmode)
1229 break;
1231 icode = mov_optab->handlers[(int) mode].insn_code;
1232 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1233 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1235 max_size = GET_MODE_SIZE (mode);
1238 if (l)
1239 abort ();
1240 return n_insns;
1243 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1244 with move instructions for mode MODE. GENFUN is the gen_... function
1245 to make a move insn for that mode. DATA has all the other info. */
1247 static void
1248 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1249 struct move_by_pieces *data)
1251 unsigned int size = GET_MODE_SIZE (mode);
1252 rtx to1 = NULL_RTX, from1;
1254 while (data->len >= size)
1256 if (data->reverse)
1257 data->offset -= size;
1259 if (data->to)
1261 if (data->autinc_to)
1262 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1263 data->offset);
1264 else
1265 to1 = adjust_address (data->to, mode, data->offset);
1268 if (data->autinc_from)
1269 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1270 data->offset);
1271 else
1272 from1 = adjust_address (data->from, mode, data->offset);
1274 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1275 emit_insn (gen_add2_insn (data->to_addr,
1276 GEN_INT (-(HOST_WIDE_INT)size)));
1277 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1278 emit_insn (gen_add2_insn (data->from_addr,
1279 GEN_INT (-(HOST_WIDE_INT)size)));
1281 if (data->to)
1282 emit_insn ((*genfun) (to1, from1));
1283 else
1285 #ifdef PUSH_ROUNDING
1286 emit_single_push_insn (mode, from1, NULL);
1287 #else
1288 abort ();
1289 #endif
1292 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1293 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1294 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1295 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1297 if (! data->reverse)
1298 data->offset += size;
1300 data->len -= size;
1304 /* Emit code to move a block Y to a block X. This may be done with
1305 string-move instructions, with multiple scalar move instructions,
1306 or with a library call.
1308 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1309 SIZE is an rtx that says how long they are.
1310 ALIGN is the maximum alignment we can assume they have.
1311 METHOD describes what kind of copy this is, and what mechanisms may be used.
1313 Return the address of the new block, if memcpy is called and returns it,
1314 0 otherwise. */
1317 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1319 bool may_use_call;
1320 rtx retval = 0;
1321 unsigned int align;
1323 switch (method)
1325 case BLOCK_OP_NORMAL:
1326 may_use_call = true;
1327 break;
1329 case BLOCK_OP_CALL_PARM:
1330 may_use_call = block_move_libcall_safe_for_call_parm ();
1332 /* Make inhibit_defer_pop nonzero around the library call
1333 to force it to pop the arguments right away. */
1334 NO_DEFER_POP;
1335 break;
1337 case BLOCK_OP_NO_LIBCALL:
1338 may_use_call = false;
1339 break;
1341 default:
1342 abort ();
1345 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1347 if (GET_MODE (x) != BLKmode)
1348 abort ();
1349 if (GET_MODE (y) != BLKmode)
1350 abort ();
1352 x = protect_from_queue (x, 1);
1353 y = protect_from_queue (y, 0);
1354 size = protect_from_queue (size, 0);
1356 if (GET_CODE (x) != MEM)
1357 abort ();
1358 if (GET_CODE (y) != MEM)
1359 abort ();
1360 if (size == 0)
1361 abort ();
1363 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1364 can be incorrect is coming from __builtin_memcpy. */
1365 if (GET_CODE (size) == CONST_INT)
1367 if (INTVAL (size) == 0)
1368 return 0;
1370 x = shallow_copy_rtx (x);
1371 y = shallow_copy_rtx (y);
1372 set_mem_size (x, size);
1373 set_mem_size (y, size);
1376 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1377 move_by_pieces (x, y, INTVAL (size), align, 0);
1378 else if (emit_block_move_via_movstr (x, y, size, align))
1380 else if (may_use_call)
1381 retval = emit_block_move_via_libcall (x, y, size);
1382 else
1383 emit_block_move_via_loop (x, y, size, align);
1385 if (method == BLOCK_OP_CALL_PARM)
1386 OK_DEFER_POP;
1388 return retval;
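/* Illustrative sketch, not part of the original expr.c: a typical call
   to emit_block_move for a fixed-size structure copy.  DST and SRC
   stand for BLKmode MEMs and the helper name is hypothetical.  */
#if 0
static void
example_copy_16_bytes (rtx dst, rtx src)
{
  /* BLOCK_OP_NORMAL permits the memcpy libcall fallback; the size is
     passed as a CONST_INT rtx.  */
  emit_block_move (dst, src, GEN_INT (16), BLOCK_OP_NORMAL);
}
#endif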
1391 /* A subroutine of emit_block_move. Returns true if calling the
1392 block move libcall will not clobber any parameters which may have
1393 already been placed on the stack. */
1395 static bool
1396 block_move_libcall_safe_for_call_parm (void)
1398 /* If arguments are pushed on the stack, then they're safe. */
1399 if (PUSH_ARGS)
1400 return true;
1402 /* If registers go on the stack anyway, any argument is sure to clobber
1403 an outgoing argument. */
1404 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1406 tree fn = emit_block_move_libcall_fn (false);
1407 (void) fn;
1408 if (REG_PARM_STACK_SPACE (fn) != 0)
1409 return false;
1411 #endif
1413 /* If any argument goes in memory, then it might clobber an outgoing
1414 argument. */
1416 CUMULATIVE_ARGS args_so_far;
1417 tree fn, arg;
1419 fn = emit_block_move_libcall_fn (false);
1420 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1422 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1423 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1425 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1426 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1427 if (!tmp || !REG_P (tmp))
1428 return false;
1429 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1430 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1431 NULL_TREE, 1))
1432 return false;
1433 #endif
1434 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1437 return true;
1440 /* A subroutine of emit_block_move. Expand a movstr pattern;
1441 return true if successful. */
1443 static bool
1444 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1446 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1447 int save_volatile_ok = volatile_ok;
1448 enum machine_mode mode;
1450 /* Since this is a move insn, we don't care about volatility. */
1451 volatile_ok = 1;
1453 /* Try the most limited insn first, because there's no point
1454 including more than one in the machine description unless
1455 the more limited one has some advantage. */
1457 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1458 mode = GET_MODE_WIDER_MODE (mode))
1460 enum insn_code code = movstr_optab[(int) mode];
1461 insn_operand_predicate_fn pred;
1463 if (code != CODE_FOR_nothing
1464 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1465 here because if SIZE is less than the mode mask, as it is
1466 returned by the macro, it will definitely be less than the
1467 actual mode mask. */
1468 && ((GET_CODE (size) == CONST_INT
1469 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1470 <= (GET_MODE_MASK (mode) >> 1)))
1471 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1472 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1473 || (*pred) (x, BLKmode))
1474 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1475 || (*pred) (y, BLKmode))
1476 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1477 || (*pred) (opalign, VOIDmode)))
1479 rtx op2;
1480 rtx last = get_last_insn ();
1481 rtx pat;
1483 op2 = convert_to_mode (mode, size, 1);
1484 pred = insn_data[(int) code].operand[2].predicate;
1485 if (pred != 0 && ! (*pred) (op2, mode))
1486 op2 = copy_to_mode_reg (mode, op2);
1488 /* ??? When called via emit_block_move_for_call, it'd be
1489 nice if there were some way to inform the backend, so
1490 that it doesn't fail the expansion because it thinks
1491 emitting the libcall would be more efficient. */
1493 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1494 if (pat)
1496 emit_insn (pat);
1497 volatile_ok = save_volatile_ok;
1498 return true;
1500 else
1501 delete_insns_since (last);
1505 volatile_ok = save_volatile_ok;
1506 return false;
1509 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1510 Return the return value from memcpy, 0 otherwise. */
1512 static rtx
1513 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1515 rtx dst_addr, src_addr;
1516 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1517 enum machine_mode size_mode;
1518 rtx retval;
1520 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1522 It is unsafe to save the value generated by protect_from_queue and reuse
1523 it later. Consider what happens if emit_queue is called before the
1524 return value from protect_from_queue is used.
1526 Expansion of the CALL_EXPR below will call emit_queue before we are
1527 finished emitting RTL for argument setup. So if we are not careful we
1528 could get the wrong value for an argument.
1530 To avoid this problem we go ahead and emit code to copy the addresses of
1531 DST and SRC and SIZE into new pseudos. We can then place those new
1532 pseudos into an RTL_EXPR and use them later, even after a call to
1533 emit_queue.
1535 Note this is not strictly needed for library calls since they do not call
1536 emit_queue before loading their arguments. However, we may need to have
1537 library calls call emit_queue in the future since failing to do so could
1538 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1539 arguments in registers. */
1541 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1542 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1544 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1545 src_addr = convert_memory_address (ptr_mode, src_addr);
1547 dst_tree = make_tree (ptr_type_node, dst_addr);
1548 src_tree = make_tree (ptr_type_node, src_addr);
1550 if (TARGET_MEM_FUNCTIONS)
1551 size_mode = TYPE_MODE (sizetype);
1552 else
1553 size_mode = TYPE_MODE (unsigned_type_node);
1555 size = convert_to_mode (size_mode, size, 1);
1556 size = copy_to_mode_reg (size_mode, size);
1558 /* It is incorrect to use the libcall calling conventions to call
1559 memcpy in this context. This could be a user call to memcpy and
1560 the user may wish to examine the return value from memcpy. For
1561 targets where libcalls and normal calls have different conventions
1562 for returning pointers, we could end up generating incorrect code.
1564 For convenience, we generate the call to bcopy this way as well. */
1566 if (TARGET_MEM_FUNCTIONS)
1567 size_tree = make_tree (sizetype, size);
1568 else
1569 size_tree = make_tree (unsigned_type_node, size);
1571 fn = emit_block_move_libcall_fn (true);
1572 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1573 if (TARGET_MEM_FUNCTIONS)
1575 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1576 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1578 else
1580 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1581 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1584 /* Now we have to build up the CALL_EXPR itself. */
1585 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1586 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1587 call_expr, arg_list, NULL_TREE);
1589 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1591 /* If we are initializing a readonly value, show the above call clobbered
1592 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1593 the delay slot scheduler might overlook conflicts and take nasty
1594 decisions. */
1595 if (RTX_UNCHANGING_P (dst))
1596 add_function_usage_to
1597 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1598 gen_rtx_CLOBBER (VOIDmode, dst),
1599 NULL_RTX));
1601 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1604 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1605 for the function we use for block copies. The first time FOR_CALL
1606 is true, we call assemble_external. */
1608 static GTY(()) tree block_move_fn;
1610 void
1611 init_block_move_fn (const char *asmspec)
1613 if (!block_move_fn)
1615 tree args, fn;
1617 if (TARGET_MEM_FUNCTIONS)
1619 fn = get_identifier ("memcpy");
1620 args = build_function_type_list (ptr_type_node, ptr_type_node,
1621 const_ptr_type_node, sizetype,
1622 NULL_TREE);
1624 else
1626 fn = get_identifier ("bcopy");
1627 args = build_function_type_list (void_type_node, const_ptr_type_node,
1628 ptr_type_node, unsigned_type_node,
1629 NULL_TREE);
1632 fn = build_decl (FUNCTION_DECL, fn, args);
1633 DECL_EXTERNAL (fn) = 1;
1634 TREE_PUBLIC (fn) = 1;
1635 DECL_ARTIFICIAL (fn) = 1;
1636 TREE_NOTHROW (fn) = 1;
1638 block_move_fn = fn;
1641 if (asmspec)
1643 SET_DECL_RTL (block_move_fn, NULL_RTX);
1644 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1648 static tree
1649 emit_block_move_libcall_fn (int for_call)
1651 static bool emitted_extern;
1653 if (!block_move_fn)
1654 init_block_move_fn (NULL);
1656 if (for_call && !emitted_extern)
1658 emitted_extern = true;
1659 make_decl_rtl (block_move_fn, NULL);
1660 assemble_external (block_move_fn);
1663 return block_move_fn;
1666 /* A subroutine of emit_block_move. Copy the data via an explicit
1667 loop. This is used only when libcalls are forbidden. */
1668 /* ??? It'd be nice to copy in hunks larger than QImode. */
1670 static void
1671 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1672 unsigned int align ATTRIBUTE_UNUSED)
1674 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1675 enum machine_mode iter_mode;
1677 iter_mode = GET_MODE (size);
1678 if (iter_mode == VOIDmode)
1679 iter_mode = word_mode;
1681 top_label = gen_label_rtx ();
1682 cmp_label = gen_label_rtx ();
1683 iter = gen_reg_rtx (iter_mode);
1685 emit_move_insn (iter, const0_rtx);
1687 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1688 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1689 do_pending_stack_adjust ();
1691 emit_note (NOTE_INSN_LOOP_BEG);
1693 emit_jump (cmp_label);
1694 emit_label (top_label);
1696 tmp = convert_modes (Pmode, iter_mode, iter, true);
1697 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1698 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1699 x = change_address (x, QImode, x_addr);
1700 y = change_address (y, QImode, y_addr);
1702 emit_move_insn (x, y);
1704 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1705 true, OPTAB_LIB_WIDEN);
1706 if (tmp != iter)
1707 emit_move_insn (iter, tmp);
1709 emit_note (NOTE_INSN_LOOP_CONT);
1710 emit_label (cmp_label);
1712 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1713 true, top_label);
1715 emit_note (NOTE_INSN_LOOP_END);
1718 /* Copy all or part of a value X into registers starting at REGNO.
1719 The number of registers to be filled is NREGS. */
1721 void
1722 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1724 int i;
1725 #ifdef HAVE_load_multiple
1726 rtx pat;
1727 rtx last;
1728 #endif
1730 if (nregs == 0)
1731 return;
1733 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1734 x = validize_mem (force_const_mem (mode, x));
1736 /* See if the machine can do this with a load multiple insn. */
1737 #ifdef HAVE_load_multiple
1738 if (HAVE_load_multiple)
1740 last = get_last_insn ();
1741 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1742 GEN_INT (nregs));
1743 if (pat)
1745 emit_insn (pat);
1746 return;
1748 else
1749 delete_insns_since (last);
1751 #endif
1753 for (i = 0; i < nregs; i++)
1754 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1755 operand_subword_force (x, i, mode));
1758 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1759 The number of registers to be filled is NREGS. */
1761 void
1762 move_block_from_reg (int regno, rtx x, int nregs)
1764 int i;
1766 if (nregs == 0)
1767 return;
1769 /* See if the machine can do this with a store multiple insn. */
1770 #ifdef HAVE_store_multiple
1771 if (HAVE_store_multiple)
1773 rtx last = get_last_insn ();
1774 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1775 GEN_INT (nregs));
1776 if (pat)
1778 emit_insn (pat);
1779 return;
1781 else
1782 delete_insns_since (last);
1784 #endif
1786 for (i = 0; i < nregs; i++)
1788 rtx tem = operand_subword (x, i, 1, BLKmode);
1790 if (tem == 0)
1791 abort ();
1793 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1797 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1798 ORIG, where ORIG is a non-consecutive group of registers represented by
1799 a PARALLEL. The clone is identical to the original except in that the
1800 original set of registers is replaced by a new set of pseudo registers.
1801 The new set has the same modes as the original set. */
1804 gen_group_rtx (rtx orig)
1806 int i, length;
1807 rtx *tmps;
1809 if (GET_CODE (orig) != PARALLEL)
1810 abort ();
1812 length = XVECLEN (orig, 0);
1813 tmps = alloca (sizeof (rtx) * length);
1815 /* Skip a NULL entry in first slot. */
1816 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1818 if (i)
1819 tmps[0] = 0;
1821 for (; i < length; i++)
1823 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1824 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1826 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1829 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1832 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1833 where DST is non-consecutive registers represented by a PARALLEL.
1834 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1835 if not known. */
1837 void
1838 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1840 rtx *tmps, src;
1841 int start, i;
1843 if (GET_CODE (dst) != PARALLEL)
1844 abort ();
1846 /* Check for a NULL entry, used to indicate that the parameter goes
1847 both on the stack and in registers. */
1848 if (XEXP (XVECEXP (dst, 0, 0), 0))
1849 start = 0;
1850 else
1851 start = 1;
1853 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1855 /* Process the pieces. */
1856 for (i = start; i < XVECLEN (dst, 0); i++)
1858 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1859 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1860 unsigned int bytelen = GET_MODE_SIZE (mode);
1861 int shift = 0;
1863 /* Handle trailing fragments that run over the size of the struct. */
1864 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1866 /* Arrange to shift the fragment to where it belongs.
1867 extract_bit_field loads to the lsb of the reg. */
1868 if (
1869 #ifdef BLOCK_REG_PADDING
1870 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1871 == (BYTES_BIG_ENDIAN ? upward : downward)
1872 #else
1873 BYTES_BIG_ENDIAN
1874 #endif
1876 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1877 bytelen = ssize - bytepos;
1878 if (bytelen <= 0)
1879 abort ();
1882 /* If we won't be loading directly from memory, protect the real source
1883 from strange tricks we might play; but make sure that the source can
1884 be loaded directly into the destination. */
1885 src = orig_src;
1886 if (GET_CODE (orig_src) != MEM
1887 && (!CONSTANT_P (orig_src)
1888 || (GET_MODE (orig_src) != mode
1889 && GET_MODE (orig_src) != VOIDmode)))
1891 if (GET_MODE (orig_src) == VOIDmode)
1892 src = gen_reg_rtx (mode);
1893 else
1894 src = gen_reg_rtx (GET_MODE (orig_src));
1896 emit_move_insn (src, orig_src);
1899 /* Optimize the access just a bit. */
1900 if (GET_CODE (src) == MEM
1901 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1902 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1903 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1904 && bytelen == GET_MODE_SIZE (mode))
1906 tmps[i] = gen_reg_rtx (mode);
1907 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1909 else if (GET_CODE (src) == CONCAT)
1911 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1912 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1914 if ((bytepos == 0 && bytelen == slen0)
1915 || (bytepos != 0 && bytepos + bytelen <= slen))
1917 /* The following assumes that the concatenated objects all
1918 have the same size. In this case, a simple calculation
1919 can be used to determine the object and the bit field
1920 to be extracted. */
1921 tmps[i] = XEXP (src, bytepos / slen0);
1922 if (! CONSTANT_P (tmps[i])
1923 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1924 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1925 (bytepos % slen0) * BITS_PER_UNIT,
1926 1, NULL_RTX, mode, mode, ssize);
1928 else if (bytepos == 0)
1930 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1931 emit_move_insn (mem, src);
1932 tmps[i] = adjust_address (mem, mode, 0);
1934 else
1935 abort ();
1937 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1938 SIMD register, which is currently broken. While we get GCC
1939 to emit proper RTL for these cases, let's dump to memory. */
1940 else if (VECTOR_MODE_P (GET_MODE (dst))
1941 && GET_CODE (src) == REG)
1943 int slen = GET_MODE_SIZE (GET_MODE (src));
1944 rtx mem;
1946 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1947 emit_move_insn (mem, src);
1948 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1950 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1951 && XVECLEN (dst, 0) > 1)
1952 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1953 else if (CONSTANT_P (src)
1954 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1955 tmps[i] = src;
1956 else
1957 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1958 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1959 mode, mode, ssize);
1961 if (shift)
1962 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1963 tmps[i], 0, OPTAB_WIDEN);
1966 emit_queue ();
1968 /* Copy the extracted pieces into the proper (probable) hard regs. */
1969 for (i = start; i < XVECLEN (dst, 0); i++)
1970 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
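/* Editor's note: a minimal usage sketch, not part of GCC.  Assuming REGS is
   a PARALLEL of the shape sketched above and SRC_MEM is a BLKmode MEM
   holding the block, this spreads the block across the group.  The helper
   name is hypothetical.  */
#if 0
static void
example_group_load (rtx regs, rtx src_mem, tree type)
{
  /* The last argument is the block size in bytes, or -1 if unknown.  */
  emit_group_load (regs, src_mem, type, int_size_in_bytes (type));
}
#endif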
1973 /* Emit code to move a block SRC to block DST, where SRC and DST are
1974 non-consecutive groups of registers, each represented by a PARALLEL. */
1976 void
1977 emit_group_move (rtx dst, rtx src)
1979 int i;
1981 if (GET_CODE (src) != PARALLEL
1982 || GET_CODE (dst) != PARALLEL
1983 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1984 abort ();
1986 /* Skip first entry if NULL. */
1987 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1988 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1989 XEXP (XVECEXP (src, 0, i), 0));
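/* Editor's note: an illustrative sketch, not part of GCC, of how
   gen_group_rtx and emit_group_move combine: clone an incoming hard
   register group into fresh pseudos and copy the values over, so the hard
   registers need not stay live.  The helper name is hypothetical.  */
#if 0
static rtx
example_copy_group_to_pseudos (rtx hard_group)
{
  rtx pseudo_group = gen_group_rtx (hard_group);
  emit_group_move (pseudo_group, hard_group);
  return pseudo_group;
}
#endif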
1992 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1993 where SRC is non-consecutive registers represented by a PARALLEL.
1994 SSIZE represents the total size of block ORIG_DST, or -1 if not
1995 known. */
1997 void
1998 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2000 rtx *tmps, dst;
2001 int start, i;
2003 if (GET_CODE (src) != PARALLEL)
2004 abort ();
2006 /* Check for a NULL entry, used to indicate that the parameter goes
2007 both on the stack and in registers. */
2008 if (XEXP (XVECEXP (src, 0, 0), 0))
2009 start = 0;
2010 else
2011 start = 1;
2013 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2015 /* Copy the (probable) hard regs into pseudos. */
2016 for (i = start; i < XVECLEN (src, 0); i++)
2018 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2019 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2020 emit_move_insn (tmps[i], reg);
2022 emit_queue ();
2024 /* If we won't be storing directly into memory, protect the real destination
2025 from strange tricks we might play. */
2026 dst = orig_dst;
2027 if (GET_CODE (dst) == PARALLEL)
2029 rtx temp;
2031 /* We can get a PARALLEL dst if there is a conditional expression in
2032 a return statement. In that case, the dst and src are the same,
2033 so no action is necessary. */
2034 if (rtx_equal_p (dst, src))
2035 return;
2037 /* It is unclear if we can ever reach here, but we may as well handle
2038 it. Allocate a temporary, and split this into a store/load to/from
2039 the temporary. */
2041 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2042 emit_group_store (temp, src, type, ssize);
2043 emit_group_load (dst, temp, type, ssize);
2044 return;
2046 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2048 dst = gen_reg_rtx (GET_MODE (orig_dst));
2049 /* Make life a bit easier for combine. */
2050 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2053 /* Process the pieces. */
2054 for (i = start; i < XVECLEN (src, 0); i++)
2056 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2057 enum machine_mode mode = GET_MODE (tmps[i]);
2058 unsigned int bytelen = GET_MODE_SIZE (mode);
2059 rtx dest = dst;
2061 /* Handle trailing fragments that run over the size of the struct. */
2062 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2064 /* store_bit_field always takes its value from the lsb.
2065 Move the fragment to the lsb if it's not already there. */
2066 if (
2067 #ifdef BLOCK_REG_PADDING
2068 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2069 == (BYTES_BIG_ENDIAN ? upward : downward)
2070 #else
2071 BYTES_BIG_ENDIAN
2072 #endif
2075 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2076 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2077 tmps[i], 0, OPTAB_WIDEN);
2079 bytelen = ssize - bytepos;
2082 if (GET_CODE (dst) == CONCAT)
2084 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 dest = XEXP (dst, 0);
2086 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2088 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2089 dest = XEXP (dst, 1);
2091 else if (bytepos == 0 && XVECLEN (src, 0))
2093 dest = assign_stack_temp (GET_MODE (dest),
2094 GET_MODE_SIZE (GET_MODE (dest)), 0);
2095 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2096 tmps[i]);
2097 dst = dest;
2098 break;
2100 else
2101 abort ();
2104 /* Optimize the access just a bit. */
2105 if (GET_CODE (dest) == MEM
2106 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2107 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2108 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2109 && bytelen == GET_MODE_SIZE (mode))
2110 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2111 else
2112 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2113 mode, tmps[i], ssize);
2116 emit_queue ();
2118 /* Copy from the pseudo into the (probable) hard reg. */
2119 if (orig_dst != dst)
2120 emit_move_insn (orig_dst, dst);
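/* Editor's note: a minimal usage sketch, not part of GCC.  It is the
   mirror image of the emit_group_load example above: write a register
   group back into a BLKmode MEM.  The helper name is hypothetical.  */
#if 0
static void
example_group_store (rtx dst_mem, rtx regs, tree type)
{
  emit_group_store (dst_mem, regs, type, int_size_in_bytes (type));
}
#endif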
2123 /* Generate code to copy a BLKmode object of TYPE out of a
2124 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2125 is null, a stack temporary is created. TGTBLK is returned.
2127 The purpose of this routine is to handle functions that return
2128 BLKmode structures in registers. Some machines (the PA for example)
2129 want to return all small structures in registers regardless of the
2130 structure's alignment. */
2132 rtx
2133 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2140 if (tgtblk == 0)
2142 tgtblk = assign_temp (build_qualified_type (type,
2143 (TYPE_QUALS (type)
2144 | TYPE_QUAL_CONST)),
2145 0, 1, 1);
2146 preserve_temp_slots (tgtblk);
2149 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2150 into a new pseudo which is a full word. */
2152 if (GET_MODE (srcreg) != BLKmode
2153 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2154 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2156 /* If the structure doesn't take up a whole number of words, see whether
2157 SRCREG is padded on the left or on the right. If it's on the left,
2158 set PADDING_CORRECTION to the number of bits to skip.
2160 In most ABIs, the structure will be returned at the least significant end of
2161 the register, which translates to right padding on little-endian
2162 targets and left padding on big-endian targets. The opposite
2163 holds if the structure is returned at the most significant
2164 end of the register. */
2165 if (bytes % UNITS_PER_WORD != 0
2166 && (targetm.calls.return_in_msb (type)
2167 ? !BYTES_BIG_ENDIAN
2168 : BYTES_BIG_ENDIAN))
2169 padding_correction
2170 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2172 /* Copy the structure BITSIZE bits at a time.
2174 We could probably emit more efficient code for machines which do not use
2175 strict alignment, but it doesn't seem worth the effort at the current
2176 time. */
2177 for (bitpos = 0, xbitpos = padding_correction;
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2181 /* We need a new source operand each time xbitpos is on a
2182 word boundary and when xbitpos == padding_correction
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
2185 || xbitpos == padding_correction)
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2187 GET_MODE (srcreg));
2189 /* We need a new destination operand each time bitpos is on
2190 a word boundary. */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2194 /* Use xbitpos for the source extraction (right justified) and
2195 bitpos for the destination store (left justified). */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, word_mode, word_mode,
2200 BITS_PER_WORD),
2201 BITS_PER_WORD);
2204 return tgtblk;
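/* Editor's note: an illustrative sketch, not part of GCC.  After expanding
   a call whose BLKmode result arrives in VALREG, a caller could unpack it
   like this; passing 0 for TGTBLK lets copy_blkmode_from_reg allocate the
   stack temporary itself.  The helper name is hypothetical.  */
#if 0
static rtx
example_unpack_blkmode_return (rtx valreg, tree type)
{
  return copy_blkmode_from_reg (0, valreg, type);
}
#endif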
2207 /* Add a USE expression for REG to the (possibly empty) list pointed
2208 to by CALL_FUSAGE. REG must denote a hard register. */
2210 void
2211 use_reg (rtx *call_fusage, rtx reg)
2213 if (GET_CODE (reg) != REG
2214 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2215 abort ();
2217 *call_fusage
2218 = gen_rtx_EXPR_LIST (VOIDmode,
2219 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2222 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2223 starting at REGNO. All of these registers must be hard registers. */
2225 void
2226 use_regs (rtx *call_fusage, int regno, int nregs)
2228 int i;
2230 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2231 abort ();
2233 for (i = 0; i < nregs; i++)
2234 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2237 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2238 PARALLEL REGS. This is for calls that pass values in multiple
2239 non-contiguous locations. The Irix 6 ABI has examples of this. */
2241 void
2242 use_group_regs (rtx *call_fusage, rtx regs)
2244 int i;
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
2253 if (reg != 0 && GET_CODE (reg) == REG)
2254 use_reg (call_fusage, reg);
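/* Editor's note: a minimal sketch, not part of GCC, of building the USE
   chain that is later attached to a CALL_INSN as CALL_INSN_FUNCTION_USAGE.
   The hard register number 0 is hypothetical and must be below
   FIRST_PSEUDO_REGISTER for use_reg to accept it.  */
#if 0
static rtx
example_call_fusage (void)
{
  rtx call_fusage = 0;
  use_reg (&call_fusage, gen_rtx_REG (word_mode, 0));
  return call_fusage;
}
#endif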
2259 /* Determine whether the LEN bytes generated by CONSTFUN can be
2260 stored to memory using several move instructions. CONSTFUNDATA is
2261 a pointer which will be passed as argument in every CONSTFUN call.
2262 ALIGN is maximum alignment we can assume. Return nonzero if a
2263 call to store_by_pieces should succeed. */
2265 int
2266 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2267 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2268 void *constfundata, unsigned int align)
2270 unsigned HOST_WIDE_INT max_size, l;
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
2274 int reverse;
2275 rtx cst;
2277 if (len == 0)
2278 return 1;
2280 if (! STORE_BY_PIECES_P (len, align))
2281 return 0;
2283 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2284 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2285 align = MOVE_MAX * BITS_PER_UNIT;
2287 /* We would first store what we can in the largest integer mode, then go to
2288 successively smaller modes. */
2290 for (reverse = 0;
2291 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2292 reverse++)
2294 l = len;
2295 mode = VOIDmode;
2296 max_size = STORE_MAX_PIECES + 1;
2297 while (max_size > 1)
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2302 mode = tmode;
2304 if (mode == VOIDmode)
2305 break;
2307 icode = mov_optab->handlers[(int) mode].insn_code;
2308 if (icode != CODE_FOR_nothing
2309 && align >= GET_MODE_ALIGNMENT (mode))
2311 unsigned int size = GET_MODE_SIZE (mode);
2313 while (l >= size)
2315 if (reverse)
2316 offset -= size;
2318 cst = (*constfun) (constfundata, offset, mode);
2319 if (!LEGITIMATE_CONSTANT_P (cst))
2320 return 0;
2322 if (!reverse)
2323 offset += size;
2325 l -= size;
2329 max_size = GET_MODE_SIZE (mode);
2332 /* The code above should have handled everything. */
2333 if (l != 0)
2334 abort ();
2337 return 1;
2340 /* Generate several move instructions to store LEN bytes generated by
2341 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2342 pointer which will be passed as argument in every CONSTFUN call.
2343 ALIGN is maximum alignment we can assume.
2344 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
2345 a la mempcpy; and if ENDP is 2, return the memory at the end minus
2346 one byte, a la stpcpy. */
2348 rtx
2349 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2350 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2351 void *constfundata, unsigned int align, int endp)
2353 struct store_by_pieces data;
2355 if (len == 0)
2357 if (endp == 2)
2358 abort ();
2359 return to;
2362 if (! STORE_BY_PIECES_P (len, align))
2363 abort ();
2364 to = protect_from_queue (to, 1);
2365 data.constfun = constfun;
2366 data.constfundata = constfundata;
2367 data.len = len;
2368 data.to = to;
2369 store_by_pieces_1 (&data, align);
2370 if (endp)
2372 rtx to1;
2374 if (data.reverse)
2375 abort ();
2376 if (data.autinc_to)
2378 if (endp == 2)
2380 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2381 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2382 else
2383 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2384 -1));
2386 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2387 data.offset);
2389 else
2391 if (endp == 2)
2392 --data.offset;
2393 to1 = adjust_address (data.to, QImode, data.offset);
2395 return to1;
2397 else
2398 return data.to;
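/* Editor's note: an illustrative sketch, not part of GCC.  The callback
   mirrors clear_by_pieces_1 below (it returns const0_rtx for every piece),
   and the caller checks can_store_by_pieces before committing to
   store_by_pieces.  ENDP of 0 asks for TO itself back.  The names are
   hypothetical.  */
#if 0
static rtx
example_zero_piece (void *data ATTRIBUTE_UNUSED,
                    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

static void
example_store_zeros_by_pieces (rtx to, unsigned HOST_WIDE_INT len)
{
  if (can_store_by_pieces (len, example_zero_piece, 0, MEM_ALIGN (to)))
    store_by_pieces (to, len, example_zero_piece, 0, MEM_ALIGN (to), 0);
}
#endif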
2401 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling. ALIGN is maximum alignment we can assume. */
2405 static void
2406 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2408 struct store_by_pieces data;
2410 if (len == 0)
2411 return;
2413 data.constfun = clear_by_pieces_1;
2414 data.constfundata = NULL;
2415 data.len = len;
2416 data.to = to;
2417 store_by_pieces_1 (&data, align);
2420 /* Callback routine for clear_by_pieces.
2421 Return const0_rtx unconditionally. */
2423 static rtx
2424 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2425 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2426 enum machine_mode mode ATTRIBUTE_UNUSED)
2428 return const0_rtx;
2431 /* Subroutine of clear_by_pieces and store_by_pieces.
2432 Generate several move instructions to store LEN bytes of block TO. (A MEM
2433 rtx with BLKmode). The caller must pass TO through protect_from_queue
2434 before calling. ALIGN is maximum alignment we can assume. */
2436 static void
2437 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2438 unsigned int align ATTRIBUTE_UNUSED)
2440 rtx to_addr = XEXP (data->to, 0);
2441 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2442 enum machine_mode mode = VOIDmode, tmode;
2443 enum insn_code icode;
2445 data->offset = 0;
2446 data->to_addr = to_addr;
2447 data->autinc_to
2448 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2449 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2451 data->explicit_inc_to = 0;
2452 data->reverse
2453 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2454 if (data->reverse)
2455 data->offset = data->len;
2457 /* If storing requires more than two move insns,
2458 copy addresses to registers (to make displacements shorter)
2459 and use post-increment if available. */
2460 if (!data->autinc_to
2461 && move_by_pieces_ninsns (data->len, align) > 2)
2463 /* Determine the main mode we'll be using. */
2464 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2465 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2466 if (GET_MODE_SIZE (tmode) < max_size)
2467 mode = tmode;
2469 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2471 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2472 data->autinc_to = 1;
2473 data->explicit_inc_to = -1;
2476 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2477 && ! data->autinc_to)
2479 data->to_addr = copy_addr_to_reg (to_addr);
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = 1;
2484 if ( !data->autinc_to && CONSTANT_P (to_addr))
2485 data->to_addr = copy_addr_to_reg (to_addr);
2488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2490 align = MOVE_MAX * BITS_PER_UNIT;
2492 /* First store what we can in the largest integer mode, then go to
2493 successively smaller modes. */
2495 while (max_size > 1)
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) < max_size)
2500 mode = tmode;
2502 if (mode == VOIDmode)
2503 break;
2505 icode = mov_optab->handlers[(int) mode].insn_code;
2506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2507 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2509 max_size = GET_MODE_SIZE (mode);
2512 /* The code above should have handled everything. */
2513 if (data->len != 0)
2514 abort ();
2517 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2521 static void
2522 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
2525 unsigned int size = GET_MODE_SIZE (mode);
2526 rtx to1, cst;
2528 while (data->len >= size)
2530 if (data->reverse)
2531 data->offset -= size;
2533 if (data->autinc_to)
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2535 data->offset);
2536 else
2537 to1 = adjust_address (data->to, mode, data->offset);
2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2549 if (! data->reverse)
2550 data->offset += size;
2552 data->len -= size;
2556 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2557 its length in bytes. */
2559 rtx
2560 clear_storage (rtx object, rtx size)
2562 rtx retval = 0;
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
2568 if (GET_MODE (object) != BLKmode
2569 && GET_CODE (size) == CONST_INT
2570 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2572 else
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2577 if (size == const0_rtx)
2579 else if (GET_CODE (size) == CONST_INT
2580 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2581 clear_by_pieces (object, INTVAL (size), align);
2582 else if (clear_storage_via_clrstr (object, size, align))
2584 else
2585 retval = clear_storage_via_libcall (object, size);
2588 return retval;
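/* Editor's note: a minimal usage sketch, not part of GCC.  Zero a 64-byte
   BLKmode stack temporary; the size is an arbitrary example value.  */
#if 0
static rtx
example_clear_temp (void)
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);
  clear_storage (mem, GEN_INT (64));
  return mem;
}
#endif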
2591 /* A subroutine of clear_storage. Expand a clrstr pattern;
2592 return true if successful. */
2594 static bool
2595 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2597 /* Try the most limited insn first, because there's no point
2598 including more than one in the machine description unless
2599 the more limited one has some advantage. */
2601 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2602 enum machine_mode mode;
2604 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2605 mode = GET_MODE_WIDER_MODE (mode))
2607 enum insn_code code = clrstr_optab[(int) mode];
2608 insn_operand_predicate_fn pred;
2610 if (code != CODE_FOR_nothing
2611 /* We don't need MODE to be narrower than
2612 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2613 the mode mask, as it is returned by the macro, it will
2614 definitely be less than the actual mode mask. */
2615 && ((GET_CODE (size) == CONST_INT
2616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2617 <= (GET_MODE_MASK (mode) >> 1)))
2618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2619 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2620 || (*pred) (object, BLKmode))
2621 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2622 || (*pred) (opalign, VOIDmode)))
2624 rtx op1;
2625 rtx last = get_last_insn ();
2626 rtx pat;
2628 op1 = convert_to_mode (mode, size, 1);
2629 pred = insn_data[(int) code].operand[1].predicate;
2630 if (pred != 0 && ! (*pred) (op1, mode))
2631 op1 = copy_to_mode_reg (mode, op1);
2633 pat = GEN_FCN ((int) code) (object, op1, opalign);
2634 if (pat)
2636 emit_insn (pat);
2637 return true;
2639 else
2640 delete_insns_since (last);
2644 return false;
2647 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2648 Return the return value of the memset call, or 0 if bzero is used. */
2650 static rtx
2651 clear_storage_via_libcall (rtx object, rtx size)
2653 tree call_expr, arg_list, fn, object_tree, size_tree;
2654 enum machine_mode size_mode;
2655 rtx retval;
2657 /* OBJECT or SIZE may have been passed through protect_from_queue.
2659 It is unsafe to save the value generated by protect_from_queue
2660 and reuse it later. Consider what happens if emit_queue is
2661 called before the return value from protect_from_queue is used.
2663 Expansion of the CALL_EXPR below will call emit_queue before
2664 we are finished emitting RTL for argument setup. So if we are
2665 not careful we could get the wrong value for an argument.
2667 To avoid this problem we go ahead and emit code to copy OBJECT
2668 and SIZE into new pseudos. We can then place those new pseudos
2669 into an RTL_EXPR and use them later, even after a call to
2670 emit_queue.
2672 Note this is not strictly needed for library calls since they
2673 do not call emit_queue before loading their arguments. However,
2674 we may need to have library calls call emit_queue in the future
2675 since failing to do so could cause problems for targets which
2676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2678 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2680 if (TARGET_MEM_FUNCTIONS)
2681 size_mode = TYPE_MODE (sizetype);
2682 else
2683 size_mode = TYPE_MODE (unsigned_type_node);
2684 size = convert_to_mode (size_mode, size, 1);
2685 size = copy_to_mode_reg (size_mode, size);
2687 /* It is incorrect to use the libcall calling conventions to call
2688 memset in this context. This could be a user call to memset and
2689 the user may wish to examine the return value from memset. For
2690 targets where libcalls and normal calls have different conventions
2691 for returning pointers, we could end up generating incorrect code.
2693 For convenience, we generate the call to bzero this way as well. */
2695 object_tree = make_tree (ptr_type_node, object);
2696 if (TARGET_MEM_FUNCTIONS)
2697 size_tree = make_tree (sizetype, size);
2698 else
2699 size_tree = make_tree (unsigned_type_node, size);
2701 fn = clear_storage_libcall_fn (true);
2702 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2703 if (TARGET_MEM_FUNCTIONS)
2704 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2705 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2709 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2710 call_expr, arg_list, NULL_TREE);
2712 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2714 /* If we are initializing a readonly value, show the above call
2715 clobbered it. Otherwise, a load from it may erroneously be
2716 hoisted from a loop. */
2717 if (RTX_UNCHANGING_P (object))
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2720 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2723 /* A subroutine of clear_storage_via_libcall. Create the tree node
2724 for the function we use for block clears. The first time FOR_CALL
2725 is true, we call assemble_external. */
2727 static GTY(()) tree block_clear_fn;
2729 void
2730 init_block_clear_fn (const char *asmspec)
2732 if (!block_clear_fn)
2734 tree fn, args;
2736 if (TARGET_MEM_FUNCTIONS)
2738 fn = get_identifier ("memset");
2739 args = build_function_type_list (ptr_type_node, ptr_type_node,
2740 integer_type_node, sizetype,
2741 NULL_TREE);
2743 else
2745 fn = get_identifier ("bzero");
2746 args = build_function_type_list (void_type_node, ptr_type_node,
2747 unsigned_type_node, NULL_TREE);
2750 fn = build_decl (FUNCTION_DECL, fn, args);
2751 DECL_EXTERNAL (fn) = 1;
2752 TREE_PUBLIC (fn) = 1;
2753 DECL_ARTIFICIAL (fn) = 1;
2754 TREE_NOTHROW (fn) = 1;
2756 block_clear_fn = fn;
2759 if (asmspec)
2761 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2762 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2766 static tree
2767 clear_storage_libcall_fn (int for_call)
2769 static bool emitted_extern;
2771 if (!block_clear_fn)
2772 init_block_clear_fn (NULL);
2774 if (for_call && !emitted_extern)
2776 emitted_extern = true;
2777 make_decl_rtl (block_clear_fn, NULL);
2778 assemble_external (block_clear_fn);
2781 return block_clear_fn;
2784 /* Generate code to copy Y into X.
2785 Both Y and X must have the same mode, except that
2786 Y can be a constant with VOIDmode.
2787 This mode cannot be BLKmode; use emit_block_move for that.
2789 Return the last instruction emitted. */
2791 rtx
2792 emit_move_insn (rtx x, rtx y)
2794 enum machine_mode mode = GET_MODE (x);
2795 rtx y_cst = NULL_RTX;
2796 rtx last_insn, set;
2798 x = protect_from_queue (x, 1);
2799 y = protect_from_queue (y, 0);
2801 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2802 abort ();
2804 /* Never force constant_p_rtx to memory. */
2805 if (GET_CODE (y) == CONSTANT_P_RTX)
2807 else if (CONSTANT_P (y))
2809 if (optimize
2810 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2811 && (last_insn = compress_float_constant (x, y)))
2812 return last_insn;
2814 y_cst = y;
2816 if (!LEGITIMATE_CONSTANT_P (y))
2818 y = force_const_mem (mode, y);
2820 /* If the target's cannot_force_const_mem prevented the spill,
2821 assume that the target's move expanders will also take care
2822 of the non-legitimate constant. */
2823 if (!y)
2824 y = y_cst;
2828 /* If X or Y are memory references, verify that their addresses are valid
2829 for the machine. */
2830 if (GET_CODE (x) == MEM
2831 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2832 && ! push_operand (x, GET_MODE (x)))
2833 || (flag_force_addr
2834 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2835 x = validize_mem (x);
2837 if (GET_CODE (y) == MEM
2838 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2839 || (flag_force_addr
2840 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2841 y = validize_mem (y);
2843 if (mode == BLKmode)
2844 abort ();
2846 last_insn = emit_move_insn_1 (x, y);
2848 if (y_cst && GET_CODE (x) == REG
2849 && (set = single_set (last_insn)) != NULL_RTX
2850 && SET_DEST (set) == x
2851 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2852 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2854 return last_insn;
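/* Editor's note: a minimal usage sketch, not part of GCC.  Copy a constant
   into a fresh pseudo; emit_move_insn takes care of forcing a constant that
   is not LEGITIMATE_CONSTANT_P into memory first.  */
#if 0
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (word_mode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif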
2857 /* Low level part of emit_move_insn.
2858 Called just like emit_move_insn, but assumes X and Y
2859 are basically valid. */
2861 rtx
2862 emit_move_insn_1 (rtx x, rtx y)
2864 enum machine_mode mode = GET_MODE (x);
2865 enum machine_mode submode;
2866 enum mode_class class = GET_MODE_CLASS (mode);
2868 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2869 abort ();
2871 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2872 return
2873 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2875 /* Expand complex moves by moving real part and imag part, if possible. */
2876 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2877 && BLKmode != (submode = GET_MODE_INNER (mode))
2878 && (mov_optab->handlers[(int) submode].insn_code
2879 != CODE_FOR_nothing))
2881 /* Don't split destination if it is a stack push. */
2882 int stack = push_operand (x, GET_MODE (x));
2884 #ifdef PUSH_ROUNDING
2885 /* In case we output to the stack, but the size is smaller than the
2886 machine can push exactly, we need to use move instructions. */
2887 if (stack
2888 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2889 != GET_MODE_SIZE (submode)))
2891 rtx temp;
2892 HOST_WIDE_INT offset1, offset2;
2894 /* Do not use anti_adjust_stack, since we don't want to update
2895 stack_pointer_delta. */
2896 temp = expand_binop (Pmode,
2897 #ifdef STACK_GROWS_DOWNWARD
2898 sub_optab,
2899 #else
2900 add_optab,
2901 #endif
2902 stack_pointer_rtx,
2903 GEN_INT
2904 (PUSH_ROUNDING
2905 (GET_MODE_SIZE (GET_MODE (x)))),
2906 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2908 if (temp != stack_pointer_rtx)
2909 emit_move_insn (stack_pointer_rtx, temp);
2911 #ifdef STACK_GROWS_DOWNWARD
2912 offset1 = 0;
2913 offset2 = GET_MODE_SIZE (submode);
2914 #else
2915 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2916 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2917 + GET_MODE_SIZE (submode));
2918 #endif
2920 emit_move_insn (change_address (x, submode,
2921 gen_rtx_PLUS (Pmode,
2922 stack_pointer_rtx,
2923 GEN_INT (offset1))),
2924 gen_realpart (submode, y));
2925 emit_move_insn (change_address (x, submode,
2926 gen_rtx_PLUS (Pmode,
2927 stack_pointer_rtx,
2928 GEN_INT (offset2))),
2929 gen_imagpart (submode, y));
2931 else
2932 #endif
2933 /* If this is a stack push, push the highpart first, so it
2934 will be in the argument order.
2936 In that case, change_address is used only to convert
2937 the mode, not to change the address. */
2938 if (stack)
2940 /* Note that the real part always precedes the imag part in memory
2941 regardless of machine's endianness. */
2942 #ifdef STACK_GROWS_DOWNWARD
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_imagpart (submode, y));
2945 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2946 gen_realpart (submode, y));
2947 #else
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_realpart (submode, y));
2950 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2951 gen_imagpart (submode, y));
2952 #endif
2954 else
2956 rtx realpart_x, realpart_y;
2957 rtx imagpart_x, imagpart_y;
2959 /* If this is a complex value with each part being smaller than a
2960 word, the usual calling sequence will likely pack the pieces into
2961 a single register. Unfortunately, SUBREG of hard registers only
2962 deals in terms of words, so we have a problem converting input
2963 arguments to the CONCAT of two registers that is used elsewhere
2964 for complex values. If this is before reload, we can copy it into
2965 memory and reload. FIXME, we should see about using extract and
2966 insert on integer registers, but complex short and complex char
2967 variables should be rarely used. */
2968 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2969 && (reload_in_progress | reload_completed) == 0)
2971 int packed_dest_p
2972 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2973 int packed_src_p
2974 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2976 if (packed_dest_p || packed_src_p)
2978 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2979 ? MODE_FLOAT : MODE_INT);
2981 enum machine_mode reg_mode
2982 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2984 if (reg_mode != BLKmode)
2986 rtx mem = assign_stack_temp (reg_mode,
2987 GET_MODE_SIZE (mode), 0);
2988 rtx cmem = adjust_address (mem, mode, 0);
2990 cfun->cannot_inline
2991 = N_("function using short complex types cannot be inline");
2993 if (packed_dest_p)
2995 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2997 emit_move_insn_1 (cmem, y);
2998 return emit_move_insn_1 (sreg, mem);
3000 else
3002 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3004 emit_move_insn_1 (mem, sreg);
3005 return emit_move_insn_1 (x, cmem);
3011 realpart_x = gen_realpart (submode, x);
3012 realpart_y = gen_realpart (submode, y);
3013 imagpart_x = gen_imagpart (submode, x);
3014 imagpart_y = gen_imagpart (submode, y);
3016 /* Show the output dies here. This is necessary for SUBREGs
3017 of pseudos since we cannot track their lifetimes correctly;
3018 hard regs shouldn't appear here except as return values.
3019 We never want to emit such a clobber after reload. */
3020 if (x != y
3021 && ! (reload_in_progress || reload_completed)
3022 && (GET_CODE (realpart_x) == SUBREG
3023 || GET_CODE (imagpart_x) == SUBREG))
3024 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3026 emit_move_insn (realpart_x, realpart_y);
3027 emit_move_insn (imagpart_x, imagpart_y);
3030 return get_last_insn ();
3033 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3034 find a mode to do it in. If we have a movcc, use it. Otherwise,
3035 find the MODE_INT mode of the same width. */
3036 else if (GET_MODE_CLASS (mode) == MODE_CC
3037 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3039 enum insn_code insn_code;
3040 enum machine_mode tmode = VOIDmode;
3041 rtx x1 = x, y1 = y;
3043 if (mode != CCmode
3044 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3045 tmode = CCmode;
3046 else
3047 for (tmode = QImode; tmode != VOIDmode;
3048 tmode = GET_MODE_WIDER_MODE (tmode))
3049 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3050 break;
3052 if (tmode == VOIDmode)
3053 abort ();
3055 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3056 may call change_address which is not appropriate if we were
3057 called when a reload was in progress. We don't have to worry
3058 about changing the address since the size in bytes is supposed to
3059 be the same. Copy the MEM to change the mode and move any
3060 substitutions from the old MEM to the new one. */
3062 if (reload_in_progress)
3064 x = gen_lowpart_common (tmode, x1);
3065 if (x == 0 && GET_CODE (x1) == MEM)
3067 x = adjust_address_nv (x1, tmode, 0);
3068 copy_replacements (x1, x);
3071 y = gen_lowpart_common (tmode, y1);
3072 if (y == 0 && GET_CODE (y1) == MEM)
3074 y = adjust_address_nv (y1, tmode, 0);
3075 copy_replacements (y1, y);
3078 else
3080 x = gen_lowpart (tmode, x);
3081 y = gen_lowpart (tmode, y);
3084 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3085 return emit_insn (GEN_FCN (insn_code) (x, y));
3088 /* Try using a move pattern for the corresponding integer mode. This is
3089 only safe when simplify_subreg can convert MODE constants into integer
3090 constants. At present, it can only do this reliably if the value
3091 fits within a HOST_WIDE_INT. */
3092 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3093 && (submode = int_mode_for_mode (mode)) != BLKmode
3094 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3095 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3096 (simplify_gen_subreg (submode, x, mode, 0),
3097 simplify_gen_subreg (submode, y, mode, 0)));
3099 /* This will handle any multi-word or full-word mode that lacks a move_insn
3100 pattern. However, you will get better code if you define such patterns,
3101 even if they must turn into multiple assembler instructions. */
3102 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3104 rtx last_insn = 0;
3105 rtx seq, inner;
3106 int need_clobber;
3107 int i;
3109 #ifdef PUSH_ROUNDING
3111 /* If X is a push on the stack, do the push now and replace
3112 X with a reference to the stack pointer. */
3113 if (push_operand (x, GET_MODE (x)))
3115 rtx temp;
3116 enum rtx_code code;
3118 /* Do not use anti_adjust_stack, since we don't want to update
3119 stack_pointer_delta. */
3120 temp = expand_binop (Pmode,
3121 #ifdef STACK_GROWS_DOWNWARD
3122 sub_optab,
3123 #else
3124 add_optab,
3125 #endif
3126 stack_pointer_rtx,
3127 GEN_INT
3128 (PUSH_ROUNDING
3129 (GET_MODE_SIZE (GET_MODE (x)))),
3130 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3132 if (temp != stack_pointer_rtx)
3133 emit_move_insn (stack_pointer_rtx, temp);
3135 code = GET_CODE (XEXP (x, 0));
3137 /* Just hope that small offsets off SP are OK. */
3138 if (code == POST_INC)
3139 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3140 GEN_INT (-((HOST_WIDE_INT)
3141 GET_MODE_SIZE (GET_MODE (x)))));
3142 else if (code == POST_DEC)
3143 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3144 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3145 else
3146 temp = stack_pointer_rtx;
3148 x = change_address (x, VOIDmode, temp);
3150 #endif
3152 /* If we are in reload, see if either operand is a MEM whose address
3153 is scheduled for replacement. */
3154 if (reload_in_progress && GET_CODE (x) == MEM
3155 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3156 x = replace_equiv_address_nv (x, inner);
3157 if (reload_in_progress && GET_CODE (y) == MEM
3158 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3159 y = replace_equiv_address_nv (y, inner);
3161 start_sequence ();
3163 need_clobber = 0;
3164 for (i = 0;
3165 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3166 i++)
3168 rtx xpart = operand_subword (x, i, 1, mode);
3169 rtx ypart = operand_subword (y, i, 1, mode);
3171 /* If we can't get a part of Y, put Y into memory if it is a
3172 constant. Otherwise, force it into a register. If we still
3173 can't get a part of Y, abort. */
3174 if (ypart == 0 && CONSTANT_P (y))
3176 y = force_const_mem (mode, y);
3177 ypart = operand_subword (y, i, 1, mode);
3179 else if (ypart == 0)
3180 ypart = operand_subword_force (y, i, mode);
3182 if (xpart == 0 || ypart == 0)
3183 abort ();
3185 need_clobber |= (GET_CODE (xpart) == SUBREG);
3187 last_insn = emit_move_insn (xpart, ypart);
3190 seq = get_insns ();
3191 end_sequence ();
3193 /* Show the output dies here. This is necessary for SUBREGs
3194 of pseudos since we cannot track their lifetimes correctly;
3195 hard regs shouldn't appear here except as return values.
3196 We never want to emit such a clobber after reload. */
3197 if (x != y
3198 && ! (reload_in_progress || reload_completed)
3199 && need_clobber != 0)
3200 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3202 emit_insn (seq);
3204 return last_insn;
3206 else
3207 abort ();
3210 /* If Y is representable exactly in a narrower mode, and the target can
3211 perform the extension directly from constant or memory, then emit the
3212 move as an extension. */
3214 static rtx
3215 compress_float_constant (rtx x, rtx y)
3217 enum machine_mode dstmode = GET_MODE (x);
3218 enum machine_mode orig_srcmode = GET_MODE (y);
3219 enum machine_mode srcmode;
3220 REAL_VALUE_TYPE r;
3222 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3224 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3225 srcmode != orig_srcmode;
3226 srcmode = GET_MODE_WIDER_MODE (srcmode))
3228 enum insn_code ic;
3229 rtx trunc_y, last_insn;
3231 /* Skip if the target can't extend this way. */
3232 ic = can_extend_p (dstmode, srcmode, 0);
3233 if (ic == CODE_FOR_nothing)
3234 continue;
3236 /* Skip if the narrowed value isn't exact. */
3237 if (! exact_real_truncate (srcmode, &r))
3238 continue;
3240 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3242 if (LEGITIMATE_CONSTANT_P (trunc_y))
3244 /* Skip if the target needs extra instructions to perform
3245 the extension. */
3246 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3247 continue;
3249 else if (float_extend_from_mem[dstmode][srcmode])
3250 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3251 else
3252 continue;
3254 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3255 last_insn = get_last_insn ();
3257 if (GET_CODE (x) == REG)
3258 set_unique_reg_note (last_insn, REG_EQUAL, y);
3260 return last_insn;
3263 return NULL_RTX;
3266 /* Pushing data onto the stack. */
3268 /* Push a block of length SIZE (perhaps variable)
3269 and return an rtx to address the beginning of the block.
3270 Note that it is not possible for the value returned to be a QUEUED.
3271 The value may be virtual_outgoing_args_rtx.
3273 EXTRA is the number of bytes of padding to push in addition to SIZE.
3274 BELOW nonzero means this padding comes at low addresses;
3275 otherwise, the padding comes at high addresses. */
3277 rtx
3278 push_block (rtx size, int extra, int below)
3280 rtx temp;
3282 size = convert_modes (Pmode, ptr_mode, size, 1);
3283 if (CONSTANT_P (size))
3284 anti_adjust_stack (plus_constant (size, extra));
3285 else if (GET_CODE (size) == REG && extra == 0)
3286 anti_adjust_stack (size);
3287 else
3289 temp = copy_to_mode_reg (Pmode, size);
3290 if (extra != 0)
3291 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3292 temp, 0, OPTAB_LIB_WIDEN);
3293 anti_adjust_stack (temp);
3296 #ifndef STACK_GROWS_DOWNWARD
3297 if (0)
3298 #else
3299 if (1)
3300 #endif
3302 temp = virtual_outgoing_args_rtx;
3303 if (extra != 0 && below)
3304 temp = plus_constant (temp, extra);
3306 else
3308 if (GET_CODE (size) == CONST_INT)
3309 temp = plus_constant (virtual_outgoing_args_rtx,
3310 -INTVAL (size) - (below ? 0 : extra));
3311 else if (extra != 0 && !below)
3312 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3313 negate_rtx (Pmode, plus_constant (size, extra)));
3314 else
3315 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3316 negate_rtx (Pmode, size));
3319 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
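/* Editor's note: a minimal usage sketch, not part of GCC.  Reserve 32 bytes
   of outgoing stack space (no extra padding) and wrap the returned address
   in a BLKmode MEM; the size is an arbitrary example value.  */
#if 0
static rtx
example_push_block (void)
{
  rtx addr = push_block (GEN_INT (32), 0, 0);
  return gen_rtx_MEM (BLKmode, addr);
}
#endif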
3322 #ifdef PUSH_ROUNDING
3324 /* Emit single push insn. */
3326 static void
3327 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3329 rtx dest_addr;
3330 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3331 rtx dest;
3332 enum insn_code icode;
3333 insn_operand_predicate_fn pred;
3335 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3336 /* If there is push pattern, use it. Otherwise try old way of throwing
3337 MEM representing push operation to move expander. */
3338 icode = push_optab->handlers[(int) mode].insn_code;
3339 if (icode != CODE_FOR_nothing)
3341 if (((pred = insn_data[(int) icode].operand[0].predicate)
3342 && !((*pred) (x, mode))))
3343 x = force_reg (mode, x);
3344 emit_insn (GEN_FCN (icode) (x));
3345 return;
3347 if (GET_MODE_SIZE (mode) == rounded_size)
3348 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3349 /* If we are to pad downward, adjust the stack pointer first and
3350 then store X into the stack location using an offset. This is
3351 because emit_move_insn does not know how to pad; it does not have
3352 access to type. */
3353 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3355 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3356 HOST_WIDE_INT offset;
3358 emit_move_insn (stack_pointer_rtx,
3359 expand_binop (Pmode,
3360 #ifdef STACK_GROWS_DOWNWARD
3361 sub_optab,
3362 #else
3363 add_optab,
3364 #endif
3365 stack_pointer_rtx,
3366 GEN_INT (rounded_size),
3367 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3369 offset = (HOST_WIDE_INT) padding_size;
3370 #ifdef STACK_GROWS_DOWNWARD
3371 if (STACK_PUSH_CODE == POST_DEC)
3372 /* We have already decremented the stack pointer, so get the
3373 previous value. */
3374 offset += (HOST_WIDE_INT) rounded_size;
3375 #else
3376 if (STACK_PUSH_CODE == POST_INC)
3377 /* We have already incremented the stack pointer, so get the
3378 previous value. */
3379 offset -= (HOST_WIDE_INT) rounded_size;
3380 #endif
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3383 else
3385 #ifdef STACK_GROWS_DOWNWARD
3386 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3387 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3388 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3389 #else
3390 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3391 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3392 GEN_INT (rounded_size));
3393 #endif
3394 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3397 dest = gen_rtx_MEM (mode, dest_addr);
3399 if (type != 0)
3401 set_mem_attributes (dest, type, 1);
3403 if (flag_optimize_sibling_calls)
3404 /* Function incoming arguments may overlap with sibling call
3405 outgoing arguments and we cannot allow reordering of reads
3406 from function arguments with stores to outgoing arguments
3407 of sibling calls. */
3408 set_mem_alias_set (dest, 0);
3410 emit_move_insn (dest, x);
3412 #endif
3414 /* Generate code to push X onto the stack, assuming it has mode MODE and
3415 type TYPE.
3416 MODE is redundant except when X is a CONST_INT (since they don't
3417 carry mode info).
3418 SIZE is an rtx for the size of data to be copied (in bytes),
3419 needed only if X is BLKmode.
3421 ALIGN (in bits) is maximum alignment we can assume.
3423 If PARTIAL and REG are both nonzero, then copy that many of the first
3424 words of X into registers starting with REG, and push the rest of X.
3425 The amount of space pushed is decreased by PARTIAL words,
3426 rounded *down* to a multiple of PARM_BOUNDARY.
3427 REG must be a hard register in this case.
3428 If REG is zero but PARTIAL is not, take all other actions for an
3429 argument partially in registers, but do not actually load any
3430 registers.
3432 EXTRA is the amount in bytes of extra space to leave next to this arg.
3433 This is ignored if an argument block has already been allocated.
3435 On a machine that lacks real push insns, ARGS_ADDR is the address of
3436 the bottom of the argument block for this call. We use indexing off there
3437 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3438 argument block has not been preallocated.
3440 ARGS_SO_FAR is the size of args previously pushed for this call.
3442 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3443 for arguments passed in registers. If nonzero, it will be the number
3444 of bytes required. */
3446 void
3447 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3448 unsigned int align, int partial, rtx reg, int extra,
3449 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3450 rtx alignment_pad)
3452 rtx xinner;
3453 enum direction stack_direction
3454 #ifdef STACK_GROWS_DOWNWARD
3455 = downward;
3456 #else
3457 = upward;
3458 #endif
3460 /* Decide where to pad the argument: `downward' for below,
3461 `upward' for above, or `none' for don't pad it.
3462 Default is below for small data on big-endian machines; else above. */
3463 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3465 /* Invert direction if stack is post-decrement.
3466 FIXME: why? */
3467 if (STACK_PUSH_CODE == POST_DEC)
3468 if (where_pad != none)
3469 where_pad = (where_pad == downward ? upward : downward);
3471 xinner = x = protect_from_queue (x, 0);
3473 if (mode == BLKmode)
3475 /* Copy a block into the stack, entirely or partially. */
3477 rtx temp;
3478 int used = partial * UNITS_PER_WORD;
3479 int offset;
3480 int skip;
3482 if (reg && GET_CODE (reg) == PARALLEL)
3484 /* Use the size of the elt to compute offset. */
3485 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3486 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3487 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3489 else
3490 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3492 if (size == 0)
3493 abort ();
3495 used -= offset;
3497 /* USED is now the # of bytes we need not copy to the stack
3498 because registers will take care of them. */
3500 if (partial != 0)
3501 xinner = adjust_address (xinner, BLKmode, used);
3503 /* If the partial register-part of the arg counts in its stack size,
3504 skip the part of stack space corresponding to the registers.
3505 Otherwise, start copying to the beginning of the stack space,
3506 by setting SKIP to 0. */
3507 skip = (reg_parm_stack_space == 0) ? 0 : used;
3509 #ifdef PUSH_ROUNDING
3510 /* Do it with several push insns if that doesn't take lots of insns
3511 and if there is no difficulty with push insns that skip bytes
3512 on the stack for alignment purposes. */
3513 if (args_addr == 0
3514 && PUSH_ARGS
3515 && GET_CODE (size) == CONST_INT
3516 && skip == 0
3517 && MEM_ALIGN (xinner) >= align
3518 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3519 /* Here we avoid the case of a structure whose weak alignment
3520 forces many pushes of a small amount of data,
3521 and such small pushes do rounding that causes trouble. */
3522 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3523 || align >= BIGGEST_ALIGNMENT
3524 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3525 == (align / BITS_PER_UNIT)))
3526 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3528 /* Push padding now if padding above and stack grows down,
3529 or if padding below and stack grows up.
3530 But if space already allocated, this has already been done. */
3531 if (extra && args_addr == 0
3532 && where_pad != none && where_pad != stack_direction)
3533 anti_adjust_stack (GEN_INT (extra));
3535 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3537 else
3538 #endif /* PUSH_ROUNDING */
3540 rtx target;
3542 /* Otherwise make space on the stack and copy the data
3543 to the address of that space. */
3545 /* Deduct words put into registers from the size we must copy. */
3546 if (partial != 0)
3548 if (GET_CODE (size) == CONST_INT)
3549 size = GEN_INT (INTVAL (size) - used);
3550 else
3551 size = expand_binop (GET_MODE (size), sub_optab, size,
3552 GEN_INT (used), NULL_RTX, 0,
3553 OPTAB_LIB_WIDEN);
3556 /* Get the address of the stack space.
3557 In this case, we do not deal with EXTRA separately.
3558 A single stack adjust will do. */
3559 if (! args_addr)
3561 temp = push_block (size, extra, where_pad == downward);
3562 extra = 0;
3564 else if (GET_CODE (args_so_far) == CONST_INT)
3565 temp = memory_address (BLKmode,
3566 plus_constant (args_addr,
3567 skip + INTVAL (args_so_far)));
3568 else
3569 temp = memory_address (BLKmode,
3570 plus_constant (gen_rtx_PLUS (Pmode,
3571 args_addr,
3572 args_so_far),
3573 skip));
3575 if (!ACCUMULATE_OUTGOING_ARGS)
3577 /* If the source is referenced relative to the stack pointer,
3578 copy it to another register to stabilize it. We do not need
3579 to do this if we know that we won't be changing sp. */
3581 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3582 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3583 temp = copy_to_reg (temp);
3586 target = gen_rtx_MEM (BLKmode, temp);
3588 if (type != 0)
3590 set_mem_attributes (target, type, 1);
3591 /* Function incoming arguments may overlap with sibling call
3592 outgoing arguments and we cannot allow reordering of reads
3593 from function arguments with stores to outgoing arguments
3594 of sibling calls. */
3595 set_mem_alias_set (target, 0);
3598 /* ALIGN may well be better aligned than TYPE, e.g. due to
3599 PARM_BOUNDARY. Assume the caller isn't lying. */
3600 set_mem_align (target, align);
3602 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3605 else if (partial > 0)
3607 /* Scalar partly in registers. */
3609 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3610 int i;
3611 int not_stack;
3612 /* # words of start of argument
3613 that we must make space for but need not store. */
3614 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3615 int args_offset = INTVAL (args_so_far);
3616 int skip;
3618 /* Push padding now if padding above and stack grows down,
3619 or if padding below and stack grows up.
3620 But if space already allocated, this has already been done. */
3621 if (extra && args_addr == 0
3622 && where_pad != none && where_pad != stack_direction)
3623 anti_adjust_stack (GEN_INT (extra));
3625 /* If we make space by pushing it, we might as well push
3626 the real data. Otherwise, we can leave OFFSET nonzero
3627 and leave the space uninitialized. */
3628 if (args_addr == 0)
3629 offset = 0;
3631 /* Now NOT_STACK gets the number of words that we don't need to
3632 allocate on the stack. */
3633 not_stack = partial - offset;
3635 /* If the partial register-part of the arg counts in its stack size,
3636 skip the part of stack space corresponding to the registers.
3637 Otherwise, start copying to the beginning of the stack space,
3638 by setting SKIP to 0. */
3639 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3641 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3642 x = validize_mem (force_const_mem (mode, x));
3644 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3645 SUBREGs of such registers are not allowed. */
3646 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3647 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3648 x = copy_to_reg (x);
3650 /* Loop over all the words allocated on the stack for this arg. */
3651 /* We can do it by words, because any scalar bigger than a word
3652 has a size a multiple of a word. */
3653 #ifndef PUSH_ARGS_REVERSED
3654 for (i = not_stack; i < size; i++)
3655 #else
3656 for (i = size - 1; i >= not_stack; i--)
3657 #endif
3658 if (i >= not_stack + offset)
3659 emit_push_insn (operand_subword_force (x, i, mode),
3660 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3661 0, args_addr,
3662 GEN_INT (args_offset + ((i - not_stack + skip)
3663 * UNITS_PER_WORD)),
3664 reg_parm_stack_space, alignment_pad);
3666 else
3668 rtx addr;
3669 rtx dest;
3671 /* Push padding now if padding above and stack grows down,
3672 or if padding below and stack grows up.
3673 But if space already allocated, this has already been done. */
3674 if (extra && args_addr == 0
3675 && where_pad != none && where_pad != stack_direction)
3676 anti_adjust_stack (GEN_INT (extra));
3678 #ifdef PUSH_ROUNDING
3679 if (args_addr == 0 && PUSH_ARGS)
3680 emit_single_push_insn (mode, x, type);
3681 else
3682 #endif
3684 if (GET_CODE (args_so_far) == CONST_INT)
3685 addr
3686 = memory_address (mode,
3687 plus_constant (args_addr,
3688 INTVAL (args_so_far)));
3689 else
3690 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3691 args_so_far));
3692 dest = gen_rtx_MEM (mode, addr);
3693 if (type != 0)
3695 set_mem_attributes (dest, type, 1);
3696 /* Function incoming arguments may overlap with sibling call
3697 outgoing arguments and we cannot allow reordering of reads
3698 from function arguments with stores to outgoing arguments
3699 of sibling calls. */
3700 set_mem_alias_set (dest, 0);
3703 emit_move_insn (dest, x);
3707 /* If part should go in registers, copy that part
3708 into the appropriate registers. Do this now, at the end,
3709 since mem-to-mem copies above may do function calls. */
3710 if (partial > 0 && reg != 0)
3712 /* Handle calls that pass values in multiple non-contiguous locations.
3713 The Irix 6 ABI has examples of this. */
3714 if (GET_CODE (reg) == PARALLEL)
3715 emit_group_load (reg, x, type, -1);
3716 else
3717 move_block_to_reg (REGNO (reg), x, partial, mode);
3720 if (extra && args_addr == 0 && where_pad == stack_direction)
3721 anti_adjust_stack (GEN_INT (extra));
3723 if (alignment_pad && args_addr == 0)
3724 anti_adjust_stack (alignment_pad);
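/* Editor's note: an illustrative sketch, not part of GCC, and only valid on
   a target with push insns (ARGS_ADDR of 0 together with PUSH_ARGS).  It
   pushes one word_mode scalar with no partial registers, no preallocated
   argument block, and no extra padding; ARGS_SO_FAR is passed but unused on
   this path.  The helper name is hypothetical.  */
#if 0
static void
example_push_scalar (rtx x)
{
  emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX,
                  GET_MODE_ALIGNMENT (word_mode), 0, NULL_RTX, 0,
                  NULL_RTX, GEN_INT (0), 0, NULL_RTX);
}
#endif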
3727 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3728 operations. */
3730 static rtx
3731 get_subtarget (rtx x)
3733 return ((x == 0
3734 /* Only registers can be subtargets. */
3735 || GET_CODE (x) != REG
3736 /* If the register is readonly, it can't be set more than once. */
3737 || RTX_UNCHANGING_P (x)
3738 /* Don't use hard regs to avoid extending their life. */
3739 || REGNO (x) < FIRST_PSEUDO_REGISTER
3740 /* Avoid subtargets inside loops,
3741 since they hide some invariant expressions. */
3742 || preserve_subexpressions_p ())
3743 ? 0 : x);
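/* A minimal usage sketch (an illustration, not code taken from this file):
   callers that expand a binary operation typically do something like

       rtx subtarget = get_subtarget (target);
       temp = expand_binop (mode, add_optab, op0, op1, subtarget,
                            unsignedp, OPTAB_LIB_WIDEN);

   where MODE, OP0, OP1, TARGET and UNSIGNEDP are assumed to be values the
   caller already has.  A nonzero SUBTARGET lets expand_binop reuse TARGET
   for intermediate results when that is safe.  */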
3746 /* Expand an assignment that stores the value of FROM into TO.
3747 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3748 (This may contain a QUEUED rtx;
3749 if the value is constant, this rtx is a constant.)
3750 Otherwise, the returned value is NULL_RTX. */
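/* A hedged sketch of the hand-off (a paraphrase, not the exact code): the
   MODIFY_EXPR case of expand_expr calls this routine roughly as

       temp = expand_assignment (TREE_OPERAND (exp, 0),
                                 TREE_OPERAND (exp, 1),
                                 want_value);

   so the code below only has to deal with an lvalue tree TO and an rvalue
   tree FROM.  */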
3753 expand_assignment (tree to, tree from, int want_value)
3755 rtx to_rtx = 0;
3756 rtx result;
3758 /* Don't crash if the lhs of the assignment was erroneous. */
3760 if (TREE_CODE (to) == ERROR_MARK)
3762 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3763 return want_value ? result : NULL_RTX;
3766 /* Assignment of a structure component needs special treatment
3767 if the structure component's rtx is not simply a MEM.
3768 Assignment of an array element at a constant index, and assignment of
3769 an array element in an unaligned packed structure field, have the same
3770 problem. */
3772 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3773 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3774 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3776 enum machine_mode mode1;
3777 HOST_WIDE_INT bitsize, bitpos;
3778 rtx orig_to_rtx;
3779 tree offset;
3780 int unsignedp;
3781 int volatilep = 0;
3782 tree tem;
3784 push_temp_slots ();
3785 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3786 &unsignedp, &volatilep);
3788 /* If we are going to use store_bit_field and extract_bit_field,
3789 make sure to_rtx will be safe for multiple use. */
3791 if (mode1 == VOIDmode && want_value)
3792 tem = stabilize_reference (tem);
3794 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3796 if (offset != 0)
3798 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3800 if (GET_CODE (to_rtx) != MEM)
3801 abort ();
3803 #ifdef POINTERS_EXTEND_UNSIGNED
3804 if (GET_MODE (offset_rtx) != Pmode)
3805 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3806 #else
3807 if (GET_MODE (offset_rtx) != ptr_mode)
3808 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3809 #endif
3811 /* A constant address in TO_RTX can have VOIDmode; we must not try
3812 to call force_reg in that case, so avoid it. */
3813 if (GET_CODE (to_rtx) == MEM
3814 && GET_MODE (to_rtx) == BLKmode
3815 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3816 && bitsize > 0
3817 && (bitpos % bitsize) == 0
3818 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3819 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3821 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3822 bitpos = 0;
3825 to_rtx = offset_address (to_rtx, offset_rtx,
3826 highest_pow2_factor_for_target (to,
3827 offset));
3830 if (GET_CODE (to_rtx) == MEM)
3832 /* If the field is at offset zero, we could have been given the
3833 DECL_RTX of the parent struct. Don't munge it. */
3834 to_rtx = shallow_copy_rtx (to_rtx);
3836 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3839 /* Deal with volatile and readonly fields. The former is only done
3840 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3841 if (volatilep && GET_CODE (to_rtx) == MEM)
3843 if (to_rtx == orig_to_rtx)
3844 to_rtx = copy_rtx (to_rtx);
3845 MEM_VOLATILE_P (to_rtx) = 1;
3848 if (TREE_CODE (to) == COMPONENT_REF
3849 && TREE_READONLY (TREE_OPERAND (to, 1))
3850 /* We can't assert that a MEM won't be set more than once
3851 if the component is not addressable because another
3852 non-addressable component may be referenced by the same MEM. */
3853 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3855 if (to_rtx == orig_to_rtx)
3856 to_rtx = copy_rtx (to_rtx);
3857 RTX_UNCHANGING_P (to_rtx) = 1;
3860 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3862 if (to_rtx == orig_to_rtx)
3863 to_rtx = copy_rtx (to_rtx);
3864 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3867 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3868 (want_value
3869 /* Spurious cast for HPUX compiler. */
3870 ? ((enum machine_mode)
3871 TYPE_MODE (TREE_TYPE (to)))
3872 : VOIDmode),
3873 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3875 preserve_temp_slots (result);
3876 free_temp_slots ();
3877 pop_temp_slots ();
3879 /* If the value is meaningful, convert RESULT to the proper mode.
3880 Otherwise, return nothing. */
3881 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3882 TYPE_MODE (TREE_TYPE (from)),
3883 result,
3884 TYPE_UNSIGNED (TREE_TYPE (to)))
3885 : NULL_RTX);
3888 /* If the rhs is a function call and its value is not an aggregate,
3889 call the function before we start to compute the lhs.
3890 This is needed for correct code for cases such as
3891 val = setjmp (buf) on machines where reference to val
3892 requires loading up part of an address in a separate insn.
3894 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
3895 since it might be a promoted variable where the zero- or sign-extension
3896 needs to be done. Handling this in the normal way is safe because no
3897 computation is done before the call. */
3898 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3899 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3900 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3901 && GET_CODE (DECL_RTL (to)) == REG))
3903 rtx value;
3905 push_temp_slots ();
3906 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3907 if (to_rtx == 0)
3908 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3910 /* Handle calls that return values in multiple non-contiguous locations.
3911 The Irix 6 ABI has examples of this. */
3912 if (GET_CODE (to_rtx) == PARALLEL)
3913 emit_group_load (to_rtx, value, TREE_TYPE (from),
3914 int_size_in_bytes (TREE_TYPE (from)));
3915 else if (GET_MODE (to_rtx) == BLKmode)
3916 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3917 else
3919 if (POINTER_TYPE_P (TREE_TYPE (to)))
3920 value = convert_memory_address (GET_MODE (to_rtx), value);
3921 emit_move_insn (to_rtx, value);
3923 preserve_temp_slots (to_rtx);
3924 free_temp_slots ();
3925 pop_temp_slots ();
3926 return want_value ? to_rtx : NULL_RTX;
3929 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3930 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3932 if (to_rtx == 0)
3933 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3935 /* Don't move directly into a return register. */
3936 if (TREE_CODE (to) == RESULT_DECL
3937 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3939 rtx temp;
3941 push_temp_slots ();
3942 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3944 if (GET_CODE (to_rtx) == PARALLEL)
3945 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3946 int_size_in_bytes (TREE_TYPE (from)));
3947 else
3948 emit_move_insn (to_rtx, temp);
3950 preserve_temp_slots (to_rtx);
3951 free_temp_slots ();
3952 pop_temp_slots ();
3953 return want_value ? to_rtx : NULL_RTX;
3956 /* In case we are returning the contents of an object which overlaps
3957 the place the value is being stored, use a safe function when copying
3958 a value through a pointer into a structure value return block. */
3959 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3960 && current_function_returns_struct
3961 && !current_function_returns_pcc_struct)
3963 rtx from_rtx, size;
3965 push_temp_slots ();
3966 size = expr_size (from);
3967 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3969 if (TARGET_MEM_FUNCTIONS)
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TYPE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
3976 else
3977 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3981 size,
3982 TYPE_UNSIGNED (integer_type_node)),
3983 TYPE_MODE (integer_type_node));
3985 preserve_temp_slots (to_rtx);
3986 free_temp_slots ();
3987 pop_temp_slots ();
3988 return want_value ? to_rtx : NULL_RTX;
3991 /* Compute FROM and store the value in the rtx we got. */
3993 push_temp_slots ();
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
3996 free_temp_slots ();
3997 pop_temp_slots ();
3998 return want_value ? result : NULL_RTX;
4001 /* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
4003 TARGET may contain a QUEUED rtx.
4005 If WANT_VALUE & 1 is nonzero, return a copy of the value
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4013 If the mode is BLKmode then we may return TARGET itself.
4014 It turns out that in BLKmode it doesn't cause a problem,
4015 because C has no operators that could combine two different
4016 assignments into the same BLKmode object with different values
4017 with no sequence point. Will other languages need this to
4018 be more thorough?
4020 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4021 to catch quickly any cases where the caller uses the value
4022 and fails to set WANT_VALUE.
4024 If WANT_VALUE & 2 is set, this is a store into a call param on the
4025 stack, and block moves may need to be treated specially. */
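/* A minimal sketch of how the WANT_VALUE bits combine, assuming EXP and
   SLOT are a tree and an rtx a hypothetical caller already has:

       store_expr (exp, slot, 0);             side effect only, returns NULL_RTX
       temp = store_expr (exp, slot, 1);      also return the stored value
       store_expr (exp, slot, 2);             store into a stack call parameter
       temp = store_expr (exp, slot, 1 | 2);  both

   A caller that leaves bit 0 clear must not use the return value.  */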
4028 store_expr (tree exp, rtx target, int want_value)
4030 rtx temp;
4031 rtx alt_rtl = NULL_RTX;
4032 rtx mark = mark_queue ();
4033 int dont_return_target = 0;
4034 int dont_store_target = 0;
4036 if (VOID_TYPE_P (TREE_TYPE (exp)))
4038 /* C++ can generate ?: expressions with a throw expression in one
4039 branch and an rvalue in the other. Here, we resolve attempts to
4040 store the throw expression's nonexistent result. */
4041 if (want_value)
4042 abort ();
4043 expand_expr (exp, const0_rtx, VOIDmode, 0);
4044 return NULL_RTX;
4046 if (TREE_CODE (exp) == COMPOUND_EXPR)
4048 /* Perform first part of compound expression, then assign from second
4049 part. */
4050 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4051 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4052 emit_queue ();
4053 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4055 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4057 /* For a conditional expression, get a safe form of the target. Then
4058 test the condition, doing the appropriate assignment on either
4059 side. This avoids the creation of unnecessary temporaries.
4060 For non-BLKmode, it is more efficient not to do this. */
4062 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4064 emit_queue ();
4065 target = protect_from_queue (target, 1);
4067 do_pending_stack_adjust ();
4068 NO_DEFER_POP;
4069 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4070 start_cleanup_deferral ();
4071 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4072 end_cleanup_deferral ();
4073 emit_queue ();
4074 emit_jump_insn (gen_jump (lab2));
4075 emit_barrier ();
4076 emit_label (lab1);
4077 start_cleanup_deferral ();
4078 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4079 end_cleanup_deferral ();
4080 emit_queue ();
4081 emit_label (lab2);
4082 OK_DEFER_POP;
4084 return want_value & 1 ? target : NULL_RTX;
4086 else if (queued_subexp_p (target))
4087 /* If target contains a postincrement, let's not risk
4088 using it as the place to generate the rhs. */
4090 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4092 /* Expand EXP into a new pseudo. */
4093 temp = gen_reg_rtx (GET_MODE (target));
4094 temp = expand_expr (exp, temp, GET_MODE (target),
4095 (want_value & 2
4096 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4098 else
4099 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4100 (want_value & 2
4101 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4103 /* If target is volatile, ANSI requires accessing the value
4104 *from* the target, if it is accessed. So make that happen.
4105 In no case return the target itself. */
4106 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4107 dont_return_target = 1;
4109 else if ((want_value & 1) != 0
4110 && GET_CODE (target) == MEM
4111 && ! MEM_VOLATILE_P (target)
4112 && GET_MODE (target) != BLKmode)
4113 /* If target is in memory and caller wants value in a register instead,
4114 arrange that. Pass TARGET as target for expand_expr so that,
4115 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4116 We know expand_expr will not use the target in that case.
4117 Don't do this if TARGET is volatile because we are supposed
4118 to write it and then read it. */
4120 temp = expand_expr (exp, target, GET_MODE (target),
4121 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4122 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4124 /* If TEMP is already in the desired TARGET, only copy it from
4125 memory and don't store it there again. */
4126 if (temp == target
4127 || (rtx_equal_p (temp, target)
4128 && ! side_effects_p (temp) && ! side_effects_p (target)))
4129 dont_store_target = 1;
4130 temp = copy_to_reg (temp);
4132 dont_return_target = 1;
4134 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4135 /* If this is a scalar in a register that is stored in a wider mode
4136 than the declared mode, compute the result into its declared mode
4137 and then convert to the wider mode. Our value is the computed
4138 expression. */
4140 rtx inner_target = 0;
4142 /* If we don't want a value, we can do the conversion inside EXP,
4143 which will often result in some optimizations. Do the conversion
4144 in two steps: first change the signedness, if needed, then
4145 the extend. But don't do this if the type of EXP is a subtype
4146 of something else since then the conversion might involve
4147 more than just converting modes. */
4148 if ((want_value & 1) == 0
4149 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4150 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4152 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4153 != SUBREG_PROMOTED_UNSIGNED_P (target))
4154 exp = convert
4155 (lang_hooks.types.signed_or_unsigned_type
4156 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4158 exp = convert (lang_hooks.types.type_for_mode
4159 (GET_MODE (SUBREG_REG (target)),
4160 SUBREG_PROMOTED_UNSIGNED_P (target)),
4161 exp);
4163 inner_target = SUBREG_REG (target);
4166 temp = expand_expr (exp, inner_target, VOIDmode,
4167 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4169 /* If TEMP is a MEM and we want a result value, make the access
4170 now so it gets done only once. Strictly speaking, this is
4171 only necessary if the MEM is volatile, or if the address
4172 overlaps TARGET. But not performing the load twice also
4173 reduces the amount of rtl we generate and then have to CSE. */
4174 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4175 temp = copy_to_reg (temp);
4177 /* If TEMP is a VOIDmode constant, use convert_modes to make
4178 sure that we properly convert it. */
4179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4181 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4182 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4183 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4184 GET_MODE (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4188 convert_move (SUBREG_REG (target), temp,
4189 SUBREG_PROMOTED_UNSIGNED_P (target));
4191 /* If we promoted a constant, change the mode back down to match
4192 target. Otherwise, the caller might get confused by a result whose
4193 mode is larger than expected. */
4195 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4197 if (GET_MODE (temp) != VOIDmode)
4199 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4200 SUBREG_PROMOTED_VAR_P (temp) = 1;
4201 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4202 SUBREG_PROMOTED_UNSIGNED_P (target));
4204 else
4205 temp = convert_modes (GET_MODE (target),
4206 GET_MODE (SUBREG_REG (target)),
4207 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4210 return want_value & 1 ? temp : NULL_RTX;
4212 else
4214 temp = expand_expr_real (exp, target, GET_MODE (target),
4215 (want_value & 2
4216 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4217 &alt_rtl);
4218 /* Return TARGET if it's a specified hardware register.
4219 If TARGET is a volatile mem ref, either return TARGET
4220 or return a reg copied *from* TARGET; ANSI requires this.
4222 Otherwise, if TEMP is not TARGET, return TEMP
4223 if it is constant (for efficiency),
4224 or if we really want the correct value. */
4225 if (!(target && GET_CODE (target) == REG
4226 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4227 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4228 && ! rtx_equal_p (temp, target)
4229 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4230 dont_return_target = 1;
4233 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4234 the same as that of TARGET, adjust the constant. This is needed, for
4235 example, in case it is a CONST_DOUBLE and we want only a word-sized
4236 value. */
4237 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4238 && TREE_CODE (exp) != ERROR_MARK
4239 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4240 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4241 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4243 /* If value was not generated in the target, store it there.
4244 Convert the value to TARGET's type first if necessary and emit the
4245 pending incrementations that have been queued when expanding EXP.
4246 Note that we cannot emit the whole queue blindly because this will
4247 effectively disable the POST_INC optimization later.
4249 If TEMP and TARGET compare equal according to rtx_equal_p, but
4250 one or both of them are volatile memory refs, we have to distinguish
4251 two cases:
4252 - expand_expr has used TARGET. In this case, we must not generate
4253 another copy. This can be detected by TARGET being equal according
4254 to == .
4255 - expand_expr has not used TARGET - that means that the source just
4256 happens to have the same RTX form. Since temp will have been created
4257 by expand_expr, it will compare unequal according to == .
4258 We must generate a copy in this case, to reach the correct number
4259 of volatile memory references. */
4261 if ((! rtx_equal_p (temp, target)
4262 || (temp != target && (side_effects_p (temp)
4263 || side_effects_p (target))))
4264 && TREE_CODE (exp) != ERROR_MARK
4265 && ! dont_store_target
4266 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4267 but TARGET is not a valid memory reference, TEMP will differ
4268 from TARGET although it is really the same location. */
4269 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4270 /* If there's nothing to copy, don't bother. Don't call expr_size
4271 unless necessary, because the expr_size hook of some front ends
4272 (e.g. C++) aborts on objects that are not supposed to be bit-copied or
4273 bit-initialized. */
4274 && expr_size (exp) != const0_rtx)
4276 emit_insns_enqueued_after_mark (mark);
4277 target = protect_from_queue (target, 1);
4278 temp = protect_from_queue (temp, 0);
4279 if (GET_MODE (temp) != GET_MODE (target)
4280 && GET_MODE (temp) != VOIDmode)
4282 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4283 if (dont_return_target)
4285 /* In this case, we will return TEMP,
4286 so make sure it has the proper mode.
4287 But don't forget to store the value into TARGET. */
4288 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4289 emit_move_insn (target, temp);
4291 else
4292 convert_move (target, temp, unsignedp);
4295 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4297 /* Handle copying a string constant into an array. The string
4298 constant may be shorter than the array. So copy just the string's
4299 actual length, and clear the rest. First get the size of the data
4300 type of the string, which is actually the size of the target. */
4301 rtx size = expr_size (exp);
4303 if (GET_CODE (size) == CONST_INT
4304 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4305 emit_block_move (target, temp, size,
4306 (want_value & 2
4307 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4308 else
4310 /* Compute the size of the data to copy from the string. */
4311 tree copy_size
4312 = size_binop (MIN_EXPR,
4313 make_tree (sizetype, size),
4314 size_int (TREE_STRING_LENGTH (exp)));
4315 rtx copy_size_rtx
4316 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4317 (want_value & 2
4318 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4319 rtx label = 0;
4321 /* Copy that much. */
4322 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4323 TYPE_UNSIGNED (sizetype));
4324 emit_block_move (target, temp, copy_size_rtx,
4325 (want_value & 2
4326 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4328 /* Figure out how much is left in TARGET that we have to clear.
4329 Do all calculations in ptr_mode. */
4330 if (GET_CODE (copy_size_rtx) == CONST_INT)
4332 size = plus_constant (size, -INTVAL (copy_size_rtx));
4333 target = adjust_address (target, BLKmode,
4334 INTVAL (copy_size_rtx));
4336 else
4338 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4339 copy_size_rtx, NULL_RTX, 0,
4340 OPTAB_LIB_WIDEN);
4342 #ifdef POINTERS_EXTEND_UNSIGNED
4343 if (GET_MODE (copy_size_rtx) != Pmode)
4344 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4345 TYPE_UNSIGNED (sizetype));
4346 #endif
4348 target = offset_address (target, copy_size_rtx,
4349 highest_pow2_factor (copy_size));
4350 label = gen_label_rtx ();
4351 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4352 GET_MODE (size), 0, label);
4355 if (size != const0_rtx)
4356 clear_storage (target, size);
4358 if (label)
4359 emit_label (label);
4362 /* Handle calls that return values in multiple non-contiguous locations.
4363 The Irix 6 ABI has examples of this. */
4364 else if (GET_CODE (target) == PARALLEL)
4365 emit_group_load (target, temp, TREE_TYPE (exp),
4366 int_size_in_bytes (TREE_TYPE (exp)));
4367 else if (GET_MODE (temp) == BLKmode)
4368 emit_block_move (target, temp, expr_size (exp),
4369 (want_value & 2
4370 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4371 else
4373 temp = force_operand (temp, target);
4374 if (temp != target)
4375 emit_move_insn (target, temp);
4379 /* If we don't want a value, return NULL_RTX. */
4380 if ((want_value & 1) == 0)
4381 return NULL_RTX;
4383 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4384 ??? The latter test doesn't seem to make sense. */
4385 else if (dont_return_target && GET_CODE (temp) != MEM)
4386 return temp;
4388 /* Return TARGET itself if it is a hard register. */
4389 else if ((want_value & 1) != 0
4390 && GET_MODE (target) != BLKmode
4391 && ! (GET_CODE (target) == REG
4392 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4393 return copy_to_reg (target);
4395 else
4396 return target;
4399 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4401 static int
4402 is_zeros_p (tree exp)
4404 tree elt;
4406 switch (TREE_CODE (exp))
4408 case CONVERT_EXPR:
4409 case NOP_EXPR:
4410 case NON_LVALUE_EXPR:
4411 case VIEW_CONVERT_EXPR:
4412 return is_zeros_p (TREE_OPERAND (exp, 0));
4414 case INTEGER_CST:
4415 return integer_zerop (exp);
4417 case COMPLEX_CST:
4418 return
4419 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4421 case REAL_CST:
4422 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4424 case VECTOR_CST:
4425 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4426 elt = TREE_CHAIN (elt))
4427 if (!is_zeros_p (TREE_VALUE (elt)))
4428 return 0;
4430 return 1;
4432 case CONSTRUCTOR:
4433 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4434 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4435 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4436 if (! is_zeros_p (TREE_VALUE (elt)))
4437 return 0;
4439 return 1;
4441 default:
4442 return 0;
4446 /* Return 1 if EXP contains mostly (3/4) zeros. */
4449 mostly_zeros_p (tree exp)
4451 if (TREE_CODE (exp) == CONSTRUCTOR)
4453 int elts = 0, zeros = 0;
4454 tree elt = CONSTRUCTOR_ELTS (exp);
4455 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4457 /* If there are no ranges of true bits, it is all zero. */
4458 return elt == NULL_TREE;
4460 for (; elt; elt = TREE_CHAIN (elt))
4462 /* We do not handle the case where the index is a RANGE_EXPR,
4463 so the statistic will be somewhat inaccurate.
4464 We do make a more accurate count in store_constructor itself,
4465 and since this function is only used for nested array elements,
4466 this should be close enough. */
4467 if (mostly_zeros_p (TREE_VALUE (elt)))
4468 zeros++;
4469 elts++;
4472 return 4 * zeros >= 3 * elts;
4475 return is_zeros_p (exp);
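/* A worked example of the 4 * zeros >= 3 * elts test above: a CONSTRUCTOR
   such as { 0, 0, 0, 5 } has elts == 4 and zeros == 3, so 12 >= 12 holds
   and the initializer counts as mostly zero, while { 0, 0, 5, 5 } gives
   8 >= 12 and does not.  */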
4478 /* Helper function for store_constructor.
4479 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4480 TYPE is the type of the CONSTRUCTOR, not the element type.
4481 CLEARED is as for store_constructor.
4482 ALIAS_SET is the alias set to use for any stores.
4484 This provides a recursive shortcut back to store_constructor when it isn't
4485 necessary to go through store_field. This is so that we can pass through
4486 the cleared field to let store_constructor know that we may not have to
4487 clear a substructure if the outer structure has already been cleared. */
4489 static void
4490 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4491 HOST_WIDE_INT bitpos, enum machine_mode mode,
4492 tree exp, tree type, int cleared, int alias_set)
4494 if (TREE_CODE (exp) == CONSTRUCTOR
4495 && bitpos % BITS_PER_UNIT == 0
4496 /* If we have a nonzero bitpos for a register target, then we just
4497 let store_field do the bitfield handling. This is unlikely to
4498 generate unnecessary clear instructions anyway. */
4499 && (bitpos == 0 || GET_CODE (target) == MEM))
4501 if (GET_CODE (target) == MEM)
4502 target
4503 = adjust_address (target,
4504 GET_MODE (target) == BLKmode
4505 || 0 != (bitpos
4506 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4507 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4510 /* Update the alias set, if required. */
4511 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4512 && MEM_ALIAS_SET (target) != 0)
4514 target = copy_rtx (target);
4515 set_mem_alias_set (target, alias_set);
4518 store_constructor (exp, target, cleared, bitsize);
4520 else
4521 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4522 alias_set);
4525 /* Store the value of constructor EXP into the rtx TARGET.
4526 TARGET is either a REG or a MEM; we know it cannot conflict, since
4527 safe_from_p has been called.
4528 CLEARED is positive if TARGET is known to have been zeroed, zero if
4529 this is the top level of calls to store_constructor, and negative
4530 if this is a recursive call but no clearing has been done.
4531 SIZE is the number of bits of TARGET we are allowed to modify: this
4532 may not be the same as the size of EXP if we are assigning to a field
4533 which has been packed to exclude padding bits. */
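/* Note on the CLEARED protocol: store_constructor_field above passes
   CLEARED through unchanged, so once the top-level call has zeroed the
   whole object (CLEARED > 0) the recursive calls can skip every element
   that is_zeros_p accepts instead of storing zeros again.  */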
4535 static void
4536 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4538 tree type = TREE_TYPE (exp);
4539 #ifdef WORD_REGISTER_OPERATIONS
4540 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4541 #endif
4543 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4544 || TREE_CODE (type) == QUAL_UNION_TYPE)
4546 tree elt;
4548 /* If this is not the top level, we don't do any initialization. */
4549 if (cleared)
4551 /* If the size is zero, pretend we've cleared it. */
4552 else if (size == 0)
4553 cleared = 1;
4554 /* We either clear the aggregate or indicate the value is dead. */
4555 else if ((TREE_CODE (type) == UNION_TYPE
4556 || TREE_CODE (type) == QUAL_UNION_TYPE)
4557 && ! CONSTRUCTOR_ELTS (exp))
4558 /* If the constructor is empty, clear the union. */
4560 clear_storage (target, GEN_INT (size / BITS_PER_UNIT));
4561 cleared = 1;
4564 /* If we are building a static constructor into a register,
4565 set the initial value as zero so we can fold the value into
4566 a constant. But if more than one register is involved,
4567 this probably loses. */
4568 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4569 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4571 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4572 cleared = 1;
4575 /* If the constructor has fewer fields than the structure
4576 or if we are initializing the structure to mostly zeros,
4577 clear the whole structure first. Don't do this if TARGET is a
4578 register whose mode size isn't equal to SIZE since clear_storage
4579 can't handle this case. */
4580 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4581 || mostly_zeros_p (exp))
4582 && (GET_CODE (target) != REG
4583 || ((HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target))
4584 == size)))
4586 rtx xtarget = target;
4588 if (readonly_fields_p (type))
4590 xtarget = copy_rtx (xtarget);
4591 RTX_UNCHANGING_P (xtarget) = 1;
4594 clear_storage (xtarget, GEN_INT (size / BITS_PER_UNIT));
4595 cleared = 1;
4597 else
4599 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4600 cleared = -1;
4603 /* Store each element of the constructor into
4604 the corresponding field of TARGET. */
4606 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4608 tree field = TREE_PURPOSE (elt);
4609 tree value = TREE_VALUE (elt);
4610 enum machine_mode mode;
4611 HOST_WIDE_INT bitsize;
4612 HOST_WIDE_INT bitpos = 0;
4613 tree offset;
4614 rtx to_rtx = target;
4616 /* Just ignore missing fields.
4617 We cleared the whole structure, above,
4618 if any fields are missing. */
4619 if (field == 0)
4620 continue;
4622 if (cleared > 0 && is_zeros_p (value))
4623 continue;
4625 if (host_integerp (DECL_SIZE (field), 1))
4626 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4627 else
4628 bitsize = -1;
4630 mode = DECL_MODE (field);
4631 if (DECL_BIT_FIELD (field))
4632 mode = VOIDmode;
4634 offset = DECL_FIELD_OFFSET (field);
4635 if (host_integerp (offset, 0)
4636 && host_integerp (bit_position (field), 0))
4638 bitpos = int_bit_position (field);
4639 offset = 0;
4641 else
4642 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4644 if (offset)
4646 rtx offset_rtx;
4648 offset
4649 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4650 make_tree (TREE_TYPE (exp),
4651 target));
4653 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4654 if (GET_CODE (to_rtx) != MEM)
4655 abort ();
4657 #ifdef POINTERS_EXTEND_UNSIGNED
4658 if (GET_MODE (offset_rtx) != Pmode)
4659 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4660 #else
4661 if (GET_MODE (offset_rtx) != ptr_mode)
4662 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4663 #endif
4665 to_rtx = offset_address (to_rtx, offset_rtx,
4666 highest_pow2_factor (offset));
4669 if (TREE_READONLY (field))
4671 if (GET_CODE (to_rtx) == MEM)
4672 to_rtx = copy_rtx (to_rtx);
4674 RTX_UNCHANGING_P (to_rtx) = 1;
4677 #ifdef WORD_REGISTER_OPERATIONS
4678 /* If this initializes a field that is smaller than a word, at the
4679 start of a word, try to widen it to a full word.
4680 This special case allows us to output C++ member function
4681 initializations in a form that the optimizers can understand. */
4682 if (GET_CODE (target) == REG
4683 && bitsize < BITS_PER_WORD
4684 && bitpos % BITS_PER_WORD == 0
4685 && GET_MODE_CLASS (mode) == MODE_INT
4686 && TREE_CODE (value) == INTEGER_CST
4687 && exp_size >= 0
4688 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4690 tree type = TREE_TYPE (value);
4692 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4694 type = lang_hooks.types.type_for_size
4695 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4696 value = convert (type, value);
4699 if (BYTES_BIG_ENDIAN)
4700 value
4701 = fold (build (LSHIFT_EXPR, type, value,
4702 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4703 bitsize = BITS_PER_WORD;
4704 mode = word_mode;
4706 #endif
4708 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4709 && DECL_NONADDRESSABLE_P (field))
4711 to_rtx = copy_rtx (to_rtx);
4712 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4715 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4716 value, type, cleared,
4717 get_alias_set (TREE_TYPE (field)));
4720 else if (TREE_CODE (type) == ARRAY_TYPE
4721 || TREE_CODE (type) == VECTOR_TYPE)
4723 tree elt;
4724 int i;
4725 int need_to_clear;
4726 tree domain;
4727 tree elttype = TREE_TYPE (type);
4728 int const_bounds_p;
4729 HOST_WIDE_INT minelt = 0;
4730 HOST_WIDE_INT maxelt = 0;
4731 int icode = 0;
4732 rtx *vector = NULL;
4733 int elt_size = 0;
4734 unsigned n_elts = 0;
4736 if (TREE_CODE (type) == ARRAY_TYPE)
4737 domain = TYPE_DOMAIN (type);
4738 else
4739 /* Vectors do not have domains; look up the domain of
4740 the array embedded in the debug representation type.
4741 FIXME Would probably be more efficient to treat vectors
4742 separately from arrays. */
4744 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4745 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4746 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4748 enum machine_mode mode = GET_MODE (target);
4750 icode = (int) vec_init_optab->handlers[mode].insn_code;
4751 if (icode != CODE_FOR_nothing)
4753 unsigned int i;
4755 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4756 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4757 vector = alloca (n_elts);
4758 for (i = 0; i < n_elts; i++)
4759 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4764 const_bounds_p = (TYPE_MIN_VALUE (domain)
4765 && TYPE_MAX_VALUE (domain)
4766 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4767 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4769 /* If we have constant bounds for the range of the type, get them. */
4770 if (const_bounds_p)
4772 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4773 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4776 /* If the constructor has fewer elements than the array,
4777 clear the whole array first. Similarly if this is a
4778 static constructor of a non-BLKmode object. */
4779 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4780 need_to_clear = 1;
4781 else
4783 HOST_WIDE_INT count = 0, zero_count = 0;
4784 need_to_clear = ! const_bounds_p;
4786 /* This loop is a more accurate version of the loop in
4787 mostly_zeros_p (it handles RANGE_EXPR in an index).
4788 It is also needed to check for missing elements. */
4789 for (elt = CONSTRUCTOR_ELTS (exp);
4790 elt != NULL_TREE && ! need_to_clear;
4791 elt = TREE_CHAIN (elt))
4793 tree index = TREE_PURPOSE (elt);
4794 HOST_WIDE_INT this_node_count;
4796 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4798 tree lo_index = TREE_OPERAND (index, 0);
4799 tree hi_index = TREE_OPERAND (index, 1);
4801 if (! host_integerp (lo_index, 1)
4802 || ! host_integerp (hi_index, 1))
4804 need_to_clear = 1;
4805 break;
4808 this_node_count = (tree_low_cst (hi_index, 1)
4809 - tree_low_cst (lo_index, 1) + 1);
4811 else
4812 this_node_count = 1;
4814 count += this_node_count;
4815 if (mostly_zeros_p (TREE_VALUE (elt)))
4816 zero_count += this_node_count;
4819 /* Clear the entire array first if there are any missing elements,
4820 or if the incidence of zero elements is >= 75%. */
4821 if (! need_to_clear
4822 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4823 need_to_clear = 1;
4826 if (need_to_clear && size > 0 && !vector)
4828 if (! cleared)
4830 if (REG_P (target))
4831 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4832 else
4833 clear_storage (target, GEN_INT (size / BITS_PER_UNIT));
4835 cleared = 1;
4838 else if (REG_P (target))
4840 /* Inform later passes that the old value is dead. */
4841 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4842 cleared = -1;
4845 /* Store each element of the constructor into
4846 the corresponding element of TARGET, determined
4847 by counting the elements. */
4848 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4849 elt;
4850 elt = TREE_CHAIN (elt), i++)
4852 enum machine_mode mode;
4853 HOST_WIDE_INT bitsize;
4854 HOST_WIDE_INT bitpos;
4855 int unsignedp;
4856 tree value = TREE_VALUE (elt);
4857 tree index = TREE_PURPOSE (elt);
4858 rtx xtarget = target;
4860 if (cleared > 0 && is_zeros_p (value))
4861 continue;
4863 unsignedp = TYPE_UNSIGNED (elttype);
4864 mode = TYPE_MODE (elttype);
4865 if (mode == BLKmode)
4866 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4867 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4868 : -1);
4869 else
4870 bitsize = GET_MODE_BITSIZE (mode);
4872 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4874 tree lo_index = TREE_OPERAND (index, 0);
4875 tree hi_index = TREE_OPERAND (index, 1);
4876 rtx index_r, pos_rtx, loop_end;
4877 struct nesting *loop;
4878 HOST_WIDE_INT lo, hi, count;
4879 tree position;
4881 if (vector)
4882 abort ();
4884 /* If the range is constant and "small", unroll the loop. */
4885 if (const_bounds_p
4886 && host_integerp (lo_index, 0)
4887 && host_integerp (hi_index, 0)
4888 && (lo = tree_low_cst (lo_index, 0),
4889 hi = tree_low_cst (hi_index, 0),
4890 count = hi - lo + 1,
4891 (GET_CODE (target) != MEM
4892 || count <= 2
4893 || (host_integerp (TYPE_SIZE (elttype), 1)
4894 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4895 <= 40 * 8)))))
4897 lo -= minelt; hi -= minelt;
4898 for (; lo <= hi; lo++)
4900 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4902 if (GET_CODE (target) == MEM
4903 && !MEM_KEEP_ALIAS_SET_P (target)
4904 && TREE_CODE (type) == ARRAY_TYPE
4905 && TYPE_NONALIASED_COMPONENT (type))
4907 target = copy_rtx (target);
4908 MEM_KEEP_ALIAS_SET_P (target) = 1;
4911 store_constructor_field
4912 (target, bitsize, bitpos, mode, value, type, cleared,
4913 get_alias_set (elttype));
4916 else
4918 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4919 loop_end = gen_label_rtx ();
4921 unsignedp = TYPE_UNSIGNED (domain);
4923 index = build_decl (VAR_DECL, NULL_TREE, domain);
4925 index_r
4926 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4927 &unsignedp, 0));
4928 SET_DECL_RTL (index, index_r);
4929 if (TREE_CODE (value) == SAVE_EXPR
4930 && SAVE_EXPR_RTL (value) == 0)
4932 /* Make sure value gets expanded once before the
4933 loop. */
4934 expand_expr (value, const0_rtx, VOIDmode, 0);
4935 emit_queue ();
4937 store_expr (lo_index, index_r, 0);
4938 loop = expand_start_loop (0);
4940 /* Assign value to element index. */
4941 position
4942 = convert (ssizetype,
4943 fold (build (MINUS_EXPR, TREE_TYPE (index),
4944 index, TYPE_MIN_VALUE (domain))));
4945 position = size_binop (MULT_EXPR, position,
4946 convert (ssizetype,
4947 TYPE_SIZE_UNIT (elttype)));
4949 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4950 xtarget = offset_address (target, pos_rtx,
4951 highest_pow2_factor (position));
4952 xtarget = adjust_address (xtarget, mode, 0);
4953 if (TREE_CODE (value) == CONSTRUCTOR)
4954 store_constructor (value, xtarget, cleared, bitsize);
4955 else
4956 store_expr (value, xtarget, 0);
4958 expand_exit_loop_if_false (loop,
4959 build (LT_EXPR, integer_type_node,
4960 index, hi_index));
4962 expand_increment (build (PREINCREMENT_EXPR,
4963 TREE_TYPE (index),
4964 index, integer_one_node), 0, 0);
4965 expand_end_loop ();
4966 emit_label (loop_end);
4969 else if ((index != 0 && ! host_integerp (index, 0))
4970 || ! host_integerp (TYPE_SIZE (elttype), 1))
4972 tree position;
4974 if (vector)
4975 abort ();
4977 if (index == 0)
4978 index = ssize_int (1);
4980 if (minelt)
4981 index = convert (ssizetype,
4982 fold (build (MINUS_EXPR, index,
4983 TYPE_MIN_VALUE (domain))));
4985 position = size_binop (MULT_EXPR, index,
4986 convert (ssizetype,
4987 TYPE_SIZE_UNIT (elttype)));
4988 xtarget = offset_address (target,
4989 expand_expr (position, 0, VOIDmode, 0),
4990 highest_pow2_factor (position));
4991 xtarget = adjust_address (xtarget, mode, 0);
4992 store_expr (value, xtarget, 0);
4994 else if (vector)
4996 int pos;
4998 if (index != 0)
4999 pos = tree_low_cst (index, 0) - minelt;
5000 else
5001 pos = i;
5002 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5004 else
5006 if (index != 0)
5007 bitpos = ((tree_low_cst (index, 0) - minelt)
5008 * tree_low_cst (TYPE_SIZE (elttype), 1));
5009 else
5010 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5012 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5013 && TREE_CODE (type) == ARRAY_TYPE
5014 && TYPE_NONALIASED_COMPONENT (type))
5016 target = copy_rtx (target);
5017 MEM_KEEP_ALIAS_SET_P (target) = 1;
5019 store_constructor_field (target, bitsize, bitpos, mode, value,
5020 type, cleared, get_alias_set (elttype));
5023 if (vector)
5025 emit_insn (GEN_FCN (icode) (target,
5026 gen_rtx_PARALLEL (GET_MODE (target),
5027 gen_rtvec_v (n_elts, vector))));
5031 /* Set constructor assignments. */
5032 else if (TREE_CODE (type) == SET_TYPE)
5034 tree elt = CONSTRUCTOR_ELTS (exp);
5035 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5036 tree domain = TYPE_DOMAIN (type);
5037 tree domain_min, domain_max, bitlength;
5039 /* The default implementation strategy is to extract the constant
5040 parts of the constructor, use that to initialize the target,
5041 and then "or" in whatever non-constant ranges we need in addition.
5043 If a large set is all zero or all ones, it is
5044 probably better to set it using memset (if available) or bzero.
5045 Also, if a large set has just a single range, it may also be
5046 better to first clear the whole set (using bzero/memset),
5047 and then set the bits we want. */
5049 /* Check for all zeros. */
5050 if (elt == NULL_TREE && size > 0)
5052 if (cleared <= 0)
5053 clear_storage (target, GEN_INT (size / BITS_PER_UNIT));
5054 return;
5057 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5058 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5059 bitlength = size_binop (PLUS_EXPR,
5060 size_diffop (domain_max, domain_min),
5061 ssize_int (1));
5063 nbits = tree_low_cst (bitlength, 1);
5065 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5066 are "complicated" (more than one range), initialize (the
5067 constant parts) by copying from a constant. */
5068 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5069 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5071 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5072 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5073 char *bit_buffer = alloca (nbits);
5074 HOST_WIDE_INT word = 0;
5075 unsigned int bit_pos = 0;
5076 unsigned int ibit = 0;
5077 unsigned int offset = 0; /* In bytes from beginning of set. */
5079 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5080 for (;;)
5082 if (bit_buffer[ibit])
5084 if (BYTES_BIG_ENDIAN)
5085 word |= (1 << (set_word_size - 1 - bit_pos));
5086 else
5087 word |= 1 << bit_pos;
5090 bit_pos++; ibit++;
5091 if (bit_pos >= set_word_size || ibit == nbits)
5093 if (word != 0 || cleared <= 0)
5095 rtx datum = gen_int_mode (word, mode);
5096 rtx to_rtx;
5098 /* The assumption here is that it is safe to use
5099 XEXP if the set is multi-word, but not if
5100 it's single-word. */
5101 if (GET_CODE (target) == MEM)
5102 to_rtx = adjust_address (target, mode, offset);
5103 else if (offset == 0)
5104 to_rtx = target;
5105 else
5106 abort ();
5107 emit_move_insn (to_rtx, datum);
5110 if (ibit == nbits)
5111 break;
5112 word = 0;
5113 bit_pos = 0;
5114 offset += set_word_size / BITS_PER_UNIT;
5118 else if (cleared <= 0)
5119 /* Don't bother clearing storage if the set is all ones. */
5120 if (TREE_CHAIN (elt) != NULL_TREE
5121 || (TREE_PURPOSE (elt) == NULL_TREE
5122 ? nbits != 1
5123 : ( ! host_integerp (TREE_VALUE (elt), 0)
5124 || ! host_integerp (TREE_PURPOSE (elt), 0)
5125 || (tree_low_cst (TREE_VALUE (elt), 0)
5126 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5127 != (HOST_WIDE_INT) nbits))))
5128 clear_storage (target, GEN_INT (size / BITS_PER_UNIT));
5130 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5132 /* Start of range of element or NULL. */
5133 tree startbit = TREE_PURPOSE (elt);
5134 /* End of range of element, or element value. */
5135 tree endbit = TREE_VALUE (elt);
5136 HOST_WIDE_INT startb, endb;
5137 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5139 bitlength_rtx = expand_expr (bitlength,
5140 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5142 /* Handle non-range tuple element like [ expr ]. */
5143 if (startbit == NULL_TREE)
5145 startbit = save_expr (endbit);
5146 endbit = startbit;
5149 startbit = convert (sizetype, startbit);
5150 endbit = convert (sizetype, endbit);
5151 if (! integer_zerop (domain_min))
5153 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5154 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5156 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5157 EXPAND_CONST_ADDRESS);
5158 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5159 EXPAND_CONST_ADDRESS);
5161 if (REG_P (target))
5163 targetx
5164 = assign_temp
5165 ((build_qualified_type (lang_hooks.types.type_for_mode
5166 (GET_MODE (target), 0),
5167 TYPE_QUAL_CONST)),
5168 0, 1, 1);
5169 emit_move_insn (targetx, target);
5172 else if (GET_CODE (target) == MEM)
5173 targetx = target;
5174 else
5175 abort ();
5177 /* Optimization: If startbit and endbit are constants divisible
5178 by BITS_PER_UNIT, call memset instead. */
5179 if (TARGET_MEM_FUNCTIONS
5180 && TREE_CODE (startbit) == INTEGER_CST
5181 && TREE_CODE (endbit) == INTEGER_CST
5182 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5183 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5185 emit_library_call (memset_libfunc, LCT_NORMAL,
5186 VOIDmode, 3,
5187 plus_constant (XEXP (targetx, 0),
5188 startb / BITS_PER_UNIT),
5189 Pmode,
5190 constm1_rtx, TYPE_MODE (integer_type_node),
5191 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5192 TYPE_MODE (sizetype));
5194 else
5195 emit_library_call (setbits_libfunc, LCT_NORMAL,
5196 VOIDmode, 4, XEXP (targetx, 0),
5197 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5198 startbit_rtx, TYPE_MODE (sizetype),
5199 endbit_rtx, TYPE_MODE (sizetype));
5201 if (REG_P (target))
5202 emit_move_insn (target, targetx);
5206 else
5207 abort ();
5210 /* Store the value of EXP (an expression tree)
5211 into a subfield of TARGET which has mode MODE and occupies
5212 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5213 If MODE is VOIDmode, it means that we are storing into a bit-field.
5215 If VALUE_MODE is VOIDmode, return nothing in particular.
5216 UNSIGNEDP is not used in this case.
5218 Otherwise, return an rtx for the value stored. This rtx
5219 has mode VALUE_MODE if that is convenient to do.
5220 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5222 TYPE is the type of the underlying object.
5224 ALIAS_SET is the alias set for the destination. This value will
5225 (in general) be different from that for TARGET, since TARGET is a
5226 reference to the containing structure. */
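/* A sketch of the typical call, mirroring the one made from
   expand_assignment earlier in this file (the variable names are the ones
   used at that call site):

       result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                             TYPE_MODE (TREE_TYPE (to)), unsignedp,
                             TREE_TYPE (tem), get_alias_set (to));

   Passing VOIDmode as VALUE_MODE instead means no particular value is
   returned and the stored bits are not re-extracted.  */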
5228 static rtx
5229 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5230 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5231 int unsignedp, tree type, int alias_set)
5233 HOST_WIDE_INT width_mask = 0;
5235 if (TREE_CODE (exp) == ERROR_MARK)
5236 return const0_rtx;
5238 /* If we have nothing to store, do nothing unless the expression has
5239 side-effects. */
5240 if (bitsize == 0)
5241 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5242 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5243 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5245 /* If we are storing into an unaligned field of an aligned union that is
5246 in a register, we may have the mode of TARGET being an integer mode but
5247 MODE == BLKmode. In that case, get an aligned object whose size and
5248 alignment are the same as TARGET and store TARGET into it (we can avoid
5249 the store if the field being stored is the entire width of TARGET). Then
5250 call ourselves recursively to store the field into a BLKmode version of
5251 that object. Finally, load from the object into TARGET. This is not
5252 very efficient in general, but should only be slightly more expensive
5253 than the otherwise-required unaligned accesses. Perhaps this can be
5254 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5255 twice, once with emit_move_insn and once via store_field. */
5257 if (mode == BLKmode
5258 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5260 rtx object = assign_temp (type, 0, 1, 1);
5261 rtx blk_object = adjust_address (object, BLKmode, 0);
5263 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5264 emit_move_insn (object, target);
5266 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5267 alias_set);
5269 emit_move_insn (target, object);
5271 /* We want to return the BLKmode version of the data. */
5272 return blk_object;
5275 if (GET_CODE (target) == CONCAT)
5277 /* We're storing into a struct containing a single __complex. */
5279 if (bitpos != 0)
5280 abort ();
5281 return store_expr (exp, target, 0);
5284 /* If the structure is in a register or if the component
5285 is a bit field, we cannot use addressing to access it.
5286 Use bit-field techniques or SUBREG to store in it. */
5288 if (mode == VOIDmode
5289 || (mode != BLKmode && ! direct_store[(int) mode]
5290 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5291 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5292 || GET_CODE (target) == REG
5293 || GET_CODE (target) == SUBREG
5294 /* If the field isn't aligned enough to store as an ordinary memref,
5295 store it as a bit field. */
5296 || (mode != BLKmode
5297 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5298 || bitpos % GET_MODE_ALIGNMENT (mode))
5299 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5300 || (bitpos % BITS_PER_UNIT != 0)))
5301 /* If the RHS and field are a constant size and the size of the
5302 RHS isn't the same size as the bitfield, we must use bitfield
5303 operations. */
5304 || (bitsize >= 0
5305 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5306 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5308 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5310 /* If BITSIZE is narrower than the size of the type of EXP
5311 we will be narrowing TEMP. Normally, what's wanted are the
5312 low-order bits. However, if EXP's type is a record and this is
5313 a big-endian machine, we want the upper BITSIZE bits. */
5314 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5315 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5316 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5317 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5318 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5319 - bitsize),
5320 NULL_RTX, 1);
5322 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5323 MODE. */
5324 if (mode != VOIDmode && mode != BLKmode
5325 && mode != TYPE_MODE (TREE_TYPE (exp)))
5326 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5328 /* If the modes of TARGET and TEMP are both BLKmode, both
5329 must be in memory and BITPOS must be aligned on a byte
5330 boundary. If so, we simply do a block copy. */
5331 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5333 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5334 || bitpos % BITS_PER_UNIT != 0)
5335 abort ();
5337 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5338 emit_block_move (target, temp,
5339 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5340 / BITS_PER_UNIT),
5341 BLOCK_OP_NORMAL);
5343 return value_mode == VOIDmode ? const0_rtx : target;
5346 /* Store the value in the bitfield. */
5347 store_bit_field (target, bitsize, bitpos, mode, temp,
5348 int_size_in_bytes (type));
5350 if (value_mode != VOIDmode)
5352 /* The caller wants an rtx for the value.
5353 If possible, avoid refetching from the bitfield itself. */
5354 if (width_mask != 0
5355 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5357 tree count;
5358 enum machine_mode tmode;
5360 tmode = GET_MODE (temp);
5361 if (tmode == VOIDmode)
5362 tmode = value_mode;
5364 if (unsignedp)
5365 return expand_and (tmode, temp,
5366 gen_int_mode (width_mask, tmode),
5367 NULL_RTX);
5369 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5370 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5371 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5374 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5375 NULL_RTX, value_mode, VOIDmode,
5376 int_size_in_bytes (type));
5378 return const0_rtx;
5380 else
5382 rtx addr = XEXP (target, 0);
5383 rtx to_rtx = target;
5385 /* If a value is wanted, it must be the lhs;
5386 so make the address stable for multiple use. */
5388 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5389 && ! CONSTANT_ADDRESS_P (addr)
5390 /* A frame-pointer reference is already stable. */
5391 && ! (GET_CODE (addr) == PLUS
5392 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5393 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5394 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5395 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5397 /* Now build a reference to just the desired component. */
5399 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5401 if (to_rtx == target)
5402 to_rtx = copy_rtx (to_rtx);
5404 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5405 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5406 set_mem_alias_set (to_rtx, alias_set);
5408 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5412 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5413 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5414 codes and find the ultimate containing object, which we return.
5416 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5417 bit position, and *PUNSIGNEDP to the signedness of the field.
5418 If the position of the field is variable, we store a tree
5419 giving the variable offset (in units) in *POFFSET.
5420 This offset is in addition to the bit position.
5421 If the position is not variable, we store 0 in *POFFSET.
5423 If any of the extraction expressions is volatile,
5424 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5426 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5427 is a mode that can be used to access the field. In that case, *PBITSIZE
5428 is redundant.
5430 If the field describes a variable-sized object, *PMODE is set to
5431 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5432 this case, but the address of the object can be found. */
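/* A minimal usage sketch, mirroring the call made from expand_assignment
   earlier in this file; REF stands for a COMPONENT_REF or ARRAY_REF tree
   the caller already has:

       enum machine_mode mode1;
       HOST_WIDE_INT bitsize, bitpos;
       tree offset;
       int unsignedp, volatilep = 0;
       tree base;

       base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                   &mode1, &unsignedp, &volatilep);

   BASE is the ultimate containing object; BITPOS and BITSIZE locate the
   field within it, and OFFSET is nonzero only when the position is
   variable.  */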
5434 tree
5435 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5436 HOST_WIDE_INT *pbitpos, tree *poffset,
5437 enum machine_mode *pmode, int *punsignedp,
5438 int *pvolatilep)
5440 tree size_tree = 0;
5441 enum machine_mode mode = VOIDmode;
5442 tree offset = size_zero_node;
5443 tree bit_offset = bitsize_zero_node;
5444 tree tem;
5446 /* First get the mode, signedness, and size. We do this from just the
5447 outermost expression. */
5448 if (TREE_CODE (exp) == COMPONENT_REF)
5450 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5451 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5452 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5454 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5456 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5458 size_tree = TREE_OPERAND (exp, 1);
5459 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5461 else
5463 mode = TYPE_MODE (TREE_TYPE (exp));
5464 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5466 if (mode == BLKmode)
5467 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5468 else
5469 *pbitsize = GET_MODE_BITSIZE (mode);
5472 if (size_tree != 0)
5474 if (! host_integerp (size_tree, 1))
5475 mode = BLKmode, *pbitsize = -1;
5476 else
5477 *pbitsize = tree_low_cst (size_tree, 1);
5480 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5481 and find the ultimate containing object. */
5482 while (1)
5484 if (TREE_CODE (exp) == BIT_FIELD_REF)
5485 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5486 else if (TREE_CODE (exp) == COMPONENT_REF)
5488 tree field = TREE_OPERAND (exp, 1);
5489 tree this_offset = DECL_FIELD_OFFSET (field);
5491 /* If this field hasn't been filled in yet, don't go
5492 past it. This should only happen when folding expressions
5493 made during type construction. */
5494 if (this_offset == 0)
5495 break;
5496 else
5497 this_offset = SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset, exp);
5499 offset = size_binop (PLUS_EXPR, offset, this_offset);
5500 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5501 DECL_FIELD_BIT_OFFSET (field));
5503 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5506 else if (TREE_CODE (exp) == ARRAY_REF
5507 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5509 tree index = TREE_OPERAND (exp, 1);
5510 tree array = TREE_OPERAND (exp, 0);
5511 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5512 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5513 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5515 /* We assume all arrays have sizes that are a multiple of a byte.
5516 First subtract the lower bound, if any, in the type of the
5517 index, then convert to sizetype and multiply by the size of the
5518 array element. */
5519 if (low_bound != 0 && ! integer_zerop (low_bound))
5520 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5521 index, low_bound));
5523 /* If the index has a self-referential type, instantiate it with
5524 the object; likewise for the component size. */
5525 index = SUBSTITUTE_PLACEHOLDER_IN_EXPR (index, exp);
5526 unit_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size, array);
5527 offset = size_binop (PLUS_EXPR, offset,
5528 size_binop (MULT_EXPR,
5529 convert (sizetype, index),
5530 unit_size));
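/* For instance (element size hypothetical): for A[I] with 4-byte
   elements and a zero lower bound, this step adds (sizetype) I * 4 to
   OFFSET, the 4 coming from TYPE_SIZE_UNIT of the element type above.  */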
5533 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5534 conversions that don't change the mode, and all view conversions
5535 except those that need to "step up" the alignment. */
5536 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5537 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5538 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5539 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5540 && STRICT_ALIGNMENT
5541 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5542 < BIGGEST_ALIGNMENT)
5543 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5544 || TYPE_ALIGN_OK (TREE_TYPE
5545 (TREE_OPERAND (exp, 0))))))
5546 && ! ((TREE_CODE (exp) == NOP_EXPR
5547 || TREE_CODE (exp) == CONVERT_EXPR)
5548 && (TYPE_MODE (TREE_TYPE (exp))
5549 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5550 break;
5552 /* If any reference in the chain is volatile, the effect is volatile. */
5553 if (TREE_THIS_VOLATILE (exp))
5554 *pvolatilep = 1;
5556 exp = TREE_OPERAND (exp, 0);
5559 /* If OFFSET is constant, see if we can return the whole thing as a
5560 constant bit position. Otherwise, split it up. */
5561 if (host_integerp (offset, 0)
5562 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5563 bitsize_unit_node))
5564 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5565 && host_integerp (tem, 0))
5566 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5567 else
5568 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5570 *pmode = mode;
5571 return exp;
5574 /* Return 1 if T is an expression that get_inner_reference handles. */
5577 handled_component_p (tree t)
5579 switch (TREE_CODE (t))
5581 case BIT_FIELD_REF:
5582 case COMPONENT_REF:
5583 case ARRAY_REF:
5584 case ARRAY_RANGE_REF:
5585 case NON_LVALUE_EXPR:
5586 case VIEW_CONVERT_EXPR:
5587 return 1;
5589 /* ??? Sure they are handled, but get_inner_reference may return
5590 a different PBITSIZE, depending upon whether the expression is
5591 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5592 case NOP_EXPR:
5593 case CONVERT_EXPR:
5594 return (TYPE_MODE (TREE_TYPE (t))
5595 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5597 default:
5598 return 0;
5602 /* Given an rtx VALUE that may contain additions and multiplications, return
5603 an equivalent value that just refers to a register, memory, or constant.
5604 This is done by generating instructions to perform the arithmetic and
5605 returning a pseudo-register containing the value.
5607 The returned value may be a REG, SUBREG, MEM or constant. */
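/* A rough sketch with hypothetical operands: for (plus (reg 100)
   (const_int 4)) an add is emitted and the register holding the sum
   (TARGET if usable, otherwise a fresh pseudo) is returned, while a
   plain REG, MEM or constant falls through to the final return below.  */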
5610 force_operand (rtx value, rtx target)
5612 rtx op1, op2;
5613 /* Use subtarget as the target for operand 0 of a binary operation. */
5614 rtx subtarget = get_subtarget (target);
5615 enum rtx_code code = GET_CODE (value);
5617 /* Check for subreg applied to an expression produced by loop optimizer. */
5618 if (code == SUBREG
5619 && GET_CODE (SUBREG_REG (value)) != REG
5620 && GET_CODE (SUBREG_REG (value)) != MEM)
5622 value = simplify_gen_subreg (GET_MODE (value),
5623 force_reg (GET_MODE (SUBREG_REG (value)),
5624 force_operand (SUBREG_REG (value),
5625 NULL_RTX)),
5626 GET_MODE (SUBREG_REG (value)),
5627 SUBREG_BYTE (value));
5628 code = GET_CODE (value);
5631 /* Check for a PIC address load. */
5632 if ((code == PLUS || code == MINUS)
5633 && XEXP (value, 0) == pic_offset_table_rtx
5634 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5635 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5636 || GET_CODE (XEXP (value, 1)) == CONST))
5638 if (!subtarget)
5639 subtarget = gen_reg_rtx (GET_MODE (value));
5640 emit_move_insn (subtarget, value);
5641 return subtarget;
5644 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5646 if (!target)
5647 target = gen_reg_rtx (GET_MODE (value));
5648 convert_move (target, force_operand (XEXP (value, 0), NULL),
5649 code == ZERO_EXTEND);
5650 return target;
5653 if (ARITHMETIC_P (value))
5655 op2 = XEXP (value, 1);
5656 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5657 subtarget = 0;
5658 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5660 code = PLUS;
5661 op2 = negate_rtx (GET_MODE (value), op2);
5664 /* Check for an addition with OP2 a constant integer and our first
5665 operand a PLUS of a virtual register and something else. In that
5666 case, we want to emit the sum of the virtual register and the
5667 constant first and then add the other value. This allows virtual
5668 register instantiation to simply modify the constant rather than
5669 creating another one around this addition. */
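/* E.g. for (plus (plus (reg virtual-stack-vars) (reg 117)) (const_int 8))
   we first form virtual-stack-vars + 8, which instantiation can fold into
   a single frame-pointer offset, and only then add (reg 117).  The
   register numbers here are purely illustrative.  */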
5670 if (code == PLUS && GET_CODE (op2) == CONST_INT
5671 && GET_CODE (XEXP (value, 0)) == PLUS
5672 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5673 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5674 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5676 rtx temp = expand_simple_binop (GET_MODE (value), code,
5677 XEXP (XEXP (value, 0), 0), op2,
5678 subtarget, 0, OPTAB_LIB_WIDEN);
5679 return expand_simple_binop (GET_MODE (value), code, temp,
5680 force_operand (XEXP (XEXP (value,
5681 0), 1), 0),
5682 target, 0, OPTAB_LIB_WIDEN);
5685 op1 = force_operand (XEXP (value, 0), subtarget);
5686 op2 = force_operand (op2, NULL_RTX);
5687 switch (code)
5689 case MULT:
5690 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5691 case DIV:
5692 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5693 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5694 target, 1, OPTAB_LIB_WIDEN);
5695 else
5696 return expand_divmod (0,
5697 FLOAT_MODE_P (GET_MODE (value))
5698 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5699 GET_MODE (value), op1, op2, target, 0);
5700 break;
5701 case MOD:
5702 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5703 target, 0);
5704 break;
5705 case UDIV:
5706 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5707 target, 1);
5708 break;
5709 case UMOD:
5710 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5711 target, 1);
5712 break;
5713 case ASHIFTRT:
5714 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5715 target, 0, OPTAB_LIB_WIDEN);
5716 break;
5717 default:
5718 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5719 target, 1, OPTAB_LIB_WIDEN);
5722 if (UNARY_P (value))
5724 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5725 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5728 #ifdef INSN_SCHEDULING
5729 /* On machines that have insn scheduling, we want all memory references to be
5730 explicit, so we need to deal with paradoxical SUBREGs of MEM here. */
5731 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5732 && (GET_MODE_SIZE (GET_MODE (value))
5733 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5734 value
5735 = simplify_gen_subreg (GET_MODE (value),
5736 force_reg (GET_MODE (SUBREG_REG (value)),
5737 force_operand (SUBREG_REG (value),
5738 NULL_RTX)),
5739 GET_MODE (SUBREG_REG (value)),
5740 SUBREG_BYTE (value));
5741 #endif
5743 return value;
5746 /* Subroutine of expand_expr: return nonzero iff there is no way that
5747 EXP can reference X, which is being modified. TOP_P is nonzero if this
5748 call is going to be used to determine whether we need a temporary
5749 for EXP, as opposed to a recursive call to this function.
5751 It is always safe for this routine to return zero since it merely
5752 searches for optimization opportunities. */
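/* For instance, expand_operands below drops its TARGET as a scratch when
   safe_from_p cannot show that the second operand is independent of it.  */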
5755 safe_from_p (rtx x, tree exp, int top_p)
5757 rtx exp_rtl = 0;
5758 int i, nops;
5759 static tree save_expr_list;
5761 if (x == 0
5762 /* If EXP has varying size, we MUST use a target since we currently
5763 have no way of allocating temporaries of variable size
5764 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5765 So we assume here that something at a higher level has prevented a
5766 clash. This is somewhat bogus, but the best we can do. Only
5767 do this when X is BLKmode and when we are at the top level. */
5768 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5769 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5770 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5771 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5772 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5773 != INTEGER_CST)
5774 && GET_MODE (x) == BLKmode)
5775 /* If X is in the outgoing argument area, it is always safe. */
5776 || (GET_CODE (x) == MEM
5777 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5778 || (GET_CODE (XEXP (x, 0)) == PLUS
5779 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5780 return 1;
5782 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5783 find the underlying pseudo. */
5784 if (GET_CODE (x) == SUBREG)
5786 x = SUBREG_REG (x);
5787 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5788 return 0;
5791 /* A SAVE_EXPR might appear many times in the expression passed to the
5792 top-level safe_from_p call, and if it has a complex subexpression,
5793 examining it multiple times could result in a combinatorial explosion.
5794 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5795 with optimization took about 28 minutes to compile -- even though it was
5796 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5797 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5798 we have processed. Note that the only test of top_p was above. */
5800 if (top_p)
5802 int rtn;
5803 tree t;
5805 save_expr_list = 0;
5807 rtn = safe_from_p (x, exp, 0);
5809 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5810 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5812 return rtn;
5815 /* Now look at our tree code and possibly recurse. */
5816 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5818 case 'd':
5819 exp_rtl = DECL_RTL_IF_SET (exp);
5820 break;
5822 case 'c':
5823 return 1;
5825 case 'x':
5826 if (TREE_CODE (exp) == TREE_LIST)
5828 while (1)
5830 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5831 return 0;
5832 exp = TREE_CHAIN (exp);
5833 if (!exp)
5834 return 1;
5835 if (TREE_CODE (exp) != TREE_LIST)
5836 return safe_from_p (x, exp, 0);
5839 else if (TREE_CODE (exp) == ERROR_MARK)
5840 return 1; /* An already-visited SAVE_EXPR? */
5841 else
5842 return 0;
5844 case '2':
5845 case '<':
5846 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5847 return 0;
5848 /* Fall through. */
5850 case '1':
5851 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5853 case 'e':
5854 case 'r':
5855 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5856 the expression. If it is set, we conflict iff we are that rtx or
5857 both are in memory. Otherwise, we check all operands of the
5858 expression recursively. */
5860 switch (TREE_CODE (exp))
5862 case ADDR_EXPR:
5863 /* If the operand is static or we are static, we can't conflict.
5864 Likewise if we don't conflict with the operand at all. */
5865 if (staticp (TREE_OPERAND (exp, 0))
5866 || TREE_STATIC (exp)
5867 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5868 return 1;
5870 /* Otherwise, the only way this can conflict is if we are taking
5871 the address of a DECL whose address is part of X, which is
5872 very rare. */
5873 exp = TREE_OPERAND (exp, 0);
5874 if (DECL_P (exp))
5876 if (!DECL_RTL_SET_P (exp)
5877 || GET_CODE (DECL_RTL (exp)) != MEM)
5878 return 0;
5879 else
5880 exp_rtl = XEXP (DECL_RTL (exp), 0);
5882 break;
5884 case INDIRECT_REF:
5885 if (GET_CODE (x) == MEM
5886 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5887 get_alias_set (exp)))
5888 return 0;
5889 break;
5891 case CALL_EXPR:
5892 /* Assume that the call will clobber all hard registers and
5893 all of memory. */
5894 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5895 || GET_CODE (x) == MEM)
5896 return 0;
5897 break;
5899 case RTL_EXPR:
5900 /* If a sequence exists, we would have to scan every instruction
5901 in the sequence to see if it was safe. This is probably not
5902 worthwhile. */
5903 if (RTL_EXPR_SEQUENCE (exp))
5904 return 0;
5906 exp_rtl = RTL_EXPR_RTL (exp);
5907 break;
5909 case WITH_CLEANUP_EXPR:
5910 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5911 break;
5913 case CLEANUP_POINT_EXPR:
5914 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5916 case SAVE_EXPR:
5917 exp_rtl = SAVE_EXPR_RTL (exp);
5918 if (exp_rtl)
5919 break;
5921 /* If we've already scanned this, don't do it again. Otherwise,
5922 show we've scanned it and record it so the flag can be cleared
5923 when we're done. */
5924 if (TREE_PRIVATE (exp))
5925 return 1;
5927 TREE_PRIVATE (exp) = 1;
5928 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5930 TREE_PRIVATE (exp) = 0;
5931 return 0;
5934 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5935 return 1;
5937 case BIND_EXPR:
5938 /* The only operand we look at is operand 1. The rest aren't
5939 part of the expression. */
5940 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5942 default:
5943 break;
5946 /* If we have an rtx, we do not need to scan our operands. */
5947 if (exp_rtl)
5948 break;
5950 nops = first_rtl_op (TREE_CODE (exp));
5951 for (i = 0; i < nops; i++)
5952 if (TREE_OPERAND (exp, i) != 0
5953 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5954 return 0;
5956 /* If this is a language-specific tree code, it may require
5957 special handling. */
5958 if ((unsigned int) TREE_CODE (exp)
5959 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5960 && !lang_hooks.safe_from_p (x, exp))
5961 return 0;
5964 /* If we have an rtl, find any enclosed object. Then see if we conflict
5965 with it. */
5966 if (exp_rtl)
5968 if (GET_CODE (exp_rtl) == SUBREG)
5970 exp_rtl = SUBREG_REG (exp_rtl);
5971 if (GET_CODE (exp_rtl) == REG
5972 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5973 return 0;
5976 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5977 are memory and they conflict. */
5978 return ! (rtx_equal_p (x, exp_rtl)
5979 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5980 && true_dependence (exp_rtl, VOIDmode, x,
5981 rtx_addr_varies_p)));
5984 /* If we reach here, it is safe. */
5985 return 1;
5988 /* Subroutine of expand_expr: return rtx if EXP is a
5989 variable or parameter; else return 0. */
5991 static rtx
5992 var_rtx (tree exp)
5994 STRIP_NOPS (exp);
5995 switch (TREE_CODE (exp))
5997 case PARM_DECL:
5998 case VAR_DECL:
5999 return DECL_RTL (exp);
6000 default:
6001 return 0;
6005 /* Return the highest power of two that EXP is known to be a multiple of.
6006 This is used in updating alignment of MEMs in array references. */
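/* Some worked examples with hypothetical operands: for the constant 12
   the result is 4 (its lowest set bit); for I * 12 with I not constant
   it is 1 * 4 == 4; for I * 8 + 16 it is MIN (8, 16) == 8.  */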
6008 static unsigned HOST_WIDE_INT
6009 highest_pow2_factor (tree exp)
6011 unsigned HOST_WIDE_INT c0, c1;
6013 switch (TREE_CODE (exp))
6015 case INTEGER_CST:
6016 /* We can find the lowest bit that's a one. If the low
6017 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6018 We need to handle this case since we can find it in a COND_EXPR,
6019 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6020 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6021 later ICE. */
6022 if (TREE_CONSTANT_OVERFLOW (exp))
6023 return BIGGEST_ALIGNMENT;
6024 else
6026 /* Note: tree_low_cst is intentionally not used here, since
6027 we don't care about the upper bits. */
6028 c0 = TREE_INT_CST_LOW (exp);
6029 c0 &= -c0;
6030 return c0 ? c0 : BIGGEST_ALIGNMENT;
6032 break;
6034 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6035 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6036 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6037 return MIN (c0, c1);
6039 case MULT_EXPR:
6040 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6041 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6042 return c0 * c1;
6044 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6045 case CEIL_DIV_EXPR:
6046 if (integer_pow2p (TREE_OPERAND (exp, 1))
6047 && host_integerp (TREE_OPERAND (exp, 1), 1))
6049 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6050 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6051 return MAX (1, c0 / c1);
6053 break;
6055 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6056 case SAVE_EXPR:
6057 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6059 case COMPOUND_EXPR:
6060 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6062 case COND_EXPR:
6063 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6064 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6065 return MIN (c0, c1);
6067 default:
6068 break;
6071 return 1;
6074 /* Similar, except that the alignment requirements of TARGET are
6075 taken into account. Assume it is at least as aligned as its
6076 type, unless it is a COMPONENT_REF in which case the layout of
6077 the structure gives the alignment. */
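/* E.g. if EXP only guarantees a factor of 2 but TARGET's type is aligned
   to 64 bits, the result is MAX (2, 8) == 8 bytes (figures illustrative).  */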
6079 static unsigned HOST_WIDE_INT
6080 highest_pow2_factor_for_target (tree target, tree exp)
6082 unsigned HOST_WIDE_INT target_align, factor;
6084 factor = highest_pow2_factor (exp);
6085 if (TREE_CODE (target) == COMPONENT_REF)
6086 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6087 else
6088 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6089 return MAX (factor, target_align);
6092 /* Subroutine of expand_expr. Expand the two operands of a binary
6093 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6094 The value may be stored in TARGET if TARGET is nonzero. The
6095 MODIFIER argument is as documented by expand_expr. */
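/* For example, when the two operands are equal (as in X + X), the
   operand_equal_p test below expands the expression only once and *OP1
   is just a copy_rtx of *OP0.  */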
6097 static void
6098 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6099 enum expand_modifier modifier)
6101 if (! safe_from_p (target, exp1, 1))
6102 target = 0;
6103 if (operand_equal_p (exp0, exp1, 0))
6105 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6106 *op1 = copy_rtx (*op0);
6108 else
6110 /* If we need to preserve evaluation order, copy exp0 into its own
6111 temporary variable so that it can't be clobbered by exp1. */
6112 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6113 exp0 = save_expr (exp0);
6114 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6115 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6120 /* expand_expr: generate code for computing expression EXP.
6121 An rtx for the computed value is returned. The value is never null.
6122 In the case of a void EXP, const0_rtx is returned.
6124 The value may be stored in TARGET if TARGET is nonzero.
6125 TARGET is just a suggestion; callers must assume that
6126 the rtx returned may not be the same as TARGET.
6128 If TARGET is CONST0_RTX, it means that the value will be ignored.
6130 If TMODE is not VOIDmode, it suggests generating the
6131 result in mode TMODE. But this is done only when convenient.
6132 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6133 TMODE is just a suggestion; callers must assume that
6134 the rtx returned may not have mode TMODE.
6136 Note that TARGET may have neither TMODE nor MODE. In that case, it
6137 probably will not be used.
6139 If MODIFIER is EXPAND_SUM then when EXP is an addition
6140 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6141 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6142 products as above, or REG or MEM, or constant.
6143 Ordinarily in such cases we would output mul or add instructions
6144 and then return a pseudo reg containing the sum.
6146 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6147 it also marks a label as absolutely required (it can't be dead).
6148 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6149 This is used for outputting expressions used in initializers.
6151 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6152 with a constant address even if that address is not normally legitimate.
6153 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6155 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6156 a call parameter. Such targets require special care as we haven't yet
6157 marked TARGET so that it's safe from being trashed by libcalls. We
6158 don't want to use TARGET for anything but the final result;
6159 intermediate values must go elsewhere. Additionally, calls to
6160 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6162 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6163 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6164 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6165 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6166 recursively. */
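/* Most callers reach this through the expand_expr wrapper (presumably
   defined in expr.h), which simply passes a null ALT_RTL here.  */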
6169 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6170 enum expand_modifier modifier, rtx *alt_rtl)
6172 rtx op0, op1, temp;
6173 tree type = TREE_TYPE (exp);
6174 int unsignedp;
6175 enum machine_mode mode;
6176 enum tree_code code = TREE_CODE (exp);
6177 optab this_optab;
6178 rtx subtarget, original_target;
6179 int ignore;
6180 tree context;
6182 /* Handle ERROR_MARK before anybody tries to access its type. */
6183 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6185 op0 = CONST0_RTX (tmode);
6186 if (op0 != 0)
6187 return op0;
6188 return const0_rtx;
6191 mode = TYPE_MODE (type);
6192 unsignedp = TYPE_UNSIGNED (type);
6194 /* Use subtarget as the target for operand 0 of a binary operation. */
6195 subtarget = get_subtarget (target);
6196 original_target = target;
6197 ignore = (target == const0_rtx
6198 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6199 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6200 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6201 && TREE_CODE (type) == VOID_TYPE));
6203 /* If we are going to ignore this result, we need only do something
6204 if there is a side-effect somewhere in the expression. If there
6205 is, short-circuit the most common cases here. Note that we must
6206 not call expand_expr with anything but const0_rtx in case this
6207 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6209 if (ignore)
6211 if (! TREE_SIDE_EFFECTS (exp))
6212 return const0_rtx;
6214 /* Ensure we reference a volatile object even if value is ignored, but
6215 don't do this if all we are doing is taking its address. */
6216 if (TREE_THIS_VOLATILE (exp)
6217 && TREE_CODE (exp) != FUNCTION_DECL
6218 && mode != VOIDmode && mode != BLKmode
6219 && modifier != EXPAND_CONST_ADDRESS)
6221 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6222 if (GET_CODE (temp) == MEM)
6223 temp = copy_to_reg (temp);
6224 return const0_rtx;
6227 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6228 || code == INDIRECT_REF || code == BUFFER_REF)
6229 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6230 modifier);
6232 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6233 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6235 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6236 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6237 return const0_rtx;
6239 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6240 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6241 /* If the second operand has no side effects, just evaluate
6242 the first. */
6243 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6244 modifier);
6245 else if (code == BIT_FIELD_REF)
6247 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6248 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6249 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6250 return const0_rtx;
6253 target = 0;
6256 /* If we will do cse, generate all results into pseudo registers
6257 since 1) that allows cse to find more things
6258 and 2) otherwise cse could produce an insn the machine
6259 cannot support. An exception is a CONSTRUCTOR into a multi-word
6260 MEM: that's much more likely to be most efficient into the MEM.
6261 Another is a CALL_EXPR which must return in memory. */
6263 if (! cse_not_expected && mode != BLKmode && target
6264 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6265 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6266 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6267 target = 0;
6269 switch (code)
6271 case LABEL_DECL:
6273 tree function = decl_function_context (exp);
6274 /* Labels in containing functions, or labels used from initializers,
6275 must be forced. */
6276 if (modifier == EXPAND_INITIALIZER
6277 || (function != current_function_decl
6278 && function != inline_function_decl
6279 && function != 0))
6280 temp = force_label_rtx (exp);
6281 else
6282 temp = label_rtx (exp);
6284 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6285 if (function != current_function_decl
6286 && function != inline_function_decl && function != 0)
6287 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6288 return temp;
6291 case PARM_DECL:
6292 if (!DECL_RTL_SET_P (exp))
6294 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6295 return CONST0_RTX (mode);
6298 /* ... fall through ... */
6300 case VAR_DECL:
6301 /* If a static var's type was incomplete when the decl was written,
6302 but the type is complete now, lay out the decl now. */
6303 if (DECL_SIZE (exp) == 0
6304 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6305 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6306 layout_decl (exp, 0);
6308 /* ... fall through ... */
6310 case FUNCTION_DECL:
6311 case RESULT_DECL:
6312 if (DECL_RTL (exp) == 0)
6313 abort ();
6315 /* Ensure the variable is marked as used even if it doesn't go through
6316 a parser. If it hasn't been used yet, write out an external
6317 definition. */
6318 if (! TREE_USED (exp))
6320 assemble_external (exp);
6321 TREE_USED (exp) = 1;
6324 /* Show we haven't gotten RTL for this yet. */
6325 temp = 0;
6327 /* Handle variables inherited from containing functions. */
6328 context = decl_function_context (exp);
6330 /* We treat inline_function_decl as an alias for the current function
6331 because that is the inline function whose vars, types, etc.
6332 are being merged into the current function.
6333 See expand_inline_function. */
6335 if (context != 0 && context != current_function_decl
6336 && context != inline_function_decl
6337 /* If var is static, we don't need a static chain to access it. */
6338 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6339 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6341 rtx addr;
6343 /* Mark as non-local and addressable. */
6344 DECL_NONLOCAL (exp) = 1;
6345 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6346 abort ();
6347 lang_hooks.mark_addressable (exp);
6348 if (GET_CODE (DECL_RTL (exp)) != MEM)
6349 abort ();
6350 addr = XEXP (DECL_RTL (exp), 0);
6351 if (GET_CODE (addr) == MEM)
6352 addr
6353 = replace_equiv_address (addr,
6354 fix_lexical_addr (XEXP (addr, 0), exp));
6355 else
6356 addr = fix_lexical_addr (addr, exp);
6358 temp = replace_equiv_address (DECL_RTL (exp), addr);
6361 /* This is the case of an array whose size is to be determined
6362 from its initializer, while the initializer is still being parsed.
6363 See expand_decl. */
6365 else if (GET_CODE (DECL_RTL (exp)) == MEM
6366 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6367 temp = validize_mem (DECL_RTL (exp));
6369 /* If DECL_RTL is memory, we are in the normal case; if either
6370 the address is not valid, or it is not a register and -fforce-addr
6371 is specified, get the address into a register. */
6373 else if (GET_CODE (DECL_RTL (exp)) == MEM
6374 && modifier != EXPAND_CONST_ADDRESS
6375 && modifier != EXPAND_SUM
6376 && modifier != EXPAND_INITIALIZER
6377 && (! memory_address_p (DECL_MODE (exp),
6378 XEXP (DECL_RTL (exp), 0))
6379 || (flag_force_addr
6380 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6382 if (alt_rtl)
6383 *alt_rtl = DECL_RTL (exp);
6384 temp = replace_equiv_address (DECL_RTL (exp),
6385 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6388 /* If we got something, return it. But first, set the alignment
6389 if the address is a register. */
6390 if (temp != 0)
6392 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6393 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6395 return temp;
6398 /* If the mode of DECL_RTL does not match that of the decl, it
6399 must be a promoted value. We return a SUBREG of the wanted mode,
6400 but mark it so that we know that it was already extended. */
6402 if (GET_CODE (DECL_RTL (exp)) == REG
6403 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6405 /* Get the signedness used for this variable. Ensure we get the
6406 same mode we got when the variable was declared. */
6407 if (GET_MODE (DECL_RTL (exp))
6408 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6409 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6410 abort ();
6412 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6413 SUBREG_PROMOTED_VAR_P (temp) = 1;
6414 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6415 return temp;
6418 return DECL_RTL (exp);
6420 case INTEGER_CST:
6421 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6422 TREE_INT_CST_HIGH (exp), mode);
6424 /* ??? If overflow is set, fold will have done an incomplete job,
6425 which can result in (plus xx (const_int 0)), which can get
6426 simplified by validate_replace_rtx during virtual register
6427 instantiation, which can result in unrecognizable insns.
6428 Avoid this by forcing all overflows into registers. */
6429 if (TREE_CONSTANT_OVERFLOW (exp)
6430 && modifier != EXPAND_INITIALIZER)
6431 temp = force_reg (mode, temp);
6433 return temp;
6435 case VECTOR_CST:
6436 return const_vector_from_tree (exp);
6438 case CONST_DECL:
6439 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6441 case REAL_CST:
6442 /* If optimized, generate immediate CONST_DOUBLE
6443 which will be turned into memory by reload if necessary.
6445 We used to force a register so that loop.c could see it. But
6446 this does not allow gen_* patterns to perform optimizations with
6447 the constants. It also produces two insns in cases like "x = 1.0;".
6448 On most machines, floating-point constants are not permitted in
6449 many insns, so we'd end up copying it to a register in any case.
6451 Now, we do the copying in expand_binop, if appropriate. */
6452 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6453 TYPE_MODE (TREE_TYPE (exp)));
6455 case COMPLEX_CST:
6456 /* Handle evaluating a complex constant in a CONCAT target. */
6457 if (original_target && GET_CODE (original_target) == CONCAT)
6459 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6460 rtx rtarg, itarg;
6462 rtarg = XEXP (original_target, 0);
6463 itarg = XEXP (original_target, 1);
6465 /* Move the real and imaginary parts separately. */
6466 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6467 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6469 if (op0 != rtarg)
6470 emit_move_insn (rtarg, op0);
6471 if (op1 != itarg)
6472 emit_move_insn (itarg, op1);
6474 return original_target;
6477 /* ... fall through ... */
6479 case STRING_CST:
6480 temp = output_constant_def (exp, 1);
6482 /* temp contains a constant address.
6483 On RISC machines where a constant address isn't valid,
6484 make some insns to get that address into a register. */
6485 if (modifier != EXPAND_CONST_ADDRESS
6486 && modifier != EXPAND_INITIALIZER
6487 && modifier != EXPAND_SUM
6488 && (! memory_address_p (mode, XEXP (temp, 0))
6489 || flag_force_addr))
6490 return replace_equiv_address (temp,
6491 copy_rtx (XEXP (temp, 0)));
6492 return temp;
6494 case EXPR_WITH_FILE_LOCATION:
6496 rtx to_return;
6497 struct file_stack fs;
6499 fs.location = input_location;
6500 fs.next = expr_wfl_stack;
6501 input_filename = EXPR_WFL_FILENAME (exp);
6502 input_line = EXPR_WFL_LINENO (exp);
6503 expr_wfl_stack = &fs;
6504 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6505 emit_line_note (input_location);
6506 /* Possibly avoid switching back and forth here. */
6507 to_return = expand_expr (EXPR_WFL_NODE (exp),
6508 (ignore ? const0_rtx : target),
6509 tmode, modifier);
6510 if (expr_wfl_stack != &fs)
6511 abort ();
6512 input_location = fs.location;
6513 expr_wfl_stack = fs.next;
6514 return to_return;
6517 case SAVE_EXPR:
6518 context = decl_function_context (exp);
6520 /* If this SAVE_EXPR was at global context, assume we are an
6521 initialization function and move it into our context. */
6522 if (context == 0)
6523 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6525 /* We treat inline_function_decl as an alias for the current function
6526 because that is the inline function whose vars, types, etc.
6527 are being merged into the current function.
6528 See expand_inline_function. */
6529 if (context == current_function_decl || context == inline_function_decl)
6530 context = 0;
6532 /* If this is non-local, handle it. */
6533 if (context)
6535 /* The following call just exists to abort if the context is
6536 not of a containing function. */
6537 find_function_data (context);
6539 temp = SAVE_EXPR_RTL (exp);
6540 if (temp && GET_CODE (temp) == REG)
6542 put_var_into_stack (exp, /*rescan=*/true);
6543 temp = SAVE_EXPR_RTL (exp);
6545 if (temp == 0 || GET_CODE (temp) != MEM)
6546 abort ();
6547 return
6548 replace_equiv_address (temp,
6549 fix_lexical_addr (XEXP (temp, 0), exp));
6551 if (SAVE_EXPR_RTL (exp) == 0)
6553 if (mode == VOIDmode)
6554 temp = const0_rtx;
6555 else
6556 temp = assign_temp (build_qualified_type (type,
6557 (TYPE_QUALS (type)
6558 | TYPE_QUAL_CONST)),
6559 3, 0, 0);
6561 SAVE_EXPR_RTL (exp) = temp;
6562 if (!optimize && GET_CODE (temp) == REG)
6563 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6564 save_expr_regs);
6566 /* If the mode of TEMP does not match that of the expression, it
6567 must be a promoted value. We pass store_expr a SUBREG of the
6568 wanted mode but mark it so that we know that it was already
6569 extended. */
6571 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6573 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6574 promote_mode (type, mode, &unsignedp, 0);
6575 SUBREG_PROMOTED_VAR_P (temp) = 1;
6576 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6579 if (temp == const0_rtx)
6580 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6581 else
6582 store_expr (TREE_OPERAND (exp, 0), temp,
6583 modifier == EXPAND_STACK_PARM ? 2 : 0);
6585 TREE_USED (exp) = 1;
6588 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6589 must be a promoted value. We return a SUBREG of the wanted mode,
6590 but mark it so that we know that it was already extended. */
6592 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6593 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6595 /* Compute the signedness and make the proper SUBREG. */
6596 promote_mode (type, mode, &unsignedp, 0);
6597 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6598 SUBREG_PROMOTED_VAR_P (temp) = 1;
6599 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6600 return temp;
6603 return SAVE_EXPR_RTL (exp);
6605 case UNSAVE_EXPR:
6607 rtx temp;
6608 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6609 TREE_OPERAND (exp, 0)
6610 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6611 return temp;
6614 case GOTO_EXPR:
6615 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6616 expand_goto (TREE_OPERAND (exp, 0));
6617 else
6618 expand_computed_goto (TREE_OPERAND (exp, 0));
6619 return const0_rtx;
6621 case EXIT_EXPR:
6622 expand_exit_loop_if_false (NULL,
6623 invert_truthvalue (TREE_OPERAND (exp, 0)));
6624 return const0_rtx;
6626 case LABELED_BLOCK_EXPR:
6627 if (LABELED_BLOCK_BODY (exp))
6628 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6629 /* Should perhaps use expand_label, but this is simpler and safer. */
6630 do_pending_stack_adjust ();
6631 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6632 return const0_rtx;
6634 case EXIT_BLOCK_EXPR:
6635 if (EXIT_BLOCK_RETURN (exp))
6636 sorry ("returned value in block_exit_expr");
6637 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6638 return const0_rtx;
6640 case LOOP_EXPR:
6641 push_temp_slots ();
6642 expand_start_loop (1);
6643 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6644 expand_end_loop ();
6645 pop_temp_slots ();
6647 return const0_rtx;
6649 case BIND_EXPR:
6651 tree vars = TREE_OPERAND (exp, 0);
6653 /* Need to open a binding contour here because
6654 if there are any cleanups they must be contained here. */
6655 expand_start_bindings (2);
6657 /* Mark the corresponding BLOCK for output in its proper place. */
6658 if (TREE_OPERAND (exp, 2) != 0
6659 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6660 lang_hooks.decls.insert_block (TREE_OPERAND (exp, 2));
6662 /* If VARS have not yet been expanded, expand them now. */
6663 while (vars)
6665 if (!DECL_RTL_SET_P (vars))
6666 expand_decl (vars);
6667 expand_decl_init (vars);
6668 vars = TREE_CHAIN (vars);
6671 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6673 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6675 return temp;
6678 case RTL_EXPR:
6679 if (RTL_EXPR_SEQUENCE (exp))
6681 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6682 abort ();
6683 emit_insn (RTL_EXPR_SEQUENCE (exp));
6684 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6686 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6687 free_temps_for_rtl_expr (exp);
6688 if (alt_rtl)
6689 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6690 return RTL_EXPR_RTL (exp);
6692 case CONSTRUCTOR:
6693 /* If we don't need the result, just ensure we evaluate any
6694 subexpressions. */
6695 if (ignore)
6697 tree elt;
6699 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6700 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6702 return const0_rtx;
6705 /* All elts simple constants => refer to a constant in memory. But
6706 if this is a non-BLKmode mode, let it store a field at a time
6707 since that should make a CONST_INT or CONST_DOUBLE when we
6708 fold. Likewise, if we have a target we can use, it is best to
6709 store directly into the target unless the type is large enough
6710 that memcpy will be used. If we are making an initializer and
6711 all operands are constant, put it in memory as well.
6713 FIXME: Avoid trying to fill vector constructors piece-meal.
6714 Output them with output_constant_def below unless we're sure
6715 they're zeros. This should go away when vector initializers
6716 are treated like VECTOR_CST instead of arrays. */
6718 else if ((TREE_STATIC (exp)
6719 && ((mode == BLKmode
6720 && ! (target != 0 && safe_from_p (target, exp, 1)))
6721 || TREE_ADDRESSABLE (exp)
6722 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6723 && (! MOVE_BY_PIECES_P
6724 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6725 TYPE_ALIGN (type)))
6726 && ((TREE_CODE (type) == VECTOR_TYPE
6727 && !is_zeros_p (exp))
6728 || ! mostly_zeros_p (exp)))))
6729 || ((modifier == EXPAND_INITIALIZER
6730 || modifier == EXPAND_CONST_ADDRESS)
6731 && TREE_CONSTANT (exp)))
6733 rtx constructor = output_constant_def (exp, 1);
6735 if (modifier != EXPAND_CONST_ADDRESS
6736 && modifier != EXPAND_INITIALIZER
6737 && modifier != EXPAND_SUM)
6738 constructor = validize_mem (constructor);
6740 return constructor;
6742 else
6744 /* Handle calls that pass values in multiple non-contiguous
6745 locations. The Irix 6 ABI has examples of this. */
6746 if (target == 0 || ! safe_from_p (target, exp, 1)
6747 || GET_CODE (target) == PARALLEL
6748 || modifier == EXPAND_STACK_PARM)
6749 target
6750 = assign_temp (build_qualified_type (type,
6751 (TYPE_QUALS (type)
6752 | (TREE_READONLY (exp)
6753 * TYPE_QUAL_CONST))),
6754 0, TREE_ADDRESSABLE (exp), 1);
6756 store_constructor (exp, target, 0,
6757 int_expr_size (exp) * BITS_PER_UNIT);
6758 return target;
6761 case INDIRECT_REF:
6763 tree exp1 = TREE_OPERAND (exp, 0);
6764 tree index;
6765 tree string = string_constant (exp1, &index);
6767 /* Try to optimize reads from const strings. */
6768 if (string
6769 && TREE_CODE (string) == STRING_CST
6770 && TREE_CODE (index) == INTEGER_CST
6771 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6772 && GET_MODE_CLASS (mode) == MODE_INT
6773 && GET_MODE_SIZE (mode) == 1
6774 && modifier != EXPAND_WRITE)
6775 return gen_int_mode (TREE_STRING_POINTER (string)
6776 [TREE_INT_CST_LOW (index)], mode);
6778 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6779 op0 = memory_address (mode, op0);
6780 temp = gen_rtx_MEM (mode, op0);
6781 set_mem_attributes (temp, exp, 0);
6783 /* If we are writing to this object and its type is a record with
6784 readonly fields, we must mark it as readonly so it will
6785 conflict with readonly references to those fields. */
6786 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6787 RTX_UNCHANGING_P (temp) = 1;
6789 return temp;
6792 case ARRAY_REF:
6793 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6794 abort ();
6797 tree array = TREE_OPERAND (exp, 0);
6798 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6799 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6800 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6801 HOST_WIDE_INT i;
6803 /* Optimize the special-case of a zero lower bound.
6805 We convert the low_bound to sizetype to avoid some problems
6806 with constant folding. (E.g. suppose the lower bound is 1,
6807 and its mode is QI. Without the conversion, (ARRAY
6808 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6809 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6811 if (! integer_zerop (low_bound))
6812 index = size_diffop (index, convert (sizetype, low_bound));
6814 /* Fold an expression like: "foo"[2].
6815 This is not done in fold so it won't happen inside &.
6816 Don't fold if this is for wide characters since it's too
6817 difficult to do correctly and this is a very rare case. */
6819 if (modifier != EXPAND_CONST_ADDRESS
6820 && modifier != EXPAND_INITIALIZER
6821 && modifier != EXPAND_MEMORY
6822 && TREE_CODE (array) == STRING_CST
6823 && TREE_CODE (index) == INTEGER_CST
6824 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6825 && GET_MODE_CLASS (mode) == MODE_INT
6826 && GET_MODE_SIZE (mode) == 1)
6827 return gen_int_mode (TREE_STRING_POINTER (array)
6828 [TREE_INT_CST_LOW (index)], mode);
6830 /* If this is a constant index into a constant array,
6831 just get the value from the array. Handle both the cases when
6832 we have an explicit constructor and when our operand is a variable
6833 that was declared const. */
6835 if (modifier != EXPAND_CONST_ADDRESS
6836 && modifier != EXPAND_INITIALIZER
6837 && modifier != EXPAND_MEMORY
6838 && TREE_CODE (array) == CONSTRUCTOR
6839 && ! TREE_SIDE_EFFECTS (array)
6840 && TREE_CODE (index) == INTEGER_CST
6841 && 0 > compare_tree_int (index,
6842 list_length (CONSTRUCTOR_ELTS
6843 (TREE_OPERAND (exp, 0)))))
6845 tree elem;
6847 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6848 i = TREE_INT_CST_LOW (index);
6849 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6852 if (elem)
6853 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6854 modifier);
6857 else if (optimize >= 1
6858 && modifier != EXPAND_CONST_ADDRESS
6859 && modifier != EXPAND_INITIALIZER
6860 && modifier != EXPAND_MEMORY
6861 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6862 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6863 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6864 && targetm.binds_local_p (array))
6866 if (TREE_CODE (index) == INTEGER_CST)
6868 tree init = DECL_INITIAL (array);
6870 if (TREE_CODE (init) == CONSTRUCTOR)
6872 tree elem;
6874 for (elem = CONSTRUCTOR_ELTS (init);
6875 (elem
6876 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6877 elem = TREE_CHAIN (elem))
6880 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6881 return expand_expr (fold (TREE_VALUE (elem)), target,
6882 tmode, modifier);
6884 else if (TREE_CODE (init) == STRING_CST
6885 && 0 > compare_tree_int (index,
6886 TREE_STRING_LENGTH (init)))
6888 tree type = TREE_TYPE (TREE_TYPE (init));
6889 enum machine_mode mode = TYPE_MODE (type);
6891 if (GET_MODE_CLASS (mode) == MODE_INT
6892 && GET_MODE_SIZE (mode) == 1)
6893 return gen_int_mode (TREE_STRING_POINTER (init)
6894 [TREE_INT_CST_LOW (index)], mode);
6899 goto normal_inner_ref;
6901 case COMPONENT_REF:
6902 /* If the operand is a CONSTRUCTOR, we can just extract the
6903 appropriate field if it is present. */
6904 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6906 tree elt;
6908 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6909 elt = TREE_CHAIN (elt))
6910 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6911 /* We can normally use the value of the field in the
6912 CONSTRUCTOR. However, if this is a bitfield in
6913 an integral mode that we can fit in a HOST_WIDE_INT,
6914 we must mask only the number of bits in the bitfield,
6915 since this is done implicitly by the constructor. If
6916 the bitfield does not meet either of those conditions,
6917 we can't do this optimization. */
6918 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6919 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6920 == MODE_INT)
6921 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6922 <= HOST_BITS_PER_WIDE_INT))))
6924 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6925 && modifier == EXPAND_STACK_PARM)
6926 target = 0;
6927 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6928 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6930 HOST_WIDE_INT bitsize
6931 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6932 enum machine_mode imode
6933 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6935 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6937 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6938 op0 = expand_and (imode, op0, op1, target);
6940 else
6942 tree count
6943 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6946 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6947 target, 0);
6948 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6949 target, 0);
6953 return op0;
6956 goto normal_inner_ref;
6958 case BIT_FIELD_REF:
6959 case ARRAY_RANGE_REF:
6960 normal_inner_ref:
6962 enum machine_mode mode1;
6963 HOST_WIDE_INT bitsize, bitpos;
6964 tree offset;
6965 int volatilep = 0;
6966 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6967 &mode1, &unsignedp, &volatilep);
6968 rtx orig_op0;
6970 /* If we got back the original object, something is wrong. Perhaps
6971 we are evaluating an expression too early. In any event, don't
6972 infinitely recurse. */
6973 if (tem == exp)
6974 abort ();
6976 /* If TEM's type is a union of variable size, pass TARGET to the inner
6977 computation, since it will need a temporary and TARGET is known
6978 to be usable as one. This occurs in unchecked conversion in Ada. */
6980 orig_op0 = op0
6981 = expand_expr (tem,
6982 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6983 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6984 != INTEGER_CST)
6985 && modifier != EXPAND_STACK_PARM
6986 ? target : NULL_RTX),
6987 VOIDmode,
6988 (modifier == EXPAND_INITIALIZER
6989 || modifier == EXPAND_CONST_ADDRESS
6990 || modifier == EXPAND_STACK_PARM)
6991 ? modifier : EXPAND_NORMAL);
6993 /* If this is a constant, put it into a register if it is a
6994 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6995 if (CONSTANT_P (op0))
6997 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6998 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6999 && offset == 0)
7000 op0 = force_reg (mode, op0);
7001 else
7002 op0 = validize_mem (force_const_mem (mode, op0));
7005 /* Otherwise, if this object is not in memory and we either have an
7006 offset or a BLKmode result, put it there. This case can't occur in
7007 C, but can in Ada if we have unchecked conversion of an expression
7008 from a scalar type to an array or record type or for an
7009 ARRAY_RANGE_REF whose type is BLKmode. */
7010 else if (GET_CODE (op0) != MEM
7011 && (offset != 0
7012 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7014 /* If the operand is a SAVE_EXPR, we can deal with this by
7015 forcing the SAVE_EXPR into memory. */
7016 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7018 put_var_into_stack (TREE_OPERAND (exp, 0),
7019 /*rescan=*/true);
7020 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7022 else
7024 tree nt
7025 = build_qualified_type (TREE_TYPE (tem),
7026 (TYPE_QUALS (TREE_TYPE (tem))
7027 | TYPE_QUAL_CONST));
7028 rtx memloc = assign_temp (nt, 1, 1, 1);
7030 emit_move_insn (memloc, op0);
7031 op0 = memloc;
7035 if (offset != 0)
7037 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7038 EXPAND_SUM);
7040 if (GET_CODE (op0) != MEM)
7041 abort ();
7043 #ifdef POINTERS_EXTEND_UNSIGNED
7044 if (GET_MODE (offset_rtx) != Pmode)
7045 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7046 #else
7047 if (GET_MODE (offset_rtx) != ptr_mode)
7048 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7049 #endif
7051 if (GET_MODE (op0) == BLKmode
7052 /* A constant address in OP0 can have VOIDmode; we must
7053 not try to call force_reg in that case. */
7054 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7055 && bitsize != 0
7056 && (bitpos % bitsize) == 0
7057 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7058 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7060 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7061 bitpos = 0;
7064 op0 = offset_address (op0, offset_rtx,
7065 highest_pow2_factor (offset));
7068 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7069 record its alignment as BIGGEST_ALIGNMENT. */
7070 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7071 && is_aligning_offset (offset, tem))
7072 set_mem_align (op0, BIGGEST_ALIGNMENT);
7074 /* Don't forget about volatility even if this is a bitfield. */
7075 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7077 if (op0 == orig_op0)
7078 op0 = copy_rtx (op0);
7080 MEM_VOLATILE_P (op0) = 1;
7083 /* The following code doesn't handle CONCAT.
7084 Assume only bitpos == 0 can be used for CONCAT, due to
7085 one-element arrays having the same mode as their element. */
7086 if (GET_CODE (op0) == CONCAT)
7088 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7089 abort ();
7090 return op0;
7093 /* In cases where an aligned union has an unaligned object
7094 as a field, we might be extracting a BLKmode value from
7095 an integer-mode (e.g., SImode) object. Handle this case
7096 by doing the extract into an object as wide as the field
7097 (which we know to be the width of a basic mode), then
7098 storing into memory, and changing the mode to BLKmode. */
7099 if (mode1 == VOIDmode
7100 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7101 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7102 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7103 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7104 && modifier != EXPAND_CONST_ADDRESS
7105 && modifier != EXPAND_INITIALIZER)
7106 /* If the field isn't aligned enough to fetch as a memref,
7107 fetch it as a bit field. */
7108 || (mode1 != BLKmode
7109 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7110 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7111 || (GET_CODE (op0) == MEM
7112 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7113 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7114 && ((modifier == EXPAND_CONST_ADDRESS
7115 || modifier == EXPAND_INITIALIZER)
7116 ? STRICT_ALIGNMENT
7117 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7118 || (bitpos % BITS_PER_UNIT != 0)))
7119 /* If the type and the field are a constant size and the
7120 size of the type isn't the same size as the bitfield,
7121 we must use bitfield operations. */
7122 || (bitsize >= 0
7123 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7124 == INTEGER_CST)
7125 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7126 bitsize)))
7128 enum machine_mode ext_mode = mode;
7130 if (ext_mode == BLKmode
7131 && ! (target != 0 && GET_CODE (op0) == MEM
7132 && GET_CODE (target) == MEM
7133 && bitpos % BITS_PER_UNIT == 0))
7134 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7136 if (ext_mode == BLKmode)
7138 if (target == 0)
7139 target = assign_temp (type, 0, 1, 1);
7141 if (bitsize == 0)
7142 return target;
7144 /* In this case, BITPOS must start at a byte boundary and
7145 TARGET, if specified, must be a MEM. */
7146 if (GET_CODE (op0) != MEM
7147 || (target != 0 && GET_CODE (target) != MEM)
7148 || bitpos % BITS_PER_UNIT != 0)
7149 abort ();
7151 emit_block_move (target,
7152 adjust_address (op0, VOIDmode,
7153 bitpos / BITS_PER_UNIT),
7154 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7155 / BITS_PER_UNIT),
7156 (modifier == EXPAND_STACK_PARM
7157 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7159 return target;
7162 op0 = validize_mem (op0);
7164 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7165 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7167 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7168 (modifier == EXPAND_STACK_PARM
7169 ? NULL_RTX : target),
7170 ext_mode, ext_mode,
7171 int_size_in_bytes (TREE_TYPE (tem)));
7173 /* If the result is a record type and BITSIZE is narrower than
7174 the mode of OP0, an integral mode, and this is a big endian
7175 machine, we must put the field into the high-order bits. */
7176 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7177 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7178 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7179 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7180 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7181 - bitsize),
7182 op0, 1);
7184 /* If the result type is BLKmode, store the data into a temporary
7185 of the appropriate type, but with the mode corresponding to the
7186 mode for the data we have (op0's mode). It's tempting to make
7187 this a constant type, since we know it's only being stored once,
7188 but that can cause problems if we are taking the address of this
7189 COMPONENT_REF because the MEM of any reference via that address
7190 will have flags corresponding to the type, which will not
7191 necessarily be constant. */
7192 if (mode == BLKmode)
7194 rtx new
7195 = assign_stack_temp_for_type
7196 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7198 emit_move_insn (new, op0);
7199 op0 = copy_rtx (new);
7200 PUT_MODE (op0, BLKmode);
7201 set_mem_attributes (op0, exp, 1);
7204 return op0;
7207 /* If the result is BLKmode, use that to access the object
7208 now as well. */
7209 if (mode == BLKmode)
7210 mode1 = BLKmode;
7212 /* Get a reference to just this component. */
7213 if (modifier == EXPAND_CONST_ADDRESS
7214 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7215 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7216 else
7217 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7219 if (op0 == orig_op0)
7220 op0 = copy_rtx (op0);
7222 set_mem_attributes (op0, exp, 0);
7223 if (GET_CODE (XEXP (op0, 0)) == REG)
7224 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7226 MEM_VOLATILE_P (op0) |= volatilep;
7227 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7228 || modifier == EXPAND_CONST_ADDRESS
7229 || modifier == EXPAND_INITIALIZER)
7230 return op0;
7231 else if (target == 0)
7232 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7234 convert_move (target, op0, unsignedp);
7235 return target;
7238 case VTABLE_REF:
7240 rtx insn, before = get_last_insn (), vtbl_ref;
7242 /* Evaluate the interior expression. */
7243 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7244 tmode, modifier);
7246 /* Get or create an instruction off which to hang a note. */
7247 if (REG_P (subtarget))
7249 target = subtarget;
7250 insn = get_last_insn ();
7251 if (insn == before)
7252 abort ();
7253 if (! INSN_P (insn))
7254 insn = prev_nonnote_insn (insn);
7256 else
7258 target = gen_reg_rtx (GET_MODE (subtarget));
7259 insn = emit_move_insn (target, subtarget);
7262 /* Collect the data for the note. */
7263 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7264 vtbl_ref = plus_constant (vtbl_ref,
7265 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7266 /* Discard the initial CONST that was added. */
7267 vtbl_ref = XEXP (vtbl_ref, 0);
7269 REG_NOTES (insn)
7270 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7272 return target;
7275 /* Intended for a reference to a buffer of a file-object in Pascal.
7276 But it's not certain that a special tree code will really be
7277 necessary for these. INDIRECT_REF might work for them. */
7278 case BUFFER_REF:
7279 abort ();
7281 case IN_EXPR:
7283 /* Pascal set IN expression.
7285 Algorithm:
7286 rlo = set_low - (set_low%bits_per_word);
7287 the_word = set [ (index - rlo)/bits_per_word ];
7288 bit_index = index % bits_per_word;
7289 bitmask = 1 << bit_index;
7290 return !!(the_word & bitmask); */
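/* Worked example of the algorithm above (illustrative only, not from the
   original sources): with bits_per_word == 32, set_low == 0 and index == 70,
   rlo is 0, the_word is set[70/32] == set[2], bit_index is 70%32 == 6, and
   the result is bit 6 of that word.  */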
7292 tree set = TREE_OPERAND (exp, 0);
7293 tree index = TREE_OPERAND (exp, 1);
7294 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7295 tree set_type = TREE_TYPE (set);
7296 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7297 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7298 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7299 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7300 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7301 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7302 rtx setaddr = XEXP (setval, 0);
7303 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7304 rtx rlow;
7305 rtx diff, quo, rem, addr, bit, result;
7307 /* If domain is empty, answer is no. Likewise if index is constant
7308 and out of bounds. */
7309 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7310 && TREE_CODE (set_low_bound) == INTEGER_CST
7311 && tree_int_cst_lt (set_high_bound, set_low_bound))
7312 || (TREE_CODE (index) == INTEGER_CST
7313 && TREE_CODE (set_low_bound) == INTEGER_CST
7314 && tree_int_cst_lt (index, set_low_bound))
7315 || (TREE_CODE (set_high_bound) == INTEGER_CST
7316 && TREE_CODE (index) == INTEGER_CST
7317 && tree_int_cst_lt (set_high_bound, index))))
7318 return const0_rtx;
7320 if (target == 0)
7321 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7323 /* If we get here, we have to generate the code for both cases
7324 (in range and out of range). */
7326 op0 = gen_label_rtx ();
7327 op1 = gen_label_rtx ();
7329 if (! (GET_CODE (index_val) == CONST_INT
7330 && GET_CODE (lo_r) == CONST_INT))
7331 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7332 GET_MODE (index_val), iunsignedp, op1);
7334 if (! (GET_CODE (index_val) == CONST_INT
7335 && GET_CODE (hi_r) == CONST_INT))
7336 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7337 GET_MODE (index_val), iunsignedp, op1);
7339 /* Calculate the element number of bit zero in the first word
7340 of the set. */
7341 if (GET_CODE (lo_r) == CONST_INT)
7342 rlow = GEN_INT (INTVAL (lo_r)
7343 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7344 else
7345 rlow = expand_binop (index_mode, and_optab, lo_r,
7346 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7347 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7349 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7350 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7352 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7353 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7354 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7355 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7357 addr = memory_address (byte_mode,
7358 expand_binop (index_mode, add_optab, diff,
7359 setaddr, NULL_RTX, iunsignedp,
7360 OPTAB_LIB_WIDEN));
7362 /* Extract the bit we want to examine. */
7363 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7364 gen_rtx_MEM (byte_mode, addr),
7365 make_tree (TREE_TYPE (index), rem),
7366 NULL_RTX, 1);
7367 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7368 GET_MODE (target) == byte_mode ? target : 0,
7369 1, OPTAB_LIB_WIDEN);
7371 if (result != target)
7372 convert_move (target, result, 1);
7374 /* Output the code to handle the out-of-range case. */
7375 emit_jump (op0);
7376 emit_label (op1);
7377 emit_move_insn (target, const0_rtx);
7378 emit_label (op0);
7379 return target;
7382 case WITH_CLEANUP_EXPR:
7383 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7385 WITH_CLEANUP_EXPR_RTL (exp)
7386 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7387 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7388 CLEANUP_EH_ONLY (exp));
7390 /* That's it for this cleanup. */
7391 TREE_OPERAND (exp, 1) = 0;
7393 return WITH_CLEANUP_EXPR_RTL (exp);
7395 case CLEANUP_POINT_EXPR:
7397 /* Start a new binding layer that will keep track of all cleanup
7398 actions to be performed. */
7399 expand_start_bindings (2);
7401 target_temp_slot_level = temp_slot_level;
7403 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7404 /* If we're going to use this value, load it up now. */
7405 if (! ignore)
7406 op0 = force_not_mem (op0);
7407 preserve_temp_slots (op0);
7408 expand_end_bindings (NULL_TREE, 0, 0);
7410 return op0;
7412 case CALL_EXPR:
7413 /* Check for a built-in function. */
7414 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7415 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7416 == FUNCTION_DECL)
7417 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7419 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7420 == BUILT_IN_FRONTEND)
7421 /* ??? Use (*fun) form because expand_expr is a macro. */
7422 return (*lang_hooks.expand_expr) (exp, original_target,
7423 tmode, modifier,
7424 alt_rtl);
7425 else
7426 return expand_builtin (exp, target, subtarget, tmode, ignore);
7429 return expand_call (exp, target, ignore);
7431 case NON_LVALUE_EXPR:
7432 case NOP_EXPR:
7433 case CONVERT_EXPR:
7434 case REFERENCE_EXPR:
7435 if (TREE_OPERAND (exp, 0) == error_mark_node)
7436 return const0_rtx;
7438 if (TREE_CODE (type) == UNION_TYPE)
7440 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7442 /* If both input and output are BLKmode, this conversion isn't doing
7443 anything except possibly changing memory attribute. */
7444 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7446 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7447 modifier);
7449 result = copy_rtx (result);
7450 set_mem_attributes (result, exp, 0);
7451 return result;
7454 if (target == 0)
7456 if (TYPE_MODE (type) != BLKmode)
7457 target = gen_reg_rtx (TYPE_MODE (type));
7458 else
7459 target = assign_temp (type, 0, 1, 1);
7462 if (GET_CODE (target) == MEM)
7463 /* Store data into beginning of memory target. */
7464 store_expr (TREE_OPERAND (exp, 0),
7465 adjust_address (target, TYPE_MODE (valtype), 0),
7466 modifier == EXPAND_STACK_PARM ? 2 : 0);
7468 else if (GET_CODE (target) == REG)
7469 /* Store this field into a union of the proper type. */
7470 store_field (target,
7471 MIN ((int_size_in_bytes (TREE_TYPE
7472 (TREE_OPERAND (exp, 0)))
7473 * BITS_PER_UNIT),
7474 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7475 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7476 VOIDmode, 0, type, 0);
7477 else
7478 abort ();
7480 /* Return the entire union. */
7481 return target;
7484 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7486 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7487 modifier);
7489 /* If the signedness of the conversion differs and OP0 is
7490 a promoted SUBREG, clear that indication since we now
7491 have to do the proper extension. */
7492 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7493 && GET_CODE (op0) == SUBREG)
7494 SUBREG_PROMOTED_VAR_P (op0) = 0;
7496 return op0;
7499 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7500 if (GET_MODE (op0) == mode)
7501 return op0;
7503 /* If OP0 is a constant, just convert it into the proper mode. */
7504 if (CONSTANT_P (op0))
7506 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7507 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7509 if (modifier == EXPAND_INITIALIZER)
7510 return simplify_gen_subreg (mode, op0, inner_mode,
7511 subreg_lowpart_offset (mode,
7512 inner_mode));
7513 else
7514 return convert_modes (mode, inner_mode, op0,
7515 TYPE_UNSIGNED (inner_type));
7518 if (modifier == EXPAND_INITIALIZER)
7519 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7521 if (target == 0)
7522 return
7523 convert_to_mode (mode, op0,
7524 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7525 else
7526 convert_move (target, op0,
7527 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7528 return target;
7530 case VIEW_CONVERT_EXPR:
7531 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7533 /* If the input and output modes are both the same, we are done.
7534 Otherwise, if neither mode is BLKmode and both are integral and within
7535 a word, we can use gen_lowpart. If neither is true, make sure the
7536 operand is in memory and convert the MEM to the new mode. */
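/* E.g. (illustrative): a view between two integral types that fit in a word
   keeps the value and only retypes it via gen_lowpart; a view involving a
   floating mode or a BLKmode aggregate instead goes through memory, so only
   the MEM's mode changes.  */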
7537 if (TYPE_MODE (type) == GET_MODE (op0))
7539 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7540 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7541 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7542 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7543 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7544 op0 = gen_lowpart (TYPE_MODE (type), op0);
7545 else if (GET_CODE (op0) != MEM)
7547 /* If the operand is not a MEM, force it into memory. Since we
7548 are going to be changing the mode of the MEM, don't call
7549 force_const_mem for constants because we don't allow pool
7550 constants to change mode. */
7551 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7553 if (TREE_ADDRESSABLE (exp))
7554 abort ();
7556 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7557 target
7558 = assign_stack_temp_for_type
7559 (TYPE_MODE (inner_type),
7560 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7562 emit_move_insn (target, op0);
7563 op0 = target;
7566 /* At this point, OP0 is in the correct mode. If the output type is such
7567 that the operand is known to be aligned, indicate that it is.
7568 Otherwise, we need only be concerned about alignment for non-BLKmode
7569 results. */
7570 if (GET_CODE (op0) == MEM)
7572 op0 = copy_rtx (op0);
7574 if (TYPE_ALIGN_OK (type))
7575 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7576 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7577 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7579 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7580 HOST_WIDE_INT temp_size
7581 = MAX (int_size_in_bytes (inner_type),
7582 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7583 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7584 temp_size, 0, type);
7585 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7587 if (TREE_ADDRESSABLE (exp))
7588 abort ();
7590 if (GET_MODE (op0) == BLKmode)
7591 emit_block_move (new_with_op0_mode, op0,
7592 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7593 (modifier == EXPAND_STACK_PARM
7594 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7595 else
7596 emit_move_insn (new_with_op0_mode, op0);
7598 op0 = new;
7601 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7604 return op0;
7606 case PLUS_EXPR:
7607 this_optab = ! unsignedp && flag_trapv
7608 && (GET_MODE_CLASS (mode) == MODE_INT)
7609 ? addv_optab : add_optab;
7611 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7612 something else, make sure we add the register to the constant and
7613 then to the other thing. This case can occur during strength
7614 reduction and doing it this way will produce better code if the
7615 frame pointer or argument pointer is eliminated.
7617 fold-const.c will ensure that the constant is always in the inner
7618 PLUS_EXPR, so the only case we need to do anything about is if
7619 sp, ap, or fp is our second argument, in which case we must swap
7620 the innermost first argument and our second argument. */
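/* For example (hypothetical trees, for illustration only): (A + 4) + fp is
   rearranged here into (fp + 4) + A, so the constant stays next to the frame
   pointer and can be folded away when the frame pointer is eliminated.  */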
7622 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7623 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7624 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7625 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7626 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7627 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7629 tree t = TREE_OPERAND (exp, 1);
7631 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7632 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7635 /* If the result is to be ptr_mode and we are adding an integer to
7636 something, we might be forming a constant. So try to use
7637 plus_constant. If it produces a sum and we can't accept it,
7638 use force_operand. This allows P = &ARR[const] to generate
7639 efficient code on machines where a SYMBOL_REF is not a valid
7640 address.
7642 If this is an EXPAND_SUM call, always return the sum. */
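/* Sketch of the intent (illustrative only): for P = &ARR[10] with 4-byte
   elements, plus_constant can fold the offset into the symbol, yielding a
   SYMBOL_REF-plus-CONST_INT address instead of materializing the symbol in a
   register and adding 40 at run time.  */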
7643 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7644 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7646 if (modifier == EXPAND_STACK_PARM)
7647 target = 0;
7648 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7649 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7650 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7652 rtx constant_part;
7654 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7655 EXPAND_SUM);
7656 /* Use immed_double_const to ensure that the constant is
7657 truncated according to the mode of OP1, then sign extended
7658 to a HOST_WIDE_INT. Using the constant directly can result
7659 in non-canonical RTL in a 64x32 cross compile. */
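/* E.g. (illustrative): on a 64-bit host targeting a 32-bit machine, the low
   word of an SImode all-ones constant reads back as 0xffffffff;
   immed_double_const truncates to the operand's mode and sign-extends,
   giving the canonical CONST_INT -1 rather than a non-canonical positive
   value.  */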
7660 constant_part
7661 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7662 (HOST_WIDE_INT) 0,
7663 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7664 op1 = plus_constant (op1, INTVAL (constant_part));
7665 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7666 op1 = force_operand (op1, target);
7667 return op1;
7670 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7671 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7672 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7674 rtx constant_part;
7676 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7677 (modifier == EXPAND_INITIALIZER
7678 ? EXPAND_INITIALIZER : EXPAND_SUM));
7679 if (! CONSTANT_P (op0))
7681 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7682 VOIDmode, modifier);
7683 /* Return a PLUS if modifier says it's OK. */
7684 if (modifier == EXPAND_SUM
7685 || modifier == EXPAND_INITIALIZER)
7686 return simplify_gen_binary (PLUS, mode, op0, op1);
7687 goto binop2;
7689 /* Use immed_double_const to ensure that the constant is
7690 truncated according to the mode of OP0, then sign extended
7691 to a HOST_WIDE_INT. Using the constant directly can result
7692 in non-canonical RTL in a 64x32 cross compile. */
7693 constant_part
7694 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7695 (HOST_WIDE_INT) 0,
7696 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7697 op0 = plus_constant (op0, INTVAL (constant_part));
7698 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7699 op0 = force_operand (op0, target);
7700 return op0;
7704 /* No sense saving up arithmetic to be done
7705 if it's all in the wrong mode to form part of an address.
7706 And force_operand won't know whether to sign-extend or
7707 zero-extend. */
7708 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7709 || mode != ptr_mode)
7711 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7712 subtarget, &op0, &op1, 0);
7713 if (op0 == const0_rtx)
7714 return op1;
7715 if (op1 == const0_rtx)
7716 return op0;
7717 goto binop2;
7720 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7721 subtarget, &op0, &op1, modifier);
7722 return simplify_gen_binary (PLUS, mode, op0, op1);
7724 case MINUS_EXPR:
7725 /* For initializers, we are allowed to return a MINUS of two
7726 symbolic constants. Here we handle all cases when both operands
7727 are constant. */
7728 /* Handle difference of two symbolic constants,
7729 for the sake of an initializer. */
7730 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7731 && really_constant_p (TREE_OPERAND (exp, 0))
7732 && really_constant_p (TREE_OPERAND (exp, 1)))
7734 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7735 NULL_RTX, &op0, &op1, modifier);
7737 /* If the last operand is a CONST_INT, use plus_constant of
7738 the negated constant. Else make the MINUS. */
7739 if (GET_CODE (op1) == CONST_INT)
7740 return plus_constant (op0, - INTVAL (op1));
7741 else
7742 return gen_rtx_MINUS (mode, op0, op1);
7745 this_optab = ! unsignedp && flag_trapv
7746 && (GET_MODE_CLASS(mode) == MODE_INT)
7747 ? subv_optab : sub_optab;
7749 /* No sense saving up arithmetic to be done
7750 if it's all in the wrong mode to form part of an address.
7751 And force_operand won't know whether to sign-extend or
7752 zero-extend. */
7753 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7754 || mode != ptr_mode)
7755 goto binop;
7757 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7758 subtarget, &op0, &op1, modifier);
7760 /* Convert A - const to A + (-const). */
7761 if (GET_CODE (op1) == CONST_INT)
7763 op1 = negate_rtx (mode, op1);
7764 return simplify_gen_binary (PLUS, mode, op0, op1);
7767 goto binop2;
7769 case MULT_EXPR:
7770 /* If first operand is constant, swap them.
7771 Thus the following special case checks need only
7772 check the second operand. */
7773 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7775 tree t1 = TREE_OPERAND (exp, 0);
7776 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7777 TREE_OPERAND (exp, 1) = t1;
7780 /* Attempt to return something suitable for generating an
7781 indexed address, for machines that support that. */
7783 if (modifier == EXPAND_SUM && mode == ptr_mode
7784 && host_integerp (TREE_OPERAND (exp, 1), 0))
7786 tree exp1 = TREE_OPERAND (exp, 1);
7788 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7789 EXPAND_SUM);
7791 if (GET_CODE (op0) != REG)
7792 op0 = force_operand (op0, NULL_RTX);
7793 if (GET_CODE (op0) != REG)
7794 op0 = copy_to_mode_reg (mode, op0);
7796 return gen_rtx_MULT (mode, op0,
7797 gen_int_mode (tree_low_cst (exp1, 0),
7798 TYPE_MODE (TREE_TYPE (exp1))));
7801 if (modifier == EXPAND_STACK_PARM)
7802 target = 0;
7804 /* Check for multiplying things that have been extended
7805 from a narrower type. If this machine supports multiplying
7806 in that narrower type with a result in the desired type,
7807 do it that way, and avoid the explicit type-conversion. */
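/* Example (illustrative only): on a target with a 16x16->32 bit multiply,
   (int) a * (int) b with A and B of a 16-bit type can be emitted as a single
   widening multiply instead of two extensions followed by a full 32-bit
   multiply.  */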
7808 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7809 && TREE_CODE (type) == INTEGER_TYPE
7810 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7811 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7812 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7813 && int_fits_type_p (TREE_OPERAND (exp, 1),
7814 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7815 /* Don't use a widening multiply if a shift will do. */
7816 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7817 > HOST_BITS_PER_WIDE_INT)
7818 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7819 ||
7820 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7821 && (TYPE_PRECISION (TREE_TYPE
7822 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7823 == TYPE_PRECISION (TREE_TYPE
7824 (TREE_OPERAND
7825 (TREE_OPERAND (exp, 0), 0))))
7826 /* If both operands are extended, they must either both
7827 be zero-extended or both be sign-extended. */
7828 && (TYPE_UNSIGNED (TREE_TYPE
7829 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7830 == TYPE_UNSIGNED (TREE_TYPE
7831 (TREE_OPERAND
7832 (TREE_OPERAND (exp, 0), 0)))))))
7834 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7835 enum machine_mode innermode = TYPE_MODE (op0type);
7836 bool zextend_p = TYPE_UNSIGNED (op0type);
7837 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7838 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7840 if (mode == GET_MODE_WIDER_MODE (innermode))
7842 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7844 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7845 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7846 TREE_OPERAND (exp, 1),
7847 NULL_RTX, &op0, &op1, 0);
7848 else
7849 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7850 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7851 NULL_RTX, &op0, &op1, 0);
7852 goto binop2;
7854 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7855 && innermode == word_mode)
7857 rtx htem, hipart;
7858 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7859 NULL_RTX, VOIDmode, 0);
7860 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7861 op1 = convert_modes (innermode, mode,
7862 expand_expr (TREE_OPERAND (exp, 1),
7863 NULL_RTX, VOIDmode, 0),
7864 unsignedp);
7865 else
7866 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7867 NULL_RTX, VOIDmode, 0);
7868 temp = expand_binop (mode, other_optab, op0, op1, target,
7869 unsignedp, OPTAB_LIB_WIDEN);
7870 hipart = gen_highpart (innermode, temp);
7871 htem = expand_mult_highpart_adjust (innermode, hipart,
7872 op0, op1, hipart,
7873 zextend_p);
7874 if (htem != hipart)
7875 emit_move_insn (hipart, htem);
7876 return temp;
7880 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7881 subtarget, &op0, &op1, 0);
7882 return expand_mult (mode, op0, op1, target, unsignedp);
7884 case TRUNC_DIV_EXPR:
7885 case FLOOR_DIV_EXPR:
7886 case CEIL_DIV_EXPR:
7887 case ROUND_DIV_EXPR:
7888 case EXACT_DIV_EXPR:
7889 if (modifier == EXPAND_STACK_PARM)
7890 target = 0;
7891 /* Possible optimization: compute the dividend with EXPAND_SUM;
7892 then, if the divisor is constant, we can optimize the case
7893 where some terms of the dividend have coefficients divisible by it. */
7894 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7895 subtarget, &op0, &op1, 0);
7896 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7898 case RDIV_EXPR:
7899 /* Emit a/b as a*(1/b). Later, CSE may manage to share the reciprocal,
7900 saving an expensive divide. If not, combine will rebuild the original
7901 computation. */
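/* Sketch (illustrative): x1/y, x2/y and x3/y each become xN*(1/y), so a later
   CSE pass can compute 1/y once, trading three divides for one divide and
   three multiplies.  */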
7902 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7903 && TREE_CODE (type) == REAL_TYPE
7904 && !real_onep (TREE_OPERAND (exp, 0)))
7905 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7906 build (RDIV_EXPR, type,
7907 build_real (type, dconst1),
7908 TREE_OPERAND (exp, 1))),
7909 target, tmode, modifier);
7910 this_optab = sdiv_optab;
7911 goto binop;
7913 case TRUNC_MOD_EXPR:
7914 case FLOOR_MOD_EXPR:
7915 case CEIL_MOD_EXPR:
7916 case ROUND_MOD_EXPR:
7917 if (modifier == EXPAND_STACK_PARM)
7918 target = 0;
7919 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7920 subtarget, &op0, &op1, 0);
7921 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7923 case FIX_ROUND_EXPR:
7924 case FIX_FLOOR_EXPR:
7925 case FIX_CEIL_EXPR:
7926 abort (); /* Not used for C. */
7928 case FIX_TRUNC_EXPR:
7929 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7930 if (target == 0 || modifier == EXPAND_STACK_PARM)
7931 target = gen_reg_rtx (mode);
7932 expand_fix (target, op0, unsignedp);
7933 return target;
7935 case FLOAT_EXPR:
7936 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7937 if (target == 0 || modifier == EXPAND_STACK_PARM)
7938 target = gen_reg_rtx (mode);
7939 /* expand_float can't figure out what to do if FROM has VOIDmode.
7940 So give it the correct mode. With -O, cse will optimize this. */
7941 if (GET_MODE (op0) == VOIDmode)
7942 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7943 op0);
7944 expand_float (target, op0,
7945 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7946 return target;
7948 case NEGATE_EXPR:
7949 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7950 if (modifier == EXPAND_STACK_PARM)
7951 target = 0;
7952 temp = expand_unop (mode,
7953 ! unsignedp && flag_trapv
7954 && (GET_MODE_CLASS(mode) == MODE_INT)
7955 ? negv_optab : neg_optab, op0, target, 0);
7956 if (temp == 0)
7957 abort ();
7958 return temp;
7960 case ABS_EXPR:
7961 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7962 if (modifier == EXPAND_STACK_PARM)
7963 target = 0;
7965 /* ABS_EXPR is not valid for complex arguments. */
7966 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7967 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7968 abort ();
7970 /* Unsigned abs is simply the operand. Testing here means we don't
7971 risk generating incorrect code below. */
7972 if (TYPE_UNSIGNED (type))
7973 return op0;
7975 return expand_abs (mode, op0, target, unsignedp,
7976 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7978 case MAX_EXPR:
7979 case MIN_EXPR:
7980 target = original_target;
7981 if (target == 0
7982 || modifier == EXPAND_STACK_PARM
7983 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7984 || GET_MODE (target) != mode
7985 || (GET_CODE (target) == REG
7986 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7987 target = gen_reg_rtx (mode);
7988 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7989 target, &op0, &op1, 0);
7991 /* First try to do it with a special MIN or MAX instruction.
7992 If that does not win, use a conditional jump to select the proper
7993 value. */
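/* When no such instruction exists, the fallback emitted below is essentially:
   target = op0; if (target >= op1) goto done; target = op1; done:
   (with <= instead of >= for MIN_EXPR).  */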
7994 this_optab = (unsignedp
7995 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7996 : (code == MIN_EXPR ? smin_optab : smax_optab));
7998 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7999 OPTAB_WIDEN);
8000 if (temp != 0)
8001 return temp;
8003 /* At this point, a MEM target is no longer useful; we will get better
8004 code without it. */
8006 if (GET_CODE (target) == MEM)
8007 target = gen_reg_rtx (mode);
8009 /* If op1 was placed in target, swap op0 and op1. */
8010 if (target != op0 && target == op1)
8012 rtx tem = op0;
8013 op0 = op1;
8014 op1 = tem;
8017 if (target != op0)
8018 emit_move_insn (target, op0);
8020 op0 = gen_label_rtx ();
8022 /* If this mode is an integer too wide to compare properly,
8023 compare word by word. Rely on cse to optimize constant cases. */
8024 if (GET_MODE_CLASS (mode) == MODE_INT
8025 && ! can_compare_p (GE, mode, ccp_jump))
8027 if (code == MAX_EXPR)
8028 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8029 NULL_RTX, op0);
8030 else
8031 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8032 NULL_RTX, op0);
8034 else
8036 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8037 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8039 emit_move_insn (target, op1);
8040 emit_label (op0);
8041 return target;
8043 case BIT_NOT_EXPR:
8044 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8045 if (modifier == EXPAND_STACK_PARM)
8046 target = 0;
8047 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8048 if (temp == 0)
8049 abort ();
8050 return temp;
8052 /* ??? Can optimize bitwise operations with one arg constant.
8053 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8054 and (a bitwise1 b) bitwise2 b (etc)
8055 but that is probably not worth while. */
8057 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8058 boolean values when we want in all cases to compute both of them. In
8059 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8060 as actual zero-or-1 values and then bitwise anding. In cases where
8061 there cannot be any side effects, better code would be made by
8062 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8063 how to recognize those cases. */
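/* E.g. (illustrative): (a > 0) && (b > 0) written as TRUTH_AND_EXPR evaluates
   both comparisons to 0/1 values and bitwise-ANDs them; as TRUTH_ANDIF_EXPR
   it would instead branch around the second comparison.  */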
8065 case TRUTH_AND_EXPR:
8066 case BIT_AND_EXPR:
8067 this_optab = and_optab;
8068 goto binop;
8070 case TRUTH_OR_EXPR:
8071 case BIT_IOR_EXPR:
8072 this_optab = ior_optab;
8073 goto binop;
8075 case TRUTH_XOR_EXPR:
8076 case BIT_XOR_EXPR:
8077 this_optab = xor_optab;
8078 goto binop;
8080 case LSHIFT_EXPR:
8081 case RSHIFT_EXPR:
8082 case LROTATE_EXPR:
8083 case RROTATE_EXPR:
8084 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8085 subtarget = 0;
8086 if (modifier == EXPAND_STACK_PARM)
8087 target = 0;
8088 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8089 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8090 unsignedp);
8092 /* Could determine the answer when only additive constants differ. Also,
8093 the addition of one can be handled by changing the condition. */
8094 case LT_EXPR:
8095 case LE_EXPR:
8096 case GT_EXPR:
8097 case GE_EXPR:
8098 case EQ_EXPR:
8099 case NE_EXPR:
8100 case UNORDERED_EXPR:
8101 case ORDERED_EXPR:
8102 case UNLT_EXPR:
8103 case UNLE_EXPR:
8104 case UNGT_EXPR:
8105 case UNGE_EXPR:
8106 case UNEQ_EXPR:
8107 temp = do_store_flag (exp,
8108 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8109 tmode != VOIDmode ? tmode : mode, 0);
8110 if (temp != 0)
8111 return temp;
8113 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8114 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8115 && original_target
8116 && GET_CODE (original_target) == REG
8117 && (GET_MODE (original_target)
8118 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8120 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8121 VOIDmode, 0);
8123 /* If temp is constant, we can just compute the result. */
8124 if (GET_CODE (temp) == CONST_INT)
8126 if (INTVAL (temp) != 0)
8127 emit_move_insn (target, const1_rtx);
8128 else
8129 emit_move_insn (target, const0_rtx);
8131 return target;
8134 if (temp != original_target)
8136 enum machine_mode mode1 = GET_MODE (temp);
8137 if (mode1 == VOIDmode)
8138 mode1 = tmode != VOIDmode ? tmode : mode;
8140 temp = copy_to_mode_reg (mode1, temp);
8143 op1 = gen_label_rtx ();
8144 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8145 GET_MODE (temp), unsignedp, op1);
8146 emit_move_insn (temp, const1_rtx);
8147 emit_label (op1);
8148 return temp;
8151 /* If no set-flag instruction, must generate a conditional
8152 store into a temporary variable. Drop through
8153 and handle this like && and ||. */
8155 case TRUTH_ANDIF_EXPR:
8156 case TRUTH_ORIF_EXPR:
8157 if (! ignore
8158 && (target == 0
8159 || modifier == EXPAND_STACK_PARM
8160 || ! safe_from_p (target, exp, 1)
8161 /* Make sure we don't have a hard reg (such as function's return
8162 value) live across basic blocks, if not optimizing. */
8163 || (!optimize && GET_CODE (target) == REG
8164 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8165 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8167 if (target)
8168 emit_clr_insn (target);
8170 op1 = gen_label_rtx ();
8171 jumpifnot (exp, op1);
8173 if (target)
8174 emit_0_to_1_insn (target);
8176 emit_label (op1);
8177 return ignore ? const0_rtx : target;
8179 case TRUTH_NOT_EXPR:
8180 if (modifier == EXPAND_STACK_PARM)
8181 target = 0;
8182 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8183 /* The parser is careful to generate TRUTH_NOT_EXPR
8184 only with operands that are always zero or one. */
8185 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8186 target, 1, OPTAB_LIB_WIDEN);
8187 if (temp == 0)
8188 abort ();
8189 return temp;
8191 case COMPOUND_EXPR:
8192 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8193 emit_queue ();
8194 return expand_expr_real (TREE_OPERAND (exp, 1),
8195 (ignore ? const0_rtx : target),
8196 VOIDmode, modifier, alt_rtl);
8198 case COND_EXPR:
8199 /* If we would have a "singleton" (see below) were it not for a
8200 conversion in each arm, bring that conversion back out. */
8201 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8202 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8203 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8204 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8206 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8207 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8209 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8210 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8211 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8212 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8213 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8214 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8215 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8216 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8217 return expand_expr (build1 (NOP_EXPR, type,
8218 build (COND_EXPR, TREE_TYPE (iftrue),
8219 TREE_OPERAND (exp, 0),
8220 iftrue, iffalse)),
8221 target, tmode, modifier);
8225 /* Note that COND_EXPRs whose type is a structure or union
8226 are required to be constructed to contain assignments of
8227 a temporary variable, so that we can evaluate them here
8228 for side effect only. If type is void, we must do likewise. */
8230 /* If an arm of the branch requires a cleanup,
8231 only that cleanup is performed. */
8233 tree singleton = 0;
8234 tree binary_op = 0, unary_op = 0;
8236 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8237 convert it to our mode, if necessary. */
8238 if (integer_onep (TREE_OPERAND (exp, 1))
8239 && integer_zerop (TREE_OPERAND (exp, 2))
8240 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8242 if (ignore)
8244 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8245 modifier);
8246 return const0_rtx;
8249 if (modifier == EXPAND_STACK_PARM)
8250 target = 0;
8251 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8252 if (GET_MODE (op0) == mode)
8253 return op0;
8255 if (target == 0)
8256 target = gen_reg_rtx (mode);
8257 convert_move (target, op0, unsignedp);
8258 return target;
8261 /* Check for X ? A + B : A. If we have this, we can copy A to the
8262 output and conditionally add B. Similarly for unary operations.
8263 Don't do this if X has side-effects because those side effects
8264 might affect A or B and the "?" operation is a sequence point in
8265 ANSI. (operand_equal_p tests for side effects.) */
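/* E.g. (illustrative): in x ? a + b : a the singleton is A; A is copied to
   the result unconditionally and B is added only on the path where X is
   true.  */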
8267 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8268 && operand_equal_p (TREE_OPERAND (exp, 2),
8269 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8270 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8271 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8272 && operand_equal_p (TREE_OPERAND (exp, 1),
8273 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8274 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8275 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8276 && operand_equal_p (TREE_OPERAND (exp, 2),
8277 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8278 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8279 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8280 && operand_equal_p (TREE_OPERAND (exp, 1),
8281 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8282 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8284 /* If we are not to produce a result, we have no target. Otherwise,
8285 if a target was specified use it; it will not be used as an
8286 intermediate target unless it is safe. If no target, use a
8287 temporary. */
8289 if (ignore)
8290 temp = 0;
8291 else if (modifier == EXPAND_STACK_PARM)
8292 temp = assign_temp (type, 0, 0, 1);
8293 else if (original_target
8294 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8295 || (singleton && GET_CODE (original_target) == REG
8296 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8297 && original_target == var_rtx (singleton)))
8298 && GET_MODE (original_target) == mode
8299 #ifdef HAVE_conditional_move
8300 && (! can_conditionally_move_p (mode)
8301 || GET_CODE (original_target) == REG
8302 || TREE_ADDRESSABLE (type))
8303 #endif
8304 && (GET_CODE (original_target) != MEM
8305 || TREE_ADDRESSABLE (type)))
8306 temp = original_target;
8307 else if (TREE_ADDRESSABLE (type))
8308 abort ();
8309 else
8310 temp = assign_temp (type, 0, 0, 1);
8312 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8313 do the test of X as a store-flag operation, do this as
8314 A + ((X != 0) << log C). Similarly for other simple binary
8315 operators. Only do for C == 1 if BRANCH_COST is low. */
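/* Example (illustrative only): x ? a + 4 : a becomes a + ((x != 0) << 2),
   replacing the conditional branch with a store-flag and a shift.  */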
8316 if (temp && singleton && binary_op
8317 && (TREE_CODE (binary_op) == PLUS_EXPR
8318 || TREE_CODE (binary_op) == MINUS_EXPR
8319 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8320 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8321 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8322 : integer_onep (TREE_OPERAND (binary_op, 1)))
8323 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8325 rtx result;
8326 tree cond;
8327 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8328 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8329 ? addv_optab : add_optab)
8330 : TREE_CODE (binary_op) == MINUS_EXPR
8331 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8332 ? subv_optab : sub_optab)
8333 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8334 : xor_optab);
8336 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8337 if (singleton == TREE_OPERAND (exp, 1))
8338 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8339 else
8340 cond = TREE_OPERAND (exp, 0);
8342 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8343 ? temp : NULL_RTX),
8344 mode, BRANCH_COST <= 1);
8346 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8347 result = expand_shift (LSHIFT_EXPR, mode, result,
8348 build_int_2 (tree_log2
8349 (TREE_OPERAND
8350 (binary_op, 1)),
8351 0),
8352 (safe_from_p (temp, singleton, 1)
8353 ? temp : NULL_RTX), 0);
8355 if (result)
8357 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8358 return expand_binop (mode, boptab, op1, result, temp,
8359 unsignedp, OPTAB_LIB_WIDEN);
8363 do_pending_stack_adjust ();
8364 NO_DEFER_POP;
8365 op0 = gen_label_rtx ();
8367 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8369 if (temp != 0)
8371 /* If the target conflicts with the other operand of the
8372 binary op, we can't use it. Also, we can't use the target
8373 if it is a hard register, because evaluating the condition
8374 might clobber it. */
8375 if ((binary_op
8376 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8377 || (GET_CODE (temp) == REG
8378 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8379 temp = gen_reg_rtx (mode);
8380 store_expr (singleton, temp,
8381 modifier == EXPAND_STACK_PARM ? 2 : 0);
8383 else
8384 expand_expr (singleton,
8385 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8386 if (singleton == TREE_OPERAND (exp, 1))
8387 jumpif (TREE_OPERAND (exp, 0), op0);
8388 else
8389 jumpifnot (TREE_OPERAND (exp, 0), op0);
8391 start_cleanup_deferral ();
8392 if (binary_op && temp == 0)
8393 /* Just touch the other operand. */
8394 expand_expr (TREE_OPERAND (binary_op, 1),
8395 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8396 else if (binary_op)
8397 store_expr (build (TREE_CODE (binary_op), type,
8398 make_tree (type, temp),
8399 TREE_OPERAND (binary_op, 1)),
8400 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8401 else
8402 store_expr (build1 (TREE_CODE (unary_op), type,
8403 make_tree (type, temp)),
8404 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8405 op1 = op0;
8407 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8408 comparison operator. If we have one of these cases, set the
8409 output to A, branch on A (cse will merge these two references),
8410 then set the output to FOO. */
8411 else if (temp
8412 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8413 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8414 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8415 TREE_OPERAND (exp, 1), 0)
8416 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8417 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8418 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8420 if (GET_CODE (temp) == REG
8421 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8422 temp = gen_reg_rtx (mode);
8423 store_expr (TREE_OPERAND (exp, 1), temp,
8424 modifier == EXPAND_STACK_PARM ? 2 : 0);
8425 jumpif (TREE_OPERAND (exp, 0), op0);
8427 start_cleanup_deferral ();
8428 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8429 store_expr (TREE_OPERAND (exp, 2), temp,
8430 modifier == EXPAND_STACK_PARM ? 2 : 0);
8431 else
8432 expand_expr (TREE_OPERAND (exp, 2),
8433 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8434 op1 = op0;
8436 else if (temp
8437 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8438 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8439 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8440 TREE_OPERAND (exp, 2), 0)
8441 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8442 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8443 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8445 if (GET_CODE (temp) == REG
8446 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8447 temp = gen_reg_rtx (mode);
8448 store_expr (TREE_OPERAND (exp, 2), temp,
8449 modifier == EXPAND_STACK_PARM ? 2 : 0);
8450 jumpifnot (TREE_OPERAND (exp, 0), op0);
8452 start_cleanup_deferral ();
8453 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8454 store_expr (TREE_OPERAND (exp, 1), temp,
8455 modifier == EXPAND_STACK_PARM ? 2 : 0);
8456 else
8457 expand_expr (TREE_OPERAND (exp, 1),
8458 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8459 op1 = op0;
8461 else
8463 op1 = gen_label_rtx ();
8464 jumpifnot (TREE_OPERAND (exp, 0), op0);
8466 start_cleanup_deferral ();
8468 /* One branch of the cond can be void, if it never returns. For
8469 example A ? throw : E */
8470 if (temp != 0
8471 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8472 store_expr (TREE_OPERAND (exp, 1), temp,
8473 modifier == EXPAND_STACK_PARM ? 2 : 0);
8474 else
8475 expand_expr (TREE_OPERAND (exp, 1),
8476 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8477 end_cleanup_deferral ();
8478 emit_queue ();
8479 emit_jump_insn (gen_jump (op1));
8480 emit_barrier ();
8481 emit_label (op0);
8482 start_cleanup_deferral ();
8483 if (temp != 0
8484 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8485 store_expr (TREE_OPERAND (exp, 2), temp,
8486 modifier == EXPAND_STACK_PARM ? 2 : 0);
8487 else
8488 expand_expr (TREE_OPERAND (exp, 2),
8489 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8492 end_cleanup_deferral ();
8494 emit_queue ();
8495 emit_label (op1);
8496 OK_DEFER_POP;
8498 return temp;
8501 case TARGET_EXPR:
8503 /* Something needs to be initialized, but we didn't know
8504 where that thing was when building the tree. For example,
8505 it could be the return value of a function, or a parameter
8506 to a function that is laid down on the stack, or a temporary
8507 variable which must be passed by reference.
8509 We guarantee that the expression will either be constructed
8510 or copied into our original target. */
8512 tree slot = TREE_OPERAND (exp, 0);
8513 tree cleanups = NULL_TREE;
8514 tree exp1;
8516 if (TREE_CODE (slot) != VAR_DECL)
8517 abort ();
8519 if (! ignore)
8520 target = original_target;
8522 /* Set this here so that if we get a target that refers to a
8523 register variable that's already been used, put_reg_into_stack
8524 knows that it should fix up those uses. */
8525 TREE_USED (slot) = 1;
8527 if (target == 0)
8529 if (DECL_RTL_SET_P (slot))
8531 target = DECL_RTL (slot);
8532 /* If we have already expanded the slot, don't do
8533 it again. (mrs) */
8534 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8535 return target;
8537 else
8539 target = assign_temp (type, 2, 0, 1);
8540 /* All temp slots at this level must not conflict. */
8541 preserve_temp_slots (target);
8542 SET_DECL_RTL (slot, target);
8543 if (TREE_ADDRESSABLE (slot))
8544 put_var_into_stack (slot, /*rescan=*/false);
8546 /* Since SLOT is not known to the called function
8547 to belong to its stack frame, we must build an explicit
8548 cleanup. This case occurs when we must build up a reference
8549 to pass the reference as an argument. In this case,
8550 it is very likely that such a reference need not be
8551 built here. */
8553 if (TREE_OPERAND (exp, 2) == 0)
8554 TREE_OPERAND (exp, 2)
8555 = lang_hooks.maybe_build_cleanup (slot);
8556 cleanups = TREE_OPERAND (exp, 2);
8559 else
8561 /* This case does occur, when expanding a parameter which
8562 needs to be constructed on the stack. The target
8563 is the actual stack address that we want to initialize.
8564 The function we call will perform the cleanup in this case. */
8566 /* If we have already assigned it space, use that space,
8567 not the target that we were passed in, as our target
8568 parameter is only a hint. */
8569 if (DECL_RTL_SET_P (slot))
8571 target = DECL_RTL (slot);
8572 /* If we have already expanded the slot, don't do
8573 it again. (mrs) */
8574 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8575 return target;
8577 else
8579 SET_DECL_RTL (slot, target);
8580 /* If we must have an addressable slot, then make sure that
8581 the RTL that we just stored in slot is OK. */
8582 if (TREE_ADDRESSABLE (slot))
8583 put_var_into_stack (slot, /*rescan=*/true);
8587 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8588 /* Mark it as expanded. */
8589 TREE_OPERAND (exp, 1) = NULL_TREE;
8591 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8593 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8595 return target;
8598 case INIT_EXPR:
8600 tree lhs = TREE_OPERAND (exp, 0);
8601 tree rhs = TREE_OPERAND (exp, 1);
8603 temp = expand_assignment (lhs, rhs, ! ignore);
8604 return temp;
8607 case MODIFY_EXPR:
8609 /* If lhs is complex, expand calls in rhs before computing it.
8610 That's so we don't compute a pointer and save it over a
8611 call. If lhs is simple, compute it first so we can give it
8612 as a target if the rhs is just a call. This avoids an
8613 extra temp and copy, and prevents a partial subsumption
8614 which makes bad code. Actually we could treat
8615 component_ref's of vars like vars. */
8617 tree lhs = TREE_OPERAND (exp, 0);
8618 tree rhs = TREE_OPERAND (exp, 1);
8620 temp = 0;
8622 /* Check for |= or &= of a bitfield of size one into another bitfield
8623 of size 1. In this case, (unless we need the result of the
8624 assignment) we can do this more efficiently with a
8625 test followed by an assignment, if necessary.
8627 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8628 things change so we do, this code should be enhanced to
8629 support it. */
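/* Example (illustrative only): with one-bit fields, a.x |= b.y; is emitted as
   if (b.y) a.x = 1; -- a test and a branch around a constant store, instead
   of a read-modify-write of a.x.  */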
8630 if (ignore
8631 && TREE_CODE (lhs) == COMPONENT_REF
8632 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8633 || TREE_CODE (rhs) == BIT_AND_EXPR)
8634 && TREE_OPERAND (rhs, 0) == lhs
8635 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8636 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8637 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8639 rtx label = gen_label_rtx ();
8641 do_jump (TREE_OPERAND (rhs, 1),
8642 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8643 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8644 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8645 (TREE_CODE (rhs) == BIT_IOR_EXPR
8646 ? integer_one_node
8647 : integer_zero_node)),
8648 0);
8649 do_pending_stack_adjust ();
8650 emit_label (label);
8651 return const0_rtx;
8654 temp = expand_assignment (lhs, rhs, ! ignore);
8656 return temp;
8659 case RETURN_EXPR:
8660 if (!TREE_OPERAND (exp, 0))
8661 expand_null_return ();
8662 else
8663 expand_return (TREE_OPERAND (exp, 0));
8664 return const0_rtx;
8666 case PREINCREMENT_EXPR:
8667 case PREDECREMENT_EXPR:
8668 return expand_increment (exp, 0, ignore);
8670 case POSTINCREMENT_EXPR:
8671 case POSTDECREMENT_EXPR:
8672 /* Faster to treat as pre-increment if result is not used. */
8673 return expand_increment (exp, ! ignore, ignore);
8675 case ADDR_EXPR:
8676 if (modifier == EXPAND_STACK_PARM)
8677 target = 0;
8678 /* Are we taking the address of a nested function? */
8679 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8680 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8681 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8682 && ! TREE_STATIC (exp))
8684 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8685 op0 = force_operand (op0, target);
8687 /* If we are taking the address of something erroneous, just
8688 return a zero. */
8689 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8690 return const0_rtx;
8691 /* If we are taking the address of a constant and are at the
8692 top level, we have to use output_constant_def since we can't
8693 call force_const_mem at top level. */
8694 else if (cfun == 0
8695 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8696 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8697 == 'c')))
8698 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8699 else
8701 /* We make sure to pass const0_rtx down if we came in with
8702 ignore set, to avoid doing the cleanups twice for something. */
8703 op0 = expand_expr (TREE_OPERAND (exp, 0),
8704 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8705 (modifier == EXPAND_INITIALIZER
8706 ? modifier : EXPAND_CONST_ADDRESS));
8708 /* If we are going to ignore the result, OP0 will have been set
8709 to const0_rtx, so just return it. Don't get confused and
8710 think we are taking the address of the constant. */
8711 if (ignore)
8712 return op0;
8714 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8715 clever and return a REG when given a MEM. */
8716 op0 = protect_from_queue (op0, 1);
8718 /* We would like the object in memory. If it is a constant, we can
8719 have it be statically allocated into memory. For a non-constant,
8720 we need to allocate some memory and store the value into it. */
8722 if (CONSTANT_P (op0))
8723 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8724 op0);
8725 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8726 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8727 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8729 /* If the operand is a SAVE_EXPR, we can deal with this by
8730 forcing the SAVE_EXPR into memory. */
8731 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8733 put_var_into_stack (TREE_OPERAND (exp, 0),
8734 /*rescan=*/true);
8735 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8737 else
8739 /* If this object is in a register, it can't be BLKmode. */
8740 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8741 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8743 if (GET_CODE (op0) == PARALLEL)
8744 /* Handle calls that pass values in multiple
8745 non-contiguous locations. The Irix 6 ABI has examples
8746 of this. */
8747 emit_group_store (memloc, op0, inner_type,
8748 int_size_in_bytes (inner_type));
8749 else
8750 emit_move_insn (memloc, op0);
8752 op0 = memloc;
8756 if (GET_CODE (op0) != MEM)
8757 abort ();
8759 mark_temp_addr_taken (op0);
8760 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8762 op0 = XEXP (op0, 0);
8763 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8764 op0 = convert_memory_address (ptr_mode, op0);
8765 return op0;
8768 /* If OP0 is not aligned at least as much as the type requires, we
8769 need to make a temporary, copy OP0 to it, and take the address of
8770 the temporary. We want to use the alignment of the type, not of
8771 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8772 the test for BLKmode means that can't happen. The test for
8773 BLKmode is because we never make mis-aligned MEMs with
8774 non-BLKmode.
8776 We don't need to do this at all if the machine doesn't have
8777 strict alignment. */
8778 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8779 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8780 > MEM_ALIGN (op0))
8781 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8783 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8784 rtx new;
8786 if (TYPE_ALIGN_OK (inner_type))
8787 abort ();
8789 if (TREE_ADDRESSABLE (inner_type))
8791 /* We can't make a bitwise copy of this object, so fail. */
8792 error ("cannot take the address of an unaligned member");
8793 return const0_rtx;
8796 new = assign_stack_temp_for_type
8797 (TYPE_MODE (inner_type),
8798 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8799 : int_size_in_bytes (inner_type),
8800 1, build_qualified_type (inner_type,
8801 (TYPE_QUALS (inner_type)
8802 | TYPE_QUAL_CONST)));
8804 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8805 (modifier == EXPAND_STACK_PARM
8806 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8808 op0 = new;
8811 op0 = force_operand (XEXP (op0, 0), target);
8814 if (flag_force_addr
8815 && GET_CODE (op0) != REG
8816 && modifier != EXPAND_CONST_ADDRESS
8817 && modifier != EXPAND_INITIALIZER
8818 && modifier != EXPAND_SUM)
8819 op0 = force_reg (Pmode, op0);
8821 if (GET_CODE (op0) == REG
8822 && ! REG_USERVAR_P (op0))
8823 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8825 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8826 op0 = convert_memory_address (ptr_mode, op0);
8828 return op0;
8830 case ENTRY_VALUE_EXPR:
8831 abort ();
8833 /* COMPLEX type for Extended Pascal & Fortran */
8834 case COMPLEX_EXPR:
8836 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8837 rtx insns;
8839 /* Get the rtx code of the operands. */
8840 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8841 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8843 if (! target)
8844 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8846 start_sequence ();
8848 /* Move the real (op0) and imaginary (op1) parts to their location. */
8849 emit_move_insn (gen_realpart (mode, target), op0);
8850 emit_move_insn (gen_imagpart (mode, target), op1);
8852 insns = get_insns ();
8853 end_sequence ();
8855 /* Complex construction should appear as a single unit. */
8856 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8857 each with a separate pseudo as destination.
8858 It's not correct for flow to treat them as a unit. */
8859 if (GET_CODE (target) != CONCAT)
8860 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8861 else
8862 emit_insn (insns);
8864 return target;
8867 case REALPART_EXPR:
8868 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8869 return gen_realpart (mode, op0);
8871 case IMAGPART_EXPR:
8872 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8873 return gen_imagpart (mode, op0);
8875 case CONJ_EXPR:
8877 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8878 rtx imag_t;
8879 rtx insns;
8881 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8883 if (! target)
8884 target = gen_reg_rtx (mode);
8886 start_sequence ();
8888 /* Store the realpart and the negated imagpart to target. */
8889 emit_move_insn (gen_realpart (partmode, target),
8890 gen_realpart (partmode, op0));
8892 imag_t = gen_imagpart (partmode, target);
8893 temp = expand_unop (partmode,
8894 ! unsignedp && flag_trapv
8895 && (GET_MODE_CLASS(partmode) == MODE_INT)
8896 ? negv_optab : neg_optab,
8897 gen_imagpart (partmode, op0), imag_t, 0);
8898 if (temp != imag_t)
8899 emit_move_insn (imag_t, temp);
8901 insns = get_insns ();
8902 end_sequence ();
8904 /* Conjugate should appear as a single unit.
8905 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8906 each with a separate pseudo as destination.
8907 It's not correct for flow to treat them as a unit. */
8908 if (GET_CODE (target) != CONCAT)
8909 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8910 else
8911 emit_insn (insns);
8913 return target;
8916 case TRY_CATCH_EXPR:
8918 tree handler = TREE_OPERAND (exp, 1);
8920 expand_eh_region_start ();
8922 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8924 expand_eh_region_end_cleanup (handler);
8926 return op0;
8929 case TRY_FINALLY_EXPR:
8931 tree try_block = TREE_OPERAND (exp, 0);
8932 tree finally_block = TREE_OPERAND (exp, 1);
8934 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8936 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8937 is not sufficient, so we cannot expand the block twice.
8938 So we play games with GOTO_SUBROUTINE_EXPR to let us
8939 expand the thing only once. */
8940 /* When not optimizing, we go ahead with this form since
8941 (1) user breakpoints operate more predictably without
8942 code duplication, and
8943 (2) we're not running any of the global optimizers
8944 that would explode in time/space with the highly
8945 connected CFG created by the indirect branching. */
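/* Very roughly, the RTL emitted in this case has the shape

       <try_block>
       return_link = &&resume;       (the GOTO_SUBROUTINE_EXPR cleanup)
       goto finally;
     resume:
       goto done;
     finally:
       <finally_block>
       goto *return_link;
     done:

   so FINALLY_BLOCK is expanded exactly once and reached through an indirect
   jump instead of being duplicated on every exit path.  (This is only a
   source-level analogy; the labels are hypothetical.)  */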
8947 rtx finally_label = gen_label_rtx ();
8948 rtx done_label = gen_label_rtx ();
8949 rtx return_link = gen_reg_rtx (Pmode);
8950 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8951 (tree) finally_label, (tree) return_link);
8952 TREE_SIDE_EFFECTS (cleanup) = 1;
8954 /* Start a new binding layer that will keep track of all cleanup
8955 actions to be performed. */
8956 expand_start_bindings (2);
8957 target_temp_slot_level = temp_slot_level;
8959 expand_decl_cleanup (NULL_TREE, cleanup);
8960 op0 = expand_expr (try_block, target, tmode, modifier);
8962 preserve_temp_slots (op0);
8963 expand_end_bindings (NULL_TREE, 0, 0);
8964 emit_jump (done_label);
8965 emit_label (finally_label);
8966 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8967 emit_indirect_jump (return_link);
8968 emit_label (done_label);
8970 else
8972 expand_start_bindings (2);
8973 target_temp_slot_level = temp_slot_level;
8975 expand_decl_cleanup (NULL_TREE, finally_block);
8976 op0 = expand_expr (try_block, target, tmode, modifier);
8978 preserve_temp_slots (op0);
8979 expand_end_bindings (NULL_TREE, 0, 0);
8982 return op0;
8985 case GOTO_SUBROUTINE_EXPR:
8987 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8988 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8989 rtx return_address = gen_label_rtx ();
8990 emit_move_insn (return_link,
8991 gen_rtx_LABEL_REF (Pmode, return_address));
8992 emit_jump (subr);
8993 emit_label (return_address);
8994 return const0_rtx;
8997 case VA_ARG_EXPR:
8998 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9000 case EXC_PTR_EXPR:
9001 return get_exception_pointer (cfun);
9003 case FDESC_EXPR:
9004 /* Function descriptors are not valid except as initialization
9005 constants, and should not be expanded. */
9006 abort ();
9008 default:
9009 /* ??? Use (*fun) form because expand_expr is a macro. */
9010 return (*lang_hooks.expand_expr) (exp, original_target, tmode,
9011 modifier, alt_rtl);
9014 /* Here to do an ordinary binary operator, generating an instruction
9015 from the optab already placed in `this_optab'. */
9016 binop:
9017 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9018 subtarget, &op0, &op1, 0);
9019 binop2:
9020 if (modifier == EXPAND_STACK_PARM)
9021 target = 0;
9022 temp = expand_binop (mode, this_optab, op0, op1, target,
9023 unsignedp, OPTAB_LIB_WIDEN);
9024 if (temp == 0)
9025 abort ();
9026 return temp;
9029 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9030 when applied to the address of EXP produces an address known to be
9031 aligned more than BIGGEST_ALIGNMENT. */
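/* For instance, an OFFSET tree of the form

       (- (char *) &EXP) & 63

   (a NEGATE_EXPR of the address, masked with a power of two minus one that
   exceeds BIGGEST_ALIGNMENT / BITS_PER_UNIT) rounds &EXP + OFFSET up to the
   next 64-byte boundary, so the resulting address is known to be 64-byte
   aligned.  The constant 63 here is only an example.  */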
9033 static int
9034 is_aligning_offset (tree offset, tree exp)
9036 /* Strip off any conversions. */
9037 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9038 || TREE_CODE (offset) == NOP_EXPR
9039 || TREE_CODE (offset) == CONVERT_EXPR)
9040 offset = TREE_OPERAND (offset, 0);
9042 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9043 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9044 if (TREE_CODE (offset) != BIT_AND_EXPR
9045 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9046 || compare_tree_int (TREE_OPERAND (offset, 1),
9047 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9048 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9049 return 0;
9051 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9052 It must be NEGATE_EXPR. Then strip any more conversions. */
9053 offset = TREE_OPERAND (offset, 0);
9054 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9055 || TREE_CODE (offset) == NOP_EXPR
9056 || TREE_CODE (offset) == CONVERT_EXPR)
9057 offset = TREE_OPERAND (offset, 0);
9059 if (TREE_CODE (offset) != NEGATE_EXPR)
9060 return 0;
9062 offset = TREE_OPERAND (offset, 0);
9063 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9064 || TREE_CODE (offset) == NOP_EXPR
9065 || TREE_CODE (offset) == CONVERT_EXPR)
9066 offset = TREE_OPERAND (offset, 0);
9068 /* This must now be the address of EXP. */
9069 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9072 /* Return the tree node if ARG corresponds to a string constant, or zero
9073 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9074 in bytes within the string that ARG is accessing. The type of the
9075 offset will be `sizetype'. */
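/* For example, given an ARG corresponding to "hello" + 2 (an ADDR_EXPR of a
   STRING_CST inside a PLUS_EXPR), this returns the STRING_CST for "hello"
   and sets *PTR_OFFSET to a sizetype constant 2.  A plain &"hello" yields
   the same STRING_CST with *PTR_OFFSET set to zero.  */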
9077 tree
9078 string_constant (tree arg, tree *ptr_offset)
9080 STRIP_NOPS (arg);
9082 if (TREE_CODE (arg) == ADDR_EXPR
9083 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9085 *ptr_offset = size_zero_node;
9086 return TREE_OPERAND (arg, 0);
9088 else if (TREE_CODE (arg) == PLUS_EXPR)
9090 tree arg0 = TREE_OPERAND (arg, 0);
9091 tree arg1 = TREE_OPERAND (arg, 1);
9093 STRIP_NOPS (arg0);
9094 STRIP_NOPS (arg1);
9096 if (TREE_CODE (arg0) == ADDR_EXPR
9097 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9099 *ptr_offset = convert (sizetype, arg1);
9100 return TREE_OPERAND (arg0, 0);
9102 else if (TREE_CODE (arg1) == ADDR_EXPR
9103 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9105 *ptr_offset = convert (sizetype, arg0);
9106 return TREE_OPERAND (arg1, 0);
9110 return 0;
9113 /* Expand code for a post- or pre- increment or decrement
9114 and return the RTX for the result.
9115 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
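/* As a rough example: for a post-increment X++ whose value is used, the old
   value of X must survive, so we either queue the add (see the `if (post)'
   block below) or copy X to a temporary, add into X, and return the
   temporary.  For a pre-increment ++X a single add instruction usually
   suffices and X itself is returned.  */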
9117 static rtx
9118 expand_increment (tree exp, int post, int ignore)
9120 rtx op0, op1;
9121 rtx temp, value;
9122 tree incremented = TREE_OPERAND (exp, 0);
9123 optab this_optab = add_optab;
9124 int icode;
9125 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9126 int op0_is_copy = 0;
9127 int single_insn = 0;
9128 /* 1 means we can't store into OP0 directly,
9129 because it is a subreg narrower than a word,
9130 and we don't dare clobber the rest of the word. */
9131 int bad_subreg = 0;
9133 /* Stabilize any component ref that might need to be
9134 evaluated more than once below. */
9135 if (!post
9136 || TREE_CODE (incremented) == BIT_FIELD_REF
9137 || (TREE_CODE (incremented) == COMPONENT_REF
9138 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9139 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9140 incremented = stabilize_reference (incremented);
9141 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9142 ones into save exprs so that they don't accidentally get evaluated
9143 more than once by the code below. */
9144 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9145 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9146 incremented = save_expr (incremented);
9148 /* Compute the operands as RTX.
9149 Note whether OP0 is the actual lvalue or a copy of it:
9150 I believe it is a copy iff it is a register or subreg
9151 and insns were generated in computing it. */
9153 temp = get_last_insn ();
9154 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9156 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9157 in place but instead must do sign- or zero-extension during assignment,
9158 so we copy it into a new register and let the code below use it as
9159 a copy.
9161 Note that we can safely modify this SUBREG since it is known not to be
9162 shared (it was made by the expand_expr call above). */
9164 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9166 if (post)
9167 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9168 else
9169 bad_subreg = 1;
9171 else if (GET_CODE (op0) == SUBREG
9172 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9174 /* We cannot increment this SUBREG in place. If we are
9175 post-incrementing, get a copy of the old value. Otherwise,
9176 just mark that we cannot increment in place. */
9177 if (post)
9178 op0 = copy_to_reg (op0);
9179 else
9180 bad_subreg = 1;
9183 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9184 && temp != get_last_insn ());
9185 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9187 /* Decide whether incrementing or decrementing. */
9188 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9189 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9190 this_optab = sub_optab;
9192 /* Convert decrement by a constant into a negative increment. */
9193 if (this_optab == sub_optab
9194 && GET_CODE (op1) == CONST_INT)
9196 op1 = GEN_INT (-INTVAL (op1));
9197 this_optab = add_optab;
9200 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9201 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9203 /* For a preincrement, see if we can do this with a single instruction. */
9204 if (!post)
9206 icode = (int) this_optab->handlers[(int) mode].insn_code;
9207 if (icode != (int) CODE_FOR_nothing
9208 /* Make sure that OP0 is valid for operands 0 and 1
9209 of the insn we want to queue. */
9210 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9211 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9212 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9213 single_insn = 1;
9216 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9217 then we cannot just increment OP0. We must therefore contrive to
9218 increment the original value. Then, for postincrement, we can return
9219 OP0 since it is a copy of the old value. For preincrement, expand here
9220 unless we can do it with a single insn.
9222 Likewise if storing directly into OP0 would clobber high bits
9223 we need to preserve (bad_subreg). */
9224 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9226 /* This is the easiest way to increment the value wherever it is.
9227 Problems with multiple evaluation of INCREMENTED are prevented
9228 because either (1) it is a component_ref or preincrement,
9229 in which case it was stabilized above, or (2) it is an array_ref
9230 with constant index in an array in a register, which is
9231 safe to reevaluate. */
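/* E.g. when OP0 is only a copy of X (or the increment cannot be done in
   place), we simply build the tree X = X + 1 (MINUS_EXPR for a decrement)
   and expand that assignment; re-evaluating X here is harmless for the
   reasons given above.  */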
9232 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9233 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9234 ? MINUS_EXPR : PLUS_EXPR),
9235 TREE_TYPE (exp),
9236 incremented,
9237 TREE_OPERAND (exp, 1));
9239 while (TREE_CODE (incremented) == NOP_EXPR
9240 || TREE_CODE (incremented) == CONVERT_EXPR)
9242 newexp = convert (TREE_TYPE (incremented), newexp);
9243 incremented = TREE_OPERAND (incremented, 0);
9246 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9247 return post ? op0 : temp;
9250 if (post)
9252 /* We have a true reference to the value in OP0.
9253 If there is an insn to add or subtract in this mode, queue it.
9254 Queuing the increment insn avoids the register shuffling
9255 that often results if we must increment now and first save
9256 the old value for subsequent use. */
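/* For instance, for a post-increment of a variable whose old value is still
   needed, queueing the add lets that old value be used directly as the
   result, with the increment itself emitted later at the next emit_queue
   call, rather than first saving the old value in a temporary register.  */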
9258 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9259 op0 = stabilize (op0);
9260 #endif
9262 icode = (int) this_optab->handlers[(int) mode].insn_code;
9263 if (icode != (int) CODE_FOR_nothing
9264 /* Make sure that OP0 is valid for operands 0 and 1
9265 of the insn we want to queue. */
9266 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9267 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9269 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9270 op1 = force_reg (mode, op1);
9272 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9274 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9276 rtx addr = (general_operand (XEXP (op0, 0), mode)
9277 ? force_reg (Pmode, XEXP (op0, 0))
9278 : copy_to_reg (XEXP (op0, 0)));
9279 rtx temp, result;
9281 op0 = replace_equiv_address (op0, addr);
9282 temp = force_reg (GET_MODE (op0), op0);
9283 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9284 op1 = force_reg (mode, op1);
9286 /* The increment queue is LIFO, thus we have to `queue'
9287 the instructions in reverse order. */
9288 enqueue_insn (op0, gen_move_insn (op0, temp));
9289 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9290 return result;
9294 /* Preincrement, or we can't increment with one simple insn. */
9295 if (post)
9296 /* Save a copy of the value before inc or dec, to return it later. */
9297 temp = value = copy_to_reg (op0);
9298 else
9299 /* Arrange to return the incremented value. */
9300 /* Copy the rtx because expand_binop will protect from the queue,
9301 and the results of that would be invalid for us to return
9302 if our caller does emit_queue before using our result. */
9303 temp = copy_rtx (value = op0);
9305 /* Increment however we can. */
9306 op1 = expand_binop (mode, this_optab, value, op1, op0,
9307 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9309 /* Make sure the value is stored into OP0. */
9310 if (op1 != op0)
9311 emit_move_insn (op0, op1);
9313 return temp;
9316 /* Generate code to calculate EXP using a store-flag instruction
9317 and return an rtx for the result. EXP is either a comparison
9318 or a TRUTH_NOT_EXPR whose operand is a comparison.
9320 If TARGET is nonzero, store the result there if convenient.
9322 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9323 cheap.
9325 Return zero if there is no suitable set-flag instruction
9326 available on this machine.
9328 Once expand_expr has been called on the arguments of the comparison,
9329 we are committed to doing the store flag, since it is not safe to
9330 re-evaluate the expression. We emit the store-flag insn by calling
9331 emit_store_flag, but only expand the arguments if we have a reason
9332 to believe that emit_store_flag will be successful. If we think that
9333 it will, but it isn't, we have to simulate the store-flag with a
9334 set/jump/set sequence. */
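/* As a concrete example, for EXP equivalent to (A < B) on a machine with an
   scc pattern, emit_store_flag can produce a single instruction setting
   TARGET to 0 or 1.  When that fails, the code at the end of this function
   falls back to something like

       target = 1;  if (A < B) goto L;  target = 0;  L:

   (shown here in source form purely for illustration).  */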
9336 static rtx
9337 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9339 enum rtx_code code;
9340 tree arg0, arg1, type;
9341 tree tem;
9342 enum machine_mode operand_mode;
9343 int invert = 0;
9344 int unsignedp;
9345 rtx op0, op1;
9346 enum insn_code icode;
9347 rtx subtarget = target;
9348 rtx result, label;
9350 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9351 result at the end. We can't simply invert the test since it would
9352 have already been inverted if it were valid. This case occurs for
9353 some floating-point comparisons. */
9355 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9356 invert = 1, exp = TREE_OPERAND (exp, 0);
9358 arg0 = TREE_OPERAND (exp, 0);
9359 arg1 = TREE_OPERAND (exp, 1);
9361 /* Don't crash if the comparison was erroneous. */
9362 if (arg0 == error_mark_node || arg1 == error_mark_node)
9363 return const0_rtx;
9365 type = TREE_TYPE (arg0);
9366 operand_mode = TYPE_MODE (type);
9367 unsignedp = TYPE_UNSIGNED (type);
9369 /* We won't bother with BLKmode store-flag operations because it would mean
9370 passing a lot of information to emit_store_flag. */
9371 if (operand_mode == BLKmode)
9372 return 0;
9374 /* We won't bother with store-flag operations involving function pointers
9375 when function pointers must be canonicalized before comparisons. */
9376 #ifdef HAVE_canonicalize_funcptr_for_compare
9377 if (HAVE_canonicalize_funcptr_for_compare
9378 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9379 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9380 == FUNCTION_TYPE))
9381 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9382 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9383 == FUNCTION_TYPE))))
9384 return 0;
9385 #endif
9387 STRIP_NOPS (arg0);
9388 STRIP_NOPS (arg1);
9390 /* Get the rtx comparison code to use. We know that EXP is a comparison
9391 operation of some type. Some comparisons against 1 and -1 can be
9392 converted to comparisons with zero. Do so here so that the tests
9393 below will be aware that we have a comparison with zero. These
9394 tests will not catch constants in the first operand, but constants
9395 are rarely passed as the first operand. */
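/* For example, an unsigned X >= 1 is handled as X > 0 (GTU below), and a
   signed X > -1 as X >= 0 (GE); comparisons against zero are what the
   special cases further down look for.  */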
9397 switch (TREE_CODE (exp))
9399 case EQ_EXPR:
9400 code = EQ;
9401 break;
9402 case NE_EXPR:
9403 code = NE;
9404 break;
9405 case LT_EXPR:
9406 if (integer_onep (arg1))
9407 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9408 else
9409 code = unsignedp ? LTU : LT;
9410 break;
9411 case LE_EXPR:
9412 if (! unsignedp && integer_all_onesp (arg1))
9413 arg1 = integer_zero_node, code = LT;
9414 else
9415 code = unsignedp ? LEU : LE;
9416 break;
9417 case GT_EXPR:
9418 if (! unsignedp && integer_all_onesp (arg1))
9419 arg1 = integer_zero_node, code = GE;
9420 else
9421 code = unsignedp ? GTU : GT;
9422 break;
9423 case GE_EXPR:
9424 if (integer_onep (arg1))
9425 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9426 else
9427 code = unsignedp ? GEU : GE;
9428 break;
9430 case UNORDERED_EXPR:
9431 code = UNORDERED;
9432 break;
9433 case ORDERED_EXPR:
9434 code = ORDERED;
9435 break;
9436 case UNLT_EXPR:
9437 code = UNLT;
9438 break;
9439 case UNLE_EXPR:
9440 code = UNLE;
9441 break;
9442 case UNGT_EXPR:
9443 code = UNGT;
9444 break;
9445 case UNGE_EXPR:
9446 code = UNGE;
9447 break;
9448 case UNEQ_EXPR:
9449 code = UNEQ;
9450 break;
9452 default:
9453 abort ();
9456 /* Put a constant second. */
9457 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9459 tem = arg0; arg0 = arg1; arg1 = tem;
9460 code = swap_condition (code);
9463 /* If this is an equality or inequality test of a single bit, we can
9464 do this by shifting the bit being tested to the low-order bit and
9465 masking the result with the constant 1. If the condition was EQ,
9466 we xor it with 1. This does not require an scc insn and is faster
9467 than an scc insn even if we have it.
9469 The code to make this transformation was moved into fold_single_bit_test,
9470 so we just call into the folder and expand its result. */
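/* For instance, (X & 8) != 0 folds to (X >> 3) & 1, and (X & 8) == 0 to
   ((X >> 3) & 1) ^ 1, so neither form needs an scc instruction.  (The
   constants here are illustrative.)  */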
9472 if ((code == NE || code == EQ)
9473 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9474 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9476 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9477 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9478 arg0, arg1, type),
9479 target, VOIDmode, EXPAND_NORMAL);
9482 /* Now see if we are likely to be able to do this. Return if not. */
9483 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9484 return 0;
9486 icode = setcc_gen_code[(int) code];
9487 if (icode == CODE_FOR_nothing
9488 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9490 /* We can only do this if it is one of the special cases that
9491 can be handled without an scc insn. */
9492 if ((code == LT && integer_zerop (arg1))
9493 || (! only_cheap && code == GE && integer_zerop (arg1)))
9495 else if (BRANCH_COST >= 0
9496 && ! only_cheap && (code == NE || code == EQ)
9497 && TREE_CODE (type) != REAL_TYPE
9498 && ((abs_optab->handlers[(int) operand_mode].insn_code
9499 != CODE_FOR_nothing)
9500 || (ffs_optab->handlers[(int) operand_mode].insn_code
9501 != CODE_FOR_nothing)))
9503 else
9504 return 0;
9507 if (! get_subtarget (target)
9508 || GET_MODE (subtarget) != operand_mode)
9509 subtarget = 0;
9511 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9513 if (target == 0)
9514 target = gen_reg_rtx (mode);
9516 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9517 because, if the emit_store_flag does anything it will succeed and
9518 OP0 and OP1 will not be used subsequently. */
9520 result = emit_store_flag (target, code,
9521 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9522 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9523 operand_mode, unsignedp, 1);
9525 if (result)
9527 if (invert)
9528 result = expand_binop (mode, xor_optab, result, const1_rtx,
9529 result, 0, OPTAB_LIB_WIDEN);
9530 return result;
9533 /* If this failed, we have to do this with set/compare/jump/set code. */
9534 if (GET_CODE (target) != REG
9535 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9536 target = gen_reg_rtx (GET_MODE (target));
9538 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9539 result = compare_from_rtx (op0, op1, code, unsignedp,
9540 operand_mode, NULL_RTX);
9541 if (GET_CODE (result) == CONST_INT)
9542 return (((result == const0_rtx && ! invert)
9543 || (result != const0_rtx && invert))
9544 ? const0_rtx : const1_rtx);
9546 /* The code of RESULT may not match CODE if compare_from_rtx
9547 decided to swap its operands and reverse the original code.
9549 We know that compare_from_rtx returns either a CONST_INT or
9550 a new comparison code, so it is safe to just extract the
9551 code from RESULT. */
9552 code = GET_CODE (result);
9554 label = gen_label_rtx ();
9555 if (bcc_gen_fctn[(int) code] == 0)
9556 abort ();
9558 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9559 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9560 emit_label (label);
9562 return target;
9566 /* Stubs in case we haven't got a casesi insn. */
9567 #ifndef HAVE_casesi
9568 # define HAVE_casesi 0
9569 # define gen_casesi(a, b, c, d, e) (0)
9570 # define CODE_FOR_casesi CODE_FOR_nothing
9571 #endif
9573 /* If the machine does not have a case insn that compares the bounds,
9574 this means extra overhead for dispatch tables, which raises the
9575 threshold for using them. */
9576 #ifndef CASE_VALUES_THRESHOLD
9577 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9578 #endif /* CASE_VALUES_THRESHOLD */
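/* Roughly speaking, a switch with fewer case values than this threshold is
   expanded as a tree of compares and branches rather than as a dispatch
   table; targets without a casesi pattern get the slightly higher default
   of 5 (vs. 4) to account for the extra bounds-check overhead.  */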
9580 unsigned int
9581 case_values_threshold (void)
9583 return CASE_VALUES_THRESHOLD;
9586 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9587 0 otherwise (i.e. if there is no casesi instruction). */
9589 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9590 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9592 enum machine_mode index_mode = SImode;
9593 int index_bits = GET_MODE_BITSIZE (index_mode);
9594 rtx op1, op2, index;
9595 enum machine_mode op_mode;
9597 if (! HAVE_casesi)
9598 return 0;
9600 /* Convert the index to SImode. */
9601 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9603 enum machine_mode omode = TYPE_MODE (index_type);
9604 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9606 /* We must handle the endpoints in the original mode. */
9607 index_expr = build (MINUS_EXPR, index_type,
9608 index_expr, minval);
9609 minval = integer_zero_node;
9610 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9611 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9612 omode, 1, default_label);
9613 /* Now we can safely truncate. */
9614 index = convert_to_mode (index_mode, index, 0);
9616 else
9618 if (TYPE_MODE (index_type) != index_mode)
9620 index_expr = convert (lang_hooks.types.type_for_size
9621 (index_bits, 0), index_expr);
9622 index_type = TREE_TYPE (index_expr);
9625 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9627 emit_queue ();
9628 index = protect_from_queue (index, 0);
9629 do_pending_stack_adjust ();
9631 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9632 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9633 (index, op_mode))
9634 index = copy_to_mode_reg (op_mode, index);
9636 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9638 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9639 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9640 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9641 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9642 (op1, op_mode))
9643 op1 = copy_to_mode_reg (op_mode, op1);
9645 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9647 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9648 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9649 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9650 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9651 (op2, op_mode))
9652 op2 = copy_to_mode_reg (op_mode, op2);
9654 emit_jump_insn (gen_casesi (index, op1, op2,
9655 table_label, default_label));
9656 return 1;
9659 /* Attempt to generate a tablejump instruction; same concept. */
9660 #ifndef HAVE_tablejump
9661 #define HAVE_tablejump 0
9662 #define gen_tablejump(x, y) (0)
9663 #endif
9665 /* Subroutine of the next function.
9667 INDEX is the value being switched on, with the lowest value
9668 in the table already subtracted.
9669 MODE is its expected mode (needed if INDEX is constant).
9670 RANGE is the length of the jump table.
9671 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9673 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9674 index value is out of range. */
9676 static void
9677 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9678 rtx default_label)
9680 rtx temp, vector;
9682 if (INTVAL (range) > cfun->max_jumptable_ents)
9683 cfun->max_jumptable_ents = INTVAL (range);
9685 /* Do an unsigned comparison (in the proper mode) between the index
9686 expression and the value which represents the length of the range.
9687 Since we just finished subtracting the lower bound of the range
9688 from the index expression, this comparison allows us to simultaneously
9689 check that the original index expression value is both greater than
9690 or equal to the minimum value of the range and less than or equal to
9691 the maximum value of the range. */
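/* Worked example: for case values 10 .. 30, RANGE is 20 and INDEX has
   already had 10 subtracted.  An original value of 7 wraps to a huge
   unsigned INDEX after the subtraction, and a value of 35 becomes 25; both
   fail the unsigned INDEX <= RANGE test and jump to DEFAULT_LABEL, while
   values 10 .. 30 map to 0 .. 20 and fall through to the table jump.  */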
9693 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9694 default_label);
9696 /* If index is in range, it must fit in Pmode.
9697 Convert to Pmode so we can index with it. */
9698 if (mode != Pmode)
9699 index = convert_to_mode (Pmode, index, 1);
9701 /* Don't let a MEM slip through, because then INDEX that comes
9702 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9703 and break_out_memory_refs will go to work on it and mess it up. */
9704 #ifdef PIC_CASE_VECTOR_ADDRESS
9705 if (flag_pic && GET_CODE (index) != REG)
9706 index = copy_to_mode_reg (Pmode, index);
9707 #endif
9709 /* If flag_force_addr were to affect this address
9710 it could interfere with the tricky assumptions made
9711 about addresses that contain label-refs,
9712 which may be valid only very near the tablejump itself. */
9713 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9714 GET_MODE_SIZE, because this indicates how large insns are. The other
9715 uses should all be Pmode, because they are addresses. This code
9716 could fail if addresses and insns are not the same size. */
9717 index = gen_rtx_PLUS (Pmode,
9718 gen_rtx_MULT (Pmode, index,
9719 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9720 gen_rtx_LABEL_REF (Pmode, table_label));
9721 #ifdef PIC_CASE_VECTOR_ADDRESS
9722 if (flag_pic)
9723 index = PIC_CASE_VECTOR_ADDRESS (index);
9724 else
9725 #endif
9726 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9727 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9728 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9729 RTX_UNCHANGING_P (vector) = 1;
9730 MEM_NOTRAP_P (vector) = 1;
9731 convert_move (temp, vector, 0);
9733 emit_jump_insn (gen_tablejump (temp, table_label));
9735 /* If we are generating PIC code or if the table is PC-relative, the
9736 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9737 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9738 emit_barrier ();
9742 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9743 rtx table_label, rtx default_label)
9745 rtx index;
9747 if (! HAVE_tablejump)
9748 return 0;
9750 index_expr = fold (build (MINUS_EXPR, index_type,
9751 convert (index_type, index_expr),
9752 convert (index_type, minval)));
9753 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9754 emit_queue ();
9755 index = protect_from_queue (index, 0);
9756 do_pending_stack_adjust ();
9758 do_tablejump (index, TYPE_MODE (index_type),
9759 convert_modes (TYPE_MODE (index_type),
9760 TYPE_MODE (TREE_TYPE (range)),
9761 expand_expr (range, NULL_RTX,
9762 VOIDmode, 0),
9763 TYPE_UNSIGNED (TREE_TYPE (range))),
9764 table_label, default_label);
9765 return 1;
9768 /* Nonzero if the mode is a valid vector mode for this architecture.
9769 This returns nonzero even if there is no hardware support for the
9770 vector mode, but we can emulate with narrower modes. */
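/* For example, V2SImode may be reported valid on a target with no vector
   hardware at all, as long as SImode moves exist; later expansion can then
   fall back to operating on the SImode pieces.  (The mode names here are
   purely illustrative.)  */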
9773 vector_mode_valid_p (enum machine_mode mode)
9775 enum mode_class class = GET_MODE_CLASS (mode);
9776 enum machine_mode innermode;
9778 /* Doh! What's going on? */
9779 if (class != MODE_VECTOR_INT
9780 && class != MODE_VECTOR_FLOAT)
9781 return 0;
9783 /* Hardware support. Woo hoo! */
9784 if (VECTOR_MODE_SUPPORTED_P (mode))
9785 return 1;
9787 innermode = GET_MODE_INNER (mode);
9789 /* We should probably return 1 if requesting V4DI and we have no DI
9790 but do have V2DI; however, that case is probably very unlikely. */
9792 /* If we have support for the inner mode, we can safely emulate it.
9793 We may not have V2DI, but we can emulate with a pair of DIs. */
9794 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9797 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
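/* For instance, a V4SImode VECTOR_CST listing the elements 1, 2 and 3
   becomes (const_vector:V4SI [1 2 3 0]); elements missing from the end of
   the list are filled in with zero, and an all-zeros vector is returned
   directly as CONST0_RTX.  (The values are illustrative.)  */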
9798 static rtx
9799 const_vector_from_tree (tree exp)
9801 rtvec v;
9802 int units, i;
9803 tree link, elt;
9804 enum machine_mode inner, mode;
9806 mode = TYPE_MODE (TREE_TYPE (exp));
9808 if (is_zeros_p (exp))
9809 return CONST0_RTX (mode);
9811 units = GET_MODE_NUNITS (mode);
9812 inner = GET_MODE_INNER (mode);
9814 v = rtvec_alloc (units);
9816 link = TREE_VECTOR_CST_ELTS (exp);
9817 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9819 elt = TREE_VALUE (link);
9821 if (TREE_CODE (elt) == REAL_CST)
9822 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9823 inner);
9824 else
9825 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9826 TREE_INT_CST_HIGH (elt),
9827 inner);
9830 /* Initialize remaining elements to 0. */
9831 for (; i < units; ++i)
9832 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9834 return gen_rtx_raw_CONST_VECTOR (mode, v);
9837 #include "gt-expr.h"