gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
58 #ifdef PUSH_ROUNDING
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
66 #endif
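/* For illustration (not part of the original source): on a conventional
   x86-style target the stack grows downward, ARGS_GROW_DOWNWARD is not
   defined, and PUSH_ROUNDING is defined, so the test above holds and
   PUSH_ARGS_REVERSED gets defined, i.e. arguments are processed from
   last to first.  */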
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
76 /* Assume that case vectors are not pc-relative. */
77 #ifndef CASE_VECTOR_PC_RELATIVE
78 #define CASE_VECTOR_PC_RELATIVE 0
79 #endif
81 /* Convert defined/undefined to boolean. */
82 #ifdef TARGET_MEM_FUNCTIONS
83 #undef TARGET_MEM_FUNCTIONS
84 #define TARGET_MEM_FUNCTIONS 1
85 #else
86 #define TARGET_MEM_FUNCTIONS 0
87 #endif
90 /* If this is nonzero, we do not bother generating VOLATILE
91 around volatile memory references, and we are willing to
92 output indirect addresses. If cse is to follow, we reject
93 indirect addresses so a useful potential cse is generated;
94 if it is used only once, instruction combination will produce
95 the same indirect address eventually. */
96 int cse_not_expected;
98 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
99 tree placeholder_list = 0;
101 /* This structure is used by move_by_pieces to describe the move to
102 be performed. */
103 struct move_by_pieces
105 rtx to;
106 rtx to_addr;
107 int autinc_to;
108 int explicit_inc_to;
109 rtx from;
110 rtx from_addr;
111 int autinc_from;
112 int explicit_inc_from;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 int reverse;
 118 /* This structure is used by store_by_pieces to describe the store to
 119    be performed.  */
121 struct store_by_pieces
123 rtx to;
124 rtx to_addr;
125 int autinc_to;
126 int explicit_inc_to;
127 unsigned HOST_WIDE_INT len;
128 HOST_WIDE_INT offset;
129 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
130 void *constfundata;
131 int reverse;
134 static rtx enqueue_insn (rtx, rtx);
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
136 unsigned int);
137 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *);
139 static bool block_move_libcall_safe_for_call_parm (void);
140 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
141 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
142 static tree emit_block_move_libcall_fn (int);
143 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
144 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
145 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
146 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
147 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
148 struct store_by_pieces *);
149 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
150 static rtx clear_storage_via_libcall (rtx, rtx);
151 static tree clear_storage_libcall_fn (int);
152 static rtx compress_float_constant (rtx, rtx);
153 static rtx get_subtarget (rtx);
154 static int is_zeros_p (tree);
155 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
156 HOST_WIDE_INT, enum machine_mode,
157 tree, tree, int, int);
158 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
159 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int, tree, int);
161 static rtx var_rtx (tree);
163 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
164 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
166 static int is_aligning_offset (tree, tree);
167 static rtx expand_increment (tree, int, int);
168 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
169 enum expand_modifier);
170 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
171 #ifdef PUSH_ROUNDING
172 static void emit_single_push_insn (enum machine_mode, rtx, tree);
173 #endif
174 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
175 static rtx const_vector_from_tree (tree);
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
184 /* Record for each mode whether we can float-extend from memory. */
186 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
188 /* If a memory-to-memory move would take MOVE_RATIO or more simple
189 move-instruction sequences, we will do a movstr or libcall instead. */
191 #ifndef MOVE_RATIO
192 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
193 #define MOVE_RATIO 2
194 #else
195 /* If we are optimizing for space (-Os), cut down the default move ratio. */
196 #define MOVE_RATIO (optimize_size ? 3 : 15)
197 #endif
198 #endif
200 /* This macro is used to determine whether move_by_pieces should be called
201 to perform a structure copy. */
202 #ifndef MOVE_BY_PIECES_P
203 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
204 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
205 #endif
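/* Worked example (illustrative, not part of the original source): on a
   64-bit target with no movstr pattern and not optimizing for size,
   MOVE_RATIO is 15.  A fully aligned 16-byte copy needs two DImode
   moves, so move_by_pieces_ninsns returns 2, MOVE_BY_PIECES_P is true,
   and the copy is expanded inline.  If the target did provide a movstr
   pattern, MOVE_RATIO would be 2 and the same copy would instead go
   through emit_block_move_via_movstr or a libcall.  */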
207 /* If a clear memory operation would take CLEAR_RATIO or more simple
208 move-instruction sequences, we will do a clrstr or libcall instead. */
210 #ifndef CLEAR_RATIO
211 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
212 #define CLEAR_RATIO 2
213 #else
214 /* If we are optimizing for space, cut down the default clear ratio. */
215 #define CLEAR_RATIO (optimize_size ? 3 : 15)
216 #endif
217 #endif
219 /* This macro is used to determine whether clear_by_pieces should be
220 called to clear storage. */
221 #ifndef CLEAR_BY_PIECES_P
222 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
223 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
224 #endif
226 /* This macro is used to determine whether store_by_pieces should be
227 called to "memset" storage with byte values other than zero, or
228 to "memcpy" storage when the source is a constant string. */
229 #ifndef STORE_BY_PIECES_P
230 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
231 #endif
233 /* This array records the insn_code of insns to perform block moves. */
234 enum insn_code movstr_optab[NUM_MACHINE_MODES];
236 /* This array records the insn_code of insns to perform block clears. */
237 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
239 /* These arrays record the insn_code of two different kinds of insns
240 to perform block compares. */
241 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
242 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
244 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
245 struct file_stack *expr_wfl_stack;
247 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
249 #ifndef SLOW_UNALIGNED_ACCESS
250 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
251 #endif
253 /* This is run once per compilation to set up which modes can be used
254 directly in memory and to initialize the block move optab. */
256 void
257 init_expr_once (void)
259 rtx insn, pat;
260 enum machine_mode mode;
261 int num_clobbers;
262 rtx mem, mem1;
263 rtx reg;
265 /* Try indexing by frame ptr and try by stack ptr.
266 It is known that on the Convex the stack ptr isn't a valid index.
267 With luck, one or the other is valid on any machine. */
268 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
269 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
271 /* A scratch register we can modify in-place below to avoid
272 useless RTL allocations. */
273 reg = gen_rtx_REG (VOIDmode, -1);
275 insn = rtx_alloc (INSN);
276 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
277 PATTERN (insn) = pat;
279 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
280 mode = (enum machine_mode) ((int) mode + 1))
282 int regno;
284 direct_load[(int) mode] = direct_store[(int) mode] = 0;
285 PUT_MODE (mem, mode);
286 PUT_MODE (mem1, mode);
287 PUT_MODE (reg, mode);
289 /* See if there is some register that can be used in this mode and
290 directly loaded or stored from memory. */
292 if (mode != VOIDmode && mode != BLKmode)
293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
294 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
295 regno++)
297 if (! HARD_REGNO_MODE_OK (regno, mode))
298 continue;
300 REGNO (reg) = regno;
302 SET_SRC (pat) = mem;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
307 SET_SRC (pat) = mem1;
308 SET_DEST (pat) = reg;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_load[(int) mode] = 1;
312 SET_SRC (pat) = reg;
313 SET_DEST (pat) = mem;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
317 SET_SRC (pat) = reg;
318 SET_DEST (pat) = mem1;
319 if (recog (pat, insn, &num_clobbers) >= 0)
320 direct_store[(int) mode] = 1;
324 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
326 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
327 mode = GET_MODE_WIDER_MODE (mode))
329 enum machine_mode srcmode;
330 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
331 srcmode = GET_MODE_WIDER_MODE (srcmode))
333 enum insn_code ic;
335 ic = can_extend_p (mode, srcmode, 0);
336 if (ic == CODE_FOR_nothing)
337 continue;
339 PUT_MODE (mem, srcmode);
341 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
342 float_extend_from_mem[mode][srcmode] = true;
347 /* This is run at the start of compiling a function. */
349 void
350 init_expr (void)
352 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
355 /* Small sanity check that the queue is empty at the end of a function. */
357 void
358 finish_expr_for_function (void)
360 if (pending_chain)
361 abort ();
364 /* Manage the queue of increment instructions to be output
365 for POSTINCREMENT_EXPR expressions, etc. */
367 /* Queue up to increment (or change) VAR later. BODY says how:
368 BODY should be the same thing you would pass to emit_insn
369 to increment right away. It will go to emit_insn later on.
371 The value is a QUEUED expression to be used in place of VAR
372 where you want to guarantee the pre-incrementation value of VAR. */
374 static rtx
375 enqueue_insn (rtx var, rtx body)
377 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
378 body, pending_chain);
379 return pending_chain;
382 /* Use protect_from_queue to convert a QUEUED expression
383 into something that you can put immediately into an instruction.
384 If the queued incrementation has not happened yet,
385 protect_from_queue returns the variable itself.
386 If the incrementation has happened, protect_from_queue returns a temp
387 that contains a copy of the old value of the variable.
389 Any time an rtx which might possibly be a QUEUED is to be put
390 into an instruction, it must be passed through protect_from_queue first.
391 QUEUED expressions are not meaningful in instructions.
393 Do not pass a value through protect_from_queue and then hold
394 on to it for a while before putting it in an instruction!
395 If the queue is flushed in between, incorrect code will result. */
398 protect_from_queue (rtx x, int modify)
400 RTX_CODE code = GET_CODE (x);
402 #if 0 /* A QUEUED can hang around after the queue is forced out. */
403 /* Shortcut for most common case. */
404 if (pending_chain == 0)
405 return x;
406 #endif
408 if (code != QUEUED)
410 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
411 use of autoincrement. Make a copy of the contents of the memory
412 location rather than a copy of the address, but not if the value is
413 of mode BLKmode. Don't modify X in place since it might be
414 shared. */
415 if (code == MEM && GET_MODE (x) != BLKmode
416 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
418 rtx y = XEXP (x, 0);
419 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
421 if (QUEUED_INSN (y))
423 rtx temp = gen_reg_rtx (GET_MODE (x));
425 emit_insn_before (gen_move_insn (temp, new),
426 QUEUED_INSN (y));
427 return temp;
430 /* Copy the address into a pseudo, so that the returned value
431 remains correct across calls to emit_queue. */
432 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
435 /* Otherwise, recursively protect the subexpressions of all
436 the kinds of rtx's that can contain a QUEUED. */
437 if (code == MEM)
439 rtx tem = protect_from_queue (XEXP (x, 0), 0);
440 if (tem != XEXP (x, 0))
442 x = copy_rtx (x);
443 XEXP (x, 0) = tem;
446 else if (code == PLUS || code == MULT)
448 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
449 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
450 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
452 x = copy_rtx (x);
453 XEXP (x, 0) = new0;
454 XEXP (x, 1) = new1;
457 return x;
459 /* If the increment has not happened, use the variable itself. Copy it
460 into a new pseudo so that the value remains correct across calls to
461 emit_queue. */
462 if (QUEUED_INSN (x) == 0)
463 return copy_to_reg (QUEUED_VAR (x));
464 /* If the increment has happened and a pre-increment copy exists,
465 use that copy. */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
472 QUEUED_INSN (x));
473 return QUEUED_COPY (x);
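/* Typical use (illustrative, not part of the original source), following
   the pattern in emit_block_move below: every rtx that might contain a
   QUEUED is filtered before being placed in an insn, and the queue is
   flushed before the protected values can go stale.

	x = protect_from_queue (x, 1);		destination; MODIFY is nonzero
	y = protect_from_queue (y, 0);		source; read only
	size = protect_from_queue (size, 0);
	... emit the insns that use X, Y and SIZE ...
	emit_queue ();
*/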
476 /* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
482 queued_subexp_p (rtx x)
484 enum rtx_code code = GET_CODE (x);
485 switch (code)
487 case QUEUED:
488 return 1;
489 case MEM:
490 return queued_subexp_p (XEXP (x, 0));
491 case MULT:
492 case PLUS:
493 case MINUS:
494 return (queued_subexp_p (XEXP (x, 0))
495 || queued_subexp_p (XEXP (x, 1)));
496 default:
497 return 0;
501 /* Perform all the pending incrementations. */
503 void
504 emit_queue (void)
506 rtx p;
507 while ((p = pending_chain))
509 rtx body = QUEUED_BODY (p);
511 switch (GET_CODE (body))
513 case INSN:
514 case JUMP_INSN:
515 case CALL_INSN:
516 case CODE_LABEL:
517 case BARRIER:
518 case NOTE:
519 QUEUED_INSN (p) = body;
520 emit_insn (body);
521 break;
523 #ifdef ENABLE_CHECKING
524 case SEQUENCE:
525 abort ();
526 break;
527 #endif
529 default:
530 QUEUED_INSN (p) = emit_insn (body);
531 break;
534 pending_chain = QUEUED_NEXT (p);
538 /* Copy data from FROM to TO, where the machine modes are not the same.
539 Both modes may be integer, or both may be floating.
540 UNSIGNEDP should be nonzero if FROM is an unsigned type.
541 This causes zero-extension instead of sign-extension. */
543 void
544 convert_move (rtx to, rtx from, int unsignedp)
546 enum machine_mode to_mode = GET_MODE (to);
547 enum machine_mode from_mode = GET_MODE (from);
548 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
549 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
550 enum insn_code code;
551 rtx libcall;
553 /* rtx code for making an equivalent value. */
554 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
555 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
557 to = protect_from_queue (to, 1);
558 from = protect_from_queue (from, 0);
560 if (to_real != from_real)
561 abort ();
563 /* If FROM is a SUBREG that indicates that we have already done at least
564 the required extension, strip it. We don't handle such SUBREGs as
565 TO here. */
567 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
568 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
569 >= GET_MODE_SIZE (to_mode))
570 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
571 from = gen_lowpart (to_mode, from), from_mode = to_mode;
573 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
574 abort ();
576 if (to_mode == from_mode
577 || (from_mode == VOIDmode && CONSTANT_P (from)))
579 emit_move_insn (to, from);
580 return;
583 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
585 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
586 abort ();
588 if (VECTOR_MODE_P (to_mode))
589 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
590 else
591 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
593 emit_move_insn (to, from);
594 return;
597 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
599 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
600 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
601 return;
604 if (to_real)
606 rtx value, insns;
607 convert_optab tab;
609 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
610 tab = sext_optab;
611 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
612 tab = trunc_optab;
613 else
614 abort ();
616 /* Try converting directly if the insn is supported. */
618 code = tab->handlers[to_mode][from_mode].insn_code;
619 if (code != CODE_FOR_nothing)
621 emit_unop_insn (code, to, from,
622 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
623 return;
626 /* Otherwise use a libcall. */
627 libcall = tab->handlers[to_mode][from_mode].libfunc;
629 if (!libcall)
630 /* This conversion is not implemented yet. */
631 abort ();
633 start_sequence ();
634 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
635 1, from, from_mode);
636 insns = get_insns ();
637 end_sequence ();
638 emit_libcall_block (insns, to, value,
639 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
640 from)
641 : gen_rtx_FLOAT_EXTEND (to_mode, from));
642 return;
645 /* Handle pointer conversion. */ /* SPEE 900220. */
646 /* Targets are expected to provide conversion insns between PxImode and
647 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
648 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
650 enum machine_mode full_mode
651 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
653 if (trunc_optab->handlers[to_mode][full_mode].insn_code
654 == CODE_FOR_nothing)
655 abort ();
657 if (full_mode != from_mode)
658 from = convert_to_mode (full_mode, from, unsignedp);
659 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
660 to, from, UNKNOWN);
661 return;
663 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
665 enum machine_mode full_mode
666 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
668 if (sext_optab->handlers[full_mode][from_mode].insn_code
669 == CODE_FOR_nothing)
670 abort ();
672 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
673 to, from, UNKNOWN);
674 if (to_mode == full_mode)
675 return;
677 /* else proceed to integer conversions below */
678 from_mode = full_mode;
681 /* Now both modes are integers. */
683 /* Handle expanding beyond a word. */
684 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
685 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
687 rtx insns;
688 rtx lowpart;
689 rtx fill_value;
690 rtx lowfrom;
691 int i;
692 enum machine_mode lowpart_mode;
693 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
695 /* Try converting directly if the insn is supported. */
696 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
697 != CODE_FOR_nothing)
699 /* If FROM is a SUBREG, put it into a register. Do this
700 so that we always generate the same set of insns for
701 better cse'ing; if an intermediate assignment occurred,
702 we won't be doing the operation directly on the SUBREG. */
703 if (optimize > 0 && GET_CODE (from) == SUBREG)
704 from = force_reg (from_mode, from);
705 emit_unop_insn (code, to, from, equiv_code);
706 return;
708 /* Next, try converting via full word. */
709 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
710 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
711 != CODE_FOR_nothing))
713 if (GET_CODE (to) == REG)
714 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
715 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
716 emit_unop_insn (code, to,
717 gen_lowpart (word_mode, to), equiv_code);
718 return;
721 /* No special multiword conversion insn; do it by hand. */
722 start_sequence ();
724 /* Since we will turn this into a no conflict block, we must ensure
725 that the source does not overlap the target. */
727 if (reg_overlap_mentioned_p (to, from))
728 from = force_reg (from_mode, from);
730 /* Get a copy of FROM widened to a word, if necessary. */
731 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
732 lowpart_mode = word_mode;
733 else
734 lowpart_mode = from_mode;
736 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
738 lowpart = gen_lowpart (lowpart_mode, to);
739 emit_move_insn (lowpart, lowfrom);
741 /* Compute the value to put in each remaining word. */
742 if (unsignedp)
743 fill_value = const0_rtx;
744 else
746 #ifdef HAVE_slt
747 if (HAVE_slt
748 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
749 && STORE_FLAG_VALUE == -1)
751 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
752 lowpart_mode, 0);
753 fill_value = gen_reg_rtx (word_mode);
754 emit_insn (gen_slt (fill_value));
756 else
757 #endif
759 fill_value
760 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
761 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
762 NULL_RTX, 0);
763 fill_value = convert_to_mode (word_mode, fill_value, 1);
767 /* Fill the remaining words. */
768 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
770 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
771 rtx subword = operand_subword (to, index, 1, to_mode);
773 if (subword == 0)
774 abort ();
776 if (fill_value != subword)
777 emit_move_insn (subword, fill_value);
780 insns = get_insns ();
781 end_sequence ();
783 emit_no_conflict_block (insns, to, from, NULL_RTX,
784 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
785 return;
788 /* Truncating multi-word to a word or less. */
789 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
790 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
792 if (!((GET_CODE (from) == MEM
793 && ! MEM_VOLATILE_P (from)
794 && direct_load[(int) to_mode]
795 && ! mode_dependent_address_p (XEXP (from, 0)))
796 || GET_CODE (from) == REG
797 || GET_CODE (from) == SUBREG))
798 from = force_reg (from_mode, from);
799 convert_move (to, gen_lowpart (word_mode, from), 0);
800 return;
803 /* Now follow all the conversions between integers
804 no more than a word long. */
806 /* For truncation, usually we can just refer to FROM in a narrower mode. */
807 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
808 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
809 GET_MODE_BITSIZE (from_mode)))
811 if (!((GET_CODE (from) == MEM
812 && ! MEM_VOLATILE_P (from)
813 && direct_load[(int) to_mode]
814 && ! mode_dependent_address_p (XEXP (from, 0)))
815 || GET_CODE (from) == REG
816 || GET_CODE (from) == SUBREG))
817 from = force_reg (from_mode, from);
818 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
819 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
820 from = copy_to_reg (from);
821 emit_move_insn (to, gen_lowpart (to_mode, from));
822 return;
825 /* Handle extension. */
826 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
828 /* Convert directly if that works. */
829 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
830 != CODE_FOR_nothing)
832 if (flag_force_mem)
833 from = force_not_mem (from);
835 emit_unop_insn (code, to, from, equiv_code);
836 return;
838 else
840 enum machine_mode intermediate;
841 rtx tmp;
842 tree shift_amount;
844 /* Search for a mode to convert via. */
845 for (intermediate = from_mode; intermediate != VOIDmode;
846 intermediate = GET_MODE_WIDER_MODE (intermediate))
847 if (((can_extend_p (to_mode, intermediate, unsignedp)
848 != CODE_FOR_nothing)
849 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
850 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
851 GET_MODE_BITSIZE (intermediate))))
852 && (can_extend_p (intermediate, from_mode, unsignedp)
853 != CODE_FOR_nothing))
855 convert_move (to, convert_to_mode (intermediate, from,
856 unsignedp), unsignedp);
857 return;
860 /* No suitable intermediate mode.
861 Generate what we need with shifts. */
862 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
863 - GET_MODE_BITSIZE (from_mode), 0);
864 from = gen_lowpart (to_mode, force_reg (from_mode, from));
865 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
866 to, unsignedp);
867 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
868 to, unsignedp);
869 if (tmp != to)
870 emit_move_insn (to, tmp);
871 return;
875 /* Support special truncate insns for certain modes. */
876 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
878 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
879 to, from, UNKNOWN);
880 return;
883 /* Handle truncation of volatile memrefs, and so on;
884 the things that couldn't be truncated directly,
885 and for which there was no special instruction.
887 ??? Code above formerly short-circuited this, for most integer
888 mode pairs, with a force_reg in from_mode followed by a recursive
889 call to this routine. Appears always to have been wrong. */
890 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
892 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
893 emit_move_insn (to, temp);
894 return;
897 /* Mode combination is not recognized. */
898 abort ();
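/* Example (illustrative, not part of the original source): widening an
   SImode value into a DImode register.  With UNSIGNEDP nonzero the move
   is expanded as a zero-extension, otherwise as a sign-extension, or,
   when the target has no suitable extend pattern, via the multiword or
   shift-based sequences above.

	rtx from = gen_reg_rtx (SImode);
	rtx to = gen_reg_rtx (DImode);
	convert_move (to, from, 1);		zero-extend FROM into TO
*/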
901 /* Return an rtx for a value that would result
902 from converting X to mode MODE.
903 Both X and MODE may be floating, or both integer.
904 UNSIGNEDP is nonzero if X is an unsigned value.
905 This can be done by referring to a part of X in place
906 or by copying to a new temporary with conversion.
908 This function *must not* call protect_from_queue
909 except when putting X into an insn (in which case convert_move does it). */
912 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
914 return convert_modes (mode, VOIDmode, x, unsignedp);
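/* Example (illustrative, not part of the original source): this is the
   usual way a caller coerces an operand to the mode an insn expects;
   emit_block_move_via_movstr below does

	op2 = convert_to_mode (mode, size, 1);

   which either reuses SIZE via gen_lowpart or emits a real conversion
   into a fresh pseudo.  */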
917 /* Return an rtx for a value that would result
918 from converting X from mode OLDMODE to mode MODE.
919 Both modes may be floating, or both integer.
920 UNSIGNEDP is nonzero if X is an unsigned value.
922 This can be done by referring to a part of X in place
923 or by copying to a new temporary with conversion.
925 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
927 This function *must not* call protect_from_queue
928 except when putting X into an insn (in which case convert_move does it). */
931 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
933 rtx temp;
935 /* If FROM is a SUBREG that indicates that we have already done at least
936 the required extension, strip it. */
938 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
939 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
940 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
941 x = gen_lowpart (mode, x);
943 if (GET_MODE (x) != VOIDmode)
944 oldmode = GET_MODE (x);
946 if (mode == oldmode)
947 return x;
949 /* There is one case that we must handle specially: If we are converting
950 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
951 we are to interpret the constant as unsigned, gen_lowpart will do
 952    the wrong thing if the constant appears negative. What we want to do is
953 make the high-order word of the constant zero, not all ones. */
955 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
956 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
957 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
959 HOST_WIDE_INT val = INTVAL (x);
961 if (oldmode != VOIDmode
962 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
964 int width = GET_MODE_BITSIZE (oldmode);
966 /* We need to zero extend VAL. */
967 val &= ((HOST_WIDE_INT) 1 << width) - 1;
970 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
973 /* We can do this with a gen_lowpart if both desired and current modes
974 are integer, and this is either a constant integer, a register, or a
975 non-volatile MEM. Except for the constant case where MODE is no
976 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
978 if ((GET_CODE (x) == CONST_INT
979 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
980 || (GET_MODE_CLASS (mode) == MODE_INT
981 && GET_MODE_CLASS (oldmode) == MODE_INT
982 && (GET_CODE (x) == CONST_DOUBLE
983 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
984 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
985 && direct_load[(int) mode])
986 || (GET_CODE (x) == REG
987 && (! HARD_REGISTER_P (x)
988 || HARD_REGNO_MODE_OK (REGNO (x), mode))
989 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
990 GET_MODE_BITSIZE (GET_MODE (x)))))))))
992 /* ?? If we don't know OLDMODE, we have to assume here that
993 X does not need sign- or zero-extension. This may not be
994 the case, but it's the best we can do. */
995 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
996 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
998 HOST_WIDE_INT val = INTVAL (x);
999 int width = GET_MODE_BITSIZE (oldmode);
1001 /* We must sign or zero-extend in this case. Start by
1002 zero-extending, then sign extend if we need to. */
1003 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1004 if (! unsignedp
1005 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1006 val |= (HOST_WIDE_INT) (-1) << width;
1008 return gen_int_mode (val, mode);
1011 return gen_lowpart (mode, x);
1014 /* Converting from an integer constant into MODE is always equivalent to a
1015    subreg operation.  */
1016 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1018 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1019 abort ();
1020 return simplify_gen_subreg (mode, x, oldmode, 0);
1023 temp = gen_reg_rtx (mode);
1024 convert_move (temp, x, unsignedp);
1025 return temp;
1028 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1029 store efficiently. Due to internal GCC limitations, this is
1030 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1031 for an immediate constant. */
1033 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
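/* For illustration (not part of the original source): on a host with a
   64-bit HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) is 16, so
   STORE_MAX_PIECES is simply MOVE_MAX_PIECES on any target whose
   MOVE_MAX_PIECES does not exceed 16 bytes.  */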
1035 /* Determine whether the LEN bytes can be moved by using several move
1036 instructions. Return nonzero if a call to move_by_pieces should
1037 succeed. */
1040 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1041 unsigned int align ATTRIBUTE_UNUSED)
1043 return MOVE_BY_PIECES_P (len, align);
1046 /* Generate several move instructions to copy LEN bytes from block FROM to
1047 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1048 and TO through protect_from_queue before calling.
1050 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1051 used to push FROM to the stack.
1053 ALIGN is maximum stack alignment we can assume.
1055    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
1056    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1057    stpcpy.  */
1060 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1061 unsigned int align, int endp)
1063 struct move_by_pieces data;
1064 rtx to_addr, from_addr = XEXP (from, 0);
1065 unsigned int max_size = MOVE_MAX_PIECES + 1;
1066 enum machine_mode mode = VOIDmode, tmode;
1067 enum insn_code icode;
1069 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1071 data.offset = 0;
1072 data.from_addr = from_addr;
1073 if (to)
1075 to_addr = XEXP (to, 0);
1076 data.to = to;
1077 data.autinc_to
1078 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1079 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1080 data.reverse
1081 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1083 else
1085 to_addr = NULL_RTX;
1086 data.to = NULL_RTX;
1087 data.autinc_to = 1;
1088 #ifdef STACK_GROWS_DOWNWARD
1089 data.reverse = 1;
1090 #else
1091 data.reverse = 0;
1092 #endif
1094 data.to_addr = to_addr;
1095 data.from = from;
1096 data.autinc_from
1097 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1098 || GET_CODE (from_addr) == POST_INC
1099 || GET_CODE (from_addr) == POST_DEC);
1101 data.explicit_inc_from = 0;
1102 data.explicit_inc_to = 0;
1103 if (data.reverse) data.offset = len;
1104 data.len = len;
1106 /* If copying requires more than two move insns,
1107 copy addresses to registers (to make displacements shorter)
1108 and use post-increment if available. */
1109 if (!(data.autinc_from && data.autinc_to)
1110 && move_by_pieces_ninsns (len, align) > 2)
1112 /* Find the mode of the largest move... */
1113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1114 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1115 if (GET_MODE_SIZE (tmode) < max_size)
1116 mode = tmode;
1118 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1120 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1121 data.autinc_from = 1;
1122 data.explicit_inc_from = -1;
1124 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1126 data.from_addr = copy_addr_to_reg (from_addr);
1127 data.autinc_from = 1;
1128 data.explicit_inc_from = 1;
1130 if (!data.autinc_from && CONSTANT_P (from_addr))
1131 data.from_addr = copy_addr_to_reg (from_addr);
1132 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1134 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1135 data.autinc_to = 1;
1136 data.explicit_inc_to = -1;
1138 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1140 data.to_addr = copy_addr_to_reg (to_addr);
1141 data.autinc_to = 1;
1142 data.explicit_inc_to = 1;
1144 if (!data.autinc_to && CONSTANT_P (to_addr))
1145 data.to_addr = copy_addr_to_reg (to_addr);
1148 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1149 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1150 align = MOVE_MAX * BITS_PER_UNIT;
1152 /* First move what we can in the largest integer mode, then go to
1153 successively smaller modes. */
1155 while (max_size > 1)
1157 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1158 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1159 if (GET_MODE_SIZE (tmode) < max_size)
1160 mode = tmode;
1162 if (mode == VOIDmode)
1163 break;
1165 icode = mov_optab->handlers[(int) mode].insn_code;
1166 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1167 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1169 max_size = GET_MODE_SIZE (mode);
1172 /* The code above should have handled everything. */
1173 if (data.len > 0)
1174 abort ();
1176 if (endp)
1178 rtx to1;
1180 if (data.reverse)
1181 abort ();
1182 if (data.autinc_to)
1184 if (endp == 2)
1186 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1187 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1188 else
1189 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1190 -1));
1192 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1193 data.offset);
1195 else
1197 if (endp == 2)
1198 --data.offset;
1199 to1 = adjust_address (data.to, QImode, data.offset);
1201 return to1;
1203 else
1204 return data.to;
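/* Example call (illustrative, not part of the original source), as made
   from emit_block_move below once the size is known to be a suitable
   constant:

	move_by_pieces (x, y, INTVAL (size), align, 0);

   With ENDP == 0 the return value is simply the destination MEM.  */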
1207 /* Return number of insns required to move L bytes by pieces.
1208 ALIGN (in bits) is maximum alignment we can assume. */
1210 static unsigned HOST_WIDE_INT
1211 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1213 unsigned HOST_WIDE_INT n_insns = 0;
1214 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1216 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1217 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1218 align = MOVE_MAX * BITS_PER_UNIT;
1220 while (max_size > 1)
1222 enum machine_mode mode = VOIDmode, tmode;
1223 enum insn_code icode;
1225 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1226 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1227 if (GET_MODE_SIZE (tmode) < max_size)
1228 mode = tmode;
1230 if (mode == VOIDmode)
1231 break;
1233 icode = mov_optab->handlers[(int) mode].insn_code;
1234 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1235 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1237 max_size = GET_MODE_SIZE (mode);
1240 if (l)
1241 abort ();
1242 return n_insns;
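/* Worked example (illustrative, not part of the original source): with
   MOVE_MAX == 8, full alignment, and all integer modes up to DImode
   available, L == 10 is handled as one 8-byte move plus one 2-byte
   move, so the function returns 2.  */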
1245 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1246 with move instructions for mode MODE. GENFUN is the gen_... function
1247 to make a move insn for that mode. DATA has all the other info. */
1249 static void
1250 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1251 struct move_by_pieces *data)
1253 unsigned int size = GET_MODE_SIZE (mode);
1254 rtx to1 = NULL_RTX, from1;
1256 while (data->len >= size)
1258 if (data->reverse)
1259 data->offset -= size;
1261 if (data->to)
1263 if (data->autinc_to)
1264 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1265 data->offset);
1266 else
1267 to1 = adjust_address (data->to, mode, data->offset);
1270 if (data->autinc_from)
1271 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1272 data->offset);
1273 else
1274 from1 = adjust_address (data->from, mode, data->offset);
1276 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1277 emit_insn (gen_add2_insn (data->to_addr,
1278 GEN_INT (-(HOST_WIDE_INT)size)));
1279 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1280 emit_insn (gen_add2_insn (data->from_addr,
1281 GEN_INT (-(HOST_WIDE_INT)size)));
1283 if (data->to)
1284 emit_insn ((*genfun) (to1, from1));
1285 else
1287 #ifdef PUSH_ROUNDING
1288 emit_single_push_insn (mode, from1, NULL);
1289 #else
1290 abort ();
1291 #endif
1294 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1295 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1296 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1297 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1299 if (! data->reverse)
1300 data->offset += size;
1302 data->len -= size;
1306 /* Emit code to move a block Y to a block X. This may be done with
1307 string-move instructions, with multiple scalar move instructions,
1308 or with a library call.
1310 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1311 SIZE is an rtx that says how long they are.
1312 ALIGN is the maximum alignment we can assume they have.
1313 METHOD describes what kind of copy this is, and what mechanisms may be used.
1315 Return the address of the new block, if memcpy is called and returns it,
1316 0 otherwise. */
1319 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1321 bool may_use_call;
1322 rtx retval = 0;
1323 unsigned int align;
1325 switch (method)
1327 case BLOCK_OP_NORMAL:
1328 may_use_call = true;
1329 break;
1331 case BLOCK_OP_CALL_PARM:
1332 may_use_call = block_move_libcall_safe_for_call_parm ();
1334 /* Make inhibit_defer_pop nonzero around the library call
1335 to force it to pop the arguments right away. */
1336 NO_DEFER_POP;
1337 break;
1339 case BLOCK_OP_NO_LIBCALL:
1340 may_use_call = false;
1341 break;
1343 default:
1344 abort ();
1347 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1349 if (GET_MODE (x) != BLKmode)
1350 abort ();
1351 if (GET_MODE (y) != BLKmode)
1352 abort ();
1354 x = protect_from_queue (x, 1);
1355 y = protect_from_queue (y, 0);
1356 size = protect_from_queue (size, 0);
1358 if (GET_CODE (x) != MEM)
1359 abort ();
1360 if (GET_CODE (y) != MEM)
1361 abort ();
1362 if (size == 0)
1363 abort ();
1365 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1366 can be incorrect is coming from __builtin_memcpy. */
1367 if (GET_CODE (size) == CONST_INT)
1369 if (INTVAL (size) == 0)
1370 return 0;
1372 x = shallow_copy_rtx (x);
1373 y = shallow_copy_rtx (y);
1374 set_mem_size (x, size);
1375 set_mem_size (y, size);
1378 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1379 move_by_pieces (x, y, INTVAL (size), align, 0);
1380 else if (emit_block_move_via_movstr (x, y, size, align))
1382 else if (may_use_call)
1383 retval = emit_block_move_via_libcall (x, y, size);
1384 else
1385 emit_block_move_via_loop (x, y, size, align);
1387 if (method == BLOCK_OP_CALL_PARM)
1388 OK_DEFER_POP;
1390 return retval;
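/* Example (illustrative, not part of the original source): a caller
   copying N constant bytes between two BLKmode MEMs X and Y would use

	emit_block_move (x, y, GEN_INT (n), BLOCK_OP_NORMAL);

   and may only rely on the return value (the memcpy result) when it is
   nonzero; with BLOCK_OP_NO_LIBCALL the copy is done inline or by the
   byte loop below and the result is always 0.  */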
1393 /* A subroutine of emit_block_move. Returns true if calling the
1394 block move libcall will not clobber any parameters which may have
1395 already been placed on the stack. */
1397 static bool
1398 block_move_libcall_safe_for_call_parm (void)
1400 /* If arguments are pushed on the stack, then they're safe. */
1401 if (PUSH_ARGS)
1402 return true;
1404 /* If registers go on the stack anyway, any argument is sure to clobber
1405 an outgoing argument. */
1406 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1408 tree fn = emit_block_move_libcall_fn (false);
1409 (void) fn;
1410 if (REG_PARM_STACK_SPACE (fn) != 0)
1411 return false;
1413 #endif
1415 /* If any argument goes in memory, then it might clobber an outgoing
1416 argument. */
1418 CUMULATIVE_ARGS args_so_far;
1419 tree fn, arg;
1421 fn = emit_block_move_libcall_fn (false);
1422 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1424 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1425 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1427 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1428 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1429 if (!tmp || !REG_P (tmp))
1430 return false;
1431 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1432 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1433 NULL_TREE, 1))
1434 return false;
1435 #endif
1436 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1439 return true;
1442 /* A subroutine of emit_block_move. Expand a movstr pattern;
1443 return true if successful. */
1445 static bool
1446 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1448 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1449 enum machine_mode mode;
1451 /* Since this is a move insn, we don't care about volatility. */
1452 volatile_ok = 1;
1454 /* Try the most limited insn first, because there's no point
1455 including more than one in the machine description unless
1456 the more limited one has some advantage. */
1458 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1459 mode = GET_MODE_WIDER_MODE (mode))
1461 enum insn_code code = movstr_optab[(int) mode];
1462 insn_operand_predicate_fn pred;
1464 if (code != CODE_FOR_nothing
1465 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1466 here because if SIZE is less than the mode mask, as it is
1467 returned by the macro, it will definitely be less than the
1468 actual mode mask. */
1469 && ((GET_CODE (size) == CONST_INT
1470 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1471 <= (GET_MODE_MASK (mode) >> 1)))
1472 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1473 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1474 || (*pred) (x, BLKmode))
1475 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1476 || (*pred) (y, BLKmode))
1477 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1478 || (*pred) (opalign, VOIDmode)))
1480 rtx op2;
1481 rtx last = get_last_insn ();
1482 rtx pat;
1484 op2 = convert_to_mode (mode, size, 1);
1485 pred = insn_data[(int) code].operand[2].predicate;
1486 if (pred != 0 && ! (*pred) (op2, mode))
1487 op2 = copy_to_mode_reg (mode, op2);
1489 /* ??? When called via emit_block_move_for_call, it'd be
1490 nice if there were some way to inform the backend, so
1491 that it doesn't fail the expansion because it thinks
1492 emitting the libcall would be more efficient. */
1494 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1495 if (pat)
1497 emit_insn (pat);
1498 volatile_ok = 0;
1499 return true;
1501 else
1502 delete_insns_since (last);
1506 volatile_ok = 0;
1507 return false;
1510 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1511 Return the return value from memcpy, 0 otherwise. */
1513 static rtx
1514 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1516 rtx dst_addr, src_addr;
1517 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1518 enum machine_mode size_mode;
1519 rtx retval;
1521 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1523 It is unsafe to save the value generated by protect_from_queue and reuse
1524 it later. Consider what happens if emit_queue is called before the
1525 return value from protect_from_queue is used.
1527 Expansion of the CALL_EXPR below will call emit_queue before we are
1528 finished emitting RTL for argument setup. So if we are not careful we
1529 could get the wrong value for an argument.
1531 To avoid this problem we go ahead and emit code to copy the addresses of
1532 DST and SRC and SIZE into new pseudos. We can then place those new
1533 pseudos into an RTL_EXPR and use them later, even after a call to
1534 emit_queue.
1536 Note this is not strictly needed for library calls since they do not call
1537 emit_queue before loading their arguments. However, we may need to have
1538 library calls call emit_queue in the future since failing to do so could
1539 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1540 arguments in registers. */
1542 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1543 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1545 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1546 src_addr = convert_memory_address (ptr_mode, src_addr);
1548 dst_tree = make_tree (ptr_type_node, dst_addr);
1549 src_tree = make_tree (ptr_type_node, src_addr);
1551 if (TARGET_MEM_FUNCTIONS)
1552 size_mode = TYPE_MODE (sizetype);
1553 else
1554 size_mode = TYPE_MODE (unsigned_type_node);
1556 size = convert_to_mode (size_mode, size, 1);
1557 size = copy_to_mode_reg (size_mode, size);
1559 /* It is incorrect to use the libcall calling conventions to call
1560 memcpy in this context. This could be a user call to memcpy and
1561 the user may wish to examine the return value from memcpy. For
1562 targets where libcalls and normal calls have different conventions
1563 for returning pointers, we could end up generating incorrect code.
1565 For convenience, we generate the call to bcopy this way as well. */
1567 if (TARGET_MEM_FUNCTIONS)
1568 size_tree = make_tree (sizetype, size);
1569 else
1570 size_tree = make_tree (unsigned_type_node, size);
1572 fn = emit_block_move_libcall_fn (true);
1573 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1574 if (TARGET_MEM_FUNCTIONS)
1576 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1577 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1579 else
1581 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1582 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1585 /* Now we have to build up the CALL_EXPR itself. */
1586 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1587 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1588 call_expr, arg_list, NULL_TREE);
1590 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1592 /* If we are initializing a readonly value, show the above call clobbered
1593 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1594 the delay slot scheduler might overlook conflicts and take nasty
1595 decisions. */
1596 if (RTX_UNCHANGING_P (dst))
1597 add_function_usage_to
1598 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1599 gen_rtx_CLOBBER (VOIDmode, dst),
1600 NULL_RTX));
1602 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1605 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1606 for the function we use for block copies. The first time FOR_CALL
1607 is true, we call assemble_external. */
1609 static GTY(()) tree block_move_fn;
1611 void
1612 init_block_move_fn (const char *asmspec)
1614 if (!block_move_fn)
1616 tree args, fn;
1618 if (TARGET_MEM_FUNCTIONS)
1620 fn = get_identifier ("memcpy");
1621 args = build_function_type_list (ptr_type_node, ptr_type_node,
1622 const_ptr_type_node, sizetype,
1623 NULL_TREE);
1625 else
1627 fn = get_identifier ("bcopy");
1628 args = build_function_type_list (void_type_node, const_ptr_type_node,
1629 ptr_type_node, unsigned_type_node,
1630 NULL_TREE);
1633 fn = build_decl (FUNCTION_DECL, fn, args);
1634 DECL_EXTERNAL (fn) = 1;
1635 TREE_PUBLIC (fn) = 1;
1636 DECL_ARTIFICIAL (fn) = 1;
1637 TREE_NOTHROW (fn) = 1;
1639 block_move_fn = fn;
1642 if (asmspec)
1644 SET_DECL_RTL (block_move_fn, NULL_RTX);
1645 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1649 static tree
1650 emit_block_move_libcall_fn (int for_call)
1652 static bool emitted_extern;
1654 if (!block_move_fn)
1655 init_block_move_fn (NULL);
1657 if (for_call && !emitted_extern)
1659 emitted_extern = true;
1660 make_decl_rtl (block_move_fn, NULL);
1661 assemble_external (block_move_fn);
1664 return block_move_fn;
1667 /* A subroutine of emit_block_move. Copy the data via an explicit
1668 loop. This is used only when libcalls are forbidden. */
1669 /* ??? It'd be nice to copy in hunks larger than QImode. */
1671 static void
1672 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1673 unsigned int align ATTRIBUTE_UNUSED)
1675 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1676 enum machine_mode iter_mode;
1678 iter_mode = GET_MODE (size);
1679 if (iter_mode == VOIDmode)
1680 iter_mode = word_mode;
1682 top_label = gen_label_rtx ();
1683 cmp_label = gen_label_rtx ();
1684 iter = gen_reg_rtx (iter_mode);
1686 emit_move_insn (iter, const0_rtx);
1688 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1689 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1690 do_pending_stack_adjust ();
1692 emit_note (NOTE_INSN_LOOP_BEG);
1694 emit_jump (cmp_label);
1695 emit_label (top_label);
1697 tmp = convert_modes (Pmode, iter_mode, iter, true);
1698 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1699 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1700 x = change_address (x, QImode, x_addr);
1701 y = change_address (y, QImode, y_addr);
1703 emit_move_insn (x, y);
1705 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1706 true, OPTAB_LIB_WIDEN);
1707 if (tmp != iter)
1708 emit_move_insn (iter, tmp);
1710 emit_note (NOTE_INSN_LOOP_CONT);
1711 emit_label (cmp_label);
1713 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1714 true, top_label);
1716 emit_note (NOTE_INSN_LOOP_END);
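/* For illustration (not part of the original source), the loop emitted
   above has this shape:

	iter = 0;
	goto cmp_label;
     top_label:
	*(x + iter) = *(y + iter);	one QImode move per iteration
	iter = iter + 1;
     cmp_label:
	if (iter < size) goto top_label;	unsigned comparison
*/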
1719 /* Copy all or part of a value X into registers starting at REGNO.
1720 The number of registers to be filled is NREGS. */
1722 void
1723 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1725 int i;
1726 #ifdef HAVE_load_multiple
1727 rtx pat;
1728 rtx last;
1729 #endif
1731 if (nregs == 0)
1732 return;
1734 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1735 x = validize_mem (force_const_mem (mode, x));
1737 /* See if the machine can do this with a load multiple insn. */
1738 #ifdef HAVE_load_multiple
1739 if (HAVE_load_multiple)
1741 last = get_last_insn ();
1742 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1743 GEN_INT (nregs));
1744 if (pat)
1746 emit_insn (pat);
1747 return;
1749 else
1750 delete_insns_since (last);
1752 #endif
1754 for (i = 0; i < nregs; i++)
1755 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1756 operand_subword_force (x, i, mode));
1759 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1760 The number of registers to be filled is NREGS. */
1762 void
1763 move_block_from_reg (int regno, rtx x, int nregs)
1765 int i;
1767 if (nregs == 0)
1768 return;
1770 /* See if the machine can do this with a store multiple insn. */
1771 #ifdef HAVE_store_multiple
1772 if (HAVE_store_multiple)
1774 rtx last = get_last_insn ();
1775 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1776 GEN_INT (nregs));
1777 if (pat)
1779 emit_insn (pat);
1780 return;
1782 else
1783 delete_insns_since (last);
1785 #endif
1787 for (i = 0; i < nregs; i++)
1789 rtx tem = operand_subword (x, i, 1, BLKmode);
1791 if (tem == 0)
1792 abort ();
1794 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1798 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1799 ORIG, where ORIG is a non-consecutive group of registers represented by
1800 a PARALLEL. The clone is identical to the original except in that the
1801 original set of registers is replaced by a new set of pseudo registers.
1802 The new set has the same modes as the original set. */
1805 gen_group_rtx (rtx orig)
1807 int i, length;
1808 rtx *tmps;
1810 if (GET_CODE (orig) != PARALLEL)
1811 abort ();
1813 length = XVECLEN (orig, 0);
1814 tmps = alloca (sizeof (rtx) * length);
1816 /* Skip a NULL entry in first slot. */
1817 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1819 if (i)
1820 tmps[0] = 0;
1822 for (; i < length; i++)
1824 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1825 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1827 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1830 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1833 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1834 where DST is non-consecutive registers represented by a PARALLEL.
1835 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1836 if not known. */
1838 void
1839 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1841 rtx *tmps, src;
1842 int start, i;
1844 if (GET_CODE (dst) != PARALLEL)
1845 abort ();
1847 /* Check for a NULL entry, used to indicate that the parameter goes
1848 both on the stack and in registers. */
1849 if (XEXP (XVECEXP (dst, 0, 0), 0))
1850 start = 0;
1851 else
1852 start = 1;
1854 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1856 /* Process the pieces. */
1857 for (i = start; i < XVECLEN (dst, 0); i++)
1859 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1860 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1861 unsigned int bytelen = GET_MODE_SIZE (mode);
1862 int shift = 0;
1864 /* Handle trailing fragments that run over the size of the struct. */
1865 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1867 /* Arrange to shift the fragment to where it belongs.
1868 extract_bit_field loads to the lsb of the reg. */
1869 if (
1870 #ifdef BLOCK_REG_PADDING
1871 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1872 == (BYTES_BIG_ENDIAN ? upward : downward)
1873 #else
1874 BYTES_BIG_ENDIAN
1875 #endif
1877 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1878 bytelen = ssize - bytepos;
1879 if (bytelen <= 0)
1880 abort ();
1883 /* If we won't be loading directly from memory, protect the real source
1884 from strange tricks we might play; but make sure that the source can
1885 be loaded directly into the destination. */
1886 src = orig_src;
1887 if (GET_CODE (orig_src) != MEM
1888 && (!CONSTANT_P (orig_src)
1889 || (GET_MODE (orig_src) != mode
1890 && GET_MODE (orig_src) != VOIDmode)))
1892 if (GET_MODE (orig_src) == VOIDmode)
1893 src = gen_reg_rtx (mode);
1894 else
1895 src = gen_reg_rtx (GET_MODE (orig_src));
1897 emit_move_insn (src, orig_src);
1900 /* Optimize the access just a bit. */
1901 if (GET_CODE (src) == MEM
1902 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1903 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1904 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1905 && bytelen == GET_MODE_SIZE (mode))
1907 tmps[i] = gen_reg_rtx (mode);
1908 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1910 else if (GET_CODE (src) == CONCAT)
1912 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1913 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1915 if ((bytepos == 0 && bytelen == slen0)
1916 || (bytepos != 0 && bytepos + bytelen <= slen))
1918 /* The following assumes that the concatenated objects all
1919 have the same size. In this case, a simple calculation
1920 can be used to determine the object and the bit field
1921 to be extracted. */
1922 tmps[i] = XEXP (src, bytepos / slen0);
1923 if (! CONSTANT_P (tmps[i])
1924 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1925 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1926 (bytepos % slen0) * BITS_PER_UNIT,
1927 1, NULL_RTX, mode, mode, ssize);
1929 else if (bytepos == 0)
1931 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1932 emit_move_insn (mem, src);
1933 tmps[i] = adjust_address (mem, mode, 0);
1935 else
1936 abort ();
1938 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1939 SIMD register, which is currently broken. While we get GCC
1940 to emit proper RTL for these cases, let's dump to memory. */
1941 else if (VECTOR_MODE_P (GET_MODE (dst))
1942 && GET_CODE (src) == REG)
1944 int slen = GET_MODE_SIZE (GET_MODE (src));
1945 rtx mem;
1947 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1948 emit_move_insn (mem, src);
1949 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1951 else if (CONSTANT_P (src)
1952 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1953 tmps[i] = src;
1954 else
1955 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1956 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1957 mode, mode, ssize);
1959 if (shift)
1960 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1961 tmps[i], 0, OPTAB_WIDEN);
1964 emit_queue ();
1966 /* Copy the extracted pieces into the proper (probable) hard regs. */
1967 for (i = start; i < XVECLEN (dst, 0); i++)
1968 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
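/* A minimal sketch of loading a 16-byte aggregate from a BLKmode MEM into a
   two-register group with emit_group_load; the register numbers, modes and
   size are hypothetical.  Kept under "#if 0" as illustration only.  */
#if 0
static void
example_emit_group_load (rtx src_mem, tree type)
{
  rtx dst
    = gen_rtx_PARALLEL (BLKmode,
                        gen_rtvec (2,
                                   gen_rtx_EXPR_LIST (VOIDmode,
                                                      gen_rtx_REG (DImode, 3),
                                                      GEN_INT (0)),
                                   gen_rtx_EXPR_LIST (VOIDmode,
                                                      gen_rtx_REG (DImode, 4),
                                                      GEN_INT (8))));

  /* SSIZE is the total size of ORIG_SRC in bytes, 16 here.  */
  emit_group_load (dst, src_mem, type, 16);
}
#endif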
1971 /* Emit code to move a block SRC to block DST, where SRC and DST are
1972 non-consecutive groups of registers, each represented by a PARALLEL. */
1974 void
1975 emit_group_move (rtx dst, rtx src)
1977 int i;
1979 if (GET_CODE (src) != PARALLEL
1980 || GET_CODE (dst) != PARALLEL
1981 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1982 abort ();
1984 /* Skip first entry if NULL. */
1985 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1986 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1987 XEXP (XVECEXP (src, 0, i), 0));
1990 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1991 where SRC is non-consecutive registers represented by a PARALLEL.
1992 SSIZE represents the total size of block ORIG_DST, or -1 if not
1993 known. */
1995 void
1996 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1998 rtx *tmps, dst;
1999 int start, i;
2001 if (GET_CODE (src) != PARALLEL)
2002 abort ();
2004 /* Check for a NULL entry, used to indicate that the parameter goes
2005 both on the stack and in registers. */
2006 if (XEXP (XVECEXP (src, 0, 0), 0))
2007 start = 0;
2008 else
2009 start = 1;
2011 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2013 /* Copy the (probable) hard regs into pseudos. */
2014 for (i = start; i < XVECLEN (src, 0); i++)
2016 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2017 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2018 emit_move_insn (tmps[i], reg);
2020 emit_queue ();
2022 /* If we won't be storing directly into memory, protect the real destination
2023 from strange tricks we might play. */
2024 dst = orig_dst;
2025 if (GET_CODE (dst) == PARALLEL)
2027 rtx temp;
2029 /* We can get a PARALLEL dst if there is a conditional expression in
2030 a return statement. In that case, the dst and src are the same,
2031 so no action is necessary. */
2032 if (rtx_equal_p (dst, src))
2033 return;
2035 /* It is unclear if we can ever reach here, but we may as well handle
2036 it. Allocate a temporary, and split this into a store/load to/from
2037 the temporary. */
2039 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2040 emit_group_store (temp, src, type, ssize);
2041 emit_group_load (dst, temp, type, ssize);
2042 return;
2044 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2046 dst = gen_reg_rtx (GET_MODE (orig_dst));
2047 /* Make life a bit easier for combine. */
2048 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2051 /* Process the pieces. */
2052 for (i = start; i < XVECLEN (src, 0); i++)
2054 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2055 enum machine_mode mode = GET_MODE (tmps[i]);
2056 unsigned int bytelen = GET_MODE_SIZE (mode);
2057 rtx dest = dst;
2059 /* Handle trailing fragments that run over the size of the struct. */
2060 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2062 /* store_bit_field always takes its value from the lsb.
2063 Move the fragment to the lsb if it's not already there. */
2064 if (
2065 #ifdef BLOCK_REG_PADDING
2066 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2067 == (BYTES_BIG_ENDIAN ? upward : downward)
2068 #else
2069 BYTES_BIG_ENDIAN
2070 #endif
2073 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2074 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2075 tmps[i], 0, OPTAB_WIDEN);
2077 bytelen = ssize - bytepos;
2080 if (GET_CODE (dst) == CONCAT)
2082 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2083 dest = XEXP (dst, 0);
2084 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2086 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2087 dest = XEXP (dst, 1);
2089 else if (bytepos == 0 && XVECLEN (src, 0))
2091 dest = assign_stack_temp (GET_MODE (dest),
2092 GET_MODE_SIZE (GET_MODE (dest)), 0);
2093 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2094 tmps[i]);
2095 dst = dest;
2096 break;
2098 else
2099 abort ();
2102 /* Optimize the access just a bit. */
2103 if (GET_CODE (dest) == MEM
2104 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2105 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2106 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2107 && bytelen == GET_MODE_SIZE (mode))
2108 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2109 else
2110 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2111 mode, tmps[i], ssize);
2114 emit_queue ();
2116 /* Copy from the pseudo into the (probable) hard reg. */
2117 if (orig_dst != dst)
2118 emit_move_insn (orig_dst, dst);
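/* A minimal sketch of the inverse direction: storing a group (for example
   one produced by gen_group_rtx above) back into a BLKmode MEM.  The 16-byte
   size is hypothetical.  */
#if 0
static void
example_emit_group_store (rtx dst_mem, rtx group, tree type)
{
  emit_group_store (dst_mem, group, type, 16);
}
#endif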
2121 /* Generate code to copy a BLKmode object of TYPE out of a
2122 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2123 is null, a stack temporary is created. TGTBLK is returned.
2125 The purpose of this routine is to handle functions that return
2126 BLKmode structures in registers. Some machines (the PA for example)
2127 want to return all small structures in registers regardless of the
2128 structure's alignment. */
2131 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2133 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2134 rtx src = NULL, dst = NULL;
2135 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2136 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2138 if (tgtblk == 0)
2140 tgtblk = assign_temp (build_qualified_type (type,
2141 (TYPE_QUALS (type)
2142 | TYPE_QUAL_CONST)),
2143 0, 1, 1);
2144 preserve_temp_slots (tgtblk);
2147 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2148 into a new pseudo which is a full word. */
2150 if (GET_MODE (srcreg) != BLKmode
2151 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2152 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2154 /* If the structure doesn't take up a whole number of words, see whether
2155 SRCREG is padded on the left or on the right. If it's on the left,
2156 set PADDING_CORRECTION to the number of bits to skip.
2158    In most ABIs, the structure will be returned at the least significant end of
2159 the register, which translates to right padding on little-endian
2160 targets and left padding on big-endian targets. The opposite
2161 holds if the structure is returned at the most significant
2162 end of the register. */
2163 if (bytes % UNITS_PER_WORD != 0
2164 && (targetm.calls.return_in_msb (type)
2165 ? !BYTES_BIG_ENDIAN
2166 : BYTES_BIG_ENDIAN))
2167 padding_correction
2168 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2170   /* Copy the structure BITSIZE bits at a time.
2172 We could probably emit more efficient code for machines which do not use
2173 strict alignment, but it doesn't seem worth the effort at the current
2174 time. */
2175 for (bitpos = 0, xbitpos = padding_correction;
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2179 /* We need a new source operand each time xbitpos is on a
2180 word boundary and when xbitpos == padding_correction
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0
2183 || xbitpos == padding_correction)
2184 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2185 GET_MODE (srcreg));
2187 /* We need a new destination operand each time bitpos is on
2188 a word boundary. */
2189 if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2192 /* Use xbitpos for the source extraction (right justified) and
2193          bitpos for the destination store (left justified). */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, word_mode, word_mode,
2198 BITS_PER_WORD),
2199 BITS_PER_WORD);
2202 return tgtblk;
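/* A minimal sketch of retrieving a small BLKmode return value; SRCREG would
   typically be the register the ABI returns the value in, and TYPE is the
   structure type.  Everything here is illustrative.  */
#if 0
static rtx
example_copy_blkmode_return (rtx srcreg, tree type)
{
  /* A null target makes copy_blkmode_from_reg allocate a stack temporary
     and return it.  */
  return copy_blkmode_from_reg (NULL_RTX, srcreg, type);
}
#endif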
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2208 void
2209 use_reg (rtx *call_fusage, rtx reg)
2211 if (GET_CODE (reg) != REG
2212 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2213 abort ();
2215 *call_fusage
2216 = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2220 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
2223 void
2224 use_regs (rtx *call_fusage, int regno, int nregs)
2226 int i;
2228 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2229 abort ();
2231 for (i = 0; i < nregs; i++)
2232 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2235 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2236 PARALLEL REGS. This is for calls that pass values in multiple
2237 non-contiguous locations. The Irix 6 ABI has examples of this. */
2239 void
2240 use_group_regs (rtx *call_fusage, rtx regs)
2242 int i;
2244 for (i = 0; i < XVECLEN (regs, 0); i++)
2246 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2248 /* A NULL entry means the parameter goes both on the stack and in
2249 registers. This can also be a MEM for targets that pass values
2250 partially on the stack and partially in registers. */
2251 if (reg != 0 && GET_CODE (reg) == REG)
2252 use_reg (call_fusage, reg);
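/* A minimal sketch of how a CALL_FUSAGE chain is built up before emitting a
   call; the hard register number 5 and the count of 2 are hypothetical.  */
#if 0
static rtx
example_build_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  /* Record that the call uses hard registers 5 and 6.  */
  use_regs (&call_fusage, 5, 2);

  /* CALL_FUSAGE is now a chain of (expr_list (use (reg ...)) ...).  */
  return call_fusage;
}
#endif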
2257 /* Determine whether the LEN bytes generated by CONSTFUN can be
2258 stored to memory using several move instructions. CONSTFUNDATA is
2259 a pointer which will be passed as argument in every CONSTFUN call.
2260 ALIGN is maximum alignment we can assume. Return nonzero if a
2261 call to store_by_pieces should succeed. */
2264 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2265 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2266 void *constfundata, unsigned int align)
2268 unsigned HOST_WIDE_INT max_size, l;
2269 HOST_WIDE_INT offset = 0;
2270 enum machine_mode mode, tmode;
2271 enum insn_code icode;
2272 int reverse;
2273 rtx cst;
2275 if (len == 0)
2276 return 1;
2278 if (! STORE_BY_PIECES_P (len, align))
2279 return 0;
2281 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2282 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2283 align = MOVE_MAX * BITS_PER_UNIT;
2285 /* We would first store what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2288 for (reverse = 0;
2289 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2290 reverse++)
2292 l = len;
2293 mode = VOIDmode;
2294 max_size = STORE_MAX_PIECES + 1;
2295 while (max_size > 1)
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2300 mode = tmode;
2302 if (mode == VOIDmode)
2303 break;
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= GET_MODE_ALIGNMENT (mode))
2309 unsigned int size = GET_MODE_SIZE (mode);
2311 while (l >= size)
2313 if (reverse)
2314 offset -= size;
2316 cst = (*constfun) (constfundata, offset, mode);
2317 if (!LEGITIMATE_CONSTANT_P (cst))
2318 return 0;
2320 if (!reverse)
2321 offset += size;
2323 l -= size;
2327 max_size = GET_MODE_SIZE (mode);
2330 /* The code above should have handled everything. */
2331 if (l != 0)
2332 abort ();
2335 return 1;
2338 /* Generate several move instructions to store LEN bytes generated by
2339 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2340 pointer which will be passed as argument in every CONSTFUN call.
2341 ALIGN is maximum alignment we can assume.
2342    If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2343    mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2344    stpcpy. */
2347 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2348 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2349 void *constfundata, unsigned int align, int endp)
2351 struct store_by_pieces data;
2353 if (len == 0)
2355 if (endp == 2)
2356 abort ();
2357 return to;
2360 if (! STORE_BY_PIECES_P (len, align))
2361 abort ();
2362 to = protect_from_queue (to, 1);
2363 data.constfun = constfun;
2364 data.constfundata = constfundata;
2365 data.len = len;
2366 data.to = to;
2367 store_by_pieces_1 (&data, align);
2368 if (endp)
2370 rtx to1;
2372 if (data.reverse)
2373 abort ();
2374 if (data.autinc_to)
2376 if (endp == 2)
2378 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2379 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2380 else
2381 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2382 -1));
2384 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2385 data.offset);
2387 else
2389 if (endp == 2)
2390 --data.offset;
2391 to1 = adjust_address (data.to, QImode, data.offset);
2393 return to1;
2395 else
2396 return data.to;
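/* A minimal sketch of the CONSTFUN protocol shared by can_store_by_pieces
   and store_by_pieces: the callback returns an rtx of mode MODE for the
   bytes at OFFSET.  This one just produces zeros, much like
   clear_by_pieces_1 below; the string builtins supply callbacks that read
   from the source string instead.  The 16-byte length is hypothetical.  */
#if 0
static rtx
example_constfun (void *data ATTRIBUTE_UNUSED,
                  HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

static void
example_store_by_pieces (rtx to)
{
  unsigned int align = MEM_ALIGN (to);

  /* Only call store_by_pieces if it is known to succeed.  */
  if (can_store_by_pieces (16, example_constfun, NULL, align))
    store_by_pieces (to, 16, example_constfun, NULL, align, 0);
}
#endif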
2399 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2400 rtx with BLKmode). The caller must pass TO through protect_from_queue
2401 before calling. ALIGN is maximum alignment we can assume. */
2403 static void
2404 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2406 struct store_by_pieces data;
2408 if (len == 0)
2409 return;
2411 data.constfun = clear_by_pieces_1;
2412 data.constfundata = NULL;
2413 data.len = len;
2414 data.to = to;
2415 store_by_pieces_1 (&data, align);
2418 /* Callback routine for clear_by_pieces.
2419 Return const0_rtx unconditionally. */
2421 static rtx
2422 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2423 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2424 enum machine_mode mode ATTRIBUTE_UNUSED)
2426 return const0_rtx;
2429 /* Subroutine of clear_by_pieces and store_by_pieces.
2430 Generate several move instructions to store LEN bytes of block TO. (A MEM
2431 rtx with BLKmode). The caller must pass TO through protect_from_queue
2432 before calling. ALIGN is maximum alignment we can assume. */
2434 static void
2435 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2436 unsigned int align ATTRIBUTE_UNUSED)
2438 rtx to_addr = XEXP (data->to, 0);
2439 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2440 enum machine_mode mode = VOIDmode, tmode;
2441 enum insn_code icode;
2443 data->offset = 0;
2444 data->to_addr = to_addr;
2445 data->autinc_to
2446 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2447 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2449 data->explicit_inc_to = 0;
2450 data->reverse
2451 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2452 if (data->reverse)
2453 data->offset = data->len;
2455 /* If storing requires more than two move insns,
2456 copy addresses to registers (to make displacements shorter)
2457 and use post-increment if available. */
2458 if (!data->autinc_to
2459 && move_by_pieces_ninsns (data->len, align) > 2)
2461 /* Determine the main mode we'll be using. */
2462 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2463 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2464 if (GET_MODE_SIZE (tmode) < max_size)
2465 mode = tmode;
2467 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2469 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = -1;
2474 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2475 && ! data->autinc_to)
2477 data->to_addr = copy_addr_to_reg (to_addr);
2478 data->autinc_to = 1;
2479 data->explicit_inc_to = 1;
2482 if ( !data->autinc_to && CONSTANT_P (to_addr))
2483 data->to_addr = copy_addr_to_reg (to_addr);
2486 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2487 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2488 align = MOVE_MAX * BITS_PER_UNIT;
2490 /* First store what we can in the largest integer mode, then go to
2491 successively smaller modes. */
2493 while (max_size > 1)
2495 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2496 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2497 if (GET_MODE_SIZE (tmode) < max_size)
2498 mode = tmode;
2500 if (mode == VOIDmode)
2501 break;
2503 icode = mov_optab->handlers[(int) mode].insn_code;
2504 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2505 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2507 max_size = GET_MODE_SIZE (mode);
2510 /* The code above should have handled everything. */
2511 if (data->len != 0)
2512 abort ();
2515 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2516 with move instructions for mode MODE. GENFUN is the gen_... function
2517 to make a move insn for that mode. DATA has all the other info. */
2519 static void
2520 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2521 struct store_by_pieces *data)
2523 unsigned int size = GET_MODE_SIZE (mode);
2524 rtx to1, cst;
2526 while (data->len >= size)
2528 if (data->reverse)
2529 data->offset -= size;
2531 if (data->autinc_to)
2532 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2533 data->offset);
2534 else
2535 to1 = adjust_address (data->to, mode, data->offset);
2537 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2538 emit_insn (gen_add2_insn (data->to_addr,
2539 GEN_INT (-(HOST_WIDE_INT) size)));
2541 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2542 emit_insn ((*genfun) (to1, cst));
2544 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2545 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2547 if (! data->reverse)
2548 data->offset += size;
2550 data->len -= size;
2554 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2555 its length in bytes. */
2558 clear_storage (rtx object, rtx size)
2560 rtx retval = 0;
2561 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2562 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2564 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2565 just move a zero. Otherwise, do this a piece at a time. */
2566 if (GET_MODE (object) != BLKmode
2567 && GET_CODE (size) == CONST_INT
2568 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2569 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2570 else
2572 object = protect_from_queue (object, 1);
2573 size = protect_from_queue (size, 0);
2575 if (size == const0_rtx)
2577 else if (GET_CODE (size) == CONST_INT
2578 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2579 clear_by_pieces (object, INTVAL (size), align);
2580 else if (clear_storage_via_clrstr (object, size, align))
2582 else
2583 retval = clear_storage_via_libcall (object, size);
2586 return retval;
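/* A minimal sketch of clearing a 32-byte BLKmode stack temporary (the size
   is hypothetical); clear_storage chooses between a plain move,
   clear_by_pieces, a clrstr pattern and a memset/bzero libcall as above.  */
#if 0
static void
example_clear_storage (void)
{
  rtx blk = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (blk, GEN_INT (32));
}
#endif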
2589 /* A subroutine of clear_storage. Expand a clrstr pattern;
2590 return true if successful. */
2592 static bool
2593 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2595 /* Try the most limited insn first, because there's no point
2596 including more than one in the machine description unless
2597 the more limited one has some advantage. */
2599 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2600 enum machine_mode mode;
2602 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2603 mode = GET_MODE_WIDER_MODE (mode))
2605 enum insn_code code = clrstr_optab[(int) mode];
2606 insn_operand_predicate_fn pred;
2608 if (code != CODE_FOR_nothing
2609 /* We don't need MODE to be narrower than
2610 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2611 the mode mask, as it is returned by the macro, it will
2612 definitely be less than the actual mode mask. */
2613 && ((GET_CODE (size) == CONST_INT
2614 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2615 <= (GET_MODE_MASK (mode) >> 1)))
2616 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2617 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2618 || (*pred) (object, BLKmode))
2619 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2620 || (*pred) (opalign, VOIDmode)))
2622 rtx op1;
2623 rtx last = get_last_insn ();
2624 rtx pat;
2626 op1 = convert_to_mode (mode, size, 1);
2627 pred = insn_data[(int) code].operand[1].predicate;
2628 if (pred != 0 && ! (*pred) (op1, mode))
2629 op1 = copy_to_mode_reg (mode, op1);
2631 pat = GEN_FCN ((int) code) (object, op1, opalign);
2632 if (pat)
2634 emit_insn (pat);
2635 return true;
2637 else
2638 delete_insns_since (last);
2642 return false;
2645 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2646 Return the return value of memset, 0 otherwise. */
2648 static rtx
2649 clear_storage_via_libcall (rtx object, rtx size)
2651 tree call_expr, arg_list, fn, object_tree, size_tree;
2652 enum machine_mode size_mode;
2653 rtx retval;
2655 /* OBJECT or SIZE may have been passed through protect_from_queue.
2657 It is unsafe to save the value generated by protect_from_queue
2658 and reuse it later. Consider what happens if emit_queue is
2659 called before the return value from protect_from_queue is used.
2661 Expansion of the CALL_EXPR below will call emit_queue before
2662 we are finished emitting RTL for argument setup. So if we are
2663 not careful we could get the wrong value for an argument.
2665 To avoid this problem we go ahead and emit code to copy OBJECT
2666 and SIZE into new pseudos. We can then place those new pseudos
2667 into an RTL_EXPR and use them later, even after a call to
2668 emit_queue.
2670 Note this is not strictly needed for library calls since they
2671 do not call emit_queue before loading their arguments. However,
2672 we may need to have library calls call emit_queue in the future
2673 since failing to do so could cause problems for targets which
2674 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2676 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2678 if (TARGET_MEM_FUNCTIONS)
2679 size_mode = TYPE_MODE (sizetype);
2680 else
2681 size_mode = TYPE_MODE (unsigned_type_node);
2682 size = convert_to_mode (size_mode, size, 1);
2683 size = copy_to_mode_reg (size_mode, size);
2685 /* It is incorrect to use the libcall calling conventions to call
2686 memset in this context. This could be a user call to memset and
2687 the user may wish to examine the return value from memset. For
2688 targets where libcalls and normal calls have different conventions
2689 for returning pointers, we could end up generating incorrect code.
2691 For convenience, we generate the call to bzero this way as well. */
2693 object_tree = make_tree (ptr_type_node, object);
2694 if (TARGET_MEM_FUNCTIONS)
2695 size_tree = make_tree (sizetype, size);
2696 else
2697 size_tree = make_tree (unsigned_type_node, size);
2699 fn = clear_storage_libcall_fn (true);
2700 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2701 if (TARGET_MEM_FUNCTIONS)
2702 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2703 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2705 /* Now we have to build up the CALL_EXPR itself. */
2706 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2710 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2712 /* If we are initializing a readonly value, show the above call
2713 clobbered it. Otherwise, a load from it may erroneously be
2714 hoisted from a loop. */
2715 if (RTX_UNCHANGING_P (object))
2716 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2718 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2721 /* A subroutine of clear_storage_via_libcall. Create the tree node
2722 for the function we use for block clears. The first time FOR_CALL
2723 is true, we call assemble_external. */
2725 static GTY(()) tree block_clear_fn;
2727 void
2728 init_block_clear_fn (const char *asmspec)
2730 if (!block_clear_fn)
2732 tree fn, args;
2734 if (TARGET_MEM_FUNCTIONS)
2736 fn = get_identifier ("memset");
2737 args = build_function_type_list (ptr_type_node, ptr_type_node,
2738 integer_type_node, sizetype,
2739 NULL_TREE);
2741 else
2743 fn = get_identifier ("bzero");
2744 args = build_function_type_list (void_type_node, ptr_type_node,
2745 unsigned_type_node, NULL_TREE);
2748 fn = build_decl (FUNCTION_DECL, fn, args);
2749 DECL_EXTERNAL (fn) = 1;
2750 TREE_PUBLIC (fn) = 1;
2751 DECL_ARTIFICIAL (fn) = 1;
2752 TREE_NOTHROW (fn) = 1;
2754 block_clear_fn = fn;
2757 if (asmspec)
2759 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2760 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2764 static tree
2765 clear_storage_libcall_fn (int for_call)
2767 static bool emitted_extern;
2769 if (!block_clear_fn)
2770 init_block_clear_fn (NULL);
2772 if (for_call && !emitted_extern)
2774 emitted_extern = true;
2775 make_decl_rtl (block_clear_fn, NULL);
2776 assemble_external (block_clear_fn);
2779 return block_clear_fn;
2782 /* Generate code to copy Y into X.
2783 Both Y and X must have the same mode, except that
2784 Y can be a constant with VOIDmode.
2785 This mode cannot be BLKmode; use emit_block_move for that.
2787 Return the last instruction emitted. */
2790 emit_move_insn (rtx x, rtx y)
2792 enum machine_mode mode = GET_MODE (x);
2793 rtx y_cst = NULL_RTX;
2794 rtx last_insn, set;
2796 x = protect_from_queue (x, 1);
2797 y = protect_from_queue (y, 0);
2799 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2800 abort ();
2802 /* Never force constant_p_rtx to memory. */
2803 if (GET_CODE (y) == CONSTANT_P_RTX)
2805 else if (CONSTANT_P (y))
2807 if (optimize
2808 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2809 && (last_insn = compress_float_constant (x, y)))
2810 return last_insn;
2812 y_cst = y;
2814 if (!LEGITIMATE_CONSTANT_P (y))
2816 y = force_const_mem (mode, y);
2818 /* If the target's cannot_force_const_mem prevented the spill,
2819 assume that the target's move expanders will also take care
2820 of the non-legitimate constant. */
2821 if (!y)
2822 y = y_cst;
2826 /* If X or Y are memory references, verify that their addresses are valid
2827 for the machine. */
2828 if (GET_CODE (x) == MEM
2829 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2830 && ! push_operand (x, GET_MODE (x)))
2831 || (flag_force_addr
2832 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2833 x = validize_mem (x);
2835 if (GET_CODE (y) == MEM
2836 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 || (flag_force_addr
2838 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2839 y = validize_mem (y);
2841 if (mode == BLKmode)
2842 abort ();
2844 last_insn = emit_move_insn_1 (x, y);
2846 if (y_cst && GET_CODE (x) == REG
2847 && (set = single_set (last_insn)) != NULL_RTX
2848 && SET_DEST (set) == x
2849 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2850 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2852 return last_insn;
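/* A minimal sketch of the most common use of emit_move_insn: loading a
   constant into a fresh SImode pseudo.  The value 42 is arbitrary.  */
#if 0
static rtx
example_emit_move (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif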
2855 /* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2860 emit_move_insn_1 (rtx x, rtx y)
2862 enum machine_mode mode = GET_MODE (x);
2863 enum machine_mode submode;
2864 enum mode_class class = GET_MODE_CLASS (mode);
2866 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2867 abort ();
2869 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2870 return
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2873 /* Expand complex moves by moving real part and imag part, if possible. */
2874 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2875 && BLKmode != (submode = GET_MODE_INNER (mode))
2876 && (mov_optab->handlers[(int) submode].insn_code
2877 != CODE_FOR_nothing))
2879 /* Don't split destination if it is a stack push. */
2880 int stack = push_operand (x, GET_MODE (x));
2882 #ifdef PUSH_ROUNDING
2883       /* In case we output to the stack, but the size is smaller than what the
2884 machine can push exactly, we need to use move instructions. */
2885 if (stack
2886 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2887 != GET_MODE_SIZE (submode)))
2889 rtx temp;
2890 HOST_WIDE_INT offset1, offset2;
2892 /* Do not use anti_adjust_stack, since we don't want to update
2893 stack_pointer_delta. */
2894 temp = expand_binop (Pmode,
2895 #ifdef STACK_GROWS_DOWNWARD
2896 sub_optab,
2897 #else
2898 add_optab,
2899 #endif
2900 stack_pointer_rtx,
2901 GEN_INT
2902 (PUSH_ROUNDING
2903 (GET_MODE_SIZE (GET_MODE (x)))),
2904 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2906 if (temp != stack_pointer_rtx)
2907 emit_move_insn (stack_pointer_rtx, temp);
2909 #ifdef STACK_GROWS_DOWNWARD
2910 offset1 = 0;
2911 offset2 = GET_MODE_SIZE (submode);
2912 #else
2913 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2914 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2915 + GET_MODE_SIZE (submode));
2916 #endif
2918 emit_move_insn (change_address (x, submode,
2919 gen_rtx_PLUS (Pmode,
2920 stack_pointer_rtx,
2921 GEN_INT (offset1))),
2922 gen_realpart (submode, y));
2923 emit_move_insn (change_address (x, submode,
2924 gen_rtx_PLUS (Pmode,
2925 stack_pointer_rtx,
2926 GEN_INT (offset2))),
2927 gen_imagpart (submode, y));
2929 else
2930 #endif
2931       /* If this is a stack push, push the highpart first, so it
2932 will be in the argument order.
2934 In that case, change_address is used only to convert
2935 the mode, not to change the address. */
2936 if (stack)
2938 /* Note that the real part always precedes the imag part in memory
2939 regardless of machine's endianness. */
2940 #ifdef STACK_GROWS_DOWNWARD
2941 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 gen_imagpart (submode, y));
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_realpart (submode, y));
2945 #else
2946 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2947 gen_realpart (submode, y));
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_imagpart (submode, y));
2950 #endif
2952 else
2954 rtx realpart_x, realpart_y;
2955 rtx imagpart_x, imagpart_y;
2957 /* If this is a complex value with each part being smaller than a
2958 word, the usual calling sequence will likely pack the pieces into
2959 a single register. Unfortunately, SUBREG of hard registers only
2960 deals in terms of words, so we have a problem converting input
2961 arguments to the CONCAT of two registers that is used elsewhere
2962 for complex values. If this is before reload, we can copy it into
2963 memory and reload. FIXME, we should see about using extract and
2964 insert on integer registers, but complex short and complex char
2965 variables should be rarely used. */
2966 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2967 && (reload_in_progress | reload_completed) == 0)
2969 int packed_dest_p
2970 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2971 int packed_src_p
2972 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2974 if (packed_dest_p || packed_src_p)
2976 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2977 ? MODE_FLOAT : MODE_INT);
2979 enum machine_mode reg_mode
2980 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2982 if (reg_mode != BLKmode)
2984 rtx mem = assign_stack_temp (reg_mode,
2985 GET_MODE_SIZE (mode), 0);
2986 rtx cmem = adjust_address (mem, mode, 0);
2988 cfun->cannot_inline
2989 = N_("function using short complex types cannot be inline");
2991 if (packed_dest_p)
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2995 emit_move_insn_1 (cmem, y);
2996 return emit_move_insn_1 (sreg, mem);
2998 else
3000 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3002 emit_move_insn_1 (mem, sreg);
3003 return emit_move_insn_1 (x, cmem);
3009 realpart_x = gen_realpart (submode, x);
3010 realpart_y = gen_realpart (submode, y);
3011 imagpart_x = gen_imagpart (submode, x);
3012 imagpart_y = gen_imagpart (submode, y);
3014 /* Show the output dies here. This is necessary for SUBREGs
3015 of pseudos since we cannot track their lifetimes correctly;
3016 hard regs shouldn't appear here except as return values.
3017 We never want to emit such a clobber after reload. */
3018 if (x != y
3019 && ! (reload_in_progress || reload_completed)
3020 && (GET_CODE (realpart_x) == SUBREG
3021 || GET_CODE (imagpart_x) == SUBREG))
3022 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3024 emit_move_insn (realpart_x, realpart_y);
3025 emit_move_insn (imagpart_x, imagpart_y);
3028 return get_last_insn ();
3031 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3032 find a mode to do it in. If we have a movcc, use it. Otherwise,
3033 find the MODE_INT mode of the same width. */
3034 else if (GET_MODE_CLASS (mode) == MODE_CC
3035 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3037 enum insn_code insn_code;
3038 enum machine_mode tmode = VOIDmode;
3039 rtx x1 = x, y1 = y;
3041 if (mode != CCmode
3042 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3043 tmode = CCmode;
3044 else
3045 for (tmode = QImode; tmode != VOIDmode;
3046 tmode = GET_MODE_WIDER_MODE (tmode))
3047 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3048 break;
3050 if (tmode == VOIDmode)
3051 abort ();
3053 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3054 may call change_address which is not appropriate if we were
3055 called when a reload was in progress. We don't have to worry
3056 about changing the address since the size in bytes is supposed to
3057 be the same. Copy the MEM to change the mode and move any
3058 substitutions from the old MEM to the new one. */
3060 if (reload_in_progress)
3062 x = gen_lowpart_common (tmode, x1);
3063 if (x == 0 && GET_CODE (x1) == MEM)
3065 x = adjust_address_nv (x1, tmode, 0);
3066 copy_replacements (x1, x);
3069 y = gen_lowpart_common (tmode, y1);
3070 if (y == 0 && GET_CODE (y1) == MEM)
3072 y = adjust_address_nv (y1, tmode, 0);
3073 copy_replacements (y1, y);
3076 else
3078 x = gen_lowpart (tmode, x);
3079 y = gen_lowpart (tmode, y);
3082 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3083 return emit_insn (GEN_FCN (insn_code) (x, y));
3086 /* Try using a move pattern for the corresponding integer mode. This is
3087 only safe when simplify_subreg can convert MODE constants into integer
3088 constants. At present, it can only do this reliably if the value
3089 fits within a HOST_WIDE_INT. */
3090 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3091 && (submode = int_mode_for_mode (mode)) != BLKmode
3092 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3093 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3094 (simplify_gen_subreg (submode, x, mode, 0),
3095 simplify_gen_subreg (submode, y, mode, 0)));
3097 /* This will handle any multi-word or full-word mode that lacks a move_insn
3098 pattern. However, you will get better code if you define such patterns,
3099 even if they must turn into multiple assembler instructions. */
3100 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3102 rtx last_insn = 0;
3103 rtx seq, inner;
3104 int need_clobber;
3105 int i;
3107 #ifdef PUSH_ROUNDING
3109 /* If X is a push on the stack, do the push now and replace
3110 X with a reference to the stack pointer. */
3111 if (push_operand (x, GET_MODE (x)))
3113 rtx temp;
3114 enum rtx_code code;
3116 /* Do not use anti_adjust_stack, since we don't want to update
3117 stack_pointer_delta. */
3118 temp = expand_binop (Pmode,
3119 #ifdef STACK_GROWS_DOWNWARD
3120 sub_optab,
3121 #else
3122 add_optab,
3123 #endif
3124 stack_pointer_rtx,
3125 GEN_INT
3126 (PUSH_ROUNDING
3127 (GET_MODE_SIZE (GET_MODE (x)))),
3128 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3130 if (temp != stack_pointer_rtx)
3131 emit_move_insn (stack_pointer_rtx, temp);
3133 code = GET_CODE (XEXP (x, 0));
3135 /* Just hope that small offsets off SP are OK. */
3136 if (code == POST_INC)
3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3138 GEN_INT (-((HOST_WIDE_INT)
3139 GET_MODE_SIZE (GET_MODE (x)))));
3140 else if (code == POST_DEC)
3141 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3142 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3143 else
3144 temp = stack_pointer_rtx;
3146 x = change_address (x, VOIDmode, temp);
3148 #endif
3150 /* If we are in reload, see if either operand is a MEM whose address
3151 is scheduled for replacement. */
3152 if (reload_in_progress && GET_CODE (x) == MEM
3153 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3154 x = replace_equiv_address_nv (x, inner);
3155 if (reload_in_progress && GET_CODE (y) == MEM
3156 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3157 y = replace_equiv_address_nv (y, inner);
3159 start_sequence ();
3161 need_clobber = 0;
3162 for (i = 0;
3163 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3164 i++)
3166 rtx xpart = operand_subword (x, i, 1, mode);
3167 rtx ypart = operand_subword (y, i, 1, mode);
3169 /* If we can't get a part of Y, put Y into memory if it is a
3170 constant. Otherwise, force it into a register. If we still
3171 can't get a part of Y, abort. */
3172 if (ypart == 0 && CONSTANT_P (y))
3174 y = force_const_mem (mode, y);
3175 ypart = operand_subword (y, i, 1, mode);
3177 else if (ypart == 0)
3178 ypart = operand_subword_force (y, i, mode);
3180 if (xpart == 0 || ypart == 0)
3181 abort ();
3183 need_clobber |= (GET_CODE (xpart) == SUBREG);
3185 last_insn = emit_move_insn (xpart, ypart);
3188 seq = get_insns ();
3189 end_sequence ();
3191 /* Show the output dies here. This is necessary for SUBREGs
3192 of pseudos since we cannot track their lifetimes correctly;
3193 hard regs shouldn't appear here except as return values.
3194 We never want to emit such a clobber after reload. */
3195 if (x != y
3196 && ! (reload_in_progress || reload_completed)
3197 && need_clobber != 0)
3198 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3200 emit_insn (seq);
3202 return last_insn;
3204 else
3205 abort ();
3208 /* If Y is representable exactly in a narrower mode, and the target can
3209 perform the extension directly from constant or memory, then emit the
3210 move as an extension. */
3212 static rtx
3213 compress_float_constant (rtx x, rtx y)
3215 enum machine_mode dstmode = GET_MODE (x);
3216 enum machine_mode orig_srcmode = GET_MODE (y);
3217 enum machine_mode srcmode;
3218 REAL_VALUE_TYPE r;
3220 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3222 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3223 srcmode != orig_srcmode;
3224 srcmode = GET_MODE_WIDER_MODE (srcmode))
3226 enum insn_code ic;
3227 rtx trunc_y, last_insn;
3229 /* Skip if the target can't extend this way. */
3230 ic = can_extend_p (dstmode, srcmode, 0);
3231 if (ic == CODE_FOR_nothing)
3232 continue;
3234 /* Skip if the narrowed value isn't exact. */
3235 if (! exact_real_truncate (srcmode, &r))
3236 continue;
3238 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3240 if (LEGITIMATE_CONSTANT_P (trunc_y))
3242 /* Skip if the target needs extra instructions to perform
3243 the extension. */
3244 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3245 continue;
3247 else if (float_extend_from_mem[dstmode][srcmode])
3248 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3249 else
3250 continue;
3252 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3253 last_insn = get_last_insn ();
3255 if (GET_CODE (x) == REG)
3256 set_unique_reg_note (last_insn, REG_EQUAL, y);
3258 return last_insn;
3261 return NULL_RTX;
3264 /* Pushing data onto the stack. */
3266 /* Push a block of length SIZE (perhaps variable)
3267 and return an rtx to address the beginning of the block.
3268 Note that it is not possible for the value returned to be a QUEUED.
3269 The value may be virtual_outgoing_args_rtx.
3271 EXTRA is the number of bytes of padding to push in addition to SIZE.
3272 BELOW nonzero means this padding comes at low addresses;
3273 otherwise, the padding comes at high addresses. */
3276 push_block (rtx size, int extra, int below)
3278 rtx temp;
3280 size = convert_modes (Pmode, ptr_mode, size, 1);
3281 if (CONSTANT_P (size))
3282 anti_adjust_stack (plus_constant (size, extra));
3283 else if (GET_CODE (size) == REG && extra == 0)
3284 anti_adjust_stack (size);
3285 else
3287 temp = copy_to_mode_reg (Pmode, size);
3288 if (extra != 0)
3289 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3290 temp, 0, OPTAB_LIB_WIDEN);
3291 anti_adjust_stack (temp);
3294 #ifndef STACK_GROWS_DOWNWARD
3295 if (0)
3296 #else
3297 if (1)
3298 #endif
3300 temp = virtual_outgoing_args_rtx;
3301 if (extra != 0 && below)
3302 temp = plus_constant (temp, extra);
3304 else
3306 if (GET_CODE (size) == CONST_INT)
3307 temp = plus_constant (virtual_outgoing_args_rtx,
3308 -INTVAL (size) - (below ? 0 : extra));
3309 else if (extra != 0 && !below)
3310 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3311 negate_rtx (Pmode, plus_constant (size, extra)));
3312 else
3313 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3314 negate_rtx (Pmode, size));
3317 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
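/* A minimal sketch of push_block: reserve 32 bytes (a hypothetical size) of
   stack space with no extra padding and get the address of its start.  */
#if 0
static rtx
example_push_block (void)
{
  return push_block (GEN_INT (32), 0, 0);
}
#endif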
3320 #ifdef PUSH_ROUNDING
3322 /* Emit single push insn. */
3324 static void
3325 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3327 rtx dest_addr;
3328 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3329 rtx dest;
3330 enum insn_code icode;
3331 insn_operand_predicate_fn pred;
3333 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3334   /* If there is a push pattern, use it.  Otherwise try the old way of passing a
3335      MEM representing the push operation to the move expander. */
3336 icode = push_optab->handlers[(int) mode].insn_code;
3337 if (icode != CODE_FOR_nothing)
3339 if (((pred = insn_data[(int) icode].operand[0].predicate)
3340 && !((*pred) (x, mode))))
3341 x = force_reg (mode, x);
3342 emit_insn (GEN_FCN (icode) (x));
3343 return;
3345 if (GET_MODE_SIZE (mode) == rounded_size)
3346 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3347 /* If we are to pad downward, adjust the stack pointer first and
3348 then store X into the stack location using an offset. This is
3349 because emit_move_insn does not know how to pad; it does not have
3350 access to type. */
3351 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3353 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3354 HOST_WIDE_INT offset;
3356 emit_move_insn (stack_pointer_rtx,
3357 expand_binop (Pmode,
3358 #ifdef STACK_GROWS_DOWNWARD
3359 sub_optab,
3360 #else
3361 add_optab,
3362 #endif
3363 stack_pointer_rtx,
3364 GEN_INT (rounded_size),
3365 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3367 offset = (HOST_WIDE_INT) padding_size;
3368 #ifdef STACK_GROWS_DOWNWARD
3369 if (STACK_PUSH_CODE == POST_DEC)
3370 /* We have already decremented the stack pointer, so get the
3371 previous value. */
3372 offset += (HOST_WIDE_INT) rounded_size;
3373 #else
3374 if (STACK_PUSH_CODE == POST_INC)
3375 /* We have already incremented the stack pointer, so get the
3376 previous value. */
3377 offset -= (HOST_WIDE_INT) rounded_size;
3378 #endif
3379 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3381 else
3383 #ifdef STACK_GROWS_DOWNWARD
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3386 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3387 #else
3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3390 GEN_INT (rounded_size));
3391 #endif
3392 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3395 dest = gen_rtx_MEM (mode, dest_addr);
3397 if (type != 0)
3399 set_mem_attributes (dest, type, 1);
3401 if (flag_optimize_sibling_calls)
3402 /* Function incoming arguments may overlap with sibling call
3403 outgoing arguments and we cannot allow reordering of reads
3404 from function arguments with stores to outgoing arguments
3405 of sibling calls. */
3406 set_mem_alias_set (dest, 0);
3408 emit_move_insn (dest, x);
3410 #endif
3412 /* Generate code to push X onto the stack, assuming it has mode MODE and
3413 type TYPE.
3414 MODE is redundant except when X is a CONST_INT (since they don't
3415 carry mode info).
3416 SIZE is an rtx for the size of data to be copied (in bytes),
3417 needed only if X is BLKmode.
3419 ALIGN (in bits) is maximum alignment we can assume.
3421 If PARTIAL and REG are both nonzero, then copy that many of the first
3422 words of X into registers starting with REG, and push the rest of X.
3423 The amount of space pushed is decreased by PARTIAL words,
3424 rounded *down* to a multiple of PARM_BOUNDARY.
3425 REG must be a hard register in this case.
3426    If REG is zero but PARTIAL is not, take all other actions for an
3427 argument partially in registers, but do not actually load any
3428 registers.
3430 EXTRA is the amount in bytes of extra space to leave next to this arg.
3431 This is ignored if an argument block has already been allocated.
3433 On a machine that lacks real push insns, ARGS_ADDR is the address of
3434 the bottom of the argument block for this call. We use indexing off there
3435    to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3436    argument block has not been preallocated.
3438 ARGS_SO_FAR is the size of args previously pushed for this call.
3440 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3441 for arguments passed in registers. If nonzero, it will be the number
3442 of bytes required. */
3444 void
3445 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3446 unsigned int align, int partial, rtx reg, int extra,
3447 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3448 rtx alignment_pad)
3450 rtx xinner;
3451 enum direction stack_direction
3452 #ifdef STACK_GROWS_DOWNWARD
3453 = downward;
3454 #else
3455 = upward;
3456 #endif
3458 /* Decide where to pad the argument: `downward' for below,
3459 `upward' for above, or `none' for don't pad it.
3460 Default is below for small data on big-endian machines; else above. */
3461 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3463 /* Invert direction if stack is post-decrement.
3464 FIXME: why? */
3465 if (STACK_PUSH_CODE == POST_DEC)
3466 if (where_pad != none)
3467 where_pad = (where_pad == downward ? upward : downward);
3469 xinner = x = protect_from_queue (x, 0);
3471 if (mode == BLKmode)
3473 /* Copy a block into the stack, entirely or partially. */
3475 rtx temp;
3476 int used = partial * UNITS_PER_WORD;
3477 int offset;
3478 int skip;
3480 if (reg && GET_CODE (reg) == PARALLEL)
3482 /* Use the size of the elt to compute offset. */
3483 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3484 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3485 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3487 else
3488 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3490 if (size == 0)
3491 abort ();
3493 used -= offset;
3495 /* USED is now the # of bytes we need not copy to the stack
3496 because registers will take care of them. */
3498 if (partial != 0)
3499 xinner = adjust_address (xinner, BLKmode, used);
3501 /* If the partial register-part of the arg counts in its stack size,
3502 skip the part of stack space corresponding to the registers.
3503 Otherwise, start copying to the beginning of the stack space,
3504 by setting SKIP to 0. */
3505 skip = (reg_parm_stack_space == 0) ? 0 : used;
3507 #ifdef PUSH_ROUNDING
3508 /* Do it with several push insns if that doesn't take lots of insns
3509 and if there is no difficulty with push insns that skip bytes
3510 on the stack for alignment purposes. */
3511 if (args_addr == 0
3512 && PUSH_ARGS
3513 && GET_CODE (size) == CONST_INT
3514 && skip == 0
3515 && MEM_ALIGN (xinner) >= align
3516 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3517 /* Here we avoid the case of a structure whose weak alignment
3518 forces many pushes of a small amount of data,
3519 and such small pushes do rounding that causes trouble. */
3520 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3521 || align >= BIGGEST_ALIGNMENT
3522 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3523 == (align / BITS_PER_UNIT)))
3524 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3526 /* Push padding now if padding above and stack grows down,
3527 or if padding below and stack grows up.
3528 But if space already allocated, this has already been done. */
3529 if (extra && args_addr == 0
3530 && where_pad != none && where_pad != stack_direction)
3531 anti_adjust_stack (GEN_INT (extra));
3533 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3535 else
3536 #endif /* PUSH_ROUNDING */
3538 rtx target;
3540 /* Otherwise make space on the stack and copy the data
3541 to the address of that space. */
3543 /* Deduct words put into registers from the size we must copy. */
3544 if (partial != 0)
3546 if (GET_CODE (size) == CONST_INT)
3547 size = GEN_INT (INTVAL (size) - used);
3548 else
3549 size = expand_binop (GET_MODE (size), sub_optab, size,
3550 GEN_INT (used), NULL_RTX, 0,
3551 OPTAB_LIB_WIDEN);
3554 /* Get the address of the stack space.
3555 In this case, we do not deal with EXTRA separately.
3556 A single stack adjust will do. */
3557 if (! args_addr)
3559 temp = push_block (size, extra, where_pad == downward);
3560 extra = 0;
3562 else if (GET_CODE (args_so_far) == CONST_INT)
3563 temp = memory_address (BLKmode,
3564 plus_constant (args_addr,
3565 skip + INTVAL (args_so_far)));
3566 else
3567 temp = memory_address (BLKmode,
3568 plus_constant (gen_rtx_PLUS (Pmode,
3569 args_addr,
3570 args_so_far),
3571 skip));
3573 if (!ACCUMULATE_OUTGOING_ARGS)
3575 /* If the source is referenced relative to the stack pointer,
3576 copy it to another register to stabilize it. We do not need
3577 to do this if we know that we won't be changing sp. */
3579 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3580 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3581 temp = copy_to_reg (temp);
3584 target = gen_rtx_MEM (BLKmode, temp);
3586 if (type != 0)
3588 set_mem_attributes (target, type, 1);
3589 /* Function incoming arguments may overlap with sibling call
3590 outgoing arguments and we cannot allow reordering of reads
3591 from function arguments with stores to outgoing arguments
3592 of sibling calls. */
3593 set_mem_alias_set (target, 0);
3596 /* ALIGN may well be better aligned than TYPE, e.g. due to
3597 PARM_BOUNDARY. Assume the caller isn't lying. */
3598 set_mem_align (target, align);
3600 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3603 else if (partial > 0)
3605 /* Scalar partly in registers. */
3607 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3608 int i;
3609 int not_stack;
3610 /* # words of start of argument
3611 that we must make space for but need not store. */
3612 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3613 int args_offset = INTVAL (args_so_far);
3614 int skip;
3616 /* Push padding now if padding above and stack grows down,
3617 or if padding below and stack grows up.
3618 But if space already allocated, this has already been done. */
3619 if (extra && args_addr == 0
3620 && where_pad != none && where_pad != stack_direction)
3621 anti_adjust_stack (GEN_INT (extra));
3623 /* If we make space by pushing it, we might as well push
3624 the real data. Otherwise, we can leave OFFSET nonzero
3625 and leave the space uninitialized. */
3626 if (args_addr == 0)
3627 offset = 0;
3629 /* Now NOT_STACK gets the number of words that we don't need to
3630 allocate on the stack. */
3631 not_stack = partial - offset;
3633 /* If the partial register-part of the arg counts in its stack size,
3634 skip the part of stack space corresponding to the registers.
3635 Otherwise, start copying to the beginning of the stack space,
3636 by setting SKIP to 0. */
3637 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3639 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3640 x = validize_mem (force_const_mem (mode, x));
3642 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3643 SUBREGs of such registers are not allowed. */
3644 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3645 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3646 x = copy_to_reg (x);
3648 /* Loop over all the words allocated on the stack for this arg. */
3649 /* We can do it by words, because any scalar bigger than a word
3650          has a size that is a multiple of a word. */
3651 #ifndef PUSH_ARGS_REVERSED
3652 for (i = not_stack; i < size; i++)
3653 #else
3654 for (i = size - 1; i >= not_stack; i--)
3655 #endif
3656 if (i >= not_stack + offset)
3657 emit_push_insn (operand_subword_force (x, i, mode),
3658 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3659 0, args_addr,
3660 GEN_INT (args_offset + ((i - not_stack + skip)
3661 * UNITS_PER_WORD)),
3662 reg_parm_stack_space, alignment_pad);
3664 else
3666 rtx addr;
3667 rtx dest;
3669 /* Push padding now if padding above and stack grows down,
3670 or if padding below and stack grows up.
3671 But if space already allocated, this has already been done. */
3672 if (extra && args_addr == 0
3673 && where_pad != none && where_pad != stack_direction)
3674 anti_adjust_stack (GEN_INT (extra));
3676 #ifdef PUSH_ROUNDING
3677 if (args_addr == 0 && PUSH_ARGS)
3678 emit_single_push_insn (mode, x, type);
3679 else
3680 #endif
3682 if (GET_CODE (args_so_far) == CONST_INT)
3683 addr
3684 = memory_address (mode,
3685 plus_constant (args_addr,
3686 INTVAL (args_so_far)));
3687 else
3688 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3689 args_so_far));
3690 dest = gen_rtx_MEM (mode, addr);
3691 if (type != 0)
3693 set_mem_attributes (dest, type, 1);
3694 /* Function incoming arguments may overlap with sibling call
3695 outgoing arguments and we cannot allow reordering of reads
3696 from function arguments with stores to outgoing arguments
3697 of sibling calls. */
3698 set_mem_alias_set (dest, 0);
3701 emit_move_insn (dest, x);
3705 /* If part should go in registers, copy that part
3706 into the appropriate registers. Do this now, at the end,
3707 since mem-to-mem copies above may do function calls. */
3708 if (partial > 0 && reg != 0)
3710 /* Handle calls that pass values in multiple non-contiguous locations.
3711 The Irix 6 ABI has examples of this. */
3712 if (GET_CODE (reg) == PARALLEL)
3713 emit_group_load (reg, x, type, -1);
3714 else
3715 move_block_to_reg (REGNO (reg), x, partial, mode);
3718 if (extra && args_addr == 0 && where_pad == stack_direction)
3719 anti_adjust_stack (GEN_INT (extra));
3721 if (alignment_pad && args_addr == 0)
3722 anti_adjust_stack (alignment_pad);
3725 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3726 operations. */
3728 static rtx
3729 get_subtarget (rtx x)
3731 return ((x == 0
3732 /* Only registers can be subtargets. */
3733 || GET_CODE (x) != REG
3734 /* If the register is readonly, it can't be set more than once. */
3735 || RTX_UNCHANGING_P (x)
3736 /* Don't use hard regs to avoid extending their life. */
3737 || REGNO (x) < FIRST_PSEUDO_REGISTER
3738 /* Avoid subtargets inside loops,
3739 since they hide some invariant expressions. */
3740 || preserve_subexpressions_p ())
3741 ? 0 : x);
3744 /* Expand an assignment that stores the value of FROM into TO.
3745 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3746 (This may contain a QUEUED rtx;
3747 if the value is constant, this rtx is a constant.)
3748 Otherwise, the returned value is NULL_RTX. */
3750 rtx
3751 expand_assignment (tree to, tree from, int want_value)
3753 rtx to_rtx = 0;
3754 rtx result;
3756 /* Don't crash if the lhs of the assignment was erroneous. */
3758 if (TREE_CODE (to) == ERROR_MARK)
3760 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3761 return want_value ? result : NULL_RTX;
3764 /* Assignment of a structure component needs special treatment
3765 if the structure component's rtx is not simply a MEM.
3766 Assignment of an array element at a constant index, and assignment of
3767 an array element in an unaligned packed structure field, has the same
3768 problem. */
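/* As a hypothetical illustration (the names S, X, ARR and EXAMPLE are
   made up), source-level assignments that take the path below include a
   bit-field component reference and an array element at a constant index:

       struct s { int a : 3; int b : 5; } x;
       int arr[4];

       void example (void) { x.b = 7; arr[2] = 1; }

   Here X.B is a COMPONENT_REF whose rtx is not simply a MEM, and ARR[2]
   is an ARRAY_REF with a constant index.  */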
3770 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3771 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3772 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3774 enum machine_mode mode1;
3775 HOST_WIDE_INT bitsize, bitpos;
3776 rtx orig_to_rtx;
3777 tree offset;
3778 int unsignedp;
3779 int volatilep = 0;
3780 tree tem;
3782 push_temp_slots ();
3783 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3784 &unsignedp, &volatilep);
3786 /* If we are going to use store_bit_field and extract_bit_field,
3787 make sure to_rtx will be safe for multiple use. */
3789 if (mode1 == VOIDmode && want_value)
3790 tem = stabilize_reference (tem);
3792 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3794 if (offset != 0)
3796 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3798 if (GET_CODE (to_rtx) != MEM)
3799 abort ();
3801 #ifdef POINTERS_EXTEND_UNSIGNED
3802 if (GET_MODE (offset_rtx) != Pmode)
3803 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3804 #else
3805 if (GET_MODE (offset_rtx) != ptr_mode)
3806 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3807 #endif
3809 /* A constant address in TO_RTX can have VOIDmode; we must not try
3810 to call force_reg in that case, so avoid it. */
3811 if (GET_CODE (to_rtx) == MEM
3812 && GET_MODE (to_rtx) == BLKmode
3813 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3814 && bitsize > 0
3815 && (bitpos % bitsize) == 0
3816 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3817 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3819 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3820 bitpos = 0;
3823 to_rtx = offset_address (to_rtx, offset_rtx,
3824 highest_pow2_factor_for_type (TREE_TYPE (to),
3825 offset));
3828 if (GET_CODE (to_rtx) == MEM)
3830 /* If the field is at offset zero, we could have been given the
3831 DECL_RTX of the parent struct. Don't munge it. */
3832 to_rtx = shallow_copy_rtx (to_rtx);
3834 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3837 /* Deal with volatile and readonly fields. The former is only done
3838 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3839 if (volatilep && GET_CODE (to_rtx) == MEM)
3841 if (to_rtx == orig_to_rtx)
3842 to_rtx = copy_rtx (to_rtx);
3843 MEM_VOLATILE_P (to_rtx) = 1;
3846 if (TREE_CODE (to) == COMPONENT_REF
3847 && TREE_READONLY (TREE_OPERAND (to, 1))
3848 /* We can't assert that a MEM won't be set more than once
3849 if the component is not addressable because another
3850 non-addressable component may be referenced by the same MEM. */
3851 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3853 if (to_rtx == orig_to_rtx)
3854 to_rtx = copy_rtx (to_rtx);
3855 RTX_UNCHANGING_P (to_rtx) = 1;
3858 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3860 if (to_rtx == orig_to_rtx)
3861 to_rtx = copy_rtx (to_rtx);
3862 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3865 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3866 (want_value
3867 /* Spurious cast for HPUX compiler. */
3868 ? ((enum machine_mode)
3869 TYPE_MODE (TREE_TYPE (to)))
3870 : VOIDmode),
3871 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3873 preserve_temp_slots (result);
3874 free_temp_slots ();
3875 pop_temp_slots ();
3877 /* If the value is meaningful, convert RESULT to the proper mode.
3878 Otherwise, return nothing. */
3879 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3880 TYPE_MODE (TREE_TYPE (from)),
3881 result,
3882 TREE_UNSIGNED (TREE_TYPE (to)))
3883 : NULL_RTX);
3886 /* If the rhs is a function call and its value is not an aggregate,
3887 call the function before we start to compute the lhs.
3888 This is needed for correct code for cases such as
3889 val = setjmp (buf) on machines where reference to val
3890 requires loading up part of an address in a separate insn.
3892 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3893 since it might be a promoted variable where the zero- or sign- extension
3894 needs to be done. Handling this in the normal way is safe because no
3895 computation is done before the call. */
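/* A minimal sketch of the setjmp case mentioned above (the names BUF,
   VAL and EXAMPLE are hypothetical):

       #include <setjmp.h>
       jmp_buf buf;
       int val;
       void example (void) { val = setjmp (buf); }

   Because VAL here is a file-scope variable whose DECL_RTL is a MEM, the
   call is expanded first and its result is then stored into VAL.  */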
3896 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3897 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3898 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3899 && GET_CODE (DECL_RTL (to)) == REG))
3901 rtx value;
3903 push_temp_slots ();
3904 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3905 if (to_rtx == 0)
3906 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3908 /* Handle calls that return values in multiple non-contiguous locations.
3909 The Irix 6 ABI has examples of this. */
3910 if (GET_CODE (to_rtx) == PARALLEL)
3911 emit_group_load (to_rtx, value, TREE_TYPE (from),
3912 int_size_in_bytes (TREE_TYPE (from)));
3913 else if (GET_MODE (to_rtx) == BLKmode)
3914 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3915 else
3917 if (POINTER_TYPE_P (TREE_TYPE (to)))
3918 value = convert_memory_address (GET_MODE (to_rtx), value);
3919 emit_move_insn (to_rtx, value);
3921 preserve_temp_slots (to_rtx);
3922 free_temp_slots ();
3923 pop_temp_slots ();
3924 return want_value ? to_rtx : NULL_RTX;
3927 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3928 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3930 if (to_rtx == 0)
3931 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3933 /* Don't move directly into a return register. */
3934 if (TREE_CODE (to) == RESULT_DECL
3935 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3937 rtx temp;
3939 push_temp_slots ();
3940 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3942 if (GET_CODE (to_rtx) == PARALLEL)
3943 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3944 int_size_in_bytes (TREE_TYPE (from)));
3945 else
3946 emit_move_insn (to_rtx, temp);
3948 preserve_temp_slots (to_rtx);
3949 free_temp_slots ();
3950 pop_temp_slots ();
3951 return want_value ? to_rtx : NULL_RTX;
3954 /* In case we are returning the contents of an object which overlaps
3955 the place the value is being stored, use a safe function when copying
3956 a value through a pointer into a structure value return block. */
3957 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3958 && current_function_returns_struct
3959 && !current_function_returns_pcc_struct)
3961 rtx from_rtx, size;
3963 push_temp_slots ();
3964 size = expr_size (from);
3965 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3967 if (TARGET_MEM_FUNCTIONS)
3968 emit_library_call (memmove_libfunc, LCT_NORMAL,
3969 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3970 XEXP (from_rtx, 0), Pmode,
3971 convert_to_mode (TYPE_MODE (sizetype),
3972 size, TREE_UNSIGNED (sizetype)),
3973 TYPE_MODE (sizetype));
3974 else
3975 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3976 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3977 XEXP (to_rtx, 0), Pmode,
3978 convert_to_mode (TYPE_MODE (integer_type_node),
3979 size,
3980 TREE_UNSIGNED (integer_type_node)),
3981 TYPE_MODE (integer_type_node));
3983 preserve_temp_slots (to_rtx);
3984 free_temp_slots ();
3985 pop_temp_slots ();
3986 return want_value ? to_rtx : NULL_RTX;
3989 /* Compute FROM and store the value in the rtx we got. */
3991 push_temp_slots ();
3992 result = store_expr (from, to_rtx, want_value);
3993 preserve_temp_slots (result);
3994 free_temp_slots ();
3995 pop_temp_slots ();
3996 return want_value ? result : NULL_RTX;
3999 /* Generate code for computing expression EXP,
4000 and storing the value into TARGET.
4001 TARGET may contain a QUEUED rtx.
4003 If WANT_VALUE & 1 is nonzero, return a copy of the value
4004 not in TARGET, so that we can be sure to use the proper
4005 value in a containing expression even if TARGET has something
4006 else stored in it. If possible, we copy the value through a pseudo
4007 and return that pseudo. Or, if the value is constant, we try to
4008 return the constant. In some cases, we return a pseudo
4009 copied *from* TARGET.
4011 If the mode is BLKmode then we may return TARGET itself.
4012 It turns out that in BLKmode it doesn't cause a problem,
4013 because C has no operators that could combine two different
4014 assignments into the same BLKmode object with different values
4015 with no sequence point. Will other languages need this to
4016 be more thorough?
4018 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4019 to catch quickly any cases where the caller uses the value
4020 and fails to set WANT_VALUE.
4022 If WANT_VALUE & 2 is set, this is a store into a call param on the
4023 stack, and block moves may need to be treated specially. */
4025 rtx
4026 store_expr (tree exp, rtx target, int want_value)
4028 rtx temp;
4029 int dont_return_target = 0;
4030 int dont_store_target = 0;
4032 if (VOID_TYPE_P (TREE_TYPE (exp)))
4034 /* C++ can generate ?: expressions with a throw expression in one
4035 branch and an rvalue in the other. Here, we resolve attempts to
4036 store the throw expression's nonexistent result. */
4037 if (want_value)
4038 abort ();
4039 expand_expr (exp, const0_rtx, VOIDmode, 0);
4040 return NULL_RTX;
4042 if (TREE_CODE (exp) == COMPOUND_EXPR)
4044 /* Perform first part of compound expression, then assign from second
4045 part. */
4046 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4047 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4048 emit_queue ();
4049 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4051 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4053 /* For conditional expression, get safe form of the target. Then
4054 test the condition, doing the appropriate assignment on either
4055 side. This avoids the creation of unnecessary temporaries.
4056 For non-BLKmode, it is more efficient not to do this. */
4058 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4060 emit_queue ();
4061 target = protect_from_queue (target, 1);
4063 do_pending_stack_adjust ();
4064 NO_DEFER_POP;
4065 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4066 start_cleanup_deferral ();
4067 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4068 end_cleanup_deferral ();
4069 emit_queue ();
4070 emit_jump_insn (gen_jump (lab2));
4071 emit_barrier ();
4072 emit_label (lab1);
4073 start_cleanup_deferral ();
4074 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4075 end_cleanup_deferral ();
4076 emit_queue ();
4077 emit_label (lab2);
4078 OK_DEFER_POP;
4080 return want_value & 1 ? target : NULL_RTX;
4082 else if (queued_subexp_p (target))
4083 /* If target contains a postincrement, let's not risk
4084 using it as the place to generate the rhs. */
4086 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4088 /* Expand EXP into a new pseudo. */
4089 temp = gen_reg_rtx (GET_MODE (target));
4090 temp = expand_expr (exp, temp, GET_MODE (target),
4091 (want_value & 2
4092 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4094 else
4095 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4096 (want_value & 2
4097 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4099 /* If target is volatile, ANSI requires accessing the value
4100 *from* the target, if it is accessed. So make that happen.
4101 In no case return the target itself. */
4102 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4103 dont_return_target = 1;
4105 else if ((want_value & 1) != 0
4106 && GET_CODE (target) == MEM
4107 && ! MEM_VOLATILE_P (target)
4108 && GET_MODE (target) != BLKmode)
4109 /* If target is in memory and caller wants value in a register instead,
4110 arrange that. Pass TARGET as target for expand_expr so that,
4111 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4112 We know expand_expr will not use the target in that case.
4113 Don't do this if TARGET is volatile because we are supposed
4114 to write it and then read it. */
4116 temp = expand_expr (exp, target, GET_MODE (target),
4117 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4118 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4120 /* If TEMP is already in the desired TARGET, only copy it from
4121 memory and don't store it there again. */
4122 if (temp == target
4123 || (rtx_equal_p (temp, target)
4124 && ! side_effects_p (temp) && ! side_effects_p (target)))
4125 dont_store_target = 1;
4126 temp = copy_to_reg (temp);
4128 dont_return_target = 1;
4130 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4131 /* If this is a scalar in a register that is stored in a wider mode
4132 than the declared mode, compute the result into its declared mode
4133 and then convert to the wider mode. Our value is the computed
4134 expression. */
4136 rtx inner_target = 0;
4138 /* If we don't want a value, we can do the conversion inside EXP,
4139 which will often result in some optimizations. Do the conversion
4140 in two steps: first change the signedness, if needed, then
4141 the extend. But don't do this if the type of EXP is a subtype
4142 of something else since then the conversion might involve
4143 more than just converting modes. */
4144 if ((want_value & 1) == 0
4145 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4146 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4148 if (TREE_UNSIGNED (TREE_TYPE (exp))
4149 != SUBREG_PROMOTED_UNSIGNED_P (target))
4150 exp = convert
4151 ((*lang_hooks.types.signed_or_unsigned_type)
4152 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4154 exp = convert ((*lang_hooks.types.type_for_mode)
4155 (GET_MODE (SUBREG_REG (target)),
4156 SUBREG_PROMOTED_UNSIGNED_P (target)),
4157 exp);
4159 inner_target = SUBREG_REG (target);
4162 temp = expand_expr (exp, inner_target, VOIDmode,
4163 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4165 /* If TEMP is a MEM and we want a result value, make the access
4166 now so it gets done only once. Strictly speaking, this is
4167 only necessary if the MEM is volatile, or if the address
4168 overlaps TARGET. But not performing the load twice also
4169 reduces the amount of rtl we generate and then have to CSE. */
4170 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4171 temp = copy_to_reg (temp);
4173 /* If TEMP is a VOIDmode constant, use convert_modes to make
4174 sure that we properly convert it. */
4175 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4177 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4178 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4179 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4180 GET_MODE (target), temp,
4181 SUBREG_PROMOTED_UNSIGNED_P (target));
4184 convert_move (SUBREG_REG (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4187 /* If we promoted a constant, change the mode back down to match
4188 target. Otherwise, the caller might get confused by a result whose
4189 mode is larger than expected. */
4191 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4193 if (GET_MODE (temp) != VOIDmode)
4195 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4196 SUBREG_PROMOTED_VAR_P (temp) = 1;
4197 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4198 SUBREG_PROMOTED_UNSIGNED_P (target));
4200 else
4201 temp = convert_modes (GET_MODE (target),
4202 GET_MODE (SUBREG_REG (target)),
4203 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4206 return want_value & 1 ? temp : NULL_RTX;
4208 else
4210 temp = expand_expr (exp, target, GET_MODE (target),
4211 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4212 /* Return TARGET if it's a specified hardware register.
4213 If TARGET is a volatile mem ref, either return TARGET
4214 or return a reg copied *from* TARGET; ANSI requires this.
4216 Otherwise, if TEMP is not TARGET, return TEMP
4217 if it is constant (for efficiency),
4218 or if we really want the correct value. */
4219 if (!(target && GET_CODE (target) == REG
4220 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4221 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4222 && ! rtx_equal_p (temp, target)
4223 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4224 dont_return_target = 1;
4227 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4228 the same as that of TARGET, adjust the constant. This is needed, for
4229 example, in case it is a CONST_DOUBLE and we want only a word-sized
4230 value. */
4231 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4232 && TREE_CODE (exp) != ERROR_MARK
4233 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4234 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4235 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4237 /* If value was not generated in the target, store it there.
4238 Convert the value to TARGET's type first if necessary.
4239 If TEMP and TARGET compare equal according to rtx_equal_p, but
4240 one or both of them are volatile memory refs, we have to distinguish
4241 two cases:
4242 - expand_expr has used TARGET. In this case, we must not generate
4243 another copy. This can be detected by TARGET being equal according
4244 to == .
4245 - expand_expr has not used TARGET - that means that the source just
4246 happens to have the same RTX form. Since temp will have been created
4247 by expand_expr, it will compare unequal according to == .
4248 We must generate a copy in this case, to reach the correct number
4249 of volatile memory references. */
4251 if ((! rtx_equal_p (temp, target)
4252 || (temp != target && (side_effects_p (temp)
4253 || side_effects_p (target))))
4254 && TREE_CODE (exp) != ERROR_MARK
4255 && ! dont_store_target
4256 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4257 but TARGET is not a valid memory reference, TEMP will differ
4258 from TARGET although it is really the same location. */
4259 && !(GET_CODE (target) == MEM
4260 && GET_CODE (XEXP (target, 0)) != QUEUED
4261 && (!memory_address_p (GET_MODE (target), XEXP (target, 0))
4262 || (flag_force_addr && !REG_P (XEXP (target, 0)))))
4263 /* If there's nothing to copy, don't bother. Don't call expr_size
4264 unless necessary, because some front-ends' (C++) expr_size hook
4265 aborts on objects that are not supposed to be bit-copied or
4266 bit-initialized. */
4267 && expr_size (exp) != const0_rtx)
4269 target = protect_from_queue (target, 1);
4270 if (GET_MODE (temp) != GET_MODE (target)
4271 && GET_MODE (temp) != VOIDmode)
4273 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4274 if (dont_return_target)
4276 /* In this case, we will return TEMP,
4277 so make sure it has the proper mode.
4278 But don't forget to store the value into TARGET. */
4279 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4280 emit_move_insn (target, temp);
4282 else
4283 convert_move (target, temp, unsignedp);
4286 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4288 /* Handle copying a string constant into an array. The string
4289 constant may be shorter than the array. So copy just the string's
4290 actual length, and clear the rest. First get the size of the data
4291 type of the string, which is actually the size of the target. */
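/* For instance (the declaration is hypothetical), for

       char buf[8] = "hi";

   the string constant occupies 3 bytes (including the terminating NUL)
   while the target is 8 bytes, so the code below copies 3 bytes and
   clears the remaining 5.  */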
4292 rtx size = expr_size (exp);
4294 if (GET_CODE (size) == CONST_INT
4295 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4296 emit_block_move (target, temp, size,
4297 (want_value & 2
4298 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4299 else
4301 /* Compute the size of the data to copy from the string. */
4302 tree copy_size
4303 = size_binop (MIN_EXPR,
4304 make_tree (sizetype, size),
4305 size_int (TREE_STRING_LENGTH (exp)));
4306 rtx copy_size_rtx
4307 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4308 (want_value & 2
4309 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4310 rtx label = 0;
4312 /* Copy that much. */
4313 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4314 TREE_UNSIGNED (sizetype));
4315 emit_block_move (target, temp, copy_size_rtx,
4316 (want_value & 2
4317 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4319 /* Figure out how much is left in TARGET that we have to clear.
4320 Do all calculations in ptr_mode. */
4321 if (GET_CODE (copy_size_rtx) == CONST_INT)
4323 size = plus_constant (size, -INTVAL (copy_size_rtx));
4324 target = adjust_address (target, BLKmode,
4325 INTVAL (copy_size_rtx));
4327 else
4329 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4330 copy_size_rtx, NULL_RTX, 0,
4331 OPTAB_LIB_WIDEN);
4333 #ifdef POINTERS_EXTEND_UNSIGNED
4334 if (GET_MODE (copy_size_rtx) != Pmode)
4335 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4336 TREE_UNSIGNED (sizetype));
4337 #endif
4339 target = offset_address (target, copy_size_rtx,
4340 highest_pow2_factor (copy_size));
4341 label = gen_label_rtx ();
4342 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4343 GET_MODE (size), 0, label);
4346 if (size != const0_rtx)
4347 clear_storage (target, size);
4349 if (label)
4350 emit_label (label);
4353 /* Handle calls that return values in multiple non-contiguous locations.
4354 The Irix 6 ABI has examples of this. */
4355 else if (GET_CODE (target) == PARALLEL)
4356 emit_group_load (target, temp, TREE_TYPE (exp),
4357 int_size_in_bytes (TREE_TYPE (exp)));
4358 else if (GET_MODE (temp) == BLKmode)
4359 emit_block_move (target, temp, expr_size (exp),
4360 (want_value & 2
4361 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4362 else
4363 emit_move_insn (target, temp);
4366 /* If we don't want a value, return NULL_RTX. */
4367 if ((want_value & 1) == 0)
4368 return NULL_RTX;
4370 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4371 ??? The latter test doesn't seem to make sense. */
4372 else if (dont_return_target && GET_CODE (temp) != MEM)
4373 return temp;
4375 /* Return TARGET itself if it is a hard register. */
4376 else if ((want_value & 1) != 0
4377 && GET_MODE (target) != BLKmode
4378 && ! (GET_CODE (target) == REG
4379 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4380 return copy_to_reg (target);
4382 else
4383 return target;
4386 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4388 static int
4389 is_zeros_p (tree exp)
4391 tree elt;
4393 switch (TREE_CODE (exp))
4395 case CONVERT_EXPR:
4396 case NOP_EXPR:
4397 case NON_LVALUE_EXPR:
4398 case VIEW_CONVERT_EXPR:
4399 return is_zeros_p (TREE_OPERAND (exp, 0));
4401 case INTEGER_CST:
4402 return integer_zerop (exp);
4404 case COMPLEX_CST:
4405 return
4406 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4408 case REAL_CST:
4409 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4411 case VECTOR_CST:
4412 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4413 elt = TREE_CHAIN (elt))
4414 if (!is_zeros_p (TREE_VALUE (elt)))
4415 return 0;
4417 return 1;
4419 case CONSTRUCTOR:
4420 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4421 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4422 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4423 if (! is_zeros_p (TREE_VALUE (elt)))
4424 return 0;
4426 return 1;
4428 default:
4429 return 0;
4433 /* Return 1 if EXP contains mostly (3/4) zeros. */
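/* For example, a constructor such as { 9, 0, 0, 0 } has 3 zero elements
   out of 4, so 4 * 3 >= 3 * 4 holds and this returns 1, whereas
   { 9, 9, 0, 0 } returns 0.  */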
4435 static int
4436 mostly_zeros_p (tree exp)
4438 if (TREE_CODE (exp) == CONSTRUCTOR)
4440 int elts = 0, zeros = 0;
4441 tree elt = CONSTRUCTOR_ELTS (exp);
4442 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4444 /* If there are no ranges of true bits, it is all zero. */
4445 return elt == NULL_TREE;
4447 for (; elt; elt = TREE_CHAIN (elt))
4449 /* We do not handle the case where the index is a RANGE_EXPR,
4450 so the statistic will be somewhat inaccurate.
4451 We do make a more accurate count in store_constructor itself,
4452 so since this function is only used for nested array elements,
4453 this should be close enough. */
4454 if (mostly_zeros_p (TREE_VALUE (elt)))
4455 zeros++;
4456 elts++;
4459 return 4 * zeros >= 3 * elts;
4462 return is_zeros_p (exp);
4465 /* Helper function for store_constructor.
4466 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4467 TYPE is the type of the CONSTRUCTOR, not the element type.
4468 CLEARED is as for store_constructor.
4469 ALIAS_SET is the alias set to use for any stores.
4471 This provides a recursive shortcut back to store_constructor when it isn't
4472 necessary to go through store_field. This is so that we can pass through
4473 the cleared field to let store_constructor know that we may not have to
4474 clear a substructure if the outer structure has already been cleared. */
4476 static void
4477 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4478 HOST_WIDE_INT bitpos, enum machine_mode mode,
4479 tree exp, tree type, int cleared, int alias_set)
4481 if (TREE_CODE (exp) == CONSTRUCTOR
4482 && bitpos % BITS_PER_UNIT == 0
4483 /* If we have a nonzero bitpos for a register target, then we just
4484 let store_field do the bitfield handling. This is unlikely to
4485 generate unnecessary clear instructions anyways. */
4486 && (bitpos == 0 || GET_CODE (target) == MEM))
4488 if (GET_CODE (target) == MEM)
4489 target
4490 = adjust_address (target,
4491 GET_MODE (target) == BLKmode
4492 || 0 != (bitpos
4493 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4494 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4497 /* Update the alias set, if required. */
4498 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4499 && MEM_ALIAS_SET (target) != 0)
4501 target = copy_rtx (target);
4502 set_mem_alias_set (target, alias_set);
4505 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4507 else
4508 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4509 alias_set);
4512 /* Store the value of constructor EXP into the rtx TARGET.
4513 TARGET is either a REG or a MEM; we know it cannot conflict, since
4514 safe_from_p has been called.
4515 CLEARED is true if TARGET is known to have been zero'd.
4516 SIZE is the number of bytes of TARGET we are allowed to modify: this
4517 may not be the same as the size of EXP if we are assigning to a field
4518 which has been packed to exclude padding bits. */
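/* As a hypothetical sketch (the names S and V are made up), for an
   initialization such as

       struct s { int a, b, c, d; };
       struct s v = { 1 };

   the constructor has fewer elements than the record has fields, so the
   code below clears all of V first and then stores only the field A
   explicitly.  */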
4520 static void
4521 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4523 tree type = TREE_TYPE (exp);
4524 #ifdef WORD_REGISTER_OPERATIONS
4525 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4526 #endif
4528 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4529 || TREE_CODE (type) == QUAL_UNION_TYPE)
4531 tree elt;
4533 /* If size is zero or the target is already cleared, do nothing. */
4534 if (size == 0 || cleared)
4535 cleared = 1;
4536 /* We either clear the aggregate or indicate the value is dead. */
4537 else if ((TREE_CODE (type) == UNION_TYPE
4538 || TREE_CODE (type) == QUAL_UNION_TYPE)
4539 && ! CONSTRUCTOR_ELTS (exp))
4540 /* If the constructor is empty, clear the union. */
4542 clear_storage (target, expr_size (exp));
4543 cleared = 1;
4546 /* If we are building a static constructor into a register,
4547 set the initial value as zero so we can fold the value into
4548 a constant. But if more than one register is involved,
4549 this probably loses. */
4550 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4551 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4553 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4554 cleared = 1;
4557 /* If the constructor has fewer fields than the structure
4558 or if we are initializing the structure to mostly zeros,
4559 clear the whole structure first. Don't do this if TARGET is a
4560 register whose mode size isn't equal to SIZE since clear_storage
4561 can't handle this case. */
4562 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4563 || mostly_zeros_p (exp))
4564 && (GET_CODE (target) != REG
4565 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4566 == size)))
4568 rtx xtarget = target;
4570 if (readonly_fields_p (type))
4572 xtarget = copy_rtx (xtarget);
4573 RTX_UNCHANGING_P (xtarget) = 1;
4576 clear_storage (xtarget, GEN_INT (size));
4577 cleared = 1;
4580 if (! cleared)
4581 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4583 /* Store each element of the constructor into
4584 the corresponding field of TARGET. */
4586 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4588 tree field = TREE_PURPOSE (elt);
4589 tree value = TREE_VALUE (elt);
4590 enum machine_mode mode;
4591 HOST_WIDE_INT bitsize;
4592 HOST_WIDE_INT bitpos = 0;
4593 tree offset;
4594 rtx to_rtx = target;
4596 /* Just ignore missing fields.
4597 We cleared the whole structure, above,
4598 if any fields are missing. */
4599 if (field == 0)
4600 continue;
4602 if (cleared && is_zeros_p (value))
4603 continue;
4605 if (host_integerp (DECL_SIZE (field), 1))
4606 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4607 else
4608 bitsize = -1;
4610 mode = DECL_MODE (field);
4611 if (DECL_BIT_FIELD (field))
4612 mode = VOIDmode;
4614 offset = DECL_FIELD_OFFSET (field);
4615 if (host_integerp (offset, 0)
4616 && host_integerp (bit_position (field), 0))
4618 bitpos = int_bit_position (field);
4619 offset = 0;
4621 else
4622 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4624 if (offset)
4626 rtx offset_rtx;
4628 if (CONTAINS_PLACEHOLDER_P (offset))
4629 offset = build (WITH_RECORD_EXPR, sizetype,
4630 offset, make_tree (TREE_TYPE (exp), target));
4632 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4633 if (GET_CODE (to_rtx) != MEM)
4634 abort ();
4636 #ifdef POINTERS_EXTEND_UNSIGNED
4637 if (GET_MODE (offset_rtx) != Pmode)
4638 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4639 #else
4640 if (GET_MODE (offset_rtx) != ptr_mode)
4641 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4642 #endif
4644 to_rtx = offset_address (to_rtx, offset_rtx,
4645 highest_pow2_factor (offset));
4648 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4649 on the MEM might lead to scheduling the clearing after the
4650 store. */
4651 if (TREE_READONLY (field) && !cleared)
4653 if (GET_CODE (to_rtx) == MEM)
4654 to_rtx = copy_rtx (to_rtx);
4656 RTX_UNCHANGING_P (to_rtx) = 1;
4659 #ifdef WORD_REGISTER_OPERATIONS
4660 /* If this initializes a field that is smaller than a word, at the
4661 start of a word, try to widen it to a full word.
4662 This special case allows us to output C++ member function
4663 initializations in a form that the optimizers can understand. */
4664 if (GET_CODE (target) == REG
4665 && bitsize < BITS_PER_WORD
4666 && bitpos % BITS_PER_WORD == 0
4667 && GET_MODE_CLASS (mode) == MODE_INT
4668 && TREE_CODE (value) == INTEGER_CST
4669 && exp_size >= 0
4670 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4672 tree type = TREE_TYPE (value);
4674 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4676 type = (*lang_hooks.types.type_for_size)
4677 (BITS_PER_WORD, TREE_UNSIGNED (type));
4678 value = convert (type, value);
4681 if (BYTES_BIG_ENDIAN)
4682 value
4683 = fold (build (LSHIFT_EXPR, type, value,
4684 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4685 bitsize = BITS_PER_WORD;
4686 mode = word_mode;
4688 #endif
4690 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4691 && DECL_NONADDRESSABLE_P (field))
4693 to_rtx = copy_rtx (to_rtx);
4694 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4697 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4698 value, type, cleared,
4699 get_alias_set (TREE_TYPE (field)));
4702 else if (TREE_CODE (type) == ARRAY_TYPE
4703 || TREE_CODE (type) == VECTOR_TYPE)
4705 tree elt;
4706 int i;
4707 int need_to_clear;
4708 tree domain = TYPE_DOMAIN (type);
4709 tree elttype = TREE_TYPE (type);
4710 int const_bounds_p;
4711 HOST_WIDE_INT minelt = 0;
4712 HOST_WIDE_INT maxelt = 0;
4713 int icode = 0;
4714 rtx *vector = NULL;
4715 int elt_size = 0;
4716 unsigned n_elts = 0;
4718 /* Vectors are like arrays, but the domain is stored via an array
4719 type indirectly. */
4720 if (TREE_CODE (type) == VECTOR_TYPE)
4722 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4723 the same field as TYPE_DOMAIN, we are not guaranteed that
4724 it always will. */
4725 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4726 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4727 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4729 enum machine_mode mode = GET_MODE (target);
4731 icode = (int) vec_init_optab->handlers[mode].insn_code;
4732 if (icode != CODE_FOR_nothing)
4734 unsigned int i;
4736 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4737 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4738 vector = alloca (n_elts);
4739 for (i = 0; i < n_elts; i++)
4740 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4745 const_bounds_p = (TYPE_MIN_VALUE (domain)
4746 && TYPE_MAX_VALUE (domain)
4747 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4748 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4750 /* If we have constant bounds for the range of the type, get them. */
4751 if (const_bounds_p)
4753 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4754 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4757 /* If the constructor has fewer elements than the array,
4758 clear the whole array first. Similarly if this is a
4759 static constructor of a non-BLKmode object. */
4760 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4761 need_to_clear = 1;
4762 else
4764 HOST_WIDE_INT count = 0, zero_count = 0;
4765 need_to_clear = ! const_bounds_p;
4767 /* This loop is a more accurate version of the loop in
4768 mostly_zeros_p (it handles RANGE_EXPR in an index).
4769 It is also needed to check for missing elements. */
4770 for (elt = CONSTRUCTOR_ELTS (exp);
4771 elt != NULL_TREE && ! need_to_clear;
4772 elt = TREE_CHAIN (elt))
4774 tree index = TREE_PURPOSE (elt);
4775 HOST_WIDE_INT this_node_count;
4777 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4779 tree lo_index = TREE_OPERAND (index, 0);
4780 tree hi_index = TREE_OPERAND (index, 1);
4782 if (! host_integerp (lo_index, 1)
4783 || ! host_integerp (hi_index, 1))
4785 need_to_clear = 1;
4786 break;
4789 this_node_count = (tree_low_cst (hi_index, 1)
4790 - tree_low_cst (lo_index, 1) + 1);
4792 else
4793 this_node_count = 1;
4795 count += this_node_count;
4796 if (mostly_zeros_p (TREE_VALUE (elt)))
4797 zero_count += this_node_count;
4800 /* Clear the entire array first if there are any missing elements,
4801 or if the incidence of zero elements is >= 75%. */
4802 if (! need_to_clear
4803 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4804 need_to_clear = 1;
4807 if (need_to_clear && size > 0 && !vector)
4809 if (! cleared)
4811 if (REG_P (target))
4812 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4813 else
4814 clear_storage (target, GEN_INT (size));
4816 cleared = 1;
4818 else if (REG_P (target))
4819 /* Inform later passes that the old value is dead. */
4820 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4822 /* Store each element of the constructor into
4823 the corresponding element of TARGET, determined
4824 by counting the elements. */
4825 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4826 elt;
4827 elt = TREE_CHAIN (elt), i++)
4829 enum machine_mode mode;
4830 HOST_WIDE_INT bitsize;
4831 HOST_WIDE_INT bitpos;
4832 int unsignedp;
4833 tree value = TREE_VALUE (elt);
4834 tree index = TREE_PURPOSE (elt);
4835 rtx xtarget = target;
4837 if (cleared && is_zeros_p (value))
4838 continue;
4840 unsignedp = TREE_UNSIGNED (elttype);
4841 mode = TYPE_MODE (elttype);
4842 if (mode == BLKmode)
4843 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4844 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4845 : -1);
4846 else
4847 bitsize = GET_MODE_BITSIZE (mode);
4849 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4851 tree lo_index = TREE_OPERAND (index, 0);
4852 tree hi_index = TREE_OPERAND (index, 1);
4853 rtx index_r, pos_rtx, loop_end;
4854 struct nesting *loop;
4855 HOST_WIDE_INT lo, hi, count;
4856 tree position;
4858 if (vector)
4859 abort ();
4861 /* If the range is constant and "small", unroll the loop. */
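/* A RANGE_EXPR index comes from a GNU C range designator, e.g.
   (hypothetically)

       int a[16] = { [0 ... 3] = 7 };

   Here the range is constant and small (4 elements of 32 bits on a
   typical target), so the loop is unrolled; larger or non-constant
   ranges fall through to the run-time loop further below.  */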
4862 if (const_bounds_p
4863 && host_integerp (lo_index, 0)
4864 && host_integerp (hi_index, 0)
4865 && (lo = tree_low_cst (lo_index, 0),
4866 hi = tree_low_cst (hi_index, 0),
4867 count = hi - lo + 1,
4868 (GET_CODE (target) != MEM
4869 || count <= 2
4870 || (host_integerp (TYPE_SIZE (elttype), 1)
4871 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4872 <= 40 * 8)))))
4874 lo -= minelt; hi -= minelt;
4875 for (; lo <= hi; lo++)
4877 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4879 if (GET_CODE (target) == MEM
4880 && !MEM_KEEP_ALIAS_SET_P (target)
4881 && TREE_CODE (type) == ARRAY_TYPE
4882 && TYPE_NONALIASED_COMPONENT (type))
4884 target = copy_rtx (target);
4885 MEM_KEEP_ALIAS_SET_P (target) = 1;
4888 store_constructor_field
4889 (target, bitsize, bitpos, mode, value, type, cleared,
4890 get_alias_set (elttype));
4893 else
4895 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4896 loop_end = gen_label_rtx ();
4898 unsignedp = TREE_UNSIGNED (domain);
4900 index = build_decl (VAR_DECL, NULL_TREE, domain);
4902 index_r
4903 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4904 &unsignedp, 0));
4905 SET_DECL_RTL (index, index_r);
4906 if (TREE_CODE (value) == SAVE_EXPR
4907 && SAVE_EXPR_RTL (value) == 0)
4909 /* Make sure value gets expanded once before the
4910 loop. */
4911 expand_expr (value, const0_rtx, VOIDmode, 0);
4912 emit_queue ();
4914 store_expr (lo_index, index_r, 0);
4915 loop = expand_start_loop (0);
4917 /* Assign value to element index. */
4918 position
4919 = convert (ssizetype,
4920 fold (build (MINUS_EXPR, TREE_TYPE (index),
4921 index, TYPE_MIN_VALUE (domain))));
4922 position = size_binop (MULT_EXPR, position,
4923 convert (ssizetype,
4924 TYPE_SIZE_UNIT (elttype)));
4926 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4927 xtarget = offset_address (target, pos_rtx,
4928 highest_pow2_factor (position));
4929 xtarget = adjust_address (xtarget, mode, 0);
4930 if (TREE_CODE (value) == CONSTRUCTOR)
4931 store_constructor (value, xtarget, cleared,
4932 bitsize / BITS_PER_UNIT);
4933 else
4934 store_expr (value, xtarget, 0);
4936 expand_exit_loop_if_false (loop,
4937 build (LT_EXPR, integer_type_node,
4938 index, hi_index));
4940 expand_increment (build (PREINCREMENT_EXPR,
4941 TREE_TYPE (index),
4942 index, integer_one_node), 0, 0);
4943 expand_end_loop ();
4944 emit_label (loop_end);
4947 else if ((index != 0 && ! host_integerp (index, 0))
4948 || ! host_integerp (TYPE_SIZE (elttype), 1))
4950 tree position;
4952 if (vector)
4953 abort ();
4955 if (index == 0)
4956 index = ssize_int (1);
4958 if (minelt)
4959 index = convert (ssizetype,
4960 fold (build (MINUS_EXPR, index,
4961 TYPE_MIN_VALUE (domain))));
4963 position = size_binop (MULT_EXPR, index,
4964 convert (ssizetype,
4965 TYPE_SIZE_UNIT (elttype)));
4966 xtarget = offset_address (target,
4967 expand_expr (position, 0, VOIDmode, 0),
4968 highest_pow2_factor (position));
4969 xtarget = adjust_address (xtarget, mode, 0);
4970 store_expr (value, xtarget, 0);
4972 else if (vector)
4974 int pos;
4976 if (index != 0)
4977 pos = tree_low_cst (index, 0) - minelt;
4978 else
4979 pos = i;
4980 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4982 else
4984 if (index != 0)
4985 bitpos = ((tree_low_cst (index, 0) - minelt)
4986 * tree_low_cst (TYPE_SIZE (elttype), 1));
4987 else
4988 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4990 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4991 && TREE_CODE (type) == ARRAY_TYPE
4992 && TYPE_NONALIASED_COMPONENT (type))
4994 target = copy_rtx (target);
4995 MEM_KEEP_ALIAS_SET_P (target) = 1;
4997 store_constructor_field (target, bitsize, bitpos, mode, value,
4998 type, cleared, get_alias_set (elttype));
5001 if (vector)
5003 emit_insn (GEN_FCN (icode) (target,
5004 gen_rtx_PARALLEL (GET_MODE (target),
5005 gen_rtvec_v (n_elts, vector))));
5009 /* Set constructor assignments. */
5010 else if (TREE_CODE (type) == SET_TYPE)
5012 tree elt = CONSTRUCTOR_ELTS (exp);
5013 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5014 tree domain = TYPE_DOMAIN (type);
5015 tree domain_min, domain_max, bitlength;
5017 /* The default implementation strategy is to extract the constant
5018 parts of the constructor, use that to initialize the target,
5019 and then "or" in whatever non-constant ranges we need in addition.
5021 If a large set is all zero or all ones, it is
5022 probably better to set it using memset (if available) or bzero.
5023 Also, if a large set has just a single range, it may also be
5024 better to first clear the whole set (using
5025 bzero/memset), and then set the bits we want. */
5027 /* Check for all zeros. */
5028 if (elt == NULL_TREE && size > 0)
5030 if (!cleared)
5031 clear_storage (target, GEN_INT (size));
5032 return;
5035 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5036 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5037 bitlength = size_binop (PLUS_EXPR,
5038 size_diffop (domain_max, domain_min),
5039 ssize_int (1));
5041 nbits = tree_low_cst (bitlength, 1);
5043 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5044 are "complicated" (more than one range), initialize (the
5045 constant parts) by copying from a constant. */
5046 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5047 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5049 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5050 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5051 char *bit_buffer = alloca (nbits);
5052 HOST_WIDE_INT word = 0;
5053 unsigned int bit_pos = 0;
5054 unsigned int ibit = 0;
5055 unsigned int offset = 0; /* In bytes from beginning of set. */
5057 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5058 for (;;)
5060 if (bit_buffer[ibit])
5062 if (BYTES_BIG_ENDIAN)
5063 word |= (1 << (set_word_size - 1 - bit_pos));
5064 else
5065 word |= 1 << bit_pos;
5068 bit_pos++; ibit++;
5069 if (bit_pos >= set_word_size || ibit == nbits)
5071 if (word != 0 || ! cleared)
5073 rtx datum = GEN_INT (word);
5074 rtx to_rtx;
5076 /* The assumption here is that it is safe to use
5077 XEXP if the set is multi-word, but not if
5078 it's single-word. */
5079 if (GET_CODE (target) == MEM)
5080 to_rtx = adjust_address (target, mode, offset);
5081 else if (offset == 0)
5082 to_rtx = target;
5083 else
5084 abort ();
5085 emit_move_insn (to_rtx, datum);
5088 if (ibit == nbits)
5089 break;
5090 word = 0;
5091 bit_pos = 0;
5092 offset += set_word_size / BITS_PER_UNIT;
5096 else if (!cleared)
5097 /* Don't bother clearing storage if the set is all ones. */
5098 if (TREE_CHAIN (elt) != NULL_TREE
5099 || (TREE_PURPOSE (elt) == NULL_TREE
5100 ? nbits != 1
5101 : ( ! host_integerp (TREE_VALUE (elt), 0)
5102 || ! host_integerp (TREE_PURPOSE (elt), 0)
5103 || (tree_low_cst (TREE_VALUE (elt), 0)
5104 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5105 != (HOST_WIDE_INT) nbits))))
5106 clear_storage (target, expr_size (exp));
5108 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5110 /* Start of range of element or NULL. */
5111 tree startbit = TREE_PURPOSE (elt);
5112 /* End of range of element, or element value. */
5113 tree endbit = TREE_VALUE (elt);
5114 HOST_WIDE_INT startb, endb;
5115 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5117 bitlength_rtx = expand_expr (bitlength,
5118 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5120 /* Handle non-range tuple element like [ expr ]. */
5121 if (startbit == NULL_TREE)
5123 startbit = save_expr (endbit);
5124 endbit = startbit;
5127 startbit = convert (sizetype, startbit);
5128 endbit = convert (sizetype, endbit);
5129 if (! integer_zerop (domain_min))
5131 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5132 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5134 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5135 EXPAND_CONST_ADDRESS);
5136 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5137 EXPAND_CONST_ADDRESS);
5139 if (REG_P (target))
5141 targetx
5142 = assign_temp
5143 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5144 (GET_MODE (target), 0),
5145 TYPE_QUAL_CONST)),
5146 0, 1, 1);
5147 emit_move_insn (targetx, target);
5150 else if (GET_CODE (target) == MEM)
5151 targetx = target;
5152 else
5153 abort ();
5155 /* Optimization: If startbit and endbit are constants divisible
5156 by BITS_PER_UNIT, call memset instead. */
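/* With hypothetical values, for startbit = 8 and endbit = 23, startb = 8
   and endb = 24 are both byte-aligned, so the call below amounts to
   memset (target + 1, -1, 2), setting bits 8..23 in one call.  */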
5157 if (TARGET_MEM_FUNCTIONS
5158 && TREE_CODE (startbit) == INTEGER_CST
5159 && TREE_CODE (endbit) == INTEGER_CST
5160 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5161 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5163 emit_library_call (memset_libfunc, LCT_NORMAL,
5164 VOIDmode, 3,
5165 plus_constant (XEXP (targetx, 0),
5166 startb / BITS_PER_UNIT),
5167 Pmode,
5168 constm1_rtx, TYPE_MODE (integer_type_node),
5169 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5170 TYPE_MODE (sizetype));
5172 else
5173 emit_library_call (setbits_libfunc, LCT_NORMAL,
5174 VOIDmode, 4, XEXP (targetx, 0),
5175 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5176 startbit_rtx, TYPE_MODE (sizetype),
5177 endbit_rtx, TYPE_MODE (sizetype));
5179 if (REG_P (target))
5180 emit_move_insn (target, targetx);
5184 else
5185 abort ();
5188 /* Store the value of EXP (an expression tree)
5189 into a subfield of TARGET which has mode MODE and occupies
5190 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5191 If MODE is VOIDmode, it means that we are storing into a bit-field.
5193 If VALUE_MODE is VOIDmode, return nothing in particular.
5194 UNSIGNEDP is not used in this case.
5196 Otherwise, return an rtx for the value stored. This rtx
5197 has mode VALUE_MODE if that is convenient to do.
5198 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5200 TYPE is the type of the underlying object,
5202 ALIAS_SET is the alias set for the destination. This value will
5203 (in general) be different from that for TARGET, since TARGET is a
5204 reference to the containing structure. */
5206 static rtx
5207 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5208 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5209 int unsignedp, tree type, int alias_set)
5211 HOST_WIDE_INT width_mask = 0;
5213 if (TREE_CODE (exp) == ERROR_MARK)
5214 return const0_rtx;
5216 /* If we have nothing to store, do nothing unless the expression has
5217 side-effects. */
5218 if (bitsize == 0)
5219 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5220 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5221 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5223 /* If we are storing into an unaligned field of an aligned union that is
5224 in a register, we may have the mode of TARGET being an integer mode but
5225 MODE == BLKmode. In that case, get an aligned object whose size and
5226 alignment are the same as TARGET and store TARGET into it (we can avoid
5227 the store if the field being stored is the entire width of TARGET). Then
5228 call ourselves recursively to store the field into a BLKmode version of
5229 that object. Finally, load from the object into TARGET. This is not
5230 very efficient in general, but should only be slightly more expensive
5231 than the otherwise-required unaligned accesses. Perhaps this can be
5232 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5233 twice, once with emit_move_insn and once via store_field. */
5235 if (mode == BLKmode
5236 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5238 rtx object = assign_temp (type, 0, 1, 1);
5239 rtx blk_object = adjust_address (object, BLKmode, 0);
5241 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5242 emit_move_insn (object, target);
5244 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5245 alias_set);
5247 emit_move_insn (target, object);
5249 /* We want to return the BLKmode version of the data. */
5250 return blk_object;
5253 if (GET_CODE (target) == CONCAT)
5255 /* We're storing into a struct containing a single __complex. */
5257 if (bitpos != 0)
5258 abort ();
5259 return store_expr (exp, target, 0);
5262 /* If the structure is in a register or if the component
5263 is a bit field, we cannot use addressing to access it.
5264 Use bit-field techniques or SUBREG to store in it. */
5266 if (mode == VOIDmode
5267 || (mode != BLKmode && ! direct_store[(int) mode]
5268 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5269 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5270 || GET_CODE (target) == REG
5271 || GET_CODE (target) == SUBREG
5272 /* If the field isn't aligned enough to store as an ordinary memref,
5273 store it as a bit field. */
5274 || (mode != BLKmode
5275 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5276 || bitpos % GET_MODE_ALIGNMENT (mode))
5277 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5278 || (bitpos % BITS_PER_UNIT != 0)))
5279 /* If the RHS and field are a constant size and the size of the
5280 RHS isn't the same size as the bitfield, we must use bitfield
5281 operations. */
5282 || (bitsize >= 0
5283 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5284 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5286 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5288 /* If BITSIZE is narrower than the size of the type of EXP
5289 we will be narrowing TEMP. Normally, what's wanted are the
5290 low-order bits. However, if EXP's type is a record and this is a
5291 big-endian machine, we want the upper BITSIZE bits. */
5292 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5293 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5294 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5295 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5296 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5297 - bitsize),
5298 NULL_RTX, 1);
5300 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5301 MODE. */
5302 if (mode != VOIDmode && mode != BLKmode
5303 && mode != TYPE_MODE (TREE_TYPE (exp)))
5304 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5306 /* If the modes of TARGET and TEMP are both BLKmode, both
5307 must be in memory and BITPOS must be aligned on a byte
5308 boundary. If so, we simply do a block copy. */
5309 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5311 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5312 || bitpos % BITS_PER_UNIT != 0)
5313 abort ();
5315 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5316 emit_block_move (target, temp,
5317 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5318 / BITS_PER_UNIT),
5319 BLOCK_OP_NORMAL);
5321 return value_mode == VOIDmode ? const0_rtx : target;
5324 /* Store the value in the bitfield. */
5325 store_bit_field (target, bitsize, bitpos, mode, temp,
5326 int_size_in_bytes (type));
5328 if (value_mode != VOIDmode)
5330 /* The caller wants an rtx for the value.
5331 If possible, avoid refetching from the bitfield itself. */
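/* As a sketch: for a 5-bit field, WIDTH_MASK is 0x1f; an unsigned value
   is re-extended below by ANDing with that mask, a signed one by
   shifting left and then arithmetically right by
   GET_MODE_BITSIZE (tmode) - 5 bits.  */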
5332 if (width_mask != 0
5333 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5335 tree count;
5336 enum machine_mode tmode;
5338 tmode = GET_MODE (temp);
5339 if (tmode == VOIDmode)
5340 tmode = value_mode;
5342 if (unsignedp)
5343 return expand_and (tmode, temp,
5344 gen_int_mode (width_mask, tmode),
5345 NULL_RTX);
5347 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5348 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5349 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5352 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5353 NULL_RTX, value_mode, VOIDmode,
5354 int_size_in_bytes (type));
5356 return const0_rtx;
5358 else
5360 rtx addr = XEXP (target, 0);
5361 rtx to_rtx = target;
5363 /* If a value is wanted, it must be the lhs;
5364 so make the address stable for multiple use. */
5366 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5367 && ! CONSTANT_ADDRESS_P (addr)
5368 /* A frame-pointer reference is already stable. */
5369 && ! (GET_CODE (addr) == PLUS
5370 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5371 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5372 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5373 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5375 /* Now build a reference to just the desired component. */
5377 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5379 if (to_rtx == target)
5380 to_rtx = copy_rtx (to_rtx);
5382 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5383 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5384 set_mem_alias_set (to_rtx, alias_set);
5386 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5390 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5391 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5392 codes and find the ultimate containing object, which we return.
5394 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5395 bit position, and *PUNSIGNEDP to the signedness of the field.
5396 If the position of the field is variable, we store a tree
5397 giving the variable offset (in units) in *POFFSET.
5398 This offset is in addition to the bit position.
5399 If the position is not variable, we store 0 in *POFFSET.
5401 If any of the extraction expressions is volatile,
5402 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5404 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5405 is a mode that can be used to access the field. In that case, *PBITSIZE
5406 is redundant.
5408 If the field describes a variable-sized object, *PMODE is set to
5409 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5410 this case, but the address of the object can be found. */
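/* A hypothetical example (the names X and F are made up): for a
   reference X.F, where F is a non-bit-field int at byte offset 4 within
   X on a target with 32-bit int, this returns the tree for X with
   *PBITSIZE = 32, *PBITPOS = 32, *POFFSET = 0 and *PMODE set to F's
   mode.  */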
5412 tree
5413 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5414 HOST_WIDE_INT *pbitpos, tree *poffset,
5415 enum machine_mode *pmode, int *punsignedp,
5416 int *pvolatilep)
5418 tree size_tree = 0;
5419 enum machine_mode mode = VOIDmode;
5420 tree offset = size_zero_node;
5421 tree bit_offset = bitsize_zero_node;
5422 tree placeholder_ptr = 0;
5423 tree tem;
5425 /* First get the mode, signedness, and size. We do this from just the
5426 outermost expression. */
5427 if (TREE_CODE (exp) == COMPONENT_REF)
5429 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5430 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5431 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5433 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5435 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5437 size_tree = TREE_OPERAND (exp, 1);
5438 *punsignedp = TREE_UNSIGNED (exp);
5440 else
5442 mode = TYPE_MODE (TREE_TYPE (exp));
5443 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5445 if (mode == BLKmode)
5446 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5447 else
5448 *pbitsize = GET_MODE_BITSIZE (mode);
5451 if (size_tree != 0)
5453 if (! host_integerp (size_tree, 1))
5454 mode = BLKmode, *pbitsize = -1;
5455 else
5456 *pbitsize = tree_low_cst (size_tree, 1);
5459 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5460 and find the ultimate containing object. */
5461 while (1)
5463 if (TREE_CODE (exp) == BIT_FIELD_REF)
5464 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5465 else if (TREE_CODE (exp) == COMPONENT_REF)
5467 tree field = TREE_OPERAND (exp, 1);
5468 tree this_offset = DECL_FIELD_OFFSET (field);
5470 /* If this field hasn't been filled in yet, don't go
5471 past it. This should only happen when folding expressions
5472 made during type construction. */
5473 if (this_offset == 0)
5474 break;
5475 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5476 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5478 offset = size_binop (PLUS_EXPR, offset, this_offset);
5479 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5480 DECL_FIELD_BIT_OFFSET (field));
5482 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5485 else if (TREE_CODE (exp) == ARRAY_REF
5486 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5488 tree index = TREE_OPERAND (exp, 1);
5489 tree array = TREE_OPERAND (exp, 0);
5490 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5491 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5492 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5494 /* We assume all arrays have sizes that are a multiple of a byte.
5495 First subtract the lower bound, if any, in the type of the
5496 index, then convert to sizetype and multiply by the size of the
5497 array element. */
5498 if (low_bound != 0 && ! integer_zerop (low_bound))
5499 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5500 index, low_bound));
5502 /* If the index has a self-referential type, wrap it in a
5503 WITH_RECORD_EXPR recording this reference; if the component
5504 size does, wrap it in one recording our containing array. */
5505 if (CONTAINS_PLACEHOLDER_P (index))
5506 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5507 if (CONTAINS_PLACEHOLDER_P (unit_size))
5508 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5510 offset = size_binop (PLUS_EXPR, offset,
5511 size_binop (MULT_EXPR,
5512 convert (sizetype, index),
5513 unit_size));
5516 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5518 tree new = find_placeholder (exp, &placeholder_ptr);
5520 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5521 We might have been called from tree optimization where we
5522 haven't set up an object yet. */
5523 if (new == 0)
5524 break;
5525 else
5526 exp = new;
5528 continue;
5531 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5532 conversions that don't change the mode, and all view conversions
5533 except those that need to "step up" the alignment. */
5534 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5535 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5536 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5537 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5538 && STRICT_ALIGNMENT
5539 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5540 < BIGGEST_ALIGNMENT)
5541 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5542 || TYPE_ALIGN_OK (TREE_TYPE
5543 (TREE_OPERAND (exp, 0))))))
5544 && ! ((TREE_CODE (exp) == NOP_EXPR
5545 || TREE_CODE (exp) == CONVERT_EXPR)
5546 && (TYPE_MODE (TREE_TYPE (exp))
5547 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5548 break;
5550 /* If any reference in the chain is volatile, the effect is volatile. */
5551 if (TREE_THIS_VOLATILE (exp))
5552 *pvolatilep = 1;
5554 exp = TREE_OPERAND (exp, 0);
5557 /* If OFFSET is constant, see if we can return the whole thing as a
5558 constant bit position. Otherwise, split it up. */
5559 if (host_integerp (offset, 0)
5560 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5561 bitsize_unit_node))
5562 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5563 && host_integerp (tem, 0))
5564 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5565 else
5566 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5568 *pmode = mode;
5569 return exp;
5572 /* Return 1 if T is an expression that get_inner_reference handles. */
5575 handled_component_p (tree t)
5577 switch (TREE_CODE (t))
5579 case BIT_FIELD_REF:
5580 case COMPONENT_REF:
5581 case ARRAY_REF:
5582 case ARRAY_RANGE_REF:
5583 case NON_LVALUE_EXPR:
5584 case VIEW_CONVERT_EXPR:
5585 return 1;
5587 /* ??? Sure they are handled, but get_inner_reference may return
5588 a different PBITSIZE, depending upon whether the expression is
5589 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5590 case NOP_EXPR:
5591 case CONVERT_EXPR:
5592 return (TYPE_MODE (TREE_TYPE (t))
5593 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5595 default:
5596 return 0;
5600 /* Given an rtx VALUE that may contain additions and multiplications, return
5601 an equivalent value that just refers to a register, memory, or constant.
5602 This is done by generating instructions to perform the arithmetic and
5603 returning a pseudo-register containing the value.
5605 The returned value may be a REG, SUBREG, MEM or constant. */
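/* For example (an illustrative sketch): given

     value = (plus (mult (reg 100) (const_int 4)) (reg 101))

   force_operand emits the insns that compute the multiply and the add and
   returns a pseudo REG holding the sum; when TARGET is zero a fresh pseudo
   is used.  */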
5608 force_operand (rtx value, rtx target)
5610 rtx op1, op2;
5611 /* Use subtarget as the target for operand 0 of a binary operation. */
5612 rtx subtarget = get_subtarget (target);
5613 enum rtx_code code = GET_CODE (value);
5615 /* Check for a PIC address load. */
5616 if ((code == PLUS || code == MINUS)
5617 && XEXP (value, 0) == pic_offset_table_rtx
5618 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5619 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5620 || GET_CODE (XEXP (value, 1)) == CONST))
5622 if (!subtarget)
5623 subtarget = gen_reg_rtx (GET_MODE (value));
5624 emit_move_insn (subtarget, value);
5625 return subtarget;
5628 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5630 if (!target)
5631 target = gen_reg_rtx (GET_MODE (value));
5632 convert_move (target, force_operand (XEXP (value, 0), NULL),
5633 code == ZERO_EXTEND);
5634 return target;
5637 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5639 op2 = XEXP (value, 1);
5640 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5641 subtarget = 0;
5642 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5644 code = PLUS;
5645 op2 = negate_rtx (GET_MODE (value), op2);
5648 /* Check for an addition with OP2 a constant integer and our first
5649 operand a PLUS of a virtual register and something else. In that
5650 case, we want to emit the sum of the virtual register and the
5651 constant first and then add the other value. This allows virtual
5652 register instantiation to simply modify the constant rather than
5653 creating another one around this addition. */
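/* E.g. (a sketch): for

     (plus (plus (reg virtual-stack-vars) (reg 66)) (const_int 8))

   we first compute virtual-stack-vars + 8, so that instantiation can fold
   the 8 into the frame offset, and only then add (reg 66).  */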
5654 if (code == PLUS && GET_CODE (op2) == CONST_INT
5655 && GET_CODE (XEXP (value, 0)) == PLUS
5656 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5657 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5658 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5660 rtx temp = expand_simple_binop (GET_MODE (value), code,
5661 XEXP (XEXP (value, 0), 0), op2,
5662 subtarget, 0, OPTAB_LIB_WIDEN);
5663 return expand_simple_binop (GET_MODE (value), code, temp,
5664 force_operand (XEXP (XEXP (value,
5665 0), 1), 0),
5666 target, 0, OPTAB_LIB_WIDEN);
5669 op1 = force_operand (XEXP (value, 0), subtarget);
5670 op2 = force_operand (op2, NULL_RTX);
5671 switch (code)
5673 case MULT:
5674 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5675 case DIV:
5676 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5677 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5678 target, 1, OPTAB_LIB_WIDEN);
5679 else
5680 return expand_divmod (0,
5681 FLOAT_MODE_P (GET_MODE (value))
5682 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5683 GET_MODE (value), op1, op2, target, 0);
5684 break;
5685 case MOD:
5686 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5687 target, 0);
5688 break;
5689 case UDIV:
5690 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5691 target, 1);
5692 break;
5693 case UMOD:
5694 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5695 target, 1);
5696 break;
5697 case ASHIFTRT:
5698 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5699 target, 0, OPTAB_LIB_WIDEN);
5700 break;
5701 default:
5702 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5703 target, 1, OPTAB_LIB_WIDEN);
5706 if (GET_RTX_CLASS (code) == '1')
5708 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5709 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5712 #ifdef INSN_SCHEDULING
5713 /* On machines that have insn scheduling, we want all memory references to be
5714 explicit, so we need to deal with such paradoxical SUBREGs. */
5715 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5716 && (GET_MODE_SIZE (GET_MODE (value))
5717 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5718 value
5719 = simplify_gen_subreg (GET_MODE (value),
5720 force_reg (GET_MODE (SUBREG_REG (value)),
5721 force_operand (SUBREG_REG (value),
5722 NULL_RTX)),
5723 GET_MODE (SUBREG_REG (value)),
5724 SUBREG_BYTE (value));
5725 #endif
5727 return value;
5730 /* Subroutine of expand_expr: return nonzero iff there is no way that
5731 EXP can reference X, which is being modified. TOP_P is nonzero if this
5732 call is going to be used to determine whether we need a temporary
5733 for EXP, as opposed to a recursive call to this function.
5735 It is always safe for this routine to return zero since it merely
5736 searches for optimization opportunities. */
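/* For instance, expand_operands below guards a suggested target with

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   i.e. when we cannot prove that expanding EXP1 leaves TARGET untouched,
   we simply stop suggesting TARGET.  */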
5739 safe_from_p (rtx x, tree exp, int top_p)
5741 rtx exp_rtl = 0;
5742 int i, nops;
5743 static tree save_expr_list;
5745 if (x == 0
5746 /* If EXP has varying size, we MUST use a target since we currently
5747 have no way of allocating temporaries of variable size
5748 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5749 So we assume here that something at a higher level has prevented a
5750 clash. This is somewhat bogus, but the best we can do. Only
5751 do this when X is BLKmode and when we are at the top level. */
5752 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5753 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5754 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5755 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5756 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5757 != INTEGER_CST)
5758 && GET_MODE (x) == BLKmode)
5759 /* If X is in the outgoing argument area, it is always safe. */
5760 || (GET_CODE (x) == MEM
5761 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5762 || (GET_CODE (XEXP (x, 0)) == PLUS
5763 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5764 return 1;
5766 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5767 find the underlying pseudo. */
5768 if (GET_CODE (x) == SUBREG)
5770 x = SUBREG_REG (x);
5771 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5772 return 0;
5775 /* A SAVE_EXPR might appear many times in the expression passed to the
5776 top-level safe_from_p call, and if it has a complex subexpression,
5777 examining it multiple times could result in a combinatorial explosion.
5778 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5779 with optimization took about 28 minutes to compile -- even though it was
5780 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5781 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5782 we have processed. Note that the only test of top_p was above. */
5784 if (top_p)
5786 int rtn;
5787 tree t;
5789 save_expr_list = 0;
5791 rtn = safe_from_p (x, exp, 0);
5793 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5794 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5796 return rtn;
5799 /* Now look at our tree code and possibly recurse. */
5800 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5802 case 'd':
5803 exp_rtl = DECL_RTL_IF_SET (exp);
5804 break;
5806 case 'c':
5807 return 1;
5809 case 'x':
5810 if (TREE_CODE (exp) == TREE_LIST)
5812 while (1)
5814 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5815 return 0;
5816 exp = TREE_CHAIN (exp);
5817 if (!exp)
5818 return 1;
5819 if (TREE_CODE (exp) != TREE_LIST)
5820 return safe_from_p (x, exp, 0);
5823 else if (TREE_CODE (exp) == ERROR_MARK)
5824 return 1; /* An already-visited SAVE_EXPR? */
5825 else
5826 return 0;
5828 case '2':
5829 case '<':
5830 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5831 return 0;
5832 /* Fall through. */
5834 case '1':
5835 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5837 case 'e':
5838 case 'r':
5839 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5840 the expression. If it is set, we conflict iff we are that rtx or
5841 both are in memory. Otherwise, we check all operands of the
5842 expression recursively. */
5844 switch (TREE_CODE (exp))
5846 case ADDR_EXPR:
5847 /* If the operand is static or we are static, we can't conflict.
5848 Likewise if we don't conflict with the operand at all. */
5849 if (staticp (TREE_OPERAND (exp, 0))
5850 || TREE_STATIC (exp)
5851 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5852 return 1;
5854 /* Otherwise, the only way this can conflict is if we are taking
5855 the address of a DECL whose address is part of X, which is
5856 very rare. */
5857 exp = TREE_OPERAND (exp, 0);
5858 if (DECL_P (exp))
5860 if (!DECL_RTL_SET_P (exp)
5861 || GET_CODE (DECL_RTL (exp)) != MEM)
5862 return 0;
5863 else
5864 exp_rtl = XEXP (DECL_RTL (exp), 0);
5866 break;
5868 case INDIRECT_REF:
5869 if (GET_CODE (x) == MEM
5870 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5871 get_alias_set (exp)))
5872 return 0;
5873 break;
5875 case CALL_EXPR:
5876 /* Assume that the call will clobber all hard registers and
5877 all of memory. */
5878 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5879 || GET_CODE (x) == MEM)
5880 return 0;
5881 break;
5883 case RTL_EXPR:
5884 /* If a sequence exists, we would have to scan every instruction
5885 in the sequence to see if it was safe. This is probably not
5886 worthwhile. */
5887 if (RTL_EXPR_SEQUENCE (exp))
5888 return 0;
5890 exp_rtl = RTL_EXPR_RTL (exp);
5891 break;
5893 case WITH_CLEANUP_EXPR:
5894 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5895 break;
5897 case CLEANUP_POINT_EXPR:
5898 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5900 case SAVE_EXPR:
5901 exp_rtl = SAVE_EXPR_RTL (exp);
5902 if (exp_rtl)
5903 break;
5905 /* If we've already scanned this, don't do it again. Otherwise,
5906 show we've scanned it and record for clearing the flag if we're
5907 going on. */
5908 if (TREE_PRIVATE (exp))
5909 return 1;
5911 TREE_PRIVATE (exp) = 1;
5912 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5914 TREE_PRIVATE (exp) = 0;
5915 return 0;
5918 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5919 return 1;
5921 case BIND_EXPR:
5922 /* The only operand we look at is operand 1. The rest aren't
5923 part of the expression. */
5924 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5926 default:
5927 break;
5930 /* If we have an rtx, we do not need to scan our operands. */
5931 if (exp_rtl)
5932 break;
5934 nops = first_rtl_op (TREE_CODE (exp));
5935 for (i = 0; i < nops; i++)
5936 if (TREE_OPERAND (exp, i) != 0
5937 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5938 return 0;
5940 /* If this is a language-specific tree code, it may require
5941 special handling. */
5942 if ((unsigned int) TREE_CODE (exp)
5943 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5944 && !(*lang_hooks.safe_from_p) (x, exp))
5945 return 0;
5948 /* If we have an rtl, find any enclosed object. Then see if we conflict
5949 with it. */
5950 if (exp_rtl)
5952 if (GET_CODE (exp_rtl) == SUBREG)
5954 exp_rtl = SUBREG_REG (exp_rtl);
5955 if (GET_CODE (exp_rtl) == REG
5956 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5957 return 0;
5960 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5961 are memory and they conflict. */
5962 return ! (rtx_equal_p (x, exp_rtl)
5963 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5964 && true_dependence (exp_rtl, VOIDmode, x,
5965 rtx_addr_varies_p)));
5968 /* If we reach here, it is safe. */
5969 return 1;
5972 /* Subroutine of expand_expr: return rtx if EXP is a
5973 variable or parameter; else return 0. */
5975 static rtx
5976 var_rtx (tree exp)
5978 STRIP_NOPS (exp);
5979 switch (TREE_CODE (exp))
5981 case PARM_DECL:
5982 case VAR_DECL:
5983 return DECL_RTL (exp);
5984 default:
5985 return 0;
5989 /* Return the highest power of two that EXP is known to be a multiple of.
5990 This is used in updating alignment of MEMs in array references. */
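/* For example (illustrative): for the expression I * 4 + 16 this returns
   MIN (1 * 4, 16) = 4, since a variable factor such as I contributes only
   the default factor of 1.  */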
5992 static unsigned HOST_WIDE_INT
5993 highest_pow2_factor (tree exp)
5995 unsigned HOST_WIDE_INT c0, c1;
5997 switch (TREE_CODE (exp))
5999 case INTEGER_CST:
6000 /* We can find the lowest bit that's a one. If the low
6001 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6002 We need to handle this case since we can find it in a COND_EXPR,
6003 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6004 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6005 later ICE. */
6006 if (TREE_CONSTANT_OVERFLOW (exp))
6007 return BIGGEST_ALIGNMENT;
6008 else
6010 /* Note: tree_low_cst is intentionally not used here;
6011 we don't care about the upper bits. */
6012 c0 = TREE_INT_CST_LOW (exp);
6013 c0 &= -c0;
6014 return c0 ? c0 : BIGGEST_ALIGNMENT;
6016 break;
6018 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6019 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6020 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6021 return MIN (c0, c1);
6023 case MULT_EXPR:
6024 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6025 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6026 return c0 * c1;
6028 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6029 case CEIL_DIV_EXPR:
6030 if (integer_pow2p (TREE_OPERAND (exp, 1))
6031 && host_integerp (TREE_OPERAND (exp, 1), 1))
6033 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6034 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6035 return MAX (1, c0 / c1);
6037 break;
6039 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6040 case SAVE_EXPR: case WITH_RECORD_EXPR:
6041 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6043 case COMPOUND_EXPR:
6044 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6046 case COND_EXPR:
6047 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6048 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6049 return MIN (c0, c1);
6051 default:
6052 break;
6055 return 1;
6058 /* Similar, except that it is known that the expression must be a multiple
6059 of the alignment of TYPE. */
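/* E.g. (a sketch): for a type aligned to 4 bytes and EXP == I * 2, this
   returns MAX (2, 4) = 4.  */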
6061 static unsigned HOST_WIDE_INT
6062 highest_pow2_factor_for_type (tree type, tree exp)
6064 unsigned HOST_WIDE_INT type_align, factor;
6066 factor = highest_pow2_factor (exp);
6067 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6068 return MAX (factor, type_align);
6071 /* Return an object on the placeholder list that matches EXP, a
6072 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6073 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6074 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6075 is a location that initially holds the starting point in the placeholder
6076 list (zero means the start of the list); on return it holds a pointer to
6077 the placeholder list entry at which the object was found. */
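/* Typical use (a sketch modeled on the PLACEHOLDER_EXPR case in
   expand_expr below):

     tree placeholder_expr = 0;
     tree object = find_placeholder (exp, &placeholder_expr);

   OBJECT, if nonzero, is the tree to substitute for EXP, and
   PLACEHOLDER_EXPR then points at the placeholder_list entry that
   supplied it.  */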
6079 tree
6080 find_placeholder (tree exp, tree *plist)
6082 tree type = TREE_TYPE (exp);
6083 tree placeholder_expr;
6085 for (placeholder_expr
6086 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6087 placeholder_expr != 0;
6088 placeholder_expr = TREE_CHAIN (placeholder_expr))
6090 tree need_type = TYPE_MAIN_VARIANT (type);
6091 tree elt;
6093 /* Find the outermost reference that is of the type we want. If none,
6094 see if any object has a type that is a pointer to the type we
6095 want. */
6096 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6097 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6098 || TREE_CODE (elt) == COND_EXPR)
6099 ? TREE_OPERAND (elt, 1)
6100 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6101 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6102 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6103 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6104 ? TREE_OPERAND (elt, 0) : 0))
6105 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6107 if (plist)
6108 *plist = placeholder_expr;
6109 return elt;
6112 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6114 = ((TREE_CODE (elt) == COMPOUND_EXPR
6115 || TREE_CODE (elt) == COND_EXPR)
6116 ? TREE_OPERAND (elt, 1)
6117 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6118 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6119 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6120 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6121 ? TREE_OPERAND (elt, 0) : 0))
6122 if (POINTER_TYPE_P (TREE_TYPE (elt))
6123 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6124 == need_type))
6126 if (plist)
6127 *plist = placeholder_expr;
6128 return build1 (INDIRECT_REF, need_type, elt);
6132 return 0;
6135 /* Subroutine of expand_expr. Expand the two operands of a binary
6136 expression, EXP0 and EXP1, placing the results in *OP0 and *OP1.
6137 The value may be stored in TARGET if TARGET is nonzero. The
6138 MODIFIER argument is as documented by expand_expr. */
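/* A binary-operator case would use this roughly as follows (a sketch;
   the local names are arbitrary):

     rtx op0, op1;
     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);

   after which OP0 and OP1 hold the expanded operands.  */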
6140 static void
6141 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6142 enum expand_modifier modifier)
6144 if (! safe_from_p (target, exp1, 1))
6145 target = 0;
6146 if (operand_equal_p (exp0, exp1, 0))
6148 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6149 *op1 = copy_rtx (*op0);
6151 else
6153 /* If we need to preserve evaluation order, copy exp0 into its own
6154 temporary variable so that it can't be clobbered by exp1. */
6155 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6156 exp0 = save_expr (exp0);
6157 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6158 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6163 /* expand_expr: generate code for computing expression EXP.
6164 An rtx for the computed value is returned. The value is never null.
6165 In the case of a void EXP, const0_rtx is returned.
6167 The value may be stored in TARGET if TARGET is nonzero.
6168 TARGET is just a suggestion; callers must assume that
6169 the rtx returned may not be the same as TARGET.
6171 If TARGET is CONST0_RTX, it means that the value will be ignored.
6173 If TMODE is not VOIDmode, it suggests generating the
6174 result in mode TMODE. But this is done only when convenient.
6175 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6176 TMODE is just a suggestion; callers must assume that
6177 the rtx returned may not have mode TMODE.
6179 Note that TARGET may have neither TMODE nor MODE. In that case, it
6180 probably will not be used.
6182 If MODIFIER is EXPAND_SUM then when EXP is an addition
6183 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6184 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6185 products as above, or REG or MEM, or constant.
6186 Ordinarily in such cases we would output mul or add instructions
6187 and then return a pseudo reg containing the sum.
6189 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6190 it also marks a label as absolutely required (it can't be dead).
6191 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6192 This is used for outputting expressions used in initializers.
6194 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6195 with a constant address even if that address is not normally legitimate.
6196 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6198 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6199 a call parameter. Such targets require special care as we haven't yet
6200 marked TARGET so that it's safe from being trashed by libcalls. We
6201 don't want to use TARGET for anything but the final result;
6202 intermediate values must go elsewhere. Additionally, calls to
6203 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
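/* In its simplest form a caller writes (a sketch):

     rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   letting expand_expr choose both the destination and the mode; as noted
   above, TARGET and TMODE are only hints and the result must be taken
   from the returned rtx.  */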
6206 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6207 enum expand_modifier modifier)
6209 rtx op0, op1, temp;
6210 tree type = TREE_TYPE (exp);
6211 int unsignedp = TREE_UNSIGNED (type);
6212 enum machine_mode mode;
6213 enum tree_code code = TREE_CODE (exp);
6214 optab this_optab;
6215 rtx subtarget, original_target;
6216 int ignore;
6217 tree context;
6219 /* Handle ERROR_MARK before anybody tries to access its type. */
6220 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6222 op0 = CONST0_RTX (tmode);
6223 if (op0 != 0)
6224 return op0;
6225 return const0_rtx;
6228 mode = TYPE_MODE (type);
6229 /* Use subtarget as the target for operand 0 of a binary operation. */
6230 subtarget = get_subtarget (target);
6231 original_target = target;
6232 ignore = (target == const0_rtx
6233 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6234 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6235 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6236 && TREE_CODE (type) == VOID_TYPE));
6238 /* If we are going to ignore this result, we need only do something
6239 if there is a side-effect somewhere in the expression. If there
6240 is, short-circuit the most common cases here. Note that we must
6241 not call expand_expr with anything but const0_rtx in case this
6242 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6244 if (ignore)
6246 if (! TREE_SIDE_EFFECTS (exp))
6247 return const0_rtx;
6249 /* Ensure we reference a volatile object even if value is ignored, but
6250 don't do this if all we are doing is taking its address. */
6251 if (TREE_THIS_VOLATILE (exp)
6252 && TREE_CODE (exp) != FUNCTION_DECL
6253 && mode != VOIDmode && mode != BLKmode
6254 && modifier != EXPAND_CONST_ADDRESS)
6256 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6257 if (GET_CODE (temp) == MEM)
6258 temp = copy_to_reg (temp);
6259 return const0_rtx;
6262 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6263 || code == INDIRECT_REF || code == BUFFER_REF)
6264 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6265 modifier);
6267 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6268 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6270 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6271 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6272 return const0_rtx;
6274 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6275 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6276 /* If the second operand has no side effects, just evaluate
6277 the first. */
6278 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6279 modifier);
6280 else if (code == BIT_FIELD_REF)
6282 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6283 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6284 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6285 return const0_rtx;
6288 target = 0;
6291 /* If we will do cse, generate all results into pseudo registers
6292 since 1) that allows cse to find more things
6293 and 2) otherwise cse could produce an insn the machine
6294 cannot support. An exception is a CONSTRUCTOR into a multi-word
6295 MEM: that's much more likely to be most efficient into the MEM.
6296 Another is a CALL_EXPR which must return in memory. */
6298 if (! cse_not_expected && mode != BLKmode && target
6299 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6300 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6301 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6302 target = 0;
6304 switch (code)
6306 case LABEL_DECL:
6308 tree function = decl_function_context (exp);
6309 /* Labels in containing functions, or labels used from initializers,
6310 must be forced. */
6311 if (modifier == EXPAND_INITIALIZER
6312 || (function != current_function_decl
6313 && function != inline_function_decl
6314 && function != 0))
6315 temp = force_label_rtx (exp);
6316 else
6317 temp = label_rtx (exp);
6319 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6320 if (function != current_function_decl
6321 && function != inline_function_decl && function != 0)
6322 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6323 return temp;
6326 case PARM_DECL:
6327 if (!DECL_RTL_SET_P (exp))
6329 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6330 return CONST0_RTX (mode);
6333 /* ... fall through ... */
6335 case VAR_DECL:
6336 /* If a static var's type was incomplete when the decl was written,
6337 but the type is complete now, lay out the decl now. */
6338 if (DECL_SIZE (exp) == 0
6339 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6340 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6341 layout_decl (exp, 0);
6343 /* ... fall through ... */
6345 case FUNCTION_DECL:
6346 case RESULT_DECL:
6347 if (DECL_RTL (exp) == 0)
6348 abort ();
6350 /* Ensure the variable is marked as used even if it doesn't go through
6351 a parser. If it hasn't been used yet, write out an external
6352 definition. */
6353 if (! TREE_USED (exp))
6355 assemble_external (exp);
6356 TREE_USED (exp) = 1;
6359 /* Show we haven't gotten RTL for this yet. */
6360 temp = 0;
6362 /* Handle variables inherited from containing functions. */
6363 context = decl_function_context (exp);
6365 /* We treat inline_function_decl as an alias for the current function
6366 because that is the inline function whose vars, types, etc.
6367 are being merged into the current function.
6368 See expand_inline_function. */
6370 if (context != 0 && context != current_function_decl
6371 && context != inline_function_decl
6372 /* If var is static, we don't need a static chain to access it. */
6373 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6374 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6376 rtx addr;
6378 /* Mark as non-local and addressable. */
6379 DECL_NONLOCAL (exp) = 1;
6380 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6381 abort ();
6382 (*lang_hooks.mark_addressable) (exp);
6383 if (GET_CODE (DECL_RTL (exp)) != MEM)
6384 abort ();
6385 addr = XEXP (DECL_RTL (exp), 0);
6386 if (GET_CODE (addr) == MEM)
6387 addr
6388 = replace_equiv_address (addr,
6389 fix_lexical_addr (XEXP (addr, 0), exp));
6390 else
6391 addr = fix_lexical_addr (addr, exp);
6393 temp = replace_equiv_address (DECL_RTL (exp), addr);
6396 /* This is the case of an array whose size is to be determined
6397 from its initializer, while the initializer is still being parsed.
6398 See expand_decl. */
6400 else if (GET_CODE (DECL_RTL (exp)) == MEM
6401 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6402 temp = validize_mem (DECL_RTL (exp));
6404 /* If DECL_RTL is memory, we are in the normal case and either
6405 the address is not valid or it is not a register and -fforce-addr
6406 is specified, get the address into a register. */
6408 else if (GET_CODE (DECL_RTL (exp)) == MEM
6409 && modifier != EXPAND_CONST_ADDRESS
6410 && modifier != EXPAND_SUM
6411 && modifier != EXPAND_INITIALIZER
6412 && (! memory_address_p (DECL_MODE (exp),
6413 XEXP (DECL_RTL (exp), 0))
6414 || (flag_force_addr
6415 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6416 temp = replace_equiv_address (DECL_RTL (exp),
6417 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6419 /* If we got something, return it. But first, set the alignment
6420 if the address is a register. */
6421 if (temp != 0)
6423 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6424 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6426 return temp;
6429 /* If the mode of DECL_RTL does not match that of the decl, it
6430 must be a promoted value. We return a SUBREG of the wanted mode,
6431 but mark it so that we know that it was already extended. */
6433 if (GET_CODE (DECL_RTL (exp)) == REG
6434 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6436 /* Get the signedness used for this variable. Ensure we get the
6437 same mode we got when the variable was declared. */
6438 if (GET_MODE (DECL_RTL (exp))
6439 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6440 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6441 abort ();
6443 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6444 SUBREG_PROMOTED_VAR_P (temp) = 1;
6445 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6446 return temp;
6449 return DECL_RTL (exp);
6451 case INTEGER_CST:
6452 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6453 TREE_INT_CST_HIGH (exp), mode);
6455 /* ??? If overflow is set, fold will have done an incomplete job,
6456 which can result in (plus xx (const_int 0)), which can get
6457 simplified by validate_replace_rtx during virtual register
6458 instantiation, which can result in unrecognizable insns.
6459 Avoid this by forcing all overflows into registers. */
6460 if (TREE_CONSTANT_OVERFLOW (exp)
6461 && modifier != EXPAND_INITIALIZER)
6462 temp = force_reg (mode, temp);
6464 return temp;
6466 case VECTOR_CST:
6467 return const_vector_from_tree (exp);
6469 case CONST_DECL:
6470 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6472 case REAL_CST:
6473 /* If optimized, generate immediate CONST_DOUBLE
6474 which will be turned into memory by reload if necessary.
6476 We used to force a register so that loop.c could see it. But
6477 this does not allow gen_* patterns to perform optimizations with
6478 the constants. It also produces two insns in cases like "x = 1.0;".
6479 On most machines, floating-point constants are not permitted in
6480 many insns, so we'd end up copying it to a register in any case.
6482 Now, we do the copying in expand_binop, if appropriate. */
6483 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6484 TYPE_MODE (TREE_TYPE (exp)));
6486 case COMPLEX_CST:
6487 /* Handle evaluating a complex constant in a CONCAT target. */
6488 if (original_target && GET_CODE (original_target) == CONCAT)
6490 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6491 rtx rtarg, itarg;
6493 rtarg = XEXP (original_target, 0);
6494 itarg = XEXP (original_target, 1);
6496 /* Move the real and imaginary parts separately. */
6497 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6498 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6500 if (op0 != rtarg)
6501 emit_move_insn (rtarg, op0);
6502 if (op1 != itarg)
6503 emit_move_insn (itarg, op1);
6505 return original_target;
6508 /* ... fall through ... */
6510 case STRING_CST:
6511 temp = output_constant_def (exp, 1);
6513 /* temp contains a constant address.
6514 On RISC machines where a constant address isn't valid,
6515 make some insns to get that address into a register. */
6516 if (modifier != EXPAND_CONST_ADDRESS
6517 && modifier != EXPAND_INITIALIZER
6518 && modifier != EXPAND_SUM
6519 && (! memory_address_p (mode, XEXP (temp, 0))
6520 || flag_force_addr))
6521 return replace_equiv_address (temp,
6522 copy_rtx (XEXP (temp, 0)));
6523 return temp;
6525 case EXPR_WITH_FILE_LOCATION:
6527 rtx to_return;
6528 struct file_stack fs;
6530 fs.location = input_location;
6531 fs.next = expr_wfl_stack;
6532 input_filename = EXPR_WFL_FILENAME (exp);
6533 input_line = EXPR_WFL_LINENO (exp);
6534 expr_wfl_stack = &fs;
6535 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6536 emit_line_note (input_location);
6537 /* Possibly avoid switching back and forth here. */
6538 to_return = expand_expr (EXPR_WFL_NODE (exp),
6539 (ignore ? const0_rtx : target),
6540 tmode, modifier);
6541 if (expr_wfl_stack != &fs)
6542 abort ();
6543 input_location = fs.location;
6544 expr_wfl_stack = fs.next;
6545 return to_return;
6548 case SAVE_EXPR:
6549 context = decl_function_context (exp);
6551 /* If this SAVE_EXPR was at global context, assume we are an
6552 initialization function and move it into our context. */
6553 if (context == 0)
6554 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6556 /* We treat inline_function_decl as an alias for the current function
6557 because that is the inline function whose vars, types, etc.
6558 are being merged into the current function.
6559 See expand_inline_function. */
6560 if (context == current_function_decl || context == inline_function_decl)
6561 context = 0;
6563 /* If this is non-local, handle it. */
6564 if (context)
6566 /* The following call just exists to abort if the context is
6567 not of a containing function. */
6568 find_function_data (context);
6570 temp = SAVE_EXPR_RTL (exp);
6571 if (temp && GET_CODE (temp) == REG)
6573 put_var_into_stack (exp, /*rescan=*/true);
6574 temp = SAVE_EXPR_RTL (exp);
6576 if (temp == 0 || GET_CODE (temp) != MEM)
6577 abort ();
6578 return
6579 replace_equiv_address (temp,
6580 fix_lexical_addr (XEXP (temp, 0), exp));
6582 if (SAVE_EXPR_RTL (exp) == 0)
6584 if (mode == VOIDmode)
6585 temp = const0_rtx;
6586 else
6587 temp = assign_temp (build_qualified_type (type,
6588 (TYPE_QUALS (type)
6589 | TYPE_QUAL_CONST)),
6590 3, 0, 0);
6592 SAVE_EXPR_RTL (exp) = temp;
6593 if (!optimize && GET_CODE (temp) == REG)
6594 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6595 save_expr_regs);
6597 /* If the mode of TEMP does not match that of the expression, it
6598 must be a promoted value. We pass store_expr a SUBREG of the
6599 wanted mode but mark it so that we know that it was already
6600 extended. */
6602 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6604 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6605 promote_mode (type, mode, &unsignedp, 0);
6606 SUBREG_PROMOTED_VAR_P (temp) = 1;
6607 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6610 if (temp == const0_rtx)
6611 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6612 else
6613 store_expr (TREE_OPERAND (exp, 0), temp,
6614 modifier == EXPAND_STACK_PARM ? 2 : 0);
6616 TREE_USED (exp) = 1;
6619 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6620 must be a promoted value. We return a SUBREG of the wanted mode,
6621 but mark it so that we know that it was already extended. */
6623 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6624 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6626 /* Compute the signedness and make the proper SUBREG. */
6627 promote_mode (type, mode, &unsignedp, 0);
6628 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6629 SUBREG_PROMOTED_VAR_P (temp) = 1;
6630 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6631 return temp;
6634 return SAVE_EXPR_RTL (exp);
6636 case UNSAVE_EXPR:
6638 rtx temp;
6639 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6640 TREE_OPERAND (exp, 0)
6641 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6642 return temp;
6645 case PLACEHOLDER_EXPR:
6647 tree old_list = placeholder_list;
6648 tree placeholder_expr = 0;
6650 exp = find_placeholder (exp, &placeholder_expr);
6651 if (exp == 0)
6652 abort ();
6654 placeholder_list = TREE_CHAIN (placeholder_expr);
6655 temp = expand_expr (exp, original_target, tmode, modifier);
6656 placeholder_list = old_list;
6657 return temp;
6660 case WITH_RECORD_EXPR:
6661 /* Put the object on the placeholder list, expand our first operand,
6662 and pop the list. */
6663 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6664 placeholder_list);
6665 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6666 modifier);
6667 placeholder_list = TREE_CHAIN (placeholder_list);
6668 return target;
6670 case GOTO_EXPR:
6671 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6672 expand_goto (TREE_OPERAND (exp, 0));
6673 else
6674 expand_computed_goto (TREE_OPERAND (exp, 0));
6675 return const0_rtx;
6677 case EXIT_EXPR:
6678 expand_exit_loop_if_false (NULL,
6679 invert_truthvalue (TREE_OPERAND (exp, 0)));
6680 return const0_rtx;
6682 case LABELED_BLOCK_EXPR:
6683 if (LABELED_BLOCK_BODY (exp))
6684 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6685 /* Should perhaps use expand_label, but this is simpler and safer. */
6686 do_pending_stack_adjust ();
6687 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6688 return const0_rtx;
6690 case EXIT_BLOCK_EXPR:
6691 if (EXIT_BLOCK_RETURN (exp))
6692 sorry ("returned value in block_exit_expr");
6693 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6694 return const0_rtx;
6696 case LOOP_EXPR:
6697 push_temp_slots ();
6698 expand_start_loop (1);
6699 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6700 expand_end_loop ();
6701 pop_temp_slots ();
6703 return const0_rtx;
6705 case BIND_EXPR:
6707 tree vars = TREE_OPERAND (exp, 0);
6709 /* Need to open a binding contour here because
6710 if there are any cleanups they must be contained here. */
6711 expand_start_bindings (2);
6713 /* Mark the corresponding BLOCK for output in its proper place. */
6714 if (TREE_OPERAND (exp, 2) != 0
6715 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6716 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6718 /* If VARS have not yet been expanded, expand them now. */
6719 while (vars)
6721 if (!DECL_RTL_SET_P (vars))
6722 expand_decl (vars);
6723 expand_decl_init (vars);
6724 vars = TREE_CHAIN (vars);
6727 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6729 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6731 return temp;
6734 case RTL_EXPR:
6735 if (RTL_EXPR_SEQUENCE (exp))
6737 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6738 abort ();
6739 emit_insn (RTL_EXPR_SEQUENCE (exp));
6740 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6742 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6743 free_temps_for_rtl_expr (exp);
6744 return RTL_EXPR_RTL (exp);
6746 case CONSTRUCTOR:
6747 /* If we don't need the result, just ensure we evaluate any
6748 subexpressions. */
6749 if (ignore)
6751 tree elt;
6753 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6754 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6756 return const0_rtx;
6759 /* All elts simple constants => refer to a constant in memory. But
6760 if this is a non-BLKmode mode, let it store a field at a time
6761 since that should make a CONST_INT or CONST_DOUBLE when we
6762 fold. Likewise, if we have a target we can use, it is best to
6763 store directly into the target unless the type is large enough
6764 that memcpy will be used. If we are making an initializer and
6765 all operands are constant, put it in memory as well.
6767 FIXME: Avoid trying to fill vector constructors piece-meal.
6768 Output them with output_constant_def below unless we're sure
6769 they're zeros. This should go away when vector initializers
6770 are treated like VECTOR_CST instead of arrays.
6772 else if ((TREE_STATIC (exp)
6773 && ((mode == BLKmode
6774 && ! (target != 0 && safe_from_p (target, exp, 1)))
6775 || TREE_ADDRESSABLE (exp)
6776 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6777 && (! MOVE_BY_PIECES_P
6778 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6779 TYPE_ALIGN (type)))
6780 && ((TREE_CODE (type) == VECTOR_TYPE
6781 && !is_zeros_p (exp))
6782 || ! mostly_zeros_p (exp)))))
6783 || ((modifier == EXPAND_INITIALIZER
6784 || modifier == EXPAND_CONST_ADDRESS)
6785 && TREE_CONSTANT (exp)))
6787 rtx constructor = output_constant_def (exp, 1);
6789 if (modifier != EXPAND_CONST_ADDRESS
6790 && modifier != EXPAND_INITIALIZER
6791 && modifier != EXPAND_SUM)
6792 constructor = validize_mem (constructor);
6794 return constructor;
6796 else
6798 /* Handle calls that pass values in multiple non-contiguous
6799 locations. The Irix 6 ABI has examples of this. */
6800 if (target == 0 || ! safe_from_p (target, exp, 1)
6801 || GET_CODE (target) == PARALLEL
6802 || modifier == EXPAND_STACK_PARM)
6803 target
6804 = assign_temp (build_qualified_type (type,
6805 (TYPE_QUALS (type)
6806 | (TREE_READONLY (exp)
6807 * TYPE_QUAL_CONST))),
6808 0, TREE_ADDRESSABLE (exp), 1);
6810 store_constructor (exp, target, 0, int_expr_size (exp));
6811 return target;
6814 case INDIRECT_REF:
6816 tree exp1 = TREE_OPERAND (exp, 0);
6817 tree index;
6818 tree string = string_constant (exp1, &index);
6820 /* Try to optimize reads from const strings. */
6821 if (string
6822 && TREE_CODE (string) == STRING_CST
6823 && TREE_CODE (index) == INTEGER_CST
6824 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6825 && GET_MODE_CLASS (mode) == MODE_INT
6826 && GET_MODE_SIZE (mode) == 1
6827 && modifier != EXPAND_WRITE)
6828 return gen_int_mode (TREE_STRING_POINTER (string)
6829 [TREE_INT_CST_LOW (index)], mode);
6831 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6832 op0 = memory_address (mode, op0);
6833 temp = gen_rtx_MEM (mode, op0);
6834 set_mem_attributes (temp, exp, 0);
6836 /* If we are writing to this object and its type is a record with
6837 readonly fields, we must mark it as readonly so it will
6838 conflict with readonly references to those fields. */
6839 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6840 RTX_UNCHANGING_P (temp) = 1;
6842 return temp;
6845 case ARRAY_REF:
6846 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6847 abort ();
6850 tree array = TREE_OPERAND (exp, 0);
6851 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6852 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6853 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6854 HOST_WIDE_INT i;
6856 /* Optimize the special-case of a zero lower bound.
6858 We convert the low_bound to sizetype to avoid some problems
6859 with constant folding. (E.g. suppose the lower bound is 1,
6860 and its mode is QI. Without the conversion, (ARRAY
6861 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6862 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6864 if (! integer_zerop (low_bound))
6865 index = size_diffop (index, convert (sizetype, low_bound));
6867 /* Fold an expression like: "foo"[2].
6868 This is not done in fold so it won't happen inside &.
6869 Don't fold if this is for wide characters since it's too
6870 difficult to do correctly and this is a very rare case. */
6872 if (modifier != EXPAND_CONST_ADDRESS
6873 && modifier != EXPAND_INITIALIZER
6874 && modifier != EXPAND_MEMORY
6875 && TREE_CODE (array) == STRING_CST
6876 && TREE_CODE (index) == INTEGER_CST
6877 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6878 && GET_MODE_CLASS (mode) == MODE_INT
6879 && GET_MODE_SIZE (mode) == 1)
6880 return gen_int_mode (TREE_STRING_POINTER (array)
6881 [TREE_INT_CST_LOW (index)], mode);
6883 /* If this is a constant index into a constant array,
6884 just get the value from the array. Handle both the cases when
6885 we have an explicit constructor and when our operand is a variable
6886 that was declared const. */
6888 if (modifier != EXPAND_CONST_ADDRESS
6889 && modifier != EXPAND_INITIALIZER
6890 && modifier != EXPAND_MEMORY
6891 && TREE_CODE (array) == CONSTRUCTOR
6892 && ! TREE_SIDE_EFFECTS (array)
6893 && TREE_CODE (index) == INTEGER_CST
6894 && 0 > compare_tree_int (index,
6895 list_length (CONSTRUCTOR_ELTS
6896 (TREE_OPERAND (exp, 0)))))
6898 tree elem;
6900 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6901 i = TREE_INT_CST_LOW (index);
6902 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6905 if (elem)
6906 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6907 modifier);
6910 else if (optimize >= 1
6911 && modifier != EXPAND_CONST_ADDRESS
6912 && modifier != EXPAND_INITIALIZER
6913 && modifier != EXPAND_MEMORY
6914 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6915 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6916 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6917 && targetm.binds_local_p (array))
6919 if (TREE_CODE (index) == INTEGER_CST)
6921 tree init = DECL_INITIAL (array);
6923 if (TREE_CODE (init) == CONSTRUCTOR)
6925 tree elem;
6927 for (elem = CONSTRUCTOR_ELTS (init);
6928 (elem
6929 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6930 elem = TREE_CHAIN (elem))
6933 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6934 return expand_expr (fold (TREE_VALUE (elem)), target,
6935 tmode, modifier);
6937 else if (TREE_CODE (init) == STRING_CST
6938 && 0 > compare_tree_int (index,
6939 TREE_STRING_LENGTH (init)))
6941 tree type = TREE_TYPE (TREE_TYPE (init));
6942 enum machine_mode mode = TYPE_MODE (type);
6944 if (GET_MODE_CLASS (mode) == MODE_INT
6945 && GET_MODE_SIZE (mode) == 1)
6946 return gen_int_mode (TREE_STRING_POINTER (init)
6947 [TREE_INT_CST_LOW (index)], mode);
6952 goto normal_inner_ref;
6954 case COMPONENT_REF:
6955 /* If the operand is a CONSTRUCTOR, we can just extract the
6956 appropriate field if it is present. */
6957 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6959 tree elt;
6961 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6962 elt = TREE_CHAIN (elt))
6963 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6964 /* We can normally use the value of the field in the
6965 CONSTRUCTOR. However, if this is a bitfield in
6966 an integral mode that we can fit in a HOST_WIDE_INT,
6967 we must mask only the number of bits in the bitfield,
6968 since this is done implicitly by the constructor. If
6969 the bitfield does not meet either of those conditions,
6970 we can't do this optimization. */
6971 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6972 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6973 == MODE_INT)
6974 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6975 <= HOST_BITS_PER_WIDE_INT))))
6977 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6978 && modifier == EXPAND_STACK_PARM)
6979 target = 0;
6980 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6981 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6983 HOST_WIDE_INT bitsize
6984 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6985 enum machine_mode imode
6986 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6988 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6990 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6991 op0 = expand_and (imode, op0, op1, target);
6993 else
6995 tree count
6996 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6999 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7000 target, 0);
7001 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7002 target, 0);
7006 return op0;
7009 goto normal_inner_ref;
7011 case BIT_FIELD_REF:
7012 case ARRAY_RANGE_REF:
7013 normal_inner_ref:
7015 enum machine_mode mode1;
7016 HOST_WIDE_INT bitsize, bitpos;
7017 tree offset;
7018 int volatilep = 0;
7019 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7020 &mode1, &unsignedp, &volatilep);
7021 rtx orig_op0;
7023 /* If we got back the original object, something is wrong. Perhaps
7024 we are evaluating an expression too early. In any event, don't
7025 infinitely recurse. */
7026 if (tem == exp)
7027 abort ();
7029 /* If TEM's type is a union of variable size, pass TARGET to the inner
7030 computation, since it will need a temporary and TARGET is known
7031 to suffice. This occurs in unchecked conversion in Ada. */
7033 orig_op0 = op0
7034 = expand_expr (tem,
7035 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7036 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7037 != INTEGER_CST)
7038 && modifier != EXPAND_STACK_PARM
7039 ? target : NULL_RTX),
7040 VOIDmode,
7041 (modifier == EXPAND_INITIALIZER
7042 || modifier == EXPAND_CONST_ADDRESS
7043 || modifier == EXPAND_STACK_PARM)
7044 ? modifier : EXPAND_NORMAL);
7046 /* If this is a constant, put it into a register if it is a
7047 legitimate constant and OFFSET is 0 and memory if it isn't. */
7048 if (CONSTANT_P (op0))
7050 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7051 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7052 && offset == 0)
7053 op0 = force_reg (mode, op0);
7054 else
7055 op0 = validize_mem (force_const_mem (mode, op0));
7058 /* Otherwise, if this object is not in memory and we either have an
7059 offset or a BLKmode result, put it there. This case can't occur in
7060 C, but can in Ada if we have unchecked conversion of an expression
7061 from a scalar type to an array or record type or for an
7062 ARRAY_RANGE_REF whose type is BLKmode. */
7063 else if (GET_CODE (op0) != MEM
7064 && (offset != 0
7065 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7067 /* If the operand is a SAVE_EXPR, we can deal with this by
7068 forcing the SAVE_EXPR into memory. */
7069 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7071 put_var_into_stack (TREE_OPERAND (exp, 0),
7072 /*rescan=*/true);
7073 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7075 else
7077 tree nt
7078 = build_qualified_type (TREE_TYPE (tem),
7079 (TYPE_QUALS (TREE_TYPE (tem))
7080 | TYPE_QUAL_CONST));
7081 rtx memloc = assign_temp (nt, 1, 1, 1);
7083 emit_move_insn (memloc, op0);
7084 op0 = memloc;
7088 if (offset != 0)
7090 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7091 EXPAND_SUM);
7093 if (GET_CODE (op0) != MEM)
7094 abort ();
7096 #ifdef POINTERS_EXTEND_UNSIGNED
7097 if (GET_MODE (offset_rtx) != Pmode)
7098 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7099 #else
7100 if (GET_MODE (offset_rtx) != ptr_mode)
7101 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7102 #endif
7104 if (GET_MODE (op0) == BLKmode
7105 /* A constant address in OP0 can have VOIDmode; we must
7106 not try to call force_reg in that case. */
7107 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7108 && bitsize != 0
7109 && (bitpos % bitsize) == 0
7110 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7111 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7113 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7114 bitpos = 0;
7117 op0 = offset_address (op0, offset_rtx,
7118 highest_pow2_factor (offset));
7121 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7122 record its alignment as BIGGEST_ALIGNMENT. */
7123 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7124 && is_aligning_offset (offset, tem))
7125 set_mem_align (op0, BIGGEST_ALIGNMENT);
7127 /* Don't forget about volatility even if this is a bitfield. */
7128 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7130 if (op0 == orig_op0)
7131 op0 = copy_rtx (op0);
7133 MEM_VOLATILE_P (op0) = 1;
7136 /* The following code doesn't handle CONCAT.
7137 Assume only bitpos == 0 can be used for CONCAT, due to
7138 one-element arrays having the same mode as their element. */
7139 if (GET_CODE (op0) == CONCAT)
7141 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7142 abort ();
7143 return op0;
7146 /* In cases where an aligned union has an unaligned object
7147 as a field, we might be extracting a BLKmode value from
7148 an integer-mode (e.g., SImode) object. Handle this case
7149 by doing the extract into an object as wide as the field
7150 (which we know to be the width of a basic mode), then
7151 storing into memory, and changing the mode to BLKmode. */
7152 if (mode1 == VOIDmode
7153 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7154 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7155 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7156 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7157 && modifier != EXPAND_CONST_ADDRESS
7158 && modifier != EXPAND_INITIALIZER)
7159 /* If the field isn't aligned enough to fetch as a memref,
7160 fetch it as a bit field. */
7161 || (mode1 != BLKmode
7162 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7163 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7164 || (GET_CODE (op0) == MEM
7165 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7166 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7167 && ((modifier == EXPAND_CONST_ADDRESS
7168 || modifier == EXPAND_INITIALIZER)
7169 ? STRICT_ALIGNMENT
7170 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7171 || (bitpos % BITS_PER_UNIT != 0)))
7172 /* If the type and the field are a constant size and the
7173 size of the type isn't the same size as the bitfield,
7174 we must use bitfield operations. */
7175 || (bitsize >= 0
7176 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7177 == INTEGER_CST)
7178 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7179 bitsize)))
7181 enum machine_mode ext_mode = mode;
7183 if (ext_mode == BLKmode
7184 && ! (target != 0 && GET_CODE (op0) == MEM
7185 && GET_CODE (target) == MEM
7186 && bitpos % BITS_PER_UNIT == 0))
7187 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7189 if (ext_mode == BLKmode)
7191 if (target == 0)
7192 target = assign_temp (type, 0, 1, 1);
7194 if (bitsize == 0)
7195 return target;
7197 /* In this case, BITPOS must start at a byte boundary and
7198 TARGET, if specified, must be a MEM. */
7199 if (GET_CODE (op0) != MEM
7200 || (target != 0 && GET_CODE (target) != MEM)
7201 || bitpos % BITS_PER_UNIT != 0)
7202 abort ();
7204 emit_block_move (target,
7205 adjust_address (op0, VOIDmode,
7206 bitpos / BITS_PER_UNIT),
7207 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7208 / BITS_PER_UNIT),
7209 (modifier == EXPAND_STACK_PARM
7210 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7212 return target;
7215 op0 = validize_mem (op0);
7217 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7218 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7220 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7221 (modifier == EXPAND_STACK_PARM
7222 ? NULL_RTX : target),
7223 ext_mode, ext_mode,
7224 int_size_in_bytes (TREE_TYPE (tem)));
7226 /* If the result is a record type and BITSIZE is narrower than
7227 the mode of OP0, an integral mode, and this is a big endian
7228 machine, we must put the field into the high-order bits. */
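/* Rough illustration (numbers are made up for this sketch): fetching a
   24-bit field into a 32-bit SImode register on a big-endian target is
   followed by a left shift of 32 - 24 = 8, so the field's bytes end up
   in the high-order positions that the record layout expects.  */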
7229 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7230 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7231 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7232 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7233 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7234 - bitsize),
7235 op0, 1);
7237 if (mode == BLKmode)
7239 rtx new = assign_temp (build_qualified_type
7240 ((*lang_hooks.types.type_for_mode)
7241 (ext_mode, 0),
7242 TYPE_QUAL_CONST), 0, 1, 1);
7244 emit_move_insn (new, op0);
7245 op0 = copy_rtx (new);
7246 PUT_MODE (op0, BLKmode);
7247 set_mem_attributes (op0, exp, 1);
7250 return op0;
7253 /* If the result is BLKmode, use that to access the object
7254 now as well. */
7255 if (mode == BLKmode)
7256 mode1 = BLKmode;
7258 /* Get a reference to just this component. */
7259 if (modifier == EXPAND_CONST_ADDRESS
7260 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7261 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7262 else
7263 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7265 if (op0 == orig_op0)
7266 op0 = copy_rtx (op0);
7268 set_mem_attributes (op0, exp, 0);
7269 if (GET_CODE (XEXP (op0, 0)) == REG)
7270 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7272 MEM_VOLATILE_P (op0) |= volatilep;
7273 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7274 || modifier == EXPAND_CONST_ADDRESS
7275 || modifier == EXPAND_INITIALIZER)
7276 return op0;
7277 else if (target == 0)
7278 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7280 convert_move (target, op0, unsignedp);
7281 return target;
7284 case VTABLE_REF:
7286 rtx insn, before = get_last_insn (), vtbl_ref;
7288 /* Evaluate the interior expression. */
7289 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7290 tmode, modifier);
7292 /* Get or create an instruction off which to hang a note. */
7293 if (REG_P (subtarget))
7295 target = subtarget;
7296 insn = get_last_insn ();
7297 if (insn == before)
7298 abort ();
7299 if (! INSN_P (insn))
7300 insn = prev_nonnote_insn (insn);
7302 else
7304 target = gen_reg_rtx (GET_MODE (subtarget));
7305 insn = emit_move_insn (target, subtarget);
7308 /* Collect the data for the note. */
7309 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7310 vtbl_ref = plus_constant (vtbl_ref,
7311 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7312 /* Discard the initial CONST that was added. */
7313 vtbl_ref = XEXP (vtbl_ref, 0);
7315 REG_NOTES (insn)
7316 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7318 return target;
7321 /* Intended for a reference to a buffer of a file-object in Pascal.
7322 But it's not certain that a special tree code will really be
7323 necessary for these. INDIRECT_REF might work for them. */
7324 case BUFFER_REF:
7325 abort ();
7327 case IN_EXPR:
7329 /* Pascal set IN expression.
7331 Algorithm:
7332 rlo = set_low - (set_low%bits_per_word);
7333 the_word = set [ (index - rlo)/bits_per_word ];
7334 bit_index = index % bits_per_word;
7335 bitmask = 1 << bit_index;
7336 return !!(the_word & bitmask); */
7338 tree set = TREE_OPERAND (exp, 0);
7339 tree index = TREE_OPERAND (exp, 1);
7340 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7341 tree set_type = TREE_TYPE (set);
7342 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7343 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7344 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7345 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7346 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7347 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7348 rtx setaddr = XEXP (setval, 0);
7349 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7350 rtx rlow;
7351 rtx diff, quo, rem, addr, bit, result;
7353 /* If domain is empty, answer is no. Likewise if index is constant
7354 and out of bounds. */
7355 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7356 && TREE_CODE (set_low_bound) == INTEGER_CST
7357 && tree_int_cst_lt (set_high_bound, set_low_bound))
7358 || (TREE_CODE (index) == INTEGER_CST
7359 && TREE_CODE (set_low_bound) == INTEGER_CST
7360 && tree_int_cst_lt (index, set_low_bound))
7361 || (TREE_CODE (set_high_bound) == INTEGER_CST
7362 && TREE_CODE (index) == INTEGER_CST
7363 && tree_int_cst_lt (set_high_bound, index))))
7364 return const0_rtx;
7366 if (target == 0)
7367 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7369 /* If we get here, we have to generate the code for both cases
7370 (in range and out of range). */
7372 op0 = gen_label_rtx ();
7373 op1 = gen_label_rtx ();
7375 if (! (GET_CODE (index_val) == CONST_INT
7376 && GET_CODE (lo_r) == CONST_INT))
7377 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7378 GET_MODE (index_val), iunsignedp, op1);
7380 if (! (GET_CODE (index_val) == CONST_INT
7381 && GET_CODE (hi_r) == CONST_INT))
7382 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7383 GET_MODE (index_val), iunsignedp, op1);
7385 /* Calculate the element number of bit zero in the first word
7386 of the set. */
7387 if (GET_CODE (lo_r) == CONST_INT)
7388 rlow = GEN_INT (INTVAL (lo_r)
7389 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7390 else
7391 rlow = expand_binop (index_mode, and_optab, lo_r,
7392 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7393 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7395 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7396 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7398 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7399 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7400 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7401 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7403 addr = memory_address (byte_mode,
7404 expand_binop (index_mode, add_optab, diff,
7405 setaddr, NULL_RTX, iunsignedp,
7406 OPTAB_LIB_WIDEN));
7408 /* Extract the bit we want to examine. */
7409 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7410 gen_rtx_MEM (byte_mode, addr),
7411 make_tree (TREE_TYPE (index), rem),
7412 NULL_RTX, 1);
7413 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7414 GET_MODE (target) == byte_mode ? target : 0,
7415 1, OPTAB_LIB_WIDEN);
7417 if (result != target)
7418 convert_move (target, result, 1);
7420 /* Output the code to handle the out-of-range case. */
7421 emit_jump (op0);
7422 emit_label (op1);
7423 emit_move_insn (target, const0_rtx);
7424 emit_label (op0);
7425 return target;
7428 case WITH_CLEANUP_EXPR:
7429 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7431 WITH_CLEANUP_EXPR_RTL (exp)
7432 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7433 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7434 CLEANUP_EH_ONLY (exp));
7436 /* That's it for this cleanup. */
7437 TREE_OPERAND (exp, 1) = 0;
7439 return WITH_CLEANUP_EXPR_RTL (exp);
7441 case CLEANUP_POINT_EXPR:
7443 /* Start a new binding layer that will keep track of all cleanup
7444 actions to be performed. */
7445 expand_start_bindings (2);
7447 target_temp_slot_level = temp_slot_level;
7449 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7450 /* If we're going to use this value, load it up now. */
7451 if (! ignore)
7452 op0 = force_not_mem (op0);
7453 preserve_temp_slots (op0);
7454 expand_end_bindings (NULL_TREE, 0, 0);
7456 return op0;
7458 case CALL_EXPR:
7459 /* Check for a built-in function. */
7460 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7461 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7462 == FUNCTION_DECL)
7463 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7465 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7466 == BUILT_IN_FRONTEND)
7467 return (*lang_hooks.expand_expr) (exp, original_target,
7468 tmode, modifier);
7469 else
7470 return expand_builtin (exp, target, subtarget, tmode, ignore);
7473 return expand_call (exp, target, ignore);
7475 case NON_LVALUE_EXPR:
7476 case NOP_EXPR:
7477 case CONVERT_EXPR:
7478 case REFERENCE_EXPR:
7479 if (TREE_OPERAND (exp, 0) == error_mark_node)
7480 return const0_rtx;
7482 if (TREE_CODE (type) == UNION_TYPE)
7484 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7486 /* If both input and output are BLKmode, this conversion isn't doing
7487 anything except possibly changing memory attribute. */
7488 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7490 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7491 modifier);
7493 result = copy_rtx (result);
7494 set_mem_attributes (result, exp, 0);
7495 return result;
7498 if (target == 0)
7499 target = assign_temp (type, 0, 1, 1);
7501 if (GET_CODE (target) == MEM)
7502 /* Store data into beginning of memory target. */
7503 store_expr (TREE_OPERAND (exp, 0),
7504 adjust_address (target, TYPE_MODE (valtype), 0),
7505 modifier == EXPAND_STACK_PARM ? 2 : 0);
7507 else if (GET_CODE (target) == REG)
7508 /* Store this field into a union of the proper type. */
7509 store_field (target,
7510 MIN ((int_size_in_bytes (TREE_TYPE
7511 (TREE_OPERAND (exp, 0)))
7512 * BITS_PER_UNIT),
7513 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7514 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7515 VOIDmode, 0, type, 0);
7516 else
7517 abort ();
7519 /* Return the entire union. */
7520 return target;
7523 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7525 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7526 modifier);
7528 /* If the signedness of the conversion differs and OP0 is
7529 a promoted SUBREG, clear that indication since we now
7530 have to do the proper extension. */
7531 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7532 && GET_CODE (op0) == SUBREG)
7533 SUBREG_PROMOTED_VAR_P (op0) = 0;
7535 return op0;
7538 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7539 if (GET_MODE (op0) == mode)
7540 return op0;
7542 /* If OP0 is a constant, just convert it into the proper mode. */
7543 if (CONSTANT_P (op0))
7545 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7546 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7548 if (modifier == EXPAND_INITIALIZER)
7549 return simplify_gen_subreg (mode, op0, inner_mode,
7550 subreg_lowpart_offset (mode,
7551 inner_mode));
7552 else
7553 return convert_modes (mode, inner_mode, op0,
7554 TREE_UNSIGNED (inner_type));
7557 if (modifier == EXPAND_INITIALIZER)
7558 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7560 if (target == 0)
7561 return
7562 convert_to_mode (mode, op0,
7563 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7564 else
7565 convert_move (target, op0,
7566 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7567 return target;
7569 case VIEW_CONVERT_EXPR:
7570 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7572 /* If the input and output modes are both the same, we are done.
7573 Otherwise, if neither mode is BLKmode and both are integral and within
7574 a word, we can use gen_lowpart. If neither is true, make sure the
7575 operand is in memory and convert the MEM to the new mode. */
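/* For example (illustration only): viewing an SImode value as another
   integer mode no wider than a word can use gen_lowpart directly,
   whereas viewing a float as an integer does not satisfy the MODE_INT
   test and falls through to the memory path below, where the bits are
   stored and then re-read in the new mode.  */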
7576 if (TYPE_MODE (type) == GET_MODE (op0))
7578 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7579 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7580 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7581 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7582 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7583 op0 = gen_lowpart (TYPE_MODE (type), op0);
7584 else if (GET_CODE (op0) != MEM)
7586 /* If the operand is not a MEM, force it into memory. Since we
7587 are going to be changing the mode of the MEM, don't call
7588 force_const_mem for constants because we don't allow pool
7589 constants to change mode. */
7590 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7592 if (TREE_ADDRESSABLE (exp))
7593 abort ();
7595 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7596 target
7597 = assign_stack_temp_for_type
7598 (TYPE_MODE (inner_type),
7599 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7601 emit_move_insn (target, op0);
7602 op0 = target;
7605 /* At this point, OP0 is in the correct mode. If the output type is such
7606 that the operand is known to be aligned, indicate that it is.
7607 Otherwise, we need only be concerned about alignment for non-BLKmode
7608 results. */
7609 if (GET_CODE (op0) == MEM)
7611 op0 = copy_rtx (op0);
7613 if (TYPE_ALIGN_OK (type))
7614 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7615 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7616 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7618 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7619 HOST_WIDE_INT temp_size
7620 = MAX (int_size_in_bytes (inner_type),
7621 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7622 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7623 temp_size, 0, type);
7624 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7626 if (TREE_ADDRESSABLE (exp))
7627 abort ();
7629 if (GET_MODE (op0) == BLKmode)
7630 emit_block_move (new_with_op0_mode, op0,
7631 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7632 (modifier == EXPAND_STACK_PARM
7633 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7634 else
7635 emit_move_insn (new_with_op0_mode, op0);
7637 op0 = new;
7640 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7643 return op0;
7645 case PLUS_EXPR:
7646 this_optab = ! unsignedp && flag_trapv
7647 && (GET_MODE_CLASS (mode) == MODE_INT)
7648 ? addv_optab : add_optab;
7650 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7651 something else, make sure we add the register to the constant and
7652 then to the other thing. This case can occur during strength
7653 reduction and doing it this way will produce better code if the
7654 frame pointer or argument pointer is eliminated.
7656 fold-const.c will ensure that the constant is always in the inner
7657 PLUS_EXPR, so the only case we need to do anything about is if
7658 sp, ap, or fp is our second argument, in which case we must swap
7659 the innermost first argument and our second argument. */
7661 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7662 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7663 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7664 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7665 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7666 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7668 tree t = TREE_OPERAND (exp, 1);
7670 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7671 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7674 /* If the result is to be ptr_mode and we are adding an integer to
7675 something, we might be forming a constant. So try to use
7676 plus_constant. If it produces a sum and we can't accept it,
7677 use force_operand. This allows P = &ARR[const] to generate
7678 efficient code on machines where a SYMBOL_REF is not a valid
7679 address.
7681 If this is an EXPAND_SUM call, always return the sum. */
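/* A rough sketch of the &ARR[const] case mentioned above (assuming a
   4-byte int; not the exact RTL): for
   static int arr[10]; ... p = &arr[3];
   the address is a SYMBOL_REF for arr plus the byte offset 12, and
   plus_constant folds it into the canonical
   (const (plus (symbol_ref "arr") (const_int 12)));
   if the target cannot use that form as an address directly,
   force_operand loads it into a register.  */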
7682 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7683 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7685 if (modifier == EXPAND_STACK_PARM)
7686 target = 0;
7687 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7688 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7689 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7691 rtx constant_part;
7693 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7694 EXPAND_SUM);
7695 /* Use immed_double_const to ensure that the constant is
7696 truncated according to the mode of OP1, then sign extended
7697 to a HOST_WIDE_INT. Using the constant directly can result
7698 in non-canonical RTL in a 64x32 cross compile. */
7699 constant_part
7700 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7701 (HOST_WIDE_INT) 0,
7702 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7703 op1 = plus_constant (op1, INTVAL (constant_part));
7704 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7705 op1 = force_operand (op1, target);
7706 return op1;
7709 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7710 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7711 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7713 rtx constant_part;
7715 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7716 (modifier == EXPAND_INITIALIZER
7717 ? EXPAND_INITIALIZER : EXPAND_SUM));
7718 if (! CONSTANT_P (op0))
7720 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7721 VOIDmode, modifier);
7722 /* Return a PLUS if modifier says it's OK. */
7723 if (modifier == EXPAND_SUM
7724 || modifier == EXPAND_INITIALIZER)
7725 return simplify_gen_binary (PLUS, mode, op0, op1);
7726 goto binop2;
7728 /* Use immed_double_const to ensure that the constant is
7729 truncated according to the mode of OP0, then sign extended
7730 to a HOST_WIDE_INT. Using the constant directly can result
7731 in non-canonical RTL in a 64x32 cross compile. */
7732 constant_part
7733 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7734 (HOST_WIDE_INT) 0,
7735 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7736 op0 = plus_constant (op0, INTVAL (constant_part));
7737 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7738 op0 = force_operand (op0, target);
7739 return op0;
7743 /* No sense saving up arithmetic to be done
7744 if it's all in the wrong mode to form part of an address.
7745 And force_operand won't know whether to sign-extend or
7746 zero-extend. */
7747 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7748 || mode != ptr_mode)
7750 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7751 subtarget, &op0, &op1, 0);
7752 if (op0 == const0_rtx)
7753 return op1;
7754 if (op1 == const0_rtx)
7755 return op0;
7756 goto binop2;
7759 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7760 subtarget, &op0, &op1, modifier);
7761 return simplify_gen_binary (PLUS, mode, op0, op1);
7763 case MINUS_EXPR:
7764 /* For initializers, we are allowed to return a MINUS of two
7765 symbolic constants. Here we handle all cases when both operands
7766 are constant. */
7767 /* Handle difference of two symbolic constants,
7768 for the sake of an initializer. */
7769 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7770 && really_constant_p (TREE_OPERAND (exp, 0))
7771 && really_constant_p (TREE_OPERAND (exp, 1)))
7773 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7774 NULL_RTX, &op0, &op1, modifier);
7776 /* If the last operand is a CONST_INT, use plus_constant of
7777 the negated constant. Else make the MINUS. */
7778 if (GET_CODE (op1) == CONST_INT)
7779 return plus_constant (op0, - INTVAL (op1));
7780 else
7781 return gen_rtx_MINUS (mode, op0, op1);
7784 this_optab = ! unsignedp && flag_trapv
7785 && (GET_MODE_CLASS(mode) == MODE_INT)
7786 ? subv_optab : sub_optab;
7788 /* No sense saving up arithmetic to be done
7789 if it's all in the wrong mode to form part of an address.
7790 And force_operand won't know whether to sign-extend or
7791 zero-extend. */
7792 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7793 || mode != ptr_mode)
7794 goto binop;
7796 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7797 subtarget, &op0, &op1, modifier);
7799 /* Convert A - const to A + (-const). */
7800 if (GET_CODE (op1) == CONST_INT)
7802 op1 = negate_rtx (mode, op1);
7803 return simplify_gen_binary (PLUS, mode, op0, op1);
7806 goto binop2;
7808 case MULT_EXPR:
7809 /* If first operand is constant, swap them.
7810 Thus the following special case checks need only
7811 check the second operand. */
7812 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7814 tree t1 = TREE_OPERAND (exp, 0);
7815 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7816 TREE_OPERAND (exp, 1) = t1;
7819 /* Attempt to return something suitable for generating an
7820 indexed address, for machines that support that. */
7822 if (modifier == EXPAND_SUM && mode == ptr_mode
7823 && host_integerp (TREE_OPERAND (exp, 1), 0))
7825 tree exp1 = TREE_OPERAND (exp, 1);
7827 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7828 EXPAND_SUM);
7830 if (GET_CODE (op0) != REG)
7831 op0 = force_operand (op0, NULL_RTX);
7832 if (GET_CODE (op0) != REG)
7833 op0 = copy_to_mode_reg (mode, op0);
7835 return gen_rtx_MULT (mode, op0,
7836 gen_int_mode (tree_low_cst (exp1, 0),
7837 TYPE_MODE (TREE_TYPE (exp1))));
7840 if (modifier == EXPAND_STACK_PARM)
7841 target = 0;
7843 /* Check for multiplying things that have been extended
7844 from a narrower type. If this machine supports multiplying
7845 in that narrower type with a result in the desired type,
7846 do it that way, and avoid the explicit type-conversion. */
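/* For example (a sketch, not tied to any particular target): with
   32-bit int and 64-bit long long,
   long long z = (long long) x * (long long) y;
   can use a 32x32->64 widening multiply (the mulsidi3/umulsidi3-style
   patterns) on the narrow operands instead of widening both operands
   to 64 bits and doing a full 64x64 multiply.  */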
7847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7848 && TREE_CODE (type) == INTEGER_TYPE
7849 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7850 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7851 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7852 && int_fits_type_p (TREE_OPERAND (exp, 1),
7853 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7854 /* Don't use a widening multiply if a shift will do. */
7855 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7856 > HOST_BITS_PER_WIDE_INT)
7857 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7859 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7860 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7862 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7863 /* If both operands are extended, they must either both
7864 be zero-extended or both be sign-extended. */
7865 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7867 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7869 enum machine_mode innermode
7870 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7871 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7872 ? smul_widen_optab : umul_widen_optab);
7873 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7874 ? umul_widen_optab : smul_widen_optab);
7875 if (mode == GET_MODE_WIDER_MODE (innermode))
7877 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7879 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7880 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7881 TREE_OPERAND (exp, 1),
7882 NULL_RTX, &op0, &op1, 0);
7883 else
7884 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7885 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7886 NULL_RTX, &op0, &op1, 0);
7887 goto binop2;
7889 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7890 && innermode == word_mode)
7892 rtx htem;
7893 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7894 NULL_RTX, VOIDmode, 0);
7895 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7896 op1 = convert_modes (innermode, mode,
7897 expand_expr (TREE_OPERAND (exp, 1),
7898 NULL_RTX, VOIDmode, 0),
7899 unsignedp);
7900 else
7901 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7902 NULL_RTX, VOIDmode, 0);
7903 temp = expand_binop (mode, other_optab, op0, op1, target,
7904 unsignedp, OPTAB_LIB_WIDEN);
7905 htem = expand_mult_highpart_adjust (innermode,
7906 gen_highpart (innermode, temp),
7907 op0, op1,
7908 gen_highpart (innermode, temp),
7909 unsignedp);
7910 emit_move_insn (gen_highpart (innermode, temp), htem);
7911 return temp;
7915 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7916 subtarget, &op0, &op1, 0);
7917 return expand_mult (mode, op0, op1, target, unsignedp);
7919 case TRUNC_DIV_EXPR:
7920 case FLOOR_DIV_EXPR:
7921 case CEIL_DIV_EXPR:
7922 case ROUND_DIV_EXPR:
7923 case EXACT_DIV_EXPR:
7924 if (modifier == EXPAND_STACK_PARM)
7925 target = 0;
7926 /* Possible optimization: compute the dividend with EXPAND_SUM;
7927 then, if the divisor is constant, we can optimize the case
7928 where some terms of the dividend have coefficients divisible by it. */
7929 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7930 subtarget, &op0, &op1, 0);
7931 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7933 case RDIV_EXPR:
7934 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7935 saving an expensive divide. If not, combine will rebuild the original
7936 computation. */
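/* For instance, with -funsafe-math-optimizations,
   x = a / d;  y = b / d;
   both become multiplications by the common subexpression 1/d, so CSE
   may share a single reciprocal, leaving one divide instead of two.  */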
7937 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7938 && TREE_CODE (type) == REAL_TYPE
7939 && !real_onep (TREE_OPERAND (exp, 0)))
7940 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7941 build (RDIV_EXPR, type,
7942 build_real (type, dconst1),
7943 TREE_OPERAND (exp, 1))),
7944 target, tmode, modifier);
7945 this_optab = sdiv_optab;
7946 goto binop;
7948 case TRUNC_MOD_EXPR:
7949 case FLOOR_MOD_EXPR:
7950 case CEIL_MOD_EXPR:
7951 case ROUND_MOD_EXPR:
7952 if (modifier == EXPAND_STACK_PARM)
7953 target = 0;
7954 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7955 subtarget, &op0, &op1, 0);
7956 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7958 case FIX_ROUND_EXPR:
7959 case FIX_FLOOR_EXPR:
7960 case FIX_CEIL_EXPR:
7961 abort (); /* Not used for C. */
7963 case FIX_TRUNC_EXPR:
7964 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7965 if (target == 0 || modifier == EXPAND_STACK_PARM)
7966 target = gen_reg_rtx (mode);
7967 expand_fix (target, op0, unsignedp);
7968 return target;
7970 case FLOAT_EXPR:
7971 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7972 if (target == 0 || modifier == EXPAND_STACK_PARM)
7973 target = gen_reg_rtx (mode);
7974 /* expand_float can't figure out what to do if FROM has VOIDmode.
7975 So give it the correct mode. With -O, cse will optimize this. */
7976 if (GET_MODE (op0) == VOIDmode)
7977 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7978 op0);
7979 expand_float (target, op0,
7980 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7981 return target;
7983 case NEGATE_EXPR:
7984 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7985 if (modifier == EXPAND_STACK_PARM)
7986 target = 0;
7987 temp = expand_unop (mode,
7988 ! unsignedp && flag_trapv
7989 && (GET_MODE_CLASS(mode) == MODE_INT)
7990 ? negv_optab : neg_optab, op0, target, 0);
7991 if (temp == 0)
7992 abort ();
7993 return temp;
7995 case ABS_EXPR:
7996 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7997 if (modifier == EXPAND_STACK_PARM)
7998 target = 0;
8000 /* ABS_EXPR is not valid for complex arguments. */
8001 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8002 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8003 abort ();
8005 /* Unsigned abs is simply the operand. Testing here means we don't
8006 risk generating incorrect code below. */
8007 if (TREE_UNSIGNED (type))
8008 return op0;
8010 return expand_abs (mode, op0, target, unsignedp,
8011 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8013 case MAX_EXPR:
8014 case MIN_EXPR:
8015 target = original_target;
8016 if (target == 0
8017 || modifier == EXPAND_STACK_PARM
8018 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8019 || GET_MODE (target) != mode
8020 || (GET_CODE (target) == REG
8021 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8022 target = gen_reg_rtx (mode);
8023 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8024 target, &op0, &op1, 0);
8026 /* First try to do it with a special MIN or MAX instruction.
8027 If that does not win, use a conditional jump to select the proper
8028 value. */
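/* Schematically, the conditional-jump fallback emitted below is, for
   MAX_EXPR (with <= instead of >= for MIN_EXPR):
   target = op0;
   if (target >= op1) goto done;
   target = op1;
   done:  */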
8029 this_optab = (TREE_UNSIGNED (type)
8030 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8031 : (code == MIN_EXPR ? smin_optab : smax_optab));
8033 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8034 OPTAB_WIDEN);
8035 if (temp != 0)
8036 return temp;
8038 /* At this point, a MEM target is no longer useful; we will get better
8039 code without it. */
8041 if (GET_CODE (target) == MEM)
8042 target = gen_reg_rtx (mode);
8044 /* If op1 was placed in target, swap op0 and op1. */
8045 if (target != op0 && target == op1)
8047 rtx tem = op0;
8048 op0 = op1;
8049 op1 = tem;
8052 if (target != op0)
8053 emit_move_insn (target, op0);
8055 op0 = gen_label_rtx ();
8057 /* If this mode is an integer too wide to compare properly,
8058 compare word by word. Rely on cse to optimize constant cases. */
8059 if (GET_MODE_CLASS (mode) == MODE_INT
8060 && ! can_compare_p (GE, mode, ccp_jump))
8062 if (code == MAX_EXPR)
8063 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8064 target, op1, NULL_RTX, op0);
8065 else
8066 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8067 op1, target, NULL_RTX, op0);
8069 else
8071 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8072 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8073 unsignedp, mode, NULL_RTX, NULL_RTX,
8074 op0);
8076 emit_move_insn (target, op1);
8077 emit_label (op0);
8078 return target;
8080 case BIT_NOT_EXPR:
8081 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8082 if (modifier == EXPAND_STACK_PARM)
8083 target = 0;
8084 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8085 if (temp == 0)
8086 abort ();
8087 return temp;
8089 /* ??? Can optimize bitwise operations with one arg constant.
8090 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8091 and (a bitwise1 b) bitwise2 b (etc)
8092 but that is probably not worthwhile. */
8094 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8095 boolean values when we want in all cases to compute both of them. In
8096 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8097 as actual zero-or-1 values and then bitwise anding. In cases where
8098 there cannot be any side effects, better code would be made by
8099 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8100 how to recognize those cases. */
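/* E.g. a TRUTH_AND_EXPR of two comparisons, a < b and c < d, is
   typically expanded as two 0-or-1 store-flag values bitwise-ANDed
   together with no branch, whereas TRUTH_ANDIF_EXPR (&&) would branch
   around the second comparison.  */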
8102 case TRUTH_AND_EXPR:
8103 case BIT_AND_EXPR:
8104 this_optab = and_optab;
8105 goto binop;
8107 case TRUTH_OR_EXPR:
8108 case BIT_IOR_EXPR:
8109 this_optab = ior_optab;
8110 goto binop;
8112 case TRUTH_XOR_EXPR:
8113 case BIT_XOR_EXPR:
8114 this_optab = xor_optab;
8115 goto binop;
8117 case LSHIFT_EXPR:
8118 case RSHIFT_EXPR:
8119 case LROTATE_EXPR:
8120 case RROTATE_EXPR:
8121 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8122 subtarget = 0;
8123 if (modifier == EXPAND_STACK_PARM)
8124 target = 0;
8125 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8126 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8127 unsignedp);
8129 /* Could determine the answer when only additive constants differ. Also,
8130 the addition of one can be handled by changing the condition. */
8131 case LT_EXPR:
8132 case LE_EXPR:
8133 case GT_EXPR:
8134 case GE_EXPR:
8135 case EQ_EXPR:
8136 case NE_EXPR:
8137 case UNORDERED_EXPR:
8138 case ORDERED_EXPR:
8139 case UNLT_EXPR:
8140 case UNLE_EXPR:
8141 case UNGT_EXPR:
8142 case UNGE_EXPR:
8143 case UNEQ_EXPR:
8144 temp = do_store_flag (exp,
8145 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8146 tmode != VOIDmode ? tmode : mode, 0);
8147 if (temp != 0)
8148 return temp;
8150 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8151 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8152 && original_target
8153 && GET_CODE (original_target) == REG
8154 && (GET_MODE (original_target)
8155 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8157 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8158 VOIDmode, 0);
8160 /* If temp is constant, we can just compute the result. */
8161 if (GET_CODE (temp) == CONST_INT)
8163 if (INTVAL (temp) != 0)
8164 emit_move_insn (target, const1_rtx);
8165 else
8166 emit_move_insn (target, const0_rtx);
8168 return target;
8171 if (temp != original_target)
8173 enum machine_mode mode1 = GET_MODE (temp);
8174 if (mode1 == VOIDmode)
8175 mode1 = tmode != VOIDmode ? tmode : mode;
8177 temp = copy_to_mode_reg (mode1, temp);
8180 op1 = gen_label_rtx ();
8181 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8182 GET_MODE (temp), unsignedp, op1);
8183 emit_move_insn (temp, const1_rtx);
8184 emit_label (op1);
8185 return temp;
8188 /* If no set-flag instruction, must generate a conditional
8189 store into a temporary variable. Drop through
8190 and handle this like && and ||. */
8192 case TRUTH_ANDIF_EXPR:
8193 case TRUTH_ORIF_EXPR:
8194 if (! ignore
8195 && (target == 0
8196 || modifier == EXPAND_STACK_PARM
8197 || ! safe_from_p (target, exp, 1)
8198 /* Make sure we don't have a hard reg (such as function's return
8199 value) live across basic blocks, if not optimizing. */
8200 || (!optimize && GET_CODE (target) == REG
8201 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8202 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8204 if (target)
8205 emit_clr_insn (target);
8207 op1 = gen_label_rtx ();
8208 jumpifnot (exp, op1);
8210 if (target)
8211 emit_0_to_1_insn (target);
8213 emit_label (op1);
8214 return ignore ? const0_rtx : target;
8216 case TRUTH_NOT_EXPR:
8217 if (modifier == EXPAND_STACK_PARM)
8218 target = 0;
8219 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8220 /* The parser is careful to generate TRUTH_NOT_EXPR
8221 only with operands that are always zero or one. */
8222 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8223 target, 1, OPTAB_LIB_WIDEN);
8224 if (temp == 0)
8225 abort ();
8226 return temp;
8228 case COMPOUND_EXPR:
8229 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8230 emit_queue ();
8231 return expand_expr (TREE_OPERAND (exp, 1),
8232 (ignore ? const0_rtx : target),
8233 VOIDmode, modifier);
8235 case COND_EXPR:
8236 /* If we would have a "singleton" (see below) were it not for a
8237 conversion in each arm, bring that conversion back out. */
8238 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8239 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8240 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8241 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8243 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8244 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8246 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8247 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8248 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8249 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8250 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8251 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8252 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8253 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8254 return expand_expr (build1 (NOP_EXPR, type,
8255 build (COND_EXPR, TREE_TYPE (iftrue),
8256 TREE_OPERAND (exp, 0),
8257 iftrue, iffalse)),
8258 target, tmode, modifier);
8262 /* Note that COND_EXPRs whose type is a structure or union
8263 are required to be constructed to contain assignments of
8264 a temporary variable, so that we can evaluate them here
8265 for side effect only. If type is void, we must do likewise. */
8267 /* If an arm of the branch requires a cleanup,
8268 only that cleanup is performed. */
8270 tree singleton = 0;
8271 tree binary_op = 0, unary_op = 0;
8273 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8274 convert it to our mode, if necessary. */
8275 if (integer_onep (TREE_OPERAND (exp, 1))
8276 && integer_zerop (TREE_OPERAND (exp, 2))
8277 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8279 if (ignore)
8281 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8282 modifier);
8283 return const0_rtx;
8286 if (modifier == EXPAND_STACK_PARM)
8287 target = 0;
8288 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8289 if (GET_MODE (op0) == mode)
8290 return op0;
8292 if (target == 0)
8293 target = gen_reg_rtx (mode);
8294 convert_move (target, op0, unsignedp);
8295 return target;
8298 /* Check for X ? A + B : A. If we have this, we can copy A to the
8299 output and conditionally add B. Similarly for unary operations.
8300 Don't do this if X has side-effects because those side effects
8301 might affect A or B and the "?" operation is a sequence point in
8302 ANSI. (operand_equal_p tests for side effects.) */
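/* Schematically, x ? a + b : a with side-effect-free operands is
   emitted as
   target = a;
   if (x) target = target + b;
   rather than evaluating A separately in both arms.  */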
8304 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8305 && operand_equal_p (TREE_OPERAND (exp, 2),
8306 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8307 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8308 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8309 && operand_equal_p (TREE_OPERAND (exp, 1),
8310 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8311 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8312 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8313 && operand_equal_p (TREE_OPERAND (exp, 2),
8314 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8315 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8316 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8317 && operand_equal_p (TREE_OPERAND (exp, 1),
8318 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8319 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8321 /* If we are not to produce a result, we have no target. Otherwise,
8322 if a target was specified use it; it will not be used as an
8323 intermediate target unless it is safe. If no target, use a
8324 temporary. */
8326 if (ignore)
8327 temp = 0;
8328 else if (modifier == EXPAND_STACK_PARM)
8329 temp = assign_temp (type, 0, 0, 1);
8330 else if (original_target
8331 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8332 || (singleton && GET_CODE (original_target) == REG
8333 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8334 && original_target == var_rtx (singleton)))
8335 && GET_MODE (original_target) == mode
8336 #ifdef HAVE_conditional_move
8337 && (! can_conditionally_move_p (mode)
8338 || GET_CODE (original_target) == REG
8339 || TREE_ADDRESSABLE (type))
8340 #endif
8341 && (GET_CODE (original_target) != MEM
8342 || TREE_ADDRESSABLE (type)))
8343 temp = original_target;
8344 else if (TREE_ADDRESSABLE (type))
8345 abort ();
8346 else
8347 temp = assign_temp (type, 0, 0, 1);
8349 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8350 do the test of X as a store-flag operation, do this as
8351 A + ((X != 0) << log C). Similarly for other simple binary
8352 operators. Only do for C == 1 if BRANCH_COST is low. */
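/* Concretely: x ? a + 4 : a becomes a + ((x != 0) << 2). When branches
   are cheap (BRANCH_COST below 3), only x ? a + 1 : a, i.e.
   a + (x != 0), is handled this way.  */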
8353 if (temp && singleton && binary_op
8354 && (TREE_CODE (binary_op) == PLUS_EXPR
8355 || TREE_CODE (binary_op) == MINUS_EXPR
8356 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8357 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8358 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8359 : integer_onep (TREE_OPERAND (binary_op, 1)))
8360 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8362 rtx result;
8363 tree cond;
8364 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8365 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8366 ? addv_optab : add_optab)
8367 : TREE_CODE (binary_op) == MINUS_EXPR
8368 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8369 ? subv_optab : sub_optab)
8370 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8371 : xor_optab);
8373 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8374 if (singleton == TREE_OPERAND (exp, 1))
8375 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8376 else
8377 cond = TREE_OPERAND (exp, 0);
8379 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8380 ? temp : NULL_RTX),
8381 mode, BRANCH_COST <= 1);
8383 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8384 result = expand_shift (LSHIFT_EXPR, mode, result,
8385 build_int_2 (tree_log2
8386 (TREE_OPERAND
8387 (binary_op, 1)),
8389 (safe_from_p (temp, singleton, 1)
8390 ? temp : NULL_RTX), 0);
8392 if (result)
8394 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8395 return expand_binop (mode, boptab, op1, result, temp,
8396 unsignedp, OPTAB_LIB_WIDEN);
8400 do_pending_stack_adjust ();
8401 NO_DEFER_POP;
8402 op0 = gen_label_rtx ();
8404 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8406 if (temp != 0)
8408 /* If the target conflicts with the other operand of the
8409 binary op, we can't use it. Also, we can't use the target
8410 if it is a hard register, because evaluating the condition
8411 might clobber it. */
8412 if ((binary_op
8413 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8414 || (GET_CODE (temp) == REG
8415 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8416 temp = gen_reg_rtx (mode);
8417 store_expr (singleton, temp,
8418 modifier == EXPAND_STACK_PARM ? 2 : 0);
8420 else
8421 expand_expr (singleton,
8422 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8423 if (singleton == TREE_OPERAND (exp, 1))
8424 jumpif (TREE_OPERAND (exp, 0), op0);
8425 else
8426 jumpifnot (TREE_OPERAND (exp, 0), op0);
8428 start_cleanup_deferral ();
8429 if (binary_op && temp == 0)
8430 /* Just touch the other operand. */
8431 expand_expr (TREE_OPERAND (binary_op, 1),
8432 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8433 else if (binary_op)
8434 store_expr (build (TREE_CODE (binary_op), type,
8435 make_tree (type, temp),
8436 TREE_OPERAND (binary_op, 1)),
8437 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8438 else
8439 store_expr (build1 (TREE_CODE (unary_op), type,
8440 make_tree (type, temp)),
8441 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8442 op1 = op0;
8444 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8445 comparison operator. If we have one of these cases, set the
8446 output to A, branch on A (cse will merge these two references),
8447 then set the output to FOO. */
8448 else if (temp
8449 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8450 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8451 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8452 TREE_OPERAND (exp, 1), 0)
8453 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8454 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8455 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8457 if (GET_CODE (temp) == REG
8458 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8459 temp = gen_reg_rtx (mode);
8460 store_expr (TREE_OPERAND (exp, 1), temp,
8461 modifier == EXPAND_STACK_PARM ? 2 : 0);
8462 jumpif (TREE_OPERAND (exp, 0), op0);
8464 start_cleanup_deferral ();
8465 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8466 store_expr (TREE_OPERAND (exp, 2), temp,
8467 modifier == EXPAND_STACK_PARM ? 2 : 0);
8468 else
8469 expand_expr (TREE_OPERAND (exp, 2),
8470 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8471 op1 = op0;
8473 else if (temp
8474 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8475 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8476 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8477 TREE_OPERAND (exp, 2), 0)
8478 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8479 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8480 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8482 if (GET_CODE (temp) == REG
8483 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8484 temp = gen_reg_rtx (mode);
8485 store_expr (TREE_OPERAND (exp, 2), temp,
8486 modifier == EXPAND_STACK_PARM ? 2 : 0);
8487 jumpifnot (TREE_OPERAND (exp, 0), op0);
8489 start_cleanup_deferral ();
8490 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8491 store_expr (TREE_OPERAND (exp, 1), temp,
8492 modifier == EXPAND_STACK_PARM ? 2 : 0);
8493 else
8494 expand_expr (TREE_OPERAND (exp, 1),
8495 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8496 op1 = op0;
8498 else
8500 op1 = gen_label_rtx ();
8501 jumpifnot (TREE_OPERAND (exp, 0), op0);
8503 start_cleanup_deferral ();
8505 /* One branch of the cond can be void, if it never returns. For
8506 example A ? throw : E */
8507 if (temp != 0
8508 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8509 store_expr (TREE_OPERAND (exp, 1), temp,
8510 modifier == EXPAND_STACK_PARM ? 2 : 0);
8511 else
8512 expand_expr (TREE_OPERAND (exp, 1),
8513 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8514 end_cleanup_deferral ();
8515 emit_queue ();
8516 emit_jump_insn (gen_jump (op1));
8517 emit_barrier ();
8518 emit_label (op0);
8519 start_cleanup_deferral ();
8520 if (temp != 0
8521 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8522 store_expr (TREE_OPERAND (exp, 2), temp,
8523 modifier == EXPAND_STACK_PARM ? 2 : 0);
8524 else
8525 expand_expr (TREE_OPERAND (exp, 2),
8526 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8529 end_cleanup_deferral ();
8531 emit_queue ();
8532 emit_label (op1);
8533 OK_DEFER_POP;
8535 return temp;
8538 case TARGET_EXPR:
8540 /* Something needs to be initialized, but we didn't know
8541 where that thing was when building the tree. For example,
8542 it could be the return value of a function, or a parameter
8543 to a function which is laid out on the stack, or a temporary
8544 variable which must be passed by reference.
8546 We guarantee that the expression will either be constructed
8547 or copied into our original target. */
8549 tree slot = TREE_OPERAND (exp, 0);
8550 tree cleanups = NULL_TREE;
8551 tree exp1;
8553 if (TREE_CODE (slot) != VAR_DECL)
8554 abort ();
8556 if (! ignore)
8557 target = original_target;
8559 /* Set this here so that if we get a target that refers to a
8560 register variable that's already been used, put_reg_into_stack
8561 knows that it should fix up those uses. */
8562 TREE_USED (slot) = 1;
8564 if (target == 0)
8566 if (DECL_RTL_SET_P (slot))
8568 target = DECL_RTL (slot);
8569 /* If we have already expanded the slot, don't do
8570 it again. (mrs) */
8571 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8572 return target;
8574 else
8576 target = assign_temp (type, 2, 0, 1);
8577 /* All temp slots at this level must not conflict. */
8578 preserve_temp_slots (target);
8579 SET_DECL_RTL (slot, target);
8580 if (TREE_ADDRESSABLE (slot))
8581 put_var_into_stack (slot, /*rescan=*/false);
8583 /* Since SLOT is not known to the called function
8584 to belong to its stack frame, we must build an explicit
8585 cleanup. This case occurs when we must build up a reference
8586 to pass as an argument. In this case,
8587 it is very likely that such a reference need not be
8588 built here. */
8590 if (TREE_OPERAND (exp, 2) == 0)
8591 TREE_OPERAND (exp, 2)
8592 = (*lang_hooks.maybe_build_cleanup) (slot);
8593 cleanups = TREE_OPERAND (exp, 2);
8596 else
8598 /* This case does occur, when expanding a parameter which
8599 needs to be constructed on the stack. The target
8600 is the actual stack address that we want to initialize.
8601 The function we call will perform the cleanup in this case. */
8603 /* If we have already assigned it space, use that space,
8604 not the target that we were passed in, as our target
8605 parameter is only a hint. */
8606 if (DECL_RTL_SET_P (slot))
8608 target = DECL_RTL (slot);
8609 /* If we have already expanded the slot, don't do
8610 it again. (mrs) */
8611 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8612 return target;
8614 else
8616 SET_DECL_RTL (slot, target);
8617 /* If we must have an addressable slot, then make sure that
8618 the RTL that we just stored in slot is OK. */
8619 if (TREE_ADDRESSABLE (slot))
8620 put_var_into_stack (slot, /*rescan=*/true);
8624 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8625 /* Mark it as expanded. */
8626 TREE_OPERAND (exp, 1) = NULL_TREE;
8628 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8630 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8632 return target;
8635 case INIT_EXPR:
8637 tree lhs = TREE_OPERAND (exp, 0);
8638 tree rhs = TREE_OPERAND (exp, 1);
8640 temp = expand_assignment (lhs, rhs, ! ignore);
8641 return temp;
8644 case MODIFY_EXPR:
8646 /* If lhs is complex, expand calls in rhs before computing it.
8647 That's so we don't compute a pointer and save it over a
8648 call. If lhs is simple, compute it first so we can give it
8649 as a target if the rhs is just a call. This avoids an
8650 extra temp and copy, and prevents a partial subsumption
8651 that makes bad code. Actually we could treat
8652 component_ref's of vars like vars. */
8654 tree lhs = TREE_OPERAND (exp, 0);
8655 tree rhs = TREE_OPERAND (exp, 1);
8657 temp = 0;
8659 /* Check for |= or &= of a bitfield of size one into another bitfield
8660 of size 1. In this case, (unless we need the result of the
8661 assignment) we can do this more efficiently with a
8662 test followed by an assignment, if necessary.
8664 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8665 things change so we do, this code should be enhanced to
8666 support it. */
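/* For example, with one-bit fields and an unused result,
   s.x |= s.y;  is emitted roughly as  if (s.y) s.x = 1;
   s.x &= s.y;  is emitted roughly as  if (! s.y) s.x = 0;
   replacing the read-modify-write with a test and a plain store.  */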
8667 if (ignore
8668 && TREE_CODE (lhs) == COMPONENT_REF
8669 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8670 || TREE_CODE (rhs) == BIT_AND_EXPR)
8671 && TREE_OPERAND (rhs, 0) == lhs
8672 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8673 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8674 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8676 rtx label = gen_label_rtx ();
8678 do_jump (TREE_OPERAND (rhs, 1),
8679 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8680 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8681 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8682 (TREE_CODE (rhs) == BIT_IOR_EXPR
8683 ? integer_one_node
8684 : integer_zero_node)),
8686 do_pending_stack_adjust ();
8687 emit_label (label);
8688 return const0_rtx;
8691 temp = expand_assignment (lhs, rhs, ! ignore);
8693 return temp;
8696 case RETURN_EXPR:
8697 if (!TREE_OPERAND (exp, 0))
8698 expand_null_return ();
8699 else
8700 expand_return (TREE_OPERAND (exp, 0));
8701 return const0_rtx;
8703 case PREINCREMENT_EXPR:
8704 case PREDECREMENT_EXPR:
8705 return expand_increment (exp, 0, ignore);
8707 case POSTINCREMENT_EXPR:
8708 case POSTDECREMENT_EXPR:
8709 /* Faster to treat as pre-increment if result is not used. */
8710 return expand_increment (exp, ! ignore, ignore);
8712 case ADDR_EXPR:
8713 if (modifier == EXPAND_STACK_PARM)
8714 target = 0;
8715 /* Are we taking the address of a nested function? */
8716 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8717 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8718 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8719 && ! TREE_STATIC (exp))
8721 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8722 op0 = force_operand (op0, target);
8724 /* If we are taking the address of something erroneous, just
8725 return a zero. */
8726 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8727 return const0_rtx;
8728 /* If we are taking the address of a constant and are at the
8729 top level, we have to use output_constant_def since we can't
8730 call force_const_mem at top level. */
8731 else if (cfun == 0
8732 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8733 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8734 == 'c')))
8735 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8736 else
8738 /* We make sure to pass const0_rtx down if we came in with
8739 ignore set, to avoid doing the cleanups twice for something. */
8740 op0 = expand_expr (TREE_OPERAND (exp, 0),
8741 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8742 (modifier == EXPAND_INITIALIZER
8743 ? modifier : EXPAND_CONST_ADDRESS));
8745 /* If we are going to ignore the result, OP0 will have been set
8746 to const0_rtx, so just return it. Don't get confused and
8747 think we are taking the address of the constant. */
8748 if (ignore)
8749 return op0;
8751 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8752 clever and return a REG when given a MEM. */
8753 op0 = protect_from_queue (op0, 1);
8755 /* We would like the object in memory. If it is a constant, we can
8756 have it statically allocated in memory. For a non-constant,
8757 we need to allocate some memory and store the value into it. */
8759 if (CONSTANT_P (op0))
8760 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8761 op0);
8762 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8763 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8764 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8766 /* If the operand is a SAVE_EXPR, we can deal with this by
8767 forcing the SAVE_EXPR into memory. */
8768 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8770 put_var_into_stack (TREE_OPERAND (exp, 0),
8771 /*rescan=*/true);
8772 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8774 else
8776 /* If this object is in a register, it can't be BLKmode. */
8777 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8778 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8780 if (GET_CODE (op0) == PARALLEL)
8781 /* Handle calls that pass values in multiple
8782 non-contiguous locations. The Irix 6 ABI has examples
8783 of this. */
8784 emit_group_store (memloc, op0, inner_type,
8785 int_size_in_bytes (inner_type));
8786 else
8787 emit_move_insn (memloc, op0);
8789 op0 = memloc;
8793 if (GET_CODE (op0) != MEM)
8794 abort ();
8796 mark_temp_addr_taken (op0);
8797 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8799 op0 = XEXP (op0, 0);
8800 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8801 op0 = convert_memory_address (ptr_mode, op0);
8802 return op0;
8805 /* If OP0 is not aligned at least as much as the type requires, we
8806 need to make a temporary, copy OP0 to it, and take the address of
8807 the temporary. We want to use the alignment of the type, not of
8808 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8809 the test for BLKmode means that can't happen. The test for
8810 BLKmode is because we never make mis-aligned MEMs with
8811 non-BLKmode.
8813 We don't need to do this at all if the machine doesn't have
8814 strict alignment. */
8815 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8816 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8817 > MEM_ALIGN (op0))
8818 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8820 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8821 rtx new;
8823 if (TYPE_ALIGN_OK (inner_type))
8824 abort ();
8826 if (TREE_ADDRESSABLE (inner_type))
8828 /* We can't make a bitwise copy of this object, so fail. */
8829 error ("cannot take the address of an unaligned member");
8830 return const0_rtx;
8833 new = assign_stack_temp_for_type
8834 (TYPE_MODE (inner_type),
8835 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8836 : int_size_in_bytes (inner_type),
8837 1, build_qualified_type (inner_type,
8838 (TYPE_QUALS (inner_type)
8839 | TYPE_QUAL_CONST)));
8841 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8842 (modifier == EXPAND_STACK_PARM
8843 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8845 op0 = new;
8848 op0 = force_operand (XEXP (op0, 0), target);
8851 if (flag_force_addr
8852 && GET_CODE (op0) != REG
8853 && modifier != EXPAND_CONST_ADDRESS
8854 && modifier != EXPAND_INITIALIZER
8855 && modifier != EXPAND_SUM)
8856 op0 = force_reg (Pmode, op0);
8858 if (GET_CODE (op0) == REG
8859 && ! REG_USERVAR_P (op0))
8860 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8862 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8863 op0 = convert_memory_address (ptr_mode, op0);
8865 return op0;
8867 case ENTRY_VALUE_EXPR:
8868 abort ();
8870 /* COMPLEX type for Extended Pascal & Fortran */
8871 case COMPLEX_EXPR:
8873 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8874 rtx insns;
8876 /* Get the rtx for the operands. */
8877 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8878 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8880 if (! target)
8881 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8883 start_sequence ();
8885 /* Move the real (op0) and imaginary (op1) parts to their location. */
8886 emit_move_insn (gen_realpart (mode, target), op0);
8887 emit_move_insn (gen_imagpart (mode, target), op1);
8889 insns = get_insns ();
8890 end_sequence ();
8892 /* Complex construction should appear as a single unit. */
8893 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8894 each with a separate pseudo as destination.
8895 It's not correct for flow to treat them as a unit. */
8896 if (GET_CODE (target) != CONCAT)
8897 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8898 else
8899 emit_insn (insns);
8901 return target;
8904 case REALPART_EXPR:
8905 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8906 return gen_realpart (mode, op0);
8908 case IMAGPART_EXPR:
8909 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8910 return gen_imagpart (mode, op0);
8912 case CONJ_EXPR:
8914 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8915 rtx imag_t;
8916 rtx insns;
8918 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8920 if (! target)
8921 target = gen_reg_rtx (mode);
8923 start_sequence ();
8925 /* Store the realpart and the negated imagpart to target. */
8926 emit_move_insn (gen_realpart (partmode, target),
8927 gen_realpart (partmode, op0));
8929 imag_t = gen_imagpart (partmode, target);
8930 temp = expand_unop (partmode,
8931 ! unsignedp && flag_trapv
8932 && (GET_MODE_CLASS(partmode) == MODE_INT)
8933 ? negv_optab : neg_optab,
8934 gen_imagpart (partmode, op0), imag_t, 0);
8935 if (temp != imag_t)
8936 emit_move_insn (imag_t, temp);
8938 insns = get_insns ();
8939 end_sequence ();
8941 /* Conjugate should appear as a single unit.
8942 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8943 each with a separate pseudo as destination.
8944 It's not correct for flow to treat them as a unit. */
8945 if (GET_CODE (target) != CONCAT)
8946 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8947 else
8948 emit_insn (insns);
8950 return target;
8953 case TRY_CATCH_EXPR:
8955 tree handler = TREE_OPERAND (exp, 1);
8957 expand_eh_region_start ();
8959 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8961 expand_eh_region_end_cleanup (handler);
8963 return op0;
8966 case TRY_FINALLY_EXPR:
8968 tree try_block = TREE_OPERAND (exp, 0);
8969 tree finally_block = TREE_OPERAND (exp, 1);
8971 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8973 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8974 is not sufficient, so we cannot expand the block twice.
8975 Instead we play games with GOTO_SUBROUTINE_EXPR to let us
8976 expand the thing only once. */
8977 /* When not optimizing, we go ahead with this form since
8978 (1) user breakpoints operate more predictably without
8979 code duplication, and
8980 (2) we're not running any of the global optimizers
8981 that would explode in time/space with the highly
8982 connected CFG created by the indirect branching. */
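/* A rough sketch of the code emitted on this path (the label names
   are illustrative only):
       <try_block>
       return_link = &&resume;  goto finally;
     resume:
       goto done;
     finally:
       <finally_block>
       goto *return_link;
     done:
   The indirect jump through RETURN_LINK is what lets FINALLY_BLOCK be
   expanded only once.  */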
8984 rtx finally_label = gen_label_rtx ();
8985 rtx done_label = gen_label_rtx ();
8986 rtx return_link = gen_reg_rtx (Pmode);
8987 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8988 (tree) finally_label, (tree) return_link);
8989 TREE_SIDE_EFFECTS (cleanup) = 1;
8991 /* Start a new binding layer that will keep track of all cleanup
8992 actions to be performed. */
8993 expand_start_bindings (2);
8994 target_temp_slot_level = temp_slot_level;
8996 expand_decl_cleanup (NULL_TREE, cleanup);
8997 op0 = expand_expr (try_block, target, tmode, modifier);
8999 preserve_temp_slots (op0);
9000 expand_end_bindings (NULL_TREE, 0, 0);
9001 emit_jump (done_label);
9002 emit_label (finally_label);
9003 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9004 emit_indirect_jump (return_link);
9005 emit_label (done_label);
9007 else
9009 expand_start_bindings (2);
9010 target_temp_slot_level = temp_slot_level;
9012 expand_decl_cleanup (NULL_TREE, finally_block);
9013 op0 = expand_expr (try_block, target, tmode, modifier);
9015 preserve_temp_slots (op0);
9016 expand_end_bindings (NULL_TREE, 0, 0);
9019 return op0;
9022 case GOTO_SUBROUTINE_EXPR:
9024 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9025 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9026 rtx return_address = gen_label_rtx ();
9027 emit_move_insn (return_link,
9028 gen_rtx_LABEL_REF (Pmode, return_address));
9029 emit_jump (subr);
9030 emit_label (return_address);
9031 return const0_rtx;
9034 case VA_ARG_EXPR:
9035 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9037 case EXC_PTR_EXPR:
9038 return get_exception_pointer (cfun);
9040 case FDESC_EXPR:
9041 /* Function descriptors are not valid except as
9042 initialization constants, and should not be expanded. */
9043 abort ();
9045 default:
9046 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9049 /* Here to do an ordinary binary operator, generating an instruction
9050 from the optab already placed in `this_optab'. */
9051 binop:
9052 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9053 subtarget, &op0, &op1, 0);
9054 binop2:
9055 if (modifier == EXPAND_STACK_PARM)
9056 target = 0;
9057 temp = expand_binop (mode, this_optab, op0, op1, target,
9058 unsignedp, OPTAB_LIB_WIDEN);
9059 if (temp == 0)
9060 abort ();
9061 return temp;
9064 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9065 when applied to the address of EXP produces an address known to be
9066 aligned more than BIGGEST_ALIGNMENT. */
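/* Concretely, this recognizes offsets of the form (-&EXP) & C, where
   C is one less than a power of 2 and larger than BIGGEST_ALIGNMENT;
   adding such an offset to the address of EXP rounds it up to that
   power-of-2 boundary.  */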
9068 static int
9069 is_aligning_offset (tree offset, tree exp)
9071 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9072 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9073 || TREE_CODE (offset) == NOP_EXPR
9074 || TREE_CODE (offset) == CONVERT_EXPR
9075 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9076 offset = TREE_OPERAND (offset, 0);
9078 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9079 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9080 if (TREE_CODE (offset) != BIT_AND_EXPR
9081 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9082 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9083 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9084 return 0;
9086 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9087 It must be NEGATE_EXPR. Then strip any more conversions. */
9088 offset = TREE_OPERAND (offset, 0);
9089 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9090 || TREE_CODE (offset) == NOP_EXPR
9091 || TREE_CODE (offset) == CONVERT_EXPR)
9092 offset = TREE_OPERAND (offset, 0);
9094 if (TREE_CODE (offset) != NEGATE_EXPR)
9095 return 0;
9097 offset = TREE_OPERAND (offset, 0);
9098 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9099 || TREE_CODE (offset) == NOP_EXPR
9100 || TREE_CODE (offset) == CONVERT_EXPR)
9101 offset = TREE_OPERAND (offset, 0);
9103 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9104 whose type is the same as EXP. */
9105 return (TREE_CODE (offset) == ADDR_EXPR
9106 && (TREE_OPERAND (offset, 0) == exp
9107 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9108 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9109 == TREE_TYPE (exp)))));
9112 /* Return the tree node if ARG corresponds to a string constant, or zero
9113 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9114 in bytes within the string that ARG is accessing. The type of the
9115 offset will be `sizetype'. */
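/* For example, for an ARG representing "hello" + 2 (a PLUS_EXPR of an
   ADDR_EXPR of the STRING_CST and the constant 2), this returns the
   STRING_CST "hello" and sets *PTR_OFFSET to 2; a plain &"hello"
   yields an offset of zero.  */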
9117 tree
9118 string_constant (tree arg, tree *ptr_offset)
9120 STRIP_NOPS (arg);
9122 if (TREE_CODE (arg) == ADDR_EXPR
9123 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9125 *ptr_offset = size_zero_node;
9126 return TREE_OPERAND (arg, 0);
9128 else if (TREE_CODE (arg) == PLUS_EXPR)
9130 tree arg0 = TREE_OPERAND (arg, 0);
9131 tree arg1 = TREE_OPERAND (arg, 1);
9133 STRIP_NOPS (arg0);
9134 STRIP_NOPS (arg1);
9136 if (TREE_CODE (arg0) == ADDR_EXPR
9137 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9139 *ptr_offset = convert (sizetype, arg1);
9140 return TREE_OPERAND (arg0, 0);
9142 else if (TREE_CODE (arg1) == ADDR_EXPR
9143 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9145 *ptr_offset = convert (sizetype, arg0);
9146 return TREE_OPERAND (arg1, 0);
9150 return 0;
9153 /* Expand code for a post- or pre- increment or decrement
9154 and return the RTX for the result.
9155 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9157 static rtx
9158 expand_increment (tree exp, int post, int ignore)
9160 rtx op0, op1;
9161 rtx temp, value;
9162 tree incremented = TREE_OPERAND (exp, 0);
9163 optab this_optab = add_optab;
9164 int icode;
9165 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9166 int op0_is_copy = 0;
9167 int single_insn = 0;
9168 /* 1 means we can't store into OP0 directly,
9169 because it is a subreg narrower than a word,
9170 and we don't dare clobber the rest of the word. */
9171 int bad_subreg = 0;
9173 /* Stabilize any component ref that might need to be
9174 evaluated more than once below. */
9175 if (!post
9176 || TREE_CODE (incremented) == BIT_FIELD_REF
9177 || (TREE_CODE (incremented) == COMPONENT_REF
9178 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9179 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9180 incremented = stabilize_reference (incremented);
9181 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9182 ones into save exprs so that they don't accidentally get evaluated
9183 more than once by the code below. */
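/* For instance, the C++ expression ++++i reaches here as a
   PREINCREMENT_EXPR whose operand is itself a PREINCREMENT_EXPR.  */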
9184 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9185 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9186 incremented = save_expr (incremented);
9188 /* Compute the operands as RTX.
9189 Note whether OP0 is the actual lvalue or a copy of it:
9190 I believe it is a copy iff it is a register or subreg
9191 and insns were generated in computing it. */
9193 temp = get_last_insn ();
9194 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9196 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9197 in place but instead must do sign- or zero-extension during assignment,
9198 so we copy it into a new register and let the code below use it as
9199 a copy.
9201 Note that we can safely modify this SUBREG since it is known not to be
9202 shared (it was made by the expand_expr call above). */
9204 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9206 if (post)
9207 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9208 else
9209 bad_subreg = 1;
9211 else if (GET_CODE (op0) == SUBREG
9212 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9214 /* We cannot increment this SUBREG in place. If we are
9215 post-incrementing, get a copy of the old value. Otherwise,
9216 just mark that we cannot increment in place. */
9217 if (post)
9218 op0 = copy_to_reg (op0);
9219 else
9220 bad_subreg = 1;
9223 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9224 && temp != get_last_insn ());
9225 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9227 /* Decide whether incrementing or decrementing. */
9228 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9229 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9230 this_optab = sub_optab;
9232 /* Convert decrement by a constant into a negative increment. */
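/* E.g. a decrement by 4 is handled as an increment by -4, so only the
   add_optab (or addv_optab) handlers need to be checked below.  */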
9233 if (this_optab == sub_optab
9234 && GET_CODE (op1) == CONST_INT)
9236 op1 = GEN_INT (-INTVAL (op1));
9237 this_optab = add_optab;
9240 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9241 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9243 /* For a preincrement, see if we can do this with a single instruction. */
9244 if (!post)
9246 icode = (int) this_optab->handlers[(int) mode].insn_code;
9247 if (icode != (int) CODE_FOR_nothing
9248 /* Make sure that OP0 is valid for operands 0 and 1
9249 of the insn we want to queue. */
9250 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9251 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9252 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9253 single_insn = 1;
9256 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9257 then we cannot just increment OP0. We must therefore contrive to
9258 increment the original value. Then, for postincrement, we can return
9259 OP0 since it is a copy of the old value. For preincrement, expand here
9260 unless we can do it with a single insn.
9262 Likewise if storing directly into OP0 would clobber high bits
9263 we need to preserve (bad_subreg). */
9264 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9266 /* This is the easiest way to increment the value wherever it is.
9267 Problems with multiple evaluation of INCREMENTED are prevented
9268 because either (1) it is a component_ref or preincrement,
9269 in which case it was stabilized above, or (2) it is an array_ref
9270 with constant index in an array in a register, which is
9271 safe to reevaluate. */
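/* Illustratively: if expanding i++ left only a register copy of i in
   OP0, we expand the assignment i = i + 1 here and, for a
   post-increment, return the previously computed copy as the value.  */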
9272 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9273 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9274 ? MINUS_EXPR : PLUS_EXPR),
9275 TREE_TYPE (exp),
9276 incremented,
9277 TREE_OPERAND (exp, 1));
9279 while (TREE_CODE (incremented) == NOP_EXPR
9280 || TREE_CODE (incremented) == CONVERT_EXPR)
9282 newexp = convert (TREE_TYPE (incremented), newexp);
9283 incremented = TREE_OPERAND (incremented, 0);
9286 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9287 return post ? op0 : temp;
9290 if (post)
9292 /* We have a true reference to the value in OP0.
9293 If there is an insn to add or subtract in this mode, queue it.
9294 Queuing the increment insn avoids the register shuffling
9295 that often results if we must increment now and first save
9296 the old value for subsequent use. */
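/* For example, in a[i++] the queued add lets the old value of i be
   used to index the array; the increment insn itself is not emitted
   until emit_queue time.  */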
9298 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9299 op0 = stabilize (op0);
9300 #endif
9302 icode = (int) this_optab->handlers[(int) mode].insn_code;
9303 if (icode != (int) CODE_FOR_nothing
9304 /* Make sure that OP0 is valid for operands 0 and 1
9305 of the insn we want to queue. */
9306 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9307 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9309 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9310 op1 = force_reg (mode, op1);
9312 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9314 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9316 rtx addr = (general_operand (XEXP (op0, 0), mode)
9317 ? force_reg (Pmode, XEXP (op0, 0))
9318 : copy_to_reg (XEXP (op0, 0)));
9319 rtx temp, result;
9321 op0 = replace_equiv_address (op0, addr);
9322 temp = force_reg (GET_MODE (op0), op0);
9323 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9324 op1 = force_reg (mode, op1);
9326 /* The increment queue is LIFO, thus we have to `queue'
9327 the instructions in reverse order. */
9328 enqueue_insn (op0, gen_move_insn (op0, temp));
9329 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9330 return result;
9334 /* Preincrement, or we can't increment with one simple insn. */
9335 if (post)
9336 /* Save a copy of the value before inc or dec, to return it later. */
9337 temp = value = copy_to_reg (op0);
9338 else
9339 /* Arrange to return the incremented value. */
9340 /* Copy the rtx because expand_binop will protect from the queue,
9341 and the results of that would be invalid for us to return
9342 if our caller does emit_queue before using our result. */
9343 temp = copy_rtx (value = op0);
9345 /* Increment however we can. */
9346 op1 = expand_binop (mode, this_optab, value, op1, op0,
9347 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9349 /* Make sure the value is stored into OP0. */
9350 if (op1 != op0)
9351 emit_move_insn (op0, op1);
9353 return temp;
9356 /* Generate code to calculate EXP using a store-flag instruction
9357 and return an rtx for the result. EXP is either a comparison
9358 or a TRUTH_NOT_EXPR whose operand is a comparison.
9360 If TARGET is nonzero, store the result there if convenient.
9362 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9363 cheap.
9365 Return zero if there is no suitable set-flag instruction
9366 available on this machine.
9368 Once expand_expr has been called on the arguments of the comparison,
9369 we are committed to doing the store flag, since it is not safe to
9370 re-evaluate the expression. We emit the store-flag insn by calling
9371 emit_store_flag, but only expand the arguments if we have a reason
9372 to believe that emit_store_flag will be successful. If we think that
9373 it will, but it isn't, we have to simulate the store-flag with a
9374 set/jump/set sequence. */
9376 static rtx
9377 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9379 enum rtx_code code;
9380 tree arg0, arg1, type;
9381 tree tem;
9382 enum machine_mode operand_mode;
9383 int invert = 0;
9384 int unsignedp;
9385 rtx op0, op1;
9386 enum insn_code icode;
9387 rtx subtarget = target;
9388 rtx result, label;
9390 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9391 result at the end. We can't simply invert the test since it would
9392 have already been inverted if it were valid. This case occurs for
9393 some floating-point comparisons. */
9395 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9396 invert = 1, exp = TREE_OPERAND (exp, 0);
9398 arg0 = TREE_OPERAND (exp, 0);
9399 arg1 = TREE_OPERAND (exp, 1);
9401 /* Don't crash if the comparison was erroneous. */
9402 if (arg0 == error_mark_node || arg1 == error_mark_node)
9403 return const0_rtx;
9405 type = TREE_TYPE (arg0);
9406 operand_mode = TYPE_MODE (type);
9407 unsignedp = TREE_UNSIGNED (type);
9409 /* We won't bother with BLKmode store-flag operations because it would mean
9410 passing a lot of information to emit_store_flag. */
9411 if (operand_mode == BLKmode)
9412 return 0;
9414 /* We won't bother with store-flag operations involving function pointers
9415 when function pointers must be canonicalized before comparisons. */
9416 #ifdef HAVE_canonicalize_funcptr_for_compare
9417 if (HAVE_canonicalize_funcptr_for_compare
9418 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9419 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9420 == FUNCTION_TYPE))
9421 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9422 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9423 == FUNCTION_TYPE))))
9424 return 0;
9425 #endif
9427 STRIP_NOPS (arg0);
9428 STRIP_NOPS (arg1);
9430 /* Get the rtx comparison code to use. We know that EXP is a comparison
9431 operation of some type. Some comparisons against 1 and -1 can be
9432 converted to comparisons with zero. Do so here so that the tests
9433 below will be aware that we have a comparison with zero. These
9434 tests will not catch constants in the first operand, but constants
9435 are rarely passed as the first operand. */
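/* E.g. a signed X < 1 becomes X <= 0 and a signed X > -1 becomes
   X >= 0, so the zero-comparison special cases below can apply.  */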
9437 switch (TREE_CODE (exp))
9439 case EQ_EXPR:
9440 code = EQ;
9441 break;
9442 case NE_EXPR:
9443 code = NE;
9444 break;
9445 case LT_EXPR:
9446 if (integer_onep (arg1))
9447 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9448 else
9449 code = unsignedp ? LTU : LT;
9450 break;
9451 case LE_EXPR:
9452 if (! unsignedp && integer_all_onesp (arg1))
9453 arg1 = integer_zero_node, code = LT;
9454 else
9455 code = unsignedp ? LEU : LE;
9456 break;
9457 case GT_EXPR:
9458 if (! unsignedp && integer_all_onesp (arg1))
9459 arg1 = integer_zero_node, code = GE;
9460 else
9461 code = unsignedp ? GTU : GT;
9462 break;
9463 case GE_EXPR:
9464 if (integer_onep (arg1))
9465 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9466 else
9467 code = unsignedp ? GEU : GE;
9468 break;
9470 case UNORDERED_EXPR:
9471 code = UNORDERED;
9472 break;
9473 case ORDERED_EXPR:
9474 code = ORDERED;
9475 break;
9476 case UNLT_EXPR:
9477 code = UNLT;
9478 break;
9479 case UNLE_EXPR:
9480 code = UNLE;
9481 break;
9482 case UNGT_EXPR:
9483 code = UNGT;
9484 break;
9485 case UNGE_EXPR:
9486 code = UNGE;
9487 break;
9488 case UNEQ_EXPR:
9489 code = UNEQ;
9490 break;
9492 default:
9493 abort ();
9496 /* Put a constant second. */
9497 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9499 tem = arg0; arg0 = arg1; arg1 = tem;
9500 code = swap_condition (code);
9503 /* If this is an equality or inequality test of a single bit, we can
9504 do this by shifting the bit being tested to the low-order bit and
9505 masking the result with the constant 1. If the condition was EQ,
9506 we xor it with 1. This does not require an scc insn and is faster
9507 than an scc insn even if we have it.
9509 The code to make this transformation was moved into fold_single_bit_test,
9510 so we just call into the folder and expand its result. */
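/* For example, (x & 8) != 0 can be computed as (x >> 3) & 1, with a
   final XOR by 1 when the original condition was ==.  */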
9512 if ((code == NE || code == EQ)
9513 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9514 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9516 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9517 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9518 arg0, arg1, type),
9519 target, VOIDmode, EXPAND_NORMAL);
9522 /* Now see if we are likely to be able to do this. Return if not. */
9523 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9524 return 0;
9526 icode = setcc_gen_code[(int) code];
9527 if (icode == CODE_FOR_nothing
9528 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9530 /* We can only do this if it is one of the special cases that
9531 can be handled without an scc insn. */
9532 if ((code == LT && integer_zerop (arg1))
9533 || (! only_cheap && code == GE && integer_zerop (arg1)))
9535 else if (BRANCH_COST >= 0
9536 && ! only_cheap && (code == NE || code == EQ)
9537 && TREE_CODE (type) != REAL_TYPE
9538 && ((abs_optab->handlers[(int) operand_mode].insn_code
9539 != CODE_FOR_nothing)
9540 || (ffs_optab->handlers[(int) operand_mode].insn_code
9541 != CODE_FOR_nothing)))
9543 else
9544 return 0;
9547 if (! get_subtarget (target)
9548 || GET_MODE (subtarget) != operand_mode)
9549 subtarget = 0;
9551 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9553 if (target == 0)
9554 target = gen_reg_rtx (mode);
9556 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9557 because, if emit_store_flag does anything, it will succeed and
9558 OP0 and OP1 will not be used subsequently. */
9560 result = emit_store_flag (target, code,
9561 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9562 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9563 operand_mode, unsignedp, 1);
9565 if (result)
9567 if (invert)
9568 result = expand_binop (mode, xor_optab, result, const1_rtx,
9569 result, 0, OPTAB_LIB_WIDEN);
9570 return result;
9573 /* If this failed, we have to do this with set/compare/jump/set code. */
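/* The emitted sequence is roughly
     target = 1;  if (OP0 <cond> OP1) goto L;  target = 0;  L:
   with the two constants swapped when INVERT is set.  */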
9574 if (GET_CODE (target) != REG
9575 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9576 target = gen_reg_rtx (GET_MODE (target));
9578 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9579 result = compare_from_rtx (op0, op1, code, unsignedp,
9580 operand_mode, NULL_RTX);
9581 if (GET_CODE (result) == CONST_INT)
9582 return (((result == const0_rtx && ! invert)
9583 || (result != const0_rtx && invert))
9584 ? const0_rtx : const1_rtx);
9586 /* The code of RESULT may not match CODE if compare_from_rtx
9587 decided to swap its operands and reverse the original code.
9589 We know that compare_from_rtx returns either a CONST_INT or
9590 a new comparison code, so it is safe to just extract the
9591 code from RESULT. */
9592 code = GET_CODE (result);
9594 label = gen_label_rtx ();
9595 if (bcc_gen_fctn[(int) code] == 0)
9596 abort ();
9598 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9599 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9600 emit_label (label);
9602 return target;
9606 /* Stubs in case we haven't got a casesi insn. */
9607 #ifndef HAVE_casesi
9608 # define HAVE_casesi 0
9609 # define gen_casesi(a, b, c, d, e) (0)
9610 # define CODE_FOR_casesi CODE_FOR_nothing
9611 #endif
9613 /* If the machine does not have a case insn that compares the bounds,
9614 this means extra overhead for dispatch tables, which raises the
9615 threshold for using them. */
9616 #ifndef CASE_VALUES_THRESHOLD
9617 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9618 #endif /* CASE_VALUES_THRESHOLD */
9620 unsigned int
9621 case_values_threshold (void)
9623 return CASE_VALUES_THRESHOLD;
9626 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9627 0 otherwise (i.e. if there is no casesi instruction). */
9628 int
9629 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9630 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9632 enum machine_mode index_mode = SImode;
9633 int index_bits = GET_MODE_BITSIZE (index_mode);
9634 rtx op1, op2, index;
9635 enum machine_mode op_mode;
9637 if (! HAVE_casesi)
9638 return 0;
9640 /* Convert the index to SImode. */
9641 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9643 enum machine_mode omode = TYPE_MODE (index_type);
9644 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9646 /* We must handle the endpoints in the original mode. */
9647 index_expr = build (MINUS_EXPR, index_type,
9648 index_expr, minval);
9649 minval = integer_zero_node;
9650 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9651 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9652 omode, 1, default_label);
9653 /* Now we can safely truncate. */
9654 index = convert_to_mode (index_mode, index, 0);
9656 else
9658 if (TYPE_MODE (index_type) != index_mode)
9660 index_expr = convert ((*lang_hooks.types.type_for_size)
9661 (index_bits, 0), index_expr);
9662 index_type = TREE_TYPE (index_expr);
9665 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9667 emit_queue ();
9668 index = protect_from_queue (index, 0);
9669 do_pending_stack_adjust ();
9671 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9672 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9673 (index, op_mode))
9674 index = copy_to_mode_reg (op_mode, index);
9676 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9678 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9679 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9680 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9681 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9682 (op1, op_mode))
9683 op1 = copy_to_mode_reg (op_mode, op1);
9685 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9687 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9688 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9689 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9690 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9691 (op2, op_mode))
9692 op2 = copy_to_mode_reg (op_mode, op2);
9694 emit_jump_insn (gen_casesi (index, op1, op2,
9695 table_label, default_label));
9696 return 1;
9699 /* Attempt to generate a tablejump instruction; same concept. */
9700 #ifndef HAVE_tablejump
9701 #define HAVE_tablejump 0
9702 #define gen_tablejump(x, y) (0)
9703 #endif
9705 /* Subroutine of the next function.
9707 INDEX is the value being switched on, with the lowest value
9708 in the table already subtracted.
9709 MODE is its expected mode (needed if INDEX is constant).
9710 RANGE is the length of the jump table.
9711 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9713 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9714 index value is out of range. */
9716 static void
9717 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9718 rtx default_label)
9720 rtx temp, vector;
9722 if (INTVAL (range) > cfun->max_jumptable_ents)
9723 cfun->max_jumptable_ents = INTVAL (range);
9725 /* Do an unsigned comparison (in the proper mode) between the index
9726 expression and the value which represents the length of the range.
9727 Since we just finished subtracting the lower bound of the range
9728 from the index expression, this comparison allows us to simultaneously
9729 check that the original index expression value is both greater than
9730 or equal to the minimum value of the range and less than or equal to
9731 the maximum value of the range. */
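/* For example, for a switch whose case values span 3..10, INDEX
   arrives here as i - 3, and the single unsigned test
   (unsigned) (i - 3) > 7 rejects both i < 3 and i > 10.  */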
9733 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9734 default_label);
9736 /* If index is in range, it must fit in Pmode.
9737 Convert to Pmode so we can index with it. */
9738 if (mode != Pmode)
9739 index = convert_to_mode (Pmode, index, 1);
9741 /* Don't let a MEM slip through, because then INDEX that comes
9742 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9743 and break_out_memory_refs will go to work on it and mess it up. */
9744 #ifdef PIC_CASE_VECTOR_ADDRESS
9745 if (flag_pic && GET_CODE (index) != REG)
9746 index = copy_to_mode_reg (Pmode, index);
9747 #endif
9749 /* If flag_force_addr were to affect this address
9750 it could interfere with the tricky assumptions made
9751 about addresses that contain label-refs,
9752 which may be valid only very near the tablejump itself. */
9753 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9754 GET_MODE_SIZE, because this indicates how large insns are. The other
9755 uses should all be Pmode, because they are addresses. This code
9756 could fail if addresses and insns are not the same size. */
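/* The dispatch address computed here is roughly
     TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE),
   possibly further transformed by PIC_CASE_VECTOR_ADDRESS when
   generating PIC code.  */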
9757 index = gen_rtx_PLUS (Pmode,
9758 gen_rtx_MULT (Pmode, index,
9759 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9760 gen_rtx_LABEL_REF (Pmode, table_label));
9761 #ifdef PIC_CASE_VECTOR_ADDRESS
9762 if (flag_pic)
9763 index = PIC_CASE_VECTOR_ADDRESS (index);
9764 else
9765 #endif
9766 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9767 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9768 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9769 RTX_UNCHANGING_P (vector) = 1;
9770 MEM_NOTRAP_P (vector) = 1;
9771 convert_move (temp, vector, 0);
9773 emit_jump_insn (gen_tablejump (temp, table_label));
9775 /* If we are generating PIC code or if the table is PC-relative, the
9776 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9777 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9778 emit_barrier ();
9781 int
9782 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9783 rtx table_label, rtx default_label)
9785 rtx index;
9787 if (! HAVE_tablejump)
9788 return 0;
9790 index_expr = fold (build (MINUS_EXPR, index_type,
9791 convert (index_type, index_expr),
9792 convert (index_type, minval)));
9793 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9794 emit_queue ();
9795 index = protect_from_queue (index, 0);
9796 do_pending_stack_adjust ();
9798 do_tablejump (index, TYPE_MODE (index_type),
9799 convert_modes (TYPE_MODE (index_type),
9800 TYPE_MODE (TREE_TYPE (range)),
9801 expand_expr (range, NULL_RTX,
9802 VOIDmode, 0),
9803 TREE_UNSIGNED (TREE_TYPE (range))),
9804 table_label, default_label);
9805 return 1;
9808 /* Nonzero if the mode is a valid vector mode for this architecture.
9809 This returns nonzero even if there is no hardware support for the
9810 vector mode, but we can emulate with narrower modes. */
9812 int
9813 vector_mode_valid_p (enum machine_mode mode)
9815 enum mode_class class = GET_MODE_CLASS (mode);
9816 enum machine_mode innermode;
9818 /* Doh! What's going on? */
9819 if (class != MODE_VECTOR_INT
9820 && class != MODE_VECTOR_FLOAT)
9821 return 0;
9823 /* Hardware support. Woo hoo! */
9824 if (VECTOR_MODE_SUPPORTED_P (mode))
9825 return 1;
9827 innermode = GET_MODE_INNER (mode);
9829 /* We should probably return 1 if requesting V4DI and we have no DI
9830 but do have V2DI; however, that case is probably very unlikely. */
9832 /* If we have support for the inner mode, we can safely emulate it.
9833 We may not have V2DI, but we can emulate with a pair of DIs. */
9834 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9837 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9838 static rtx
9839 const_vector_from_tree (tree exp)
9841 rtvec v;
9842 int units, i;
9843 tree link, elt;
9844 enum machine_mode inner, mode;
9846 mode = TYPE_MODE (TREE_TYPE (exp));
9848 if (is_zeros_p (exp))
9849 return CONST0_RTX (mode);
9851 units = GET_MODE_NUNITS (mode);
9852 inner = GET_MODE_INNER (mode);
9854 v = rtvec_alloc (units);
9856 link = TREE_VECTOR_CST_ELTS (exp);
9857 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9859 elt = TREE_VALUE (link);
9861 if (TREE_CODE (elt) == REAL_CST)
9862 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9863 inner);
9864 else
9865 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9866 TREE_INT_CST_HIGH (elt),
9867 inner);
9870 /* Initialize remaining elements to 0. */
9871 for (; i < units; ++i)
9872 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9874 return gen_rtx_raw_CONST_VECTOR (mode, v);
9877 #include "gt-expr.h"