1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
129 void *constfundata;
130 int reverse;
131 };
133 static rtx enqueue_insn (rtx, rtx);
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
135 unsigned int);
136 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
137 struct move_by_pieces *);
138 static bool block_move_libcall_safe_for_call_parm (void);
139 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
140 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
146 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
147 struct store_by_pieces *);
148 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
149 static rtx clear_storage_via_libcall (rtx, rtx);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static int is_zeros_p (tree);
154 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, int, int);
157 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
158 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
159 tree, enum machine_mode, int, tree, int);
160 static rtx var_rtx (tree);
162 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
163 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
165 static int is_aligning_offset (tree, tree);
166 static rtx expand_increment (tree, int, int);
167 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
168 enum expand_modifier);
169 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
170 #ifdef PUSH_ROUNDING
171 static void emit_single_push_insn (enum machine_mode, rtx, tree);
172 #endif
173 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
174 static rtx const_vector_from_tree (tree);
176 /* Record for each mode whether we can move a register directly to or
177 from an object of that mode in memory. If we can't, we won't try
178 to use that mode directly when accessing a field of that mode. */
180 static char direct_load[NUM_MACHINE_MODES];
181 static char direct_store[NUM_MACHINE_MODES];
183 /* Record for each mode whether we can float-extend from memory. */
185 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
187 /* If a memory-to-memory move would take MOVE_RATIO or more simple
188 move-instruction sequences, we will do a movstr or libcall instead. */
190 #ifndef MOVE_RATIO
191 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
192 #define MOVE_RATIO 2
193 #else
194 /* If we are optimizing for space (-Os), cut down the default move ratio. */
195 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 #endif
197 #endif
199 /* This macro is used to determine whether move_by_pieces should be called
200 to perform a structure copy. */
201 #ifndef MOVE_BY_PIECES_P
202 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
204 #endif
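/* For example, a target that wants inline copies whenever they would take
   fewer than eight move insns could supply its own definition in its target
   header (an illustrative sketch only, not taken from any existing port):

     #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
       (move_by_pieces_ninsns (SIZE, ALIGN) < 8)
*/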
206 /* If a clear memory operation would take CLEAR_RATIO or more simple
207 move-instruction sequences, we will do a clrstr or libcall instead. */
209 #ifndef CLEAR_RATIO
210 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
211 #define CLEAR_RATIO 2
212 #else
213 /* If we are optimizing for space, cut down the default clear ratio. */
214 #define CLEAR_RATIO (optimize_size ? 3 : 15)
215 #endif
216 #endif
218 /* This macro is used to determine whether clear_by_pieces should be
219 called to clear storage. */
220 #ifndef CLEAR_BY_PIECES_P
221 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
222 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
223 #endif
225 /* This macro is used to determine whether store_by_pieces should be
226 called to "memset" storage with byte values other than zero, or
227 to "memcpy" storage when the source is a constant string. */
228 #ifndef STORE_BY_PIECES_P
229 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
230 #endif
232 /* This array records the insn_code of insns to perform block moves. */
233 enum insn_code movstr_optab[NUM_MACHINE_MODES];
235 /* This array records the insn_code of insns to perform block clears. */
236 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
238 /* These arrays record the insn_code of two different kinds of insns
239 to perform block compares. */
240 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
241 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
243 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
244 struct file_stack *expr_wfl_stack;
246 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
248 #ifndef SLOW_UNALIGNED_ACCESS
249 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
250 #endif
252 /* This is run once per compilation to set up which modes can be used
253 directly in memory and to initialize the block move optab. */
255 void
256 init_expr_once (void)
258 rtx insn, pat;
259 enum machine_mode mode;
260 int num_clobbers;
261 rtx mem, mem1;
262 rtx reg;
264 /* Try indexing by frame ptr and try by stack ptr.
265 It is known that on the Convex the stack ptr isn't a valid index.
266 With luck, one or the other is valid on any machine. */
267 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
268 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
270 /* A scratch register we can modify in-place below to avoid
271 useless RTL allocations. */
272 reg = gen_rtx_REG (VOIDmode, -1);
274 insn = rtx_alloc (INSN);
275 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
276 PATTERN (insn) = pat;
278 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
279 mode = (enum machine_mode) ((int) mode + 1))
281 int regno;
283 direct_load[(int) mode] = direct_store[(int) mode] = 0;
284 PUT_MODE (mem, mode);
285 PUT_MODE (mem1, mode);
286 PUT_MODE (reg, mode);
288 /* See if there is some register that can be used in this mode and
289 directly loaded or stored from memory. */
291 if (mode != VOIDmode && mode != BLKmode)
292 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
293 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
294 regno++)
296 if (! HARD_REGNO_MODE_OK (regno, mode))
297 continue;
299 REGNO (reg) = regno;
301 SET_SRC (pat) = mem;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
306 SET_SRC (pat) = mem1;
307 SET_DEST (pat) = reg;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_load[(int) mode] = 1;
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
316 SET_SRC (pat) = reg;
317 SET_DEST (pat) = mem1;
318 if (recog (pat, insn, &num_clobbers) >= 0)
319 direct_store[(int) mode] = 1;
323 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
325 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
326 mode = GET_MODE_WIDER_MODE (mode))
328 enum machine_mode srcmode;
329 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
330 srcmode = GET_MODE_WIDER_MODE (srcmode))
332 enum insn_code ic;
334 ic = can_extend_p (mode, srcmode, 0);
335 if (ic == CODE_FOR_nothing)
336 continue;
338 PUT_MODE (mem, srcmode);
340 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
341 float_extend_from_mem[mode][srcmode] = true;
346 /* This is run at the start of compiling a function. */
348 void
349 init_expr (void)
350 {
351 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
352 }
354 /* Small sanity check that the queue is empty at the end of a function. */
356 void
357 finish_expr_for_function (void)
358 {
359 if (pending_chain)
360 abort ();
361 }
363 /* Manage the queue of increment instructions to be output
364 for POSTINCREMENT_EXPR expressions, etc. */
366 /* Queue up to increment (or change) VAR later. BODY says how:
367 BODY should be the same thing you would pass to emit_insn
368 to increment right away. It will go to emit_insn later on.
370 The value is a QUEUED expression to be used in place of VAR
371 where you want to guarantee the pre-incrementation value of VAR. */
373 static rtx
374 enqueue_insn (rtx var, rtx body)
375 {
376 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
377 body, pending_chain);
378 return pending_chain;
379 }
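/* As an illustrative sketch (VAR is assumed to be a register rtx), queueing
   "VAR = VAR + 1" to be emitted later looks like this; the QUEUED rtx that
   is returned stands for the pre-increment value of VAR:

     rtx queued = enqueue_insn (var,
                                gen_rtx_SET (VOIDmode, var,
                                             plus_constant (var, 1)));
*/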
381 /* Use protect_from_queue to convert a QUEUED expression
382 into something that you can put immediately into an instruction.
383 If the queued incrementation has not happened yet,
384 protect_from_queue returns the variable itself.
385 If the incrementation has happened, protect_from_queue returns a temp
386 that contains a copy of the old value of the variable.
388 Any time an rtx which might possibly be a QUEUED is to be put
389 into an instruction, it must be passed through protect_from_queue first.
390 QUEUED expressions are not meaningful in instructions.
392 Do not pass a value through protect_from_queue and then hold
393 on to it for a while before putting it in an instruction!
394 If the queue is flushed in between, incorrect code will result. */
396 rtx
397 protect_from_queue (rtx x, int modify)
399 RTX_CODE code = GET_CODE (x);
401 #if 0 /* A QUEUED can hang around after the queue is forced out. */
402 /* Shortcut for most common case. */
403 if (pending_chain == 0)
404 return x;
405 #endif
407 if (code != QUEUED)
409 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
410 use of autoincrement. Make a copy of the contents of the memory
411 location rather than a copy of the address, but not if the value is
412 of mode BLKmode. Don't modify X in place since it might be
413 shared. */
414 if (code == MEM && GET_MODE (x) != BLKmode
415 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
417 rtx y = XEXP (x, 0);
418 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
420 if (QUEUED_INSN (y))
422 rtx temp = gen_reg_rtx (GET_MODE (x));
424 emit_insn_before (gen_move_insn (temp, new),
425 QUEUED_INSN (y));
426 return temp;
429 /* Copy the address into a pseudo, so that the returned value
430 remains correct across calls to emit_queue. */
431 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
434 /* Otherwise, recursively protect the subexpressions of all
435 the kinds of rtx's that can contain a QUEUED. */
436 if (code == MEM)
438 rtx tem = protect_from_queue (XEXP (x, 0), 0);
439 if (tem != XEXP (x, 0))
441 x = copy_rtx (x);
442 XEXP (x, 0) = tem;
445 else if (code == PLUS || code == MULT)
447 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
448 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
449 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
451 x = copy_rtx (x);
452 XEXP (x, 0) = new0;
453 XEXP (x, 1) = new1;
456 return x;
458 /* If the increment has not happened, use the variable itself. Copy it
459 into a new pseudo so that the value remains correct across calls to
460 emit_queue. */
461 if (QUEUED_INSN (x) == 0)
462 return copy_to_reg (QUEUED_VAR (x));
463 /* If the increment has happened and a pre-increment copy exists,
464 use that copy. */
465 if (QUEUED_COPY (x) != 0)
466 return QUEUED_COPY (x);
467 /* The increment has happened but we haven't set up a pre-increment copy.
468 Set one up now, and use it. */
469 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
470 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
471 QUEUED_INSN (x));
472 return QUEUED_COPY (x);
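/* A typical caller protects every operand that might contain a QUEUED rtx
   immediately before emitting the insn that uses it (a sketch; OP0 and
   TARGET are placeholders, TARGET being the operand that will be stored
   into):

     op0 = protect_from_queue (op0, 0);
     target = protect_from_queue (target, 1);
     emit_move_insn (target, op0);
*/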
475 /* Return nonzero if X contains a QUEUED expression:
476 if it contains anything that will be altered by a queued increment.
477 We handle only combinations of MEM, PLUS, MINUS and MULT operators
478 since memory addresses generally contain only those. */
480 int
481 queued_subexp_p (rtx x)
483 enum rtx_code code = GET_CODE (x);
484 switch (code)
486 case QUEUED:
487 return 1;
488 case MEM:
489 return queued_subexp_p (XEXP (x, 0));
490 case MULT:
491 case PLUS:
492 case MINUS:
493 return (queued_subexp_p (XEXP (x, 0))
494 || queued_subexp_p (XEXP (x, 1)));
495 default:
496 return 0;
500 /* Perform all the pending incrementations. */
502 void
503 emit_queue (void)
505 rtx p;
506 while ((p = pending_chain))
508 rtx body = QUEUED_BODY (p);
510 switch (GET_CODE (body))
512 case INSN:
513 case JUMP_INSN:
514 case CALL_INSN:
515 case CODE_LABEL:
516 case BARRIER:
517 case NOTE:
518 QUEUED_INSN (p) = body;
519 emit_insn (body);
520 break;
522 #ifdef ENABLE_CHECKING
523 case SEQUENCE:
524 abort ();
525 break;
526 #endif
528 default:
529 QUEUED_INSN (p) = emit_insn (body);
530 break;
533 pending_chain = QUEUED_NEXT (p);
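/* Code that is about to emit a branch typically flushes both the pending
   stack adjustment and this queue first, so that queued side effects are
   not separated from the expression that produced them (a sketch; LABEL is
   a placeholder):

     do_pending_stack_adjust ();
     emit_queue ();
     emit_jump (label);
*/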
537 /* Copy data from FROM to TO, where the machine modes are not the same.
538 Both modes may be integer, or both may be floating.
539 UNSIGNEDP should be nonzero if FROM is an unsigned type.
540 This causes zero-extension instead of sign-extension. */
542 void
543 convert_move (rtx to, rtx from, int unsignedp)
545 enum machine_mode to_mode = GET_MODE (to);
546 enum machine_mode from_mode = GET_MODE (from);
547 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
548 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
549 enum insn_code code;
550 rtx libcall;
552 /* rtx code for making an equivalent value. */
553 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
554 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
556 to = protect_from_queue (to, 1);
557 from = protect_from_queue (from, 0);
559 if (to_real != from_real)
560 abort ();
562 /* If FROM is a SUBREG that indicates that we have already done at least
563 the required extension, strip it. We don't handle such SUBREGs as
564 TO here. */
566 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
567 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
568 >= GET_MODE_SIZE (to_mode))
569 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
570 from = gen_lowpart (to_mode, from), from_mode = to_mode;
572 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
573 abort ();
575 if (to_mode == from_mode
576 || (from_mode == VOIDmode && CONSTANT_P (from)))
578 emit_move_insn (to, from);
579 return;
582 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
584 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
585 abort ();
587 if (VECTOR_MODE_P (to_mode))
588 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
589 else
590 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
592 emit_move_insn (to, from);
593 return;
596 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
598 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
599 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
600 return;
603 if (to_real)
605 rtx value, insns;
606 convert_optab tab;
608 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
609 tab = sext_optab;
610 else if (GET_MODE_BITSIZE (from_mode) > GET_MODE_BITSIZE (to_mode))
611 tab = trunc_optab;
612 else
613 abort ();
615 /* Try converting directly if the insn is supported. */
617 code = tab->handlers[to_mode][from_mode].insn_code;
618 if (code != CODE_FOR_nothing)
620 emit_unop_insn (code, to, from,
621 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
622 return;
625 /* Otherwise use a libcall. */
626 libcall = tab->handlers[to_mode][from_mode].libfunc;
628 if (!libcall)
629 /* This conversion is not implemented yet. */
630 abort ();
632 start_sequence ();
633 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
634 1, from, from_mode);
635 insns = get_insns ();
636 end_sequence ();
637 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
638 from));
639 return;
642 /* Handle pointer conversion. */ /* SPEE 900220. */
643 /* Targets are expected to provide conversion insns between PxImode and
644 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
645 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
647 enum machine_mode full_mode
648 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
650 if (trunc_optab->handlers[to_mode][full_mode].insn_code
651 == CODE_FOR_nothing)
652 abort ();
654 if (full_mode != from_mode)
655 from = convert_to_mode (full_mode, from, unsignedp);
656 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
657 to, from, UNKNOWN);
658 return;
660 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
662 enum machine_mode full_mode
663 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
665 if (sext_optab->handlers[full_mode][from_mode].insn_code
666 == CODE_FOR_nothing)
667 abort ();
669 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
670 to, from, UNKNOWN);
671 if (to_mode == full_mode)
672 return;
674 /* else proceed to integer conversions below */
675 from_mode = full_mode;
678 /* Now both modes are integers. */
680 /* Handle expanding beyond a word. */
681 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
682 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
684 rtx insns;
685 rtx lowpart;
686 rtx fill_value;
687 rtx lowfrom;
688 int i;
689 enum machine_mode lowpart_mode;
690 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
692 /* Try converting directly if the insn is supported. */
693 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
694 != CODE_FOR_nothing)
696 /* If FROM is a SUBREG, put it into a register. Do this
697 so that we always generate the same set of insns for
698 better cse'ing; if an intermediate assignment occurred,
699 we won't be doing the operation directly on the SUBREG. */
700 if (optimize > 0 && GET_CODE (from) == SUBREG)
701 from = force_reg (from_mode, from);
702 emit_unop_insn (code, to, from, equiv_code);
703 return;
705 /* Next, try converting via full word. */
706 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
707 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
708 != CODE_FOR_nothing))
710 if (GET_CODE (to) == REG)
711 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
712 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
713 emit_unop_insn (code, to,
714 gen_lowpart (word_mode, to), equiv_code);
715 return;
718 /* No special multiword conversion insn; do it by hand. */
719 start_sequence ();
721 /* Since we will turn this into a no conflict block, we must ensure
722 that the source does not overlap the target. */
724 if (reg_overlap_mentioned_p (to, from))
725 from = force_reg (from_mode, from);
727 /* Get a copy of FROM widened to a word, if necessary. */
728 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
729 lowpart_mode = word_mode;
730 else
731 lowpart_mode = from_mode;
733 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
735 lowpart = gen_lowpart (lowpart_mode, to);
736 emit_move_insn (lowpart, lowfrom);
738 /* Compute the value to put in each remaining word. */
739 if (unsignedp)
740 fill_value = const0_rtx;
741 else
743 #ifdef HAVE_slt
744 if (HAVE_slt
745 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
746 && STORE_FLAG_VALUE == -1)
748 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
749 lowpart_mode, 0);
750 fill_value = gen_reg_rtx (word_mode);
751 emit_insn (gen_slt (fill_value));
753 else
754 #endif
756 fill_value
757 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
758 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
759 NULL_RTX, 0);
760 fill_value = convert_to_mode (word_mode, fill_value, 1);
764 /* Fill the remaining words. */
765 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
767 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
768 rtx subword = operand_subword (to, index, 1, to_mode);
770 if (subword == 0)
771 abort ();
773 if (fill_value != subword)
774 emit_move_insn (subword, fill_value);
777 insns = get_insns ();
778 end_sequence ();
780 emit_no_conflict_block (insns, to, from, NULL_RTX,
781 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
782 return;
785 /* Truncating multi-word to a word or less. */
786 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
787 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
789 if (!((GET_CODE (from) == MEM
790 && ! MEM_VOLATILE_P (from)
791 && direct_load[(int) to_mode]
792 && ! mode_dependent_address_p (XEXP (from, 0)))
793 || GET_CODE (from) == REG
794 || GET_CODE (from) == SUBREG))
795 from = force_reg (from_mode, from);
796 convert_move (to, gen_lowpart (word_mode, from), 0);
797 return;
800 /* Now follow all the conversions between integers
801 no more than a word long. */
803 /* For truncation, usually we can just refer to FROM in a narrower mode. */
804 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
805 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
806 GET_MODE_BITSIZE (from_mode)))
808 if (!((GET_CODE (from) == MEM
809 && ! MEM_VOLATILE_P (from)
810 && direct_load[(int) to_mode]
811 && ! mode_dependent_address_p (XEXP (from, 0)))
812 || GET_CODE (from) == REG
813 || GET_CODE (from) == SUBREG))
814 from = force_reg (from_mode, from);
815 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
816 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
817 from = copy_to_reg (from);
818 emit_move_insn (to, gen_lowpart (to_mode, from));
819 return;
822 /* Handle extension. */
823 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
825 /* Convert directly if that works. */
826 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
827 != CODE_FOR_nothing)
829 if (flag_force_mem)
830 from = force_not_mem (from);
832 emit_unop_insn (code, to, from, equiv_code);
833 return;
835 else
837 enum machine_mode intermediate;
838 rtx tmp;
839 tree shift_amount;
841 /* Search for a mode to convert via. */
842 for (intermediate = from_mode; intermediate != VOIDmode;
843 intermediate = GET_MODE_WIDER_MODE (intermediate))
844 if (((can_extend_p (to_mode, intermediate, unsignedp)
845 != CODE_FOR_nothing)
846 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
847 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
848 GET_MODE_BITSIZE (intermediate))))
849 && (can_extend_p (intermediate, from_mode, unsignedp)
850 != CODE_FOR_nothing))
852 convert_move (to, convert_to_mode (intermediate, from,
853 unsignedp), unsignedp);
854 return;
857 /* No suitable intermediate mode.
858 Generate what we need with shifts. */
859 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
860 - GET_MODE_BITSIZE (from_mode), 0);
861 from = gen_lowpart (to_mode, force_reg (from_mode, from));
862 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
863 to, unsignedp);
864 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
865 to, unsignedp);
866 if (tmp != to)
867 emit_move_insn (to, tmp);
868 return;
872 /* Support special truncate insns for certain modes. */
873 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
875 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
876 to, from, UNKNOWN);
877 return;
880 /* Handle truncation of volatile memrefs, and so on;
881 the things that couldn't be truncated directly,
882 and for which there was no special instruction.
884 ??? Code above formerly short-circuited this, for most integer
885 mode pairs, with a force_reg in from_mode followed by a recursive
886 call to this routine. Appears always to have been wrong. */
887 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
889 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
890 emit_move_insn (to, temp);
891 return;
894 /* Mode combination is not recognized. */
895 abort ();
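/* For instance, widening a QImode value into a fresh SImode pseudo with
   sign extension (a sketch; FROM is assumed to be a QImode rtx):

     rtx to = gen_reg_rtx (SImode);
     convert_move (to, from, 0);
*/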
898 /* Return an rtx for a value that would result
899 from converting X to mode MODE.
900 Both X and MODE may be floating, or both integer.
901 UNSIGNEDP is nonzero if X is an unsigned value.
902 This can be done by referring to a part of X in place
903 or by copying to a new temporary with conversion.
905 This function *must not* call protect_from_queue
906 except when putting X into an insn (in which case convert_move does it). */
908 rtx
909 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
911 return convert_modes (mode, VOIDmode, x, unsignedp);
914 /* Return an rtx for a value that would result
915 from converting X from mode OLDMODE to mode MODE.
916 Both modes may be floating, or both integer.
917 UNSIGNEDP is nonzero if X is an unsigned value.
919 This can be done by referring to a part of X in place
920 or by copying to a new temporary with conversion.
922 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
924 This function *must not* call protect_from_queue
925 except when putting X into an insn (in which case convert_move does it). */
927 rtx
928 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
930 rtx temp;
932 /* If FROM is a SUBREG that indicates that we have already done at least
933 the required extension, strip it. */
935 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
936 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
937 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
938 x = gen_lowpart (mode, x);
940 if (GET_MODE (x) != VOIDmode)
941 oldmode = GET_MODE (x);
943 if (mode == oldmode)
944 return x;
946 /* There is one case that we must handle specially: If we are converting
947 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
948 we are to interpret the constant as unsigned, gen_lowpart will do
949 the wrong thing if the constant appears negative. What we want to do is
950 make the high-order word of the constant zero, not all ones. */
952 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
953 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
954 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
956 HOST_WIDE_INT val = INTVAL (x);
958 if (oldmode != VOIDmode
959 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
961 int width = GET_MODE_BITSIZE (oldmode);
963 /* We need to zero extend VAL. */
964 val &= ((HOST_WIDE_INT) 1 << width) - 1;
967 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
970 /* We can do this with a gen_lowpart if both desired and current modes
971 are integer, and this is either a constant integer, a register, or a
972 non-volatile MEM. Except for the constant case where MODE is no
973 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
975 if ((GET_CODE (x) == CONST_INT
976 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
977 || (GET_MODE_CLASS (mode) == MODE_INT
978 && GET_MODE_CLASS (oldmode) == MODE_INT
979 && (GET_CODE (x) == CONST_DOUBLE
980 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
981 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
982 && direct_load[(int) mode])
983 || (GET_CODE (x) == REG
984 && (! HARD_REGISTER_P (x)
985 || HARD_REGNO_MODE_OK (REGNO (x), mode))
986 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
987 GET_MODE_BITSIZE (GET_MODE (x)))))))))
989 /* ?? If we don't know OLDMODE, we have to assume here that
990 X does not need sign- or zero-extension. This may not be
991 the case, but it's the best we can do. */
992 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
993 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
995 HOST_WIDE_INT val = INTVAL (x);
996 int width = GET_MODE_BITSIZE (oldmode);
998 /* We must sign or zero-extend in this case. Start by
999 zero-extending, then sign extend if we need to. */
1000 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1001 if (! unsignedp
1002 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1003 val |= (HOST_WIDE_INT) (-1) << width;
1005 return gen_int_mode (val, mode);
1008 return gen_lowpart (mode, x);
1011 /* Converting from an integer constant into MODE is always equivalent to a
1012 subreg operation. */
1013 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1015 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1016 abort ();
1017 return simplify_gen_subreg (mode, x, oldmode, 0);
1020 temp = gen_reg_rtx (mode);
1021 convert_move (temp, x, unsignedp);
1022 return temp;
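/* Callers that only need the converted value, rather than a move into a
   particular destination, use the wrappers above; e.g. widening a size
   operand unsigned to the pointer mode (a sketch; SIZE is a placeholder):

     size = convert_to_mode (Pmode, size, 1);
*/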
1025 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1026 store efficiently. Due to internal GCC limitations, this is
1027 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1028 for an immediate constant. */
1030 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1032 /* Determine whether the LEN bytes can be moved by using several move
1033 instructions. Return nonzero if a call to move_by_pieces should
1034 succeed. */
1036 int
1037 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1038 unsigned int align ATTRIBUTE_UNUSED)
1040 return MOVE_BY_PIECES_P (len, align);
1043 /* Generate several move instructions to copy LEN bytes from block FROM to
1044 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1045 and TO through protect_from_queue before calling.
1047 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1048 used to push FROM to the stack.
1050 ALIGN is maximum stack alignment we can assume.
1052 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1053 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1054 stpcpy. */
1056 rtx
1057 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1058 unsigned int align, int endp)
1060 struct move_by_pieces data;
1061 rtx to_addr, from_addr = XEXP (from, 0);
1062 unsigned int max_size = MOVE_MAX_PIECES + 1;
1063 enum machine_mode mode = VOIDmode, tmode;
1064 enum insn_code icode;
1066 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1068 data.offset = 0;
1069 data.from_addr = from_addr;
1070 if (to)
1072 to_addr = XEXP (to, 0);
1073 data.to = to;
1074 data.autinc_to
1075 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1076 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1077 data.reverse
1078 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1080 else
1082 to_addr = NULL_RTX;
1083 data.to = NULL_RTX;
1084 data.autinc_to = 1;
1085 #ifdef STACK_GROWS_DOWNWARD
1086 data.reverse = 1;
1087 #else
1088 data.reverse = 0;
1089 #endif
1091 data.to_addr = to_addr;
1092 data.from = from;
1093 data.autinc_from
1094 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1095 || GET_CODE (from_addr) == POST_INC
1096 || GET_CODE (from_addr) == POST_DEC);
1098 data.explicit_inc_from = 0;
1099 data.explicit_inc_to = 0;
1100 if (data.reverse) data.offset = len;
1101 data.len = len;
1103 /* If copying requires more than two move insns,
1104 copy addresses to registers (to make displacements shorter)
1105 and use post-increment if available. */
1106 if (!(data.autinc_from && data.autinc_to)
1107 && move_by_pieces_ninsns (len, align) > 2)
1109 /* Find the mode of the largest move... */
1110 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1111 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1112 if (GET_MODE_SIZE (tmode) < max_size)
1113 mode = tmode;
1115 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1117 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1118 data.autinc_from = 1;
1119 data.explicit_inc_from = -1;
1121 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1123 data.from_addr = copy_addr_to_reg (from_addr);
1124 data.autinc_from = 1;
1125 data.explicit_inc_from = 1;
1127 if (!data.autinc_from && CONSTANT_P (from_addr))
1128 data.from_addr = copy_addr_to_reg (from_addr);
1129 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1131 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1132 data.autinc_to = 1;
1133 data.explicit_inc_to = -1;
1135 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1137 data.to_addr = copy_addr_to_reg (to_addr);
1138 data.autinc_to = 1;
1139 data.explicit_inc_to = 1;
1141 if (!data.autinc_to && CONSTANT_P (to_addr))
1142 data.to_addr = copy_addr_to_reg (to_addr);
1145 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1146 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1147 align = MOVE_MAX * BITS_PER_UNIT;
1149 /* First move what we can in the largest integer mode, then go to
1150 successively smaller modes. */
1152 while (max_size > 1)
1154 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1155 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1156 if (GET_MODE_SIZE (tmode) < max_size)
1157 mode = tmode;
1159 if (mode == VOIDmode)
1160 break;
1162 icode = mov_optab->handlers[(int) mode].insn_code;
1163 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1164 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1166 max_size = GET_MODE_SIZE (mode);
1169 /* The code above should have handled everything. */
1170 if (data.len > 0)
1171 abort ();
1173 if (endp)
1175 rtx to1;
1177 if (data.reverse)
1178 abort ();
1179 if (data.autinc_to)
1181 if (endp == 2)
1183 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1184 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1185 else
1186 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1187 -1));
1189 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1190 data.offset);
1192 else
1194 if (endp == 2)
1195 --data.offset;
1196 to1 = adjust_address (data.to, QImode, data.offset);
1198 return to1;
1200 else
1201 return data.to;
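/* A typical caller checks the cost heuristic before committing to the
   inline expansion (a sketch; DEST, SRC, SIZE and ALIGN are placeholders,
   SIZE a CONST_INT):

     if (can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (dest, src, INTVAL (size), align, 0);
*/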
1204 /* Return number of insns required to move L bytes by pieces.
1205 ALIGN (in bits) is maximum alignment we can assume. */
1207 static unsigned HOST_WIDE_INT
1208 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1210 unsigned HOST_WIDE_INT n_insns = 0;
1211 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1213 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1214 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1215 align = MOVE_MAX * BITS_PER_UNIT;
1217 while (max_size > 1)
1219 enum machine_mode mode = VOIDmode, tmode;
1220 enum insn_code icode;
1222 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1223 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1224 if (GET_MODE_SIZE (tmode) < max_size)
1225 mode = tmode;
1227 if (mode == VOIDmode)
1228 break;
1230 icode = mov_optab->handlers[(int) mode].insn_code;
1231 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1232 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1234 max_size = GET_MODE_SIZE (mode);
1237 if (l)
1238 abort ();
1239 return n_insns;
1242 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1243 with move instructions for mode MODE. GENFUN is the gen_... function
1244 to make a move insn for that mode. DATA has all the other info. */
1246 static void
1247 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1248 struct move_by_pieces *data)
1250 unsigned int size = GET_MODE_SIZE (mode);
1251 rtx to1 = NULL_RTX, from1;
1253 while (data->len >= size)
1255 if (data->reverse)
1256 data->offset -= size;
1258 if (data->to)
1260 if (data->autinc_to)
1261 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1262 data->offset);
1263 else
1264 to1 = adjust_address (data->to, mode, data->offset);
1267 if (data->autinc_from)
1268 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1269 data->offset);
1270 else
1271 from1 = adjust_address (data->from, mode, data->offset);
1273 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1274 emit_insn (gen_add2_insn (data->to_addr,
1275 GEN_INT (-(HOST_WIDE_INT)size)));
1276 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1277 emit_insn (gen_add2_insn (data->from_addr,
1278 GEN_INT (-(HOST_WIDE_INT)size)));
1280 if (data->to)
1281 emit_insn ((*genfun) (to1, from1));
1282 else
1284 #ifdef PUSH_ROUNDING
1285 emit_single_push_insn (mode, from1, NULL);
1286 #else
1287 abort ();
1288 #endif
1291 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1292 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1293 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1294 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1296 if (! data->reverse)
1297 data->offset += size;
1299 data->len -= size;
1303 /* Emit code to move a block Y to a block X. This may be done with
1304 string-move instructions, with multiple scalar move instructions,
1305 or with a library call.
1307 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1308 SIZE is an rtx that says how long they are.
1309 ALIGN is the maximum alignment we can assume they have.
1310 METHOD describes what kind of copy this is, and what mechanisms may be used.
1312 Return the address of the new block, if memcpy is called and returns it,
1313 0 otherwise. */
1315 rtx
1316 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1318 bool may_use_call;
1319 rtx retval = 0;
1320 unsigned int align;
1322 switch (method)
1324 case BLOCK_OP_NORMAL:
1325 may_use_call = true;
1326 break;
1328 case BLOCK_OP_CALL_PARM:
1329 may_use_call = block_move_libcall_safe_for_call_parm ();
1331 /* Make inhibit_defer_pop nonzero around the library call
1332 to force it to pop the arguments right away. */
1333 NO_DEFER_POP;
1334 break;
1336 case BLOCK_OP_NO_LIBCALL:
1337 may_use_call = false;
1338 break;
1340 default:
1341 abort ();
1344 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1346 if (GET_MODE (x) != BLKmode)
1347 abort ();
1348 if (GET_MODE (y) != BLKmode)
1349 abort ();
1351 x = protect_from_queue (x, 1);
1352 y = protect_from_queue (y, 0);
1353 size = protect_from_queue (size, 0);
1355 if (GET_CODE (x) != MEM)
1356 abort ();
1357 if (GET_CODE (y) != MEM)
1358 abort ();
1359 if (size == 0)
1360 abort ();
1362 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1363 can be incorrect is coming from __builtin_memcpy. */
1364 if (GET_CODE (size) == CONST_INT)
1366 if (INTVAL (size) == 0)
1367 return 0;
1369 x = shallow_copy_rtx (x);
1370 y = shallow_copy_rtx (y);
1371 set_mem_size (x, size);
1372 set_mem_size (y, size);
1375 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1376 move_by_pieces (x, y, INTVAL (size), align, 0);
1377 else if (emit_block_move_via_movstr (x, y, size, align))
1379 else if (may_use_call)
1380 retval = emit_block_move_via_libcall (x, y, size);
1381 else
1382 emit_block_move_via_loop (x, y, size, align);
1384 if (method == BLOCK_OP_CALL_PARM)
1385 OK_DEFER_POP;
1387 return retval;
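/* For example, copying a BLKmode aggregate whose size is taken from its
   tree type (a sketch; TARGET, TEMP and EXP are placeholders):

     emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
*/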
1390 /* A subroutine of emit_block_move. Returns true if calling the
1391 block move libcall will not clobber any parameters which may have
1392 already been placed on the stack. */
1394 static bool
1395 block_move_libcall_safe_for_call_parm (void)
1397 /* If arguments are pushed on the stack, then they're safe. */
1398 if (PUSH_ARGS)
1399 return true;
1401 /* If registers go on the stack anyway, any argument is sure to clobber
1402 an outgoing argument. */
1403 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1405 tree fn = emit_block_move_libcall_fn (false);
1406 (void) fn;
1407 if (REG_PARM_STACK_SPACE (fn) != 0)
1408 return false;
1410 #endif
1412 /* If any argument goes in memory, then it might clobber an outgoing
1413 argument. */
1415 CUMULATIVE_ARGS args_so_far;
1416 tree fn, arg;
1418 fn = emit_block_move_libcall_fn (false);
1419 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1421 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1422 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1424 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1425 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1426 if (!tmp || !REG_P (tmp))
1427 return false;
1428 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1429 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1430 NULL_TREE, 1))
1431 return false;
1432 #endif
1433 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1436 return true;
1439 /* A subroutine of emit_block_move. Expand a movstr pattern;
1440 return true if successful. */
1442 static bool
1443 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1445 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1446 enum machine_mode mode;
1448 /* Since this is a move insn, we don't care about volatility. */
1449 volatile_ok = 1;
1451 /* Try the most limited insn first, because there's no point
1452 including more than one in the machine description unless
1453 the more limited one has some advantage. */
1455 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1456 mode = GET_MODE_WIDER_MODE (mode))
1458 enum insn_code code = movstr_optab[(int) mode];
1459 insn_operand_predicate_fn pred;
1461 if (code != CODE_FOR_nothing
1462 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1463 here because if SIZE is less than the mode mask, as it is
1464 returned by the macro, it will definitely be less than the
1465 actual mode mask. */
1466 && ((GET_CODE (size) == CONST_INT
1467 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1468 <= (GET_MODE_MASK (mode) >> 1)))
1469 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1470 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1471 || (*pred) (x, BLKmode))
1472 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1473 || (*pred) (y, BLKmode))
1474 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1475 || (*pred) (opalign, VOIDmode)))
1477 rtx op2;
1478 rtx last = get_last_insn ();
1479 rtx pat;
1481 op2 = convert_to_mode (mode, size, 1);
1482 pred = insn_data[(int) code].operand[2].predicate;
1483 if (pred != 0 && ! (*pred) (op2, mode))
1484 op2 = copy_to_mode_reg (mode, op2);
1486 /* ??? When called via emit_block_move_for_call, it'd be
1487 nice if there were some way to inform the backend, so
1488 that it doesn't fail the expansion because it thinks
1489 emitting the libcall would be more efficient. */
1491 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1492 if (pat)
1494 emit_insn (pat);
1495 volatile_ok = 0;
1496 return true;
1498 else
1499 delete_insns_since (last);
1503 volatile_ok = 0;
1504 return false;
1507 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1508 Return the return value from memcpy, 0 otherwise. */
1510 static rtx
1511 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1513 rtx dst_addr, src_addr;
1514 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1515 enum machine_mode size_mode;
1516 rtx retval;
1518 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1520 It is unsafe to save the value generated by protect_from_queue and reuse
1521 it later. Consider what happens if emit_queue is called before the
1522 return value from protect_from_queue is used.
1524 Expansion of the CALL_EXPR below will call emit_queue before we are
1525 finished emitting RTL for argument setup. So if we are not careful we
1526 could get the wrong value for an argument.
1528 To avoid this problem we go ahead and emit code to copy the addresses of
1529 DST and SRC and SIZE into new pseudos. We can then place those new
1530 pseudos into an RTL_EXPR and use them later, even after a call to
1531 emit_queue.
1533 Note this is not strictly needed for library calls since they do not call
1534 emit_queue before loading their arguments. However, we may need to have
1535 library calls call emit_queue in the future since failing to do so could
1536 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1537 arguments in registers. */
1539 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1540 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1542 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1543 src_addr = convert_memory_address (ptr_mode, src_addr);
1545 dst_tree = make_tree (ptr_type_node, dst_addr);
1546 src_tree = make_tree (ptr_type_node, src_addr);
1548 if (TARGET_MEM_FUNCTIONS)
1549 size_mode = TYPE_MODE (sizetype);
1550 else
1551 size_mode = TYPE_MODE (unsigned_type_node);
1553 size = convert_to_mode (size_mode, size, 1);
1554 size = copy_to_mode_reg (size_mode, size);
1556 /* It is incorrect to use the libcall calling conventions to call
1557 memcpy in this context. This could be a user call to memcpy and
1558 the user may wish to examine the return value from memcpy. For
1559 targets where libcalls and normal calls have different conventions
1560 for returning pointers, we could end up generating incorrect code.
1562 For convenience, we generate the call to bcopy this way as well. */
1564 if (TARGET_MEM_FUNCTIONS)
1565 size_tree = make_tree (sizetype, size);
1566 else
1567 size_tree = make_tree (unsigned_type_node, size);
1569 fn = emit_block_move_libcall_fn (true);
1570 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1571 if (TARGET_MEM_FUNCTIONS)
1573 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1574 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1576 else
1578 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1579 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1582 /* Now we have to build up the CALL_EXPR itself. */
1583 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1584 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1585 call_expr, arg_list, NULL_TREE);
1587 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1589 /* If we are initializing a readonly value, show the above call clobbered
1590 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1591 the delay slot scheduler might overlook conflicts and take nasty
1592 decisions. */
1593 if (RTX_UNCHANGING_P (dst))
1594 add_function_usage_to
1595 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1596 gen_rtx_CLOBBER (VOIDmode, dst),
1597 NULL_RTX));
1599 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1602 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1603 for the function we use for block copies. The first time FOR_CALL
1604 is true, we call assemble_external. */
1606 static GTY(()) tree block_move_fn;
1608 void
1609 init_block_move_fn (const char *asmspec)
1611 if (!block_move_fn)
1613 tree args, fn;
1615 if (TARGET_MEM_FUNCTIONS)
1617 fn = get_identifier ("memcpy");
1618 args = build_function_type_list (ptr_type_node, ptr_type_node,
1619 const_ptr_type_node, sizetype,
1620 NULL_TREE);
1622 else
1624 fn = get_identifier ("bcopy");
1625 args = build_function_type_list (void_type_node, const_ptr_type_node,
1626 ptr_type_node, unsigned_type_node,
1627 NULL_TREE);
1630 fn = build_decl (FUNCTION_DECL, fn, args);
1631 DECL_EXTERNAL (fn) = 1;
1632 TREE_PUBLIC (fn) = 1;
1633 DECL_ARTIFICIAL (fn) = 1;
1634 TREE_NOTHROW (fn) = 1;
1636 block_move_fn = fn;
1639 if (asmspec)
1641 SET_DECL_RTL (block_move_fn, NULL_RTX);
1642 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1646 static tree
1647 emit_block_move_libcall_fn (int for_call)
1649 static bool emitted_extern;
1651 if (!block_move_fn)
1652 init_block_move_fn (NULL);
1654 if (for_call && !emitted_extern)
1656 emitted_extern = true;
1657 make_decl_rtl (block_move_fn, NULL);
1658 assemble_external (block_move_fn);
1661 return block_move_fn;
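/* A target or front end that wants the block-move libcall to resolve to a
   different assembler name can say so once at initialization (a sketch;
   the name used here is purely illustrative):

     init_block_move_fn ("__copy_block");
*/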
1664 /* A subroutine of emit_block_move. Copy the data via an explicit
1665 loop. This is used only when libcalls are forbidden. */
1666 /* ??? It'd be nice to copy in hunks larger than QImode. */
1668 static void
1669 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1670 unsigned int align ATTRIBUTE_UNUSED)
1672 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1673 enum machine_mode iter_mode;
1675 iter_mode = GET_MODE (size);
1676 if (iter_mode == VOIDmode)
1677 iter_mode = word_mode;
1679 top_label = gen_label_rtx ();
1680 cmp_label = gen_label_rtx ();
1681 iter = gen_reg_rtx (iter_mode);
1683 emit_move_insn (iter, const0_rtx);
1685 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1686 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1687 do_pending_stack_adjust ();
1689 emit_note (NOTE_INSN_LOOP_BEG);
1691 emit_jump (cmp_label);
1692 emit_label (top_label);
1694 tmp = convert_modes (Pmode, iter_mode, iter, true);
1695 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1696 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1697 x = change_address (x, QImode, x_addr);
1698 y = change_address (y, QImode, y_addr);
1700 emit_move_insn (x, y);
1702 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1703 true, OPTAB_LIB_WIDEN);
1704 if (tmp != iter)
1705 emit_move_insn (iter, tmp);
1707 emit_note (NOTE_INSN_LOOP_CONT);
1708 emit_label (cmp_label);
1710 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1711 true, top_label);
1713 emit_note (NOTE_INSN_LOOP_END);
1716 /* Copy all or part of a value X into registers starting at REGNO.
1717 The number of registers to be filled is NREGS. */
1719 void
1720 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1722 int i;
1723 #ifdef HAVE_load_multiple
1724 rtx pat;
1725 rtx last;
1726 #endif
1728 if (nregs == 0)
1729 return;
1731 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1732 x = validize_mem (force_const_mem (mode, x));
1734 /* See if the machine can do this with a load multiple insn. */
1735 #ifdef HAVE_load_multiple
1736 if (HAVE_load_multiple)
1738 last = get_last_insn ();
1739 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1740 GEN_INT (nregs));
1741 if (pat)
1743 emit_insn (pat);
1744 return;
1746 else
1747 delete_insns_since (last);
1749 #endif
1751 for (i = 0; i < nregs; i++)
1752 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1753 operand_subword_force (x, i, mode));
1756 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1757 The number of registers to be filled is NREGS. */
1759 void
1760 move_block_from_reg (int regno, rtx x, int nregs)
1762 int i;
1764 if (nregs == 0)
1765 return;
1767 /* See if the machine can do this with a store multiple insn. */
1768 #ifdef HAVE_store_multiple
1769 if (HAVE_store_multiple)
1771 rtx last = get_last_insn ();
1772 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1773 GEN_INT (nregs));
1774 if (pat)
1776 emit_insn (pat);
1777 return;
1779 else
1780 delete_insns_since (last);
1782 #endif
1784 for (i = 0; i < nregs; i++)
1786 rtx tem = operand_subword (x, i, 1, BLKmode);
1788 if (tem == 0)
1789 abort ();
1791 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1795 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1796 ORIG, where ORIG is a non-consecutive group of registers represented by
1797 a PARALLEL. The clone is identical to the original except in that the
1798 original set of registers is replaced by a new set of pseudo registers.
1799 The new set has the same modes as the original set. */
1801 rtx
1802 gen_group_rtx (rtx orig)
1804 int i, length;
1805 rtx *tmps;
1807 if (GET_CODE (orig) != PARALLEL)
1808 abort ();
1810 length = XVECLEN (orig, 0);
1811 tmps = alloca (sizeof (rtx) * length);
1813 /* Skip a NULL entry in first slot. */
1814 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1816 if (i)
1817 tmps[0] = 0;
1819 for (; i < length; i++)
1821 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1822 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1824 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1827 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1830 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1831 where DST is non-consecutive registers represented by a PARALLEL.
1832 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1833 if not known. */
1835 void
1836 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1838 rtx *tmps, src;
1839 int start, i;
1841 if (GET_CODE (dst) != PARALLEL)
1842 abort ();
1844 /* Check for a NULL entry, used to indicate that the parameter goes
1845 both on the stack and in registers. */
1846 if (XEXP (XVECEXP (dst, 0, 0), 0))
1847 start = 0;
1848 else
1849 start = 1;
1851 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1853 /* Process the pieces. */
1854 for (i = start; i < XVECLEN (dst, 0); i++)
1856 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1857 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1858 unsigned int bytelen = GET_MODE_SIZE (mode);
1859 int shift = 0;
1861 /* Handle trailing fragments that run over the size of the struct. */
1862 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1864 /* Arrange to shift the fragment to where it belongs.
1865 extract_bit_field loads to the lsb of the reg. */
1866 if (
1867 #ifdef BLOCK_REG_PADDING
1868 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1869 == (BYTES_BIG_ENDIAN ? upward : downward)
1870 #else
1871 BYTES_BIG_ENDIAN
1872 #endif
1874 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1875 bytelen = ssize - bytepos;
1876 if (bytelen <= 0)
1877 abort ();
1880 /* If we won't be loading directly from memory, protect the real source
1881 from strange tricks we might play; but make sure that the source can
1882 be loaded directly into the destination. */
1883 src = orig_src;
1884 if (GET_CODE (orig_src) != MEM
1885 && (!CONSTANT_P (orig_src)
1886 || (GET_MODE (orig_src) != mode
1887 && GET_MODE (orig_src) != VOIDmode)))
1889 if (GET_MODE (orig_src) == VOIDmode)
1890 src = gen_reg_rtx (mode);
1891 else
1892 src = gen_reg_rtx (GET_MODE (orig_src));
1894 emit_move_insn (src, orig_src);
1897 /* Optimize the access just a bit. */
1898 if (GET_CODE (src) == MEM
1899 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1900 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1901 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1902 && bytelen == GET_MODE_SIZE (mode))
1904 tmps[i] = gen_reg_rtx (mode);
1905 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1907 else if (GET_CODE (src) == CONCAT)
1909 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1910 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1912 if ((bytepos == 0 && bytelen == slen0)
1913 || (bytepos != 0 && bytepos + bytelen <= slen))
1915 /* The following assumes that the concatenated objects all
1916 have the same size. In this case, a simple calculation
1917 can be used to determine the object and the bit field
1918 to be extracted. */
1919 tmps[i] = XEXP (src, bytepos / slen0);
1920 if (! CONSTANT_P (tmps[i])
1921 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1922 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1923 (bytepos % slen0) * BITS_PER_UNIT,
1924 1, NULL_RTX, mode, mode, ssize);
1926 else if (bytepos == 0)
1928 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1929 emit_move_insn (mem, src);
1930 tmps[i] = adjust_address (mem, mode, 0);
1932 else
1933 abort ();
1935 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1936 SIMD register, which is currently broken. Until GCC emits
1937 proper RTL for these cases, dump to memory instead. */
1938 else if (VECTOR_MODE_P (GET_MODE (dst))
1939 && GET_CODE (src) == REG)
1941 int slen = GET_MODE_SIZE (GET_MODE (src));
1942 rtx mem;
1944 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1945 emit_move_insn (mem, src);
1946 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1948 else if (CONSTANT_P (src)
1949 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1950 tmps[i] = src;
1951 else
1952 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1953 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1954 mode, mode, ssize);
1956 if (shift)
1957 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1958 tmps[i], 0, OPTAB_WIDEN);
1961 emit_queue ();
1963 /* Copy the extracted pieces into the proper (probable) hard regs. */
1964 for (i = start; i < XVECLEN (dst, 0); i++)
1965 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1968 /* Emit code to move a block SRC to block DST, where SRC and DST are
1969 non-consecutive groups of registers, each represented by a PARALLEL. */
1971 void
1972 emit_group_move (rtx dst, rtx src)
1974 int i;
1976 if (GET_CODE (src) != PARALLEL
1977 || GET_CODE (dst) != PARALLEL
1978 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1979 abort ();
1981 /* Skip first entry if NULL. */
1982 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1983 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1984 XEXP (XVECEXP (src, 0, i), 0));
1987 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1988 where SRC is non-consecutive registers represented by a PARALLEL.
1989 SSIZE represents the total size of block ORIG_DST, or -1 if not
1990 known. */
1992 void
1993 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1995 rtx *tmps, dst;
1996 int start, i;
1998 if (GET_CODE (src) != PARALLEL)
1999 abort ();
2001 /* Check for a NULL entry, used to indicate that the parameter goes
2002 both on the stack and in registers. */
2003 if (XEXP (XVECEXP (src, 0, 0), 0))
2004 start = 0;
2005 else
2006 start = 1;
2008 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2010 /* Copy the (probable) hard regs into pseudos. */
2011 for (i = start; i < XVECLEN (src, 0); i++)
2013 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2014 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2015 emit_move_insn (tmps[i], reg);
2017 emit_queue ();
2019 /* If we won't be storing directly into memory, protect the real destination
2020 from strange tricks we might play. */
2021 dst = orig_dst;
2022 if (GET_CODE (dst) == PARALLEL)
2024 rtx temp;
2026 /* We can get a PARALLEL dst if there is a conditional expression in
2027 a return statement. In that case, the dst and src are the same,
2028 so no action is necessary. */
2029 if (rtx_equal_p (dst, src))
2030 return;
2032 /* It is unclear if we can ever reach here, but we may as well handle
2033 it. Allocate a temporary, and split this into a store/load to/from
2034 the temporary. */
2036 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2037 emit_group_store (temp, src, type, ssize);
2038 emit_group_load (dst, temp, type, ssize);
2039 return;
2041 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2043 dst = gen_reg_rtx (GET_MODE (orig_dst));
2044 /* Make life a bit easier for combine. */
2045 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2048 /* Process the pieces. */
2049 for (i = start; i < XVECLEN (src, 0); i++)
2051 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2052 enum machine_mode mode = GET_MODE (tmps[i]);
2053 unsigned int bytelen = GET_MODE_SIZE (mode);
2054 rtx dest = dst;
2056 /* Handle trailing fragments that run over the size of the struct. */
2057 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2059 /* store_bit_field always takes its value from the lsb.
2060 Move the fragment to the lsb if it's not already there. */
2061 if (
2062 #ifdef BLOCK_REG_PADDING
2063 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2064 == (BYTES_BIG_ENDIAN ? upward : downward)
2065 #else
2066 BYTES_BIG_ENDIAN
2067 #endif
2070 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2071 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2072 tmps[i], 0, OPTAB_WIDEN);
2074 bytelen = ssize - bytepos;
2077 if (GET_CODE (dst) == CONCAT)
2079 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2080 dest = XEXP (dst, 0);
2081 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2083 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2084 dest = XEXP (dst, 1);
2086 else if (bytepos == 0 && XVECLEN (src, 0))
2088 dest = assign_stack_temp (GET_MODE (dest),
2089 GET_MODE_SIZE (GET_MODE (dest)), 0);
2090 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2091 tmps[i]);
2092 dst = dest;
2093 break;
2095 else
2096 abort ();
2099 /* Optimize the access just a bit. */
2100 if (GET_CODE (dest) == MEM
2101 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2102 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2103 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2104 && bytelen == GET_MODE_SIZE (mode))
2105 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2106 else
2107 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2108 mode, tmps[i], ssize);
2111 emit_queue ();
2113 /* Copy from the pseudo into the (probable) hard reg. */
2114 if (orig_dst != dst)
2115 emit_move_insn (orig_dst, dst);
2118 /* Generate code to copy a BLKmode object of TYPE out of a
2119 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2120 is null, a stack temporary is created. TGTBLK is returned.
2122 The primary purpose of this routine is to handle functions
2123 that return BLKmode structures in registers. Some machines
2124 (the PA for example) want to return all small structures
2125 in registers regardless of the structure's alignment. */
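/* Hedged usage sketch (hypothetical caller, not from this file): after
   expanding a call whose small structure return value arrives in a
   register, the expander can do

       dst = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   and gets back a stack temporary holding the bytes of the structure.  */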
2128 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2130 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2131 rtx src = NULL, dst = NULL;
2132 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2133 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2135 if (tgtblk == 0)
2137 tgtblk = assign_temp (build_qualified_type (type,
2138 (TYPE_QUALS (type)
2139 | TYPE_QUAL_CONST)),
2140 0, 1, 1);
2141 preserve_temp_slots (tgtblk);
2144 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2145 into a new pseudo which is a full word. */
2147 if (GET_MODE (srcreg) != BLKmode
2148 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2149 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2151 /* Structures whose size is not a multiple of a word are aligned
2152 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2153 machine, this means we must skip the empty high order bytes when
2154 calculating the bit offset. */
2155 if (BYTES_BIG_ENDIAN
2156 && bytes % UNITS_PER_WORD)
2157 big_endian_correction
2158 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
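/* Worked example (assumed 32-bit words, purely illustrative): for a
   6-byte structure, bytes % UNITS_PER_WORD == 2, so the correction is
   32 - 2 * 8 == 16 bits, and the first extraction below starts 16 bits
   into the most significant word.  */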
2160 /* Copy the structure BITSIZE bits at a time.
2162 We could probably emit more efficient code for machines which do not use
2163 strict alignment, but it doesn't seem worth the effort at the current
2164 time. */
2165 for (bitpos = 0, xbitpos = big_endian_correction;
2166 bitpos < bytes * BITS_PER_UNIT;
2167 bitpos += bitsize, xbitpos += bitsize)
2169 /* We need a new source operand each time xbitpos is on a
2170 word boundary and when xbitpos == big_endian_correction
2171 (the first time through). */
2172 if (xbitpos % BITS_PER_WORD == 0
2173 || xbitpos == big_endian_correction)
2174 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2175 GET_MODE (srcreg));
2177 /* We need a new destination operand each time bitpos is on
2178 a word boundary. */
2179 if (bitpos % BITS_PER_WORD == 0)
2180 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2182 /* Use xbitpos for the source extraction (right justified) and
2183 bitpos for the destination store (left justified). */
2184 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2185 extract_bit_field (src, bitsize,
2186 xbitpos % BITS_PER_WORD, 1,
2187 NULL_RTX, word_mode, word_mode,
2188 BITS_PER_WORD),
2189 BITS_PER_WORD);
2192 return tgtblk;
2195 /* Add a USE expression for REG to the (possibly empty) list pointed
2196 to by CALL_FUSAGE. REG must denote a hard register. */
2198 void
2199 use_reg (rtx *call_fusage, rtx reg)
2201 if (GET_CODE (reg) != REG
2202 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2203 abort ();
2205 *call_fusage
2206 = gen_rtx_EXPR_LIST (VOIDmode,
2207 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2210 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2211 starting at REGNO. All of these registers must be hard registers. */
2213 void
2214 use_regs (rtx *call_fusage, int regno, int nregs)
2216 int i;
2218 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2219 abort ();
2221 for (i = 0; i < nregs; i++)
2222 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2225 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2226 PARALLEL REGS. This is for calls that pass values in multiple
2227 non-contiguous locations. The Irix 6 ABI has examples of this. */
2229 void
2230 use_group_regs (rtx *call_fusage, rtx regs)
2232 int i;
2234 for (i = 0; i < XVECLEN (regs, 0); i++)
2236 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2238 /* A NULL entry means the parameter goes both on the stack and in
2239 registers. This can also be a MEM for targets that pass values
2240 partially on the stack and partially in registers. */
2241 if (reg != 0 && GET_CODE (reg) == REG)
2242 use_reg (call_fusage, reg);
2247 /* Determine whether the LEN bytes generated by CONSTFUN can be
2248 stored to memory using several move instructions. CONSTFUNDATA is
2249 a pointer which will be passed as argument in every CONSTFUN call.
2250 ALIGN is maximum alignment we can assume. Return nonzero if a
2251 call to store_by_pieces should succeed. */
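/* Hedged sketch (hypothetical callback, not part of this file): a CONSTFUN
   typically reads host-side bytes and packages MODE's worth of them as an
   rtx constant, for example

       static rtx
       example_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
       {
         return c_readstr ((const char *) data + offset, mode);
       }

   assuming the c_readstr helper from builtins.c; the string built-ins use
   callbacks of this shape.  */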
2254 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2255 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2256 void *constfundata, unsigned int align)
2258 unsigned HOST_WIDE_INT max_size, l;
2259 HOST_WIDE_INT offset = 0;
2260 enum machine_mode mode, tmode;
2261 enum insn_code icode;
2262 int reverse;
2263 rtx cst;
2265 if (len == 0)
2266 return 1;
2268 if (! STORE_BY_PIECES_P (len, align))
2269 return 0;
2271 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2272 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2273 align = MOVE_MAX * BITS_PER_UNIT;
2275 /* We would first store what we can in the largest integer mode, then go to
2276 successively smaller modes. */
2278 for (reverse = 0;
2279 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2280 reverse++)
2282 l = len;
2283 mode = VOIDmode;
2284 max_size = STORE_MAX_PIECES + 1;
2285 while (max_size > 1)
2287 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2288 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2289 if (GET_MODE_SIZE (tmode) < max_size)
2290 mode = tmode;
2292 if (mode == VOIDmode)
2293 break;
2295 icode = mov_optab->handlers[(int) mode].insn_code;
2296 if (icode != CODE_FOR_nothing
2297 && align >= GET_MODE_ALIGNMENT (mode))
2299 unsigned int size = GET_MODE_SIZE (mode);
2301 while (l >= size)
2303 if (reverse)
2304 offset -= size;
2306 cst = (*constfun) (constfundata, offset, mode);
2307 if (!LEGITIMATE_CONSTANT_P (cst))
2308 return 0;
2310 if (!reverse)
2311 offset += size;
2313 l -= size;
2317 max_size = GET_MODE_SIZE (mode);
2320 /* The code above should have handled everything. */
2321 if (l != 0)
2322 abort ();
2325 return 1;
2328 /* Generate several move instructions to store LEN bytes generated by
2329 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2330 pointer which will be passed as argument in every CONSTFUN call.
2331 ALIGN is maximum alignment we can assume.
2332 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2333 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2334 stpcpy. */
2337 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2338 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2339 void *constfundata, unsigned int align, int endp)
2341 struct store_by_pieces data;
2343 if (len == 0)
2345 if (endp == 2)
2346 abort ();
2347 return to;
2350 if (! STORE_BY_PIECES_P (len, align))
2351 abort ();
2352 to = protect_from_queue (to, 1);
2353 data.constfun = constfun;
2354 data.constfundata = constfundata;
2355 data.len = len;
2356 data.to = to;
2357 store_by_pieces_1 (&data, align);
2358 if (endp)
2360 rtx to1;
2362 if (data.reverse)
2363 abort ();
2364 if (data.autinc_to)
2366 if (endp == 2)
2368 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2369 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2370 else
2371 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2372 -1));
2374 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2375 data.offset);
2377 else
2379 if (endp == 2)
2380 --data.offset;
2381 to1 = adjust_address (data.to, QImode, data.offset);
2383 return to1;
2385 else
2386 return data.to;
2389 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2390 rtx with BLKmode). The caller must pass TO through protect_from_queue
2391 before calling. ALIGN is maximum alignment we can assume. */
2393 static void
2394 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2396 struct store_by_pieces data;
2398 if (len == 0)
2399 return;
2401 data.constfun = clear_by_pieces_1;
2402 data.constfundata = NULL;
2403 data.len = len;
2404 data.to = to;
2405 store_by_pieces_1 (&data, align);
2408 /* Callback routine for clear_by_pieces.
2409 Return const0_rtx unconditionally. */
2411 static rtx
2412 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2413 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2414 enum machine_mode mode ATTRIBUTE_UNUSED)
2416 return const0_rtx;
2419 /* Subroutine of clear_by_pieces and store_by_pieces.
2420 Generate several move instructions to store LEN bytes of block TO. (A MEM
2421 rtx with BLKmode). The caller must pass TO through protect_from_queue
2422 before calling. ALIGN is maximum alignment we can assume. */
2424 static void
2425 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2426 unsigned int align ATTRIBUTE_UNUSED)
2428 rtx to_addr = XEXP (data->to, 0);
2429 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2430 enum machine_mode mode = VOIDmode, tmode;
2431 enum insn_code icode;
2433 data->offset = 0;
2434 data->to_addr = to_addr;
2435 data->autinc_to
2436 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2437 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2439 data->explicit_inc_to = 0;
2440 data->reverse
2441 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2442 if (data->reverse)
2443 data->offset = data->len;
2445 /* If storing requires more than two move insns,
2446 copy addresses to registers (to make displacements shorter)
2447 and use post-increment if available. */
2448 if (!data->autinc_to
2449 && move_by_pieces_ninsns (data->len, align) > 2)
2451 /* Determine the main mode we'll be using. */
2452 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2453 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2454 if (GET_MODE_SIZE (tmode) < max_size)
2455 mode = tmode;
2457 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2459 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2460 data->autinc_to = 1;
2461 data->explicit_inc_to = -1;
2464 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2465 && ! data->autinc_to)
2467 data->to_addr = copy_addr_to_reg (to_addr);
2468 data->autinc_to = 1;
2469 data->explicit_inc_to = 1;
2472 if ( !data->autinc_to && CONSTANT_P (to_addr))
2473 data->to_addr = copy_addr_to_reg (to_addr);
2476 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2477 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2478 align = MOVE_MAX * BITS_PER_UNIT;
2480 /* First store what we can in the largest integer mode, then go to
2481 successively smaller modes. */
2483 while (max_size > 1)
2485 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2486 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2487 if (GET_MODE_SIZE (tmode) < max_size)
2488 mode = tmode;
2490 if (mode == VOIDmode)
2491 break;
2493 icode = mov_optab->handlers[(int) mode].insn_code;
2494 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2495 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2497 max_size = GET_MODE_SIZE (mode);
2500 /* The code above should have handled everything. */
2501 if (data->len != 0)
2502 abort ();
2505 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2506 with move instructions for mode MODE. GENFUN is the gen_... function
2507 to make a move insn for that mode. DATA has all the other info. */
2509 static void
2510 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2511 struct store_by_pieces *data)
2513 unsigned int size = GET_MODE_SIZE (mode);
2514 rtx to1, cst;
2516 while (data->len >= size)
2518 if (data->reverse)
2519 data->offset -= size;
2521 if (data->autinc_to)
2522 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2523 data->offset);
2524 else
2525 to1 = adjust_address (data->to, mode, data->offset);
2527 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2528 emit_insn (gen_add2_insn (data->to_addr,
2529 GEN_INT (-(HOST_WIDE_INT) size)));
2531 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2532 emit_insn ((*genfun) (to1, cst));
2534 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2535 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2537 if (! data->reverse)
2538 data->offset += size;
2540 data->len -= size;
2544 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2545 its length in bytes. */
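/* Informal summary of the strategy below (restates the code, no new
   semantics): a non-BLKmode OBJECT whose mode size matches SIZE is
   cleared with a single move of CONST0_RTX; otherwise we try
   clear_by_pieces for small constant sizes, then a clrstr machine
   pattern, and finally fall back to a memset/bzero libcall.  */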
2548 clear_storage (rtx object, rtx size)
2550 rtx retval = 0;
2551 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2552 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2554 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2555 just move a zero. Otherwise, do this a piece at a time. */
2556 if (GET_MODE (object) != BLKmode
2557 && GET_CODE (size) == CONST_INT
2558 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2559 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2560 else
2562 object = protect_from_queue (object, 1);
2563 size = protect_from_queue (size, 0);
2565 if (size == const0_rtx)
2567 else if (GET_CODE (size) == CONST_INT
2568 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2569 clear_by_pieces (object, INTVAL (size), align);
2570 else if (clear_storage_via_clrstr (object, size, align))
2572 else
2573 retval = clear_storage_via_libcall (object, size);
2576 return retval;
2579 /* A subroutine of clear_storage. Expand a clrstr pattern;
2580 return true if successful. */
2582 static bool
2583 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2585 /* Try the most limited insn first, because there's no point
2586 including more than one in the machine description unless
2587 the more limited one has some advantage. */
2589 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2590 enum machine_mode mode;
2592 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2593 mode = GET_MODE_WIDER_MODE (mode))
2595 enum insn_code code = clrstr_optab[(int) mode];
2596 insn_operand_predicate_fn pred;
2598 if (code != CODE_FOR_nothing
2599 /* We don't need MODE to be narrower than
2600 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2601 the mode mask, as it is returned by the macro, it will
2602 definitely be less than the actual mode mask. */
2603 && ((GET_CODE (size) == CONST_INT
2604 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2605 <= (GET_MODE_MASK (mode) >> 1)))
2606 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2607 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2608 || (*pred) (object, BLKmode))
2609 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2610 || (*pred) (opalign, VOIDmode)))
2612 rtx op1;
2613 rtx last = get_last_insn ();
2614 rtx pat;
2616 op1 = convert_to_mode (mode, size, 1);
2617 pred = insn_data[(int) code].operand[1].predicate;
2618 if (pred != 0 && ! (*pred) (op1, mode))
2619 op1 = copy_to_mode_reg (mode, op1);
2621 pat = GEN_FCN ((int) code) (object, op1, opalign);
2622 if (pat)
2624 emit_insn (pat);
2625 return true;
2627 else
2628 delete_insns_since (last);
2632 return false;
2635 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2636 Return the return value of memset, 0 otherwise. */
2638 static rtx
2639 clear_storage_via_libcall (rtx object, rtx size)
2641 tree call_expr, arg_list, fn, object_tree, size_tree;
2642 enum machine_mode size_mode;
2643 rtx retval;
2645 /* OBJECT or SIZE may have been passed through protect_from_queue.
2647 It is unsafe to save the value generated by protect_from_queue
2648 and reuse it later. Consider what happens if emit_queue is
2649 called before the return value from protect_from_queue is used.
2651 Expansion of the CALL_EXPR below will call emit_queue before
2652 we are finished emitting RTL for argument setup. So if we are
2653 not careful we could get the wrong value for an argument.
2655 To avoid this problem we go ahead and emit code to copy OBJECT
2656 and SIZE into new pseudos. We can then place those new pseudos
2657 into an RTL_EXPR and use them later, even after a call to
2658 emit_queue.
2660 Note this is not strictly needed for library calls since they
2661 do not call emit_queue before loading their arguments. However,
2662 we may need to have library calls call emit_queue in the future
2663 since failing to do so could cause problems for targets which
2664 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2666 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2668 if (TARGET_MEM_FUNCTIONS)
2669 size_mode = TYPE_MODE (sizetype);
2670 else
2671 size_mode = TYPE_MODE (unsigned_type_node);
2672 size = convert_to_mode (size_mode, size, 1);
2673 size = copy_to_mode_reg (size_mode, size);
2675 /* It is incorrect to use the libcall calling conventions to call
2676 memset in this context. This could be a user call to memset and
2677 the user may wish to examine the return value from memset. For
2678 targets where libcalls and normal calls have different conventions
2679 for returning pointers, we could end up generating incorrect code.
2681 For convenience, we generate the call to bzero this way as well. */
2683 object_tree = make_tree (ptr_type_node, object);
2684 if (TARGET_MEM_FUNCTIONS)
2685 size_tree = make_tree (sizetype, size);
2686 else
2687 size_tree = make_tree (unsigned_type_node, size);
2689 fn = clear_storage_libcall_fn (true);
2690 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2691 if (TARGET_MEM_FUNCTIONS)
2692 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2693 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2695 /* Now we have to build up the CALL_EXPR itself. */
2696 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2697 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2698 call_expr, arg_list, NULL_TREE);
2700 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2702 /* If we are initializing a readonly value, show the above call
2703 clobbered it. Otherwise, a load from it may erroneously be
2704 hoisted from a loop. */
2705 if (RTX_UNCHANGING_P (object))
2706 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2708 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2711 /* A subroutine of clear_storage_via_libcall. Create the tree node
2712 for the function we use for block clears. The first time FOR_CALL
2713 is true, we call assemble_external. */
2715 static GTY(()) tree block_clear_fn;
2717 void
2718 init_block_clear_fn (const char *asmspec)
2720 if (!block_clear_fn)
2722 tree fn, args;
2724 if (TARGET_MEM_FUNCTIONS)
2726 fn = get_identifier ("memset");
2727 args = build_function_type_list (ptr_type_node, ptr_type_node,
2728 integer_type_node, sizetype,
2729 NULL_TREE);
2731 else
2733 fn = get_identifier ("bzero");
2734 args = build_function_type_list (void_type_node, ptr_type_node,
2735 unsigned_type_node, NULL_TREE);
2738 fn = build_decl (FUNCTION_DECL, fn, args);
2739 DECL_EXTERNAL (fn) = 1;
2740 TREE_PUBLIC (fn) = 1;
2741 DECL_ARTIFICIAL (fn) = 1;
2742 TREE_NOTHROW (fn) = 1;
2744 block_clear_fn = fn;
2747 if (asmspec)
2749 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2750 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2754 static tree
2755 clear_storage_libcall_fn (int for_call)
2757 static bool emitted_extern;
2759 if (!block_clear_fn)
2760 init_block_clear_fn (NULL);
2762 if (for_call && !emitted_extern)
2764 emitted_extern = true;
2765 make_decl_rtl (block_clear_fn, NULL);
2766 assemble_external (block_clear_fn);
2769 return block_clear_fn;
2772 /* Generate code to copy Y into X.
2773 Both Y and X must have the same mode, except that
2774 Y can be a constant with VOIDmode.
2775 This mode cannot be BLKmode; use emit_block_move for that.
2777 Return the last instruction emitted. */
2780 emit_move_insn (rtx x, rtx y)
2782 enum machine_mode mode = GET_MODE (x);
2783 rtx y_cst = NULL_RTX;
2784 rtx last_insn, set;
2786 x = protect_from_queue (x, 1);
2787 y = protect_from_queue (y, 0);
2789 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2790 abort ();
2792 /* Never force constant_p_rtx to memory. */
2793 if (GET_CODE (y) == CONSTANT_P_RTX)
2795 else if (CONSTANT_P (y))
2797 if (optimize
2798 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2799 && (last_insn = compress_float_constant (x, y)))
2800 return last_insn;
2802 y_cst = y;
2804 if (!LEGITIMATE_CONSTANT_P (y))
2806 y = force_const_mem (mode, y);
2808 /* If the target's cannot_force_const_mem prevented the spill,
2809 assume that the target's move expanders will also take care
2810 of the non-legitimate constant. */
2811 if (!y)
2812 y = y_cst;
2816 /* If X or Y are memory references, verify that their addresses are valid
2817 for the machine. */
2818 if (GET_CODE (x) == MEM
2819 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2820 && ! push_operand (x, GET_MODE (x)))
2821 || (flag_force_addr
2822 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2823 x = validize_mem (x);
2825 if (GET_CODE (y) == MEM
2826 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2827 || (flag_force_addr
2828 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2829 y = validize_mem (y);
2831 if (mode == BLKmode)
2832 abort ();
2834 last_insn = emit_move_insn_1 (x, y);
2836 if (y_cst && GET_CODE (x) == REG
2837 && (set = single_set (last_insn)) != NULL_RTX
2838 && SET_DEST (set) == x
2839 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2840 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2842 return last_insn;
2845 /* Low level part of emit_move_insn.
2846 Called just like emit_move_insn, but assumes X and Y
2847 are basically valid. */
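/* Informal road map (derived from the code below, no new behavior):
   1. use the mov pattern for MODE if the target provides one;
   2. split complex modes into real and imaginary parts;
   3. move MODE_CC values through CCmode or an integer mode of equal width;
   4. try the integer mode of the same size for other odd modes;
   5. fall back to a word-by-word copy for multi-word modes.  */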
2850 emit_move_insn_1 (rtx x, rtx y)
2852 enum machine_mode mode = GET_MODE (x);
2853 enum machine_mode submode;
2854 enum mode_class class = GET_MODE_CLASS (mode);
2856 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2857 abort ();
2859 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2860 return
2861 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2863 /* Expand complex moves by moving real part and imag part, if possible. */
2864 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2865 && BLKmode != (submode = GET_MODE_INNER (mode))
2866 && (mov_optab->handlers[(int) submode].insn_code
2867 != CODE_FOR_nothing))
2869 /* Don't split destination if it is a stack push. */
2870 int stack = push_operand (x, GET_MODE (x));
2872 #ifdef PUSH_ROUNDING
2873 /* If we are pushing to the stack but the size is not one the
2874 machine can push exactly, we must use move instructions. */
2875 if (stack
2876 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2877 != GET_MODE_SIZE (submode)))
2879 rtx temp;
2880 HOST_WIDE_INT offset1, offset2;
2882 /* Do not use anti_adjust_stack, since we don't want to update
2883 stack_pointer_delta. */
2884 temp = expand_binop (Pmode,
2885 #ifdef STACK_GROWS_DOWNWARD
2886 sub_optab,
2887 #else
2888 add_optab,
2889 #endif
2890 stack_pointer_rtx,
2891 GEN_INT
2892 (PUSH_ROUNDING
2893 (GET_MODE_SIZE (GET_MODE (x)))),
2894 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2896 if (temp != stack_pointer_rtx)
2897 emit_move_insn (stack_pointer_rtx, temp);
2899 #ifdef STACK_GROWS_DOWNWARD
2900 offset1 = 0;
2901 offset2 = GET_MODE_SIZE (submode);
2902 #else
2903 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2904 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2905 + GET_MODE_SIZE (submode));
2906 #endif
2908 emit_move_insn (change_address (x, submode,
2909 gen_rtx_PLUS (Pmode,
2910 stack_pointer_rtx,
2911 GEN_INT (offset1))),
2912 gen_realpart (submode, y));
2913 emit_move_insn (change_address (x, submode,
2914 gen_rtx_PLUS (Pmode,
2915 stack_pointer_rtx,
2916 GEN_INT (offset2))),
2917 gen_imagpart (submode, y));
2919 else
2920 #endif
2921 /* If this is a stack push, push the highpart first, so it
2922 will be in the argument order.
2924 In that case, change_address is used only to convert
2925 the mode, not to change the address. */
2926 if (stack)
2928 /* Note that the real part always precedes the imag part in memory
2929 regardless of the machine's endianness. */
2930 #ifdef STACK_GROWS_DOWNWARD
2931 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2932 gen_imagpart (submode, y));
2933 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2934 gen_realpart (submode, y));
2935 #else
2936 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2937 gen_realpart (submode, y));
2938 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2939 gen_imagpart (submode, y));
2940 #endif
2942 else
2944 rtx realpart_x, realpart_y;
2945 rtx imagpart_x, imagpart_y;
2947 /* If this is a complex value with each part being smaller than a
2948 word, the usual calling sequence will likely pack the pieces into
2949 a single register. Unfortunately, SUBREG of hard registers only
2950 deals in terms of words, so we have a problem converting input
2951 arguments to the CONCAT of two registers that is used elsewhere
2952 for complex values. If this is before reload, we can copy it into
2953 memory and reload. FIXME, we should see about using extract and
2954 insert on integer registers, but complex short and complex char
2955 variables should be rarely used. */
2956 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2957 && (reload_in_progress | reload_completed) == 0)
2959 int packed_dest_p
2960 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2961 int packed_src_p
2962 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2964 if (packed_dest_p || packed_src_p)
2966 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2967 ? MODE_FLOAT : MODE_INT);
2969 enum machine_mode reg_mode
2970 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2972 if (reg_mode != BLKmode)
2974 rtx mem = assign_stack_temp (reg_mode,
2975 GET_MODE_SIZE (mode), 0);
2976 rtx cmem = adjust_address (mem, mode, 0);
2978 cfun->cannot_inline
2979 = N_("function using short complex types cannot be inline");
2981 if (packed_dest_p)
2983 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2985 emit_move_insn_1 (cmem, y);
2986 return emit_move_insn_1 (sreg, mem);
2988 else
2990 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2992 emit_move_insn_1 (mem, sreg);
2993 return emit_move_insn_1 (x, cmem);
2999 realpart_x = gen_realpart (submode, x);
3000 realpart_y = gen_realpart (submode, y);
3001 imagpart_x = gen_imagpart (submode, x);
3002 imagpart_y = gen_imagpart (submode, y);
3004 /* Show the output dies here. This is necessary for SUBREGs
3005 of pseudos since we cannot track their lifetimes correctly;
3006 hard regs shouldn't appear here except as return values.
3007 We never want to emit such a clobber after reload. */
3008 if (x != y
3009 && ! (reload_in_progress || reload_completed)
3010 && (GET_CODE (realpart_x) == SUBREG
3011 || GET_CODE (imagpart_x) == SUBREG))
3012 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3014 emit_move_insn (realpart_x, realpart_y);
3015 emit_move_insn (imagpart_x, imagpart_y);
3018 return get_last_insn ();
3021 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3022 find a mode to do it in. If we have a movcc, use it. Otherwise,
3023 find the MODE_INT mode of the same width. */
3024 else if (GET_MODE_CLASS (mode) == MODE_CC
3025 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3027 enum insn_code insn_code;
3028 enum machine_mode tmode = VOIDmode;
3029 rtx x1 = x, y1 = y;
3031 if (mode != CCmode
3032 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3033 tmode = CCmode;
3034 else
3035 for (tmode = QImode; tmode != VOIDmode;
3036 tmode = GET_MODE_WIDER_MODE (tmode))
3037 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3038 break;
3040 if (tmode == VOIDmode)
3041 abort ();
3043 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3044 may call change_address which is not appropriate if we were
3045 called when a reload was in progress. We don't have to worry
3046 about changing the address since the size in bytes is supposed to
3047 be the same. Copy the MEM to change the mode and move any
3048 substitutions from the old MEM to the new one. */
3050 if (reload_in_progress)
3052 x = gen_lowpart_common (tmode, x1);
3053 if (x == 0 && GET_CODE (x1) == MEM)
3055 x = adjust_address_nv (x1, tmode, 0);
3056 copy_replacements (x1, x);
3059 y = gen_lowpart_common (tmode, y1);
3060 if (y == 0 && GET_CODE (y1) == MEM)
3062 y = adjust_address_nv (y1, tmode, 0);
3063 copy_replacements (y1, y);
3066 else
3068 x = gen_lowpart (tmode, x);
3069 y = gen_lowpart (tmode, y);
3072 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3073 return emit_insn (GEN_FCN (insn_code) (x, y));
3076 /* Try using a move pattern for the corresponding integer mode. This is
3077 only safe when simplify_subreg can convert MODE constants into integer
3078 constants. At present, it can only do this reliably if the value
3079 fits within a HOST_WIDE_INT. */
3080 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3081 && (submode = int_mode_for_mode (mode)) != BLKmode
3082 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3083 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3084 (simplify_gen_subreg (submode, x, mode, 0),
3085 simplify_gen_subreg (submode, y, mode, 0)));
3087 /* This will handle any multi-word or full-word mode that lacks a move_insn
3088 pattern. However, you will get better code if you define such patterns,
3089 even if they must turn into multiple assembler instructions. */
3090 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3092 rtx last_insn = 0;
3093 rtx seq, inner;
3094 int need_clobber;
3095 int i;
3097 #ifdef PUSH_ROUNDING
3099 /* If X is a push on the stack, do the push now and replace
3100 X with a reference to the stack pointer. */
3101 if (push_operand (x, GET_MODE (x)))
3103 rtx temp;
3104 enum rtx_code code;
3106 /* Do not use anti_adjust_stack, since we don't want to update
3107 stack_pointer_delta. */
3108 temp = expand_binop (Pmode,
3109 #ifdef STACK_GROWS_DOWNWARD
3110 sub_optab,
3111 #else
3112 add_optab,
3113 #endif
3114 stack_pointer_rtx,
3115 GEN_INT
3116 (PUSH_ROUNDING
3117 (GET_MODE_SIZE (GET_MODE (x)))),
3118 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3120 if (temp != stack_pointer_rtx)
3121 emit_move_insn (stack_pointer_rtx, temp);
3123 code = GET_CODE (XEXP (x, 0));
3125 /* Just hope that small offsets off SP are OK. */
3126 if (code == POST_INC)
3127 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3128 GEN_INT (-((HOST_WIDE_INT)
3129 GET_MODE_SIZE (GET_MODE (x)))));
3130 else if (code == POST_DEC)
3131 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3132 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3133 else
3134 temp = stack_pointer_rtx;
3136 x = change_address (x, VOIDmode, temp);
3138 #endif
3140 /* If we are in reload, see if either operand is a MEM whose address
3141 is scheduled for replacement. */
3142 if (reload_in_progress && GET_CODE (x) == MEM
3143 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3144 x = replace_equiv_address_nv (x, inner);
3145 if (reload_in_progress && GET_CODE (y) == MEM
3146 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3147 y = replace_equiv_address_nv (y, inner);
3149 start_sequence ();
3151 need_clobber = 0;
3152 for (i = 0;
3153 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3154 i++)
3156 rtx xpart = operand_subword (x, i, 1, mode);
3157 rtx ypart = operand_subword (y, i, 1, mode);
3159 /* If we can't get a part of Y, put Y into memory if it is a
3160 constant. Otherwise, force it into a register. If we still
3161 can't get a part of Y, abort. */
3162 if (ypart == 0 && CONSTANT_P (y))
3164 y = force_const_mem (mode, y);
3165 ypart = operand_subword (y, i, 1, mode);
3167 else if (ypart == 0)
3168 ypart = operand_subword_force (y, i, mode);
3170 if (xpart == 0 || ypart == 0)
3171 abort ();
3173 need_clobber |= (GET_CODE (xpart) == SUBREG);
3175 last_insn = emit_move_insn (xpart, ypart);
3178 seq = get_insns ();
3179 end_sequence ();
3181 /* Show the output dies here. This is necessary for SUBREGs
3182 of pseudos since we cannot track their lifetimes correctly;
3183 hard regs shouldn't appear here except as return values.
3184 We never want to emit such a clobber after reload. */
3185 if (x != y
3186 && ! (reload_in_progress || reload_completed)
3187 && need_clobber != 0)
3188 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3190 emit_insn (seq);
3192 return last_insn;
3194 else
3195 abort ();
3198 /* If Y is representable exactly in a narrower mode, and the target can
3199 perform the extension directly from constant or memory, then emit the
3200 move as an extension. */
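/* Illustrative example (assumed target capabilities, for exposition only):
   if Y is the DFmode constant 1.0 and the target can extend an SFmode
   memory operand directly, the constant is placed in the pool as an
   SFmode value and the move becomes a single extendsfdf2-style insn, with
   a REG_EQUAL note recording the original DFmode value.  */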
3202 static rtx
3203 compress_float_constant (rtx x, rtx y)
3205 enum machine_mode dstmode = GET_MODE (x);
3206 enum machine_mode orig_srcmode = GET_MODE (y);
3207 enum machine_mode srcmode;
3208 REAL_VALUE_TYPE r;
3210 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3212 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3213 srcmode != orig_srcmode;
3214 srcmode = GET_MODE_WIDER_MODE (srcmode))
3216 enum insn_code ic;
3217 rtx trunc_y, last_insn;
3219 /* Skip if the target can't extend this way. */
3220 ic = can_extend_p (dstmode, srcmode, 0);
3221 if (ic == CODE_FOR_nothing)
3222 continue;
3224 /* Skip if the narrowed value isn't exact. */
3225 if (! exact_real_truncate (srcmode, &r))
3226 continue;
3228 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3230 if (LEGITIMATE_CONSTANT_P (trunc_y))
3232 /* Skip if the target needs extra instructions to perform
3233 the extension. */
3234 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3235 continue;
3237 else if (float_extend_from_mem[dstmode][srcmode])
3238 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3239 else
3240 continue;
3242 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3243 last_insn = get_last_insn ();
3245 if (GET_CODE (x) == REG)
3246 set_unique_reg_note (last_insn, REG_EQUAL, y);
3248 return last_insn;
3251 return NULL_RTX;
3254 /* Pushing data onto the stack. */
3256 /* Push a block of length SIZE (perhaps variable)
3257 and return an rtx to address the beginning of the block.
3258 Note that it is not possible for the value returned to be a QUEUED.
3259 The value may be virtual_outgoing_args_rtx.
3261 EXTRA is the number of bytes of padding to push in addition to SIZE.
3262 BELOW nonzero means this padding comes at low addresses;
3263 otherwise, the padding comes at high addresses. */
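/* Illustrative note (behavior restated, no new semantics): on a target
   whose stack grows downward, the block is created by anti_adjust_stack
   and the address returned is virtual_outgoing_args_rtx, plus EXTRA when
   BELOW is nonzero; otherwise the address is computed by backing off SIZE
   (and possibly EXTRA) from virtual_outgoing_args_rtx.  */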
3266 push_block (rtx size, int extra, int below)
3268 rtx temp;
3270 size = convert_modes (Pmode, ptr_mode, size, 1);
3271 if (CONSTANT_P (size))
3272 anti_adjust_stack (plus_constant (size, extra));
3273 else if (GET_CODE (size) == REG && extra == 0)
3274 anti_adjust_stack (size);
3275 else
3277 temp = copy_to_mode_reg (Pmode, size);
3278 if (extra != 0)
3279 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3280 temp, 0, OPTAB_LIB_WIDEN);
3281 anti_adjust_stack (temp);
3284 #ifndef STACK_GROWS_DOWNWARD
3285 if (0)
3286 #else
3287 if (1)
3288 #endif
3290 temp = virtual_outgoing_args_rtx;
3291 if (extra != 0 && below)
3292 temp = plus_constant (temp, extra);
3294 else
3296 if (GET_CODE (size) == CONST_INT)
3297 temp = plus_constant (virtual_outgoing_args_rtx,
3298 -INTVAL (size) - (below ? 0 : extra));
3299 else if (extra != 0 && !below)
3300 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3301 negate_rtx (Pmode, plus_constant (size, extra)));
3302 else
3303 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3304 negate_rtx (Pmode, size));
3307 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3310 #ifdef PUSH_ROUNDING
3312 /* Emit single push insn. */
3314 static void
3315 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3317 rtx dest_addr;
3318 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3319 rtx dest;
3320 enum insn_code icode;
3321 insn_operand_predicate_fn pred;
3323 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3324 /* If there is a push pattern, use it. Otherwise fall back to the old
3325 way of handing a MEM for the push operation to the move expander. */
3326 icode = push_optab->handlers[(int) mode].insn_code;
3327 if (icode != CODE_FOR_nothing)
3329 if (((pred = insn_data[(int) icode].operand[0].predicate)
3330 && !((*pred) (x, mode))))
3331 x = force_reg (mode, x);
3332 emit_insn (GEN_FCN (icode) (x));
3333 return;
3335 if (GET_MODE_SIZE (mode) == rounded_size)
3336 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3337 /* If we are to pad downward, adjust the stack pointer first and
3338 then store X into the stack location using an offset. This is
3339 because emit_move_insn does not know how to pad; it does not have
3340 access to type. */
3341 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3343 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3344 HOST_WIDE_INT offset;
3346 emit_move_insn (stack_pointer_rtx,
3347 expand_binop (Pmode,
3348 #ifdef STACK_GROWS_DOWNWARD
3349 sub_optab,
3350 #else
3351 add_optab,
3352 #endif
3353 stack_pointer_rtx,
3354 GEN_INT (rounded_size),
3355 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3357 offset = (HOST_WIDE_INT) padding_size;
3358 #ifdef STACK_GROWS_DOWNWARD
3359 if (STACK_PUSH_CODE == POST_DEC)
3360 /* We have already decremented the stack pointer, so get the
3361 previous value. */
3362 offset += (HOST_WIDE_INT) rounded_size;
3363 #else
3364 if (STACK_PUSH_CODE == POST_INC)
3365 /* We have already incremented the stack pointer, so get the
3366 previous value. */
3367 offset -= (HOST_WIDE_INT) rounded_size;
3368 #endif
3369 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3371 else
3373 #ifdef STACK_GROWS_DOWNWARD
3374 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3375 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3376 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3377 #else
3378 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3379 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3380 GEN_INT (rounded_size));
3381 #endif
3382 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3385 dest = gen_rtx_MEM (mode, dest_addr);
3387 if (type != 0)
3389 set_mem_attributes (dest, type, 1);
3391 if (flag_optimize_sibling_calls)
3392 /* Function incoming arguments may overlap with sibling call
3393 outgoing arguments and we cannot allow reordering of reads
3394 from function arguments with stores to outgoing arguments
3395 of sibling calls. */
3396 set_mem_alias_set (dest, 0);
3398 emit_move_insn (dest, x);
3400 #endif
3402 /* Generate code to push X onto the stack, assuming it has mode MODE and
3403 type TYPE.
3404 MODE is redundant except when X is a CONST_INT (since they don't
3405 carry mode info).
3406 SIZE is an rtx for the size of data to be copied (in bytes),
3407 needed only if X is BLKmode.
3409 ALIGN (in bits) is maximum alignment we can assume.
3411 If PARTIAL and REG are both nonzero, then copy that many of the first
3412 words of X into registers starting with REG, and push the rest of X.
3413 The amount of space pushed is decreased by PARTIAL words,
3414 rounded *down* to a multiple of PARM_BOUNDARY.
3415 REG must be a hard register in this case.
3416 If REG is zero but PARTIAL is not, take all other actions for an
3417 argument partially in registers, but do not actually load any
3418 registers.
3420 EXTRA is the amount in bytes of extra space to leave next to this arg.
3421 This is ignored if an argument block has already been allocated.
3423 On a machine that lacks real push insns, ARGS_ADDR is the address of
3424 the bottom of the argument block for this call. We use indexing off there
3425 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3426 argument block has not been preallocated.
3428 ARGS_SO_FAR is the size of args previously pushed for this call.
3430 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3431 for arguments passed in registers. If nonzero, it will be the number
3432 of bytes required. */
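/* Worked example (assumed 32-bit words and PARM_BOUNDARY of 32, purely
   illustrative): pushing a 12-byte BLKmode argument with PARTIAL == 2
   leaves USED == 8 bytes to the registers, so only the final 4 bytes of
   X are copied to the stack, and the trailing emit_group_load /
   move_block_to_reg call loads the first two words into REG.  */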
3434 void
3435 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3436 unsigned int align, int partial, rtx reg, int extra,
3437 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3438 rtx alignment_pad)
3440 rtx xinner;
3441 enum direction stack_direction
3442 #ifdef STACK_GROWS_DOWNWARD
3443 = downward;
3444 #else
3445 = upward;
3446 #endif
3448 /* Decide where to pad the argument: `downward' for below,
3449 `upward' for above, or `none' for don't pad it.
3450 Default is below for small data on big-endian machines; else above. */
3451 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3453 /* Invert direction if stack is post-decrement.
3454 FIXME: why? */
3455 if (STACK_PUSH_CODE == POST_DEC)
3456 if (where_pad != none)
3457 where_pad = (where_pad == downward ? upward : downward);
3459 xinner = x = protect_from_queue (x, 0);
3461 if (mode == BLKmode)
3463 /* Copy a block into the stack, entirely or partially. */
3465 rtx temp;
3466 int used = partial * UNITS_PER_WORD;
3467 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3468 int skip;
3470 if (size == 0)
3471 abort ();
3473 used -= offset;
3475 /* USED is now the # of bytes we need not copy to the stack
3476 because registers will take care of them. */
3478 if (partial != 0)
3479 xinner = adjust_address (xinner, BLKmode, used);
3481 /* If the partial register-part of the arg counts in its stack size,
3482 skip the part of stack space corresponding to the registers.
3483 Otherwise, start copying to the beginning of the stack space,
3484 by setting SKIP to 0. */
3485 skip = (reg_parm_stack_space == 0) ? 0 : used;
3487 #ifdef PUSH_ROUNDING
3488 /* Do it with several push insns if that doesn't take lots of insns
3489 and if there is no difficulty with push insns that skip bytes
3490 on the stack for alignment purposes. */
3491 if (args_addr == 0
3492 && PUSH_ARGS
3493 && GET_CODE (size) == CONST_INT
3494 && skip == 0
3495 && MEM_ALIGN (xinner) >= align
3496 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3497 /* Here we avoid the case of a structure whose weak alignment
3498 forces many pushes of a small amount of data,
3499 and such small pushes do rounding that causes trouble. */
3500 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3501 || align >= BIGGEST_ALIGNMENT
3502 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3503 == (align / BITS_PER_UNIT)))
3504 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3506 /* Push padding now if padding above and stack grows down,
3507 or if padding below and stack grows up.
3508 But if space already allocated, this has already been done. */
3509 if (extra && args_addr == 0
3510 && where_pad != none && where_pad != stack_direction)
3511 anti_adjust_stack (GEN_INT (extra));
3513 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3515 else
3516 #endif /* PUSH_ROUNDING */
3518 rtx target;
3520 /* Otherwise make space on the stack and copy the data
3521 to the address of that space. */
3523 /* Deduct words put into registers from the size we must copy. */
3524 if (partial != 0)
3526 if (GET_CODE (size) == CONST_INT)
3527 size = GEN_INT (INTVAL (size) - used);
3528 else
3529 size = expand_binop (GET_MODE (size), sub_optab, size,
3530 GEN_INT (used), NULL_RTX, 0,
3531 OPTAB_LIB_WIDEN);
3534 /* Get the address of the stack space.
3535 In this case, we do not deal with EXTRA separately.
3536 A single stack adjust will do. */
3537 if (! args_addr)
3539 temp = push_block (size, extra, where_pad == downward);
3540 extra = 0;
3542 else if (GET_CODE (args_so_far) == CONST_INT)
3543 temp = memory_address (BLKmode,
3544 plus_constant (args_addr,
3545 skip + INTVAL (args_so_far)));
3546 else
3547 temp = memory_address (BLKmode,
3548 plus_constant (gen_rtx_PLUS (Pmode,
3549 args_addr,
3550 args_so_far),
3551 skip));
3553 if (!ACCUMULATE_OUTGOING_ARGS)
3555 /* If the source is referenced relative to the stack pointer,
3556 copy it to another register to stabilize it. We do not need
3557 to do this if we know that we won't be changing sp. */
3559 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3560 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3561 temp = copy_to_reg (temp);
3564 target = gen_rtx_MEM (BLKmode, temp);
3566 if (type != 0)
3568 set_mem_attributes (target, type, 1);
3569 /* Function incoming arguments may overlap with sibling call
3570 outgoing arguments and we cannot allow reordering of reads
3571 from function arguments with stores to outgoing arguments
3572 of sibling calls. */
3573 set_mem_alias_set (target, 0);
3576 /* ALIGN may well be better aligned than TYPE, e.g. due to
3577 PARM_BOUNDARY. Assume the caller isn't lying. */
3578 set_mem_align (target, align);
3580 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3583 else if (partial > 0)
3585 /* Scalar partly in registers. */
3587 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3588 int i;
3589 int not_stack;
3590 /* # words of start of argument
3591 that we must make space for but need not store. */
3592 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3593 int args_offset = INTVAL (args_so_far);
3594 int skip;
3596 /* Push padding now if padding above and stack grows down,
3597 or if padding below and stack grows up.
3598 But if space already allocated, this has already been done. */
3599 if (extra && args_addr == 0
3600 && where_pad != none && where_pad != stack_direction)
3601 anti_adjust_stack (GEN_INT (extra));
3603 /* If we make space by pushing it, we might as well push
3604 the real data. Otherwise, we can leave OFFSET nonzero
3605 and leave the space uninitialized. */
3606 if (args_addr == 0)
3607 offset = 0;
3609 /* Now NOT_STACK gets the number of words that we don't need to
3610 allocate on the stack. */
3611 not_stack = partial - offset;
3613 /* If the partial register-part of the arg counts in its stack size,
3614 skip the part of stack space corresponding to the registers.
3615 Otherwise, start copying to the beginning of the stack space,
3616 by setting SKIP to 0. */
3617 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3619 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3620 x = validize_mem (force_const_mem (mode, x));
3622 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3623 SUBREGs of such registers are not allowed. */
3624 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3625 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3626 x = copy_to_reg (x);
3628 /* Loop over all the words allocated on the stack for this arg. */
3629 /* We can do it by words, because any scalar bigger than a word
3630 has a size a multiple of a word. */
3631 #ifndef PUSH_ARGS_REVERSED
3632 for (i = not_stack; i < size; i++)
3633 #else
3634 for (i = size - 1; i >= not_stack; i--)
3635 #endif
3636 if (i >= not_stack + offset)
3637 emit_push_insn (operand_subword_force (x, i, mode),
3638 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3639 0, args_addr,
3640 GEN_INT (args_offset + ((i - not_stack + skip)
3641 * UNITS_PER_WORD)),
3642 reg_parm_stack_space, alignment_pad);
3644 else
3646 rtx addr;
3647 rtx dest;
3649 /* Push padding now if padding above and stack grows down,
3650 or if padding below and stack grows up.
3651 But if space already allocated, this has already been done. */
3652 if (extra && args_addr == 0
3653 && where_pad != none && where_pad != stack_direction)
3654 anti_adjust_stack (GEN_INT (extra));
3656 #ifdef PUSH_ROUNDING
3657 if (args_addr == 0 && PUSH_ARGS)
3658 emit_single_push_insn (mode, x, type);
3659 else
3660 #endif
3662 if (GET_CODE (args_so_far) == CONST_INT)
3663 addr
3664 = memory_address (mode,
3665 plus_constant (args_addr,
3666 INTVAL (args_so_far)));
3667 else
3668 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3669 args_so_far));
3670 dest = gen_rtx_MEM (mode, addr);
3671 if (type != 0)
3673 set_mem_attributes (dest, type, 1);
3674 /* Function incoming arguments may overlap with sibling call
3675 outgoing arguments and we cannot allow reordering of reads
3676 from function arguments with stores to outgoing arguments
3677 of sibling calls. */
3678 set_mem_alias_set (dest, 0);
3681 emit_move_insn (dest, x);
3685 /* If part should go in registers, copy that part
3686 into the appropriate registers. Do this now, at the end,
3687 since mem-to-mem copies above may do function calls. */
3688 if (partial > 0 && reg != 0)
3690 /* Handle calls that pass values in multiple non-contiguous locations.
3691 The Irix 6 ABI has examples of this. */
3692 if (GET_CODE (reg) == PARALLEL)
3693 emit_group_load (reg, x, type, -1);
3694 else
3695 move_block_to_reg (REGNO (reg), x, partial, mode);
3698 if (extra && args_addr == 0 && where_pad == stack_direction)
3699 anti_adjust_stack (GEN_INT (extra));
3701 if (alignment_pad && args_addr == 0)
3702 anti_adjust_stack (alignment_pad);
3705 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3706 operations. */
3708 static rtx
3709 get_subtarget (rtx x)
3711 return ((x == 0
3712 /* Only registers can be subtargets. */
3713 || GET_CODE (x) != REG
3714 /* If the register is readonly, it can't be set more than once. */
3715 || RTX_UNCHANGING_P (x)
3716 /* Don't use hard regs to avoid extending their life. */
3717 || REGNO (x) < FIRST_PSEUDO_REGISTER
3718 /* Avoid subtargets inside loops,
3719 since they hide some invariant expressions. */
3720 || preserve_subexpressions_p ())
3721 ? 0 : x);
3724 /* Expand an assignment that stores the value of FROM into TO.
3725 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3726 (This may contain a QUEUED rtx;
3727 if the value is constant, this rtx is a constant.)
3728 Otherwise, the returned value is NULL_RTX. */
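/* Illustrative note (an assumption about the callers, not taken from this
   file): an assignment such as "x = y + 1" normally reaches this function
   through expand_expr's MODIFY_EXPR handling, with TO being the tree for x,
   FROM the PLUS_EXPR, and WANT_VALUE nonzero only when the assignment's own
   value is used, as in "z = (x = y + 1)".  */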
3731 expand_assignment (tree to, tree from, int want_value)
3733 rtx to_rtx = 0;
3734 rtx result;
3736 /* Don't crash if the lhs of the assignment was erroneous. */
3738 if (TREE_CODE (to) == ERROR_MARK)
3740 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3741 return want_value ? result : NULL_RTX;
3744 /* Assignment of a structure component needs special treatment
3745 if the structure component's rtx is not simply a MEM.
3746 Assignment of an array element at a constant index, and assignment of
3747 an array element in an unaligned packed structure field, have the same
3748 problem. */
3750 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3751 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3752 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3754 enum machine_mode mode1;
3755 HOST_WIDE_INT bitsize, bitpos;
3756 rtx orig_to_rtx;
3757 tree offset;
3758 int unsignedp;
3759 int volatilep = 0;
3760 tree tem;
3762 push_temp_slots ();
3763 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3764 &unsignedp, &volatilep);
3766 /* If we are going to use store_bit_field and extract_bit_field,
3767 make sure to_rtx will be safe for multiple use. */
3769 if (mode1 == VOIDmode && want_value)
3770 tem = stabilize_reference (tem);
3772 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3774 if (offset != 0)
3776 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3778 if (GET_CODE (to_rtx) != MEM)
3779 abort ();
3781 #ifdef POINTERS_EXTEND_UNSIGNED
3782 if (GET_MODE (offset_rtx) != Pmode)
3783 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3784 #else
3785 if (GET_MODE (offset_rtx) != ptr_mode)
3786 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3787 #endif
3789 /* A constant address in TO_RTX can have VOIDmode; we must not try
3790 to call force_reg in that case, so avoid it. */
3791 if (GET_CODE (to_rtx) == MEM
3792 && GET_MODE (to_rtx) == BLKmode
3793 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3794 && bitsize > 0
3795 && (bitpos % bitsize) == 0
3796 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3797 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3799 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3800 bitpos = 0;
3803 to_rtx = offset_address (to_rtx, offset_rtx,
3804 highest_pow2_factor_for_type (TREE_TYPE (to),
3805 offset));
3808 if (GET_CODE (to_rtx) == MEM)
3810 /* If the field is at offset zero, we could have been given the
3811 DECL_RTX of the parent struct. Don't munge it. */
3812 to_rtx = shallow_copy_rtx (to_rtx);
3814 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3817 /* Deal with volatile and readonly fields. The former is only done
3818 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3819 if (volatilep && GET_CODE (to_rtx) == MEM)
3821 if (to_rtx == orig_to_rtx)
3822 to_rtx = copy_rtx (to_rtx);
3823 MEM_VOLATILE_P (to_rtx) = 1;
3826 if (TREE_CODE (to) == COMPONENT_REF
3827 && TREE_READONLY (TREE_OPERAND (to, 1)))
3829 if (to_rtx == orig_to_rtx)
3830 to_rtx = copy_rtx (to_rtx);
3831 RTX_UNCHANGING_P (to_rtx) = 1;
3834 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3836 if (to_rtx == orig_to_rtx)
3837 to_rtx = copy_rtx (to_rtx);
3838 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3841 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3842 (want_value
3843 /* Spurious cast for HPUX compiler. */
3844 ? ((enum machine_mode)
3845 TYPE_MODE (TREE_TYPE (to)))
3846 : VOIDmode),
3847 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3849 preserve_temp_slots (result);
3850 free_temp_slots ();
3851 pop_temp_slots ();
3853 /* If the value is meaningful, convert RESULT to the proper mode.
3854 Otherwise, return nothing. */
3855 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3856 TYPE_MODE (TREE_TYPE (from)),
3857 result,
3858 TREE_UNSIGNED (TREE_TYPE (to)))
3859 : NULL_RTX);
3862 /* If the rhs is a function call and its value is not an aggregate,
3863 call the function before we start to compute the lhs.
3864 This is needed for correct code for cases such as
3865 val = setjmp (buf) on machines where reference to val
3866 requires loading up part of an address in a separate insn.
3868 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3869 since it might be a promoted variable where the zero- or sign-extension
3870 needs to be done. Handling this in the normal way is safe because no
3871 computation is done before the call. */
3872 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3874 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3875 && GET_CODE (DECL_RTL (to)) == REG))
3877 rtx value;
3879 push_temp_slots ();
3880 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3881 if (to_rtx == 0)
3882 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3884 /* Handle calls that return values in multiple non-contiguous locations.
3885 The Irix 6 ABI has examples of this. */
3886 if (GET_CODE (to_rtx) == PARALLEL)
3887 emit_group_load (to_rtx, value, TREE_TYPE (from),
3888 int_size_in_bytes (TREE_TYPE (from)));
3889 else if (GET_MODE (to_rtx) == BLKmode)
3890 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3891 else
3893 if (POINTER_TYPE_P (TREE_TYPE (to)))
3894 value = convert_memory_address (GET_MODE (to_rtx), value);
3895 emit_move_insn (to_rtx, value);
3897 preserve_temp_slots (to_rtx);
3898 free_temp_slots ();
3899 pop_temp_slots ();
3900 return want_value ? to_rtx : NULL_RTX;
3903 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3904 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3906 if (to_rtx == 0)
3907 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3909 /* Don't move directly into a return register. */
3910 if (TREE_CODE (to) == RESULT_DECL
3911 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3913 rtx temp;
3915 push_temp_slots ();
3916 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3918 if (GET_CODE (to_rtx) == PARALLEL)
3919 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3920 int_size_in_bytes (TREE_TYPE (from)));
3921 else
3922 emit_move_insn (to_rtx, temp);
3924 preserve_temp_slots (to_rtx);
3925 free_temp_slots ();
3926 pop_temp_slots ();
3927 return want_value ? to_rtx : NULL_RTX;
3930 /* In case we are returning the contents of an object which overlaps
3931 the place the value is being stored, use a safe function when copying
3932 a value through a pointer into a structure value return block. */
3933 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3934 && current_function_returns_struct
3935 && !current_function_returns_pcc_struct)
3937 rtx from_rtx, size;
3939 push_temp_slots ();
3940 size = expr_size (from);
3941 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3943 if (TARGET_MEM_FUNCTIONS)
3944 emit_library_call (memmove_libfunc, LCT_NORMAL,
3945 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3946 XEXP (from_rtx, 0), Pmode,
3947 convert_to_mode (TYPE_MODE (sizetype),
3948 size, TREE_UNSIGNED (sizetype)),
3949 TYPE_MODE (sizetype));
3950 else
3951 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3952 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3953 XEXP (to_rtx, 0), Pmode,
3954 convert_to_mode (TYPE_MODE (integer_type_node),
3955 size,
3956 TREE_UNSIGNED (integer_type_node)),
3957 TYPE_MODE (integer_type_node));
3959 preserve_temp_slots (to_rtx);
3960 free_temp_slots ();
3961 pop_temp_slots ();
3962 return want_value ? to_rtx : NULL_RTX;
3965 /* Compute FROM and store the value in the rtx we got. */
3967 push_temp_slots ();
3968 result = store_expr (from, to_rtx, want_value);
3969 preserve_temp_slots (result);
3970 free_temp_slots ();
3971 pop_temp_slots ();
3972 return want_value ? result : NULL_RTX;
3975 /* Generate code for computing expression EXP,
3976 and storing the value into TARGET.
3977 TARGET may contain a QUEUED rtx.
3979 If WANT_VALUE & 1 is nonzero, return a copy of the value
3980 not in TARGET, so that we can be sure to use the proper
3981 value in a containing expression even if TARGET has something
3982 else stored in it. If possible, we copy the value through a pseudo
3983 and return that pseudo. Or, if the value is constant, we try to
3984 return the constant. In some cases, we return a pseudo
3985 copied *from* TARGET.
3987 If the mode is BLKmode then we may return TARGET itself.
3988 It turns out that in BLKmode it doesn't cause a problem,
3989 because C has no operators that could combine two different
3990 assignments into the same BLKmode object with different values
3991 with no sequence point. Will other languages need this to
3992 be more thorough?
3994 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3995 to catch quickly any cases where the caller uses the value
3996 and fails to set WANT_VALUE.
3998 If WANT_VALUE & 2 is set, this is a store into a call param on the
3999 stack, and block moves may need to be treated specially. */
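/* Quick reference for the WANT_VALUE encoding documented above: bit 0 set
   means the caller needs an rtx for the stored value, bit 1 set means
   TARGET is a call argument being pushed on the stack, in which case block
   moves below use BLOCK_OP_CALL_PARM.  A plain "store and discard" call
   passes 0.  */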
4002 store_expr (tree exp, rtx target, int want_value)
4004 rtx temp;
4005 int dont_return_target = 0;
4006 int dont_store_target = 0;
4008 if (VOID_TYPE_P (TREE_TYPE (exp)))
4010 /* C++ can generate ?: expressions with a throw expression in one
4011 branch and an rvalue in the other. Here, we resolve attempts to
4012 store the throw expression's nonexistent result. */
4013 if (want_value)
4014 abort ();
4015 expand_expr (exp, const0_rtx, VOIDmode, 0);
4016 return NULL_RTX;
4018 if (TREE_CODE (exp) == COMPOUND_EXPR)
4020 /* Perform first part of compound expression, then assign from second
4021 part. */
4022 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4023 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4024 emit_queue ();
4025 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4027 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4029 /* For conditional expression, get safe form of the target. Then
4030 test the condition, doing the appropriate assignment on either
4031 side. This avoids the creation of unnecessary temporaries.
4032 For non-BLKmode, it is more efficient not to do this. */
4034 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4036 emit_queue ();
4037 target = protect_from_queue (target, 1);
4039 do_pending_stack_adjust ();
4040 NO_DEFER_POP;
4041 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4042 start_cleanup_deferral ();
4043 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4044 end_cleanup_deferral ();
4045 emit_queue ();
4046 emit_jump_insn (gen_jump (lab2));
4047 emit_barrier ();
4048 emit_label (lab1);
4049 start_cleanup_deferral ();
4050 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4051 end_cleanup_deferral ();
4052 emit_queue ();
4053 emit_label (lab2);
4054 OK_DEFER_POP;
4056 return want_value & 1 ? target : NULL_RTX;
4058 else if (queued_subexp_p (target))
4059 /* If target contains a postincrement, let's not risk
4060 using it as the place to generate the rhs. */
4062 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4064 /* Expand EXP into a new pseudo. */
4065 temp = gen_reg_rtx (GET_MODE (target));
4066 temp = expand_expr (exp, temp, GET_MODE (target),
4067 (want_value & 2
4068 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4070 else
4071 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4072 (want_value & 2
4073 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4075 /* If target is volatile, ANSI requires accessing the value
4076 *from* the target, if it is accessed. So make that happen.
4077 In no case return the target itself. */
4078 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4079 dont_return_target = 1;
4081 else if ((want_value & 1) != 0
4082 && GET_CODE (target) == MEM
4083 && ! MEM_VOLATILE_P (target)
4084 && GET_MODE (target) != BLKmode)
4085 /* If target is in memory and caller wants value in a register instead,
4086 arrange that. Pass TARGET as target for expand_expr so that,
4087 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4088 We know expand_expr will not use the target in that case.
4089 Don't do this if TARGET is volatile because we are supposed
4090 to write it and then read it. */
4092 temp = expand_expr (exp, target, GET_MODE (target),
4093 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4094 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4096 /* If TEMP is already in the desired TARGET, only copy it from
4097 memory and don't store it there again. */
4098 if (temp == target
4099 || (rtx_equal_p (temp, target)
4100 && ! side_effects_p (temp) && ! side_effects_p (target)))
4101 dont_store_target = 1;
4102 temp = copy_to_reg (temp);
4104 dont_return_target = 1;
4106 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4107 /* If this is a scalar in a register that is stored in a wider mode
4108 than the declared mode, compute the result into its declared mode
4109 and then convert to the wider mode. Our value is the computed
4110 expression. */
4112 rtx inner_target = 0;
4114 /* If we don't want a value, we can do the conversion inside EXP,
4115 which will often result in some optimizations. Do the conversion
4116 in two steps: first change the signedness, if needed, then
4117 the extension. But don't do this if the type of EXP is a subtype
4118 of something else since then the conversion might involve
4119 more than just converting modes. */
4120 if ((want_value & 1) == 0
4121 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4122 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4124 if (TREE_UNSIGNED (TREE_TYPE (exp))
4125 != SUBREG_PROMOTED_UNSIGNED_P (target))
4126 exp = convert
4127 ((*lang_hooks.types.signed_or_unsigned_type)
4128 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4130 exp = convert ((*lang_hooks.types.type_for_mode)
4131 (GET_MODE (SUBREG_REG (target)),
4132 SUBREG_PROMOTED_UNSIGNED_P (target)),
4133 exp);
4135 inner_target = SUBREG_REG (target);
4138 temp = expand_expr (exp, inner_target, VOIDmode,
4139 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4141 /* If TEMP is a MEM and we want a result value, make the access
4142 now so it gets done only once. Strictly speaking, this is
4143 only necessary if the MEM is volatile, or if the address
4144 overlaps TARGET. But not performing the load twice also
4145 reduces the amount of rtl we generate and then have to CSE. */
4146 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4147 temp = copy_to_reg (temp);
4149 /* If TEMP is a VOIDmode constant, use convert_modes to make
4150 sure that we properly convert it. */
4151 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4153 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4154 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4155 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4156 GET_MODE (target), temp,
4157 SUBREG_PROMOTED_UNSIGNED_P (target));
4160 convert_move (SUBREG_REG (target), temp,
4161 SUBREG_PROMOTED_UNSIGNED_P (target));
4163 /* If we promoted a constant, change the mode back down to match
4164 target. Otherwise, the caller might get confused by a result whose
4165 mode is larger than expected. */
4167 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4169 if (GET_MODE (temp) != VOIDmode)
4171 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4172 SUBREG_PROMOTED_VAR_P (temp) = 1;
4173 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4174 SUBREG_PROMOTED_UNSIGNED_P (target));
4176 else
4177 temp = convert_modes (GET_MODE (target),
4178 GET_MODE (SUBREG_REG (target)),
4179 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4182 return want_value & 1 ? temp : NULL_RTX;
4184 else
4186 temp = expand_expr (exp, target, GET_MODE (target),
4187 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4188 /* Return TARGET if it's a specified hardware register.
4189 If TARGET is a volatile mem ref, either return TARGET
4190 or return a reg copied *from* TARGET; ANSI requires this.
4192 Otherwise, if TEMP is not TARGET, return TEMP
4193 if it is constant (for efficiency),
4194 or if we really want the correct value. */
4195 if (!(target && GET_CODE (target) == REG
4196 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4197 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4198 && ! rtx_equal_p (temp, target)
4199 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4200 dont_return_target = 1;
4203 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4204 the same as that of TARGET, adjust the constant. This is needed, for
4205 example, in case it is a CONST_DOUBLE and we want only a word-sized
4206 value. */
4207 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4208 && TREE_CODE (exp) != ERROR_MARK
4209 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4210 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4211 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4213 /* If value was not generated in the target, store it there.
4214 Convert the value to TARGET's type first if necessary.
4215 If TEMP and TARGET compare equal according to rtx_equal_p, but
4216 one or both of them are volatile memory refs, we have to distinguish
4217 two cases:
4218 - expand_expr has used TARGET. In this case, we must not generate
4219 another copy. This can be detected by TARGET being equal according
4220 to == .
4221 - expand_expr has not used TARGET - that means that the source just
4222 happens to have the same RTX form. Since temp will have been created
4223 by expand_expr, it will compare unequal according to == .
4224 We must generate a copy in this case, to reach the correct number
4225 of volatile memory references. */
4227 if ((! rtx_equal_p (temp, target)
4228 || (temp != target && (side_effects_p (temp)
4229 || side_effects_p (target))))
4230 && TREE_CODE (exp) != ERROR_MARK
4231 && ! dont_store_target
4232 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4233 but TARGET is not valid memory reference, TEMP will differ
4234 from TARGET although it is really the same location. */
4235 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4236 || target != DECL_RTL_IF_SET (exp))
4237 /* If there's nothing to copy, don't bother. Don't call expr_size
4238 unless necessary, because the expr_size hook of some front ends
4239 (e.g. C++) aborts on objects that are not supposed to be bit-copied or
4240 bit-initialized. */
4241 && expr_size (exp) != const0_rtx)
4243 target = protect_from_queue (target, 1);
4244 if (GET_MODE (temp) != GET_MODE (target)
4245 && GET_MODE (temp) != VOIDmode)
4247 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4248 if (dont_return_target)
4250 /* In this case, we will return TEMP,
4251 so make sure it has the proper mode.
4252 But don't forget to store the value into TARGET. */
4253 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4254 emit_move_insn (target, temp);
4256 else
4257 convert_move (target, temp, unsignedp);
4260 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4262 /* Handle copying a string constant into an array. The string
4263 constant may be shorter than the array. So copy just the string's
4264 actual length, and clear the rest. First get the size of the data
4265 type of the string, which is actually the size of the target. */
4266 rtx size = expr_size (exp);
4268 if (GET_CODE (size) == CONST_INT
4269 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4270 emit_block_move (target, temp, size,
4271 (want_value & 2
4272 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4273 else
4275 /* Compute the size of the data to copy from the string. */
4276 tree copy_size
4277 = size_binop (MIN_EXPR,
4278 make_tree (sizetype, size),
4279 size_int (TREE_STRING_LENGTH (exp)));
4280 rtx copy_size_rtx
4281 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4282 (want_value & 2
4283 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4284 rtx label = 0;
4286 /* Copy that much. */
4287 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4288 TREE_UNSIGNED (sizetype));
4289 emit_block_move (target, temp, copy_size_rtx,
4290 (want_value & 2
4291 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4293 /* Figure out how much is left in TARGET that we have to clear.
4294 Do all calculations in ptr_mode. */
4295 if (GET_CODE (copy_size_rtx) == CONST_INT)
4297 size = plus_constant (size, -INTVAL (copy_size_rtx));
4298 target = adjust_address (target, BLKmode,
4299 INTVAL (copy_size_rtx));
4301 else
4303 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4304 copy_size_rtx, NULL_RTX, 0,
4305 OPTAB_LIB_WIDEN);
4307 #ifdef POINTERS_EXTEND_UNSIGNED
4308 if (GET_MODE (copy_size_rtx) != Pmode)
4309 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4310 TREE_UNSIGNED (sizetype));
4311 #endif
4313 target = offset_address (target, copy_size_rtx,
4314 highest_pow2_factor (copy_size));
4315 label = gen_label_rtx ();
4316 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4317 GET_MODE (size), 0, label);
4320 if (size != const0_rtx)
4321 clear_storage (target, size);
4323 if (label)
4324 emit_label (label);
4327 /* Handle calls that return values in multiple non-contiguous locations.
4328 The Irix 6 ABI has examples of this. */
4329 else if (GET_CODE (target) == PARALLEL)
4330 emit_group_load (target, temp, TREE_TYPE (exp),
4331 int_size_in_bytes (TREE_TYPE (exp)));
4332 else if (GET_MODE (temp) == BLKmode)
4333 emit_block_move (target, temp, expr_size (exp),
4334 (want_value & 2
4335 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4336 else
4337 emit_move_insn (target, temp);
4340 /* If we don't want a value, return NULL_RTX. */
4341 if ((want_value & 1) == 0)
4342 return NULL_RTX;
4344 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4345 ??? The latter test doesn't seem to make sense. */
4346 else if (dont_return_target && GET_CODE (temp) != MEM)
4347 return temp;
4349 /* Return TARGET itself if it is a hard register. */
4350 else if ((want_value & 1) != 0
4351 && GET_MODE (target) != BLKmode
4352 && ! (GET_CODE (target) == REG
4353 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4354 return copy_to_reg (target);
4356 else
4357 return target;
4360 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4362 static int
4363 is_zeros_p (tree exp)
4365 tree elt;
4367 switch (TREE_CODE (exp))
4369 case CONVERT_EXPR:
4370 case NOP_EXPR:
4371 case NON_LVALUE_EXPR:
4372 case VIEW_CONVERT_EXPR:
4373 return is_zeros_p (TREE_OPERAND (exp, 0));
4375 case INTEGER_CST:
4376 return integer_zerop (exp);
4378 case COMPLEX_CST:
4379 return
4380 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4382 case REAL_CST:
4383 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4385 case VECTOR_CST:
4386 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4387 elt = TREE_CHAIN (elt))
4388 if (!is_zeros_p (TREE_VALUE (elt)))
4389 return 0;
4391 return 1;
4393 case CONSTRUCTOR:
4394 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4395 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4396 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4397 if (! is_zeros_p (TREE_VALUE (elt)))
4398 return 0;
4400 return 1;
4402 default:
4403 return 0;
4407 /* Return 1 if EXP contains mostly (3/4) zeros. */
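/* Illustrative example (values assumed): a CONSTRUCTOR for
   int a[4] = { 0, 0, 0, 5 } yields zeros == 3 and elts == 4, so the test
   4 * zeros >= 3 * elts (12 >= 12) succeeds and the initializer counts as
   mostly zero; with only two zero elements (8 >= 12) it would not.  */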
4410 mostly_zeros_p (tree exp)
4412 if (TREE_CODE (exp) == CONSTRUCTOR)
4414 int elts = 0, zeros = 0;
4415 tree elt = CONSTRUCTOR_ELTS (exp);
4416 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4418 /* If there are no ranges of true bits, it is all zero. */
4419 return elt == NULL_TREE;
4421 for (; elt; elt = TREE_CHAIN (elt))
4423 /* We do not handle the case where the index is a RANGE_EXPR,
4424 so the statistic will be somewhat inaccurate.
4425 We do make a more accurate count in store_constructor itself;
4426 since this function is only used for nested array elements,
4427 this should be close enough. */
4428 if (mostly_zeros_p (TREE_VALUE (elt)))
4429 zeros++;
4430 elts++;
4433 return 4 * zeros >= 3 * elts;
4436 return is_zeros_p (exp);
4439 /* Helper function for store_constructor.
4440 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4441 TYPE is the type of the CONSTRUCTOR, not the element type.
4442 CLEARED is as for store_constructor.
4443 ALIAS_SET is the alias set to use for any stores.
4445 This provides a recursive shortcut back to store_constructor when it isn't
4446 necessary to go through store_field. This is so that we can pass through
4447 the cleared field to let store_constructor know that we may not have to
4448 clear a substructure if the outer structure has already been cleared. */
4450 static void
4451 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4452 HOST_WIDE_INT bitpos, enum machine_mode mode,
4453 tree exp, tree type, int cleared, int alias_set)
4455 if (TREE_CODE (exp) == CONSTRUCTOR
4456 && bitpos % BITS_PER_UNIT == 0
4457 /* If we have a nonzero bitpos for a register target, then we just
4458 let store_field do the bitfield handling. This is unlikely to
4459 generate unnecessary clear instructions anyway. */
4460 && (bitpos == 0 || GET_CODE (target) == MEM))
4462 if (GET_CODE (target) == MEM)
4463 target
4464 = adjust_address (target,
4465 GET_MODE (target) == BLKmode
4466 || 0 != (bitpos
4467 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4468 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4471 /* Update the alias set, if required. */
4472 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4473 && MEM_ALIAS_SET (target) != 0)
4475 target = copy_rtx (target);
4476 set_mem_alias_set (target, alias_set);
4479 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4481 else
4482 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4483 alias_set);
4486 /* Store the value of constructor EXP into the rtx TARGET.
4487 TARGET is either a REG or a MEM; we know it cannot conflict, since
4488 safe_from_p has been called.
4489 CLEARED is true if TARGET is known to have been zeroed.
4490 SIZE is the number of bytes of TARGET we are allowed to modify: this
4491 may not be the same as the size of EXP if we are assigning to a field
4492 which has been packed to exclude padding bits. */
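/* Roughly, the body below handles three families of types separately:
   RECORD, UNION and QUAL_UNION types field by field; ARRAY and VECTOR
   types element by element, unrolling small constant ranges and emitting
   a loop for large RANGE_EXPR ranges; and SET_TYPE bit sets.  Any other
   type aborts.  */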
4494 static void
4495 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4497 tree type = TREE_TYPE (exp);
4498 #ifdef WORD_REGISTER_OPERATIONS
4499 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4500 #endif
4502 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4503 || TREE_CODE (type) == QUAL_UNION_TYPE)
4505 tree elt;
4507 /* If size is zero or the target is already cleared, do nothing. */
4508 if (size == 0 || cleared)
4509 cleared = 1;
4510 /* We either clear the aggregate or indicate the value is dead. */
4511 else if ((TREE_CODE (type) == UNION_TYPE
4512 || TREE_CODE (type) == QUAL_UNION_TYPE)
4513 && ! CONSTRUCTOR_ELTS (exp))
4514 /* If the constructor is empty, clear the union. */
4516 clear_storage (target, expr_size (exp));
4517 cleared = 1;
4520 /* If we are building a static constructor into a register,
4521 set the initial value as zero so we can fold the value into
4522 a constant. But if more than one register is involved,
4523 this probably loses. */
4524 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4525 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4527 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4528 cleared = 1;
4531 /* If the constructor has fewer fields than the structure
4532 or if we are initializing the structure to mostly zeros,
4533 clear the whole structure first. Don't do this if TARGET is a
4534 register whose mode size isn't equal to SIZE since clear_storage
4535 can't handle this case. */
4536 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4537 || mostly_zeros_p (exp))
4538 && (GET_CODE (target) != REG
4539 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4540 == size)))
4542 rtx xtarget = target;
4544 if (readonly_fields_p (type))
4546 xtarget = copy_rtx (xtarget);
4547 RTX_UNCHANGING_P (xtarget) = 1;
4550 clear_storage (xtarget, GEN_INT (size));
4551 cleared = 1;
4554 if (! cleared)
4555 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4557 /* Store each element of the constructor into
4558 the corresponding field of TARGET. */
4560 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4562 tree field = TREE_PURPOSE (elt);
4563 tree value = TREE_VALUE (elt);
4564 enum machine_mode mode;
4565 HOST_WIDE_INT bitsize;
4566 HOST_WIDE_INT bitpos = 0;
4567 tree offset;
4568 rtx to_rtx = target;
4570 /* Just ignore missing fields.
4571 We cleared the whole structure, above,
4572 if any fields are missing. */
4573 if (field == 0)
4574 continue;
4576 if (cleared && is_zeros_p (value))
4577 continue;
4579 if (host_integerp (DECL_SIZE (field), 1))
4580 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4581 else
4582 bitsize = -1;
4584 mode = DECL_MODE (field);
4585 if (DECL_BIT_FIELD (field))
4586 mode = VOIDmode;
4588 offset = DECL_FIELD_OFFSET (field);
4589 if (host_integerp (offset, 0)
4590 && host_integerp (bit_position (field), 0))
4592 bitpos = int_bit_position (field);
4593 offset = 0;
4595 else
4596 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4598 if (offset)
4600 rtx offset_rtx;
4602 if (CONTAINS_PLACEHOLDER_P (offset))
4603 offset = build (WITH_RECORD_EXPR, sizetype,
4604 offset, make_tree (TREE_TYPE (exp), target));
4606 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4607 if (GET_CODE (to_rtx) != MEM)
4608 abort ();
4610 #ifdef POINTERS_EXTEND_UNSIGNED
4611 if (GET_MODE (offset_rtx) != Pmode)
4612 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4613 #else
4614 if (GET_MODE (offset_rtx) != ptr_mode)
4615 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4616 #endif
4618 to_rtx = offset_address (to_rtx, offset_rtx,
4619 highest_pow2_factor (offset));
4622 if (TREE_READONLY (field))
4624 if (GET_CODE (to_rtx) == MEM)
4625 to_rtx = copy_rtx (to_rtx);
4627 RTX_UNCHANGING_P (to_rtx) = 1;
4630 #ifdef WORD_REGISTER_OPERATIONS
4631 /* If this initializes a field that is smaller than a word, at the
4632 start of a word, try to widen it to a full word.
4633 This special case allows us to output C++ member function
4634 initializations in a form that the optimizers can understand. */
4635 if (GET_CODE (target) == REG
4636 && bitsize < BITS_PER_WORD
4637 && bitpos % BITS_PER_WORD == 0
4638 && GET_MODE_CLASS (mode) == MODE_INT
4639 && TREE_CODE (value) == INTEGER_CST
4640 && exp_size >= 0
4641 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4643 tree type = TREE_TYPE (value);
4645 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4647 type = (*lang_hooks.types.type_for_size)
4648 (BITS_PER_WORD, TREE_UNSIGNED (type));
4649 value = convert (type, value);
4652 if (BYTES_BIG_ENDIAN)
4653 value
4654 = fold (build (LSHIFT_EXPR, type, value,
4655 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4656 bitsize = BITS_PER_WORD;
4657 mode = word_mode;
4659 #endif
4661 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4662 && DECL_NONADDRESSABLE_P (field))
4664 to_rtx = copy_rtx (to_rtx);
4665 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4668 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4669 value, type, cleared,
4670 get_alias_set (TREE_TYPE (field)));
4673 else if (TREE_CODE (type) == ARRAY_TYPE
4674 || TREE_CODE (type) == VECTOR_TYPE)
4676 tree elt;
4677 int i;
4678 int need_to_clear;
4679 tree domain = TYPE_DOMAIN (type);
4680 tree elttype = TREE_TYPE (type);
4681 int const_bounds_p;
4682 HOST_WIDE_INT minelt = 0;
4683 HOST_WIDE_INT maxelt = 0;
4685 /* Vectors are like arrays, but the domain is stored via an array
4686 type indirectly. */
4687 if (TREE_CODE (type) == VECTOR_TYPE)
4689 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4690 the same field as TYPE_DOMAIN, we are not guaranteed that
4691 it always will. */
4692 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4693 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4696 const_bounds_p = (TYPE_MIN_VALUE (domain)
4697 && TYPE_MAX_VALUE (domain)
4698 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4699 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4701 /* If we have constant bounds for the range of the type, get them. */
4702 if (const_bounds_p)
4704 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4705 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4708 /* If the constructor has fewer elements than the array,
4709 clear the whole array first. Similarly if this is
4710 a static constructor of a non-BLKmode object. */
4711 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4712 need_to_clear = 1;
4713 else
4715 HOST_WIDE_INT count = 0, zero_count = 0;
4716 need_to_clear = ! const_bounds_p;
4718 /* This loop is a more accurate version of the loop in
4719 mostly_zeros_p (it handles RANGE_EXPR in an index).
4720 It is also needed to check for missing elements. */
4721 for (elt = CONSTRUCTOR_ELTS (exp);
4722 elt != NULL_TREE && ! need_to_clear;
4723 elt = TREE_CHAIN (elt))
4725 tree index = TREE_PURPOSE (elt);
4726 HOST_WIDE_INT this_node_count;
4728 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4730 tree lo_index = TREE_OPERAND (index, 0);
4731 tree hi_index = TREE_OPERAND (index, 1);
4733 if (! host_integerp (lo_index, 1)
4734 || ! host_integerp (hi_index, 1))
4736 need_to_clear = 1;
4737 break;
4740 this_node_count = (tree_low_cst (hi_index, 1)
4741 - tree_low_cst (lo_index, 1) + 1);
4743 else
4744 this_node_count = 1;
4746 count += this_node_count;
4747 if (mostly_zeros_p (TREE_VALUE (elt)))
4748 zero_count += this_node_count;
4751 /* Clear the entire array first if there are any missing elements,
4752 or if the incidence of zero elements is >= 75%. */
4753 if (! need_to_clear
4754 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4755 need_to_clear = 1;
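/* Illustrative case (assumed initializer): for int a[100] = { [3] = 7 }
   only one element is present, so count (1) is smaller than
   maxelt - minelt + 1 (100); the whole array is therefore cleared first
   and only the element at index 3 is stored afterwards.  */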
4758 if (need_to_clear && size > 0)
4760 if (! cleared)
4762 if (REG_P (target))
4763 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4764 else
4765 clear_storage (target, GEN_INT (size));
4767 cleared = 1;
4769 else if (REG_P (target))
4770 /* Inform later passes that the old value is dead. */
4771 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4773 /* Store each element of the constructor into
4774 the corresponding element of TARGET, determined
4775 by counting the elements. */
4776 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4777 elt;
4778 elt = TREE_CHAIN (elt), i++)
4780 enum machine_mode mode;
4781 HOST_WIDE_INT bitsize;
4782 HOST_WIDE_INT bitpos;
4783 int unsignedp;
4784 tree value = TREE_VALUE (elt);
4785 tree index = TREE_PURPOSE (elt);
4786 rtx xtarget = target;
4788 if (cleared && is_zeros_p (value))
4789 continue;
4791 unsignedp = TREE_UNSIGNED (elttype);
4792 mode = TYPE_MODE (elttype);
4793 if (mode == BLKmode)
4794 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4795 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4796 : -1);
4797 else
4798 bitsize = GET_MODE_BITSIZE (mode);
4800 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4802 tree lo_index = TREE_OPERAND (index, 0);
4803 tree hi_index = TREE_OPERAND (index, 1);
4804 rtx index_r, pos_rtx, loop_end;
4805 struct nesting *loop;
4806 HOST_WIDE_INT lo, hi, count;
4807 tree position;
4809 /* If the range is constant and "small", unroll the loop. */
4810 if (const_bounds_p
4811 && host_integerp (lo_index, 0)
4812 && host_integerp (hi_index, 0)
4813 && (lo = tree_low_cst (lo_index, 0),
4814 hi = tree_low_cst (hi_index, 0),
4815 count = hi - lo + 1,
4816 (GET_CODE (target) != MEM
4817 || count <= 2
4818 || (host_integerp (TYPE_SIZE (elttype), 1)
4819 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4820 <= 40 * 8)))))
4822 lo -= minelt; hi -= minelt;
4823 for (; lo <= hi; lo++)
4825 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4827 if (GET_CODE (target) == MEM
4828 && !MEM_KEEP_ALIAS_SET_P (target)
4829 && TREE_CODE (type) == ARRAY_TYPE
4830 && TYPE_NONALIASED_COMPONENT (type))
4832 target = copy_rtx (target);
4833 MEM_KEEP_ALIAS_SET_P (target) = 1;
4836 store_constructor_field
4837 (target, bitsize, bitpos, mode, value, type, cleared,
4838 get_alias_set (elttype));
4841 else
4843 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4844 loop_end = gen_label_rtx ();
4846 unsignedp = TREE_UNSIGNED (domain);
4848 index = build_decl (VAR_DECL, NULL_TREE, domain);
4850 index_r
4851 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4852 &unsignedp, 0));
4853 SET_DECL_RTL (index, index_r);
4854 if (TREE_CODE (value) == SAVE_EXPR
4855 && SAVE_EXPR_RTL (value) == 0)
4857 /* Make sure value gets expanded once before the
4858 loop. */
4859 expand_expr (value, const0_rtx, VOIDmode, 0);
4860 emit_queue ();
4862 store_expr (lo_index, index_r, 0);
4863 loop = expand_start_loop (0);
4865 /* Assign value to element index. */
4866 position
4867 = convert (ssizetype,
4868 fold (build (MINUS_EXPR, TREE_TYPE (index),
4869 index, TYPE_MIN_VALUE (domain))));
4870 position = size_binop (MULT_EXPR, position,
4871 convert (ssizetype,
4872 TYPE_SIZE_UNIT (elttype)));
4874 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4875 xtarget = offset_address (target, pos_rtx,
4876 highest_pow2_factor (position));
4877 xtarget = adjust_address (xtarget, mode, 0);
4878 if (TREE_CODE (value) == CONSTRUCTOR)
4879 store_constructor (value, xtarget, cleared,
4880 bitsize / BITS_PER_UNIT);
4881 else
4882 store_expr (value, xtarget, 0);
4884 expand_exit_loop_if_false (loop,
4885 build (LT_EXPR, integer_type_node,
4886 index, hi_index));
4888 expand_increment (build (PREINCREMENT_EXPR,
4889 TREE_TYPE (index),
4890 index, integer_one_node), 0, 0);
4891 expand_end_loop ();
4892 emit_label (loop_end);
4895 else if ((index != 0 && ! host_integerp (index, 0))
4896 || ! host_integerp (TYPE_SIZE (elttype), 1))
4898 tree position;
4900 if (index == 0)
4901 index = ssize_int (1);
4903 if (minelt)
4904 index = convert (ssizetype,
4905 fold (build (MINUS_EXPR, index,
4906 TYPE_MIN_VALUE (domain))));
4908 position = size_binop (MULT_EXPR, index,
4909 convert (ssizetype,
4910 TYPE_SIZE_UNIT (elttype)));
4911 xtarget = offset_address (target,
4912 expand_expr (position, 0, VOIDmode, 0),
4913 highest_pow2_factor (position));
4914 xtarget = adjust_address (xtarget, mode, 0);
4915 store_expr (value, xtarget, 0);
4917 else
4919 if (index != 0)
4920 bitpos = ((tree_low_cst (index, 0) - minelt)
4921 * tree_low_cst (TYPE_SIZE (elttype), 1));
4922 else
4923 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4925 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4926 && TREE_CODE (type) == ARRAY_TYPE
4927 && TYPE_NONALIASED_COMPONENT (type))
4929 target = copy_rtx (target);
4930 MEM_KEEP_ALIAS_SET_P (target) = 1;
4933 store_constructor_field (target, bitsize, bitpos, mode, value,
4934 type, cleared, get_alias_set (elttype));
4940 /* Set constructor assignments. */
4941 else if (TREE_CODE (type) == SET_TYPE)
4943 tree elt = CONSTRUCTOR_ELTS (exp);
4944 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4945 tree domain = TYPE_DOMAIN (type);
4946 tree domain_min, domain_max, bitlength;
4948 /* The default implementation strategy is to extract the constant
4949 parts of the constructor, use that to initialize the target,
4950 and then "or" in whatever non-constant ranges we need in addition.
4952 If a large set is all zero or all ones, it is
4953 probably better to set it using memset (if available) or bzero.
4954 Also, if a large set has just a single range, it may also be
4955 better to first clear the whole set (using
4956 bzero/memset), and then set the bits we want. */
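/* Background (an assumption about the front ends, not stated here):
   SET_TYPE constructors come from languages with Pascal- or CHILL-style
   set types.  Each CONSTRUCTOR element is either a single member, with
   TREE_PURPOSE == NULL_TREE and the bit in TREE_VALUE, or an inclusive
   range with the start bit in TREE_PURPOSE and the end bit in TREE_VALUE,
   which is what the loop over the elements below relies on.  */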
4958 /* Check for all zeros. */
4959 if (elt == NULL_TREE && size > 0)
4961 if (!cleared)
4962 clear_storage (target, GEN_INT (size));
4963 return;
4966 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4967 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4968 bitlength = size_binop (PLUS_EXPR,
4969 size_diffop (domain_max, domain_min),
4970 ssize_int (1));
4972 nbits = tree_low_cst (bitlength, 1);
4974 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4975 are "complicated" (more than one range), initialize (the
4976 constant parts) by copying from a constant. */
4977 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4978 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4980 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4981 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4982 char *bit_buffer = alloca (nbits);
4983 HOST_WIDE_INT word = 0;
4984 unsigned int bit_pos = 0;
4985 unsigned int ibit = 0;
4986 unsigned int offset = 0; /* In bytes from beginning of set. */
4988 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4989 for (;;)
4991 if (bit_buffer[ibit])
4993 if (BYTES_BIG_ENDIAN)
4994 word |= (1 << (set_word_size - 1 - bit_pos));
4995 else
4996 word |= 1 << bit_pos;
4999 bit_pos++; ibit++;
5000 if (bit_pos >= set_word_size || ibit == nbits)
5002 if (word != 0 || ! cleared)
5004 rtx datum = GEN_INT (word);
5005 rtx to_rtx;
5007 /* The assumption here is that it is safe to use
5008 XEXP if the set is multi-word, but not if
5009 it's single-word. */
5010 if (GET_CODE (target) == MEM)
5011 to_rtx = adjust_address (target, mode, offset);
5012 else if (offset == 0)
5013 to_rtx = target;
5014 else
5015 abort ();
5016 emit_move_insn (to_rtx, datum);
5019 if (ibit == nbits)
5020 break;
5021 word = 0;
5022 bit_pos = 0;
5023 offset += set_word_size / BITS_PER_UNIT;
5027 else if (!cleared)
5028 /* Don't bother clearing storage if the set is all ones. */
5029 if (TREE_CHAIN (elt) != NULL_TREE
5030 || (TREE_PURPOSE (elt) == NULL_TREE
5031 ? nbits != 1
5032 : ( ! host_integerp (TREE_VALUE (elt), 0)
5033 || ! host_integerp (TREE_PURPOSE (elt), 0)
5034 || (tree_low_cst (TREE_VALUE (elt), 0)
5035 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5036 != (HOST_WIDE_INT) nbits))))
5037 clear_storage (target, expr_size (exp));
5039 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5041 /* Start of range of element or NULL. */
5042 tree startbit = TREE_PURPOSE (elt);
5043 /* End of range of element, or element value. */
5044 tree endbit = TREE_VALUE (elt);
5045 HOST_WIDE_INT startb, endb;
5046 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5048 bitlength_rtx = expand_expr (bitlength,
5049 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5051 /* Handle non-range tuple element like [ expr ]. */
5052 if (startbit == NULL_TREE)
5054 startbit = save_expr (endbit);
5055 endbit = startbit;
5058 startbit = convert (sizetype, startbit);
5059 endbit = convert (sizetype, endbit);
5060 if (! integer_zerop (domain_min))
5062 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5063 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5065 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5066 EXPAND_CONST_ADDRESS);
5067 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5068 EXPAND_CONST_ADDRESS);
5070 if (REG_P (target))
5072 targetx
5073 = assign_temp
5074 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5075 (GET_MODE (target), 0),
5076 TYPE_QUAL_CONST)),
5077 0, 1, 1);
5078 emit_move_insn (targetx, target);
5081 else if (GET_CODE (target) == MEM)
5082 targetx = target;
5083 else
5084 abort ();
5086 /* Optimization: If startbit and endbit are constants divisible
5087 by BITS_PER_UNIT, call memset instead. */
5088 if (TARGET_MEM_FUNCTIONS
5089 && TREE_CODE (startbit) == INTEGER_CST
5090 && TREE_CODE (endbit) == INTEGER_CST
5091 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5092 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5094 emit_library_call (memset_libfunc, LCT_NORMAL,
5095 VOIDmode, 3,
5096 plus_constant (XEXP (targetx, 0),
5097 startb / BITS_PER_UNIT),
5098 Pmode,
5099 constm1_rtx, TYPE_MODE (integer_type_node),
5100 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5101 TYPE_MODE (sizetype));
5103 else
5104 emit_library_call (setbits_libfunc, LCT_NORMAL,
5105 VOIDmode, 4, XEXP (targetx, 0),
5106 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5107 startbit_rtx, TYPE_MODE (sizetype),
5108 endbit_rtx, TYPE_MODE (sizetype));
5110 if (REG_P (target))
5111 emit_move_insn (target, targetx);
5115 else
5116 abort ();
5119 /* Store the value of EXP (an expression tree)
5120 into a subfield of TARGET which has mode MODE and occupies
5121 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5122 If MODE is VOIDmode, it means that we are storing into a bit-field.
5124 If VALUE_MODE is VOIDmode, return nothing in particular.
5125 UNSIGNEDP is not used in this case.
5127 Otherwise, return an rtx for the value stored. This rtx
5128 has mode VALUE_MODE if that is convenient to do.
5129 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5131 TYPE is the type of the underlying object.
5133 ALIAS_SET is the alias set for the destination. This value will
5134 (in general) be different from that for TARGET, since TARGET is a
5135 reference to the containing structure. */
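/* In rough terms (summarizing the conditions below): the value is stored
   with store_bit_field when TARGET is a register or SUBREG, MODE is
   VOIDmode (a true bit-field), the field is too misaligned for an ordinary
   memory reference, or the sizes of the RHS and the field differ;
   otherwise a MEM for just the component is built with adjust_address and
   the store is delegated to store_expr.  */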
5137 static rtx
5138 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5139 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5140 int unsignedp, tree type, int alias_set)
5142 HOST_WIDE_INT width_mask = 0;
5144 if (TREE_CODE (exp) == ERROR_MARK)
5145 return const0_rtx;
5147 /* If we have nothing to store, do nothing unless the expression has
5148 side-effects. */
5149 if (bitsize == 0)
5150 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5151 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5152 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5154 /* If we are storing into an unaligned field of an aligned union that is
5155 in a register, we may have the mode of TARGET being an integer mode but
5156 MODE == BLKmode. In that case, get an aligned object whose size and
5157 alignment are the same as TARGET and store TARGET into it (we can avoid
5158 the store if the field being stored is the entire width of TARGET). Then
5159 call ourselves recursively to store the field into a BLKmode version of
5160 that object. Finally, load from the object into TARGET. This is not
5161 very efficient in general, but should only be slightly more expensive
5162 than the otherwise-required unaligned accesses. Perhaps this can be
5163 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5164 twice, once with emit_move_insn and once via store_field. */
5166 if (mode == BLKmode
5167 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5169 rtx object = assign_temp (type, 0, 1, 1);
5170 rtx blk_object = adjust_address (object, BLKmode, 0);
5172 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5173 emit_move_insn (object, target);
5175 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5176 alias_set);
5178 emit_move_insn (target, object);
5180 /* We want to return the BLKmode version of the data. */
5181 return blk_object;
5184 if (GET_CODE (target) == CONCAT)
5186 /* We're storing into a struct containing a single __complex. */
5188 if (bitpos != 0)
5189 abort ();
5190 return store_expr (exp, target, 0);
5193 /* If the structure is in a register or if the component
5194 is a bit field, we cannot use addressing to access it.
5195 Use bit-field techniques or SUBREG to store in it. */
5197 if (mode == VOIDmode
5198 || (mode != BLKmode && ! direct_store[(int) mode]
5199 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5200 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5201 || GET_CODE (target) == REG
5202 || GET_CODE (target) == SUBREG
5203 /* If the field isn't aligned enough to store as an ordinary memref,
5204 store it as a bit field. */
5205 || (mode != BLKmode
5206 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5207 || bitpos % GET_MODE_ALIGNMENT (mode))
5208 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5209 || (bitpos % BITS_PER_UNIT != 0)))
5210 /* If the RHS and field are a constant size and the size of the
5211 RHS isn't the same size as the bitfield, we must use bitfield
5212 operations. */
5213 || (bitsize >= 0
5214 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5215 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5217 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5219 /* If BITSIZE is narrower than the size of the type of EXP
5220 we will be narrowing TEMP. Normally, what's wanted are the
5221 low-order bits. However, if EXP's type is a record and this is
5222 a big-endian machine, we want the upper BITSIZE bits. */
5223 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5224 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5225 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5226 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5227 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5228 - bitsize),
5229 NULL_RTX, 1);
5231 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5232 MODE. */
5233 if (mode != VOIDmode && mode != BLKmode
5234 && mode != TYPE_MODE (TREE_TYPE (exp)))
5235 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5237 /* If the modes of TARGET and TEMP are both BLKmode, both
5238 must be in memory and BITPOS must be aligned on a byte
5239 boundary. If so, we simply do a block copy. */
5240 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5242 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5243 || bitpos % BITS_PER_UNIT != 0)
5244 abort ();
5246 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5247 emit_block_move (target, temp,
5248 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5249 / BITS_PER_UNIT),
5250 BLOCK_OP_NORMAL);
5252 return value_mode == VOIDmode ? const0_rtx : target;
5255 /* Store the value in the bitfield. */
5256 store_bit_field (target, bitsize, bitpos, mode, temp,
5257 int_size_in_bytes (type));
5259 if (value_mode != VOIDmode)
5261 /* The caller wants an rtx for the value.
5262 If possible, avoid refetching from the bitfield itself. */
5263 if (width_mask != 0
5264 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5266 tree count;
5267 enum machine_mode tmode;
5269 tmode = GET_MODE (temp);
5270 if (tmode == VOIDmode)
5271 tmode = value_mode;
5273 if (unsignedp)
5274 return expand_and (tmode, temp,
5275 gen_int_mode (width_mask, tmode),
5276 NULL_RTX);
5278 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5279 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5280 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5283 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5284 NULL_RTX, value_mode, VOIDmode,
5285 int_size_in_bytes (type));
5287 return const0_rtx;
5289 else
5291 rtx addr = XEXP (target, 0);
5292 rtx to_rtx = target;
5294 /* If a value is wanted, it must be the lhs;
5295 so make the address stable for multiple use. */
5297 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5298 && ! CONSTANT_ADDRESS_P (addr)
5299 /* A frame-pointer reference is already stable. */
5300 && ! (GET_CODE (addr) == PLUS
5301 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5302 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5303 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5304 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5306 /* Now build a reference to just the desired component. */
5308 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5310 if (to_rtx == target)
5311 to_rtx = copy_rtx (to_rtx);
5313 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5314 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5315 set_mem_alias_set (to_rtx, alias_set);
5317 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5321 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5322 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5323 codes and find the ultimate containing object, which we return.
5325 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5326 bit position, and *PUNSIGNEDP to the signedness of the field.
5327 If the position of the field is variable, we store a tree
5328 giving the variable offset (in units) in *POFFSET.
5329 This offset is in addition to the bit position.
5330 If the position is not variable, we store 0 in *POFFSET.
5332 If any of the extraction expressions is volatile,
5333 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5335 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5336 is a mode that can be used to access the field. In that case, *PBITSIZE
5337 is redundant.
5339 If the field describes a variable-sized object, *PMODE is set to
5340 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5341 this case, but the address of the object can be found. */
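/* Illustrative example (layout assumed): for a COMPONENT_REF s.f where f
   is a 3-bit bit-field placed 37 bits into struct s, this returns the tree
   for s with *PBITSIZE == 3, *PBITPOS == 37, *POFFSET == 0 and
   *PMODE == VOIDmode; for a variable index such as a[i], the scaled index
   expression comes back in *POFFSET, in addition to any constant part left
   in *PBITPOS.  */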
5343 tree
5344 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5345 HOST_WIDE_INT *pbitpos, tree *poffset,
5346 enum machine_mode *pmode, int *punsignedp,
5347 int *pvolatilep)
5349 tree size_tree = 0;
5350 enum machine_mode mode = VOIDmode;
5351 tree offset = size_zero_node;
5352 tree bit_offset = bitsize_zero_node;
5353 tree placeholder_ptr = 0;
5354 tree tem;
5356 /* First get the mode, signedness, and size. We do this from just the
5357 outermost expression. */
5358 if (TREE_CODE (exp) == COMPONENT_REF)
5360 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5361 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5362 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5364 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5366 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5368 size_tree = TREE_OPERAND (exp, 1);
5369 *punsignedp = TREE_UNSIGNED (exp);
5371 else
5373 mode = TYPE_MODE (TREE_TYPE (exp));
5374 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5376 if (mode == BLKmode)
5377 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5378 else
5379 *pbitsize = GET_MODE_BITSIZE (mode);
5382 if (size_tree != 0)
5384 if (! host_integerp (size_tree, 1))
5385 mode = BLKmode, *pbitsize = -1;
5386 else
5387 *pbitsize = tree_low_cst (size_tree, 1);
5390 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5391 and find the ultimate containing object. */
5392 while (1)
5394 if (TREE_CODE (exp) == BIT_FIELD_REF)
5395 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5396 else if (TREE_CODE (exp) == COMPONENT_REF)
5398 tree field = TREE_OPERAND (exp, 1);
5399 tree this_offset = DECL_FIELD_OFFSET (field);
5401 /* If this field hasn't been filled in yet, don't go
5402 past it. This should only happen when folding expressions
5403 made during type construction. */
5404 if (this_offset == 0)
5405 break;
5406 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5407 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5409 offset = size_binop (PLUS_EXPR, offset, this_offset);
5410 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5411 DECL_FIELD_BIT_OFFSET (field));
5413 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5416 else if (TREE_CODE (exp) == ARRAY_REF
5417 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5419 tree index = TREE_OPERAND (exp, 1);
5420 tree array = TREE_OPERAND (exp, 0);
5421 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5422 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5423 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5425 /* We assume all arrays have sizes that are a multiple of a byte.
5426 First subtract the lower bound, if any, in the type of the
5427 index, then convert to sizetype and multiply by the size of the
5428 array element. */
5429 if (low_bound != 0 && ! integer_zerop (low_bound))
5430 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5431 index, low_bound));
5433 /* If the index has a self-referential type, pass it to a
5434 WITH_RECORD_EXPR; if the component size is self-referential,
5435 pass our component to one. */
5436 if (CONTAINS_PLACEHOLDER_P (index))
5437 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5438 if (CONTAINS_PLACEHOLDER_P (unit_size))
5439 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5441 offset = size_binop (PLUS_EXPR, offset,
5442 size_binop (MULT_EXPR,
5443 convert (sizetype, index),
5444 unit_size));
5447 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5449 tree new = find_placeholder (exp, &placeholder_ptr);
5451 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5452 We might have been called from tree optimization where we
5453 haven't set up an object yet. */
5454 if (new == 0)
5455 break;
5456 else
5457 exp = new;
5459 continue;
5462 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5463 conversions that don't change the mode, and all view conversions
5464 except those that need to "step up" the alignment. */
5465 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5466 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5467 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5468 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5469 && STRICT_ALIGNMENT
5470 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5471 < BIGGEST_ALIGNMENT)
5472 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5473 || TYPE_ALIGN_OK (TREE_TYPE
5474 (TREE_OPERAND (exp, 0))))))
5475 && ! ((TREE_CODE (exp) == NOP_EXPR
5476 || TREE_CODE (exp) == CONVERT_EXPR)
5477 && (TYPE_MODE (TREE_TYPE (exp))
5478 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5479 break;
5481 /* If any reference in the chain is volatile, the effect is volatile. */
5482 if (TREE_THIS_VOLATILE (exp))
5483 *pvolatilep = 1;
5485 exp = TREE_OPERAND (exp, 0);
5488 /* If OFFSET is constant, see if we can return the whole thing as a
5489 constant bit position. Otherwise, split it up. */
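/* That is, the full bit position is OFFSET * BITS_PER_UNIT + BIT_OFFSET;
   when that sum is a compile-time constant that fits in a HOST_WIDE_INT it
   all goes into *PBITPOS and *POFFSET is cleared, otherwise only BIT_OFFSET
   goes into *PBITPOS and the variable byte part stays in *POFFSET.  */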
5490 if (host_integerp (offset, 0)
5491 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5492 bitsize_unit_node))
5493 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5494 && host_integerp (tem, 0))
5495 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5496 else
5497 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5499 *pmode = mode;
5500 return exp;
5503 /* Return 1 if T is an expression that get_inner_reference handles. */
5505 int
5506 handled_component_p (tree t)
5508 switch (TREE_CODE (t))
5510 case BIT_FIELD_REF:
5511 case COMPONENT_REF:
5512 case ARRAY_REF:
5513 case ARRAY_RANGE_REF:
5514 case NON_LVALUE_EXPR:
5515 case VIEW_CONVERT_EXPR:
5516 return 1;
5518 /* ??? Sure they are handled, but get_inner_reference may return
5519 a different PBITSIZE, depending upon whether the expression is
5520 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5521 case NOP_EXPR:
5522 case CONVERT_EXPR:
5523 return (TYPE_MODE (TREE_TYPE (t))
5524 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5526 default:
5527 return 0;
5531 /* Given an rtx VALUE that may contain additions and multiplications, return
5532 an equivalent value that just refers to a register, memory, or constant.
5533 This is done by generating instructions to perform the arithmetic and
5534 returning a pseudo-register containing the value.
5536 The returned value may be a REG, SUBREG, MEM or constant. */
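/* As a rough usage sketch: a caller holding, say,
   (plus (mult (reg X) (const_int 4)) (reg Y)) can pass it here to obtain a
   single pseudo register with the computed value; the multiply and add
   insns are emitted as a side effect.  (Hypothetical rtx, for
   illustration only.)  */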
5538 rtx
5539 force_operand (rtx value, rtx target)
5541 rtx op1, op2;
5542 /* Use subtarget as the target for operand 0 of a binary operation. */
5543 rtx subtarget = get_subtarget (target);
5544 enum rtx_code code = GET_CODE (value);
5546 /* Check for a PIC address load. */
5547 if ((code == PLUS || code == MINUS)
5548 && XEXP (value, 0) == pic_offset_table_rtx
5549 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5550 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5551 || GET_CODE (XEXP (value, 1)) == CONST))
5553 if (!subtarget)
5554 subtarget = gen_reg_rtx (GET_MODE (value));
5555 emit_move_insn (subtarget, value);
5556 return subtarget;
5559 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5561 if (!target)
5562 target = gen_reg_rtx (GET_MODE (value));
5563 convert_move (target, force_operand (XEXP (value, 0), NULL),
5564 code == ZERO_EXTEND);
5565 return target;
5568 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5570 op2 = XEXP (value, 1);
5571 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5572 subtarget = 0;
5573 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5575 code = PLUS;
5576 op2 = negate_rtx (GET_MODE (value), op2);
5579 /* Check for an addition with OP2 a constant integer and our first
5580 operand a PLUS of a virtual register and something else. In that
5581 case, we want to emit the sum of the virtual register and the
5582 constant first and then add the other value. This allows virtual
5583 register instantiation to simply modify the constant rather than
5584 creating another one around this addition. */
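/* E.g. (plus (plus (reg virtual-stack-vars) (reg R)) (const_int 8)) is
   expanded by first forming virtual-stack-vars + 8, which virtual register
   instantiation can later fold into a frame offset, and only then adding R.
   (Hypothetical rtx, for illustration only.)  */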
5585 if (code == PLUS && GET_CODE (op2) == CONST_INT
5586 && GET_CODE (XEXP (value, 0)) == PLUS
5587 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5588 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5589 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5591 rtx temp = expand_simple_binop (GET_MODE (value), code,
5592 XEXP (XEXP (value, 0), 0), op2,
5593 subtarget, 0, OPTAB_LIB_WIDEN);
5594 return expand_simple_binop (GET_MODE (value), code, temp,
5595 force_operand (XEXP (XEXP (value,
5596 0), 1), 0),
5597 target, 0, OPTAB_LIB_WIDEN);
5600 op1 = force_operand (XEXP (value, 0), subtarget);
5601 op2 = force_operand (op2, NULL_RTX);
5602 switch (code)
5604 case MULT:
5605 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5606 case DIV:
5607 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5608 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5609 target, 1, OPTAB_LIB_WIDEN);
5610 else
5611 return expand_divmod (0,
5612 FLOAT_MODE_P (GET_MODE (value))
5613 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5614 GET_MODE (value), op1, op2, target, 0);
5615 break;
5616 case MOD:
5617 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5618 target, 0);
5619 break;
5620 case UDIV:
5621 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5622 target, 1);
5623 break;
5624 case UMOD:
5625 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5626 target, 1);
5627 break;
5628 case ASHIFTRT:
5629 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5630 target, 0, OPTAB_LIB_WIDEN);
5631 break;
5632 default:
5633 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5634 target, 1, OPTAB_LIB_WIDEN);
5637 if (GET_RTX_CLASS (code) == '1')
5639 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5640 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5643 #ifdef INSN_SCHEDULING
5644 /* On machines that have insn scheduling, we want all memory references to be
5645 explicit, so we need to deal with such paradoxical SUBREGs. */
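/* A paradoxical SUBREG is one whose mode is wider than that of the object
   inside it; here the inner object is a MEM, so we load it into a register
   and take the SUBREG of that register instead.  */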
5646 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5647 && (GET_MODE_SIZE (GET_MODE (value))
5648 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5649 value
5650 = simplify_gen_subreg (GET_MODE (value),
5651 force_reg (GET_MODE (SUBREG_REG (value)),
5652 force_operand (SUBREG_REG (value),
5653 NULL_RTX)),
5654 GET_MODE (SUBREG_REG (value)),
5655 SUBREG_BYTE (value));
5656 #endif
5658 return value;
5661 /* Subroutine of expand_expr: return nonzero iff there is no way that
5662 EXP can reference X, which is being modified. TOP_P is nonzero if this
5663 call is going to be used to determine whether we need a temporary
5664 for EXP, as opposed to a recursive call to this function.
5666 It is always safe for this routine to return zero since it merely
5667 searches for optimization opportunities. */
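/* Roughly speaking, callers such as expand_operands use this to decide
   whether TARGET may be handed to a subexpression as scratch space without
   clobbering a value that EXP still needs.  */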
5669 static int
5670 safe_from_p (rtx x, tree exp, int top_p)
5672 rtx exp_rtl = 0;
5673 int i, nops;
5674 static tree save_expr_list;
5676 if (x == 0
5677 /* If EXP has varying size, we MUST use a target since we currently
5678 have no way of allocating temporaries of variable size
5679 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5680 So we assume here that something at a higher level has prevented a
5681 clash. This is somewhat bogus, but the best we can do. Only
5682 do this when X is BLKmode and when we are at the top level. */
5683 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5684 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5685 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5686 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5687 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5688 != INTEGER_CST)
5689 && GET_MODE (x) == BLKmode)
5690 /* If X is in the outgoing argument area, it is always safe. */
5691 || (GET_CODE (x) == MEM
5692 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5693 || (GET_CODE (XEXP (x, 0)) == PLUS
5694 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5695 return 1;
5697 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5698 find the underlying pseudo. */
5699 if (GET_CODE (x) == SUBREG)
5701 x = SUBREG_REG (x);
5702 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5703 return 0;
5706 /* A SAVE_EXPR might appear many times in the expression passed to the
5707 top-level safe_from_p call, and if it has a complex subexpression,
5708 examining it multiple times could result in a combinatorial explosion.
5709 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5710 with optimization took about 28 minutes to compile -- even though it was
5711 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5712 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5713 we have processed. Note that the only test of top_p was above. */
5715 if (top_p)
5717 int rtn;
5718 tree t;
5720 save_expr_list = 0;
5722 rtn = safe_from_p (x, exp, 0);
5724 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5725 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5727 return rtn;
5730 /* Now look at our tree code and possibly recurse. */
5731 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5733 case 'd':
5734 exp_rtl = DECL_RTL_IF_SET (exp);
5735 break;
5737 case 'c':
5738 return 1;
5740 case 'x':
5741 if (TREE_CODE (exp) == TREE_LIST)
5743 while (1)
5745 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5746 return 0;
5747 exp = TREE_CHAIN (exp);
5748 if (!exp)
5749 return 1;
5750 if (TREE_CODE (exp) != TREE_LIST)
5751 return safe_from_p (x, exp, 0);
5754 else if (TREE_CODE (exp) == ERROR_MARK)
5755 return 1; /* An already-visited SAVE_EXPR? */
5756 else
5757 return 0;
5759 case '2':
5760 case '<':
5761 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5762 return 0;
5763 /* FALLTHRU */
5765 case '1':
5766 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5768 case 'e':
5769 case 'r':
5770 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5771 the expression. If it is set, we conflict iff we are that rtx or
5772 both are in memory. Otherwise, we check all operands of the
5773 expression recursively. */
5775 switch (TREE_CODE (exp))
5777 case ADDR_EXPR:
5778 /* If the operand is static or we are static, we can't conflict.
5779 Likewise if we don't conflict with the operand at all. */
5780 if (staticp (TREE_OPERAND (exp, 0))
5781 || TREE_STATIC (exp)
5782 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5783 return 1;
5785 /* Otherwise, the only way this can conflict is if we are taking
5786 the address of a DECL and that address is part of X, which is
5787 very rare. */
5788 exp = TREE_OPERAND (exp, 0);
5789 if (DECL_P (exp))
5791 if (!DECL_RTL_SET_P (exp)
5792 || GET_CODE (DECL_RTL (exp)) != MEM)
5793 return 0;
5794 else
5795 exp_rtl = XEXP (DECL_RTL (exp), 0);
5797 break;
5799 case INDIRECT_REF:
5800 if (GET_CODE (x) == MEM
5801 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5802 get_alias_set (exp)))
5803 return 0;
5804 break;
5806 case CALL_EXPR:
5807 /* Assume that the call will clobber all hard registers and
5808 all of memory. */
5809 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5810 || GET_CODE (x) == MEM)
5811 return 0;
5812 break;
5814 case RTL_EXPR:
5815 /* If a sequence exists, we would have to scan every instruction
5816 in the sequence to see if it was safe. This is probably not
5817 worthwhile. */
5818 if (RTL_EXPR_SEQUENCE (exp))
5819 return 0;
5821 exp_rtl = RTL_EXPR_RTL (exp);
5822 break;
5824 case WITH_CLEANUP_EXPR:
5825 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5826 break;
5828 case CLEANUP_POINT_EXPR:
5829 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5831 case SAVE_EXPR:
5832 exp_rtl = SAVE_EXPR_RTL (exp);
5833 if (exp_rtl)
5834 break;
5836 /* If we've already scanned this, don't do it again. Otherwise,
5837 show we've scanned it and record for clearing the flag if we're
5838 going on. */
5839 if (TREE_PRIVATE (exp))
5840 return 1;
5842 TREE_PRIVATE (exp) = 1;
5843 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5845 TREE_PRIVATE (exp) = 0;
5846 return 0;
5849 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5850 return 1;
5852 case BIND_EXPR:
5853 /* The only operand we look at is operand 1. The rest aren't
5854 part of the expression. */
5855 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5857 default:
5858 break;
5861 /* If we have an rtx, we do not need to scan our operands. */
5862 if (exp_rtl)
5863 break;
5865 nops = first_rtl_op (TREE_CODE (exp));
5866 for (i = 0; i < nops; i++)
5867 if (TREE_OPERAND (exp, i) != 0
5868 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5869 return 0;
5871 /* If this is a language-specific tree code, it may require
5872 special handling. */
5873 if ((unsigned int) TREE_CODE (exp)
5874 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5875 && !(*lang_hooks.safe_from_p) (x, exp))
5876 return 0;
5879 /* If we have an rtl, find any enclosed object. Then see if we conflict
5880 with it. */
5881 if (exp_rtl)
5883 if (GET_CODE (exp_rtl) == SUBREG)
5885 exp_rtl = SUBREG_REG (exp_rtl);
5886 if (GET_CODE (exp_rtl) == REG
5887 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5888 return 0;
5891 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5892 are memory and they conflict. */
5893 return ! (rtx_equal_p (x, exp_rtl)
5894 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5895 && true_dependence (exp_rtl, VOIDmode, x,
5896 rtx_addr_varies_p)));
5899 /* If we reach here, it is safe. */
5900 return 1;
5903 /* Subroutine of expand_expr: return rtx if EXP is a
5904 variable or parameter; else return 0. */
5906 static rtx
5907 var_rtx (tree exp)
5909 STRIP_NOPS (exp);
5910 switch (TREE_CODE (exp))
5912 case PARM_DECL:
5913 case VAR_DECL:
5914 return DECL_RTL (exp);
5915 default:
5916 return 0;
5920 #ifdef MAX_INTEGER_COMPUTATION_MODE
5922 void
5923 check_max_integer_computation_mode (tree exp)
5925 enum tree_code code;
5926 enum machine_mode mode;
5928 /* Strip any NOPs that don't change the mode. */
5929 STRIP_NOPS (exp);
5930 code = TREE_CODE (exp);
5932 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5933 if (code == NOP_EXPR
5934 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5935 return;
5937 /* First check the type of the overall operation. We need only look at
5938 unary, binary and relational operations. */
5939 if (TREE_CODE_CLASS (code) == '1'
5940 || TREE_CODE_CLASS (code) == '2'
5941 || TREE_CODE_CLASS (code) == '<')
5943 mode = TYPE_MODE (TREE_TYPE (exp));
5944 if (GET_MODE_CLASS (mode) == MODE_INT
5945 && mode > MAX_INTEGER_COMPUTATION_MODE)
5946 internal_error ("unsupported wide integer operation");
5949 /* Check operand of a unary op. */
5950 if (TREE_CODE_CLASS (code) == '1')
5952 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5953 if (GET_MODE_CLASS (mode) == MODE_INT
5954 && mode > MAX_INTEGER_COMPUTATION_MODE)
5955 internal_error ("unsupported wide integer operation");
5958 /* Check operands of a binary/comparison op. */
5959 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5961 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5962 if (GET_MODE_CLASS (mode) == MODE_INT
5963 && mode > MAX_INTEGER_COMPUTATION_MODE)
5964 internal_error ("unsupported wide integer operation");
5966 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5967 if (GET_MODE_CLASS (mode) == MODE_INT
5968 && mode > MAX_INTEGER_COMPUTATION_MODE)
5969 internal_error ("unsupported wide integer operation");
5972 #endif
5974 /* Return the highest power of two that EXP is known to be a multiple of.
5975 This is used in updating alignment of MEMs in array references. */
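/* For example, for EXP = I * 8 + 4 (I a hypothetical variable) the result
   is 4: the MULT_EXPR contributes 8 times the default factor of 1 for I,
   and the PLUS_EXPR takes the MIN of that and the factor 4 of the constant.
   Expressions we know nothing about fall through to the default of 1.  */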
5977 static unsigned HOST_WIDE_INT
5978 highest_pow2_factor (tree exp)
5980 unsigned HOST_WIDE_INT c0, c1;
5982 switch (TREE_CODE (exp))
5984 case INTEGER_CST:
5985 /* We can find the lowest bit that's a one. If the low
5986 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5987 We need to handle this case since we can find it in a COND_EXPR,
5988 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5989 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5990 later ICE. */
5991 if (TREE_CONSTANT_OVERFLOW (exp))
5992 return BIGGEST_ALIGNMENT;
5993 else
5995 /* Note: tree_low_cst is intentionally not used here,
5996 we don't care about the upper bits. */
5997 c0 = TREE_INT_CST_LOW (exp);
5998 c0 &= -c0;
5999 return c0 ? c0 : BIGGEST_ALIGNMENT;
6001 break;
6003 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6004 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6005 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6006 return MIN (c0, c1);
6008 case MULT_EXPR:
6009 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6010 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6011 return c0 * c1;
6013 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6014 case CEIL_DIV_EXPR:
6015 if (integer_pow2p (TREE_OPERAND (exp, 1))
6016 && host_integerp (TREE_OPERAND (exp, 1), 1))
6018 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6019 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6020 return MAX (1, c0 / c1);
6022 break;
6024 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6025 case SAVE_EXPR: case WITH_RECORD_EXPR:
6026 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6028 case COMPOUND_EXPR:
6029 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6031 case COND_EXPR:
6032 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6033 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6034 return MIN (c0, c1);
6036 default:
6037 break;
6040 return 1;
6043 /* Similar, except that it is known that the expression must be a multiple
6044 of the alignment of TYPE. */
6046 static unsigned HOST_WIDE_INT
6047 highest_pow2_factor_for_type (tree type, tree exp)
6049 unsigned HOST_WIDE_INT type_align, factor;
6051 factor = highest_pow2_factor (exp);
6052 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6053 return MAX (factor, type_align);
6056 /* Return an object on the placeholder list that matches EXP, a
6057 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6058 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6059 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6060 points to a location holding a starting point in the placeholder list
6061 (zero means start at the head of the list); when an object is found, a
6062 pointer to the placeholder-list entry containing it is stored there. */
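/* Informally, a PLACEHOLDER_EXPR stands for "the object currently being
   operated on"; WITH_RECORD_EXPR pushes such objects onto the placeholder
   list, and this routine scans that list for the first object whose type,
   or pointed-to type, matches the type of EXP.  */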
6064 tree
6065 find_placeholder (tree exp, tree *plist)
6067 tree type = TREE_TYPE (exp);
6068 tree placeholder_expr;
6070 for (placeholder_expr
6071 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6072 placeholder_expr != 0;
6073 placeholder_expr = TREE_CHAIN (placeholder_expr))
6075 tree need_type = TYPE_MAIN_VARIANT (type);
6076 tree elt;
6078 /* Find the outermost reference that is of the type we want. If none,
6079 see if any object has a type that is a pointer to the type we
6080 want. */
6081 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6082 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6083 || TREE_CODE (elt) == COND_EXPR)
6084 ? TREE_OPERAND (elt, 1)
6085 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6086 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6087 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6088 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6089 ? TREE_OPERAND (elt, 0) : 0))
6090 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6092 if (plist)
6093 *plist = placeholder_expr;
6094 return elt;
6097 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6098 elt
6099 = ((TREE_CODE (elt) == COMPOUND_EXPR
6100 || TREE_CODE (elt) == COND_EXPR)
6101 ? TREE_OPERAND (elt, 1)
6102 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6103 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6104 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6105 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6106 ? TREE_OPERAND (elt, 0) : 0))
6107 if (POINTER_TYPE_P (TREE_TYPE (elt))
6108 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6109 == need_type))
6111 if (plist)
6112 *plist = placeholder_expr;
6113 return build1 (INDIRECT_REF, need_type, elt);
6117 return 0;
6120 /* Subroutine of expand_expr. Expand the two operands of a binary
6121 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6122 The value may be stored in TARGET if TARGET is nonzero. The
6123 MODIFIER argument is as documented by expand_expr. */
6125 static void
6126 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6127 enum expand_modifier modifier)
6129 if (! safe_from_p (target, exp1, 1))
6130 target = 0;
6131 if (operand_equal_p (exp0, exp1, 0))
6133 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6134 *op1 = copy_rtx (*op0);
6136 else
6138 /* If we need to preserve evaluation order, copy exp0 into its own
6139 temporary variable so that it can't be clobbered by exp1. */
6140 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6141 exp0 = save_expr (exp0);
6142 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6143 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6148 /* expand_expr: generate code for computing expression EXP.
6149 An rtx for the computed value is returned. The value is never null.
6150 In the case of a void EXP, const0_rtx is returned.
6152 The value may be stored in TARGET if TARGET is nonzero.
6153 TARGET is just a suggestion; callers must assume that
6154 the rtx returned may not be the same as TARGET.
6156 If TARGET is CONST0_RTX, it means that the value will be ignored.
6158 If TMODE is not VOIDmode, it suggests generating the
6159 result in mode TMODE. But this is done only when convenient.
6160 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6161 TMODE is just a suggestion; callers must assume that
6162 the rtx returned may not have mode TMODE.
6164 Note that TARGET may have neither TMODE nor MODE. In that case, it
6165 probably will not be used.
6167 If MODIFIER is EXPAND_SUM then when EXP is an addition
6168 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6169 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6170 products as above, or REG or MEM, or constant.
6171 Ordinarily in such cases we would output mul or add instructions
6172 and then return a pseudo reg containing the sum.
6174 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6175 it also marks a label as absolutely required (it can't be dead).
6176 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6177 This is used for outputting expressions used in initializers.
6179 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6180 with a constant address even if that address is not normally legitimate.
6181 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6183 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6184 a call parameter. Such targets require special care as we haven't yet
6185 marked TARGET so that it's safe from being trashed by libcalls. We
6186 don't want to use TARGET for anything but the final result;
6187 intermediate values must go elsewhere. Additionally, calls to
6188 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
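/* A common pattern, shown only for illustration, is
   expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL), which lets the
   expression choose both its own target and its natural mode.  */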
6190 rtx
6191 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6192 enum expand_modifier modifier)
6194 rtx op0, op1, temp;
6195 tree type = TREE_TYPE (exp);
6196 int unsignedp = TREE_UNSIGNED (type);
6197 enum machine_mode mode;
6198 enum tree_code code = TREE_CODE (exp);
6199 optab this_optab;
6200 rtx subtarget, original_target;
6201 int ignore;
6202 tree context;
6204 /* Handle ERROR_MARK before anybody tries to access its type. */
6205 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6207 op0 = CONST0_RTX (tmode);
6208 if (op0 != 0)
6209 return op0;
6210 return const0_rtx;
6213 mode = TYPE_MODE (type);
6214 /* Use subtarget as the target for operand 0 of a binary operation. */
6215 subtarget = get_subtarget (target);
6216 original_target = target;
6217 ignore = (target == const0_rtx
6218 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6219 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6220 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6221 && TREE_CODE (type) == VOID_TYPE));
6223 /* If we are going to ignore this result, we need only do something
6224 if there is a side-effect somewhere in the expression. If there
6225 is, short-circuit the most common cases here. Note that we must
6226 not call expand_expr with anything but const0_rtx in case this
6227 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6229 if (ignore)
6231 if (! TREE_SIDE_EFFECTS (exp))
6232 return const0_rtx;
6234 /* Ensure we reference a volatile object even if value is ignored, but
6235 don't do this if all we are doing is taking its address. */
6236 if (TREE_THIS_VOLATILE (exp)
6237 && TREE_CODE (exp) != FUNCTION_DECL
6238 && mode != VOIDmode && mode != BLKmode
6239 && modifier != EXPAND_CONST_ADDRESS)
6241 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6242 if (GET_CODE (temp) == MEM)
6243 temp = copy_to_reg (temp);
6244 return const0_rtx;
6247 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6248 || code == INDIRECT_REF || code == BUFFER_REF)
6249 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6250 modifier);
6252 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6253 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6255 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6256 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6257 return const0_rtx;
6259 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6260 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6261 /* If the second operand has no side effects, just evaluate
6262 the first. */
6263 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6264 modifier);
6265 else if (code == BIT_FIELD_REF)
6267 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6268 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6269 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6270 return const0_rtx;
6273 target = 0;
6276 #ifdef MAX_INTEGER_COMPUTATION_MODE
6277 /* Only check stuff here if the mode we want is different from the mode
6278 of the expression; if it's the same, check_max_integer_computation_mode
6279 will handle it. Do we really need to check this stuff at all? */
6281 if (target
6282 && GET_MODE (target) != mode
6283 && TREE_CODE (exp) != INTEGER_CST
6284 && TREE_CODE (exp) != PARM_DECL
6285 && TREE_CODE (exp) != ARRAY_REF
6286 && TREE_CODE (exp) != ARRAY_RANGE_REF
6287 && TREE_CODE (exp) != COMPONENT_REF
6288 && TREE_CODE (exp) != BIT_FIELD_REF
6289 && TREE_CODE (exp) != INDIRECT_REF
6290 && TREE_CODE (exp) != CALL_EXPR
6291 && TREE_CODE (exp) != VAR_DECL
6292 && TREE_CODE (exp) != RTL_EXPR)
6294 enum machine_mode mode = GET_MODE (target);
6296 if (GET_MODE_CLASS (mode) == MODE_INT
6297 && mode > MAX_INTEGER_COMPUTATION_MODE)
6298 internal_error ("unsupported wide integer operation");
6301 if (tmode != mode
6302 && TREE_CODE (exp) != INTEGER_CST
6303 && TREE_CODE (exp) != PARM_DECL
6304 && TREE_CODE (exp) != ARRAY_REF
6305 && TREE_CODE (exp) != ARRAY_RANGE_REF
6306 && TREE_CODE (exp) != COMPONENT_REF
6307 && TREE_CODE (exp) != BIT_FIELD_REF
6308 && TREE_CODE (exp) != INDIRECT_REF
6309 && TREE_CODE (exp) != VAR_DECL
6310 && TREE_CODE (exp) != CALL_EXPR
6311 && TREE_CODE (exp) != RTL_EXPR
6312 && GET_MODE_CLASS (tmode) == MODE_INT
6313 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6314 internal_error ("unsupported wide integer operation");
6316 check_max_integer_computation_mode (exp);
6317 #endif
6319 /* If will do cse, generate all results into pseudo registers
6320 since 1) that allows cse to find more things
6321 and 2) otherwise cse could produce an insn the machine
6322 cannot support. An exception is a CONSTRUCTOR into a multi-word
6323 MEM: that's much more likely to be most efficient into the MEM.
6324 Another is a CALL_EXPR which must return in memory. */
6326 if (! cse_not_expected && mode != BLKmode && target
6327 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6328 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6329 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6330 target = 0;
6332 switch (code)
6334 case LABEL_DECL:
6336 tree function = decl_function_context (exp);
6337 /* Labels in containing functions, or labels used from initializers,
6338 must be forced. */
6339 if (modifier == EXPAND_INITIALIZER
6340 || (function != current_function_decl
6341 && function != inline_function_decl
6342 && function != 0))
6343 temp = force_label_rtx (exp);
6344 else
6345 temp = label_rtx (exp);
6347 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6348 if (function != current_function_decl
6349 && function != inline_function_decl && function != 0)
6350 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6351 return temp;
6354 case PARM_DECL:
6355 if (!DECL_RTL_SET_P (exp))
6357 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6358 return CONST0_RTX (mode);
6361 /* ... fall through ... */
6363 case VAR_DECL:
6364 /* If a static var's type was incomplete when the decl was written,
6365 but the type is complete now, lay out the decl now. */
6366 if (DECL_SIZE (exp) == 0
6367 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6368 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6369 layout_decl (exp, 0);
6371 /* ... fall through ... */
6373 case FUNCTION_DECL:
6374 case RESULT_DECL:
6375 if (DECL_RTL (exp) == 0)
6376 abort ();
6378 /* Ensure the variable is marked as used even if it doesn't go through
6379 a parser. If it hasn't been used yet, write out an external
6380 definition. */
6381 if (! TREE_USED (exp))
6383 assemble_external (exp);
6384 TREE_USED (exp) = 1;
6387 /* Show we haven't gotten RTL for this yet. */
6388 temp = 0;
6390 /* Handle variables inherited from containing functions. */
6391 context = decl_function_context (exp);
6393 /* We treat inline_function_decl as an alias for the current function
6394 because that is the inline function whose vars, types, etc.
6395 are being merged into the current function.
6396 See expand_inline_function. */
6398 if (context != 0 && context != current_function_decl
6399 && context != inline_function_decl
6400 /* If var is static, we don't need a static chain to access it. */
6401 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6402 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6404 rtx addr;
6406 /* Mark as non-local and addressable. */
6407 DECL_NONLOCAL (exp) = 1;
6408 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6409 abort ();
6410 (*lang_hooks.mark_addressable) (exp);
6411 if (GET_CODE (DECL_RTL (exp)) != MEM)
6412 abort ();
6413 addr = XEXP (DECL_RTL (exp), 0);
6414 if (GET_CODE (addr) == MEM)
6415 addr
6416 = replace_equiv_address (addr,
6417 fix_lexical_addr (XEXP (addr, 0), exp));
6418 else
6419 addr = fix_lexical_addr (addr, exp);
6421 temp = replace_equiv_address (DECL_RTL (exp), addr);
6424 /* This is the case of an array whose size is to be determined
6425 from its initializer, while the initializer is still being parsed.
6426 See expand_decl. */
6428 else if (GET_CODE (DECL_RTL (exp)) == MEM
6429 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6430 temp = validize_mem (DECL_RTL (exp));
6432 /* If DECL_RTL is memory, we are in the normal case. If either the
6433 address is not valid, or it is not a register and -fforce-addr is
6434 specified, get the address into a register. */
6436 else if (GET_CODE (DECL_RTL (exp)) == MEM
6437 && modifier != EXPAND_CONST_ADDRESS
6438 && modifier != EXPAND_SUM
6439 && modifier != EXPAND_INITIALIZER
6440 && (! memory_address_p (DECL_MODE (exp),
6441 XEXP (DECL_RTL (exp), 0))
6442 || (flag_force_addr
6443 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6444 temp = replace_equiv_address (DECL_RTL (exp),
6445 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6447 /* If we got something, return it. But first, set the alignment
6448 if the address is a register. */
6449 if (temp != 0)
6451 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6452 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6454 return temp;
6457 /* If the mode of DECL_RTL does not match that of the decl, it
6458 must be a promoted value. We return a SUBREG of the wanted mode,
6459 but mark it so that we know that it was already extended. */
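/* For instance, on a target whose PROMOTE_MODE widens HImode variables to
   SImode registers, DECL_RTL is the SImode register; the SUBREG built below
   lets callers use the HImode value while recording that the upper bits have
   already been extended.  (Hypothetical target behavior.)  */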
6461 if (GET_CODE (DECL_RTL (exp)) == REG
6462 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6464 /* Get the signedness used for this variable. Ensure we get the
6465 same mode we got when the variable was declared. */
6466 if (GET_MODE (DECL_RTL (exp))
6467 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6468 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6469 abort ();
6471 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6472 SUBREG_PROMOTED_VAR_P (temp) = 1;
6473 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6474 return temp;
6477 return DECL_RTL (exp);
6479 case INTEGER_CST:
6480 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6481 TREE_INT_CST_HIGH (exp), mode);
6483 /* ??? If overflow is set, fold will have done an incomplete job,
6484 which can result in (plus xx (const_int 0)), which can get
6485 simplified by validate_replace_rtx during virtual register
6486 instantiation, which can result in unrecognizable insns.
6487 Avoid this by forcing all overflows into registers. */
6488 if (TREE_CONSTANT_OVERFLOW (exp)
6489 && modifier != EXPAND_INITIALIZER)
6490 temp = force_reg (mode, temp);
6492 return temp;
6494 case VECTOR_CST:
6495 return const_vector_from_tree (exp);
6497 case CONST_DECL:
6498 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6500 case REAL_CST:
6501 /* If optimized, generate immediate CONST_DOUBLE
6502 which will be turned into memory by reload if necessary.
6504 We used to force a register so that loop.c could see it. But
6505 this does not allow gen_* patterns to perform optimizations with
6506 the constants. It also produces two insns in cases like "x = 1.0;".
6507 On most machines, floating-point constants are not permitted in
6508 many insns, so we'd end up copying it to a register in any case.
6510 Now, we do the copying in expand_binop, if appropriate. */
6511 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6512 TYPE_MODE (TREE_TYPE (exp)));
6514 case COMPLEX_CST:
6515 /* Handle evaluating a complex constant in a CONCAT target. */
6516 if (original_target && GET_CODE (original_target) == CONCAT)
6518 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6519 rtx rtarg, itarg;
6521 rtarg = XEXP (original_target, 0);
6522 itarg = XEXP (original_target, 1);
6524 /* Move the real and imaginary parts separately. */
6525 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6526 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6528 if (op0 != rtarg)
6529 emit_move_insn (rtarg, op0);
6530 if (op1 != itarg)
6531 emit_move_insn (itarg, op1);
6533 return original_target;
6536 /* ... fall through ... */
6538 case STRING_CST:
6539 temp = output_constant_def (exp, 1);
6541 /* temp contains a constant address.
6542 On RISC machines where a constant address isn't valid,
6543 make some insns to get that address into a register. */
6544 if (modifier != EXPAND_CONST_ADDRESS
6545 && modifier != EXPAND_INITIALIZER
6546 && modifier != EXPAND_SUM
6547 && (! memory_address_p (mode, XEXP (temp, 0))
6548 || flag_force_addr))
6549 return replace_equiv_address (temp,
6550 copy_rtx (XEXP (temp, 0)));
6551 return temp;
6553 case EXPR_WITH_FILE_LOCATION:
6555 rtx to_return;
6556 struct file_stack fs;
6558 fs.location = input_location;
6559 fs.next = expr_wfl_stack;
6560 input_filename = EXPR_WFL_FILENAME (exp);
6561 input_line = EXPR_WFL_LINENO (exp);
6562 expr_wfl_stack = &fs;
6563 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6564 emit_line_note (input_location);
6565 /* Possibly avoid switching back and forth here. */
6566 to_return = expand_expr (EXPR_WFL_NODE (exp),
6567 (ignore ? const0_rtx : target),
6568 tmode, modifier);
6569 if (expr_wfl_stack != &fs)
6570 abort ();
6571 input_location = fs.location;
6572 expr_wfl_stack = fs.next;
6573 return to_return;
6576 case SAVE_EXPR:
6577 context = decl_function_context (exp);
6579 /* If this SAVE_EXPR was at global context, assume we are an
6580 initialization function and move it into our context. */
6581 if (context == 0)
6582 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6584 /* We treat inline_function_decl as an alias for the current function
6585 because that is the inline function whose vars, types, etc.
6586 are being merged into the current function.
6587 See expand_inline_function. */
6588 if (context == current_function_decl || context == inline_function_decl)
6589 context = 0;
6591 /* If this is non-local, handle it. */
6592 if (context)
6594 /* The following call just exists to abort if the context is
6595 not of a containing function. */
6596 find_function_data (context);
6598 temp = SAVE_EXPR_RTL (exp);
6599 if (temp && GET_CODE (temp) == REG)
6601 put_var_into_stack (exp, /*rescan=*/true);
6602 temp = SAVE_EXPR_RTL (exp);
6604 if (temp == 0 || GET_CODE (temp) != MEM)
6605 abort ();
6606 return
6607 replace_equiv_address (temp,
6608 fix_lexical_addr (XEXP (temp, 0), exp));
6610 if (SAVE_EXPR_RTL (exp) == 0)
6612 if (mode == VOIDmode)
6613 temp = const0_rtx;
6614 else
6615 temp = assign_temp (build_qualified_type (type,
6616 (TYPE_QUALS (type)
6617 | TYPE_QUAL_CONST)),
6618 3, 0, 0);
6620 SAVE_EXPR_RTL (exp) = temp;
6621 if (!optimize && GET_CODE (temp) == REG)
6622 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6623 save_expr_regs);
6625 /* If the mode of TEMP does not match that of the expression, it
6626 must be a promoted value. We pass store_expr a SUBREG of the
6627 wanted mode but mark it so that we know that it was already
6628 extended. */
6630 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6632 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6633 promote_mode (type, mode, &unsignedp, 0);
6634 SUBREG_PROMOTED_VAR_P (temp) = 1;
6635 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6638 if (temp == const0_rtx)
6639 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6640 else
6641 store_expr (TREE_OPERAND (exp, 0), temp,
6642 modifier == EXPAND_STACK_PARM ? 2 : 0);
6644 TREE_USED (exp) = 1;
6647 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6648 must be a promoted value. We return a SUBREG of the wanted mode,
6649 but mark it so that we know that it was already extended. */
6651 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6652 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6654 /* Compute the signedness and make the proper SUBREG. */
6655 promote_mode (type, mode, &unsignedp, 0);
6656 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6657 SUBREG_PROMOTED_VAR_P (temp) = 1;
6658 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6659 return temp;
6662 return SAVE_EXPR_RTL (exp);
6664 case UNSAVE_EXPR:
6666 rtx temp;
6667 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6668 TREE_OPERAND (exp, 0)
6669 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6670 return temp;
6673 case PLACEHOLDER_EXPR:
6675 tree old_list = placeholder_list;
6676 tree placeholder_expr = 0;
6678 exp = find_placeholder (exp, &placeholder_expr);
6679 if (exp == 0)
6680 abort ();
6682 placeholder_list = TREE_CHAIN (placeholder_expr);
6683 temp = expand_expr (exp, original_target, tmode, modifier);
6684 placeholder_list = old_list;
6685 return temp;
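/* This case and WITH_RECORD_EXPR below work as a pair: WITH_RECORD_EXPR
   pushes its operand 1 onto the placeholder list while operand 0 is
   expanded, and that is where the PLACEHOLDER_EXPR case just above finds
   its replacement object.  This is used for self-referential sizes,
   e.g. by the Ada front end.  */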
6688 case WITH_RECORD_EXPR:
6689 /* Put the object on the placeholder list, expand our first operand,
6690 and pop the list. */
6691 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6692 placeholder_list);
6693 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6694 modifier);
6695 placeholder_list = TREE_CHAIN (placeholder_list);
6696 return target;
6698 case GOTO_EXPR:
6699 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6700 expand_goto (TREE_OPERAND (exp, 0));
6701 else
6702 expand_computed_goto (TREE_OPERAND (exp, 0));
6703 return const0_rtx;
6705 case EXIT_EXPR:
6706 expand_exit_loop_if_false (NULL,
6707 invert_truthvalue (TREE_OPERAND (exp, 0)));
6708 return const0_rtx;
6710 case LABELED_BLOCK_EXPR:
6711 if (LABELED_BLOCK_BODY (exp))
6712 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6713 /* Should perhaps use expand_label, but this is simpler and safer. */
6714 do_pending_stack_adjust ();
6715 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6716 return const0_rtx;
6718 case EXIT_BLOCK_EXPR:
6719 if (EXIT_BLOCK_RETURN (exp))
6720 sorry ("returned value in block_exit_expr");
6721 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6722 return const0_rtx;
6724 case LOOP_EXPR:
6725 push_temp_slots ();
6726 expand_start_loop (1);
6727 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6728 expand_end_loop ();
6729 pop_temp_slots ();
6731 return const0_rtx;
6733 case BIND_EXPR:
6735 tree vars = TREE_OPERAND (exp, 0);
6737 /* Need to open a binding contour here because
6738 if there are any cleanups they must be contained here. */
6739 expand_start_bindings (2);
6741 /* Mark the corresponding BLOCK for output in its proper place. */
6742 if (TREE_OPERAND (exp, 2) != 0
6743 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6744 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6746 /* If VARS have not yet been expanded, expand them now. */
6747 while (vars)
6749 if (!DECL_RTL_SET_P (vars))
6750 expand_decl (vars);
6751 expand_decl_init (vars);
6752 vars = TREE_CHAIN (vars);
6755 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6757 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6759 return temp;
6762 case RTL_EXPR:
6763 if (RTL_EXPR_SEQUENCE (exp))
6765 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6766 abort ();
6767 emit_insn (RTL_EXPR_SEQUENCE (exp));
6768 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6770 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6771 free_temps_for_rtl_expr (exp);
6772 return RTL_EXPR_RTL (exp);
6774 case CONSTRUCTOR:
6775 /* If we don't need the result, just ensure we evaluate any
6776 subexpressions. */
6777 if (ignore)
6779 tree elt;
6781 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6782 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6784 return const0_rtx;
6787 /* All elts simple constants => refer to a constant in memory. But
6788 if this is a non-BLKmode mode, let it store a field at a time
6789 since that should make a CONST_INT or CONST_DOUBLE when we
6790 fold. Likewise, if we have a target we can use, it is best to
6791 store directly into the target unless the type is large enough
6792 that memcpy will be used. If we are making an initializer and
6793 all operands are constant, put it in memory as well.
6795 FIXME: Avoid trying to fill vector constructors piece-meal.
6796 Output them with output_constant_def below unless we're sure
6797 they're zeros. This should go away when vector initializers
6798 are treated like VECTOR_CST instead of arrays.
6799 */
6800 else if ((TREE_STATIC (exp)
6801 && ((mode == BLKmode
6802 && ! (target != 0 && safe_from_p (target, exp, 1)))
6803 || TREE_ADDRESSABLE (exp)
6804 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6805 && (! MOVE_BY_PIECES_P
6806 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6807 TYPE_ALIGN (type)))
6808 && ((TREE_CODE (type) == VECTOR_TYPE
6809 && !is_zeros_p (exp))
6810 || ! mostly_zeros_p (exp)))))
6811 || ((modifier == EXPAND_INITIALIZER
6812 || modifier == EXPAND_CONST_ADDRESS)
6813 && TREE_CONSTANT (exp)))
6815 rtx constructor = output_constant_def (exp, 1);
6817 if (modifier != EXPAND_CONST_ADDRESS
6818 && modifier != EXPAND_INITIALIZER
6819 && modifier != EXPAND_SUM)
6820 constructor = validize_mem (constructor);
6822 return constructor;
6824 else
6826 /* Handle calls that pass values in multiple non-contiguous
6827 locations. The Irix 6 ABI has examples of this. */
6828 if (target == 0 || ! safe_from_p (target, exp, 1)
6829 || GET_CODE (target) == PARALLEL
6830 || modifier == EXPAND_STACK_PARM)
6831 target
6832 = assign_temp (build_qualified_type (type,
6833 (TYPE_QUALS (type)
6834 | (TREE_READONLY (exp)
6835 * TYPE_QUAL_CONST))),
6836 0, TREE_ADDRESSABLE (exp), 1);
6838 store_constructor (exp, target, 0, int_expr_size (exp));
6839 return target;
6842 case INDIRECT_REF:
6844 tree exp1 = TREE_OPERAND (exp, 0);
6845 tree index;
6846 tree string = string_constant (exp1, &index);
6848 /* Try to optimize reads from const strings. */
6849 if (string
6850 && TREE_CODE (string) == STRING_CST
6851 && TREE_CODE (index) == INTEGER_CST
6852 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6853 && GET_MODE_CLASS (mode) == MODE_INT
6854 && GET_MODE_SIZE (mode) == 1
6855 && modifier != EXPAND_WRITE)
6856 return gen_int_mode (TREE_STRING_POINTER (string)
6857 [TREE_INT_CST_LOW (index)], mode);
6859 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6860 op0 = memory_address (mode, op0);
6861 temp = gen_rtx_MEM (mode, op0);
6862 set_mem_attributes (temp, exp, 0);
6864 /* If we are writing to this object and its type is a record with
6865 readonly fields, we must mark it as readonly so it will
6866 conflict with readonly references to those fields. */
6867 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6868 RTX_UNCHANGING_P (temp) = 1;
6870 return temp;
6873 case ARRAY_REF:
6874 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6875 abort ();
6878 tree array = TREE_OPERAND (exp, 0);
6879 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6880 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6881 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6882 HOST_WIDE_INT i;
6884 /* Optimize the special-case of a zero lower bound.
6886 We convert the low_bound to sizetype to avoid some problems
6887 with constant folding. (E.g. suppose the lower bound is 1,
6888 and its mode is QI. Without the conversion, (ARRAY
6889 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6890 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6892 if (! integer_zerop (low_bound))
6893 index = size_diffop (index, convert (sizetype, low_bound));
6895 /* Fold an expression like: "foo"[2].
6896 This is not done in fold so it won't happen inside &.
6897 Don't fold if this is for wide characters since it's too
6898 difficult to do correctly and this is a very rare case. */
6900 if (modifier != EXPAND_CONST_ADDRESS
6901 && modifier != EXPAND_INITIALIZER
6902 && modifier != EXPAND_MEMORY
6903 && TREE_CODE (array) == STRING_CST
6904 && TREE_CODE (index) == INTEGER_CST
6905 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6906 && GET_MODE_CLASS (mode) == MODE_INT
6907 && GET_MODE_SIZE (mode) == 1)
6908 return gen_int_mode (TREE_STRING_POINTER (array)
6909 [TREE_INT_CST_LOW (index)], mode);
6911 /* If this is a constant index into a constant array,
6912 just get the value from the array. Handle both the cases when
6913 we have an explicit constructor and when our operand is a variable
6914 that was declared const. */
6916 if (modifier != EXPAND_CONST_ADDRESS
6917 && modifier != EXPAND_INITIALIZER
6918 && modifier != EXPAND_MEMORY
6919 && TREE_CODE (array) == CONSTRUCTOR
6920 && ! TREE_SIDE_EFFECTS (array)
6921 && TREE_CODE (index) == INTEGER_CST
6922 && 0 > compare_tree_int (index,
6923 list_length (CONSTRUCTOR_ELTS
6924 (TREE_OPERAND (exp, 0)))))
6926 tree elem;
6928 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6929 i = TREE_INT_CST_LOW (index);
6930 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6933 if (elem)
6934 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6935 modifier);
6938 else if (optimize >= 1
6939 && modifier != EXPAND_CONST_ADDRESS
6940 && modifier != EXPAND_INITIALIZER
6941 && modifier != EXPAND_MEMORY
6942 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6943 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6944 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6946 if (TREE_CODE (index) == INTEGER_CST)
6948 tree init = DECL_INITIAL (array);
6950 if (TREE_CODE (init) == CONSTRUCTOR)
6952 tree elem;
6954 for (elem = CONSTRUCTOR_ELTS (init);
6955 (elem
6956 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6957 elem = TREE_CHAIN (elem))
6960 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6961 return expand_expr (fold (TREE_VALUE (elem)), target,
6962 tmode, modifier);
6964 else if (TREE_CODE (init) == STRING_CST
6965 && 0 > compare_tree_int (index,
6966 TREE_STRING_LENGTH (init)))
6968 tree type = TREE_TYPE (TREE_TYPE (init));
6969 enum machine_mode mode = TYPE_MODE (type);
6971 if (GET_MODE_CLASS (mode) == MODE_INT
6972 && GET_MODE_SIZE (mode) == 1)
6973 return gen_int_mode (TREE_STRING_POINTER (init)
6974 [TREE_INT_CST_LOW (index)], mode);
6979 goto normal_inner_ref;
6981 case COMPONENT_REF:
6982 /* If the operand is a CONSTRUCTOR, we can just extract the
6983 appropriate field if it is present. */
6984 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6986 tree elt;
6988 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6989 elt = TREE_CHAIN (elt))
6990 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6991 /* We can normally use the value of the field in the
6992 CONSTRUCTOR. However, if this is a bitfield in
6993 an integral mode that we can fit in a HOST_WIDE_INT,
6994 we must mask only the number of bits in the bitfield,
6995 since this is done implicitly by the constructor. If
6996 the bitfield does not meet either of those conditions,
6997 we can't do this optimization. */
6998 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6999 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7000 == MODE_INT)
7001 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7002 <= HOST_BITS_PER_WIDE_INT))))
7004 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7005 && modifier == EXPAND_STACK_PARM)
7006 target = 0;
7007 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7008 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7010 HOST_WIDE_INT bitsize
7011 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7012 enum machine_mode imode
7013 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7015 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7017 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7018 op0 = expand_and (imode, op0, op1, target);
7020 else
7022 tree count
7023 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7024 0);
7026 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7027 target, 0);
7028 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7029 target, 0);
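/* For a signed bit-field the two shifts above first move the field to the
   top of the word and then arithmetically shift it back down, which
   sign-extends the low BITSIZE bits of the constructor value.  */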
7033 return op0;
7036 goto normal_inner_ref;
7038 case BIT_FIELD_REF:
7039 case ARRAY_RANGE_REF:
7040 normal_inner_ref:
7042 enum machine_mode mode1;
7043 HOST_WIDE_INT bitsize, bitpos;
7044 tree offset;
7045 int volatilep = 0;
7046 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7047 &mode1, &unsignedp, &volatilep);
7048 rtx orig_op0;
7050 /* If we got back the original object, something is wrong. Perhaps
7051 we are evaluating an expression too early. In any event, don't
7052 infinitely recurse. */
7053 if (tem == exp)
7054 abort ();
7056 /* If TEM's type is a union of variable size, pass TARGET to the inner
7057 computation, since it will need a temporary and TARGET is known
7058 to be usable for that purpose. This occurs in unchecked conversion in Ada. */
7060 orig_op0 = op0
7061 = expand_expr (tem,
7062 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7063 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7064 != INTEGER_CST)
7065 && modifier != EXPAND_STACK_PARM
7066 ? target : NULL_RTX),
7067 VOIDmode,
7068 (modifier == EXPAND_INITIALIZER
7069 || modifier == EXPAND_CONST_ADDRESS
7070 || modifier == EXPAND_STACK_PARM)
7071 ? modifier : EXPAND_NORMAL);
7073 /* If this is a constant, put it into a register if it is a
7074 legitimate constant and OFFSET is 0 and memory if it isn't. */
7075 if (CONSTANT_P (op0))
7077 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7078 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7079 && offset == 0)
7080 op0 = force_reg (mode, op0);
7081 else
7082 op0 = validize_mem (force_const_mem (mode, op0));
7085 /* Otherwise, if this object is not in memory and we either have an
7086 offset or a BLKmode result, put it there. This case can't occur in
7087 C, but can in Ada if we have unchecked conversion of an expression
7088 from a scalar type to an array or record type or for an
7089 ARRAY_RANGE_REF whose type is BLKmode. */
7090 else if (GET_CODE (op0) != MEM
7091 && (offset != 0
7092 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7094 /* If the operand is a SAVE_EXPR, we can deal with this by
7095 forcing the SAVE_EXPR into memory. */
7096 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7098 put_var_into_stack (TREE_OPERAND (exp, 0),
7099 /*rescan=*/true);
7100 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7102 else
7104 tree nt
7105 = build_qualified_type (TREE_TYPE (tem),
7106 (TYPE_QUALS (TREE_TYPE (tem))
7107 | TYPE_QUAL_CONST));
7108 rtx memloc = assign_temp (nt, 1, 1, 1);
7110 emit_move_insn (memloc, op0);
7111 op0 = memloc;
7115 if (offset != 0)
7117 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7118 EXPAND_SUM);
7120 if (GET_CODE (op0) != MEM)
7121 abort ();
7123 #ifdef POINTERS_EXTEND_UNSIGNED
7124 if (GET_MODE (offset_rtx) != Pmode)
7125 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7126 #else
7127 if (GET_MODE (offset_rtx) != ptr_mode)
7128 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7129 #endif
7131 /* A constant address in OP0 can have VOIDmode; we must not try
7132 to call force_reg for that case. Avoid that case. */
7133 if (GET_CODE (op0) == MEM
7134 && GET_MODE (op0) == BLKmode
7135 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7136 && bitsize != 0
7137 && (bitpos % bitsize) == 0
7138 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7139 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7141 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7142 bitpos = 0;
7145 op0 = offset_address (op0, offset_rtx,
7146 highest_pow2_factor (offset));
7149 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7150 record its alignment as BIGGEST_ALIGNMENT. */
7151 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7152 && is_aligning_offset (offset, tem))
7153 set_mem_align (op0, BIGGEST_ALIGNMENT);
7155 /* Don't forget about volatility even if this is a bitfield. */
7156 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7158 if (op0 == orig_op0)
7159 op0 = copy_rtx (op0);
7161 MEM_VOLATILE_P (op0) = 1;
7164 /* The following code doesn't handle CONCAT.
7165 Assume only bitpos == 0 can be used for CONCAT, due to
7166 one-element arrays having the same mode as their element. */
7167 if (GET_CODE (op0) == CONCAT)
7169 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7170 abort ();
7171 return op0;
7174 /* In cases where an aligned union has an unaligned object
7175 as a field, we might be extracting a BLKmode value from
7176 an integer-mode (e.g., SImode) object. Handle this case
7177 by doing the extract into an object as wide as the field
7178 (which we know to be the width of a basic mode), then
7179 storing into memory, and changing the mode to BLKmode. */
7180 if (mode1 == VOIDmode
7181 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7182 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7183 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7184 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7185 && modifier != EXPAND_CONST_ADDRESS
7186 && modifier != EXPAND_INITIALIZER)
7187 /* If the field isn't aligned enough to fetch as a memref,
7188 fetch it as a bit field. */
7189 || (mode1 != BLKmode
7190 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7191 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7192 && ((modifier == EXPAND_CONST_ADDRESS
7193 || modifier == EXPAND_INITIALIZER)
7194 ? STRICT_ALIGNMENT
7195 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7196 || (bitpos % BITS_PER_UNIT != 0)))
7197 /* If the type and the field are a constant size and the
7198 size of the type isn't the same size as the bitfield,
7199 we must use bitfield operations. */
7200 || (bitsize >= 0
7201 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7202 == INTEGER_CST)
7203 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7204 bitsize)))
7206 enum machine_mode ext_mode = mode;
7208 if (ext_mode == BLKmode
7209 && ! (target != 0 && GET_CODE (op0) == MEM
7210 && GET_CODE (target) == MEM
7211 && bitpos % BITS_PER_UNIT == 0))
7212 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7214 if (ext_mode == BLKmode)
7216 if (target == 0)
7217 target = assign_temp (type, 0, 1, 1);
7219 if (bitsize == 0)
7220 return target;
7222 /* In this case, BITPOS must start at a byte boundary and
7223 TARGET, if specified, must be a MEM. */
7224 if (GET_CODE (op0) != MEM
7225 || (target != 0 && GET_CODE (target) != MEM)
7226 || bitpos % BITS_PER_UNIT != 0)
7227 abort ();
7229 emit_block_move (target,
7230 adjust_address (op0, VOIDmode,
7231 bitpos / BITS_PER_UNIT),
7232 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7233 / BITS_PER_UNIT),
7234 (modifier == EXPAND_STACK_PARM
7235 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7237 return target;
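/* Otherwise, extract the field with a bit-field extraction in EXT_MODE. */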
7240 op0 = validize_mem (op0);
7242 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7243 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7245 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7246 (modifier == EXPAND_STACK_PARM
7247 ? NULL_RTX : target),
7248 ext_mode, ext_mode,
7249 int_size_in_bytes (TREE_TYPE (tem)));
7251 /* If the result is a record type and BITSIZE is narrower than
7252 the mode of OP0, an integral mode, and this is a big-endian
7253 machine, we must put the field into the high-order bits. */
7254 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7255 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7256 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7257 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7258 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7259 - bitsize),
7260 op0, 1);
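/* If the caller wants a BLKmode value, the field extracted in the
integer mode EXT_MODE must be spilled to a constant temporary in
memory, which is then relabeled as BLKmode. */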
7262 if (mode == BLKmode)
7264 rtx new = assign_temp (build_qualified_type
7265 ((*lang_hooks.types.type_for_mode)
7266 (ext_mode, 0),
7267 TYPE_QUAL_CONST), 0, 1, 1);
7269 emit_move_insn (new, op0);
7270 op0 = copy_rtx (new);
7271 PUT_MODE (op0, BLKmode);
7272 set_mem_attributes (op0, exp, 1);
7275 return op0;
7278 /* If the result is BLKmode, use that to access the object
7279 now as well. */
7280 if (mode == BLKmode)
7281 mode1 = BLKmode;
7283 /* Get a reference to just this component. */
7284 if (modifier == EXPAND_CONST_ADDRESS
7285 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7286 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7287 else
7288 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7290 if (op0 == orig_op0)
7291 op0 = copy_rtx (op0);
7293 set_mem_attributes (op0, exp, 0);
7294 if (GET_CODE (XEXP (op0, 0)) == REG)
7295 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7297 MEM_VOLATILE_P (op0) |= volatilep;
7298 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7299 || modifier == EXPAND_CONST_ADDRESS
7300 || modifier == EXPAND_INITIALIZER)
7301 return op0;
7302 else if (target == 0)
7303 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7305 convert_move (target, op0, unsignedp);
7306 return target;
7309 case VTABLE_REF:
7311 rtx insn, before = get_last_insn (), vtbl_ref;
7313 /* Evaluate the interior expression. */
7314 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7315 tmode, modifier);
7317 /* Get or create an instruction off which to hang a note. */
7318 if (REG_P (subtarget))
7320 target = subtarget;
7321 insn = get_last_insn ();
7322 if (insn == before)
7323 abort ();
7324 if (! INSN_P (insn))
7325 insn = prev_nonnote_insn (insn);
7327 else
7329 target = gen_reg_rtx (GET_MODE (subtarget));
7330 insn = emit_move_insn (target, subtarget);
7333 /* Collect the data for the note. */
7334 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7335 vtbl_ref = plus_constant (vtbl_ref,
7336 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7337 /* Discard the initial CONST that was added. */
7338 vtbl_ref = XEXP (vtbl_ref, 0);
7340 REG_NOTES (insn)
7341 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7343 return target;
7346 /* Intended for a reference to a buffer of a file-object in Pascal.
7347 But it's not certain that a special tree code will really be
7348 necessary for these. INDIRECT_REF might work for them. */
7349 case BUFFER_REF:
7350 abort ();
7352 case IN_EXPR:
7354 /* Pascal set IN expression.
7356 Algorithm:
7357 rlo = set_low - (set_low%bits_per_word);
7358 the_word = set [ (index - rlo)/bits_per_word ];
7359 bit_index = index % bits_per_word;
7360 bitmask = 1 << bit_index;
7361 return !!(the_word & bitmask); */
7363 tree set = TREE_OPERAND (exp, 0);
7364 tree index = TREE_OPERAND (exp, 1);
7365 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7366 tree set_type = TREE_TYPE (set);
7367 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7368 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7369 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7370 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7371 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7372 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7373 rtx setaddr = XEXP (setval, 0);
7374 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7375 rtx rlow;
7376 rtx diff, quo, rem, addr, bit, result;
7378 /* If domain is empty, answer is no. Likewise if index is constant
7379 and out of bounds. */
7380 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7381 && TREE_CODE (set_low_bound) == INTEGER_CST
7382 && tree_int_cst_lt (set_high_bound, set_low_bound))
7383 || (TREE_CODE (index) == INTEGER_CST
7384 && TREE_CODE (set_low_bound) == INTEGER_CST
7385 && tree_int_cst_lt (index, set_low_bound))
7386 || (TREE_CODE (set_high_bound) == INTEGER_CST
7387 && TREE_CODE (index) == INTEGER_CST
7388 && tree_int_cst_lt (set_high_bound, index))))
7389 return const0_rtx;
7391 if (target == 0)
7392 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7394 /* If we get here, we have to generate the code for both cases
7395 (in range and out of range). */
7397 op0 = gen_label_rtx ();
7398 op1 = gen_label_rtx ();
7400 if (! (GET_CODE (index_val) == CONST_INT
7401 && GET_CODE (lo_r) == CONST_INT))
7402 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7403 GET_MODE (index_val), iunsignedp, op1);
7405 if (! (GET_CODE (index_val) == CONST_INT
7406 && GET_CODE (hi_r) == CONST_INT))
7407 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7408 GET_MODE (index_val), iunsignedp, op1);
7410 /* Calculate the element number of bit zero in the first word
7411 of the set. */
7412 if (GET_CODE (lo_r) == CONST_INT)
7413 rlow = GEN_INT (INTVAL (lo_r)
7414 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7415 else
7416 rlow = expand_binop (index_mode, and_optab, lo_r,
7417 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7418 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7420 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7421 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7423 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7424 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7425 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7426 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7428 addr = memory_address (byte_mode,
7429 expand_binop (index_mode, add_optab, diff,
7430 setaddr, NULL_RTX, iunsignedp,
7431 OPTAB_LIB_WIDEN));
7433 /* Extract the bit we want to examine. */
7434 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7435 gen_rtx_MEM (byte_mode, addr),
7436 make_tree (TREE_TYPE (index), rem),
7437 NULL_RTX, 1);
7438 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7439 GET_MODE (target) == byte_mode ? target : 0,
7440 1, OPTAB_LIB_WIDEN);
7442 if (result != target)
7443 convert_move (target, result, 1);
7445 /* Output the code to handle the out-of-range case. */
7446 emit_jump (op0);
7447 emit_label (op1);
7448 emit_move_insn (target, const0_rtx);
7449 emit_label (op0);
7450 return target;
7453 case WITH_CLEANUP_EXPR:
7454 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7456 WITH_CLEANUP_EXPR_RTL (exp)
7457 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7458 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7459 CLEANUP_EH_ONLY (exp));
7461 /* That's it for this cleanup. */
7462 TREE_OPERAND (exp, 1) = 0;
7464 return WITH_CLEANUP_EXPR_RTL (exp);
7466 case CLEANUP_POINT_EXPR:
7468 /* Start a new binding layer that will keep track of all cleanup
7469 actions to be performed. */
7470 expand_start_bindings (2);
7472 target_temp_slot_level = temp_slot_level;
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7475 /* If we're going to use this value, load it up now. */
7476 if (! ignore)
7477 op0 = force_not_mem (op0);
7478 preserve_temp_slots (op0);
7479 expand_end_bindings (NULL_TREE, 0, 0);
7481 return op0;
7483 case CALL_EXPR:
7484 /* Check for a built-in function. */
7485 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7486 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7487 == FUNCTION_DECL)
7488 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7490 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7491 == BUILT_IN_FRONTEND)
7492 return (*lang_hooks.expand_expr) (exp, original_target,
7493 tmode, modifier);
7494 else
7495 return expand_builtin (exp, target, subtarget, tmode, ignore);
7498 return expand_call (exp, target, ignore);
7500 case NON_LVALUE_EXPR:
7501 case NOP_EXPR:
7502 case CONVERT_EXPR:
7503 case REFERENCE_EXPR:
7504 if (TREE_OPERAND (exp, 0) == error_mark_node)
7505 return const0_rtx;
7507 if (TREE_CODE (type) == UNION_TYPE)
7509 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7511 /* If both input and output are BLKmode, this conversion isn't doing
7512 anything except possibly changing memory attributes. */
7513 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7515 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7516 modifier);
7518 result = copy_rtx (result);
7519 set_mem_attributes (result, exp, 0);
7520 return result;
7523 if (target == 0)
7524 target = assign_temp (type, 0, 1, 1);
7526 if (GET_CODE (target) == MEM)
7527 /* Store data into beginning of memory target. */
7528 store_expr (TREE_OPERAND (exp, 0),
7529 adjust_address (target, TYPE_MODE (valtype), 0),
7530 modifier == EXPAND_STACK_PARM ? 2 : 0);
7532 else if (GET_CODE (target) == REG)
7533 /* Store this field into a union of the proper type. */
7534 store_field (target,
7535 MIN ((int_size_in_bytes (TREE_TYPE
7536 (TREE_OPERAND (exp, 0)))
7537 * BITS_PER_UNIT),
7538 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7539 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7540 VOIDmode, 0, type, 0);
7541 else
7542 abort ();
7544 /* Return the entire union. */
7545 return target;
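/* If the conversion does not change the machine mode, no conversion
code is needed; just expand the operand. Any promoted-SUBREG marking
is cleared below when the signedness changes, so that users of the
value perform the proper extension. */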
7548 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7550 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7551 modifier);
7553 /* If the signedness of the conversion differs and OP0 is
7554 a promoted SUBREG, clear that indication since we now
7555 have to do the proper extension. */
7556 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7557 && GET_CODE (op0) == SUBREG)
7558 SUBREG_PROMOTED_VAR_P (op0) = 0;
7560 return op0;
7563 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7564 if (GET_MODE (op0) == mode)
7565 return op0;
7567 /* If OP0 is a constant, just convert it into the proper mode. */
7568 if (CONSTANT_P (op0))
7570 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7571 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7573 if (modifier == EXPAND_INITIALIZER)
7574 return simplify_gen_subreg (mode, op0, inner_mode,
7575 subreg_lowpart_offset (mode,
7576 inner_mode));
7577 else
7578 return convert_modes (mode, inner_mode, op0,
7579 TREE_UNSIGNED (inner_type));
7582 if (modifier == EXPAND_INITIALIZER)
7583 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7585 if (target == 0)
7586 return
7587 convert_to_mode (mode, op0,
7588 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7589 else
7590 convert_move (target, op0,
7591 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7592 return target;
7594 case VIEW_CONVERT_EXPR:
7595 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7597 /* If the input and output modes are both the same, we are done.
7598 Otherwise, if neither mode is BLKmode and both are integral and within
7599 a word, we can use gen_lowpart. If neither is true, make sure the
7600 operand is in memory and convert the MEM to the new mode. */
7601 if (TYPE_MODE (type) == GET_MODE (op0))
7602 ;
7603 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7604 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7605 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7606 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7607 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7608 op0 = gen_lowpart (TYPE_MODE (type), op0);
7609 else if (GET_CODE (op0) != MEM)
7611 /* If the operand is not a MEM, force it into memory. Since we
7612 are going to be changing the mode of the MEM, don't call
7613 force_const_mem for constants because we don't allow pool
7614 constants to change mode. */
7615 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7617 if (TREE_ADDRESSABLE (exp))
7618 abort ();
7620 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7621 target
7622 = assign_stack_temp_for_type
7623 (TYPE_MODE (inner_type),
7624 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7626 emit_move_insn (target, op0);
7627 op0 = target;
7630 /* At this point, OP0 is in the correct mode. If the output type is such
7631 that the operand is known to be aligned, indicate that it is.
7632 Otherwise, we need only be concerned about alignment for non-BLKmode
7633 results. */
7634 if (GET_CODE (op0) == MEM)
7636 op0 = copy_rtx (op0);
7638 if (TYPE_ALIGN_OK (type))
7639 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7640 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7641 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7643 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7644 HOST_WIDE_INT temp_size
7645 = MAX (int_size_in_bytes (inner_type),
7646 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7647 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7648 temp_size, 0, type);
7649 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7651 if (TREE_ADDRESSABLE (exp))
7652 abort ();
7654 if (GET_MODE (op0) == BLKmode)
7655 emit_block_move (new_with_op0_mode, op0,
7656 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7657 (modifier == EXPAND_STACK_PARM
7658 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7659 else
7660 emit_move_insn (new_with_op0_mode, op0);
7662 op0 = new;
7665 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7668 return op0;
7670 case PLUS_EXPR:
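/* With -ftrapv, signed integer addition must use the overflow-trapping
addv optab. */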
7671 this_optab = ! unsignedp && flag_trapv
7672 && (GET_MODE_CLASS (mode) == MODE_INT)
7673 ? addv_optab : add_optab;
7675 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7676 something else, make sure we add the register to the constant and
7677 then to the other thing. This case can occur during strength
7678 reduction and doing it this way will produce better code if the
7679 frame pointer or argument pointer is eliminated.
7681 fold-const.c will ensure that the constant is always in the inner
7682 PLUS_EXPR, so the only case we need to do anything about is if
7683 sp, ap, or fp is our second argument, in which case we must swap
7684 the innermost first argument and our second argument. */
7686 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7687 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7688 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7689 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7690 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7691 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7693 tree t = TREE_OPERAND (exp, 1);
7695 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7696 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7699 /* If the result is to be ptr_mode and we are adding an integer to
7700 something, we might be forming a constant. So try to use
7701 plus_constant. If it produces a sum and we can't accept it,
7702 use force_operand. This allows P = &ARR[const] to generate
7703 efficient code on machines where a SYMBOL_REF is not a valid
7704 address.
7706 If this is an EXPAND_SUM call, always return the sum. */
7707 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7708 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7710 if (modifier == EXPAND_STACK_PARM)
7711 target = 0;
7712 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7713 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7714 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7716 rtx constant_part;
7718 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7719 EXPAND_SUM);
7720 /* Use immed_double_const to ensure that the constant is
7721 truncated according to the mode of OP1, then sign extended
7722 to a HOST_WIDE_INT. Using the constant directly can result
7723 in non-canonical RTL in a 64x32 cross compile. */
7724 constant_part
7725 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7726 (HOST_WIDE_INT) 0,
7727 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7728 op1 = plus_constant (op1, INTVAL (constant_part));
7729 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7730 op1 = force_operand (op1, target);
7731 return op1;
7734 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7735 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7736 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7738 rtx constant_part;
7740 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7741 (modifier == EXPAND_INITIALIZER
7742 ? EXPAND_INITIALIZER : EXPAND_SUM));
7743 if (! CONSTANT_P (op0))
7745 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7746 VOIDmode, modifier);
7747 /* Return a PLUS if modifier says it's OK. */
7748 if (modifier == EXPAND_SUM
7749 || modifier == EXPAND_INITIALIZER)
7750 return simplify_gen_binary (PLUS, mode, op0, op1);
7751 goto binop2;
7753 /* Use immed_double_const to ensure that the constant is
7754 truncated according to the mode of OP0, then sign extended
7755 to a HOST_WIDE_INT. Using the constant directly can result
7756 in non-canonical RTL in a 64x32 cross compile. */
7757 constant_part
7758 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7759 (HOST_WIDE_INT) 0,
7760 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7761 op0 = plus_constant (op0, INTVAL (constant_part));
7762 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7763 op0 = force_operand (op0, target);
7764 return op0;
7768 /* No sense saving up arithmetic to be done
7769 if it's all in the wrong mode to form part of an address.
7770 And force_operand won't know whether to sign-extend or
7771 zero-extend. */
7772 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7773 || mode != ptr_mode)
7775 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7776 subtarget, &op0, &op1, 0);
7777 if (op0 == const0_rtx)
7778 return op1;
7779 if (op1 == const0_rtx)
7780 return op0;
7781 goto binop2;
7784 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7785 subtarget, &op0, &op1, modifier);
7786 return simplify_gen_binary (PLUS, mode, op0, op1);
7788 case MINUS_EXPR:
7789 /* For initializers, we are allowed to return a MINUS of two
7790 symbolic constants. Here we handle all cases when both operands
7791 are constant. */
7794 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7795 && really_constant_p (TREE_OPERAND (exp, 0))
7796 && really_constant_p (TREE_OPERAND (exp, 1)))
7798 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7799 NULL_RTX, &op0, &op1, modifier);
7801 /* If the last operand is a CONST_INT, use plus_constant of
7802 the negated constant. Else make the MINUS. */
7803 if (GET_CODE (op1) == CONST_INT)
7804 return plus_constant (op0, - INTVAL (op1));
7805 else
7806 return gen_rtx_MINUS (mode, op0, op1);
7809 this_optab = ! unsignedp && flag_trapv
7810 && (GET_MODE_CLASS(mode) == MODE_INT)
7811 ? subv_optab : sub_optab;
7813 /* No sense saving up arithmetic to be done
7814 if it's all in the wrong mode to form part of an address.
7815 And force_operand won't know whether to sign-extend or
7816 zero-extend. */
7817 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7818 || mode != ptr_mode)
7819 goto binop;
7821 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7822 subtarget, &op0, &op1, modifier);
7824 /* Convert A - const to A + (-const). */
7825 if (GET_CODE (op1) == CONST_INT)
7827 op1 = negate_rtx (mode, op1);
7828 return simplify_gen_binary (PLUS, mode, op0, op1);
7831 goto binop2;
7833 case MULT_EXPR:
7834 /* If first operand is constant, swap them.
7835 Thus the following special case checks need only
7836 check the second operand. */
7837 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7839 tree t1 = TREE_OPERAND (exp, 0);
7840 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7841 TREE_OPERAND (exp, 1) = t1;
7844 /* Attempt to return something suitable for generating an
7845 indexed address, for machines that support that. */
7847 if (modifier == EXPAND_SUM && mode == ptr_mode
7848 && host_integerp (TREE_OPERAND (exp, 1), 0))
7850 tree exp1 = TREE_OPERAND (exp, 1);
7852 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7853 EXPAND_SUM);
7855 /* If we knew for certain that this is arithmetic for an array
7856 reference, and we knew the bounds of the array, then we could
7857 apply the distributive law across (PLUS X C) for constant C.
7858 Without such knowledge, we risk overflowing the computation
7859 when both X and C are large, but X+C isn't. */
7860 /* ??? Could perhaps special-case EXP being unsigned and C being
7861 positive. In that case we are certain that X+C is no smaller
7862 than X and so the transformed expression will overflow iff the
7863 original would have. */
7865 if (GET_CODE (op0) != REG)
7866 op0 = force_operand (op0, NULL_RTX);
7867 if (GET_CODE (op0) != REG)
7868 op0 = copy_to_mode_reg (mode, op0);
7870 return gen_rtx_MULT (mode, op0,
7871 gen_int_mode (tree_low_cst (exp1, 0),
7872 TYPE_MODE (TREE_TYPE (exp1))));
7875 if (modifier == EXPAND_STACK_PARM)
7876 target = 0;
7878 /* Check for multiplying things that have been extended
7879 from a narrower type. If this machine supports multiplying
7880 in that narrower type with a result in the desired type,
7881 do it that way, and avoid the explicit type-conversion. */
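/* For example, multiplying two values that were widened from SImode to
DImode can often be done with a single SImode x SImode -> DImode
widening multiply instead of a full DImode multiply. */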
7882 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7883 && TREE_CODE (type) == INTEGER_TYPE
7884 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7885 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7886 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7887 && int_fits_type_p (TREE_OPERAND (exp, 1),
7888 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7889 /* Don't use a widening multiply if a shift will do. */
7890 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7891 > HOST_BITS_PER_WIDE_INT)
7892 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7893 ||
7894 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7895 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7896 ==
7897 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7898 /* If both operands are extended, they must either both
7899 be zero-extended or both be sign-extended. */
7900 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7901 ==
7902 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7904 enum machine_mode innermode
7905 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7906 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7907 ? smul_widen_optab : umul_widen_optab);
7908 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7909 ? umul_widen_optab : smul_widen_optab);
7910 if (mode == GET_MODE_WIDER_MODE (innermode))
7912 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7914 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7915 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7916 TREE_OPERAND (exp, 1),
7917 NULL_RTX, &op0, &op1, 0);
7918 else
7919 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7920 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7921 NULL_RTX, &op0, &op1, 0);
7922 goto binop2;
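/* If only the widening multiply of the opposite signedness is available
(and the narrow mode is a full word), use it and then correct the high
part of the product afterwards. */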
7924 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7925 && innermode == word_mode)
7927 rtx htem;
7928 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7929 NULL_RTX, VOIDmode, 0);
7930 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7931 op1 = convert_modes (innermode, mode,
7932 expand_expr (TREE_OPERAND (exp, 1),
7933 NULL_RTX, VOIDmode, 0),
7934 unsignedp);
7935 else
7936 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7937 NULL_RTX, VOIDmode, 0);
7938 temp = expand_binop (mode, other_optab, op0, op1, target,
7939 unsignedp, OPTAB_LIB_WIDEN);
7940 htem = expand_mult_highpart_adjust (innermode,
7941 gen_highpart (innermode, temp),
7942 op0, op1,
7943 gen_highpart (innermode, temp),
7944 unsignedp);
7945 emit_move_insn (gen_highpart (innermode, temp), htem);
7946 return temp;
7950 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7951 subtarget, &op0, &op1, 0);
7952 return expand_mult (mode, op0, op1, target, unsignedp);
7954 case TRUNC_DIV_EXPR:
7955 case FLOOR_DIV_EXPR:
7956 case CEIL_DIV_EXPR:
7957 case ROUND_DIV_EXPR:
7958 case EXACT_DIV_EXPR:
7959 if (modifier == EXPAND_STACK_PARM)
7960 target = 0;
7961 /* Possible optimization: compute the dividend with EXPAND_SUM;
7962 then, if the divisor is constant, we can optimize the case
7963 where some terms of the dividend have coefficients divisible by it. */
7964 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7965 subtarget, &op0, &op1, 0);
7966 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7968 case RDIV_EXPR:
7969 /* Emit a/b as a*(1/b). Later, CSE may manage to eliminate the
7970 reciprocal, saving an expensive divide. If not, combine will
7971 rebuild the original computation. */
7972 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7973 && TREE_CODE (type) == REAL_TYPE
7974 && !real_onep (TREE_OPERAND (exp, 0)))
7975 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7976 build (RDIV_EXPR, type,
7977 build_real (type, dconst1),
7978 TREE_OPERAND (exp, 1))),
7979 target, tmode, modifier);
7980 this_optab = sdiv_optab;
7981 goto binop;
7983 case TRUNC_MOD_EXPR:
7984 case FLOOR_MOD_EXPR:
7985 case CEIL_MOD_EXPR:
7986 case ROUND_MOD_EXPR:
7987 if (modifier == EXPAND_STACK_PARM)
7988 target = 0;
7989 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7990 subtarget, &op0, &op1, 0);
7991 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7993 case FIX_ROUND_EXPR:
7994 case FIX_FLOOR_EXPR:
7995 case FIX_CEIL_EXPR:
7996 abort (); /* Not used for C. */
7998 case FIX_TRUNC_EXPR:
7999 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8000 if (target == 0 || modifier == EXPAND_STACK_PARM)
8001 target = gen_reg_rtx (mode);
8002 expand_fix (target, op0, unsignedp);
8003 return target;
8005 case FLOAT_EXPR:
8006 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8007 if (target == 0 || modifier == EXPAND_STACK_PARM)
8008 target = gen_reg_rtx (mode);
8009 /* expand_float can't figure out what to do if FROM has VOIDmode.
8010 So give it the correct mode. With -O, cse will optimize this. */
8011 if (GET_MODE (op0) == VOIDmode)
8012 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8013 op0);
8014 expand_float (target, op0,
8015 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8016 return target;
8018 case NEGATE_EXPR:
8019 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8020 if (modifier == EXPAND_STACK_PARM)
8021 target = 0;
8022 temp = expand_unop (mode,
8023 ! unsignedp && flag_trapv
8024 && (GET_MODE_CLASS(mode) == MODE_INT)
8025 ? negv_optab : neg_optab, op0, target, 0);
8026 if (temp == 0)
8027 abort ();
8028 return temp;
8030 case ABS_EXPR:
8031 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8032 if (modifier == EXPAND_STACK_PARM)
8033 target = 0;
8035 /* ABS_EXPR is not valid for complex arguments. */
8036 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8037 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8038 abort ();
8040 /* Unsigned abs is simply the operand. Testing here means we don't
8041 risk generating incorrect code below. */
8042 if (TREE_UNSIGNED (type))
8043 return op0;
8045 return expand_abs (mode, op0, target, unsignedp,
8046 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8048 case MAX_EXPR:
8049 case MIN_EXPR:
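/* Reuse ORIGINAL_TARGET only when it is safe: not a volatile MEM, not a
hard register, and of the right mode; otherwise compute into a fresh
pseudo. */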
8050 target = original_target;
8051 if (target == 0
8052 || modifier == EXPAND_STACK_PARM
8053 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8054 || GET_MODE (target) != mode
8055 || (GET_CODE (target) == REG
8056 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8057 target = gen_reg_rtx (mode);
8058 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8059 target, &op0, &op1, 0);
8061 /* First try to do it with a special MIN or MAX instruction.
8062 If that does not win, use a conditional jump to select the proper
8063 value. */
8064 this_optab = (TREE_UNSIGNED (type)
8065 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8066 : (code == MIN_EXPR ? smin_optab : smax_optab));
8068 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8069 OPTAB_WIDEN);
8070 if (temp != 0)
8071 return temp;
8073 /* At this point, a MEM target is no longer useful; we will get better
8074 code without it. */
8076 if (GET_CODE (target) == MEM)
8077 target = gen_reg_rtx (mode);
8079 /* If op1 was placed in target, swap op0 and op1. */
8080 if (target != op0 && target == op1)
8082 rtx tem = op0;
8083 op0 = op1;
8084 op1 = tem;
8087 if (target != op0)
8088 emit_move_insn (target, op0);
8090 op0 = gen_label_rtx ();
8092 /* If this mode is an integer too wide to compare properly,
8093 compare word by word. Rely on cse to optimize constant cases. */
8094 if (GET_MODE_CLASS (mode) == MODE_INT
8095 && ! can_compare_p (GE, mode, ccp_jump))
8097 if (code == MAX_EXPR)
8098 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8099 target, op1, NULL_RTX, op0);
8100 else
8101 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8102 op1, target, NULL_RTX, op0);
8104 else
8106 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8107 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8108 unsignedp, mode, NULL_RTX, NULL_RTX,
8109 op0);
8111 emit_move_insn (target, op1);
8112 emit_label (op0);
8113 return target;
8115 case BIT_NOT_EXPR:
8116 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8117 if (modifier == EXPAND_STACK_PARM)
8118 target = 0;
8119 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8120 if (temp == 0)
8121 abort ();
8122 return temp;
8124 /* ??? Can optimize bitwise operations with one arg constant.
8125 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8126 and (a bitwise1 b) bitwise2 b (etc)
8127 but that is probably not worthwhile. */
8129 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8130 boolean values when we want in all cases to compute both of them. In
8131 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8132 as actual zero-or-1 values and then bitwise anding. In cases where
8133 there cannot be any side effects, better code would be made by
8134 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8135 how to recognize those cases. */
8137 case TRUTH_AND_EXPR:
8138 case BIT_AND_EXPR:
8139 this_optab = and_optab;
8140 goto binop;
8142 case TRUTH_OR_EXPR:
8143 case BIT_IOR_EXPR:
8144 this_optab = ior_optab;
8145 goto binop;
8147 case TRUTH_XOR_EXPR:
8148 case BIT_XOR_EXPR:
8149 this_optab = xor_optab;
8150 goto binop;
8152 case LSHIFT_EXPR:
8153 case RSHIFT_EXPR:
8154 case LROTATE_EXPR:
8155 case RROTATE_EXPR:
8156 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8157 subtarget = 0;
8158 if (modifier == EXPAND_STACK_PARM)
8159 target = 0;
8160 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8161 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8162 unsignedp);
8164 /* Could determine the answer when only additive constants differ. Also,
8165 the addition of one can be handled by changing the condition. */
8166 case LT_EXPR:
8167 case LE_EXPR:
8168 case GT_EXPR:
8169 case GE_EXPR:
8170 case EQ_EXPR:
8171 case NE_EXPR:
8172 case UNORDERED_EXPR:
8173 case ORDERED_EXPR:
8174 case UNLT_EXPR:
8175 case UNLE_EXPR:
8176 case UNGT_EXPR:
8177 case UNGE_EXPR:
8178 case UNEQ_EXPR:
8179 temp = do_store_flag (exp,
8180 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8181 tmode != VOIDmode ? tmode : mode, 0);
8182 if (temp != 0)
8183 return temp;
8185 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8186 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8187 && original_target
8188 && GET_CODE (original_target) == REG
8189 && (GET_MODE (original_target)
8190 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8192 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8193 VOIDmode, 0);
8195 /* If temp is constant, we can just compute the result. */
8196 if (GET_CODE (temp) == CONST_INT)
8198 if (INTVAL (temp) != 0)
8199 emit_move_insn (target, const1_rtx);
8200 else
8201 emit_move_insn (target, const0_rtx);
8203 return target;
8206 if (temp != original_target)
8208 enum machine_mode mode1 = GET_MODE (temp);
8209 if (mode1 == VOIDmode)
8210 mode1 = tmode != VOIDmode ? tmode : mode;
8212 temp = copy_to_mode_reg (mode1, temp);
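/* TEMP now holds FOO; turn any nonzero value into 1 by skipping the
store of 1 below when TEMP is zero. */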
8215 op1 = gen_label_rtx ();
8216 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8217 GET_MODE (temp), unsignedp, op1);
8218 emit_move_insn (temp, const1_rtx);
8219 emit_label (op1);
8220 return temp;
8223 /* If no set-flag instruction, must generate a conditional
8224 store into a temporary variable. Drop through
8225 and handle this like && and ||. */
8227 case TRUTH_ANDIF_EXPR:
8228 case TRUTH_ORIF_EXPR:
8229 if (! ignore
8230 && (target == 0
8231 || modifier == EXPAND_STACK_PARM
8232 || ! safe_from_p (target, exp, 1)
8233 /* Make sure we don't have a hard reg (such as the function's return
8234 value) live across basic blocks, if not optimizing. */
8235 || (!optimize && GET_CODE (target) == REG
8236 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8237 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
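/* When a value is wanted, expand as: clear TARGET, jump past the store
if EXP is false, and otherwise set TARGET to 1. */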
8239 if (target)
8240 emit_clr_insn (target);
8242 op1 = gen_label_rtx ();
8243 jumpifnot (exp, op1);
8245 if (target)
8246 emit_0_to_1_insn (target);
8248 emit_label (op1);
8249 return ignore ? const0_rtx : target;
8251 case TRUTH_NOT_EXPR:
8252 if (modifier == EXPAND_STACK_PARM)
8253 target = 0;
8254 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8255 /* The parser is careful to generate TRUTH_NOT_EXPR
8256 only with operands that are always zero or one. */
8257 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8258 target, 1, OPTAB_LIB_WIDEN);
8259 if (temp == 0)
8260 abort ();
8261 return temp;
8263 case COMPOUND_EXPR:
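/* Evaluate the first operand for its side effects only; the value of
the whole expression is that of the second operand. */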
8264 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8265 emit_queue ();
8266 return expand_expr (TREE_OPERAND (exp, 1),
8267 (ignore ? const0_rtx : target),
8268 VOIDmode, modifier);
8270 case COND_EXPR:
8271 /* If we would have a "singleton" (see below) were it not for a
8272 conversion in each arm, bring that conversion back out. */
8273 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8274 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8275 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8276 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8278 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8279 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8281 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8282 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8283 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8284 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8285 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8286 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8287 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8288 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8289 return expand_expr (build1 (NOP_EXPR, type,
8290 build (COND_EXPR, TREE_TYPE (iftrue),
8291 TREE_OPERAND (exp, 0),
8292 iftrue, iffalse)),
8293 target, tmode, modifier);
8297 /* Note that COND_EXPRs whose type is a structure or union
8298 are required to be constructed to contain assignments of
8299 a temporary variable, so that we can evaluate them here
8300 for side effect only. If type is void, we must do likewise. */
8302 /* If an arm of the branch requires a cleanup,
8303 only that cleanup is performed. */
8305 tree singleton = 0;
8306 tree binary_op = 0, unary_op = 0;
8308 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8309 convert it to our mode, if necessary. */
8310 if (integer_onep (TREE_OPERAND (exp, 1))
8311 && integer_zerop (TREE_OPERAND (exp, 2))
8312 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8314 if (ignore)
8316 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8317 modifier);
8318 return const0_rtx;
8321 if (modifier == EXPAND_STACK_PARM)
8322 target = 0;
8323 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8324 if (GET_MODE (op0) == mode)
8325 return op0;
8327 if (target == 0)
8328 target = gen_reg_rtx (mode);
8329 convert_move (target, op0, unsignedp);
8330 return target;
8333 /* Check for X ? A + B : A. If we have this, we can copy A to the
8334 output and conditionally add B. Similarly for unary operations.
8335 Don't do this if X has side-effects because those side effects
8336 might affect A or B and the "?" operation is a sequence point in
8337 ANSI. (operand_equal_p tests for side effects.) */
8339 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8340 && operand_equal_p (TREE_OPERAND (exp, 2),
8341 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8342 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8343 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8344 && operand_equal_p (TREE_OPERAND (exp, 1),
8345 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8346 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8347 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8348 && operand_equal_p (TREE_OPERAND (exp, 2),
8349 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8350 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8351 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8352 && operand_equal_p (TREE_OPERAND (exp, 1),
8353 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8354 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8356 /* If we are not to produce a result, we have no target. Otherwise,
8357 if a target was specified use it; it will not be used as an
8358 intermediate target unless it is safe. If no target, use a
8359 temporary. */
8361 if (ignore)
8362 temp = 0;
8363 else if (modifier == EXPAND_STACK_PARM)
8364 temp = assign_temp (type, 0, 0, 1);
8365 else if (original_target
8366 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8367 || (singleton && GET_CODE (original_target) == REG
8368 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8369 && original_target == var_rtx (singleton)))
8370 && GET_MODE (original_target) == mode
8371 #ifdef HAVE_conditional_move
8372 && (! can_conditionally_move_p (mode)
8373 || GET_CODE (original_target) == REG
8374 || TREE_ADDRESSABLE (type))
8375 #endif
8376 && (GET_CODE (original_target) != MEM
8377 || TREE_ADDRESSABLE (type)))
8378 temp = original_target;
8379 else if (TREE_ADDRESSABLE (type))
8380 abort ();
8381 else
8382 temp = assign_temp (type, 0, 0, 1);
8384 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8385 do the test of X as a store-flag operation, do this as
8386 A + ((X != 0) << log C). Similarly for other simple binary
8387 operators. Only do for C == 1 if BRANCH_COST is low. */
8388 if (temp && singleton && binary_op
8389 && (TREE_CODE (binary_op) == PLUS_EXPR
8390 || TREE_CODE (binary_op) == MINUS_EXPR
8391 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8392 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8393 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8394 : integer_onep (TREE_OPERAND (binary_op, 1)))
8395 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8397 rtx result;
8398 tree cond;
8399 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8400 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8401 ? addv_optab : add_optab)
8402 : TREE_CODE (binary_op) == MINUS_EXPR
8403 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8404 ? subv_optab : sub_optab)
8405 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8406 : xor_optab);
8408 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8409 if (singleton == TREE_OPERAND (exp, 1))
8410 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8411 else
8412 cond = TREE_OPERAND (exp, 0);
8414 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8415 ? temp : NULL_RTX),
8416 mode, BRANCH_COST <= 1);
8418 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8419 result = expand_shift (LSHIFT_EXPR, mode, result,
8420 build_int_2 (tree_log2
8421 (TREE_OPERAND
8422 (binary_op, 1)),
8423 0),
8424 (safe_from_p (temp, singleton, 1)
8425 ? temp : NULL_RTX), 0);
8427 if (result)
8429 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8430 return expand_binop (mode, boptab, op1, result, temp,
8431 unsignedp, OPTAB_LIB_WIDEN);
8435 do_pending_stack_adjust ();
8436 NO_DEFER_POP;
8437 op0 = gen_label_rtx ();
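/* No store-flag shortcut applied; expand the conditional with explicit
jumps, using OP0 (and possibly OP1) as the labels that join the arms. */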
8439 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8441 if (temp != 0)
8443 /* If the target conflicts with the other operand of the
8444 binary op, we can't use it. Also, we can't use the target
8445 if it is a hard register, because evaluating the condition
8446 might clobber it. */
8447 if ((binary_op
8448 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8449 || (GET_CODE (temp) == REG
8450 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8451 temp = gen_reg_rtx (mode);
8452 store_expr (singleton, temp,
8453 modifier == EXPAND_STACK_PARM ? 2 : 0);
8455 else
8456 expand_expr (singleton,
8457 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8458 if (singleton == TREE_OPERAND (exp, 1))
8459 jumpif (TREE_OPERAND (exp, 0), op0);
8460 else
8461 jumpifnot (TREE_OPERAND (exp, 0), op0);
8463 start_cleanup_deferral ();
8464 if (binary_op && temp == 0)
8465 /* Just touch the other operand. */
8466 expand_expr (TREE_OPERAND (binary_op, 1),
8467 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8468 else if (binary_op)
8469 store_expr (build (TREE_CODE (binary_op), type,
8470 make_tree (type, temp),
8471 TREE_OPERAND (binary_op, 1)),
8472 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8473 else
8474 store_expr (build1 (TREE_CODE (unary_op), type,
8475 make_tree (type, temp)),
8476 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8477 op1 = op0;
8479 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8480 comparison operator. If we have one of these cases, set the
8481 output to A, branch on A (cse will merge these two references),
8482 then set the output to FOO. */
8483 else if (temp
8484 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8485 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8486 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8487 TREE_OPERAND (exp, 1), 0)
8488 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8489 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8490 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8492 if (GET_CODE (temp) == REG
8493 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8494 temp = gen_reg_rtx (mode);
8495 store_expr (TREE_OPERAND (exp, 1), temp,
8496 modifier == EXPAND_STACK_PARM ? 2 : 0);
8497 jumpif (TREE_OPERAND (exp, 0), op0);
8499 start_cleanup_deferral ();
8500 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8501 store_expr (TREE_OPERAND (exp, 2), temp,
8502 modifier == EXPAND_STACK_PARM ? 2 : 0);
8503 else
8504 expand_expr (TREE_OPERAND (exp, 2),
8505 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8506 op1 = op0;
8508 else if (temp
8509 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8510 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8511 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8512 TREE_OPERAND (exp, 2), 0)
8513 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8514 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8515 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8517 if (GET_CODE (temp) == REG
8518 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8519 temp = gen_reg_rtx (mode);
8520 store_expr (TREE_OPERAND (exp, 2), temp,
8521 modifier == EXPAND_STACK_PARM ? 2 : 0);
8522 jumpifnot (TREE_OPERAND (exp, 0), op0);
8524 start_cleanup_deferral ();
8525 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8526 store_expr (TREE_OPERAND (exp, 1), temp,
8527 modifier == EXPAND_STACK_PARM ? 2 : 0);
8528 else
8529 expand_expr (TREE_OPERAND (exp, 1),
8530 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8531 op1 = op0;
8533 else
8535 op1 = gen_label_rtx ();
8536 jumpifnot (TREE_OPERAND (exp, 0), op0);
8538 start_cleanup_deferral ();
8540 /* One branch of the cond can be void, if it never returns. For
8541 example, A ? throw : E. */
8542 if (temp != 0
8543 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8544 store_expr (TREE_OPERAND (exp, 1), temp,
8545 modifier == EXPAND_STACK_PARM ? 2 : 0);
8546 else
8547 expand_expr (TREE_OPERAND (exp, 1),
8548 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8549 end_cleanup_deferral ();
8550 emit_queue ();
8551 emit_jump_insn (gen_jump (op1));
8552 emit_barrier ();
8553 emit_label (op0);
8554 start_cleanup_deferral ();
8555 if (temp != 0
8556 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8557 store_expr (TREE_OPERAND (exp, 2), temp,
8558 modifier == EXPAND_STACK_PARM ? 2 : 0);
8559 else
8560 expand_expr (TREE_OPERAND (exp, 2),
8561 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8564 end_cleanup_deferral ();
8566 emit_queue ();
8567 emit_label (op1);
8568 OK_DEFER_POP;
8570 return temp;
8573 case TARGET_EXPR:
8575 /* Something needs to be initialized, but we didn't know
8576 where that thing was when building the tree. For example,
8577 it could be the return value of a function, or a parameter
8578 to a function which is laid out on the stack, or a temporary
8579 variable which must be passed by reference.
8581 We guarantee that the expression will either be constructed
8582 or copied into our original target. */
8584 tree slot = TREE_OPERAND (exp, 0);
8585 tree cleanups = NULL_TREE;
8586 tree exp1;
8588 if (TREE_CODE (slot) != VAR_DECL)
8589 abort ();
8591 if (! ignore)
8592 target = original_target;
8594 /* Set this here so that if we get a target that refers to a
8595 register variable that's already been used, put_reg_into_stack
8596 knows that it should fix up those uses. */
8597 TREE_USED (slot) = 1;
8599 if (target == 0)
8601 if (DECL_RTL_SET_P (slot))
8603 target = DECL_RTL (slot);
8604 /* If we have already expanded the slot, don't do
8605 it again. (mrs) */
8606 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8607 return target;
8609 else
8611 target = assign_temp (type, 2, 0, 1);
8612 /* All temp slots at this level must not conflict. */
8613 preserve_temp_slots (target);
8614 SET_DECL_RTL (slot, target);
8615 if (TREE_ADDRESSABLE (slot))
8616 put_var_into_stack (slot, /*rescan=*/false);
8618 /* Since SLOT is not known to the called function
8619 to belong to its stack frame, we must build an explicit
8620 cleanup. This case occurs when we must build up a reference
8621 to pass the reference as an argument. In this case,
8622 it is very likely that such a reference need not be
8623 built here. */
8625 if (TREE_OPERAND (exp, 2) == 0)
8626 TREE_OPERAND (exp, 2)
8627 = (*lang_hooks.maybe_build_cleanup) (slot);
8628 cleanups = TREE_OPERAND (exp, 2);
8631 else
8633 /* This case does occur when expanding a parameter which
8634 needs to be constructed on the stack. The target
8635 is the actual stack address that we want to initialize.
8636 The function we call will perform the cleanup in this case. */
8638 /* If we have already assigned it space, use that space,
8639 not the target that we were passed in, as our target
8640 parameter is only a hint. */
8641 if (DECL_RTL_SET_P (slot))
8643 target = DECL_RTL (slot);
8644 /* If we have already expanded the slot, don't do
8645 it again. (mrs) */
8646 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8647 return target;
8649 else
8651 SET_DECL_RTL (slot, target);
8652 /* If we must have an addressable slot, then make sure that
8653 the RTL that we just stored in slot is OK. */
8654 if (TREE_ADDRESSABLE (slot))
8655 put_var_into_stack (slot, /*rescan=*/true);
8659 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8660 /* Mark it as expanded. */
8661 TREE_OPERAND (exp, 1) = NULL_TREE;
8663 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8665 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8667 return target;
8670 case INIT_EXPR:
8672 tree lhs = TREE_OPERAND (exp, 0);
8673 tree rhs = TREE_OPERAND (exp, 1);
8675 temp = expand_assignment (lhs, rhs, ! ignore);
8676 return temp;
8679 case MODIFY_EXPR:
8681 /* If lhs is complex, expand calls in rhs before computing it.
8682 That's so we don't compute a pointer and save it over a
8683 call. If lhs is simple, compute it first so we can give it
8684 as a target if the rhs is just a call. This avoids an
8685 extra temp and copy and that prevents a partial-subsumption
8686 which makes bad code. Actually we could treat
8687 component_ref's of vars like vars. */
8689 tree lhs = TREE_OPERAND (exp, 0);
8690 tree rhs = TREE_OPERAND (exp, 1);
8692 temp = 0;
8694 /* Check for |= or &= of a bitfield of size one into another bitfield
8695 of size 1. In this case, (unless we need the result of the
8696 assignment) we can do this more efficiently with a
8697 test followed by an assignment, if necessary.
8699 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8700 things change so we do, this code should be enhanced to
8701 support it. */
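/* For example, with single-bit fields, 'a.b |= c.d' becomes
'if (c.d) a.b = 1;' and 'a.b &= c.d' becomes 'if (! c.d) a.b = 0;'. */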
8702 if (ignore
8703 && TREE_CODE (lhs) == COMPONENT_REF
8704 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8705 || TREE_CODE (rhs) == BIT_AND_EXPR)
8706 && TREE_OPERAND (rhs, 0) == lhs
8707 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8708 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8709 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8711 rtx label = gen_label_rtx ();
8713 do_jump (TREE_OPERAND (rhs, 1),
8714 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8715 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8716 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8717 (TREE_CODE (rhs) == BIT_IOR_EXPR
8718 ? integer_one_node
8719 : integer_zero_node)),
8720 0);
8721 do_pending_stack_adjust ();
8722 emit_label (label);
8723 return const0_rtx;
8726 temp = expand_assignment (lhs, rhs, ! ignore);
8728 return temp;
8731 case RETURN_EXPR:
8732 if (!TREE_OPERAND (exp, 0))
8733 expand_null_return ();
8734 else
8735 expand_return (TREE_OPERAND (exp, 0));
8736 return const0_rtx;
8738 case PREINCREMENT_EXPR:
8739 case PREDECREMENT_EXPR:
8740 return expand_increment (exp, 0, ignore);
8742 case POSTINCREMENT_EXPR:
8743 case POSTDECREMENT_EXPR:
8744 /* Faster to treat as pre-increment if result is not used. */
8745 return expand_increment (exp, ! ignore, ignore);
8747 case ADDR_EXPR:
8748 if (modifier == EXPAND_STACK_PARM)
8749 target = 0;
8750 /* Are we taking the address of a nested function? */
8751 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8752 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8753 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8754 && ! TREE_STATIC (exp))
8756 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8757 op0 = force_operand (op0, target);
8759 /* If we are taking the address of something erroneous, just
8760 return a zero. */
8761 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8762 return const0_rtx;
8763 /* If we are taking the address of a constant and are at the
8764 top level, we have to use output_constant_def since we can't
8765 call force_const_mem at top level. */
8766 else if (cfun == 0
8767 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8768 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8769 == 'c')))
8770 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8771 else
8773 /* We make sure to pass const0_rtx down if we came in with
8774 ignore set, to avoid doing the cleanups twice for something. */
8775 op0 = expand_expr (TREE_OPERAND (exp, 0),
8776 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8777 (modifier == EXPAND_INITIALIZER
8778 ? modifier : EXPAND_CONST_ADDRESS));
8780 /* If we are going to ignore the result, OP0 will have been set
8781 to const0_rtx, so just return it. Don't get confused and
8782 think we are taking the address of the constant. */
8783 if (ignore)
8784 return op0;
8786 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8787 clever and return a REG when given a MEM. */
8788 op0 = protect_from_queue (op0, 1);
8790 /* We would like the object in memory. If it is a constant, we can
8791 have it be statically allocated into memory. For a non-constant,
8792 we need to allocate some memory and store the value into it. */
8794 if (CONSTANT_P (op0))
8795 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8796 op0);
8797 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8798 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8799 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8801 /* If the operand is a SAVE_EXPR, we can deal with this by
8802 forcing the SAVE_EXPR into memory. */
8803 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8805 put_var_into_stack (TREE_OPERAND (exp, 0),
8806 /*rescan=*/true);
8807 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8809 else
8811 /* If this object is in a register, it can't be BLKmode. */
8812 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8813 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8815 if (GET_CODE (op0) == PARALLEL)
8816 /* Handle calls that pass values in multiple
8817 non-contiguous locations. The Irix 6 ABI has examples
8818 of this. */
8819 emit_group_store (memloc, op0, inner_type,
8820 int_size_in_bytes (inner_type));
8821 else
8822 emit_move_insn (memloc, op0);
8824 op0 = memloc;
8828 if (GET_CODE (op0) != MEM)
8829 abort ();
8831 mark_temp_addr_taken (op0);
8832 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8834 op0 = XEXP (op0, 0);
8835 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8836 op0 = convert_memory_address (ptr_mode, op0);
8837 return op0;
8840 /* If OP0 is not aligned at least as much as the type requires, we
8841 need to make a temporary, copy OP0 to it, and take the address of
8842 the temporary. We want to use the alignment of the type, not of
8843 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8844 the test for BLKmode means that can't happen. The test for
8845 BLKmode is because we never make mis-aligned MEMs with
8846 non-BLKmode.
8848 We don't need to do this at all if the machine doesn't have
8849 strict alignment. */
8850 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8851 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8852 > MEM_ALIGN (op0))
8853 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8855 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8856 rtx new;
8858 if (TYPE_ALIGN_OK (inner_type))
8859 abort ();
8861 if (TREE_ADDRESSABLE (inner_type))
8863 /* We can't make a bitwise copy of this object, so fail. */
8864 error ("cannot take the address of an unaligned member");
8865 return const0_rtx;
8868 new = assign_stack_temp_for_type
8869 (TYPE_MODE (inner_type),
8870 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8871 : int_size_in_bytes (inner_type),
8872 1, build_qualified_type (inner_type,
8873 (TYPE_QUALS (inner_type)
8874 | TYPE_QUAL_CONST)));
8876 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8877 (modifier == EXPAND_STACK_PARM
8878 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8880 op0 = new;
8883 op0 = force_operand (XEXP (op0, 0), target);
8886 if (flag_force_addr
8887 && GET_CODE (op0) != REG
8888 && modifier != EXPAND_CONST_ADDRESS
8889 && modifier != EXPAND_INITIALIZER
8890 && modifier != EXPAND_SUM)
8891 op0 = force_reg (Pmode, op0);
8893 if (GET_CODE (op0) == REG
8894 && ! REG_USERVAR_P (op0))
8895 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8897 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8898 op0 = convert_memory_address (ptr_mode, op0);
8900 return op0;
8902 case ENTRY_VALUE_EXPR:
8903 abort ();
8905 /* COMPLEX type for Extended Pascal & Fortran */
8906 case COMPLEX_EXPR:
8908 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8909 rtx insns;
8911 /* Get the rtx code of the operands. */
8912 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8913 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8915 if (! target)
8916 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8918 start_sequence ();
8920 /* Move the real (op0) and imaginary (op1) parts to their location. */
8921 emit_move_insn (gen_realpart (mode, target), op0);
8922 emit_move_insn (gen_imagpart (mode, target), op1);
8924 insns = get_insns ();
8925 end_sequence ();
8927 /* Complex construction should appear as a single unit. */
8928 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8929 each with a separate pseudo as destination.
8930 It's not correct for flow to treat them as a unit. */
8931 if (GET_CODE (target) != CONCAT)
8932 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8933 else
8934 emit_insn (insns);
8936 return target;
8939 case REALPART_EXPR:
8940 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8941 return gen_realpart (mode, op0);
8943 case IMAGPART_EXPR:
8944 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8945 return gen_imagpart (mode, op0);
8947 case CONJ_EXPR:
8949 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8950 rtx imag_t;
8951 rtx insns;
8953 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8955 if (! target)
8956 target = gen_reg_rtx (mode);
8958 start_sequence ();
8960 /* Store the realpart and the negated imagpart to target. */
8961 emit_move_insn (gen_realpart (partmode, target),
8962 gen_realpart (partmode, op0));
8964 imag_t = gen_imagpart (partmode, target);
8965 temp = expand_unop (partmode,
8966 ! unsignedp && flag_trapv
8967 && (GET_MODE_CLASS(partmode) == MODE_INT)
8968 ? negv_optab : neg_optab,
8969 gen_imagpart (partmode, op0), imag_t, 0);
8970 if (temp != imag_t)
8971 emit_move_insn (imag_t, temp);
8973 insns = get_insns ();
8974 end_sequence ();
8976 /* Conjugate should appear as a single unit.
8977 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8978 each with a separate pseudo as destination.
8979 It's not correct for flow to treat them as a unit. */
8980 if (GET_CODE (target) != CONCAT)
8981 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8982 else
8983 emit_insn (insns);
8985 return target;
8988 case TRY_CATCH_EXPR:
8990 tree handler = TREE_OPERAND (exp, 1);
8992 expand_eh_region_start ();
8994 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8996 expand_eh_region_end_cleanup (handler);
8998 return op0;
9001 case TRY_FINALLY_EXPR:
9003 tree try_block = TREE_OPERAND (exp, 0);
9004 tree finally_block = TREE_OPERAND (exp, 1);
9006 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9008 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9009 is not sufficient, so we cannot expand the block twice.
9010 So we play games with GOTO_SUBROUTINE_EXPR to let us
9011 expand the thing only once. */
9012 /* When not optimizing, we go ahead with this form since
9013 (1) user breakpoints operate more predictably without
9014 code duplication, and
9015 (2) we're not running any of the global optimizers
9016 that would explode in time/space with the highly
9017 connected CFG created by the indirect branching. */
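	    /* Rough sketch of the shape of the code emitted below
	       (illustrative pseudo-code, not literal RTL):

	           <try block, with a cleanup that sets RETURN_LINK and
	            jumps to FINALLY_LABEL on every exit path>
	           goto done;
	         finally:
	           <finally block>
	           goto *return_link;
	         done:

	       so the finally code exists exactly once; every exit from the
	       try block reaches it through the cleanup and then resumes via
	       the indirect jump through RETURN_LINK.  */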
9019 rtx finally_label = gen_label_rtx ();
9020 rtx done_label = gen_label_rtx ();
9021 rtx return_link = gen_reg_rtx (Pmode);
9022 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9023 (tree) finally_label, (tree) return_link);
9024 TREE_SIDE_EFFECTS (cleanup) = 1;
9026 /* Start a new binding layer that will keep track of all cleanup
9027 actions to be performed. */
9028 expand_start_bindings (2);
9029 target_temp_slot_level = temp_slot_level;
9031 expand_decl_cleanup (NULL_TREE, cleanup);
9032 op0 = expand_expr (try_block, target, tmode, modifier);
9034 preserve_temp_slots (op0);
9035 expand_end_bindings (NULL_TREE, 0, 0);
9036 emit_jump (done_label);
9037 emit_label (finally_label);
9038 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9039 emit_indirect_jump (return_link);
9040 emit_label (done_label);
9042 else
9044 expand_start_bindings (2);
9045 target_temp_slot_level = temp_slot_level;
9047 expand_decl_cleanup (NULL_TREE, finally_block);
9048 op0 = expand_expr (try_block, target, tmode, modifier);
9050 preserve_temp_slots (op0);
9051 expand_end_bindings (NULL_TREE, 0, 0);
9054 return op0;
9057 case GOTO_SUBROUTINE_EXPR:
9059 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9060 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9061 rtx return_address = gen_label_rtx ();
9062 emit_move_insn (return_link,
9063 gen_rtx_LABEL_REF (Pmode, return_address));
9064 emit_jump (subr);
9065 emit_label (return_address);
9066 return const0_rtx;
9069 case VA_ARG_EXPR:
9070 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9072 case EXC_PTR_EXPR:
9073 return get_exception_pointer (cfun);
9075 case FDESC_EXPR:
9076 /* Function descriptors are not valid except as
9077 initialization constants, and should not be expanded. */
9078 abort ();
9080 default:
9081 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9084 /* Here to do an ordinary binary operator, generating an instruction
9085 from the optab already placed in `this_optab'. */
9086 binop:
9087 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9088 subtarget, &op0, &op1, 0);
9089 binop2:
9090 if (modifier == EXPAND_STACK_PARM)
9091 target = 0;
9092 temp = expand_binop (mode, this_optab, op0, op1, target,
9093 unsignedp, OPTAB_LIB_WIDEN);
9094 if (temp == 0)
9095 abort ();
9096 return temp;
9099 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9100 when applied to the address of EXP produces an address known to be
9101 aligned more than BIGGEST_ALIGNMENT. */
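/* Illustrative example of the shape matched below (an assumption about
   typical front-end output, not taken verbatim from any one of them):

       OFFSET = (- (size_t) &EXP) & (ALIGN - 1)

   where ALIGN is a power of two and ALIGN - 1 is larger than
   BIGGEST_ALIGNMENT.  Adding such an OFFSET to the address of EXP rounds
   it up to the next ALIGN-byte boundary, which is why the sum is known
   to be aligned more strictly than BIGGEST_ALIGNMENT.  */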
9103 static int
9104 is_aligning_offset (tree offset, tree exp)
9106 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9107 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9108 || TREE_CODE (offset) == NOP_EXPR
9109 || TREE_CODE (offset) == CONVERT_EXPR
9110 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9111 offset = TREE_OPERAND (offset, 0);
9113 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9114 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9115 if (TREE_CODE (offset) != BIT_AND_EXPR
9116 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9117 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9118 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9119 return 0;
9121 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9122 It must be NEGATE_EXPR. Then strip any more conversions. */
9123 offset = TREE_OPERAND (offset, 0);
9124 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9125 || TREE_CODE (offset) == NOP_EXPR
9126 || TREE_CODE (offset) == CONVERT_EXPR)
9127 offset = TREE_OPERAND (offset, 0);
9129 if (TREE_CODE (offset) != NEGATE_EXPR)
9130 return 0;
9132 offset = TREE_OPERAND (offset, 0);
9133 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9134 || TREE_CODE (offset) == NOP_EXPR
9135 || TREE_CODE (offset) == CONVERT_EXPR)
9136 offset = TREE_OPERAND (offset, 0);
9138 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9139 whose type is the same as EXP. */
9140 return (TREE_CODE (offset) == ADDR_EXPR
9141 && (TREE_OPERAND (offset, 0) == exp
9142 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9143 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9144 == TREE_TYPE (exp)))));
9147 /* Return the tree node if ARG corresponds to a string constant, or zero
9148 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9149 in bytes within the string that ARG is accessing. The type of the
9150 offset will be `sizetype'. */
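/* Hedged illustration: for an ARG of the form  &"hello"[0]  this returns
   the STRING_CST "hello" with *PTR_OFFSET set to zero, and for
   &"hello"[0] + N  (a PLUS_EXPR of that address and an offset) it returns
   the same STRING_CST with *PTR_OFFSET set to N converted to sizetype.  */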
9152 tree
9153 string_constant (tree arg, tree *ptr_offset)
9155 STRIP_NOPS (arg);
9157 if (TREE_CODE (arg) == ADDR_EXPR
9158 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9160 *ptr_offset = size_zero_node;
9161 return TREE_OPERAND (arg, 0);
9163 else if (TREE_CODE (arg) == PLUS_EXPR)
9165 tree arg0 = TREE_OPERAND (arg, 0);
9166 tree arg1 = TREE_OPERAND (arg, 1);
9168 STRIP_NOPS (arg0);
9169 STRIP_NOPS (arg1);
9171 if (TREE_CODE (arg0) == ADDR_EXPR
9172 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9174 *ptr_offset = convert (sizetype, arg1);
9175 return TREE_OPERAND (arg0, 0);
9177 else if (TREE_CODE (arg1) == ADDR_EXPR
9178 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9180 *ptr_offset = convert (sizetype, arg0);
9181 return TREE_OPERAND (arg1, 0);
9185 return 0;
9188 /* Expand code for a post- or pre- increment or decrement
9189 and return the RTX for the result.
9190 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9192 static rtx
9193 expand_increment (tree exp, int post, int ignore)
9195 rtx op0, op1;
9196 rtx temp, value;
9197 tree incremented = TREE_OPERAND (exp, 0);
9198 optab this_optab = add_optab;
9199 int icode;
9200 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9201 int op0_is_copy = 0;
9202 int single_insn = 0;
9203 /* 1 means we can't store into OP0 directly,
9204 because it is a subreg narrower than a word,
9205 and we don't dare clobber the rest of the word. */
9206 int bad_subreg = 0;
9208 /* Stabilize any component ref that might need to be
9209 evaluated more than once below. */
9210 if (!post
9211 || TREE_CODE (incremented) == BIT_FIELD_REF
9212 || (TREE_CODE (incremented) == COMPONENT_REF
9213 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9214 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9215 incremented = stabilize_reference (incremented);
9216 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9217 ones into save exprs so that they don't accidentally get evaluated
9218 more than once by the code below. */
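  /* For example (C++ only; C forbids it), an expression such as  ++ ++i
     reaches this point as a PREINCREMENT_EXPR whose operand is itself a
     PREINCREMENT_EXPR; wrapping the inner one in a SAVE_EXPR keeps the
     fallback path below from expanding it, and thus incrementing it,
     twice.  */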
9219 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9220 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9221 incremented = save_expr (incremented);
9223 /* Compute the operands as RTX.
9224 Note whether OP0 is the actual lvalue or a copy of it:
9225 I believe it is a copy iff it is a register or subreg
9226 and insns were generated in computing it. */
9228 temp = get_last_insn ();
9229 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9231 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9232 in place but instead must do sign- or zero-extension during assignment,
9233 so we copy it into a new register and let the code below use it as
9234 a copy.
9236 Note that we can safely modify this SUBREG since it is known not to be
9237 shared (it was made by the expand_expr call above). */
9239 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9241 if (post)
9242 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9243 else
9244 bad_subreg = 1;
9246 else if (GET_CODE (op0) == SUBREG
9247 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9249 /* We cannot increment this SUBREG in place. If we are
9250 post-incrementing, get a copy of the old value. Otherwise,
9251 just mark that we cannot increment in place. */
9252 if (post)
9253 op0 = copy_to_reg (op0);
9254 else
9255 bad_subreg = 1;
9258 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9259 && temp != get_last_insn ());
9260 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9262 /* Decide whether incrementing or decrementing. */
9263 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9264 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9265 this_optab = sub_optab;
9267 /* Convert decrement by a constant into a negative increment. */
9268 if (this_optab == sub_optab
9269 && GET_CODE (op1) == CONST_INT)
9271 op1 = GEN_INT (-INTVAL (op1));
9272 this_optab = add_optab;
9275 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9276 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9278 /* For a preincrement, see if we can do this with a single instruction. */
9279 if (!post)
9281 icode = (int) this_optab->handlers[(int) mode].insn_code;
9282 if (icode != (int) CODE_FOR_nothing
9283 /* Make sure that OP0 is valid for operands 0 and 1
9284 of the insn we want to queue. */
9285 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9286 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9287 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9288 single_insn = 1;
9291 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9292 then we cannot just increment OP0. We must therefore contrive to
9293 increment the original value. Then, for postincrement, we can return
9294 OP0 since it is a copy of the old value. For preincrement, expand here
9295 unless we can do it with a single insn.
9297 Likewise if storing directly into OP0 would clobber high bits
9298 we need to preserve (bad_subreg). */
9299 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9301 /* This is the easiest way to increment the value wherever it is.
9302 Problems with multiple evaluation of INCREMENTED are prevented
9303 because either (1) it is a component_ref or preincrement,
9304 in which case it was stabilized above, or (2) it is an array_ref
9305 with constant index in an array in a register, which is
9306 safe to reevaluate. */
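      /* Loose example: if OP0 turned out to be only a copy of  v  in a
	 register, a post-increment  v++  is handled here by expanding the
	 assignment  v = v + 1  and returning OP0, the copy that still
	 holds the old value; a pre-increment returns the assignment's
	 result instead.  */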
9307 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9308 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9309 ? MINUS_EXPR : PLUS_EXPR),
9310 TREE_TYPE (exp),
9311 incremented,
9312 TREE_OPERAND (exp, 1));
9314 while (TREE_CODE (incremented) == NOP_EXPR
9315 || TREE_CODE (incremented) == CONVERT_EXPR)
9317 newexp = convert (TREE_TYPE (incremented), newexp);
9318 incremented = TREE_OPERAND (incremented, 0);
9321 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9322 return post ? op0 : temp;
9325 if (post)
9327 /* We have a true reference to the value in OP0.
9328 If there is an insn to add or subtract in this mode, queue it.
9329 Queueing the increment insn avoids the register shuffling
9330 that often results if we must increment now and first save
9331 the old value for subsequent use. */
9333 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9334 op0 = stabilize (op0);
9335 #endif
9337 icode = (int) this_optab->handlers[(int) mode].insn_code;
9338 if (icode != (int) CODE_FOR_nothing
9339 /* Make sure that OP0 is valid for operands 0 and 1
9340 of the insn we want to queue. */
9341 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9342 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9344 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9345 op1 = force_reg (mode, op1);
9347 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9349 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9351 rtx addr = (general_operand (XEXP (op0, 0), mode)
9352 ? force_reg (Pmode, XEXP (op0, 0))
9353 : copy_to_reg (XEXP (op0, 0)));
9354 rtx temp, result;
9356 op0 = replace_equiv_address (op0, addr);
9357 temp = force_reg (GET_MODE (op0), op0);
9358 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9359 op1 = force_reg (mode, op1);
9361 /* The increment queue is LIFO, thus we have to `queue'
9362 the instructions in reverse order. */
9363 enqueue_insn (op0, gen_move_insn (op0, temp));
9364 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9365 return result;
9369 /* Preincrement, or we can't increment with one simple insn. */
9370 if (post)
9371 /* Save a copy of the value before inc or dec, to return it later. */
9372 temp = value = copy_to_reg (op0);
9373 else
9374 /* Arrange to return the incremented value. */
9375 /* Copy the rtx because expand_binop will protect from the queue,
9376 and the results of that would be invalid for us to return
9377 if our caller does emit_queue before using our result. */
9378 temp = copy_rtx (value = op0);
9380 /* Increment however we can. */
9381 op1 = expand_binop (mode, this_optab, value, op1, op0,
9382 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9384 /* Make sure the value is stored into OP0. */
9385 if (op1 != op0)
9386 emit_move_insn (op0, op1);
9388 return temp;
9391 /* Generate code to calculate EXP using a store-flag instruction
9392 and return an rtx for the result. EXP is either a comparison
9393 or a TRUTH_NOT_EXPR whose operand is a comparison.
9395 If TARGET is nonzero, store the result there if convenient.
9397 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9398 cheap.
9400 Return zero if there is no suitable set-flag instruction
9401 available on this machine.
9403 Once expand_expr has been called on the arguments of the comparison,
9404 we are committed to doing the store flag, since it is not safe to
9405 re-evaluate the expression. We emit the store-flag insn by calling
9406 emit_store_flag, but only expand the arguments if we have a reason
9407 to believe that emit_store_flag will be successful. If we think that
9408 it will, but it isn't, we have to simulate the store-flag with a
9409 set/jump/set sequence. */
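/* Hedged sketch of the two strategies (pseudo-code, assumed):

       r = (a < b);

   ideally becomes a single store-flag (scc) instruction that sets R
   directly from the comparison; when that fails after the operands have
   already been expanded, the tail of this function falls back to roughly

       r = 1;  if (a < b) goto L;  r = 0;  L:

   which is the set/jump/set sequence mentioned above.  */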
9411 static rtx
9412 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9414 enum rtx_code code;
9415 tree arg0, arg1, type;
9416 tree tem;
9417 enum machine_mode operand_mode;
9418 int invert = 0;
9419 int unsignedp;
9420 rtx op0, op1;
9421 enum insn_code icode;
9422 rtx subtarget = target;
9423 rtx result, label;
9425 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9426 result at the end. We can't simply invert the test since it would
9427 have already been inverted if it were valid. This case occurs for
9428 some floating-point comparisons. */
9430 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9431 invert = 1, exp = TREE_OPERAND (exp, 0);
9433 arg0 = TREE_OPERAND (exp, 0);
9434 arg1 = TREE_OPERAND (exp, 1);
9436 /* Don't crash if the comparison was erroneous. */
9437 if (arg0 == error_mark_node || arg1 == error_mark_node)
9438 return const0_rtx;
9440 type = TREE_TYPE (arg0);
9441 operand_mode = TYPE_MODE (type);
9442 unsignedp = TREE_UNSIGNED (type);
9444 /* We won't bother with BLKmode store-flag operations because it would mean
9445 passing a lot of information to emit_store_flag. */
9446 if (operand_mode == BLKmode)
9447 return 0;
9449 /* We won't bother with store-flag operations involving function pointers
9450 when function pointers must be canonicalized before comparisons. */
9451 #ifdef HAVE_canonicalize_funcptr_for_compare
9452 if (HAVE_canonicalize_funcptr_for_compare
9453 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9454 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9455 == FUNCTION_TYPE))
9456 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9457 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9458 == FUNCTION_TYPE))))
9459 return 0;
9460 #endif
9462 STRIP_NOPS (arg0);
9463 STRIP_NOPS (arg1);
9465 /* Get the rtx comparison code to use. We know that EXP is a comparison
9466 operation of some type. Some comparisons against 1 and -1 can be
9467 converted to comparisons with zero. Do so here so that the tests
9468 below will be aware that we have a comparison with zero. These
9469 tests will not catch constants in the first operand, but constants
9470 are rarely passed as the first operand. */
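  /* For instance (sketch): a signed  x < 1  is treated below as  x <= 0
     and  x >= 1  as  x > 0,  while  x <= -1  becomes  x < 0  and
     x > -1  becomes  x >= 0,  so the special-case tests further down only
     ever need to recognize comparisons against zero.  */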
9472 switch (TREE_CODE (exp))
9474 case EQ_EXPR:
9475 code = EQ;
9476 break;
9477 case NE_EXPR:
9478 code = NE;
9479 break;
9480 case LT_EXPR:
9481 if (integer_onep (arg1))
9482 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9483 else
9484 code = unsignedp ? LTU : LT;
9485 break;
9486 case LE_EXPR:
9487 if (! unsignedp && integer_all_onesp (arg1))
9488 arg1 = integer_zero_node, code = LT;
9489 else
9490 code = unsignedp ? LEU : LE;
9491 break;
9492 case GT_EXPR:
9493 if (! unsignedp && integer_all_onesp (arg1))
9494 arg1 = integer_zero_node, code = GE;
9495 else
9496 code = unsignedp ? GTU : GT;
9497 break;
9498 case GE_EXPR:
9499 if (integer_onep (arg1))
9500 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9501 else
9502 code = unsignedp ? GEU : GE;
9503 break;
9505 case UNORDERED_EXPR:
9506 code = UNORDERED;
9507 break;
9508 case ORDERED_EXPR:
9509 code = ORDERED;
9510 break;
9511 case UNLT_EXPR:
9512 code = UNLT;
9513 break;
9514 case UNLE_EXPR:
9515 code = UNLE;
9516 break;
9517 case UNGT_EXPR:
9518 code = UNGT;
9519 break;
9520 case UNGE_EXPR:
9521 code = UNGE;
9522 break;
9523 case UNEQ_EXPR:
9524 code = UNEQ;
9525 break;
9527 default:
9528 abort ();
9531 /* Put a constant second. */
9532 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9534 tem = arg0; arg0 = arg1; arg1 = tem;
9535 code = swap_condition (code);
9538 /* If this is an equality or inequality test of a single bit, we can
9539 do this by shifting the bit being tested to the low-order bit and
9540 masking the result with the constant 1. If the condition was EQ,
9541 we xor it with 1. This does not require an scc insn and is faster
9542 than an scc insn even if we have it.
9544 The code to make this transformation was moved into fold_single_bit_test,
9545 so we just call into the folder and expand its result. */
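  /* Rough example (assumed): a test such as  (x & 8) != 0  is folded into
     (x >> 3) & 1,  and  (x & 8) == 0  into  ((x >> 3) & 1) ^ 1,  so no
     scc instruction is needed at all.  */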
9547 if ((code == NE || code == EQ)
9548 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9549 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9551 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9552 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9553 arg0, arg1, type),
9554 target, VOIDmode, EXPAND_NORMAL);
9557 /* Now see if we are likely to be able to do this. Return if not. */
9558 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9559 return 0;
9561 icode = setcc_gen_code[(int) code];
9562 if (icode == CODE_FOR_nothing
9563 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9565 /* We can only do this if it is one of the special cases that
9566 can be handled without an scc insn. */
9567 if ((code == LT && integer_zerop (arg1))
9568 || (! only_cheap && code == GE && integer_zerop (arg1)))
9570 else if (BRANCH_COST >= 0
9571 && ! only_cheap && (code == NE || code == EQ)
9572 && TREE_CODE (type) != REAL_TYPE
9573 && ((abs_optab->handlers[(int) operand_mode].insn_code
9574 != CODE_FOR_nothing)
9575 || (ffs_optab->handlers[(int) operand_mode].insn_code
9576 != CODE_FOR_nothing)))
9578 else
9579 return 0;
9582 if (! get_subtarget (target)
9583 || GET_MODE (subtarget) != operand_mode)
9584 subtarget = 0;
9586 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9588 if (target == 0)
9589 target = gen_reg_rtx (mode);
9591 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9592 because, if emit_store_flag does anything it will succeed and
9593 OP0 and OP1 will not be used subsequently. */
9595 result = emit_store_flag (target, code,
9596 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9597 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9598 operand_mode, unsignedp, 1);
9600 if (result)
9602 if (invert)
9603 result = expand_binop (mode, xor_optab, result, const1_rtx,
9604 result, 0, OPTAB_LIB_WIDEN);
9605 return result;
9608 /* If this failed, we have to do this with set/compare/jump/set code. */
9609 if (GET_CODE (target) != REG
9610 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9611 target = gen_reg_rtx (GET_MODE (target));
9613 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9614 result = compare_from_rtx (op0, op1, code, unsignedp,
9615 operand_mode, NULL_RTX);
9616 if (GET_CODE (result) == CONST_INT)
9617 return (((result == const0_rtx && ! invert)
9618 || (result != const0_rtx && invert))
9619 ? const0_rtx : const1_rtx);
9621 /* The code of RESULT may not match CODE if compare_from_rtx
9622 decided to swap its operands and reverse the original code.
9624 We know that compare_from_rtx returns either a CONST_INT or
9625 a new comparison code, so it is safe to just extract the
9626 code from RESULT. */
9627 code = GET_CODE (result);
9629 label = gen_label_rtx ();
9630 if (bcc_gen_fctn[(int) code] == 0)
9631 abort ();
9633 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9634 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9635 emit_label (label);
9637 return target;
9641 /* Stubs in case we haven't got a casesi insn. */
9642 #ifndef HAVE_casesi
9643 # define HAVE_casesi 0
9644 # define gen_casesi(a, b, c, d, e) (0)
9645 # define CODE_FOR_casesi CODE_FOR_nothing
9646 #endif
9648 /* If the machine does not have a case insn that compares the bounds,
9649 this means extra overhead for dispatch tables, which raises the
9650 threshold for using them. */
9651 #ifndef CASE_VALUES_THRESHOLD
9652 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9653 #endif /* CASE_VALUES_THRESHOLD */
9655 unsigned int
9656 case_values_threshold (void)
9658 return CASE_VALUES_THRESHOLD;
9661 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9662 0 otherwise (i.e. if there is no casesi instruction). */
9663 int
9664 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9665 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9667 enum machine_mode index_mode = SImode;
9668 int index_bits = GET_MODE_BITSIZE (index_mode);
9669 rtx op1, op2, index;
9670 enum machine_mode op_mode;
9672 if (! HAVE_casesi)
9673 return 0;
9675 /* Convert the index to SImode. */
9676 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9678 enum machine_mode omode = TYPE_MODE (index_type);
9679 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9681 /* We must handle the endpoints in the original mode. */
9682 index_expr = build (MINUS_EXPR, index_type,
9683 index_expr, minval);
9684 minval = integer_zero_node;
9685 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9686 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9687 omode, 1, default_label);
9688 /* Now we can safely truncate. */
9689 index = convert_to_mode (index_mode, index, 0);
9691 else
9693 if (TYPE_MODE (index_type) != index_mode)
9695 index_expr = convert ((*lang_hooks.types.type_for_size)
9696 (index_bits, 0), index_expr);
9697 index_type = TREE_TYPE (index_expr);
9700 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9702 emit_queue ();
9703 index = protect_from_queue (index, 0);
9704 do_pending_stack_adjust ();
9706 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9707 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9708 (index, op_mode))
9709 index = copy_to_mode_reg (op_mode, index);
9711 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9713 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9714 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9715 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9716 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9717 (op1, op_mode))
9718 op1 = copy_to_mode_reg (op_mode, op1);
9720 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9722 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9723 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9724 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9725 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9726 (op2, op_mode))
9727 op2 = copy_to_mode_reg (op_mode, op2);
9729 emit_jump_insn (gen_casesi (index, op1, op2,
9730 table_label, default_label));
9731 return 1;
9734 /* Attempt to generate a tablejump instruction; same concept. */
9735 #ifndef HAVE_tablejump
9736 #define HAVE_tablejump 0
9737 #define gen_tablejump(x, y) (0)
9738 #endif
9740 /* Subroutine of the next function.
9742 INDEX is the value being switched on, with the lowest value
9743 in the table already subtracted.
9744 MODE is its expected mode (needed if INDEX is constant).
9745 RANGE is the length of the jump table.
9746 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9748 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9749 index value is out of range. */
9751 static void
9752 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9753 rtx default_label)
9755 rtx temp, vector;
9757 if (INTVAL (range) > cfun->max_jumptable_ents)
9758 cfun->max_jumptable_ents = INTVAL (range);
9760 /* Do an unsigned comparison (in the proper mode) between the index
9761 expression and the value which represents the length of the range.
9762 Since we just finished subtracting the lower bound of the range
9763 from the index expression, this comparison allows us to simultaneously
9764 check that the original index expression value is both greater than
9765 or equal to the minimum value of the range and less than or equal to
9766 the maximum value of the range. */
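  /* Worked example (illustrative only): for case values spanning 10 .. 20,
     INDEX arrives here with 10 already subtracted and RANGE is 10; an
     original value of 7 becomes (unsigned) -3, which compares greater than
     10 under GTU, so the single branch below also rejects values under the
     lower bound.  */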
9768 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9769 default_label);
9771 /* If index is in range, it must fit in Pmode.
9772 Convert to Pmode so we can index with it. */
9773 if (mode != Pmode)
9774 index = convert_to_mode (Pmode, index, 1);
9776 /* Don't let a MEM slip thru, because then INDEX that comes
9777 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9778 and break_out_memory_refs will go to work on it and mess it up. */
9779 #ifdef PIC_CASE_VECTOR_ADDRESS
9780 if (flag_pic && GET_CODE (index) != REG)
9781 index = copy_to_mode_reg (Pmode, index);
9782 #endif
9784 /* If flag_force_addr were to affect this address
9785 it could interfere with the tricky assumptions made
9786 about addresses that contain label-refs,
9787 which may be valid only very near the tablejump itself. */
9788 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9789 GET_MODE_SIZE, because this indicates how large insns are. The other
9790 uses should all be Pmode, because they are addresses. This code
9791 could fail if addresses and insns are not the same size. */
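  /* Illustrative arithmetic (assuming 4-byte table entries): the address
     computed below is
         table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE),
     e.g. table_label + index * 4, from which the target label (or its
     PC-relative offset in the PIC case) is loaded and jumped to.  */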
9792 index = gen_rtx_PLUS (Pmode,
9793 gen_rtx_MULT (Pmode, index,
9794 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9795 gen_rtx_LABEL_REF (Pmode, table_label));
9796 #ifdef PIC_CASE_VECTOR_ADDRESS
9797 if (flag_pic)
9798 index = PIC_CASE_VECTOR_ADDRESS (index);
9799 else
9800 #endif
9801 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9802 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9803 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9804 RTX_UNCHANGING_P (vector) = 1;
9805 MEM_NOTRAP_P (vector) = 1;
9806 convert_move (temp, vector, 0);
9808 emit_jump_insn (gen_tablejump (temp, table_label));
9810 /* If we are generating PIC code or if the table is PC-relative, the
9811 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9812 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9813 emit_barrier ();
9816 int
9817 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9818 rtx table_label, rtx default_label)
9820 rtx index;
9822 if (! HAVE_tablejump)
9823 return 0;
9825 index_expr = fold (build (MINUS_EXPR, index_type,
9826 convert (index_type, index_expr),
9827 convert (index_type, minval)));
9828 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9829 emit_queue ();
9830 index = protect_from_queue (index, 0);
9831 do_pending_stack_adjust ();
9833 do_tablejump (index, TYPE_MODE (index_type),
9834 convert_modes (TYPE_MODE (index_type),
9835 TYPE_MODE (TREE_TYPE (range)),
9836 expand_expr (range, NULL_RTX,
9837 VOIDmode, 0),
9838 TREE_UNSIGNED (TREE_TYPE (range))),
9839 table_label, default_label);
9840 return 1;
9843 /* Nonzero if the mode is a valid vector mode for this architecture.
9844 This returns nonzero even if there is no hardware support for the
9845 vector mode, but we can emulate with narrower modes. */
9847 int
9848 vector_mode_valid_p (enum machine_mode mode)
9850 enum mode_class class = GET_MODE_CLASS (mode);
9851 enum machine_mode innermode;
9853 /* Doh! What's going on? */
9854 if (class != MODE_VECTOR_INT
9855 && class != MODE_VECTOR_FLOAT)
9856 return 0;
9858 /* Hardware support. Woo hoo! */
9859 if (VECTOR_MODE_SUPPORTED_P (mode))
9860 return 1;
9862 innermode = GET_MODE_INNER (mode);
9864 /* We should probably return 1 if requesting V4DI and we have no DI,
9865 but do have V2DI; that case is probably very unlikely. */
9867 /* If we have support for the inner mode, we can safely emulate it.
9868 We may not have V2DI, but we can emulate with a pair of DIs. */
9869 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9872 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9873 static rtx
9874 const_vector_from_tree (tree exp)
9876 rtvec v;
9877 int units, i;
9878 tree link, elt;
9879 enum machine_mode inner, mode;
9881 mode = TYPE_MODE (TREE_TYPE (exp));
9883 if (is_zeros_p (exp))
9884 return CONST0_RTX (mode);
9886 units = GET_MODE_NUNITS (mode);
9887 inner = GET_MODE_INNER (mode);
9889 v = rtvec_alloc (units);
9891 link = TREE_VECTOR_CST_ELTS (exp);
9892 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9894 elt = TREE_VALUE (link);
9896 if (TREE_CODE (elt) == REAL_CST)
9897 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9898 inner);
9899 else
9900 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9901 TREE_INT_CST_HIGH (elt),
9902 inner);
9905 /* Initialize remaining elements to 0. */
9906 for (; i < units; ++i)
9907 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9909 return gen_rtx_raw_CONST_VECTOR (mode, v);
9912 #include "gt-expr.h"