1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
129 void *constfundata;
130 int reverse;
131 };
133 static rtx enqueue_insn (rtx, rtx);
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
135 unsigned int);
136 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
137 struct move_by_pieces *);
138 static bool block_move_libcall_safe_for_call_parm (void);
139 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
140 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
146 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
147 struct store_by_pieces *);
148 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
149 static rtx clear_storage_via_libcall (rtx, rtx);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static int is_zeros_p (tree);
154 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, int, int);
157 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
158 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
159 tree, enum machine_mode, int, tree, int);
160 static rtx var_rtx (tree);
162 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
163 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
165 static int is_aligning_offset (tree, tree);
166 static rtx expand_increment (tree, int, int);
167 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
168 enum expand_modifier);
169 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
170 #ifdef PUSH_ROUNDING
171 static void emit_single_push_insn (enum machine_mode, rtx, tree);
172 #endif
173 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
174 static rtx const_vector_from_tree (tree);
176 /* Record for each mode whether we can move a register directly to or
177 from an object of that mode in memory. If we can't, we won't try
178 to use that mode directly when accessing a field of that mode. */
180 static char direct_load[NUM_MACHINE_MODES];
181 static char direct_store[NUM_MACHINE_MODES];
183 /* Record for each mode whether we can float-extend from memory. */
185 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
187 /* If a memory-to-memory move would take MOVE_RATIO or more simple
188 move-instruction sequences, we will do a movstr or libcall instead. */
190 #ifndef MOVE_RATIO
191 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
192 #define MOVE_RATIO 2
193 #else
194 /* If we are optimizing for space (-Os), cut down the default move ratio. */
195 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 #endif
197 #endif
199 /* This macro is used to determine whether move_by_pieces should be called
200 to perform a structure copy. */
201 #ifndef MOVE_BY_PIECES_P
202 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
204 #endif
206 /* If a clear memory operation would take CLEAR_RATIO or more simple
207 move-instruction sequences, we will do a clrstr or libcall instead. */
209 #ifndef CLEAR_RATIO
210 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
211 #define CLEAR_RATIO 2
212 #else
213 /* If we are optimizing for space, cut down the default clear ratio. */
214 #define CLEAR_RATIO (optimize_size ? 3 : 15)
215 #endif
216 #endif
218 /* This macro is used to determine whether clear_by_pieces should be
219 called to clear storage. */
220 #ifndef CLEAR_BY_PIECES_P
221 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
222 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
223 #endif
225 /* This macro is used to determine whether store_by_pieces should be
226 called to "memset" storage with byte values other than zero, or
227 to "memcpy" storage when the source is a constant string. */
228 #ifndef STORE_BY_PIECES_P
229 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
230 #endif
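/* As a rough illustration of the heuristics above: on a hypothetical target
   whose widest by-pieces mode is 4 bytes and whose MOVE_RATIO is the default
   15, a 16-byte word-aligned copy costs move_by_pieces_ninsns = 16 / 4 = 4
   insns; 4 < 15, so MOVE_BY_PIECES_P holds and the copy is expanded inline.
   A 256-byte copy would cost 64 insns, so a movstr pattern or a memcpy
   libcall is used instead.  CLEAR_BY_PIECES_P applies the same arithmetic
   against CLEAR_RATIO, and STORE_BY_PIECES_P defaults to the
   MOVE_BY_PIECES_P test.  */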
232 /* This array records the insn_code of insns to perform block moves. */
233 enum insn_code movstr_optab[NUM_MACHINE_MODES];
235 /* This array records the insn_code of insns to perform block clears. */
236 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
238 /* These arrays record the insn_code of two different kinds of insns
239 to perform block compares. */
240 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
241 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
243 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
244 struct file_stack *expr_wfl_stack;
246 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
248 #ifndef SLOW_UNALIGNED_ACCESS
249 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
250 #endif
252 /* This is run once per compilation to set up which modes can be used
253 directly in memory and to initialize the block move optab. */
255 void
256 init_expr_once (void)
258 rtx insn, pat;
259 enum machine_mode mode;
260 int num_clobbers;
261 rtx mem, mem1;
262 rtx reg;
264 /* Try indexing by frame ptr and try by stack ptr.
265 It is known that on the Convex the stack ptr isn't a valid index.
266 With luck, one or the other is valid on any machine. */
267 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
268 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
270 /* A scratch register we can modify in-place below to avoid
271 useless RTL allocations. */
272 reg = gen_rtx_REG (VOIDmode, -1);
274 insn = rtx_alloc (INSN);
275 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
276 PATTERN (insn) = pat;
278 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
279 mode = (enum machine_mode) ((int) mode + 1))
281 int regno;
283 direct_load[(int) mode] = direct_store[(int) mode] = 0;
284 PUT_MODE (mem, mode);
285 PUT_MODE (mem1, mode);
286 PUT_MODE (reg, mode);
288 /* See if there is some register that can be used in this mode and
289 directly loaded or stored from memory. */
291 if (mode != VOIDmode && mode != BLKmode)
292 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
293 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
294 regno++)
296 if (! HARD_REGNO_MODE_OK (regno, mode))
297 continue;
299 REGNO (reg) = regno;
301 SET_SRC (pat) = mem;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
306 SET_SRC (pat) = mem1;
307 SET_DEST (pat) = reg;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_load[(int) mode] = 1;
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
316 SET_SRC (pat) = reg;
317 SET_DEST (pat) = mem1;
318 if (recog (pat, insn, &num_clobbers) >= 0)
319 direct_store[(int) mode] = 1;
323 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
325 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
326 mode = GET_MODE_WIDER_MODE (mode))
328 enum machine_mode srcmode;
329 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
330 srcmode = GET_MODE_WIDER_MODE (srcmode))
332 enum insn_code ic;
334 ic = can_extend_p (mode, srcmode, 0);
335 if (ic == CODE_FOR_nothing)
336 continue;
338 PUT_MODE (mem, srcmode);
340 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
341 float_extend_from_mem[mode][srcmode] = true;
346 /* This is run at the start of compiling a function. */
348 void
349 init_expr (void)
351 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
354 /* Small sanity check that the queue is empty at the end of a function. */
356 void
357 finish_expr_for_function (void)
359 if (pending_chain)
360 abort ();
363 /* Manage the queue of increment instructions to be output
364 for POSTINCREMENT_EXPR expressions, etc. */
366 /* Queue up to increment (or change) VAR later. BODY says how:
367 BODY should be the same thing you would pass to emit_insn
368 to increment right away. It will go to emit_insn later on.
370 The value is a QUEUED expression to be used in place of VAR
371 where you want to guarantee the pre-incrementation value of VAR. */
373 static rtx
374 enqueue_insn (rtx var, rtx body)
376 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
377 body, pending_chain);
378 return pending_chain;
381 /* Use protect_from_queue to convert a QUEUED expression
382 into something that you can put immediately into an instruction.
383 If the queued incrementation has not happened yet,
384 protect_from_queue returns the variable itself.
385 If the incrementation has happened, protect_from_queue returns a temp
386 that contains a copy of the old value of the variable.
388 Any time an rtx which might possibly be a QUEUED is to be put
389 into an instruction, it must be passed through protect_from_queue first.
390 QUEUED expressions are not meaningful in instructions.
392 Do not pass a value through protect_from_queue and then hold
393 on to it for a while before putting it in an instruction!
394 If the queue is flushed in between, incorrect code will result. */
396 rtx
397 protect_from_queue (rtx x, int modify)
399 RTX_CODE code = GET_CODE (x);
401 #if 0 /* A QUEUED can hang around after the queue is forced out. */
402 /* Shortcut for most common case. */
403 if (pending_chain == 0)
404 return x;
405 #endif
407 if (code != QUEUED)
409 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
410 use of autoincrement. Make a copy of the contents of the memory
411 location rather than a copy of the address, but not if the value is
412 of mode BLKmode. Don't modify X in place since it might be
413 shared. */
414 if (code == MEM && GET_MODE (x) != BLKmode
415 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
417 rtx y = XEXP (x, 0);
418 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
420 if (QUEUED_INSN (y))
422 rtx temp = gen_reg_rtx (GET_MODE (x));
424 emit_insn_before (gen_move_insn (temp, new),
425 QUEUED_INSN (y));
426 return temp;
429 /* Copy the address into a pseudo, so that the returned value
430 remains correct across calls to emit_queue. */
431 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
434 /* Otherwise, recursively protect the subexpressions of all
435 the kinds of rtx's that can contain a QUEUED. */
436 if (code == MEM)
438 rtx tem = protect_from_queue (XEXP (x, 0), 0);
439 if (tem != XEXP (x, 0))
441 x = copy_rtx (x);
442 XEXP (x, 0) = tem;
445 else if (code == PLUS || code == MULT)
447 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
448 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
449 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
451 x = copy_rtx (x);
452 XEXP (x, 0) = new0;
453 XEXP (x, 1) = new1;
456 return x;
458 /* If the increment has not happened, use the variable itself. Copy it
459 into a new pseudo so that the value remains correct across calls to
460 emit_queue. */
461 if (QUEUED_INSN (x) == 0)
462 return copy_to_reg (QUEUED_VAR (x));
463 /* If the increment has happened and a pre-increment copy exists,
464 use that copy. */
465 if (QUEUED_COPY (x) != 0)
466 return QUEUED_COPY (x);
467 /* The increment has happened but we haven't set up a pre-increment copy.
468 Set one up now, and use it. */
469 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
470 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
471 QUEUED_INSN (x));
472 return QUEUED_COPY (x);
475 /* Return nonzero if X contains a QUEUED expression:
476 if it contains anything that will be altered by a queued increment.
477 We handle only combinations of MEM, PLUS, MINUS and MULT operators
478 since memory addresses generally contain only those. */
480 int
481 queued_subexp_p (rtx x)
483 enum rtx_code code = GET_CODE (x);
484 switch (code)
486 case QUEUED:
487 return 1;
488 case MEM:
489 return queued_subexp_p (XEXP (x, 0));
490 case MULT:
491 case PLUS:
492 case MINUS:
493 return (queued_subexp_p (XEXP (x, 0))
494 || queued_subexp_p (XEXP (x, 1)));
495 default:
496 return 0;
500 /* Perform all the pending incrementations. */
502 void
503 emit_queue (void)
505 rtx p;
506 while ((p = pending_chain))
508 rtx body = QUEUED_BODY (p);
510 switch (GET_CODE (body))
512 case INSN:
513 case JUMP_INSN:
514 case CALL_INSN:
515 case CODE_LABEL:
516 case BARRIER:
517 case NOTE:
518 QUEUED_INSN (p) = body;
519 emit_insn (body);
520 break;
522 #ifdef ENABLE_CHECKING
523 case SEQUENCE:
524 abort ();
525 break;
526 #endif
528 default:
529 QUEUED_INSN (p) = emit_insn (body);
530 break;
533 pending_chain = QUEUED_NEXT (p);
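/* A minimal usage sketch of the queue machinery above, assuming two operand
   rtx's OP0 (only read) and OP1 (written): each operand goes through
   protect_from_queue immediately before use, and the queue is flushed at the
   end of the statement.

       op0 = protect_from_queue (op0, 0);
       op1 = protect_from_queue (op1, 1);
       emit_move_insn (op1, op0);
       emit_queue ();

   As documented above protect_from_queue, its result must not be held across
   a later emit_queue call, or incorrect code results.  */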
537 /* Copy data from FROM to TO, where the machine modes are not the same.
538 Both modes may be integer, or both may be floating.
539 UNSIGNEDP should be nonzero if FROM is an unsigned type.
540 This causes zero-extension instead of sign-extension. */
542 void
543 convert_move (rtx to, rtx from, int unsignedp)
545 enum machine_mode to_mode = GET_MODE (to);
546 enum machine_mode from_mode = GET_MODE (from);
547 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
548 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
549 enum insn_code code;
550 rtx libcall;
552 /* rtx code for making an equivalent value. */
553 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
554 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
556 to = protect_from_queue (to, 1);
557 from = protect_from_queue (from, 0);
559 if (to_real != from_real)
560 abort ();
562 /* If FROM is a SUBREG that indicates that we have already done at least
563 the required extension, strip it. We don't handle such SUBREGs as
564 TO here. */
566 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
567 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
568 >= GET_MODE_SIZE (to_mode))
569 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
570 from = gen_lowpart (to_mode, from), from_mode = to_mode;
572 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
573 abort ();
575 if (to_mode == from_mode
576 || (from_mode == VOIDmode && CONSTANT_P (from)))
578 emit_move_insn (to, from);
579 return;
582 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
584 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
585 abort ();
587 if (VECTOR_MODE_P (to_mode))
588 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
589 else
590 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
592 emit_move_insn (to, from);
593 return;
596 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
598 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
599 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
600 return;
603 if (to_real)
605 rtx value, insns;
606 convert_optab tab;
608 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
609 tab = sext_optab;
610 else if (GET_MODE_BITSIZE (from_mode) > GET_MODE_BITSIZE (to_mode))
611 tab = trunc_optab;
612 else
613 abort ();
615 /* Try converting directly if the insn is supported. */
617 code = tab->handlers[to_mode][from_mode].insn_code;
618 if (code != CODE_FOR_nothing)
620 emit_unop_insn (code, to, from,
621 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
622 return;
625 /* Otherwise use a libcall. */
626 libcall = tab->handlers[to_mode][from_mode].libfunc;
628 if (!libcall)
629 /* This conversion is not implemented yet. */
630 abort ();
632 start_sequence ();
633 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
634 1, from, from_mode);
635 insns = get_insns ();
636 end_sequence ();
637 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
638 from));
639 return;
642 /* Handle pointer conversion. */ /* SPEE 900220. */
643 /* Targets are expected to provide conversion insns between PxImode and
644 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
645 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
647 enum machine_mode full_mode
648 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
650 if (trunc_optab->handlers[to_mode][full_mode].insn_code
651 == CODE_FOR_nothing)
652 abort ();
654 if (full_mode != from_mode)
655 from = convert_to_mode (full_mode, from, unsignedp);
656 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
657 to, from, UNKNOWN);
658 return;
660 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
662 enum machine_mode full_mode
663 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
665 if (sext_optab->handlers[full_mode][from_mode].insn_code
666 == CODE_FOR_nothing)
667 abort ();
669 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
670 to, from, UNKNOWN);
671 if (to_mode == full_mode)
672 return;
674 /* else proceed to integer conversions below */
675 from_mode = full_mode;
678 /* Now both modes are integers. */
680 /* Handle expanding beyond a word. */
681 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
682 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
684 rtx insns;
685 rtx lowpart;
686 rtx fill_value;
687 rtx lowfrom;
688 int i;
689 enum machine_mode lowpart_mode;
690 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
692 /* Try converting directly if the insn is supported. */
693 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
694 != CODE_FOR_nothing)
696 /* If FROM is a SUBREG, put it into a register. Do this
697 so that we always generate the same set of insns for
698 better cse'ing; if an intermediate assignment occurred,
699 we won't be doing the operation directly on the SUBREG. */
700 if (optimize > 0 && GET_CODE (from) == SUBREG)
701 from = force_reg (from_mode, from);
702 emit_unop_insn (code, to, from, equiv_code);
703 return;
705 /* Next, try converting via full word. */
706 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
707 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
708 != CODE_FOR_nothing))
710 if (GET_CODE (to) == REG)
711 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
712 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
713 emit_unop_insn (code, to,
714 gen_lowpart (word_mode, to), equiv_code);
715 return;
718 /* No special multiword conversion insn; do it by hand. */
719 start_sequence ();
721 /* Since we will turn this into a no conflict block, we must ensure
722 that the source does not overlap the target. */
724 if (reg_overlap_mentioned_p (to, from))
725 from = force_reg (from_mode, from);
727 /* Get a copy of FROM widened to a word, if necessary. */
728 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
729 lowpart_mode = word_mode;
730 else
731 lowpart_mode = from_mode;
733 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
735 lowpart = gen_lowpart (lowpart_mode, to);
736 emit_move_insn (lowpart, lowfrom);
738 /* Compute the value to put in each remaining word. */
739 if (unsignedp)
740 fill_value = const0_rtx;
741 else
743 #ifdef HAVE_slt
744 if (HAVE_slt
745 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
746 && STORE_FLAG_VALUE == -1)
748 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
749 lowpart_mode, 0);
750 fill_value = gen_reg_rtx (word_mode);
751 emit_insn (gen_slt (fill_value));
753 else
754 #endif
756 fill_value
757 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
758 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
759 NULL_RTX, 0);
760 fill_value = convert_to_mode (word_mode, fill_value, 1);
764 /* Fill the remaining words. */
765 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
767 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
768 rtx subword = operand_subword (to, index, 1, to_mode);
770 if (subword == 0)
771 abort ();
773 if (fill_value != subword)
774 emit_move_insn (subword, fill_value);
777 insns = get_insns ();
778 end_sequence ();
780 emit_no_conflict_block (insns, to, from, NULL_RTX,
781 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
782 return;
785 /* Truncating multi-word to a word or less. */
786 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
787 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
789 if (!((GET_CODE (from) == MEM
790 && ! MEM_VOLATILE_P (from)
791 && direct_load[(int) to_mode]
792 && ! mode_dependent_address_p (XEXP (from, 0)))
793 || GET_CODE (from) == REG
794 || GET_CODE (from) == SUBREG))
795 from = force_reg (from_mode, from);
796 convert_move (to, gen_lowpart (word_mode, from), 0);
797 return;
800 /* Now follow all the conversions between integers
801 no more than a word long. */
803 /* For truncation, usually we can just refer to FROM in a narrower mode. */
804 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
805 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
806 GET_MODE_BITSIZE (from_mode)))
808 if (!((GET_CODE (from) == MEM
809 && ! MEM_VOLATILE_P (from)
810 && direct_load[(int) to_mode]
811 && ! mode_dependent_address_p (XEXP (from, 0)))
812 || GET_CODE (from) == REG
813 || GET_CODE (from) == SUBREG))
814 from = force_reg (from_mode, from);
815 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
816 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
817 from = copy_to_reg (from);
818 emit_move_insn (to, gen_lowpart (to_mode, from));
819 return;
822 /* Handle extension. */
823 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
825 /* Convert directly if that works. */
826 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
827 != CODE_FOR_nothing)
829 if (flag_force_mem)
830 from = force_not_mem (from);
832 emit_unop_insn (code, to, from, equiv_code);
833 return;
835 else
837 enum machine_mode intermediate;
838 rtx tmp;
839 tree shift_amount;
841 /* Search for a mode to convert via. */
842 for (intermediate = from_mode; intermediate != VOIDmode;
843 intermediate = GET_MODE_WIDER_MODE (intermediate))
844 if (((can_extend_p (to_mode, intermediate, unsignedp)
845 != CODE_FOR_nothing)
846 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
847 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
848 GET_MODE_BITSIZE (intermediate))))
849 && (can_extend_p (intermediate, from_mode, unsignedp)
850 != CODE_FOR_nothing))
852 convert_move (to, convert_to_mode (intermediate, from,
853 unsignedp), unsignedp);
854 return;
857 /* No suitable intermediate mode.
858 Generate what we need with shifts. */
859 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
860 - GET_MODE_BITSIZE (from_mode), 0);
861 from = gen_lowpart (to_mode, force_reg (from_mode, from));
862 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
863 to, unsignedp);
864 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
865 to, unsignedp);
866 if (tmp != to)
867 emit_move_insn (to, tmp);
868 return;
872 /* Support special truncate insns for certain modes. */
873 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
875 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
876 to, from, UNKNOWN);
877 return;
880 /* Handle truncation of volatile memrefs, and so on;
881 the things that couldn't be truncated directly,
882 and for which there was no special instruction.
884 ??? Code above formerly short-circuited this, for most integer
885 mode pairs, with a force_reg in from_mode followed by a recursive
886 call to this routine. Appears always to have been wrong. */
887 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
889 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
890 emit_move_insn (to, temp);
891 return;
894 /* Mode combination is not recognized. */
895 abort ();
898 /* Return an rtx for a value that would result
899 from converting X to mode MODE.
900 Both X and MODE may be floating, or both integer.
901 UNSIGNEDP is nonzero if X is an unsigned value.
902 This can be done by referring to a part of X in place
903 or by copying to a new temporary with conversion.
905 This function *must not* call protect_from_queue
906 except when putting X into an insn (in which case convert_move does it). */
908 rtx
909 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
911 return convert_modes (mode, VOIDmode, x, unsignedp);
914 /* Return an rtx for a value that would result
915 from converting X from mode OLDMODE to mode MODE.
916 Both modes may be floating, or both integer.
917 UNSIGNEDP is nonzero if X is an unsigned value.
919 This can be done by referring to a part of X in place
920 or by copying to a new temporary with conversion.
922 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
924 This function *must not* call protect_from_queue
925 except when putting X into an insn (in which case convert_move does it). */
927 rtx
928 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
930 rtx temp;
932 /* If FROM is a SUBREG that indicates that we have already done at least
933 the required extension, strip it. */
935 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
936 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
937 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
938 x = gen_lowpart (mode, x);
940 if (GET_MODE (x) != VOIDmode)
941 oldmode = GET_MODE (x);
943 if (mode == oldmode)
944 return x;
946 /* There is one case that we must handle specially: If we are converting
947 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
948 we are to interpret the constant as unsigned, gen_lowpart will do
949 the wrong thing if the constant appears negative. What we want to do is
950 make the high-order word of the constant zero, not all ones. */
952 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
953 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
954 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
956 HOST_WIDE_INT val = INTVAL (x);
958 if (oldmode != VOIDmode
959 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
961 int width = GET_MODE_BITSIZE (oldmode);
963 /* We need to zero extend VAL. */
964 val &= ((HOST_WIDE_INT) 1 << width) - 1;
967 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
970 /* We can do this with a gen_lowpart if both desired and current modes
971 are integer, and this is either a constant integer, a register, or a
972 non-volatile MEM. Except for the constant case where MODE is no
973 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
975 if ((GET_CODE (x) == CONST_INT
976 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
977 || (GET_MODE_CLASS (mode) == MODE_INT
978 && GET_MODE_CLASS (oldmode) == MODE_INT
979 && (GET_CODE (x) == CONST_DOUBLE
980 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
981 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
982 && direct_load[(int) mode])
983 || (GET_CODE (x) == REG
984 && (! HARD_REGISTER_P (x)
985 || HARD_REGNO_MODE_OK (REGNO (x), mode))
986 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
987 GET_MODE_BITSIZE (GET_MODE (x)))))))))
989 /* ?? If we don't know OLDMODE, we have to assume here that
990 X does not need sign- or zero-extension. This may not be
991 the case, but it's the best we can do. */
992 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
993 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
995 HOST_WIDE_INT val = INTVAL (x);
996 int width = GET_MODE_BITSIZE (oldmode);
998 /* We must sign or zero-extend in this case. Start by
999 zero-extending, then sign extend if we need to. */
1000 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1001 if (! unsignedp
1002 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1003 val |= (HOST_WIDE_INT) (-1) << width;
1005 return gen_int_mode (val, mode);
1008 return gen_lowpart (mode, x);
1011 /* Converting from an integer constant into a vector mode is always
1012 equivalent to a subreg operation. */
1013 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1015 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1016 abort ();
1017 return simplify_gen_subreg (mode, x, oldmode, 0);
1020 temp = gen_reg_rtx (mode);
1021 convert_move (temp, x, unsignedp);
1022 return temp;
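/* A minimal usage sketch, assuming a hypothetical QImode pseudo that is to be
   zero-extended to SImode via the entry points above:

       rtx byte = gen_reg_rtx (QImode);
       rtx word = convert_to_mode (SImode, byte, 1);

   The last argument is UNSIGNEDP, so 1 requests zero extension and 0 sign
   extension; the result may be BYTE reinterpreted in the new mode or a fresh
   pseudo produced via convert_move.  */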
1025 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1026 store efficiently. Due to internal GCC limitations, this is
1027 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1028 for an immediate constant. */
1030 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
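/* For example, on a hypothetical host with a 64-bit HOST_WIDE_INT and a
   target whose MOVE_MAX_PIECES is 8, STORE_MAX_PIECES evaluates to
   MIN (8, 16) = 8, so store_by_pieces emits pieces of at most 8 bytes.  */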
1032 /* Determine whether the LEN bytes can be moved by using several move
1033 instructions. Return nonzero if a call to move_by_pieces should
1034 succeed. */
1036 int
1037 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1038 unsigned int align ATTRIBUTE_UNUSED)
1040 return MOVE_BY_PIECES_P (len, align);
1043 /* Generate several move instructions to copy LEN bytes from block FROM to
1044 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1045 and TO through protect_from_queue before calling.
1047 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1048 used to push FROM to the stack.
1050 ALIGN is maximum stack alignment we can assume.
1052 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1053 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1054 stpcpy. */
1056 rtx
1057 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1058 unsigned int align, int endp)
1060 struct move_by_pieces data;
1061 rtx to_addr, from_addr = XEXP (from, 0);
1062 unsigned int max_size = MOVE_MAX_PIECES + 1;
1063 enum machine_mode mode = VOIDmode, tmode;
1064 enum insn_code icode;
1066 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1068 data.offset = 0;
1069 data.from_addr = from_addr;
1070 if (to)
1072 to_addr = XEXP (to, 0);
1073 data.to = to;
1074 data.autinc_to
1075 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1076 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1077 data.reverse
1078 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1080 else
1082 to_addr = NULL_RTX;
1083 data.to = NULL_RTX;
1084 data.autinc_to = 1;
1085 #ifdef STACK_GROWS_DOWNWARD
1086 data.reverse = 1;
1087 #else
1088 data.reverse = 0;
1089 #endif
1091 data.to_addr = to_addr;
1092 data.from = from;
1093 data.autinc_from
1094 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1095 || GET_CODE (from_addr) == POST_INC
1096 || GET_CODE (from_addr) == POST_DEC);
1098 data.explicit_inc_from = 0;
1099 data.explicit_inc_to = 0;
1100 if (data.reverse) data.offset = len;
1101 data.len = len;
1103 /* If copying requires more than two move insns,
1104 copy addresses to registers (to make displacements shorter)
1105 and use post-increment if available. */
1106 if (!(data.autinc_from && data.autinc_to)
1107 && move_by_pieces_ninsns (len, align) > 2)
1109 /* Find the mode of the largest move... */
1110 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1111 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1112 if (GET_MODE_SIZE (tmode) < max_size)
1113 mode = tmode;
1115 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1117 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1118 data.autinc_from = 1;
1119 data.explicit_inc_from = -1;
1121 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1123 data.from_addr = copy_addr_to_reg (from_addr);
1124 data.autinc_from = 1;
1125 data.explicit_inc_from = 1;
1127 if (!data.autinc_from && CONSTANT_P (from_addr))
1128 data.from_addr = copy_addr_to_reg (from_addr);
1129 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1131 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1132 data.autinc_to = 1;
1133 data.explicit_inc_to = -1;
1135 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1137 data.to_addr = copy_addr_to_reg (to_addr);
1138 data.autinc_to = 1;
1139 data.explicit_inc_to = 1;
1141 if (!data.autinc_to && CONSTANT_P (to_addr))
1142 data.to_addr = copy_addr_to_reg (to_addr);
1145 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1146 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1147 align = MOVE_MAX * BITS_PER_UNIT;
1149 /* First move what we can in the largest integer mode, then go to
1150 successively smaller modes. */
1152 while (max_size > 1)
1154 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1155 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1156 if (GET_MODE_SIZE (tmode) < max_size)
1157 mode = tmode;
1159 if (mode == VOIDmode)
1160 break;
1162 icode = mov_optab->handlers[(int) mode].insn_code;
1163 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1164 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1166 max_size = GET_MODE_SIZE (mode);
1169 /* The code above should have handled everything. */
1170 if (data.len > 0)
1171 abort ();
1173 if (endp)
1175 rtx to1;
1177 if (data.reverse)
1178 abort ();
1179 if (data.autinc_to)
1181 if (endp == 2)
1183 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1184 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1185 else
1186 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1187 -1));
1189 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1190 data.offset);
1192 else
1194 if (endp == 2)
1195 --data.offset;
1196 to1 = adjust_address (data.to, QImode, data.offset);
1198 return to1;
1200 else
1201 return data.to;
1204 /* Return number of insns required to move L bytes by pieces.
1205 ALIGN (in bits) is maximum alignment we can assume. */
1207 static unsigned HOST_WIDE_INT
1208 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1210 unsigned HOST_WIDE_INT n_insns = 0;
1211 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1213 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1214 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1215 align = MOVE_MAX * BITS_PER_UNIT;
1217 while (max_size > 1)
1219 enum machine_mode mode = VOIDmode, tmode;
1220 enum insn_code icode;
1222 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1223 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1224 if (GET_MODE_SIZE (tmode) < max_size)
1225 mode = tmode;
1227 if (mode == VOIDmode)
1228 break;
1230 icode = mov_optab->handlers[(int) mode].insn_code;
1231 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1232 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1234 max_size = GET_MODE_SIZE (mode);
1237 if (l)
1238 abort ();
1239 return n_insns;
1242 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1243 with move instructions for mode MODE. GENFUN is the gen_... function
1244 to make a move insn for that mode. DATA has all the other info. */
1246 static void
1247 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1248 struct move_by_pieces *data)
1250 unsigned int size = GET_MODE_SIZE (mode);
1251 rtx to1 = NULL_RTX, from1;
1253 while (data->len >= size)
1255 if (data->reverse)
1256 data->offset -= size;
1258 if (data->to)
1260 if (data->autinc_to)
1261 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1262 data->offset);
1263 else
1264 to1 = adjust_address (data->to, mode, data->offset);
1267 if (data->autinc_from)
1268 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1269 data->offset);
1270 else
1271 from1 = adjust_address (data->from, mode, data->offset);
1273 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1274 emit_insn (gen_add2_insn (data->to_addr,
1275 GEN_INT (-(HOST_WIDE_INT)size)));
1276 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1277 emit_insn (gen_add2_insn (data->from_addr,
1278 GEN_INT (-(HOST_WIDE_INT)size)));
1280 if (data->to)
1281 emit_insn ((*genfun) (to1, from1));
1282 else
1284 #ifdef PUSH_ROUNDING
1285 emit_single_push_insn (mode, from1, NULL);
1286 #else
1287 abort ();
1288 #endif
1291 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1292 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1293 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1294 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1296 if (! data->reverse)
1297 data->offset += size;
1299 data->len -= size;
1303 /* Emit code to move a block Y to a block X. This may be done with
1304 string-move instructions, with multiple scalar move instructions,
1305 or with a library call.
1307 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1308 SIZE is an rtx that says how long they are.
1309 ALIGN is the maximum alignment we can assume they have.
1310 METHOD describes what kind of copy this is, and what mechanisms may be used.
1312 Return the address of the new block, if memcpy is called and returns it,
1313 0 otherwise. */
1315 rtx
1316 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1318 bool may_use_call;
1319 rtx retval = 0;
1320 unsigned int align;
1322 switch (method)
1324 case BLOCK_OP_NORMAL:
1325 may_use_call = true;
1326 break;
1328 case BLOCK_OP_CALL_PARM:
1329 may_use_call = block_move_libcall_safe_for_call_parm ();
1331 /* Make inhibit_defer_pop nonzero around the library call
1332 to force it to pop the arguments right away. */
1333 NO_DEFER_POP;
1334 break;
1336 case BLOCK_OP_NO_LIBCALL:
1337 may_use_call = false;
1338 break;
1340 default:
1341 abort ();
1344 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1346 if (GET_MODE (x) != BLKmode)
1347 abort ();
1348 if (GET_MODE (y) != BLKmode)
1349 abort ();
1351 x = protect_from_queue (x, 1);
1352 y = protect_from_queue (y, 0);
1353 size = protect_from_queue (size, 0);
1355 if (GET_CODE (x) != MEM)
1356 abort ();
1357 if (GET_CODE (y) != MEM)
1358 abort ();
1359 if (size == 0)
1360 abort ();
1362 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1363 can be incorrect is coming from __builtin_memcpy. */
1364 if (GET_CODE (size) == CONST_INT)
1366 if (INTVAL (size) == 0)
1367 return 0;
1369 x = shallow_copy_rtx (x);
1370 y = shallow_copy_rtx (y);
1371 set_mem_size (x, size);
1372 set_mem_size (y, size);
1375 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1376 move_by_pieces (x, y, INTVAL (size), align, 0);
1377 else if (emit_block_move_via_movstr (x, y, size, align))
1379 else if (may_use_call)
1380 retval = emit_block_move_via_libcall (x, y, size);
1381 else
1382 emit_block_move_via_loop (x, y, size, align);
1384 if (method == BLOCK_OP_CALL_PARM)
1385 OK_DEFER_POP;
1387 return retval;
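/* A minimal usage sketch, assuming two BLKmode MEMs DST and SRC and a
   hypothetical constant byte count N: a plain structure copy is emitted with

       emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM is used instead when the copy sets up an outgoing
   argument, so that a libcall cannot clobber already-pushed parameters, and
   BLOCK_OP_NO_LIBCALL when calling memcpy is not allowed at all.  */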
1390 /* A subroutine of emit_block_move. Returns true if calling the
1391 block move libcall will not clobber any parameters which may have
1392 already been placed on the stack. */
1394 static bool
1395 block_move_libcall_safe_for_call_parm (void)
1397 if (PUSH_ARGS)
1398 return true;
1399 else
1401 /* Check to see whether memcpy takes all register arguments. */
1402 static enum {
1403 takes_regs_uninit, takes_regs_no, takes_regs_yes
1404 } takes_regs = takes_regs_uninit;
1406 switch (takes_regs)
1408 case takes_regs_uninit:
1410 CUMULATIVE_ARGS args_so_far;
1411 tree fn, arg;
1413 fn = emit_block_move_libcall_fn (false);
1414 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1416 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1417 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1419 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1420 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1421 if (!tmp || !REG_P (tmp))
1422 goto fail_takes_regs;
1423 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1424 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1425 NULL_TREE, 1))
1426 goto fail_takes_regs;
1427 #endif
1428 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1431 takes_regs = takes_regs_yes;
1432 /* FALLTHRU */
1434 case takes_regs_yes:
1435 return true;
1437 fail_takes_regs:
1438 takes_regs = takes_regs_no;
1439 /* FALLTHRU */
1440 case takes_regs_no:
1441 return false;
1443 default:
1444 abort ();
1449 /* A subroutine of emit_block_move. Expand a movstr pattern;
1450 return true if successful. */
1452 static bool
1453 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1455 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1456 enum machine_mode mode;
1458 /* Since this is a move insn, we don't care about volatility. */
1459 volatile_ok = 1;
1461 /* Try the most limited insn first, because there's no point
1462 including more than one in the machine description unless
1463 the more limited one has some advantage. */
1465 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1466 mode = GET_MODE_WIDER_MODE (mode))
1468 enum insn_code code = movstr_optab[(int) mode];
1469 insn_operand_predicate_fn pred;
1471 if (code != CODE_FOR_nothing
1472 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1473 here because if SIZE is less than the mode mask, as it is
1474 returned by the macro, it will definitely be less than the
1475 actual mode mask. */
1476 && ((GET_CODE (size) == CONST_INT
1477 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1478 <= (GET_MODE_MASK (mode) >> 1)))
1479 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1480 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1481 || (*pred) (x, BLKmode))
1482 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1483 || (*pred) (y, BLKmode))
1484 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1485 || (*pred) (opalign, VOIDmode)))
1487 rtx op2;
1488 rtx last = get_last_insn ();
1489 rtx pat;
1491 op2 = convert_to_mode (mode, size, 1);
1492 pred = insn_data[(int) code].operand[2].predicate;
1493 if (pred != 0 && ! (*pred) (op2, mode))
1494 op2 = copy_to_mode_reg (mode, op2);
1496 /* ??? When called via emit_block_move_for_call, it'd be
1497 nice if there were some way to inform the backend, so
1498 that it doesn't fail the expansion because it thinks
1499 emitting the libcall would be more efficient. */
1501 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1502 if (pat)
1504 emit_insn (pat);
1505 volatile_ok = 0;
1506 return true;
1508 else
1509 delete_insns_since (last);
1513 volatile_ok = 0;
1514 return false;
1517 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1518 Return the return value from memcpy, 0 otherwise. */
1520 static rtx
1521 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1523 rtx dst_addr, src_addr;
1524 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1525 enum machine_mode size_mode;
1526 rtx retval;
1528 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1530 It is unsafe to save the value generated by protect_from_queue and reuse
1531 it later. Consider what happens if emit_queue is called before the
1532 return value from protect_from_queue is used.
1534 Expansion of the CALL_EXPR below will call emit_queue before we are
1535 finished emitting RTL for argument setup. So if we are not careful we
1536 could get the wrong value for an argument.
1538 To avoid this problem we go ahead and emit code to copy the addresses of
1539 DST and SRC and SIZE into new pseudos. We can then place those new
1540 pseudos into an RTL_EXPR and use them later, even after a call to
1541 emit_queue.
1543 Note this is not strictly needed for library calls since they do not call
1544 emit_queue before loading their arguments. However, we may need to have
1545 library calls call emit_queue in the future since failing to do so could
1546 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1547 arguments in registers. */
1549 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1550 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1552 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1553 src_addr = convert_memory_address (ptr_mode, src_addr);
1555 dst_tree = make_tree (ptr_type_node, dst_addr);
1556 src_tree = make_tree (ptr_type_node, src_addr);
1558 if (TARGET_MEM_FUNCTIONS)
1559 size_mode = TYPE_MODE (sizetype);
1560 else
1561 size_mode = TYPE_MODE (unsigned_type_node);
1563 size = convert_to_mode (size_mode, size, 1);
1564 size = copy_to_mode_reg (size_mode, size);
1566 /* It is incorrect to use the libcall calling conventions to call
1567 memcpy in this context. This could be a user call to memcpy and
1568 the user may wish to examine the return value from memcpy. For
1569 targets where libcalls and normal calls have different conventions
1570 for returning pointers, we could end up generating incorrect code.
1572 For convenience, we generate the call to bcopy this way as well. */
1574 if (TARGET_MEM_FUNCTIONS)
1575 size_tree = make_tree (sizetype, size);
1576 else
1577 size_tree = make_tree (unsigned_type_node, size);
1579 fn = emit_block_move_libcall_fn (true);
1580 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1581 if (TARGET_MEM_FUNCTIONS)
1583 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1584 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1586 else
1588 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1589 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1592 /* Now we have to build up the CALL_EXPR itself. */
1593 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1594 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1595 call_expr, arg_list, NULL_TREE);
1597 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1599 /* If we are initializing a readonly value, show the above call clobbered
1600 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1601 the delay slot scheduler might overlook conflicts and take nasty
1602 decisions. */
1603 if (RTX_UNCHANGING_P (dst))
1604 add_function_usage_to
1605 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1606 gen_rtx_CLOBBER (VOIDmode, dst),
1607 NULL_RTX));
1609 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1612 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1613 for the function we use for block copies. The first time FOR_CALL
1614 is true, we call assemble_external. */
1616 static GTY(()) tree block_move_fn;
1618 void
1619 init_block_move_fn (const char *asmspec)
1621 if (!block_move_fn)
1623 tree args, fn;
1625 if (TARGET_MEM_FUNCTIONS)
1627 fn = get_identifier ("memcpy");
1628 args = build_function_type_list (ptr_type_node, ptr_type_node,
1629 const_ptr_type_node, sizetype,
1630 NULL_TREE);
1632 else
1634 fn = get_identifier ("bcopy");
1635 args = build_function_type_list (void_type_node, const_ptr_type_node,
1636 ptr_type_node, unsigned_type_node,
1637 NULL_TREE);
1640 fn = build_decl (FUNCTION_DECL, fn, args);
1641 DECL_EXTERNAL (fn) = 1;
1642 TREE_PUBLIC (fn) = 1;
1643 DECL_ARTIFICIAL (fn) = 1;
1644 TREE_NOTHROW (fn) = 1;
1646 block_move_fn = fn;
1649 if (asmspec)
1651 SET_DECL_RTL (block_move_fn, NULL_RTX);
1652 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1656 static tree
1657 emit_block_move_libcall_fn (int for_call)
1659 static bool emitted_extern;
1661 if (!block_move_fn)
1662 init_block_move_fn (NULL);
1664 if (for_call && !emitted_extern)
1666 emitted_extern = true;
1667 make_decl_rtl (block_move_fn, NULL);
1668 assemble_external (block_move_fn);
1671 return block_move_fn;
1674 /* A subroutine of emit_block_move. Copy the data via an explicit
1675 loop. This is used only when libcalls are forbidden. */
1676 /* ??? It'd be nice to copy in hunks larger than QImode. */
1678 static void
1679 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1680 unsigned int align ATTRIBUTE_UNUSED)
1682 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1683 enum machine_mode iter_mode;
1685 iter_mode = GET_MODE (size);
1686 if (iter_mode == VOIDmode)
1687 iter_mode = word_mode;
1689 top_label = gen_label_rtx ();
1690 cmp_label = gen_label_rtx ();
1691 iter = gen_reg_rtx (iter_mode);
1693 emit_move_insn (iter, const0_rtx);
1695 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1696 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1697 do_pending_stack_adjust ();
1699 emit_note (NOTE_INSN_LOOP_BEG);
1701 emit_jump (cmp_label);
1702 emit_label (top_label);
1704 tmp = convert_modes (Pmode, iter_mode, iter, true);
1705 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1706 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1707 x = change_address (x, QImode, x_addr);
1708 y = change_address (y, QImode, y_addr);
1710 emit_move_insn (x, y);
1712 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1713 true, OPTAB_LIB_WIDEN);
1714 if (tmp != iter)
1715 emit_move_insn (iter, tmp);
1717 emit_note (NOTE_INSN_LOOP_CONT);
1718 emit_label (cmp_label);
1720 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1721 true, top_label);
1723 emit_note (NOTE_INSN_LOOP_END);
1726 /* Copy all or part of a value X into registers starting at REGNO.
1727 The number of registers to be filled is NREGS. */
1729 void
1730 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1732 int i;
1733 #ifdef HAVE_load_multiple
1734 rtx pat;
1735 rtx last;
1736 #endif
1738 if (nregs == 0)
1739 return;
1741 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1742 x = validize_mem (force_const_mem (mode, x));
1744 /* See if the machine can do this with a load multiple insn. */
1745 #ifdef HAVE_load_multiple
1746 if (HAVE_load_multiple)
1748 last = get_last_insn ();
1749 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1750 GEN_INT (nregs));
1751 if (pat)
1753 emit_insn (pat);
1754 return;
1756 else
1757 delete_insns_since (last);
1759 #endif
1761 for (i = 0; i < nregs; i++)
1762 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1763 operand_subword_force (x, i, mode));
1766 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1767 The number of registers to be filled is NREGS. */
1769 void
1770 move_block_from_reg (int regno, rtx x, int nregs)
1772 int i;
1774 if (nregs == 0)
1775 return;
1777 /* See if the machine can do this with a store multiple insn. */
1778 #ifdef HAVE_store_multiple
1779 if (HAVE_store_multiple)
1781 rtx last = get_last_insn ();
1782 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1783 GEN_INT (nregs));
1784 if (pat)
1786 emit_insn (pat);
1787 return;
1789 else
1790 delete_insns_since (last);
1792 #endif
1794 for (i = 0; i < nregs; i++)
1796 rtx tem = operand_subword (x, i, 1, BLKmode);
1798 if (tem == 0)
1799 abort ();
1801 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1805 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1806 ORIG, where ORIG is a non-consecutive group of registers represented by
1807 a PARALLEL. The clone is identical to the original except in that the
1808 original set of registers is replaced by a new set of pseudo registers.
1809 The new set has the same modes as the original set. */
1811 rtx
1812 gen_group_rtx (rtx orig)
1814 int i, length;
1815 rtx *tmps;
1817 if (GET_CODE (orig) != PARALLEL)
1818 abort ();
1820 length = XVECLEN (orig, 0);
1821 tmps = alloca (sizeof (rtx) * length);
1823 /* Skip a NULL entry in first slot. */
1824 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1826 if (i)
1827 tmps[0] = 0;
1829 for (; i < length; i++)
1831 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1832 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1834 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1837 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
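/* For illustration, a group PARALLEL as handled by gen_group_rtx,
   emit_group_load and emit_group_move pairs each register with its byte
   offset in the block; a hypothetical 16-byte value in two DImode registers
   might look like

       (parallel [(expr_list (reg:DI 0) (const_int 0))
                  (expr_list (reg:DI 1) (const_int 8))])

   with a NULL first entry when the value also lives partly on the stack,
   as the surrounding code checks for.  */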
1840 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1841 where DST is non-consecutive registers represented by a PARALLEL.
1842 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1843 if not known. */
1845 void
1846 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1848 rtx *tmps, src;
1849 int start, i;
1851 if (GET_CODE (dst) != PARALLEL)
1852 abort ();
1854 /* Check for a NULL entry, used to indicate that the parameter goes
1855 both on the stack and in registers. */
1856 if (XEXP (XVECEXP (dst, 0, 0), 0))
1857 start = 0;
1858 else
1859 start = 1;
1861 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1863 /* Process the pieces. */
1864 for (i = start; i < XVECLEN (dst, 0); i++)
1866 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1867 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1868 unsigned int bytelen = GET_MODE_SIZE (mode);
1869 int shift = 0;
1871 /* Handle trailing fragments that run over the size of the struct. */
1872 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1874 /* Arrange to shift the fragment to where it belongs.
1875 extract_bit_field loads to the lsb of the reg. */
1876 if (
1877 #ifdef BLOCK_REG_PADDING
1878 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1879 == (BYTES_BIG_ENDIAN ? upward : downward)
1880 #else
1881 BYTES_BIG_ENDIAN
1882 #endif
1884 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1885 bytelen = ssize - bytepos;
1886 if (bytelen <= 0)
1887 abort ();
1890 /* If we won't be loading directly from memory, protect the real source
1891 from strange tricks we might play; but make sure that the source can
1892 be loaded directly into the destination. */
1893 src = orig_src;
1894 if (GET_CODE (orig_src) != MEM
1895 && (!CONSTANT_P (orig_src)
1896 || (GET_MODE (orig_src) != mode
1897 && GET_MODE (orig_src) != VOIDmode)))
1899 if (GET_MODE (orig_src) == VOIDmode)
1900 src = gen_reg_rtx (mode);
1901 else
1902 src = gen_reg_rtx (GET_MODE (orig_src));
1904 emit_move_insn (src, orig_src);
1907 /* Optimize the access just a bit. */
1908 if (GET_CODE (src) == MEM
1909 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1910 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1911 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1912 && bytelen == GET_MODE_SIZE (mode))
1914 tmps[i] = gen_reg_rtx (mode);
1915 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1917 else if (GET_CODE (src) == CONCAT)
1919 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1920 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1922 if ((bytepos == 0 && bytelen == slen0)
1923 || (bytepos != 0 && bytepos + bytelen <= slen))
1925 /* The following assumes that the concatenated objects all
1926 have the same size. In this case, a simple calculation
1927 can be used to determine the object and the bit field
1928 to be extracted. */
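/* For example, if SRC is a complex double (a CONCAT of two DFmode
   values on a target where DFmode is 8 bytes, so slen0 == 8),
   bytepos 0 selects the real part and bytepos 8 selects the
   imaginary part.  */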
1929 tmps[i] = XEXP (src, bytepos / slen0);
1930 if (! CONSTANT_P (tmps[i])
1931 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1932 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1933 (bytepos % slen0) * BITS_PER_UNIT,
1934 1, NULL_RTX, mode, mode, ssize);
1936 else if (bytepos == 0)
1938 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1939 emit_move_insn (mem, src);
1940 tmps[i] = adjust_address (mem, mode, 0);
1942 else
1943 abort ();
1945 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1946 SIMD register, which is currently broken. While we get GCC
1947 to emit proper RTL for these cases, let's dump to memory. */
1948 else if (VECTOR_MODE_P (GET_MODE (dst))
1949 && GET_CODE (src) == REG)
1951 int slen = GET_MODE_SIZE (GET_MODE (src));
1952 rtx mem;
1954 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1955 emit_move_insn (mem, src);
1956 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1958 else if (CONSTANT_P (src)
1959 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1960 tmps[i] = src;
1961 else
1962 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1963 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1964 mode, mode, ssize);
1966 if (shift)
1967 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1968 tmps[i], 0, OPTAB_WIDEN);
1971 emit_queue ();
1973 /* Copy the extracted pieces into the proper (probable) hard regs. */
1974 for (i = start; i < XVECLEN (dst, 0); i++)
1975 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1978 /* Emit code to move a block SRC to block DST, where SRC and DST are
1979 non-consecutive groups of registers, each represented by a PARALLEL. */
1981 void
1982 emit_group_move (rtx dst, rtx src)
1984 int i;
1986 if (GET_CODE (src) != PARALLEL
1987 || GET_CODE (dst) != PARALLEL
1988 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1989 abort ();
1991 /* Skip first entry if NULL. */
1992 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1993 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1994 XEXP (XVECEXP (src, 0, i), 0));
1997 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1998 where SRC is non-consecutive registers represented by a PARALLEL.
1999 SSIZE represents the total size of block ORIG_DST, or -1 if not
2000 known. */
2002 void
2003 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2005 rtx *tmps, dst;
2006 int start, i;
2008 if (GET_CODE (src) != PARALLEL)
2009 abort ();
2011 /* Check for a NULL entry, used to indicate that the parameter goes
2012 both on the stack and in registers. */
2013 if (XEXP (XVECEXP (src, 0, 0), 0))
2014 start = 0;
2015 else
2016 start = 1;
2018 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2020 /* Copy the (probable) hard regs into pseudos. */
2021 for (i = start; i < XVECLEN (src, 0); i++)
2023 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2024 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2025 emit_move_insn (tmps[i], reg);
2027 emit_queue ();
2029 /* If we won't be storing directly into memory, protect the real destination
2030 from strange tricks we might play. */
2031 dst = orig_dst;
2032 if (GET_CODE (dst) == PARALLEL)
2034 rtx temp;
2036 /* We can get a PARALLEL dst if there is a conditional expression in
2037 a return statement. In that case, the dst and src are the same,
2038 so no action is necessary. */
2039 if (rtx_equal_p (dst, src))
2040 return;
2042 /* It is unclear if we can ever reach here, but we may as well handle
2043 it. Allocate a temporary, and split this into a store/load to/from
2044 the temporary. */
2046 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2047 emit_group_store (temp, src, type, ssize);
2048 emit_group_load (dst, temp, type, ssize);
2049 return;
2051 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2053 dst = gen_reg_rtx (GET_MODE (orig_dst));
2054 /* Make life a bit easier for combine. */
2055 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2058 /* Process the pieces. */
2059 for (i = start; i < XVECLEN (src, 0); i++)
2061 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2062 enum machine_mode mode = GET_MODE (tmps[i]);
2063 unsigned int bytelen = GET_MODE_SIZE (mode);
2064 rtx dest = dst;
2066 /* Handle trailing fragments that run over the size of the struct. */
2067 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2069 /* store_bit_field always takes its value from the lsb.
2070 Move the fragment to the lsb if it's not already there. */
2071 if (
2072 #ifdef BLOCK_REG_PADDING
2073 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2074 == (BYTES_BIG_ENDIAN ? upward : downward)
2075 #else
2076 BYTES_BIG_ENDIAN
2077 #endif
2080 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2081 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2082 tmps[i], 0, OPTAB_WIDEN);
2084 bytelen = ssize - bytepos;
2087 if (GET_CODE (dst) == CONCAT)
2089 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2090 dest = XEXP (dst, 0);
2091 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2093 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2094 dest = XEXP (dst, 1);
2096 else if (bytepos == 0 && XVECLEN (src, 0))
2098 dest = assign_stack_temp (GET_MODE (dest),
2099 GET_MODE_SIZE (GET_MODE (dest)), 0);
2100 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2101 tmps[i]);
2102 dst = dest;
2103 break;
2105 else
2106 abort ();
2109 /* Optimize the access just a bit. */
2110 if (GET_CODE (dest) == MEM
2111 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2112 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2113 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2114 && bytelen == GET_MODE_SIZE (mode))
2115 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2116 else
2117 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2118 mode, tmps[i], ssize);
2121 emit_queue ();
2123 /* Copy from the pseudo into the (probable) hard reg. */
2124 if (orig_dst != dst)
2125 emit_move_insn (orig_dst, dst);
2128 /* Generate code to copy a BLKmode object of TYPE out of a
2129 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2130 is null, a stack temporary is created. TGTBLK is returned.
2132 The primary purpose of this routine is to handle functions
2133 that return BLKmode structures in registers. Some machines
2134 (the PA for example) want to return all small structures
2135 in registers regardless of the structure's alignment. */
2138 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2140 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2141 rtx src = NULL, dst = NULL;
2142 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2143 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2145 if (tgtblk == 0)
2147 tgtblk = assign_temp (build_qualified_type (type,
2148 (TYPE_QUALS (type)
2149 | TYPE_QUAL_CONST)),
2150 0, 1, 1);
2151 preserve_temp_slots (tgtblk);
2154 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2155 into a new pseudo which is a full word. */
2157 if (GET_MODE (srcreg) != BLKmode
2158 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2159 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2161 /* Structures whose size is not a multiple of a word are aligned
2162 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2163 machine, this means we must skip the empty high order bytes when
2164 calculating the bit offset. */
2165 if (BYTES_BIG_ENDIAN
2166 && bytes % UNITS_PER_WORD)
2167 big_endian_correction
2168 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
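/* As a worked example, on a hypothetical 32-bit big-endian target
   (UNITS_PER_WORD == 4, BITS_PER_WORD == 32) a 6-byte structure gives
   bytes % UNITS_PER_WORD == 2, so big_endian_correction is
   32 - 2 * 8 == 16 and the first extraction below starts 16 bits into
   the first source word.  */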
2170 /* Copy the structure BITSIZE bits at a time.
2172 We could probably emit more efficient code for machines which do not use
2173 strict alignment, but it doesn't seem worth the effort at the current
2174 time. */
2175 for (bitpos = 0, xbitpos = big_endian_correction;
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2179 /* We need a new source operand each time xbitpos is on a
2180 word boundary and when xbitpos == big_endian_correction
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0
2183 || xbitpos == big_endian_correction)
2184 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2185 GET_MODE (srcreg));
2187 /* We need a new destination operand each time bitpos is on
2188 a word boundary. */
2189 if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2192 /* Use xbitpos for the source extraction (right justified) and
2193 bitpos for the destination store (left justified). */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, word_mode, word_mode,
2198 BITS_PER_WORD),
2199 BITS_PER_WORD);
2202 return tgtblk;
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2208 void
2209 use_reg (rtx *call_fusage, rtx reg)
2211 if (GET_CODE (reg) != REG
2212 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2213 abort ();
2215 *call_fusage
2216 = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2220 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
2223 void
2224 use_regs (rtx *call_fusage, int regno, int nregs)
2226 int i;
2228 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2229 abort ();
2231 for (i = 0; i < nregs; i++)
2232 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2235 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2236 PARALLEL REGS. This is for calls that pass values in multiple
2237 non-contiguous locations. The Irix 6 ABI has examples of this. */
2239 void
2240 use_group_regs (rtx *call_fusage, rtx regs)
2242 int i;
2244 for (i = 0; i < XVECLEN (regs, 0); i++)
2246 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2248 /* A NULL entry means the parameter goes both on the stack and in
2249 registers. This can also be a MEM for targets that pass values
2250 partially on the stack and partially in registers. */
2251 if (reg != 0 && GET_CODE (reg) == REG)
2252 use_reg (call_fusage, reg);
2257 /* Determine whether the LEN bytes generated by CONSTFUN can be
2258 stored to memory using several move instructions. CONSTFUNDATA is
2259 a pointer which will be passed as argument in every CONSTFUN call.
2260 ALIGN is maximum alignment we can assume. Return nonzero if a
2261 call to store_by_pieces should succeed. */
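/* A CONSTFUN callback simply maps a byte OFFSET within the block to an
   rtx constant of the requested MODE.  As a hypothetical sketch, a
   callback that reads bytes out of a string literal could be written as

     static rtx
     read_str_constfun (void *data, HOST_WIDE_INT offset,
                        enum machine_mode mode)
     {
       const char *str = data;
       return c_readstr (str + offset, mode);
     }

   using a helper such as c_readstr to assemble the bytes into a
   CONST_INT or CONST_DOUBLE of MODE.  */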
2264 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2265 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2266 void *constfundata, unsigned int align)
2268 unsigned HOST_WIDE_INT max_size, l;
2269 HOST_WIDE_INT offset = 0;
2270 enum machine_mode mode, tmode;
2271 enum insn_code icode;
2272 int reverse;
2273 rtx cst;
2275 if (len == 0)
2276 return 1;
2278 if (! STORE_BY_PIECES_P (len, align))
2279 return 0;
2281 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2282 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2283 align = MOVE_MAX * BITS_PER_UNIT;
2285 /* We would first store what we can in the largest integer mode, then go to
2286 successively smaller modes. */
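/* For instance, with LEN == 7 on a hypothetical 32-bit target where
   STORE_MAX_PIECES == 4 and alignment is sufficient, the loop below
   tries SImode first (one 4-byte store), then HImode (2 bytes), then
   QImode (1 byte), accounting for all 7 bytes.  */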
2288 for (reverse = 0;
2289 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2290 reverse++)
2292 l = len;
2293 mode = VOIDmode;
2294 max_size = STORE_MAX_PIECES + 1;
2295 while (max_size > 1)
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2300 mode = tmode;
2302 if (mode == VOIDmode)
2303 break;
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= GET_MODE_ALIGNMENT (mode))
2309 unsigned int size = GET_MODE_SIZE (mode);
2311 while (l >= size)
2313 if (reverse)
2314 offset -= size;
2316 cst = (*constfun) (constfundata, offset, mode);
2317 if (!LEGITIMATE_CONSTANT_P (cst))
2318 return 0;
2320 if (!reverse)
2321 offset += size;
2323 l -= size;
2327 max_size = GET_MODE_SIZE (mode);
2330 /* The code above should have handled everything. */
2331 if (l != 0)
2332 abort ();
2335 return 1;
2338 /* Generate several move instructions to store LEN bytes generated by
2339 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2340 pointer which will be passed as argument in every CONSTFUN call.
2341 ALIGN is maximum alignment we can assume.
2342 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2343 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2344 stpcpy. */
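/* For example, with LEN == 5, ENDP == 1 makes the returned MEM address
   TO + 5 (the mempcpy convention), while ENDP == 2 yields TO + 4 (the
   stpcpy convention, the last byte written).  */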
2347 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2348 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2349 void *constfundata, unsigned int align, int endp)
2351 struct store_by_pieces data;
2353 if (len == 0)
2355 if (endp == 2)
2356 abort ();
2357 return to;
2360 if (! STORE_BY_PIECES_P (len, align))
2361 abort ();
2362 to = protect_from_queue (to, 1);
2363 data.constfun = constfun;
2364 data.constfundata = constfundata;
2365 data.len = len;
2366 data.to = to;
2367 store_by_pieces_1 (&data, align);
2368 if (endp)
2370 rtx to1;
2372 if (data.reverse)
2373 abort ();
2374 if (data.autinc_to)
2376 if (endp == 2)
2378 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2379 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2380 else
2381 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2382 -1));
2384 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2385 data.offset);
2387 else
2389 if (endp == 2)
2390 --data.offset;
2391 to1 = adjust_address (data.to, QImode, data.offset);
2393 return to1;
2395 else
2396 return data.to;
2399 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2400 rtx with BLKmode). The caller must pass TO through protect_from_queue
2401 before calling. ALIGN is maximum alignment we can assume. */
2403 static void
2404 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2406 struct store_by_pieces data;
2408 if (len == 0)
2409 return;
2411 data.constfun = clear_by_pieces_1;
2412 data.constfundata = NULL;
2413 data.len = len;
2414 data.to = to;
2415 store_by_pieces_1 (&data, align);
2418 /* Callback routine for clear_by_pieces.
2419 Return const0_rtx unconditionally. */
2421 static rtx
2422 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2423 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2424 enum machine_mode mode ATTRIBUTE_UNUSED)
2426 return const0_rtx;
2429 /* Subroutine of clear_by_pieces and store_by_pieces.
2430 Generate several move instructions to store LEN bytes of block TO. (A MEM
2431 rtx with BLKmode). The caller must pass TO through protect_from_queue
2432 before calling. ALIGN is maximum alignment we can assume. */
2434 static void
2435 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2436 unsigned int align ATTRIBUTE_UNUSED)
2438 rtx to_addr = XEXP (data->to, 0);
2439 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2440 enum machine_mode mode = VOIDmode, tmode;
2441 enum insn_code icode;
2443 data->offset = 0;
2444 data->to_addr = to_addr;
2445 data->autinc_to
2446 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2447 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2449 data->explicit_inc_to = 0;
2450 data->reverse
2451 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2452 if (data->reverse)
2453 data->offset = data->len;
2455 /* If storing requires more than two move insns,
2456 copy addresses to registers (to make displacements shorter)
2457 and use post-increment if available. */
2458 if (!data->autinc_to
2459 && move_by_pieces_ninsns (data->len, align) > 2)
2461 /* Determine the main mode we'll be using. */
2462 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2463 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2464 if (GET_MODE_SIZE (tmode) < max_size)
2465 mode = tmode;
2467 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2469 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = -1;
2474 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2475 && ! data->autinc_to)
2477 data->to_addr = copy_addr_to_reg (to_addr);
2478 data->autinc_to = 1;
2479 data->explicit_inc_to = 1;
2482 if ( !data->autinc_to && CONSTANT_P (to_addr))
2483 data->to_addr = copy_addr_to_reg (to_addr);
2486 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2487 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2488 align = MOVE_MAX * BITS_PER_UNIT;
2490 /* First store what we can in the largest integer mode, then go to
2491 successively smaller modes. */
2493 while (max_size > 1)
2495 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2496 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2497 if (GET_MODE_SIZE (tmode) < max_size)
2498 mode = tmode;
2500 if (mode == VOIDmode)
2501 break;
2503 icode = mov_optab->handlers[(int) mode].insn_code;
2504 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2505 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2507 max_size = GET_MODE_SIZE (mode);
2510 /* The code above should have handled everything. */
2511 if (data->len != 0)
2512 abort ();
2515 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2516 with move instructions for mode MODE. GENFUN is the gen_... function
2517 to make a move insn for that mode. DATA has all the other info. */
2519 static void
2520 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2521 struct store_by_pieces *data)
2523 unsigned int size = GET_MODE_SIZE (mode);
2524 rtx to1, cst;
2526 while (data->len >= size)
2528 if (data->reverse)
2529 data->offset -= size;
2531 if (data->autinc_to)
2532 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2533 data->offset);
2534 else
2535 to1 = adjust_address (data->to, mode, data->offset);
2537 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2538 emit_insn (gen_add2_insn (data->to_addr,
2539 GEN_INT (-(HOST_WIDE_INT) size)));
2541 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2542 emit_insn ((*genfun) (to1, cst));
2544 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2545 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2547 if (! data->reverse)
2548 data->offset += size;
2550 data->len -= size;
2554 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2555 its length in bytes. */
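/* The strategy, in order of preference: a single move of CONST0_RTX when
   OBJECT is a scalar whose mode size matches SIZE; clear_by_pieces for
   small constant sizes; a clrstr machine pattern if the target provides
   one; otherwise a library call to memset (or bzero).  */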
2558 clear_storage (rtx object, rtx size)
2560 rtx retval = 0;
2561 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2562 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2564 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2565 just move a zero. Otherwise, do this a piece at a time. */
2566 if (GET_MODE (object) != BLKmode
2567 && GET_CODE (size) == CONST_INT
2568 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2569 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2570 else
2572 object = protect_from_queue (object, 1);
2573 size = protect_from_queue (size, 0);
2575 if (size == const0_rtx)
2577 else if (GET_CODE (size) == CONST_INT
2578 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2579 clear_by_pieces (object, INTVAL (size), align);
2580 else if (clear_storage_via_clrstr (object, size, align))
2582 else
2583 retval = clear_storage_via_libcall (object, size);
2586 return retval;
2589 /* A subroutine of clear_storage. Expand a clrstr pattern;
2590 return true if successful. */
2592 static bool
2593 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2595 /* Try the most limited insn first, because there's no point
2596 including more than one in the machine description unless
2597 the more limited one has some advantage. */
2599 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2600 enum machine_mode mode;
2602 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2603 mode = GET_MODE_WIDER_MODE (mode))
2605 enum insn_code code = clrstr_optab[(int) mode];
2606 insn_operand_predicate_fn pred;
2608 if (code != CODE_FOR_nothing
2609 /* We don't need MODE to be narrower than
2610 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2611 the mode mask, as it is returned by the macro, it will
2612 definitely be less than the actual mode mask. */
2613 && ((GET_CODE (size) == CONST_INT
2614 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2615 <= (GET_MODE_MASK (mode) >> 1)))
2616 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2617 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2618 || (*pred) (object, BLKmode))
2619 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2620 || (*pred) (opalign, VOIDmode)))
2622 rtx op1;
2623 rtx last = get_last_insn ();
2624 rtx pat;
2626 op1 = convert_to_mode (mode, size, 1);
2627 pred = insn_data[(int) code].operand[1].predicate;
2628 if (pred != 0 && ! (*pred) (op1, mode))
2629 op1 = copy_to_mode_reg (mode, op1);
2631 pat = GEN_FCN ((int) code) (object, op1, opalign);
2632 if (pat)
2634 emit_insn (pat);
2635 return true;
2637 else
2638 delete_insns_since (last);
2642 return false;
2645 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2646 Return the return value of memset, 0 otherwise. */
2648 static rtx
2649 clear_storage_via_libcall (rtx object, rtx size)
2651 tree call_expr, arg_list, fn, object_tree, size_tree;
2652 enum machine_mode size_mode;
2653 rtx retval;
2655 /* OBJECT or SIZE may have been passed through protect_from_queue.
2657 It is unsafe to save the value generated by protect_from_queue
2658 and reuse it later. Consider what happens if emit_queue is
2659 called before the return value from protect_from_queue is used.
2661 Expansion of the CALL_EXPR below will call emit_queue before
2662 we are finished emitting RTL for argument setup. So if we are
2663 not careful we could get the wrong value for an argument.
2665 To avoid this problem we go ahead and emit code to copy OBJECT
2666 and SIZE into new pseudos. We can then place those new pseudos
2667 into an RTL_EXPR and use them later, even after a call to
2668 emit_queue.
2670 Note this is not strictly needed for library calls since they
2671 do not call emit_queue before loading their arguments. However,
2672 we may need to have library calls call emit_queue in the future
2673 since failing to do so could cause problems for targets which
2674 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2676 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2678 if (TARGET_MEM_FUNCTIONS)
2679 size_mode = TYPE_MODE (sizetype);
2680 else
2681 size_mode = TYPE_MODE (unsigned_type_node);
2682 size = convert_to_mode (size_mode, size, 1);
2683 size = copy_to_mode_reg (size_mode, size);
2685 /* It is incorrect to use the libcall calling conventions to call
2686 memset in this context. This could be a user call to memset and
2687 the user may wish to examine the return value from memset. For
2688 targets where libcalls and normal calls have different conventions
2689 for returning pointers, we could end up generating incorrect code.
2691 For convenience, we generate the call to bzero this way as well. */
2693 object_tree = make_tree (ptr_type_node, object);
2694 if (TARGET_MEM_FUNCTIONS)
2695 size_tree = make_tree (sizetype, size);
2696 else
2697 size_tree = make_tree (unsigned_type_node, size);
2699 fn = clear_storage_libcall_fn (true);
2700 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2701 if (TARGET_MEM_FUNCTIONS)
2702 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2703 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2705 /* Now we have to build up the CALL_EXPR itself. */
2706 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2710 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2712 /* If we are initializing a readonly value, show the above call
2713 clobbered it. Otherwise, a load from it may erroneously be
2714 hoisted from a loop. */
2715 if (RTX_UNCHANGING_P (object))
2716 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2718 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2721 /* A subroutine of clear_storage_via_libcall. Create the tree node
2722 for the function we use for block clears. The first time FOR_CALL
2723 is true, we call assemble_external. */
2725 static GTY(()) tree block_clear_fn;
2727 void
2728 init_block_clear_fn (const char *asmspec)
2730 if (!block_clear_fn)
2732 tree fn, args;
2734 if (TARGET_MEM_FUNCTIONS)
2736 fn = get_identifier ("memset");
2737 args = build_function_type_list (ptr_type_node, ptr_type_node,
2738 integer_type_node, sizetype,
2739 NULL_TREE);
2741 else
2743 fn = get_identifier ("bzero");
2744 args = build_function_type_list (void_type_node, ptr_type_node,
2745 unsigned_type_node, NULL_TREE);
2748 fn = build_decl (FUNCTION_DECL, fn, args);
2749 DECL_EXTERNAL (fn) = 1;
2750 TREE_PUBLIC (fn) = 1;
2751 DECL_ARTIFICIAL (fn) = 1;
2752 TREE_NOTHROW (fn) = 1;
2754 block_clear_fn = fn;
2757 if (asmspec)
2759 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2760 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2764 static tree
2765 clear_storage_libcall_fn (int for_call)
2767 static bool emitted_extern;
2769 if (!block_clear_fn)
2770 init_block_clear_fn (NULL);
2772 if (for_call && !emitted_extern)
2774 emitted_extern = true;
2775 make_decl_rtl (block_clear_fn, NULL);
2776 assemble_external (block_clear_fn);
2779 return block_clear_fn;
2782 /* Generate code to copy Y into X.
2783 Both Y and X must have the same mode, except that
2784 Y can be a constant with VOIDmode.
2785 This mode cannot be BLKmode; use emit_block_move for that.
2787 Return the last instruction emitted. */
2790 emit_move_insn (rtx x, rtx y)
2792 enum machine_mode mode = GET_MODE (x);
2793 rtx y_cst = NULL_RTX;
2794 rtx last_insn, set;
2796 x = protect_from_queue (x, 1);
2797 y = protect_from_queue (y, 0);
2799 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2800 abort ();
2802 /* Never force constant_p_rtx to memory. */
2803 if (GET_CODE (y) == CONSTANT_P_RTX)
2805 else if (CONSTANT_P (y))
2807 if (optimize
2808 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2809 && (last_insn = compress_float_constant (x, y)))
2810 return last_insn;
2812 y_cst = y;
2814 if (!LEGITIMATE_CONSTANT_P (y))
2816 y = force_const_mem (mode, y);
2818 /* If the target's cannot_force_const_mem prevented the spill,
2819 assume that the target's move expanders will also take care
2820 of the non-legitimate constant. */
2821 if (!y)
2822 y = y_cst;
2826 /* If X or Y are memory references, verify that their addresses are valid
2827 for the machine. */
2828 if (GET_CODE (x) == MEM
2829 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2830 && ! push_operand (x, GET_MODE (x)))
2831 || (flag_force_addr
2832 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2833 x = validize_mem (x);
2835 if (GET_CODE (y) == MEM
2836 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 || (flag_force_addr
2838 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2839 y = validize_mem (y);
2841 if (mode == BLKmode)
2842 abort ();
2844 last_insn = emit_move_insn_1 (x, y);
2846 if (y_cst && GET_CODE (x) == REG
2847 && (set = single_set (last_insn)) != NULL_RTX
2848 && SET_DEST (set) == x
2849 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2850 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2852 return last_insn;
2855 /* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2860 emit_move_insn_1 (rtx x, rtx y)
2862 enum machine_mode mode = GET_MODE (x);
2863 enum machine_mode submode;
2864 enum mode_class class = GET_MODE_CLASS (mode);
2866 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2867 abort ();
2869 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2870 return
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2873 /* Expand complex moves by moving real part and imag part, if possible. */
2874 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2875 && BLKmode != (submode = GET_MODE_INNER (mode))
2876 && (mov_optab->handlers[(int) submode].insn_code
2877 != CODE_FOR_nothing))
2879 /* Don't split destination if it is a stack push. */
2880 int stack = push_operand (x, GET_MODE (x));
2882 #ifdef PUSH_ROUNDING
2883 /* In case we output to the stack, but the size is smaller than the
2884 machine can push exactly, we need to use move instructions. */
2885 if (stack
2886 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2887 != GET_MODE_SIZE (submode)))
2889 rtx temp;
2890 HOST_WIDE_INT offset1, offset2;
2892 /* Do not use anti_adjust_stack, since we don't want to update
2893 stack_pointer_delta. */
2894 temp = expand_binop (Pmode,
2895 #ifdef STACK_GROWS_DOWNWARD
2896 sub_optab,
2897 #else
2898 add_optab,
2899 #endif
2900 stack_pointer_rtx,
2901 GEN_INT
2902 (PUSH_ROUNDING
2903 (GET_MODE_SIZE (GET_MODE (x)))),
2904 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2906 if (temp != stack_pointer_rtx)
2907 emit_move_insn (stack_pointer_rtx, temp);
2909 #ifdef STACK_GROWS_DOWNWARD
2910 offset1 = 0;
2911 offset2 = GET_MODE_SIZE (submode);
2912 #else
2913 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2914 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2915 + GET_MODE_SIZE (submode));
2916 #endif
2918 emit_move_insn (change_address (x, submode,
2919 gen_rtx_PLUS (Pmode,
2920 stack_pointer_rtx,
2921 GEN_INT (offset1))),
2922 gen_realpart (submode, y));
2923 emit_move_insn (change_address (x, submode,
2924 gen_rtx_PLUS (Pmode,
2925 stack_pointer_rtx,
2926 GEN_INT (offset2))),
2927 gen_imagpart (submode, y));
2929 else
2930 #endif
2931 /* If this is a stack push, push the highpart first, so it
2932 will be in the argument order.
2934 In that case, change_address is used only to convert
2935 the mode, not to change the address. */
2936 if (stack)
2938 /* Note that the real part always precedes the imag part in memory
2939 regardless of machine's endianness. */
2940 #ifdef STACK_GROWS_DOWNWARD
2941 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 gen_imagpart (submode, y));
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_realpart (submode, y));
2945 #else
2946 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2947 gen_realpart (submode, y));
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_imagpart (submode, y));
2950 #endif
2952 else
2954 rtx realpart_x, realpart_y;
2955 rtx imagpart_x, imagpart_y;
2957 /* If this is a complex value with each part being smaller than a
2958 word, the usual calling sequence will likely pack the pieces into
2959 a single register. Unfortunately, SUBREG of hard registers only
2960 deals in terms of words, so we have a problem converting input
2961 arguments to the CONCAT of two registers that is used elsewhere
2962 for complex values. If this is before reload, we can copy it into
2963 memory and reload. FIXME, we should see about using extract and
2964 insert on integer registers, but complex short and complex char
2965 variables should be rarely used. */
2966 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2967 && (reload_in_progress | reload_completed) == 0)
2969 int packed_dest_p
2970 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2971 int packed_src_p
2972 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2974 if (packed_dest_p || packed_src_p)
2976 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2977 ? MODE_FLOAT : MODE_INT);
2979 enum machine_mode reg_mode
2980 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2982 if (reg_mode != BLKmode)
2984 rtx mem = assign_stack_temp (reg_mode,
2985 GET_MODE_SIZE (mode), 0);
2986 rtx cmem = adjust_address (mem, mode, 0);
2988 cfun->cannot_inline
2989 = N_("function using short complex types cannot be inline");
2991 if (packed_dest_p)
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2995 emit_move_insn_1 (cmem, y);
2996 return emit_move_insn_1 (sreg, mem);
2998 else
3000 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3002 emit_move_insn_1 (mem, sreg);
3003 return emit_move_insn_1 (x, cmem);
3009 realpart_x = gen_realpart (submode, x);
3010 realpart_y = gen_realpart (submode, y);
3011 imagpart_x = gen_imagpart (submode, x);
3012 imagpart_y = gen_imagpart (submode, y);
3014 /* Show the output dies here. This is necessary for SUBREGs
3015 of pseudos since we cannot track their lifetimes correctly;
3016 hard regs shouldn't appear here except as return values.
3017 We never want to emit such a clobber after reload. */
3018 if (x != y
3019 && ! (reload_in_progress || reload_completed)
3020 && (GET_CODE (realpart_x) == SUBREG
3021 || GET_CODE (imagpart_x) == SUBREG))
3022 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3024 emit_move_insn (realpart_x, realpart_y);
3025 emit_move_insn (imagpart_x, imagpart_y);
3028 return get_last_insn ();
3031 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3032 find a mode to do it in. If we have a movcc, use it. Otherwise,
3033 find the MODE_INT mode of the same width. */
3034 else if (GET_MODE_CLASS (mode) == MODE_CC
3035 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3037 enum insn_code insn_code;
3038 enum machine_mode tmode = VOIDmode;
3039 rtx x1 = x, y1 = y;
3041 if (mode != CCmode
3042 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3043 tmode = CCmode;
3044 else
3045 for (tmode = QImode; tmode != VOIDmode;
3046 tmode = GET_MODE_WIDER_MODE (tmode))
3047 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3048 break;
3050 if (tmode == VOIDmode)
3051 abort ();
3053 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3054 may call change_address which is not appropriate if we were
3055 called when a reload was in progress. We don't have to worry
3056 about changing the address since the size in bytes is supposed to
3057 be the same. Copy the MEM to change the mode and move any
3058 substitutions from the old MEM to the new one. */
3060 if (reload_in_progress)
3062 x = gen_lowpart_common (tmode, x1);
3063 if (x == 0 && GET_CODE (x1) == MEM)
3065 x = adjust_address_nv (x1, tmode, 0);
3066 copy_replacements (x1, x);
3069 y = gen_lowpart_common (tmode, y1);
3070 if (y == 0 && GET_CODE (y1) == MEM)
3072 y = adjust_address_nv (y1, tmode, 0);
3073 copy_replacements (y1, y);
3076 else
3078 x = gen_lowpart (tmode, x);
3079 y = gen_lowpart (tmode, y);
3082 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3083 return emit_insn (GEN_FCN (insn_code) (x, y));
3086 /* Try using a move pattern for the corresponding integer mode. This is
3087 only safe when simplify_subreg can convert MODE constants into integer
3088 constants. At present, it can only do this reliably if the value
3089 fits within a HOST_WIDE_INT. */
3090 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3091 && (submode = int_mode_for_mode (mode)) != BLKmode
3092 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3093 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3094 (simplify_gen_subreg (submode, x, mode, 0),
3095 simplify_gen_subreg (submode, y, mode, 0)));
3097 /* This will handle any multi-word or full-word mode that lacks a move_insn
3098 pattern. However, you will get better code if you define such patterns,
3099 even if they must turn into multiple assembler instructions. */
3100 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3102 rtx last_insn = 0;
3103 rtx seq, inner;
3104 int need_clobber;
3105 int i;
3107 #ifdef PUSH_ROUNDING
3109 /* If X is a push on the stack, do the push now and replace
3110 X with a reference to the stack pointer. */
3111 if (push_operand (x, GET_MODE (x)))
3113 rtx temp;
3114 enum rtx_code code;
3116 /* Do not use anti_adjust_stack, since we don't want to update
3117 stack_pointer_delta. */
3118 temp = expand_binop (Pmode,
3119 #ifdef STACK_GROWS_DOWNWARD
3120 sub_optab,
3121 #else
3122 add_optab,
3123 #endif
3124 stack_pointer_rtx,
3125 GEN_INT
3126 (PUSH_ROUNDING
3127 (GET_MODE_SIZE (GET_MODE (x)))),
3128 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3130 if (temp != stack_pointer_rtx)
3131 emit_move_insn (stack_pointer_rtx, temp);
3133 code = GET_CODE (XEXP (x, 0));
3135 /* Just hope that small offsets off SP are OK. */
3136 if (code == POST_INC)
3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3138 GEN_INT (-((HOST_WIDE_INT)
3139 GET_MODE_SIZE (GET_MODE (x)))));
3140 else if (code == POST_DEC)
3141 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3142 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3143 else
3144 temp = stack_pointer_rtx;
3146 x = change_address (x, VOIDmode, temp);
3148 #endif
3150 /* If we are in reload, see if either operand is a MEM whose address
3151 is scheduled for replacement. */
3152 if (reload_in_progress && GET_CODE (x) == MEM
3153 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3154 x = replace_equiv_address_nv (x, inner);
3155 if (reload_in_progress && GET_CODE (y) == MEM
3156 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3157 y = replace_equiv_address_nv (y, inner);
3159 start_sequence ();
3161 need_clobber = 0;
3162 for (i = 0;
3163 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3164 i++)
3166 rtx xpart = operand_subword (x, i, 1, mode);
3167 rtx ypart = operand_subword (y, i, 1, mode);
3169 /* If we can't get a part of Y, put Y into memory if it is a
3170 constant. Otherwise, force it into a register. If we still
3171 can't get a part of Y, abort. */
3172 if (ypart == 0 && CONSTANT_P (y))
3174 y = force_const_mem (mode, y);
3175 ypart = operand_subword (y, i, 1, mode);
3177 else if (ypart == 0)
3178 ypart = operand_subword_force (y, i, mode);
3180 if (xpart == 0 || ypart == 0)
3181 abort ();
3183 need_clobber |= (GET_CODE (xpart) == SUBREG);
3185 last_insn = emit_move_insn (xpart, ypart);
3188 seq = get_insns ();
3189 end_sequence ();
3191 /* Show the output dies here. This is necessary for SUBREGs
3192 of pseudos since we cannot track their lifetimes correctly;
3193 hard regs shouldn't appear here except as return values.
3194 We never want to emit such a clobber after reload. */
3195 if (x != y
3196 && ! (reload_in_progress || reload_completed)
3197 && need_clobber != 0)
3198 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3200 emit_insn (seq);
3202 return last_insn;
3204 else
3205 abort ();
3208 /* If Y is representable exactly in a narrower mode, and the target can
3209 perform the extension directly from constant or memory, then emit the
3210 move as an extension. */
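/* For example, a move of the DFmode constant 1.0 can be emitted as an
   SF-to-DF extension (e.g. via an extendsfdf2 pattern) of the SFmode
   constant 1.0, since 1.0 truncates to SFmode exactly; a value such as
   0.1 fails the exact_real_truncate test below and is left alone.  */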
3212 static rtx
3213 compress_float_constant (rtx x, rtx y)
3215 enum machine_mode dstmode = GET_MODE (x);
3216 enum machine_mode orig_srcmode = GET_MODE (y);
3217 enum machine_mode srcmode;
3218 REAL_VALUE_TYPE r;
3220 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3222 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3223 srcmode != orig_srcmode;
3224 srcmode = GET_MODE_WIDER_MODE (srcmode))
3226 enum insn_code ic;
3227 rtx trunc_y, last_insn;
3229 /* Skip if the target can't extend this way. */
3230 ic = can_extend_p (dstmode, srcmode, 0);
3231 if (ic == CODE_FOR_nothing)
3232 continue;
3234 /* Skip if the narrowed value isn't exact. */
3235 if (! exact_real_truncate (srcmode, &r))
3236 continue;
3238 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3240 if (LEGITIMATE_CONSTANT_P (trunc_y))
3242 /* Skip if the target needs extra instructions to perform
3243 the extension. */
3244 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3245 continue;
3247 else if (float_extend_from_mem[dstmode][srcmode])
3248 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3249 else
3250 continue;
3252 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3253 last_insn = get_last_insn ();
3255 if (GET_CODE (x) == REG)
3256 set_unique_reg_note (last_insn, REG_EQUAL, y);
3258 return last_insn;
3261 return NULL_RTX;
3264 /* Pushing data onto the stack. */
3266 /* Push a block of length SIZE (perhaps variable)
3267 and return an rtx to address the beginning of the block.
3268 Note that it is not possible for the value returned to be a QUEUED.
3269 The value may be virtual_outgoing_args_rtx.
3271 EXTRA is the number of bytes of padding to push in addition to SIZE.
3272 BELOW nonzero means this padding comes at low addresses;
3273 otherwise, the padding comes at high addresses. */
3276 push_block (rtx size, int extra, int below)
3278 rtx temp;
3280 size = convert_modes (Pmode, ptr_mode, size, 1);
3281 if (CONSTANT_P (size))
3282 anti_adjust_stack (plus_constant (size, extra));
3283 else if (GET_CODE (size) == REG && extra == 0)
3284 anti_adjust_stack (size);
3285 else
3287 temp = copy_to_mode_reg (Pmode, size);
3288 if (extra != 0)
3289 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3290 temp, 0, OPTAB_LIB_WIDEN);
3291 anti_adjust_stack (temp);
3294 #ifndef STACK_GROWS_DOWNWARD
3295 if (0)
3296 #else
3297 if (1)
3298 #endif
3300 temp = virtual_outgoing_args_rtx;
3301 if (extra != 0 && below)
3302 temp = plus_constant (temp, extra);
3304 else
3306 if (GET_CODE (size) == CONST_INT)
3307 temp = plus_constant (virtual_outgoing_args_rtx,
3308 -INTVAL (size) - (below ? 0 : extra));
3309 else if (extra != 0 && !below)
3310 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3311 negate_rtx (Pmode, plus_constant (size, extra)));
3312 else
3313 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3314 negate_rtx (Pmode, size));
3317 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3320 #ifdef PUSH_ROUNDING
3322 /* Emit single push insn. */
3324 static void
3325 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3327 rtx dest_addr;
3328 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3329 rtx dest;
3330 enum insn_code icode;
3331 insn_operand_predicate_fn pred;
3333 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3334 /* If there is a push pattern, use it. Otherwise try the old way of
3335 throwing a MEM representing the push operation to the move expander. */
3336 icode = push_optab->handlers[(int) mode].insn_code;
3337 if (icode != CODE_FOR_nothing)
3339 if (((pred = insn_data[(int) icode].operand[0].predicate)
3340 && !((*pred) (x, mode))))
3341 x = force_reg (mode, x);
3342 emit_insn (GEN_FCN (icode) (x));
3343 return;
3345 if (GET_MODE_SIZE (mode) == rounded_size)
3346 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3347 /* If we are to pad downward, adjust the stack pointer first and
3348 then store X into the stack location using an offset. This is
3349 because emit_move_insn does not know how to pad; it does not have
3350 access to type. */
3351 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3353 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3354 HOST_WIDE_INT offset;
3356 emit_move_insn (stack_pointer_rtx,
3357 expand_binop (Pmode,
3358 #ifdef STACK_GROWS_DOWNWARD
3359 sub_optab,
3360 #else
3361 add_optab,
3362 #endif
3363 stack_pointer_rtx,
3364 GEN_INT (rounded_size),
3365 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3367 offset = (HOST_WIDE_INT) padding_size;
3368 #ifdef STACK_GROWS_DOWNWARD
3369 if (STACK_PUSH_CODE == POST_DEC)
3370 /* We have already decremented the stack pointer, so get the
3371 previous value. */
3372 offset += (HOST_WIDE_INT) rounded_size;
3373 #else
3374 if (STACK_PUSH_CODE == POST_INC)
3375 /* We have already incremented the stack pointer, so get the
3376 previous value. */
3377 offset -= (HOST_WIDE_INT) rounded_size;
3378 #endif
3379 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3381 else
3383 #ifdef STACK_GROWS_DOWNWARD
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3386 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3387 #else
3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3390 GEN_INT (rounded_size));
3391 #endif
3392 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3395 dest = gen_rtx_MEM (mode, dest_addr);
3397 if (type != 0)
3399 set_mem_attributes (dest, type, 1);
3401 if (flag_optimize_sibling_calls)
3402 /* Function incoming arguments may overlap with sibling call
3403 outgoing arguments and we cannot allow reordering of reads
3404 from function arguments with stores to outgoing arguments
3405 of sibling calls. */
3406 set_mem_alias_set (dest, 0);
3408 emit_move_insn (dest, x);
3410 #endif
3412 /* Generate code to push X onto the stack, assuming it has mode MODE and
3413 type TYPE.
3414 MODE is redundant except when X is a CONST_INT (since they don't
3415 carry mode info).
3416 SIZE is an rtx for the size of data to be copied (in bytes),
3417 needed only if X is BLKmode.
3419 ALIGN (in bits) is maximum alignment we can assume.
3421 If PARTIAL and REG are both nonzero, then copy that many of the first
3422 words of X into registers starting with REG, and push the rest of X.
3423 The amount of space pushed is decreased by PARTIAL words,
3424 rounded *down* to a multiple of PARM_BOUNDARY.
3425 REG must be a hard register in this case.
3426 If REG is zero but PARTIAL is not, take all other actions for an
3427 argument partially in registers, but do not actually load any
3428 registers.
3430 EXTRA is the amount in bytes of extra space to leave next to this arg.
3431 This is ignored if an argument block has already been allocated.
3433 On a machine that lacks real push insns, ARGS_ADDR is the address of
3434 the bottom of the argument block for this call. We use indexing off there
3435 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3436 argument block has not been preallocated.
3438 ARGS_SO_FAR is the size of args previously pushed for this call.
3440 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3441 for arguments passed in registers. If nonzero, it will be the number
3442 of bytes required. */
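/* As a purely illustrative call (all values hypothetical), pushing a
   word-sized scalar VAL with no partial-register part, no preallocated
   argument block and no extra padding might look like

     emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);  */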
3444 void
3445 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3446 unsigned int align, int partial, rtx reg, int extra,
3447 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3448 rtx alignment_pad)
3450 rtx xinner;
3451 enum direction stack_direction
3452 #ifdef STACK_GROWS_DOWNWARD
3453 = downward;
3454 #else
3455 = upward;
3456 #endif
3458 /* Decide where to pad the argument: `downward' for below,
3459 `upward' for above, or `none' for don't pad it.
3460 Default is below for small data on big-endian machines; else above. */
3461 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3463 /* Invert direction if stack is post-decrement.
3464 FIXME: why? */
3465 if (STACK_PUSH_CODE == POST_DEC)
3466 if (where_pad != none)
3467 where_pad = (where_pad == downward ? upward : downward);
3469 xinner = x = protect_from_queue (x, 0);
3471 if (mode == BLKmode)
3473 /* Copy a block into the stack, entirely or partially. */
3475 rtx temp;
3476 int used = partial * UNITS_PER_WORD;
3477 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3478 int skip;
3480 if (size == 0)
3481 abort ();
3483 used -= offset;
3485 /* USED is now the # of bytes we need not copy to the stack
3486 because registers will take care of them. */
3488 if (partial != 0)
3489 xinner = adjust_address (xinner, BLKmode, used);
3491 /* If the partial register-part of the arg counts in its stack size,
3492 skip the part of stack space corresponding to the registers.
3493 Otherwise, start copying to the beginning of the stack space,
3494 by setting SKIP to 0. */
3495 skip = (reg_parm_stack_space == 0) ? 0 : used;
3497 #ifdef PUSH_ROUNDING
3498 /* Do it with several push insns if that doesn't take lots of insns
3499 and if there is no difficulty with push insns that skip bytes
3500 on the stack for alignment purposes. */
3501 if (args_addr == 0
3502 && PUSH_ARGS
3503 && GET_CODE (size) == CONST_INT
3504 && skip == 0
3505 && MEM_ALIGN (xinner) >= align
3506 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3507 /* Here we avoid the case of a structure whose weak alignment
3508 forces many pushes of a small amount of data,
3509 and such small pushes do rounding that causes trouble. */
3510 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3511 || align >= BIGGEST_ALIGNMENT
3512 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3513 == (align / BITS_PER_UNIT)))
3514 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3516 /* Push padding now if padding above and stack grows down,
3517 or if padding below and stack grows up.
3518 But if space already allocated, this has already been done. */
3519 if (extra && args_addr == 0
3520 && where_pad != none && where_pad != stack_direction)
3521 anti_adjust_stack (GEN_INT (extra));
3523 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3525 else
3526 #endif /* PUSH_ROUNDING */
3528 rtx target;
3530 /* Otherwise make space on the stack and copy the data
3531 to the address of that space. */
3533 /* Deduct words put into registers from the size we must copy. */
3534 if (partial != 0)
3536 if (GET_CODE (size) == CONST_INT)
3537 size = GEN_INT (INTVAL (size) - used);
3538 else
3539 size = expand_binop (GET_MODE (size), sub_optab, size,
3540 GEN_INT (used), NULL_RTX, 0,
3541 OPTAB_LIB_WIDEN);
3544 /* Get the address of the stack space.
3545 In this case, we do not deal with EXTRA separately.
3546 A single stack adjust will do. */
3547 if (! args_addr)
3549 temp = push_block (size, extra, where_pad == downward);
3550 extra = 0;
3552 else if (GET_CODE (args_so_far) == CONST_INT)
3553 temp = memory_address (BLKmode,
3554 plus_constant (args_addr,
3555 skip + INTVAL (args_so_far)));
3556 else
3557 temp = memory_address (BLKmode,
3558 plus_constant (gen_rtx_PLUS (Pmode,
3559 args_addr,
3560 args_so_far),
3561 skip));
3563 if (!ACCUMULATE_OUTGOING_ARGS)
3565 /* If the source is referenced relative to the stack pointer,
3566 copy it to another register to stabilize it. We do not need
3567 to do this if we know that we won't be changing sp. */
3569 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3570 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3571 temp = copy_to_reg (temp);
3574 target = gen_rtx_MEM (BLKmode, temp);
3576 if (type != 0)
3578 set_mem_attributes (target, type, 1);
3579 /* Function incoming arguments may overlap with sibling call
3580 outgoing arguments and we cannot allow reordering of reads
3581 from function arguments with stores to outgoing arguments
3582 of sibling calls. */
3583 set_mem_alias_set (target, 0);
3586 /* ALIGN may well be better aligned than TYPE, e.g. due to
3587 PARM_BOUNDARY. Assume the caller isn't lying. */
3588 set_mem_align (target, align);
3590 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3593 else if (partial > 0)
3595 /* Scalar partly in registers. */
3597 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3598 int i;
3599 int not_stack;
3600 /* # words of start of argument
3601 that we must make space for but need not store. */
3602 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3603 int args_offset = INTVAL (args_so_far);
3604 int skip;
3606 /* Push padding now if padding above and stack grows down,
3607 or if padding below and stack grows up.
3608 But if space already allocated, this has already been done. */
3609 if (extra && args_addr == 0
3610 && where_pad != none && where_pad != stack_direction)
3611 anti_adjust_stack (GEN_INT (extra));
3613 /* If we make space by pushing it, we might as well push
3614 the real data. Otherwise, we can leave OFFSET nonzero
3615 and leave the space uninitialized. */
3616 if (args_addr == 0)
3617 offset = 0;
3619 /* Now NOT_STACK gets the number of words that we don't need to
3620 allocate on the stack. */
3621 not_stack = partial - offset;
3623 /* If the partial register-part of the arg counts in its stack size,
3624 skip the part of stack space corresponding to the registers.
3625 Otherwise, start copying to the beginning of the stack space,
3626 by setting SKIP to 0. */
3627 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3629 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3630 x = validize_mem (force_const_mem (mode, x));
3632 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3633 SUBREGs of such registers are not allowed. */
3634 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3635 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3636 x = copy_to_reg (x);
3638 /* Loop over all the words allocated on the stack for this arg. */
3639 /* We can do it by words, because any scalar bigger than a word
3640 has a size a multiple of a word. */
3641 #ifndef PUSH_ARGS_REVERSED
3642 for (i = not_stack; i < size; i++)
3643 #else
3644 for (i = size - 1; i >= not_stack; i--)
3645 #endif
3646 if (i >= not_stack + offset)
3647 emit_push_insn (operand_subword_force (x, i, mode),
3648 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3649 0, args_addr,
3650 GEN_INT (args_offset + ((i - not_stack + skip)
3651 * UNITS_PER_WORD)),
3652 reg_parm_stack_space, alignment_pad);
3654 else
3656 rtx addr;
3657 rtx dest;
3659 /* Push padding now if padding above and stack grows down,
3660 or if padding below and stack grows up.
3661 But if space already allocated, this has already been done. */
3662 if (extra && args_addr == 0
3663 && where_pad != none && where_pad != stack_direction)
3664 anti_adjust_stack (GEN_INT (extra));
3666 #ifdef PUSH_ROUNDING
3667 if (args_addr == 0 && PUSH_ARGS)
3668 emit_single_push_insn (mode, x, type);
3669 else
3670 #endif
3672 if (GET_CODE (args_so_far) == CONST_INT)
3673 addr
3674 = memory_address (mode,
3675 plus_constant (args_addr,
3676 INTVAL (args_so_far)));
3677 else
3678 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3679 args_so_far));
3680 dest = gen_rtx_MEM (mode, addr);
3681 if (type != 0)
3683 set_mem_attributes (dest, type, 1);
3684 /* Function incoming arguments may overlap with sibling call
3685 outgoing arguments and we cannot allow reordering of reads
3686 from function arguments with stores to outgoing arguments
3687 of sibling calls. */
3688 set_mem_alias_set (dest, 0);
3691 emit_move_insn (dest, x);
3695 /* If part should go in registers, copy that part
3696 into the appropriate registers. Do this now, at the end,
3697 since mem-to-mem copies above may do function calls. */
3698 if (partial > 0 && reg != 0)
3700 /* Handle calls that pass values in multiple non-contiguous locations.
3701 The Irix 6 ABI has examples of this. */
3702 if (GET_CODE (reg) == PARALLEL)
3703 emit_group_load (reg, x, type, -1);
3704 else
3705 move_block_to_reg (REGNO (reg), x, partial, mode);
3708 if (extra && args_addr == 0 && where_pad == stack_direction)
3709 anti_adjust_stack (GEN_INT (extra));
3711 if (alignment_pad && args_addr == 0)
3712 anti_adjust_stack (alignment_pad);
3715 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3716 operations. */
3718 static rtx
3719 get_subtarget (rtx x)
3721 return ((x == 0
3722 /* Only registers can be subtargets. */
3723 || GET_CODE (x) != REG
3724 /* If the register is readonly, it can't be set more than once. */
3725 || RTX_UNCHANGING_P (x)
3726 /* Don't use hard regs to avoid extending their life. */
3727 || REGNO (x) < FIRST_PSEUDO_REGISTER
3728 /* Avoid subtargets inside loops,
3729 since they hide some invariant expressions. */
3730 || preserve_subexpressions_p ())
3731 ? 0 : x);
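/* Example of the test above (illustrative): a freshly allocated pseudo,
   e.g. one obtained from gen_reg_rtx (SImode), passes every check and is
   returned unchanged, so it can hold intermediate results; a hard
   register, an RTX_UNCHANGING_P register, or any X inside a loop makes
   get_subtarget return 0 and expansion picks its own temporary.  */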
3734 /* Expand an assignment that stores the value of FROM into TO.
3735 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3736 (This may contain a QUEUED rtx;
3737 if the value is constant, this rtx is a constant.)
3738 Otherwise, the returned value is NULL_RTX. */
3741 expand_assignment (tree to, tree from, int want_value)
3743 rtx to_rtx = 0;
3744 rtx result;
3746 /* Don't crash if the lhs of the assignment was erroneous. */
3748 if (TREE_CODE (to) == ERROR_MARK)
3750 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3751 return want_value ? result : NULL_RTX;
3754 /* Assignment of a structure component needs special treatment
3755 if the structure component's rtx is not simply a MEM.
3756 Assignment of an array element at a constant index, and assignment of
3757 an array element in an unaligned packed structure field, has the same
3758 problem. */
3760 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3761 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3762 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3764 enum machine_mode mode1;
3765 HOST_WIDE_INT bitsize, bitpos;
3766 rtx orig_to_rtx;
3767 tree offset;
3768 int unsignedp;
3769 int volatilep = 0;
3770 tree tem;
3772 push_temp_slots ();
3773 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3774 &unsignedp, &volatilep);
3776 /* If we are going to use store_bit_field and extract_bit_field,
3777 make sure to_rtx will be safe for multiple use. */
3779 if (mode1 == VOIDmode && want_value)
3780 tem = stabilize_reference (tem);
3782 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3784 if (offset != 0)
3786 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3788 if (GET_CODE (to_rtx) != MEM)
3789 abort ();
3791 #ifdef POINTERS_EXTEND_UNSIGNED
3792 if (GET_MODE (offset_rtx) != Pmode)
3793 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3794 #else
3795 if (GET_MODE (offset_rtx) != ptr_mode)
3796 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3797 #endif
3799 /* A constant address in TO_RTX can have VOIDmode, we must not try
3800 to call force_reg for that case. Avoid that case. */
3801 if (GET_CODE (to_rtx) == MEM
3802 && GET_MODE (to_rtx) == BLKmode
3803 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3804 && bitsize > 0
3805 && (bitpos % bitsize) == 0
3806 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3807 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3809 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3810 bitpos = 0;
3813 to_rtx = offset_address (to_rtx, offset_rtx,
3814 highest_pow2_factor_for_type (TREE_TYPE (to),
3815 offset));
3818 if (GET_CODE (to_rtx) == MEM)
3820 /* If the field is at offset zero, we could have been given the
3821 DECL_RTX of the parent struct. Don't munge it. */
3822 to_rtx = shallow_copy_rtx (to_rtx);
3824 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3827 /* Deal with volatile and readonly fields. The former is only done
3828 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3829 if (volatilep && GET_CODE (to_rtx) == MEM)
3831 if (to_rtx == orig_to_rtx)
3832 to_rtx = copy_rtx (to_rtx);
3833 MEM_VOLATILE_P (to_rtx) = 1;
3836 if (TREE_CODE (to) == COMPONENT_REF
3837 && TREE_READONLY (TREE_OPERAND (to, 1)))
3839 if (to_rtx == orig_to_rtx)
3840 to_rtx = copy_rtx (to_rtx);
3841 RTX_UNCHANGING_P (to_rtx) = 1;
3844 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3846 if (to_rtx == orig_to_rtx)
3847 to_rtx = copy_rtx (to_rtx);
3848 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3851 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3852 (want_value
3853 /* Spurious cast for HPUX compiler. */
3854 ? ((enum machine_mode)
3855 TYPE_MODE (TREE_TYPE (to)))
3856 : VOIDmode),
3857 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3859 preserve_temp_slots (result);
3860 free_temp_slots ();
3861 pop_temp_slots ();
3863 /* If the value is meaningful, convert RESULT to the proper mode.
3864 Otherwise, return nothing. */
3865 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3866 TYPE_MODE (TREE_TYPE (from)),
3867 result,
3868 TREE_UNSIGNED (TREE_TYPE (to)))
3869 : NULL_RTX);
3872 /* If the rhs is a function call and its value is not an aggregate,
3873 call the function before we start to compute the lhs.
3874 This is needed for correct code for cases such as
3875 val = setjmp (buf) on machines where reference to val
3876 requires loading up part of an address in a separate insn.
3878 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3879 since it might be a promoted variable where the zero- or sign-extension
3880 needs to be done. Handling this in the normal way is safe because no
3881 computation is done before the call. */
3882 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3883 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3884 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3885 && GET_CODE (DECL_RTL (to)) == REG))
3887 rtx value;
3889 push_temp_slots ();
3890 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3891 if (to_rtx == 0)
3892 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3894 /* Handle calls that return values in multiple non-contiguous locations.
3895 The Irix 6 ABI has examples of this. */
3896 if (GET_CODE (to_rtx) == PARALLEL)
3897 emit_group_load (to_rtx, value, TREE_TYPE (from),
3898 int_size_in_bytes (TREE_TYPE (from)));
3899 else if (GET_MODE (to_rtx) == BLKmode)
3900 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3901 else
3903 if (POINTER_TYPE_P (TREE_TYPE (to)))
3904 value = convert_memory_address (GET_MODE (to_rtx), value);
3905 emit_move_insn (to_rtx, value);
3907 preserve_temp_slots (to_rtx);
3908 free_temp_slots ();
3909 pop_temp_slots ();
3910 return want_value ? to_rtx : NULL_RTX;
3913 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3914 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3916 if (to_rtx == 0)
3917 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3919 /* Don't move directly into a return register. */
3920 if (TREE_CODE (to) == RESULT_DECL
3921 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3923 rtx temp;
3925 push_temp_slots ();
3926 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3928 if (GET_CODE (to_rtx) == PARALLEL)
3929 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3930 int_size_in_bytes (TREE_TYPE (from)));
3931 else
3932 emit_move_insn (to_rtx, temp);
3934 preserve_temp_slots (to_rtx);
3935 free_temp_slots ();
3936 pop_temp_slots ();
3937 return want_value ? to_rtx : NULL_RTX;
3940 /* In case we are returning the contents of an object which overlaps
3941 the place the value is being stored, use a safe function when copying
3942 a value through a pointer into a structure value return block. */
3943 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3944 && current_function_returns_struct
3945 && !current_function_returns_pcc_struct)
3947 rtx from_rtx, size;
3949 push_temp_slots ();
3950 size = expr_size (from);
3951 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3953 if (TARGET_MEM_FUNCTIONS)
3954 emit_library_call (memmove_libfunc, LCT_NORMAL,
3955 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3956 XEXP (from_rtx, 0), Pmode,
3957 convert_to_mode (TYPE_MODE (sizetype),
3958 size, TREE_UNSIGNED (sizetype)),
3959 TYPE_MODE (sizetype));
3960 else
3961 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3962 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3963 XEXP (to_rtx, 0), Pmode,
3964 convert_to_mode (TYPE_MODE (integer_type_node),
3965 size,
3966 TREE_UNSIGNED (integer_type_node)),
3967 TYPE_MODE (integer_type_node));
3969 preserve_temp_slots (to_rtx);
3970 free_temp_slots ();
3971 pop_temp_slots ();
3972 return want_value ? to_rtx : NULL_RTX;
3975 /* Compute FROM and store the value in the rtx we got. */
3977 push_temp_slots ();
3978 result = store_expr (from, to_rtx, want_value);
3979 preserve_temp_slots (result);
3980 free_temp_slots ();
3981 pop_temp_slots ();
3982 return want_value ? result : NULL_RTX;
3985 /* Generate code for computing expression EXP,
3986 and storing the value into TARGET.
3987 TARGET may contain a QUEUED rtx.
3989 If WANT_VALUE & 1 is nonzero, return a copy of the value
3990 not in TARGET, so that we can be sure to use the proper
3991 value in a containing expression even if TARGET has something
3992 else stored in it. If possible, we copy the value through a pseudo
3993 and return that pseudo. Or, if the value is constant, we try to
3994 return the constant. In some cases, we return a pseudo
3995 copied *from* TARGET.
3997 If the mode is BLKmode then we may return TARGET itself.
3998 It turns out that in BLKmode it doesn't cause a problem,
3999 because C has no operators that could combine two different
4000 assignments into the same BLKmode object with different values
4001 with no sequence point. Will other languages need this to
4002 be more thorough?
4004 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4005 to catch quickly any cases where the caller uses the value
4006 and fails to set WANT_VALUE.
4008 If WANT_VALUE & 2 is set, this is a store into a call param on the
4009 stack, and block moves may need to be treated specially. */
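/* Summary of the WANT_VALUE encoding described above (illustrative):
   want_value == 0  -- store only, return NULL_RTX;
   want_value == 1  -- store and return an rtx usable as the value;
   want_value == 2  -- store into a stack call parameter, no value wanted;
   want_value == 3  -- both: a stack call parameter whose value is wanted.  */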
4012 store_expr (tree exp, rtx target, int want_value)
4014 rtx temp;
4015 int dont_return_target = 0;
4016 int dont_store_target = 0;
4018 if (VOID_TYPE_P (TREE_TYPE (exp)))
4020 /* C++ can generate ?: expressions with a throw expression in one
4021 branch and an rvalue in the other. Here, we resolve attempts to
4022 store the throw expression's nonexistent result. */
4023 if (want_value)
4024 abort ();
4025 expand_expr (exp, const0_rtx, VOIDmode, 0);
4026 return NULL_RTX;
4028 if (TREE_CODE (exp) == COMPOUND_EXPR)
4030 /* Perform first part of compound expression, then assign from second
4031 part. */
4032 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4033 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4034 emit_queue ();
4035 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4037 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4039 /* For conditional expression, get safe form of the target. Then
4040 test the condition, doing the appropriate assignment on either
4041 side. This avoids the creation of unnecessary temporaries.
4042 For non-BLKmode, it is more efficient not to do this. */
4044 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4046 emit_queue ();
4047 target = protect_from_queue (target, 1);
4049 do_pending_stack_adjust ();
4050 NO_DEFER_POP;
4051 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4052 start_cleanup_deferral ();
4053 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4054 end_cleanup_deferral ();
4055 emit_queue ();
4056 emit_jump_insn (gen_jump (lab2));
4057 emit_barrier ();
4058 emit_label (lab1);
4059 start_cleanup_deferral ();
4060 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4061 end_cleanup_deferral ();
4062 emit_queue ();
4063 emit_label (lab2);
4064 OK_DEFER_POP;
4066 return want_value & 1 ? target : NULL_RTX;
4068 else if (queued_subexp_p (target))
4069 /* If target contains a postincrement, let's not risk
4070 using it as the place to generate the rhs. */
4072 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4074 /* Expand EXP into a new pseudo. */
4075 temp = gen_reg_rtx (GET_MODE (target));
4076 temp = expand_expr (exp, temp, GET_MODE (target),
4077 (want_value & 2
4078 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4080 else
4081 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4082 (want_value & 2
4083 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4085 /* If target is volatile, ANSI requires accessing the value
4086 *from* the target, if it is accessed. So make that happen.
4087 In no case return the target itself. */
4088 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4089 dont_return_target = 1;
4091 else if ((want_value & 1) != 0
4092 && GET_CODE (target) == MEM
4093 && ! MEM_VOLATILE_P (target)
4094 && GET_MODE (target) != BLKmode)
4095 /* If target is in memory and caller wants value in a register instead,
4096 arrange that. Pass TARGET as target for expand_expr so that,
4097 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4098 We know expand_expr will not use the target in that case.
4099 Don't do this if TARGET is volatile because we are supposed
4100 to write it and then read it. */
4102 temp = expand_expr (exp, target, GET_MODE (target),
4103 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4104 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4106 /* If TEMP is already in the desired TARGET, only copy it from
4107 memory and don't store it there again. */
4108 if (temp == target
4109 || (rtx_equal_p (temp, target)
4110 && ! side_effects_p (temp) && ! side_effects_p (target)))
4111 dont_store_target = 1;
4112 temp = copy_to_reg (temp);
4114 dont_return_target = 1;
4116 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4117 /* If this is a scalar in a register that is stored in a wider mode
4118 than the declared mode, compute the result into its declared mode
4119 and then convert to the wider mode. Our value is the computed
4120 expression. */
4122 rtx inner_target = 0;
4124 /* If we don't want a value, we can do the conversion inside EXP,
4125 which will often result in some optimizations. Do the conversion
4126 in two steps: first change the signedness, if needed, then
4127 the extend. But don't do this if the type of EXP is a subtype
4128 of something else since then the conversion might involve
4129 more than just converting modes. */
4130 if ((want_value & 1) == 0
4131 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4132 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4134 if (TREE_UNSIGNED (TREE_TYPE (exp))
4135 != SUBREG_PROMOTED_UNSIGNED_P (target))
4136 exp = convert
4137 ((*lang_hooks.types.signed_or_unsigned_type)
4138 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4140 exp = convert ((*lang_hooks.types.type_for_mode)
4141 (GET_MODE (SUBREG_REG (target)),
4142 SUBREG_PROMOTED_UNSIGNED_P (target)),
4143 exp);
4145 inner_target = SUBREG_REG (target);
4148 temp = expand_expr (exp, inner_target, VOIDmode,
4149 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4151 /* If TEMP is a MEM and we want a result value, make the access
4152 now so it gets done only once. Strictly speaking, this is
4153 only necessary if the MEM is volatile, or if the address
4154 overlaps TARGET. But not performing the load twice also
4155 reduces the amount of rtl we generate and then have to CSE. */
4156 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4157 temp = copy_to_reg (temp);
4159 /* If TEMP is a VOIDmode constant, use convert_modes to make
4160 sure that we properly convert it. */
4161 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4163 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4164 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4165 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4166 GET_MODE (target), temp,
4167 SUBREG_PROMOTED_UNSIGNED_P (target));
4170 convert_move (SUBREG_REG (target), temp,
4171 SUBREG_PROMOTED_UNSIGNED_P (target));
4173 /* If we promoted a constant, change the mode back down to match
4174 target. Otherwise, the caller might get confused by a result whose
4175 mode is larger than expected. */
4177 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4179 if (GET_MODE (temp) != VOIDmode)
4181 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4182 SUBREG_PROMOTED_VAR_P (temp) = 1;
4183 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4184 SUBREG_PROMOTED_UNSIGNED_P (target));
4186 else
4187 temp = convert_modes (GET_MODE (target),
4188 GET_MODE (SUBREG_REG (target)),
4189 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4192 return want_value & 1 ? temp : NULL_RTX;
4194 else
4196 temp = expand_expr (exp, target, GET_MODE (target),
4197 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4198 /* Return TARGET if it's a specified hardware register.
4199 If TARGET is a volatile mem ref, either return TARGET
4200 or return a reg copied *from* TARGET; ANSI requires this.
4202 Otherwise, if TEMP is not TARGET, return TEMP
4203 if it is constant (for efficiency),
4204 or if we really want the correct value. */
4205 if (!(target && GET_CODE (target) == REG
4206 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4207 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4208 && ! rtx_equal_p (temp, target)
4209 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4210 dont_return_target = 1;
4213 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4214 the same as that of TARGET, adjust the constant. This is needed, for
4215 example, in case it is a CONST_DOUBLE and we want only a word-sized
4216 value. */
4217 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4218 && TREE_CODE (exp) != ERROR_MARK
4219 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4220 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4221 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4223 /* If value was not generated in the target, store it there.
4224 Convert the value to TARGET's type first if necessary.
4225 If TEMP and TARGET compare equal according to rtx_equal_p, but
4226 one or both of them are volatile memory refs, we have to distinguish
4227 two cases:
4228 - expand_expr has used TARGET. In this case, we must not generate
4229 another copy. This can be detected by TARGET being equal according
4230 to == .
4231 - expand_expr has not used TARGET - that means that the source just
4232 happens to have the same RTX form. Since temp will have been created
4233 by expand_expr, it will compare unequal according to == .
4234 We must generate a copy in this case, to reach the correct number
4235 of volatile memory references. */
4237 if ((! rtx_equal_p (temp, target)
4238 || (temp != target && (side_effects_p (temp)
4239 || side_effects_p (target))))
4240 && TREE_CODE (exp) != ERROR_MARK
4241 && ! dont_store_target
4242 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4243 but TARGET is not valid memory reference, TEMP will differ
4244 from TARGET although it is really the same location. */
4245 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4246 || target != DECL_RTL_IF_SET (exp))
4247 /* If there's nothing to copy, don't bother. Don't call expr_size
4248 unless necessary, because the expr_size hook of some front ends (C++)
4249 aborts on objects that are not supposed to be bit-copied or
4250 bit-initialized. */
4251 && expr_size (exp) != const0_rtx)
4253 target = protect_from_queue (target, 1);
4254 if (GET_MODE (temp) != GET_MODE (target)
4255 && GET_MODE (temp) != VOIDmode)
4257 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4258 if (dont_return_target)
4260 /* In this case, we will return TEMP,
4261 so make sure it has the proper mode.
4262 But don't forget to store the value into TARGET. */
4263 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4264 emit_move_insn (target, temp);
4266 else
4267 convert_move (target, temp, unsignedp);
4270 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4272 /* Handle copying a string constant into an array. The string
4273 constant may be shorter than the array. So copy just the string's
4274 actual length, and clear the rest. First get the size of the data
4275 type of the string, which is actually the size of the target. */
4276 rtx size = expr_size (exp);
4278 if (GET_CODE (size) == CONST_INT
4279 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4280 emit_block_move (target, temp, size,
4281 (want_value & 2
4282 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4283 else
4285 /* Compute the size of the data to copy from the string. */
4286 tree copy_size
4287 = size_binop (MIN_EXPR,
4288 make_tree (sizetype, size),
4289 size_int (TREE_STRING_LENGTH (exp)));
4290 rtx copy_size_rtx
4291 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4292 (want_value & 2
4293 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4294 rtx label = 0;
4296 /* Copy that much. */
4297 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4298 TREE_UNSIGNED (sizetype));
4299 emit_block_move (target, temp, copy_size_rtx,
4300 (want_value & 2
4301 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4303 /* Figure out how much is left in TARGET that we have to clear.
4304 Do all calculations in ptr_mode. */
4305 if (GET_CODE (copy_size_rtx) == CONST_INT)
4307 size = plus_constant (size, -INTVAL (copy_size_rtx));
4308 target = adjust_address (target, BLKmode,
4309 INTVAL (copy_size_rtx));
4311 else
4313 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4314 copy_size_rtx, NULL_RTX, 0,
4315 OPTAB_LIB_WIDEN);
4317 #ifdef POINTERS_EXTEND_UNSIGNED
4318 if (GET_MODE (copy_size_rtx) != Pmode)
4319 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4320 TREE_UNSIGNED (sizetype));
4321 #endif
4323 target = offset_address (target, copy_size_rtx,
4324 highest_pow2_factor (copy_size));
4325 label = gen_label_rtx ();
4326 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4327 GET_MODE (size), 0, label);
4330 if (size != const0_rtx)
4331 clear_storage (target, size);
4333 if (label)
4334 emit_label (label);
4337 /* Handle calls that return values in multiple non-contiguous locations.
4338 The Irix 6 ABI has examples of this. */
4339 else if (GET_CODE (target) == PARALLEL)
4340 emit_group_load (target, temp, TREE_TYPE (exp),
4341 int_size_in_bytes (TREE_TYPE (exp)));
4342 else if (GET_MODE (temp) == BLKmode)
4343 emit_block_move (target, temp, expr_size (exp),
4344 (want_value & 2
4345 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4346 else
4347 emit_move_insn (target, temp);
4350 /* If we don't want a value, return NULL_RTX. */
4351 if ((want_value & 1) == 0)
4352 return NULL_RTX;
4354 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4355 ??? The latter test doesn't seem to make sense. */
4356 else if (dont_return_target && GET_CODE (temp) != MEM)
4357 return temp;
4359 /* Return TARGET itself if it is a hard register. */
4360 else if ((want_value & 1) != 0
4361 && GET_MODE (target) != BLKmode
4362 && ! (GET_CODE (target) == REG
4363 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4364 return copy_to_reg (target);
4366 else
4367 return target;
4370 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4372 static int
4373 is_zeros_p (tree exp)
4375 tree elt;
4377 switch (TREE_CODE (exp))
4379 case CONVERT_EXPR:
4380 case NOP_EXPR:
4381 case NON_LVALUE_EXPR:
4382 case VIEW_CONVERT_EXPR:
4383 return is_zeros_p (TREE_OPERAND (exp, 0));
4385 case INTEGER_CST:
4386 return integer_zerop (exp);
4388 case COMPLEX_CST:
4389 return
4390 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4392 case REAL_CST:
4393 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4395 case VECTOR_CST:
4396 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4397 elt = TREE_CHAIN (elt))
4398 if (!is_zeros_p (TREE_VALUE (elt)))
4399 return 0;
4401 return 1;
4403 case CONSTRUCTOR:
4404 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4405 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4406 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4407 if (! is_zeros_p (TREE_VALUE (elt)))
4408 return 0;
4410 return 1;
4412 default:
4413 return 0;
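/* Illustrative inputs for is_zeros_p (the struct is hypothetical): the
   constants 0 and 0.0, the complex constant 0.0 + 0.0i, and an aggregate
   initializer such as { 0, 0 } all yield 1; { 0, 1 } or 0.5 yield 0.
   A SET_TYPE constructor counts as zero only when it has no elements.  */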
4417 /* Return 1 if EXP contains mostly (3/4) zeros. */
4420 mostly_zeros_p (tree exp)
4422 if (TREE_CODE (exp) == CONSTRUCTOR)
4424 int elts = 0, zeros = 0;
4425 tree elt = CONSTRUCTOR_ELTS (exp);
4426 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4428 /* If there are no ranges of true bits, it is all zero. */
4429 return elt == NULL_TREE;
4431 for (; elt; elt = TREE_CHAIN (elt))
4433 /* We do not handle the case where the index is a RANGE_EXPR,
4434 so the statistic will be somewhat inaccurate.
4435 We do make a more accurate count in store_constructor itself,
4436 and since this function is only used for nested array elements,
4437 this should be close enough. */
4438 if (mostly_zeros_p (TREE_VALUE (elt)))
4439 zeros++;
4440 elts++;
4443 return 4 * zeros >= 3 * elts;
4446 return is_zeros_p (exp);
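/* Worked example of the 3/4 test above: a constructor with elts == 4 of
   which zeros == 3 gives 4 * 3 = 12 >= 3 * 4 = 12, so it is treated as
   mostly zero; with zeros == 2 we get 8 < 12 and it is not.  */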
4449 /* Helper function for store_constructor.
4450 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4451 TYPE is the type of the CONSTRUCTOR, not the element type.
4452 CLEARED is as for store_constructor.
4453 ALIAS_SET is the alias set to use for any stores.
4455 This provides a recursive shortcut back to store_constructor when it isn't
4456 necessary to go through store_field. This is so that we can pass through
4457 the cleared field to let store_constructor know that we may not have to
4458 clear a substructure if the outer structure has already been cleared. */
4460 static void
4461 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4462 HOST_WIDE_INT bitpos, enum machine_mode mode,
4463 tree exp, tree type, int cleared, int alias_set)
4465 if (TREE_CODE (exp) == CONSTRUCTOR
4466 && bitpos % BITS_PER_UNIT == 0
4467 /* If we have a nonzero bitpos for a register target, then we just
4468 let store_field do the bitfield handling. This is unlikely to
4469 generate unnecessary clear instructions anyway. */
4470 && (bitpos == 0 || GET_CODE (target) == MEM))
4472 if (GET_CODE (target) == MEM)
4473 target
4474 = adjust_address (target,
4475 GET_MODE (target) == BLKmode
4476 || 0 != (bitpos
4477 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4478 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4481 /* Update the alias set, if required. */
4482 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4483 && MEM_ALIAS_SET (target) != 0)
4485 target = copy_rtx (target);
4486 set_mem_alias_set (target, alias_set);
4489 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4491 else
4492 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4493 alias_set);
4496 /* Store the value of constructor EXP into the rtx TARGET.
4497 TARGET is either a REG or a MEM; we know it cannot conflict, since
4498 safe_from_p has been called.
4499 CLEARED is true if TARGET is known to have been zeroed.
4500 SIZE is the number of bytes of TARGET we are allowed to modify: this
4501 may not be the same as the size of EXP if we are assigning to a field
4502 which has been packed to exclude padding bits. */
4504 static void
4505 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4507 tree type = TREE_TYPE (exp);
4508 #ifdef WORD_REGISTER_OPERATIONS
4509 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4510 #endif
4512 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4513 || TREE_CODE (type) == QUAL_UNION_TYPE)
4515 tree elt;
4517 /* If size is zero or the target is already cleared, do nothing. */
4518 if (size == 0 || cleared)
4519 cleared = 1;
4520 /* We either clear the aggregate or indicate the value is dead. */
4521 else if ((TREE_CODE (type) == UNION_TYPE
4522 || TREE_CODE (type) == QUAL_UNION_TYPE)
4523 && ! CONSTRUCTOR_ELTS (exp))
4524 /* If the constructor is empty, clear the union. */
4526 clear_storage (target, expr_size (exp));
4527 cleared = 1;
4530 /* If we are building a static constructor into a register,
4531 set the initial value as zero so we can fold the value into
4532 a constant. But if more than one register is involved,
4533 this probably loses. */
4534 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4535 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4537 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4538 cleared = 1;
4541 /* If the constructor has fewer fields than the structure
4542 or if we are initializing the structure to mostly zeros,
4543 clear the whole structure first. Don't do this if TARGET is a
4544 register whose mode size isn't equal to SIZE since clear_storage
4545 can't handle this case. */
4546 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4547 || mostly_zeros_p (exp))
4548 && (GET_CODE (target) != REG
4549 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4550 == size)))
4552 rtx xtarget = target;
4554 if (readonly_fields_p (type))
4556 xtarget = copy_rtx (xtarget);
4557 RTX_UNCHANGING_P (xtarget) = 1;
4560 clear_storage (xtarget, GEN_INT (size));
4561 cleared = 1;
4564 if (! cleared)
4565 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4567 /* Store each element of the constructor into
4568 the corresponding field of TARGET. */
4570 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4572 tree field = TREE_PURPOSE (elt);
4573 tree value = TREE_VALUE (elt);
4574 enum machine_mode mode;
4575 HOST_WIDE_INT bitsize;
4576 HOST_WIDE_INT bitpos = 0;
4577 tree offset;
4578 rtx to_rtx = target;
4580 /* Just ignore missing fields.
4581 We cleared the whole structure, above,
4582 if any fields are missing. */
4583 if (field == 0)
4584 continue;
4586 if (cleared && is_zeros_p (value))
4587 continue;
4589 if (host_integerp (DECL_SIZE (field), 1))
4590 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4591 else
4592 bitsize = -1;
4594 mode = DECL_MODE (field);
4595 if (DECL_BIT_FIELD (field))
4596 mode = VOIDmode;
4598 offset = DECL_FIELD_OFFSET (field);
4599 if (host_integerp (offset, 0)
4600 && host_integerp (bit_position (field), 0))
4602 bitpos = int_bit_position (field);
4603 offset = 0;
4605 else
4606 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4608 if (offset)
4610 rtx offset_rtx;
4612 if (CONTAINS_PLACEHOLDER_P (offset))
4613 offset = build (WITH_RECORD_EXPR, sizetype,
4614 offset, make_tree (TREE_TYPE (exp), target));
4616 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4617 if (GET_CODE (to_rtx) != MEM)
4618 abort ();
4620 #ifdef POINTERS_EXTEND_UNSIGNED
4621 if (GET_MODE (offset_rtx) != Pmode)
4622 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4623 #else
4624 if (GET_MODE (offset_rtx) != ptr_mode)
4625 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4626 #endif
4628 to_rtx = offset_address (to_rtx, offset_rtx,
4629 highest_pow2_factor (offset));
4632 if (TREE_READONLY (field))
4634 if (GET_CODE (to_rtx) == MEM)
4635 to_rtx = copy_rtx (to_rtx);
4637 RTX_UNCHANGING_P (to_rtx) = 1;
4640 #ifdef WORD_REGISTER_OPERATIONS
4641 /* If this initializes a field that is smaller than a word, at the
4642 start of a word, try to widen it to a full word.
4643 This special case allows us to output C++ member function
4644 initializations in a form that the optimizers can understand. */
4645 if (GET_CODE (target) == REG
4646 && bitsize < BITS_PER_WORD
4647 && bitpos % BITS_PER_WORD == 0
4648 && GET_MODE_CLASS (mode) == MODE_INT
4649 && TREE_CODE (value) == INTEGER_CST
4650 && exp_size >= 0
4651 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4653 tree type = TREE_TYPE (value);
4655 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4657 type = (*lang_hooks.types.type_for_size)
4658 (BITS_PER_WORD, TREE_UNSIGNED (type));
4659 value = convert (type, value);
4662 if (BYTES_BIG_ENDIAN)
4663 value
4664 = fold (build (LSHIFT_EXPR, type, value,
4665 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4666 bitsize = BITS_PER_WORD;
4667 mode = word_mode;
4669 #endif
4671 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4672 && DECL_NONADDRESSABLE_P (field))
4674 to_rtx = copy_rtx (to_rtx);
4675 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4678 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4679 value, type, cleared,
4680 get_alias_set (TREE_TYPE (field)));
4683 else if (TREE_CODE (type) == ARRAY_TYPE
4684 || TREE_CODE (type) == VECTOR_TYPE)
4686 tree elt;
4687 int i;
4688 int need_to_clear;
4689 tree domain = TYPE_DOMAIN (type);
4690 tree elttype = TREE_TYPE (type);
4691 int const_bounds_p;
4692 HOST_WIDE_INT minelt = 0;
4693 HOST_WIDE_INT maxelt = 0;
4695 /* Vectors are like arrays, but the domain is stored via an array
4696 type indirectly. */
4697 if (TREE_CODE (type) == VECTOR_TYPE)
4699 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4700 the same field as TYPE_DOMAIN, we are not guaranteed that
4701 it always will. */
4702 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4703 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4706 const_bounds_p = (TYPE_MIN_VALUE (domain)
4707 && TYPE_MAX_VALUE (domain)
4708 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4709 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4711 /* If we have constant bounds for the range of the type, get them. */
4712 if (const_bounds_p)
4714 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4715 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4718 /* If the constructor has fewer elements than the array,
4719 clear the whole array first. Similarly if this is
4720 a static constructor of a non-BLKmode object. */
4721 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4722 need_to_clear = 1;
4723 else
4725 HOST_WIDE_INT count = 0, zero_count = 0;
4726 need_to_clear = ! const_bounds_p;
4728 /* This loop is a more accurate version of the loop in
4729 mostly_zeros_p (it handles RANGE_EXPR in an index).
4730 It is also needed to check for missing elements. */
4731 for (elt = CONSTRUCTOR_ELTS (exp);
4732 elt != NULL_TREE && ! need_to_clear;
4733 elt = TREE_CHAIN (elt))
4735 tree index = TREE_PURPOSE (elt);
4736 HOST_WIDE_INT this_node_count;
4738 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4740 tree lo_index = TREE_OPERAND (index, 0);
4741 tree hi_index = TREE_OPERAND (index, 1);
4743 if (! host_integerp (lo_index, 1)
4744 || ! host_integerp (hi_index, 1))
4746 need_to_clear = 1;
4747 break;
4750 this_node_count = (tree_low_cst (hi_index, 1)
4751 - tree_low_cst (lo_index, 1) + 1);
4753 else
4754 this_node_count = 1;
4756 count += this_node_count;
4757 if (mostly_zeros_p (TREE_VALUE (elt)))
4758 zero_count += this_node_count;
4761 /* Clear the entire array first if there are any missing elements,
4762 or if the incidence of zero elements is >= 75%. */
4763 if (! need_to_clear
4764 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4765 need_to_clear = 1;
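/* Illustrative count: an array of 10 elements whose constructor supplies
   all 10, with 8 of them mostly zero, gives 4 * 8 = 32 >= 3 * 10 = 30,
   so the whole array is cleared first; elements that are entirely zero
   can then be skipped in the store loop below.  */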
4768 if (need_to_clear && size > 0)
4770 if (! cleared)
4772 if (REG_P (target))
4773 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4774 else
4775 clear_storage (target, GEN_INT (size));
4777 cleared = 1;
4779 else if (REG_P (target))
4780 /* Inform later passes that the old value is dead. */
4781 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4783 /* Store each element of the constructor into
4784 the corresponding element of TARGET, determined
4785 by counting the elements. */
4786 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4787 elt;
4788 elt = TREE_CHAIN (elt), i++)
4790 enum machine_mode mode;
4791 HOST_WIDE_INT bitsize;
4792 HOST_WIDE_INT bitpos;
4793 int unsignedp;
4794 tree value = TREE_VALUE (elt);
4795 tree index = TREE_PURPOSE (elt);
4796 rtx xtarget = target;
4798 if (cleared && is_zeros_p (value))
4799 continue;
4801 unsignedp = TREE_UNSIGNED (elttype);
4802 mode = TYPE_MODE (elttype);
4803 if (mode == BLKmode)
4804 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4805 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4806 : -1);
4807 else
4808 bitsize = GET_MODE_BITSIZE (mode);
4810 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4812 tree lo_index = TREE_OPERAND (index, 0);
4813 tree hi_index = TREE_OPERAND (index, 1);
4814 rtx index_r, pos_rtx, loop_end;
4815 struct nesting *loop;
4816 HOST_WIDE_INT lo, hi, count;
4817 tree position;
4819 /* If the range is constant and "small", unroll the loop. */
4820 if (const_bounds_p
4821 && host_integerp (lo_index, 0)
4822 && host_integerp (hi_index, 0)
4823 && (lo = tree_low_cst (lo_index, 0),
4824 hi = tree_low_cst (hi_index, 0),
4825 count = hi - lo + 1,
4826 (GET_CODE (target) != MEM
4827 || count <= 2
4828 || (host_integerp (TYPE_SIZE (elttype), 1)
4829 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4830 <= 40 * 8)))))
4832 lo -= minelt; hi -= minelt;
4833 for (; lo <= hi; lo++)
4835 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4837 if (GET_CODE (target) == MEM
4838 && !MEM_KEEP_ALIAS_SET_P (target)
4839 && TREE_CODE (type) == ARRAY_TYPE
4840 && TYPE_NONALIASED_COMPONENT (type))
4842 target = copy_rtx (target);
4843 MEM_KEEP_ALIAS_SET_P (target) = 1;
4846 store_constructor_field
4847 (target, bitsize, bitpos, mode, value, type, cleared,
4848 get_alias_set (elttype));
4851 else
4853 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4854 loop_end = gen_label_rtx ();
4856 unsignedp = TREE_UNSIGNED (domain);
4858 index = build_decl (VAR_DECL, NULL_TREE, domain);
4860 index_r
4861 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4862 &unsignedp, 0));
4863 SET_DECL_RTL (index, index_r);
4864 if (TREE_CODE (value) == SAVE_EXPR
4865 && SAVE_EXPR_RTL (value) == 0)
4867 /* Make sure value gets expanded once before the
4868 loop. */
4869 expand_expr (value, const0_rtx, VOIDmode, 0);
4870 emit_queue ();
4872 store_expr (lo_index, index_r, 0);
4873 loop = expand_start_loop (0);
4875 /* Assign value to element index. */
4876 position
4877 = convert (ssizetype,
4878 fold (build (MINUS_EXPR, TREE_TYPE (index),
4879 index, TYPE_MIN_VALUE (domain))));
4880 position = size_binop (MULT_EXPR, position,
4881 convert (ssizetype,
4882 TYPE_SIZE_UNIT (elttype)));
4884 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4885 xtarget = offset_address (target, pos_rtx,
4886 highest_pow2_factor (position));
4887 xtarget = adjust_address (xtarget, mode, 0);
4888 if (TREE_CODE (value) == CONSTRUCTOR)
4889 store_constructor (value, xtarget, cleared,
4890 bitsize / BITS_PER_UNIT);
4891 else
4892 store_expr (value, xtarget, 0);
4894 expand_exit_loop_if_false (loop,
4895 build (LT_EXPR, integer_type_node,
4896 index, hi_index));
4898 expand_increment (build (PREINCREMENT_EXPR,
4899 TREE_TYPE (index),
4900 index, integer_one_node), 0, 0);
4901 expand_end_loop ();
4902 emit_label (loop_end);
4905 else if ((index != 0 && ! host_integerp (index, 0))
4906 || ! host_integerp (TYPE_SIZE (elttype), 1))
4908 tree position;
4910 if (index == 0)
4911 index = ssize_int (1);
4913 if (minelt)
4914 index = convert (ssizetype,
4915 fold (build (MINUS_EXPR, index,
4916 TYPE_MIN_VALUE (domain))));
4918 position = size_binop (MULT_EXPR, index,
4919 convert (ssizetype,
4920 TYPE_SIZE_UNIT (elttype)));
4921 xtarget = offset_address (target,
4922 expand_expr (position, 0, VOIDmode, 0),
4923 highest_pow2_factor (position));
4924 xtarget = adjust_address (xtarget, mode, 0);
4925 store_expr (value, xtarget, 0);
4927 else
4929 if (index != 0)
4930 bitpos = ((tree_low_cst (index, 0) - minelt)
4931 * tree_low_cst (TYPE_SIZE (elttype), 1));
4932 else
4933 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4935 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4936 && TREE_CODE (type) == ARRAY_TYPE
4937 && TYPE_NONALIASED_COMPONENT (type))
4939 target = copy_rtx (target);
4940 MEM_KEEP_ALIAS_SET_P (target) = 1;
4943 store_constructor_field (target, bitsize, bitpos, mode, value,
4944 type, cleared, get_alias_set (elttype));
4950 /* Set constructor assignments. */
4951 else if (TREE_CODE (type) == SET_TYPE)
4953 tree elt = CONSTRUCTOR_ELTS (exp);
4954 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4955 tree domain = TYPE_DOMAIN (type);
4956 tree domain_min, domain_max, bitlength;
4958 /* The default implementation strategy is to extract the constant
4959 parts of the constructor, use that to initialize the target,
4960 and then "or" in whatever non-constant ranges we need in addition.
4962 If a large set is all zero or all ones, it is
4963 probably better to set it using memset (if available) or bzero.
4964 Also, if a large set has just a single range, it may be
4965 better to first clear the whole set (using
4966 bzero/memset) and then set the bits we want. */
4968 /* Check for all zeros. */
4969 if (elt == NULL_TREE && size > 0)
4971 if (!cleared)
4972 clear_storage (target, GEN_INT (size));
4973 return;
4976 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4977 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4978 bitlength = size_binop (PLUS_EXPR,
4979 size_diffop (domain_max, domain_min),
4980 ssize_int (1));
4982 nbits = tree_low_cst (bitlength, 1);
4984 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4985 are "complicated" (more than one range), initialize (the
4986 constant parts) by copying from a constant. */
4987 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4988 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4990 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4991 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4992 char *bit_buffer = alloca (nbits);
4993 HOST_WIDE_INT word = 0;
4994 unsigned int bit_pos = 0;
4995 unsigned int ibit = 0;
4996 unsigned int offset = 0; /* In bytes from beginning of set. */
4998 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4999 for (;;)
5001 if (bit_buffer[ibit])
5003 if (BYTES_BIG_ENDIAN)
5004 word |= (1 << (set_word_size - 1 - bit_pos));
5005 else
5006 word |= 1 << bit_pos;
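/* Bit placement example (illustrative): with set_word_size == 8 and
   bit_pos == 2, a big-endian target sets bit 8 - 1 - 2 == 5 of WORD,
   while a little-endian target sets bit 2.  */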
5009 bit_pos++; ibit++;
5010 if (bit_pos >= set_word_size || ibit == nbits)
5012 if (word != 0 || ! cleared)
5014 rtx datum = GEN_INT (word);
5015 rtx to_rtx;
5017 /* The assumption here is that it is safe to use
5018 XEXP if the set is multi-word, but not if
5019 it's single-word. */
5020 if (GET_CODE (target) == MEM)
5021 to_rtx = adjust_address (target, mode, offset);
5022 else if (offset == 0)
5023 to_rtx = target;
5024 else
5025 abort ();
5026 emit_move_insn (to_rtx, datum);
5029 if (ibit == nbits)
5030 break;
5031 word = 0;
5032 bit_pos = 0;
5033 offset += set_word_size / BITS_PER_UNIT;
5037 else if (!cleared)
5038 /* Don't bother clearing storage if the set is all ones. */
5039 if (TREE_CHAIN (elt) != NULL_TREE
5040 || (TREE_PURPOSE (elt) == NULL_TREE
5041 ? nbits != 1
5042 : ( ! host_integerp (TREE_VALUE (elt), 0)
5043 || ! host_integerp (TREE_PURPOSE (elt), 0)
5044 || (tree_low_cst (TREE_VALUE (elt), 0)
5045 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5046 != (HOST_WIDE_INT) nbits))))
5047 clear_storage (target, expr_size (exp));
5049 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5051 /* Start of range of element or NULL. */
5052 tree startbit = TREE_PURPOSE (elt);
5053 /* End of range of element, or element value. */
5054 tree endbit = TREE_VALUE (elt);
5055 HOST_WIDE_INT startb, endb;
5056 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5058 bitlength_rtx = expand_expr (bitlength,
5059 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5061 /* Handle non-range tuple element like [ expr ]. */
5062 if (startbit == NULL_TREE)
5064 startbit = save_expr (endbit);
5065 endbit = startbit;
5068 startbit = convert (sizetype, startbit);
5069 endbit = convert (sizetype, endbit);
5070 if (! integer_zerop (domain_min))
5072 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5073 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5075 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5076 EXPAND_CONST_ADDRESS);
5077 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5078 EXPAND_CONST_ADDRESS);
5080 if (REG_P (target))
5082 targetx
5083 = assign_temp
5084 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5085 (GET_MODE (target), 0),
5086 TYPE_QUAL_CONST)),
5087 0, 1, 1);
5088 emit_move_insn (targetx, target);
5091 else if (GET_CODE (target) == MEM)
5092 targetx = target;
5093 else
5094 abort ();
5096 /* Optimization: If startbit and endbit are constants divisible
5097 by BITS_PER_UNIT, call memset instead. */
5098 if (TARGET_MEM_FUNCTIONS
5099 && TREE_CODE (startbit) == INTEGER_CST
5100 && TREE_CODE (endbit) == INTEGER_CST
5101 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5102 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5104 emit_library_call (memset_libfunc, LCT_NORMAL,
5105 VOIDmode, 3,
5106 plus_constant (XEXP (targetx, 0),
5107 startb / BITS_PER_UNIT),
5108 Pmode,
5109 constm1_rtx, TYPE_MODE (integer_type_node),
5110 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5111 TYPE_MODE (sizetype));
5113 else
5114 emit_library_call (setbits_libfunc, LCT_NORMAL,
5115 VOIDmode, 4, XEXP (targetx, 0),
5116 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5117 startbit_rtx, TYPE_MODE (sizetype),
5118 endbit_rtx, TYPE_MODE (sizetype));
5120 if (REG_P (target))
5121 emit_move_insn (target, targetx);
5125 else
5126 abort ();
5129 /* Store the value of EXP (an expression tree)
5130 into a subfield of TARGET which has mode MODE and occupies
5131 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5132 If MODE is VOIDmode, it means that we are storing into a bit-field.
5134 If VALUE_MODE is VOIDmode, return nothing in particular.
5135 UNSIGNEDP is not used in this case.
5137 Otherwise, return an rtx for the value stored. This rtx
5138 has mode VALUE_MODE if that is convenient to do.
5139 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5141 TYPE is the type of the underlying object.
5143 ALIAS_SET is the alias set for the destination. This value will
5144 (in general) be different from that for TARGET, since TARGET is a
5145 reference to the containing structure. */
5147 static rtx
5148 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5149 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5150 int unsignedp, tree type, int alias_set)
5152 HOST_WIDE_INT width_mask = 0;
5154 if (TREE_CODE (exp) == ERROR_MARK)
5155 return const0_rtx;
5157 /* If we have nothing to store, do nothing unless the expression has
5158 side-effects. */
5159 if (bitsize == 0)
5160 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5161 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5162 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
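/* E.g. for a 5-bit field, WIDTH_MASK becomes ((HOST_WIDE_INT) 1 << 5) - 1
   == 0x1f; it stays 0 for variable-sized fields (bitsize < 0) and for
   fields of HOST_BITS_PER_WIDE_INT bits or more.  */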
5164 /* If we are storing into an unaligned field of an aligned union that is
5165 in a register, we may have the mode of TARGET being an integer mode but
5166 MODE == BLKmode. In that case, get an aligned object whose size and
5167 alignment are the same as TARGET and store TARGET into it (we can avoid
5168 the store if the field being stored is the entire width of TARGET). Then
5169 call ourselves recursively to store the field into a BLKmode version of
5170 that object. Finally, load from the object into TARGET. This is not
5171 very efficient in general, but should only be slightly more expensive
5172 than the otherwise-required unaligned accesses. Perhaps this can be
5173 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5174 twice, once with emit_move_insn and once via store_field. */
5176 if (mode == BLKmode
5177 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5179 rtx object = assign_temp (type, 0, 1, 1);
5180 rtx blk_object = adjust_address (object, BLKmode, 0);
5182 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5183 emit_move_insn (object, target);
5185 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5186 alias_set);
5188 emit_move_insn (target, object);
5190 /* We want to return the BLKmode version of the data. */
5191 return blk_object;
5194 if (GET_CODE (target) == CONCAT)
5196 /* We're storing into a struct containing a single __complex. */
5198 if (bitpos != 0)
5199 abort ();
5200 return store_expr (exp, target, 0);
5203 /* If the structure is in a register or if the component
5204 is a bit field, we cannot use addressing to access it.
5205 Use bit-field techniques or SUBREG to store in it. */
5207 if (mode == VOIDmode
5208 || (mode != BLKmode && ! direct_store[(int) mode]
5209 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5210 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5211 || GET_CODE (target) == REG
5212 || GET_CODE (target) == SUBREG
5213 /* If the field isn't aligned enough to store as an ordinary memref,
5214 store it as a bit field. */
5215 || (mode != BLKmode
5216 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5217 || bitpos % GET_MODE_ALIGNMENT (mode))
5218 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5219 || (bitpos % BITS_PER_UNIT != 0)))
5220 /* If the RHS and field are a constant size and the size of the
5221 RHS isn't the same size as the bitfield, we must use bitfield
5222 operations. */
5223 || (bitsize >= 0
5224 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5225 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5227 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5229 /* If BITSIZE is narrower than the size of the type of EXP
5230 we will be narrowing TEMP. Normally, what's wanted are the
5231 low-order bits. However, if EXP's type is a record and this is
5232 big-endian machine, we want the upper BITSIZE bits. */
5233 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5234 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5235 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5236 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5237 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5238 - bitsize),
5239 NULL_RTX, 1);
5241 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5242 MODE. */
5243 if (mode != VOIDmode && mode != BLKmode
5244 && mode != TYPE_MODE (TREE_TYPE (exp)))
5245 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5247 /* If the modes of TARGET and TEMP are both BLKmode, both
5248 must be in memory and BITPOS must be aligned on a byte
5249 boundary. If so, we simply do a block copy. */
5250 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5252 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5253 || bitpos % BITS_PER_UNIT != 0)
5254 abort ();
5256 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5257 emit_block_move (target, temp,
5258 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5259 / BITS_PER_UNIT),
5260 BLOCK_OP_NORMAL);
5262 return value_mode == VOIDmode ? const0_rtx : target;
5265 /* Store the value in the bitfield. */
5266 store_bit_field (target, bitsize, bitpos, mode, temp,
5267 int_size_in_bytes (type));
5269 if (value_mode != VOIDmode)
5271 /* The caller wants an rtx for the value.
5272 If possible, avoid refetching from the bitfield itself. */
5273 if (width_mask != 0
5274 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5276 tree count;
5277 enum machine_mode tmode;
5279 tmode = GET_MODE (temp);
5280 if (tmode == VOIDmode)
5281 tmode = value_mode;
5283 if (unsignedp)
5284 return expand_and (tmode, temp,
5285 gen_int_mode (width_mask, tmode),
5286 NULL_RTX);
5288 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5289 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5290 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
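/* For example (illustrative), refetching a 5-bit signed field when TMODE
   is 32 bits wide uses COUNT == 27: shifting left by 27 and then
   arithmetically right by 27 sign-extends the field, whereas the
   unsigned case above just masks with WIDTH_MASK.  */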
5293 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5294 NULL_RTX, value_mode, VOIDmode,
5295 int_size_in_bytes (type));
5297 return const0_rtx;
5299 else
5301 rtx addr = XEXP (target, 0);
5302 rtx to_rtx = target;
5304 /* If a value is wanted, it must be the lhs;
5305 so make the address stable for multiple use. */
5307 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5308 && ! CONSTANT_ADDRESS_P (addr)
5309 /* A frame-pointer reference is already stable. */
5310 && ! (GET_CODE (addr) == PLUS
5311 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5312 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5313 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5314 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5316 /* Now build a reference to just the desired component. */
5318 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5320 if (to_rtx == target)
5321 to_rtx = copy_rtx (to_rtx);
5323 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5324 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5325 set_mem_alias_set (to_rtx, alias_set);
5327 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5331 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5332 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5333 codes and find the ultimate containing object, which we return.
5335 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5336 bit position, and *PUNSIGNEDP to the signedness of the field.
5337 If the position of the field is variable, we store a tree
5338 giving the variable offset (in units) in *POFFSET.
5339 This offset is in addition to the bit position.
5340 If the position is not variable, we store 0 in *POFFSET.
5342 If any of the extraction expressions is volatile,
5343 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5345 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5346 is a mode that can be used to access the field. In that case, *PBITSIZE
5347 is redundant.
5349 If the field describes a variable-sized object, *PMODE is set to
5350 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5351 this case, but the address of the object can be found. */
5353 tree
5354 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5355 HOST_WIDE_INT *pbitpos, tree *poffset,
5356 enum machine_mode *pmode, int *punsignedp,
5357 int *pvolatilep)
5359 tree size_tree = 0;
5360 enum machine_mode mode = VOIDmode;
5361 tree offset = size_zero_node;
5362 tree bit_offset = bitsize_zero_node;
5363 tree placeholder_ptr = 0;
5364 tree tem;
5366 /* First get the mode, signedness, and size. We do this from just the
5367 outermost expression. */
5368 if (TREE_CODE (exp) == COMPONENT_REF)
5370 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5371 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5372 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5374 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5376 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5378 size_tree = TREE_OPERAND (exp, 1);
5379 *punsignedp = TREE_UNSIGNED (exp);
5381 else
5383 mode = TYPE_MODE (TREE_TYPE (exp));
5384 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5386 if (mode == BLKmode)
5387 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5388 else
5389 *pbitsize = GET_MODE_BITSIZE (mode);
5392 if (size_tree != 0)
5394 if (! host_integerp (size_tree, 1))
5395 mode = BLKmode, *pbitsize = -1;
5396 else
5397 *pbitsize = tree_low_cst (size_tree, 1);
5400 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5401 and find the ultimate containing object. */
5402 while (1)
5404 if (TREE_CODE (exp) == BIT_FIELD_REF)
5405 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5406 else if (TREE_CODE (exp) == COMPONENT_REF)
5408 tree field = TREE_OPERAND (exp, 1);
5409 tree this_offset = DECL_FIELD_OFFSET (field);
5411 /* If this field hasn't been filled in yet, don't go
5412 past it. This should only happen when folding expressions
5413 made during type construction. */
5414 if (this_offset == 0)
5415 break;
5416 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5417 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5419 offset = size_binop (PLUS_EXPR, offset, this_offset);
5420 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5421 DECL_FIELD_BIT_OFFSET (field));
5423 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5426 else if (TREE_CODE (exp) == ARRAY_REF
5427 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5429 tree index = TREE_OPERAND (exp, 1);
5430 tree array = TREE_OPERAND (exp, 0);
5431 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5432 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5433 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5435 /* We assume all arrays have sizes that are a multiple of a byte.
5436 First subtract the lower bound, if any, in the type of the
5437 index, then convert to sizetype and multiply by the size of the
5438 array element. */
5439 if (low_bound != 0 && ! integer_zerop (low_bound))
5440 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5441 index, low_bound));
5443 /* If the index has a self-referential type, pass it to a
5444 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5445 component to one. */
5446 if (CONTAINS_PLACEHOLDER_P (index))
5447 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5448 if (CONTAINS_PLACEHOLDER_P (unit_size))
5449 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5451 offset = size_binop (PLUS_EXPR, offset,
5452 size_binop (MULT_EXPR,
5453 convert (sizetype, index),
5454 unit_size));
5457 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5459 tree new = find_placeholder (exp, &placeholder_ptr);
5461 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5462 We might have been called from tree optimization where we
5463 haven't set up an object yet. */
5464 if (new == 0)
5465 break;
5466 else
5467 exp = new;
5469 continue;
5472 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5473 conversions that don't change the mode, and all view conversions
5474 except those that need to "step up" the alignment. */
5475 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5476 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5477 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5478 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5479 && STRICT_ALIGNMENT
5480 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5481 < BIGGEST_ALIGNMENT)
5482 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5483 || TYPE_ALIGN_OK (TREE_TYPE
5484 (TREE_OPERAND (exp, 0))))))
5485 && ! ((TREE_CODE (exp) == NOP_EXPR
5486 || TREE_CODE (exp) == CONVERT_EXPR)
5487 && (TYPE_MODE (TREE_TYPE (exp))
5488 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5489 break;
5491 /* If any reference in the chain is volatile, the effect is volatile. */
5492 if (TREE_THIS_VOLATILE (exp))
5493 *pvolatilep = 1;
5495 exp = TREE_OPERAND (exp, 0);
5498 /* If OFFSET is constant, see if we can return the whole thing as a
5499 constant bit position. Otherwise, split it up. */
5500 if (host_integerp (offset, 0)
5501 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5502 bitsize_unit_node))
5503 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5504 && host_integerp (tem, 0))
5505 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5506 else
5507 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5509 *pmode = mode;
5510 return exp;
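/* Illustrative example, added for exposition and not part of the original
   source: for a reference like S.F, where F is a hypothetical bit-field
   3 bits wide starting 17 bits into S, this function would return the
   tree for S and set *PBITSIZE = 3, *PBITPOS = 17 and *POFFSET = 0,
   with *PUNSIGNEDP taken from the field declaration.  */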
5513 /* Return 1 if T is an expression that get_inner_reference handles. */
5515 int
5516 handled_component_p (tree t)
5518 switch (TREE_CODE (t))
5520 case BIT_FIELD_REF:
5521 case COMPONENT_REF:
5522 case ARRAY_REF:
5523 case ARRAY_RANGE_REF:
5524 case NON_LVALUE_EXPR:
5525 case VIEW_CONVERT_EXPR:
5526 return 1;
5528 /* ??? Sure they are handled, but get_inner_reference may return
5529 a different PBITSIZE, depending upon whether the expression is
5530 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5531 case NOP_EXPR:
5532 case CONVERT_EXPR:
5533 return (TYPE_MODE (TREE_TYPE (t))
5534 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5536 default:
5537 return 0;
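/* Illustrative note, added for exposition: for an expression such as
   a.b[i], the outer COMPONENT_REF and ARRAY_REF are "handled" and this
   function returns 1 for them, while the innermost object (for instance
   a plain VAR_DECL or an INDIRECT_REF) is not handled and gets 0.  */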
5541 /* Given an rtx VALUE that may contain additions and multiplications, return
5542 an equivalent value that just refers to a register, memory, or constant.
5543 This is done by generating instructions to perform the arithmetic and
5544 returning a pseudo-register containing the value.
5546 The returned value may be a REG, SUBREG, MEM or constant. */
5548 rtx
5549 force_operand (rtx value, rtx target)
5551 rtx op1, op2;
5552 /* Use subtarget as the target for operand 0 of a binary operation. */
5553 rtx subtarget = get_subtarget (target);
5554 enum rtx_code code = GET_CODE (value);
5556 /* Check for a PIC address load. */
5557 if ((code == PLUS || code == MINUS)
5558 && XEXP (value, 0) == pic_offset_table_rtx
5559 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5560 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5561 || GET_CODE (XEXP (value, 1)) == CONST))
5563 if (!subtarget)
5564 subtarget = gen_reg_rtx (GET_MODE (value));
5565 emit_move_insn (subtarget, value);
5566 return subtarget;
5569 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5571 if (!target)
5572 target = gen_reg_rtx (GET_MODE (value));
5573 convert_move (target, force_operand (XEXP (value, 0), NULL),
5574 code == ZERO_EXTEND);
5575 return target;
5578 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5580 op2 = XEXP (value, 1);
5581 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5582 subtarget = 0;
5583 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5585 code = PLUS;
5586 op2 = negate_rtx (GET_MODE (value), op2);
5589 /* Check for an addition with OP2 a constant integer and our first
5590 operand a PLUS of a virtual register and something else. In that
5591 case, we want to emit the sum of the virtual register and the
5592 constant first and then add the other value. This allows virtual
5593 register instantiation to simply modify the constant rather than
5594 creating another one around this addition. */
5595 if (code == PLUS && GET_CODE (op2) == CONST_INT
5596 && GET_CODE (XEXP (value, 0)) == PLUS
5597 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5598 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5599 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5601 rtx temp = expand_simple_binop (GET_MODE (value), code,
5602 XEXP (XEXP (value, 0), 0), op2,
5603 subtarget, 0, OPTAB_LIB_WIDEN);
5604 return expand_simple_binop (GET_MODE (value), code, temp,
5605 force_operand (XEXP (XEXP (value,
5606 0), 1), 0),
5607 target, 0, OPTAB_LIB_WIDEN);
5610 op1 = force_operand (XEXP (value, 0), subtarget);
5611 op2 = force_operand (op2, NULL_RTX);
5612 switch (code)
5614 case MULT:
5615 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5616 case DIV:
5617 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5618 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5619 target, 1, OPTAB_LIB_WIDEN);
5620 else
5621 return expand_divmod (0,
5622 FLOAT_MODE_P (GET_MODE (value))
5623 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5624 GET_MODE (value), op1, op2, target, 0);
5625 break;
5626 case MOD:
5627 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5628 target, 0);
5629 break;
5630 case UDIV:
5631 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5632 target, 1);
5633 break;
5634 case UMOD:
5635 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5636 target, 1);
5637 break;
5638 case ASHIFTRT:
5639 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5640 target, 0, OPTAB_LIB_WIDEN);
5641 break;
5642 default:
5643 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5644 target, 1, OPTAB_LIB_WIDEN);
5647 if (GET_RTX_CLASS (code) == '1')
5649 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5650 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5653 #ifdef INSN_SCHEDULING
5654 /* On machines that have insn scheduling, we want all memory references to be
5655 explicit, so we need to deal with such paradoxical SUBREGs. */
5656 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5657 && (GET_MODE_SIZE (GET_MODE (value))
5658 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5659 value
5660 = simplify_gen_subreg (GET_MODE (value),
5661 force_reg (GET_MODE (SUBREG_REG (value)),
5662 force_operand (SUBREG_REG (value),
5663 NULL_RTX)),
5664 GET_MODE (SUBREG_REG (value)),
5665 SUBREG_BYTE (value));
5666 #endif
5668 return value;
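/* Illustrative usage sketch, added for exposition only; BASE_REG below is
   a hypothetical pseudo register and the fragment is not part of this
   file.  A caller holding a composite address can force it into a single
   operand like so:  */
#if 0
  rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (4));
  rtx op = force_operand (addr, NULL_RTX);
  /* OP is now a REG, SUBREG, MEM or constant usable as an insn operand.  */
#endif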
5671 /* Subroutine of expand_expr: return nonzero iff there is no way that
5672 EXP can reference X, which is being modified. TOP_P is nonzero if this
5673 call is going to be used to determine whether we need a temporary
5674 for EXP, as opposed to a recursive call to this function.
5676 It is always safe for this routine to return zero since it merely
5677 searches for optimization opportunities. */
5679 static int
5680 safe_from_p (rtx x, tree exp, int top_p)
5682 rtx exp_rtl = 0;
5683 int i, nops;
5684 static tree save_expr_list;
5686 if (x == 0
5687 /* If EXP has varying size, we MUST use a target since we currently
5688 have no way of allocating temporaries of variable size
5689 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5690 So we assume here that something at a higher level has prevented a
5691 clash. This is somewhat bogus, but the best we can do. Only
5692 do this when X is BLKmode and when we are at the top level. */
5693 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5694 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5695 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5696 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5697 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5698 != INTEGER_CST)
5699 && GET_MODE (x) == BLKmode)
5700 /* If X is in the outgoing argument area, it is always safe. */
5701 || (GET_CODE (x) == MEM
5702 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5703 || (GET_CODE (XEXP (x, 0)) == PLUS
5704 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5705 return 1;
5707 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5708 find the underlying pseudo. */
5709 if (GET_CODE (x) == SUBREG)
5711 x = SUBREG_REG (x);
5712 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5713 return 0;
5716 /* A SAVE_EXPR might appear many times in the expression passed to the
5717 top-level safe_from_p call, and if it has a complex subexpression,
5718 examining it multiple times could result in a combinatorial explosion.
5719 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5720 with optimization took about 28 minutes to compile -- even though it was
5721 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5722 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5723 we have processed. Note that the only test of top_p was above. */
5725 if (top_p)
5727 int rtn;
5728 tree t;
5730 save_expr_list = 0;
5732 rtn = safe_from_p (x, exp, 0);
5734 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5735 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5737 return rtn;
5740 /* Now look at our tree code and possibly recurse. */
5741 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5743 case 'd':
5744 exp_rtl = DECL_RTL_IF_SET (exp);
5745 break;
5747 case 'c':
5748 return 1;
5750 case 'x':
5751 if (TREE_CODE (exp) == TREE_LIST)
5753 while (1)
5755 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5756 return 0;
5757 exp = TREE_CHAIN (exp);
5758 if (!exp)
5759 return 1;
5760 if (TREE_CODE (exp) != TREE_LIST)
5761 return safe_from_p (x, exp, 0);
5764 else if (TREE_CODE (exp) == ERROR_MARK)
5765 return 1; /* An already-visited SAVE_EXPR? */
5766 else
5767 return 0;
5769 case '2':
5770 case '<':
5771 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5772 return 0;
5773 /* FALLTHRU */
5775 case '1':
5776 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5778 case 'e':
5779 case 'r':
5780 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5781 the expression. If it is set, we conflict iff we are that rtx or
5782 both are in memory. Otherwise, we check all operands of the
5783 expression recursively. */
5785 switch (TREE_CODE (exp))
5787 case ADDR_EXPR:
5788 /* If the operand is static or we are static, we can't conflict.
5789 Likewise if we don't conflict with the operand at all. */
5790 if (staticp (TREE_OPERAND (exp, 0))
5791 || TREE_STATIC (exp)
5792 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5793 return 1;
5795 /* Otherwise, the only way this can conflict is if we are taking
5796 the address of a DECL and that address is part of X, which is
5797 very rare. */
5798 exp = TREE_OPERAND (exp, 0);
5799 if (DECL_P (exp))
5801 if (!DECL_RTL_SET_P (exp)
5802 || GET_CODE (DECL_RTL (exp)) != MEM)
5803 return 0;
5804 else
5805 exp_rtl = XEXP (DECL_RTL (exp), 0);
5807 break;
5809 case INDIRECT_REF:
5810 if (GET_CODE (x) == MEM
5811 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5812 get_alias_set (exp)))
5813 return 0;
5814 break;
5816 case CALL_EXPR:
5817 /* Assume that the call will clobber all hard registers and
5818 all of memory. */
5819 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5820 || GET_CODE (x) == MEM)
5821 return 0;
5822 break;
5824 case RTL_EXPR:
5825 /* If a sequence exists, we would have to scan every instruction
5826 in the sequence to see if it was safe. This is probably not
5827 worthwhile. */
5828 if (RTL_EXPR_SEQUENCE (exp))
5829 return 0;
5831 exp_rtl = RTL_EXPR_RTL (exp);
5832 break;
5834 case WITH_CLEANUP_EXPR:
5835 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5836 break;
5838 case CLEANUP_POINT_EXPR:
5839 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5841 case SAVE_EXPR:
5842 exp_rtl = SAVE_EXPR_RTL (exp);
5843 if (exp_rtl)
5844 break;
5846 /* If we've already scanned this, don't do it again. Otherwise,
5847 show we've scanned it and record for clearing the flag if we're
5848 going on. */
5849 if (TREE_PRIVATE (exp))
5850 return 1;
5852 TREE_PRIVATE (exp) = 1;
5853 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5855 TREE_PRIVATE (exp) = 0;
5856 return 0;
5859 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5860 return 1;
5862 case BIND_EXPR:
5863 /* The only operand we look at is operand 1. The rest aren't
5864 part of the expression. */
5865 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5867 default:
5868 break;
5871 /* If we have an rtx, we do not need to scan our operands. */
5872 if (exp_rtl)
5873 break;
5875 nops = first_rtl_op (TREE_CODE (exp));
5876 for (i = 0; i < nops; i++)
5877 if (TREE_OPERAND (exp, i) != 0
5878 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5879 return 0;
5881 /* If this is a language-specific tree code, it may require
5882 special handling. */
5883 if ((unsigned int) TREE_CODE (exp)
5884 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5885 && !(*lang_hooks.safe_from_p) (x, exp))
5886 return 0;
5889 /* If we have an rtl, find any enclosed object. Then see if we conflict
5890 with it. */
5891 if (exp_rtl)
5893 if (GET_CODE (exp_rtl) == SUBREG)
5895 exp_rtl = SUBREG_REG (exp_rtl);
5896 if (GET_CODE (exp_rtl) == REG
5897 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5898 return 0;
5901 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5902 are memory and they conflict. */
5903 return ! (rtx_equal_p (x, exp_rtl)
5904 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5905 && true_dependence (exp_rtl, VOIDmode, x,
5906 rtx_addr_varies_p)));
5909 /* If we reach here, it is safe. */
5910 return 1;
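/* Illustrative note, added for exposition with hypothetical variables:
   if X is the MEM holding a local variable `a' and EXP is the expression
   `b + 1' for some unrelated local `b', safe_from_p typically returns
   nonzero, so EXP may be expanded directly into X; if EXP were `a + 1'
   it would return zero and a separate temporary would be needed.  */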
5913 /* Subroutine of expand_expr: return rtx if EXP is a
5914 variable or parameter; else return 0. */
5916 static rtx
5917 var_rtx (tree exp)
5919 STRIP_NOPS (exp);
5920 switch (TREE_CODE (exp))
5922 case PARM_DECL:
5923 case VAR_DECL:
5924 return DECL_RTL (exp);
5925 default:
5926 return 0;
5930 #ifdef MAX_INTEGER_COMPUTATION_MODE
5932 void
5933 check_max_integer_computation_mode (tree exp)
5935 enum tree_code code;
5936 enum machine_mode mode;
5938 /* Strip any NOPs that don't change the mode. */
5939 STRIP_NOPS (exp);
5940 code = TREE_CODE (exp);
5942 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5943 if (code == NOP_EXPR
5944 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5945 return;
5947 /* First check the type of the overall operation. We need only look at
5948 unary, binary and relational operations. */
5949 if (TREE_CODE_CLASS (code) == '1'
5950 || TREE_CODE_CLASS (code) == '2'
5951 || TREE_CODE_CLASS (code) == '<')
5953 mode = TYPE_MODE (TREE_TYPE (exp));
5954 if (GET_MODE_CLASS (mode) == MODE_INT
5955 && mode > MAX_INTEGER_COMPUTATION_MODE)
5956 internal_error ("unsupported wide integer operation");
5959 /* Check operand of a unary op. */
5960 if (TREE_CODE_CLASS (code) == '1')
5962 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5963 if (GET_MODE_CLASS (mode) == MODE_INT
5964 && mode > MAX_INTEGER_COMPUTATION_MODE)
5965 internal_error ("unsupported wide integer operation");
5968 /* Check operands of a binary/comparison op. */
5969 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5971 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5972 if (GET_MODE_CLASS (mode) == MODE_INT
5973 && mode > MAX_INTEGER_COMPUTATION_MODE)
5974 internal_error ("unsupported wide integer operation");
5976 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5977 if (GET_MODE_CLASS (mode) == MODE_INT
5978 && mode > MAX_INTEGER_COMPUTATION_MODE)
5979 internal_error ("unsupported wide integer operation");
5982 #endif
5984 /* Return the highest power of two that EXP is known to be a multiple of.
5985 This is used in updating alignment of MEMs in array references. */
5987 static unsigned HOST_WIDE_INT
5988 highest_pow2_factor (tree exp)
5990 unsigned HOST_WIDE_INT c0, c1;
5992 switch (TREE_CODE (exp))
5994 case INTEGER_CST:
5995 /* We can find the lowest bit that's a one. If the low
5996 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5997 We need to handle this case since we can find it in a COND_EXPR,
5998 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5999 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6000 later ICE. */
6001 if (TREE_CONSTANT_OVERFLOW (exp))
6002 return BIGGEST_ALIGNMENT;
6003 else
6005 /* Note: tree_low_cst is intentionally not used here;
6006 we don't care about the upper bits. */
6007 c0 = TREE_INT_CST_LOW (exp);
6008 c0 &= -c0;
6009 return c0 ? c0 : BIGGEST_ALIGNMENT;
6011 break;
6013 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6014 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6015 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6016 return MIN (c0, c1);
6018 case MULT_EXPR:
6019 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6020 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6021 return c0 * c1;
6023 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6024 case CEIL_DIV_EXPR:
6025 if (integer_pow2p (TREE_OPERAND (exp, 1))
6026 && host_integerp (TREE_OPERAND (exp, 1), 1))
6028 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6029 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6030 return MAX (1, c0 / c1);
6032 break;
6034 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6035 case SAVE_EXPR: case WITH_RECORD_EXPR:
6036 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6038 case COMPOUND_EXPR:
6039 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6041 case COND_EXPR:
6042 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6043 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6044 return MIN (c0, c1);
6046 default:
6047 break;
6050 return 1;
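/* Worked examples, added for exposition: for the INTEGER_CST 24 the
   result is 8 (the lowest set bit); for (N * 12) + 20, where N is an
   arbitrary expression, the MULT case gives 1 * 4 and the PLUS case
   gives MIN (4, 4), so the result is 4; when nothing is known, the
   default of 1 is returned.  */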
6053 /* Similar, except that it is known that the expression must be a multiple
6054 of the alignment of TYPE. */
6056 static unsigned HOST_WIDE_INT
6057 highest_pow2_factor_for_type (tree type, tree exp)
6059 unsigned HOST_WIDE_INT type_align, factor;
6061 factor = highest_pow2_factor (exp);
6062 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6063 return MAX (factor, type_align);
6066 /* Return an object on the placeholder list that matches EXP, a
6067 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6068 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6069 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6070 is the address of a variable that initially holds a starting point in the
6071 placeholder list (zero means the start of the list) and that is updated
6072 with a pointer to the placeholder-list entry at which the object is found. */
6074 tree
6075 find_placeholder (tree exp, tree *plist)
6077 tree type = TREE_TYPE (exp);
6078 tree placeholder_expr;
6080 for (placeholder_expr
6081 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6082 placeholder_expr != 0;
6083 placeholder_expr = TREE_CHAIN (placeholder_expr))
6085 tree need_type = TYPE_MAIN_VARIANT (type);
6086 tree elt;
6088 /* Find the outermost reference that is of the type we want. If none,
6089 see if any object has a type that is a pointer to the type we
6090 want. */
6091 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6092 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6093 || TREE_CODE (elt) == COND_EXPR)
6094 ? TREE_OPERAND (elt, 1)
6095 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6097 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6098 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6099 ? TREE_OPERAND (elt, 0) : 0))
6100 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6102 if (plist)
6103 *plist = placeholder_expr;
6104 return elt;
6107 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6108 elt
6109 = ((TREE_CODE (elt) == COMPOUND_EXPR
6110 || TREE_CODE (elt) == COND_EXPR)
6111 ? TREE_OPERAND (elt, 1)
6112 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6113 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6114 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6115 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6116 ? TREE_OPERAND (elt, 0) : 0))
6117 if (POINTER_TYPE_P (TREE_TYPE (elt))
6118 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6119 == need_type))
6121 if (plist)
6122 *plist = placeholder_expr;
6123 return build1 (INDIRECT_REF, need_type, elt);
6127 return 0;
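/* Explanatory note, added for exposition: PLACEHOLDER_EXPRs arise in
   self-referential types, e.g. Ada records whose field sizes or
   positions depend on the object itself.  The WITH_RECORD_EXPR case of
   expand_expr below pushes the object onto placeholder_list, and this
   function is what later maps a PLACEHOLDER_EXPR back to that object
   when the size or position expression is finally expanded.  */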
6130 /* Subroutine of expand_expr. Expand the two operands of a binary
6131 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6132 The value may be stored in TARGET if TARGET is nonzero. The
6133 MODIFIER argument is as documented by expand_expr. */
6135 static void
6136 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6137 enum expand_modifier modifier)
6139 if (! safe_from_p (target, exp1, 1))
6140 target = 0;
6141 if (operand_equal_p (exp0, exp1, 0))
6143 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6144 *op1 = copy_rtx (*op0);
6146 else
6148 /* If we need to preserve evaluation order, copy exp0 into its own
6149 temporary variable so that it can't be clobbered by exp1. */
6150 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6151 exp0 = save_expr (exp0);
6152 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6153 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
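/* Illustrative usage sketch, added for exposition; it assumes the local
   variables exp, subtarget, op0 and op1 of a binary-operator case in
   expand_expr below and is not part of this file:  */
#if 0
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   subtarget, &op0, &op1, EXPAND_NORMAL);
#endif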
6158 /* expand_expr: generate code for computing expression EXP.
6159 An rtx for the computed value is returned. The value is never null.
6160 In the case of a void EXP, const0_rtx is returned.
6162 The value may be stored in TARGET if TARGET is nonzero.
6163 TARGET is just a suggestion; callers must assume that
6164 the rtx returned may not be the same as TARGET.
6166 If TARGET is CONST0_RTX, it means that the value will be ignored.
6168 If TMODE is not VOIDmode, it suggests generating the
6169 result in mode TMODE. But this is done only when convenient.
6170 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6171 TMODE is just a suggestion; callers must assume that
6172 the rtx returned may not have mode TMODE.
6174 Note that TARGET may have neither TMODE nor MODE. In that case, it
6175 probably will not be used.
6177 If MODIFIER is EXPAND_SUM then when EXP is an addition
6178 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6179 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6180 products as above, or REG or MEM, or constant.
6181 Ordinarily in such cases we would output mul or add instructions
6182 and then return a pseudo reg containing the sum.
6184 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6185 it also marks a label as absolutely required (it can't be dead).
6186 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6187 This is used for outputting expressions used in initializers.
6189 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6190 with a constant address even if that address is not normally legitimate.
6191 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6193 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6194 a call parameter. Such targets require special care as we haven't yet
6195 marked TARGET so that it's safe from being trashed by libcalls. We
6196 don't want to use TARGET for anything but the final result;
6197 intermediate values must go elsewhere. Additionally, calls to
6198 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
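/* Illustrative caller sketch, added for exposition; INIT_EXPR_TREE below
   is a hypothetical tree node and the fragment is not part of this file:  */
#if 0
  rtx r = expand_expr (init_expr_tree, NULL_RTX, VOIDmode, EXPAND_NORMAL);
#endif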
6200 rtx
6201 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6202 enum expand_modifier modifier)
6204 rtx op0, op1, temp;
6205 tree type = TREE_TYPE (exp);
6206 int unsignedp = TREE_UNSIGNED (type);
6207 enum machine_mode mode;
6208 enum tree_code code = TREE_CODE (exp);
6209 optab this_optab;
6210 rtx subtarget, original_target;
6211 int ignore;
6212 tree context;
6214 /* Handle ERROR_MARK before anybody tries to access its type. */
6215 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6217 op0 = CONST0_RTX (tmode);
6218 if (op0 != 0)
6219 return op0;
6220 return const0_rtx;
6223 mode = TYPE_MODE (type);
6224 /* Use subtarget as the target for operand 0 of a binary operation. */
6225 subtarget = get_subtarget (target);
6226 original_target = target;
6227 ignore = (target == const0_rtx
6228 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6229 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6230 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6231 && TREE_CODE (type) == VOID_TYPE));
6233 /* If we are going to ignore this result, we need only do something
6234 if there is a side-effect somewhere in the expression. If there
6235 is, short-circuit the most common cases here. Note that we must
6236 not call expand_expr with anything but const0_rtx in case this
6237 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6239 if (ignore)
6241 if (! TREE_SIDE_EFFECTS (exp))
6242 return const0_rtx;
6244 /* Ensure we reference a volatile object even if value is ignored, but
6245 don't do this if all we are doing is taking its address. */
6246 if (TREE_THIS_VOLATILE (exp)
6247 && TREE_CODE (exp) != FUNCTION_DECL
6248 && mode != VOIDmode && mode != BLKmode
6249 && modifier != EXPAND_CONST_ADDRESS)
6251 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6252 if (GET_CODE (temp) == MEM)
6253 temp = copy_to_reg (temp);
6254 return const0_rtx;
6257 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6258 || code == INDIRECT_REF || code == BUFFER_REF)
6259 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6260 modifier);
6262 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6263 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6265 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6266 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6267 return const0_rtx;
6269 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6270 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6271 /* If the second operand has no side effects, just evaluate
6272 the first. */
6273 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6274 modifier);
6275 else if (code == BIT_FIELD_REF)
6277 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6278 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6279 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6280 return const0_rtx;
6283 target = 0;
6286 #ifdef MAX_INTEGER_COMPUTATION_MODE
6287 /* Only check stuff here if the mode we want is different from the mode
6288 of the expression; if it's the same, check_max_integer_computation_mode
6289 will handle it. Do we really need to check this stuff at all? */
6291 if (target
6292 && GET_MODE (target) != mode
6293 && TREE_CODE (exp) != INTEGER_CST
6294 && TREE_CODE (exp) != PARM_DECL
6295 && TREE_CODE (exp) != ARRAY_REF
6296 && TREE_CODE (exp) != ARRAY_RANGE_REF
6297 && TREE_CODE (exp) != COMPONENT_REF
6298 && TREE_CODE (exp) != BIT_FIELD_REF
6299 && TREE_CODE (exp) != INDIRECT_REF
6300 && TREE_CODE (exp) != CALL_EXPR
6301 && TREE_CODE (exp) != VAR_DECL
6302 && TREE_CODE (exp) != RTL_EXPR)
6304 enum machine_mode mode = GET_MODE (target);
6306 if (GET_MODE_CLASS (mode) == MODE_INT
6307 && mode > MAX_INTEGER_COMPUTATION_MODE)
6308 internal_error ("unsupported wide integer operation");
6311 if (tmode != mode
6312 && TREE_CODE (exp) != INTEGER_CST
6313 && TREE_CODE (exp) != PARM_DECL
6314 && TREE_CODE (exp) != ARRAY_REF
6315 && TREE_CODE (exp) != ARRAY_RANGE_REF
6316 && TREE_CODE (exp) != COMPONENT_REF
6317 && TREE_CODE (exp) != BIT_FIELD_REF
6318 && TREE_CODE (exp) != INDIRECT_REF
6319 && TREE_CODE (exp) != VAR_DECL
6320 && TREE_CODE (exp) != CALL_EXPR
6321 && TREE_CODE (exp) != RTL_EXPR
6322 && GET_MODE_CLASS (tmode) == MODE_INT
6323 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6324 internal_error ("unsupported wide integer operation");
6326 check_max_integer_computation_mode (exp);
6327 #endif
6329 /* If we will do cse, generate all results into pseudo registers
6330 since 1) that allows cse to find more things
6331 and 2) otherwise cse could produce an insn the machine
6332 cannot support. An exception is a CONSTRUCTOR into a multi-word
6333 MEM: that's much more likely to be most efficient into the MEM.
6334 Another is a CALL_EXPR which must return in memory. */
6336 if (! cse_not_expected && mode != BLKmode && target
6337 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6338 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6339 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6340 target = 0;
6342 switch (code)
6344 case LABEL_DECL:
6346 tree function = decl_function_context (exp);
6347 /* Labels in containing functions, or labels used from initializers,
6348 must be forced. */
6349 if (modifier == EXPAND_INITIALIZER
6350 || (function != current_function_decl
6351 && function != inline_function_decl
6352 && function != 0))
6353 temp = force_label_rtx (exp);
6354 else
6355 temp = label_rtx (exp);
6357 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6358 if (function != current_function_decl
6359 && function != inline_function_decl && function != 0)
6360 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6361 return temp;
6364 case PARM_DECL:
6365 if (!DECL_RTL_SET_P (exp))
6367 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6368 return CONST0_RTX (mode);
6371 /* ... fall through ... */
6373 case VAR_DECL:
6374 /* If a static var's type was incomplete when the decl was written,
6375 but the type is complete now, lay out the decl now. */
6376 if (DECL_SIZE (exp) == 0
6377 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6378 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6379 layout_decl (exp, 0);
6381 /* ... fall through ... */
6383 case FUNCTION_DECL:
6384 case RESULT_DECL:
6385 if (DECL_RTL (exp) == 0)
6386 abort ();
6388 /* Ensure variable marked as used even if it doesn't go through
6389 a parser. If it hasn't been used yet, write out an external
6390 definition. */
6391 if (! TREE_USED (exp))
6393 assemble_external (exp);
6394 TREE_USED (exp) = 1;
6397 /* Show we haven't gotten RTL for this yet. */
6398 temp = 0;
6400 /* Handle variables inherited from containing functions. */
6401 context = decl_function_context (exp);
6403 /* We treat inline_function_decl as an alias for the current function
6404 because that is the inline function whose vars, types, etc.
6405 are being merged into the current function.
6406 See expand_inline_function. */
6408 if (context != 0 && context != current_function_decl
6409 && context != inline_function_decl
6410 /* If var is static, we don't need a static chain to access it. */
6411 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6412 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6414 rtx addr;
6416 /* Mark as non-local and addressable. */
6417 DECL_NONLOCAL (exp) = 1;
6418 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6419 abort ();
6420 (*lang_hooks.mark_addressable) (exp);
6421 if (GET_CODE (DECL_RTL (exp)) != MEM)
6422 abort ();
6423 addr = XEXP (DECL_RTL (exp), 0);
6424 if (GET_CODE (addr) == MEM)
6425 addr
6426 = replace_equiv_address (addr,
6427 fix_lexical_addr (XEXP (addr, 0), exp));
6428 else
6429 addr = fix_lexical_addr (addr, exp);
6431 temp = replace_equiv_address (DECL_RTL (exp), addr);
6434 /* This is the case of an array whose size is to be determined
6435 from its initializer, while the initializer is still being parsed.
6436 See expand_decl. */
6438 else if (GET_CODE (DECL_RTL (exp)) == MEM
6439 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6440 temp = validize_mem (DECL_RTL (exp));
6442 /* If DECL_RTL is memory, we are in the normal case; if either
6443 the address is not valid, or it is not a register and -fforce-addr
6444 is specified, get the address into a register. */
6446 else if (GET_CODE (DECL_RTL (exp)) == MEM
6447 && modifier != EXPAND_CONST_ADDRESS
6448 && modifier != EXPAND_SUM
6449 && modifier != EXPAND_INITIALIZER
6450 && (! memory_address_p (DECL_MODE (exp),
6451 XEXP (DECL_RTL (exp), 0))
6452 || (flag_force_addr
6453 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6454 temp = replace_equiv_address (DECL_RTL (exp),
6455 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6457 /* If we got something, return it. But first, set the alignment
6458 if the address is a register. */
6459 if (temp != 0)
6461 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6462 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6464 return temp;
6467 /* If the mode of DECL_RTL does not match that of the decl, it
6468 must be a promoted value. We return a SUBREG of the wanted mode,
6469 but mark it so that we know that it was already extended. */
6471 if (GET_CODE (DECL_RTL (exp)) == REG
6472 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6474 /* Get the signedness used for this variable. Ensure we get the
6475 same mode we got when the variable was declared. */
6476 if (GET_MODE (DECL_RTL (exp))
6477 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6478 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6479 abort ();
6481 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6482 SUBREG_PROMOTED_VAR_P (temp) = 1;
6483 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6484 return temp;
6487 return DECL_RTL (exp);
6489 case INTEGER_CST:
6490 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6491 TREE_INT_CST_HIGH (exp), mode);
6493 /* ??? If overflow is set, fold will have done an incomplete job,
6494 which can result in (plus xx (const_int 0)), which can get
6495 simplified by validate_replace_rtx during virtual register
6496 instantiation, which can result in unrecognizable insns.
6497 Avoid this by forcing all overflows into registers. */
6498 if (TREE_CONSTANT_OVERFLOW (exp)
6499 && modifier != EXPAND_INITIALIZER)
6500 temp = force_reg (mode, temp);
6502 return temp;
6504 case VECTOR_CST:
6505 return const_vector_from_tree (exp);
6507 case CONST_DECL:
6508 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6510 case REAL_CST:
6511 /* If optimized, generate immediate CONST_DOUBLE
6512 which will be turned into memory by reload if necessary.
6514 We used to force a register so that loop.c could see it. But
6515 this does not allow gen_* patterns to perform optimizations with
6516 the constants. It also produces two insns in cases like "x = 1.0;".
6517 On most machines, floating-point constants are not permitted in
6518 many insns, so we'd end up copying it to a register in any case.
6520 Now, we do the copying in expand_binop, if appropriate. */
6521 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6522 TYPE_MODE (TREE_TYPE (exp)));
6524 case COMPLEX_CST:
6525 /* Handle evaluating a complex constant in a CONCAT target. */
6526 if (original_target && GET_CODE (original_target) == CONCAT)
6528 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6529 rtx rtarg, itarg;
6531 rtarg = XEXP (original_target, 0);
6532 itarg = XEXP (original_target, 1);
6534 /* Move the real and imaginary parts separately. */
6535 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6536 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6538 if (op0 != rtarg)
6539 emit_move_insn (rtarg, op0);
6540 if (op1 != itarg)
6541 emit_move_insn (itarg, op1);
6543 return original_target;
6546 /* ... fall through ... */
6548 case STRING_CST:
6549 temp = output_constant_def (exp, 1);
6551 /* temp contains a constant address.
6552 On RISC machines where a constant address isn't valid,
6553 make some insns to get that address into a register. */
6554 if (modifier != EXPAND_CONST_ADDRESS
6555 && modifier != EXPAND_INITIALIZER
6556 && modifier != EXPAND_SUM
6557 && (! memory_address_p (mode, XEXP (temp, 0))
6558 || flag_force_addr))
6559 return replace_equiv_address (temp,
6560 copy_rtx (XEXP (temp, 0)));
6561 return temp;
6563 case EXPR_WITH_FILE_LOCATION:
6565 rtx to_return;
6566 struct file_stack fs;
6568 fs.location = input_location;
6569 fs.next = expr_wfl_stack;
6570 input_filename = EXPR_WFL_FILENAME (exp);
6571 input_line = EXPR_WFL_LINENO (exp);
6572 expr_wfl_stack = &fs;
6573 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6574 emit_line_note (input_location);
6575 /* Possibly avoid switching back and forth here. */
6576 to_return = expand_expr (EXPR_WFL_NODE (exp),
6577 (ignore ? const0_rtx : target),
6578 tmode, modifier);
6579 if (expr_wfl_stack != &fs)
6580 abort ();
6581 input_location = fs.location;
6582 expr_wfl_stack = fs.next;
6583 return to_return;
6586 case SAVE_EXPR:
6587 context = decl_function_context (exp);
6589 /* If this SAVE_EXPR was at global context, assume we are an
6590 initialization function and move it into our context. */
6591 if (context == 0)
6592 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6594 /* We treat inline_function_decl as an alias for the current function
6595 because that is the inline function whose vars, types, etc.
6596 are being merged into the current function.
6597 See expand_inline_function. */
6598 if (context == current_function_decl || context == inline_function_decl)
6599 context = 0;
6601 /* If this is non-local, handle it. */
6602 if (context)
6604 /* The following call just exists to abort if the context is
6605 not of a containing function. */
6606 find_function_data (context);
6608 temp = SAVE_EXPR_RTL (exp);
6609 if (temp && GET_CODE (temp) == REG)
6611 put_var_into_stack (exp, /*rescan=*/true);
6612 temp = SAVE_EXPR_RTL (exp);
6614 if (temp == 0 || GET_CODE (temp) != MEM)
6615 abort ();
6616 return
6617 replace_equiv_address (temp,
6618 fix_lexical_addr (XEXP (temp, 0), exp));
6620 if (SAVE_EXPR_RTL (exp) == 0)
6622 if (mode == VOIDmode)
6623 temp = const0_rtx;
6624 else
6625 temp = assign_temp (build_qualified_type (type,
6626 (TYPE_QUALS (type)
6627 | TYPE_QUAL_CONST)),
6628 3, 0, 0);
6630 SAVE_EXPR_RTL (exp) = temp;
6631 if (!optimize && GET_CODE (temp) == REG)
6632 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6633 save_expr_regs);
6635 /* If the mode of TEMP does not match that of the expression, it
6636 must be a promoted value. We pass store_expr a SUBREG of the
6637 wanted mode but mark it so that we know that it was already
6638 extended. */
6640 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6642 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6643 promote_mode (type, mode, &unsignedp, 0);
6644 SUBREG_PROMOTED_VAR_P (temp) = 1;
6645 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6648 if (temp == const0_rtx)
6649 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6650 else
6651 store_expr (TREE_OPERAND (exp, 0), temp,
6652 modifier == EXPAND_STACK_PARM ? 2 : 0);
6654 TREE_USED (exp) = 1;
6657 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6658 must be a promoted value. We return a SUBREG of the wanted mode,
6659 but mark it so that we know that it was already extended. */
6661 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6662 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6664 /* Compute the signedness and make the proper SUBREG. */
6665 promote_mode (type, mode, &unsignedp, 0);
6666 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6667 SUBREG_PROMOTED_VAR_P (temp) = 1;
6668 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6669 return temp;
6672 return SAVE_EXPR_RTL (exp);
6674 case UNSAVE_EXPR:
6676 rtx temp;
6677 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6678 TREE_OPERAND (exp, 0)
6679 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6680 return temp;
6683 case PLACEHOLDER_EXPR:
6685 tree old_list = placeholder_list;
6686 tree placeholder_expr = 0;
6688 exp = find_placeholder (exp, &placeholder_expr);
6689 if (exp == 0)
6690 abort ();
6692 placeholder_list = TREE_CHAIN (placeholder_expr);
6693 temp = expand_expr (exp, original_target, tmode, modifier);
6694 placeholder_list = old_list;
6695 return temp;
6698 case WITH_RECORD_EXPR:
6699 /* Put the object on the placeholder list, expand our first operand,
6700 and pop the list. */
6701 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6702 placeholder_list);
6703 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6704 modifier);
6705 placeholder_list = TREE_CHAIN (placeholder_list);
6706 return target;
6708 case GOTO_EXPR:
6709 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6710 expand_goto (TREE_OPERAND (exp, 0));
6711 else
6712 expand_computed_goto (TREE_OPERAND (exp, 0));
6713 return const0_rtx;
6715 case EXIT_EXPR:
6716 expand_exit_loop_if_false (NULL,
6717 invert_truthvalue (TREE_OPERAND (exp, 0)));
6718 return const0_rtx;
6720 case LABELED_BLOCK_EXPR:
6721 if (LABELED_BLOCK_BODY (exp))
6722 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6723 /* Should perhaps use expand_label, but this is simpler and safer. */
6724 do_pending_stack_adjust ();
6725 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6726 return const0_rtx;
6728 case EXIT_BLOCK_EXPR:
6729 if (EXIT_BLOCK_RETURN (exp))
6730 sorry ("returned value in block_exit_expr");
6731 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6732 return const0_rtx;
6734 case LOOP_EXPR:
6735 push_temp_slots ();
6736 expand_start_loop (1);
6737 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6738 expand_end_loop ();
6739 pop_temp_slots ();
6741 return const0_rtx;
6743 case BIND_EXPR:
6745 tree vars = TREE_OPERAND (exp, 0);
6747 /* Need to open a binding contour here because
6748 if there are any cleanups they must be contained here. */
6749 expand_start_bindings (2);
6751 /* Mark the corresponding BLOCK for output in its proper place. */
6752 if (TREE_OPERAND (exp, 2) != 0
6753 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6754 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6756 /* If VARS have not yet been expanded, expand them now. */
6757 while (vars)
6759 if (!DECL_RTL_SET_P (vars))
6760 expand_decl (vars);
6761 expand_decl_init (vars);
6762 vars = TREE_CHAIN (vars);
6765 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6767 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6769 return temp;
6772 case RTL_EXPR:
6773 if (RTL_EXPR_SEQUENCE (exp))
6775 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6776 abort ();
6777 emit_insn (RTL_EXPR_SEQUENCE (exp));
6778 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6780 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6781 free_temps_for_rtl_expr (exp);
6782 return RTL_EXPR_RTL (exp);
6784 case CONSTRUCTOR:
6785 /* If we don't need the result, just ensure we evaluate any
6786 subexpressions. */
6787 if (ignore)
6789 tree elt;
6791 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6792 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6794 return const0_rtx;
6797 /* All elts simple constants => refer to a constant in memory. But
6798 if this is a non-BLKmode mode, let it store a field at a time
6799 since that should make a CONST_INT or CONST_DOUBLE when we
6800 fold. Likewise, if we have a target we can use, it is best to
6801 store directly into the target unless the type is large enough
6802 that memcpy will be used. If we are making an initializer and
6803 all operands are constant, put it in memory as well.
6805 FIXME: Avoid trying to fill vector constructors piece-meal.
6806 Output them with output_constant_def below unless we're sure
6807 they're zeros. This should go away when vector initializers
6808 are treated like VECTOR_CST instead of arrays.
6809 */
6810 else if ((TREE_STATIC (exp)
6811 && ((mode == BLKmode
6812 && ! (target != 0 && safe_from_p (target, exp, 1)))
6813 || TREE_ADDRESSABLE (exp)
6814 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6815 && (! MOVE_BY_PIECES_P
6816 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6817 TYPE_ALIGN (type)))
6818 && ((TREE_CODE (type) == VECTOR_TYPE
6819 && !is_zeros_p (exp))
6820 || ! mostly_zeros_p (exp)))))
6821 || ((modifier == EXPAND_INITIALIZER
6822 || modifier == EXPAND_CONST_ADDRESS)
6823 && TREE_CONSTANT (exp)))
6825 rtx constructor = output_constant_def (exp, 1);
6827 if (modifier != EXPAND_CONST_ADDRESS
6828 && modifier != EXPAND_INITIALIZER
6829 && modifier != EXPAND_SUM)
6830 constructor = validize_mem (constructor);
6832 return constructor;
6834 else
6836 /* Handle calls that pass values in multiple non-contiguous
6837 locations. The Irix 6 ABI has examples of this. */
6838 if (target == 0 || ! safe_from_p (target, exp, 1)
6839 || GET_CODE (target) == PARALLEL
6840 || modifier == EXPAND_STACK_PARM)
6841 target
6842 = assign_temp (build_qualified_type (type,
6843 (TYPE_QUALS (type)
6844 | (TREE_READONLY (exp)
6845 * TYPE_QUAL_CONST))),
6846 0, TREE_ADDRESSABLE (exp), 1);
6848 store_constructor (exp, target, 0, int_expr_size (exp));
6849 return target;
6852 case INDIRECT_REF:
6854 tree exp1 = TREE_OPERAND (exp, 0);
6855 tree index;
6856 tree string = string_constant (exp1, &index);
6858 /* Try to optimize reads from const strings. */
6859 if (string
6860 && TREE_CODE (string) == STRING_CST
6861 && TREE_CODE (index) == INTEGER_CST
6862 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6863 && GET_MODE_CLASS (mode) == MODE_INT
6864 && GET_MODE_SIZE (mode) == 1
6865 && modifier != EXPAND_WRITE)
6866 return gen_int_mode (TREE_STRING_POINTER (string)
6867 [TREE_INT_CST_LOW (index)], mode);
6869 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6870 op0 = memory_address (mode, op0);
6871 temp = gen_rtx_MEM (mode, op0);
6872 set_mem_attributes (temp, exp, 0);
6874 /* If we are writing to this object and its type is a record with
6875 readonly fields, we must mark it as readonly so it will
6876 conflict with readonly references to those fields. */
6877 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6878 RTX_UNCHANGING_P (temp) = 1;
6880 return temp;
6883 case ARRAY_REF:
6884 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6885 abort ();
6888 tree array = TREE_OPERAND (exp, 0);
6889 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6890 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6891 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6892 HOST_WIDE_INT i;
6894 /* Optimize the special-case of a zero lower bound.
6896 We convert the low_bound to sizetype to avoid some problems
6897 with constant folding. (E.g. suppose the lower bound is 1,
6898 and its mode is QI. Without the conversion, (ARRAY
6899 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6900 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6902 if (! integer_zerop (low_bound))
6903 index = size_diffop (index, convert (sizetype, low_bound));
6905 /* Fold an expression like: "foo"[2].
6906 This is not done in fold so it won't happen inside &.
6907 Don't fold if this is for wide characters since it's too
6908 difficult to do correctly and this is a very rare case. */
6910 if (modifier != EXPAND_CONST_ADDRESS
6911 && modifier != EXPAND_INITIALIZER
6912 && modifier != EXPAND_MEMORY
6913 && TREE_CODE (array) == STRING_CST
6914 && TREE_CODE (index) == INTEGER_CST
6915 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6916 && GET_MODE_CLASS (mode) == MODE_INT
6917 && GET_MODE_SIZE (mode) == 1)
6918 return gen_int_mode (TREE_STRING_POINTER (array)
6919 [TREE_INT_CST_LOW (index)], mode);
6921 /* If this is a constant index into a constant array,
6922 just get the value from the array. Handle both the cases when
6923 we have an explicit constructor and when our operand is a variable
6924 that was declared const. */
6926 if (modifier != EXPAND_CONST_ADDRESS
6927 && modifier != EXPAND_INITIALIZER
6928 && modifier != EXPAND_MEMORY
6929 && TREE_CODE (array) == CONSTRUCTOR
6930 && ! TREE_SIDE_EFFECTS (array)
6931 && TREE_CODE (index) == INTEGER_CST
6932 && 0 > compare_tree_int (index,
6933 list_length (CONSTRUCTOR_ELTS
6934 (TREE_OPERAND (exp, 0)))))
6936 tree elem;
6938 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6939 i = TREE_INT_CST_LOW (index);
6940 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6943 if (elem)
6944 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6945 modifier);
6948 else if (optimize >= 1
6949 && modifier != EXPAND_CONST_ADDRESS
6950 && modifier != EXPAND_INITIALIZER
6951 && modifier != EXPAND_MEMORY
6952 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6953 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6954 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6956 if (TREE_CODE (index) == INTEGER_CST)
6958 tree init = DECL_INITIAL (array);
6960 if (TREE_CODE (init) == CONSTRUCTOR)
6962 tree elem;
6964 for (elem = CONSTRUCTOR_ELTS (init);
6965 (elem
6966 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6967 elem = TREE_CHAIN (elem))
6970 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6971 return expand_expr (fold (TREE_VALUE (elem)), target,
6972 tmode, modifier);
6974 else if (TREE_CODE (init) == STRING_CST
6975 && 0 > compare_tree_int (index,
6976 TREE_STRING_LENGTH (init)))
6978 tree type = TREE_TYPE (TREE_TYPE (init));
6979 enum machine_mode mode = TYPE_MODE (type);
6981 if (GET_MODE_CLASS (mode) == MODE_INT
6982 && GET_MODE_SIZE (mode) == 1)
6983 return gen_int_mode (TREE_STRING_POINTER (init)
6984 [TREE_INT_CST_LOW (index)], mode);
6989 goto normal_inner_ref;
6991 case COMPONENT_REF:
6992 /* If the operand is a CONSTRUCTOR, we can just extract the
6993 appropriate field if it is present. */
6994 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6996 tree elt;
6998 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6999 elt = TREE_CHAIN (elt))
7000 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7001 /* We can normally use the value of the field in the
7002 CONSTRUCTOR. However, if this is a bitfield in
7003 an integral mode that we can fit in a HOST_WIDE_INT,
7004 we must mask only the number of bits in the bitfield,
7005 since this is done implicitly by the constructor. If
7006 the bitfield does not meet either of those conditions,
7007 we can't do this optimization. */
7008 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7009 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7010 == MODE_INT)
7011 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7012 <= HOST_BITS_PER_WIDE_INT))))
7014 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7015 && modifier == EXPAND_STACK_PARM)
7016 target = 0;
7017 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7018 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7020 HOST_WIDE_INT bitsize
7021 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7022 enum machine_mode imode
7023 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7025 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7027 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7028 op0 = expand_and (imode, op0, op1, target);
7030 else
7032 tree count
7033 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7034 0);
7036 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7037 target, 0);
7038 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7039 target, 0);
7043 return op0;
7046 goto normal_inner_ref;
7048 case BIT_FIELD_REF:
7049 case ARRAY_RANGE_REF:
7050 normal_inner_ref:
7052 enum machine_mode mode1;
7053 HOST_WIDE_INT bitsize, bitpos;
7054 tree offset;
7055 int volatilep = 0;
7056 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7057 &mode1, &unsignedp, &volatilep);
7058 rtx orig_op0;
7060 /* If we got back the original object, something is wrong. Perhaps
7061 we are evaluating an expression too early. In any event, don't
7062 infinitely recurse. */
7063 if (tem == exp)
7064 abort ();
7066 /* If TEM's type is a union of variable size, pass TARGET to the inner
7067 computation, since it will need a temporary and TARGET is known
7068 to suffice. This occurs in unchecked conversion in Ada. */
7070 orig_op0 = op0
7071 = expand_expr (tem,
7072 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7073 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7074 != INTEGER_CST)
7075 && modifier != EXPAND_STACK_PARM
7076 ? target : NULL_RTX),
7077 VOIDmode,
7078 (modifier == EXPAND_INITIALIZER
7079 || modifier == EXPAND_CONST_ADDRESS
7080 || modifier == EXPAND_STACK_PARM)
7081 ? modifier : EXPAND_NORMAL);
7083 /* If this is a constant, put it into a register if it is a
7084 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7085 if (CONSTANT_P (op0))
7087 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7088 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7089 && offset == 0)
7090 op0 = force_reg (mode, op0);
7091 else
7092 op0 = validize_mem (force_const_mem (mode, op0));
7095 /* Otherwise, if this object is not in memory and we either have an
7096 offset or a BLKmode result, put it there. This case can't occur in
7097 C, but can in Ada if we have unchecked conversion of an expression
7098 from a scalar type to an array or record type or for an
7099 ARRAY_RANGE_REF whose type is BLKmode. */
7100 else if (GET_CODE (op0) != MEM
7101 && (offset != 0
7102 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7104 /* If the operand is a SAVE_EXPR, we can deal with this by
7105 forcing the SAVE_EXPR into memory. */
7106 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7108 put_var_into_stack (TREE_OPERAND (exp, 0),
7109 /*rescan=*/true);
7110 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7112 else
7114 tree nt
7115 = build_qualified_type (TREE_TYPE (tem),
7116 (TYPE_QUALS (TREE_TYPE (tem))
7117 | TYPE_QUAL_CONST));
7118 rtx memloc = assign_temp (nt, 1, 1, 1);
7120 emit_move_insn (memloc, op0);
7121 op0 = memloc;
7125 if (offset != 0)
7127 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7128 EXPAND_SUM);
7130 if (GET_CODE (op0) != MEM)
7131 abort ();
7133 #ifdef POINTERS_EXTEND_UNSIGNED
7134 if (GET_MODE (offset_rtx) != Pmode)
7135 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7136 #else
7137 if (GET_MODE (offset_rtx) != ptr_mode)
7138 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7139 #endif
7141 /* A constant address in OP0 can have VOIDmode; we must not try
7142 to call force_reg in that case, so avoid it. */
7143 if (GET_CODE (op0) == MEM
7144 && GET_MODE (op0) == BLKmode
7145 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7146 && bitsize != 0
7147 && (bitpos % bitsize) == 0
7148 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7149 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7151 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7152 bitpos = 0;
7155 op0 = offset_address (op0, offset_rtx,
7156 highest_pow2_factor (offset));
7159 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7160 record its alignment as BIGGEST_ALIGNMENT. */
7161 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7162 && is_aligning_offset (offset, tem))
7163 set_mem_align (op0, BIGGEST_ALIGNMENT);
7165 /* Don't forget about volatility even if this is a bitfield. */
7166 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7168 if (op0 == orig_op0)
7169 op0 = copy_rtx (op0);
7171 MEM_VOLATILE_P (op0) = 1;
7174 /* The following code doesn't handle CONCAT.
7175 Assume only bitpos == 0 can be used for CONCAT, due to
7176 one-element arrays having the same mode as their element. */
7177 if (GET_CODE (op0) == CONCAT)
7179 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7180 abort ();
7181 return op0;
7184 /* In cases where an aligned union has an unaligned object
7185 as a field, we might be extracting a BLKmode value from
7186 an integer-mode (e.g., SImode) object. Handle this case
7187 by doing the extract into an object as wide as the field
7188 (which we know to be the width of a basic mode), then
7189 storing into memory, and changing the mode to BLKmode. */
7190 if (mode1 == VOIDmode
7191 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7192 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7193 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7194 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7195 && modifier != EXPAND_CONST_ADDRESS
7196 && modifier != EXPAND_INITIALIZER)
7197 /* If the field isn't aligned enough to fetch as a memref,
7198 fetch it as a bit field. */
7199 || (mode1 != BLKmode
7200 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7201 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7202 && ((modifier == EXPAND_CONST_ADDRESS
7203 || modifier == EXPAND_INITIALIZER)
7204 ? STRICT_ALIGNMENT
7205 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7206 || (bitpos % BITS_PER_UNIT != 0)))
7207 /* If the type and the field have a constant size and the
7208 size of the type isn't the same as that of the bitfield,
7209 we must use bitfield operations. */
7210 || (bitsize >= 0
7211 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7212 == INTEGER_CST)
7213 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7214 bitsize)))
7216 enum machine_mode ext_mode = mode;
7218 if (ext_mode == BLKmode
7219 && ! (target != 0 && GET_CODE (op0) == MEM
7220 && GET_CODE (target) == MEM
7221 && bitpos % BITS_PER_UNIT == 0))
7222 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7224 if (ext_mode == BLKmode)
7226 if (target == 0)
7227 target = assign_temp (type, 0, 1, 1);
7229 if (bitsize == 0)
7230 return target;
7232 /* In this case, BITPOS must start at a byte boundary and
7233 TARGET, if specified, must be a MEM. */
7234 if (GET_CODE (op0) != MEM
7235 || (target != 0 && GET_CODE (target) != MEM)
7236 || bitpos % BITS_PER_UNIT != 0)
7237 abort ();
7239 emit_block_move (target,
7240 adjust_address (op0, VOIDmode,
7241 bitpos / BITS_PER_UNIT),
7242 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7243 / BITS_PER_UNIT),
7244 (modifier == EXPAND_STACK_PARM
7245 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7247 return target;
7250 op0 = validize_mem (op0);
7252 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7253 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7255 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7256 (modifier == EXPAND_STACK_PARM
7257 ? NULL_RTX : target),
7258 ext_mode, ext_mode,
7259 int_size_in_bytes (TREE_TYPE (tem)));
7261 /* If the result is a record type and BITSIZE is narrower than
7262 the mode of OP0, an integral mode, and this is a big endian
7263 machine, we must put the field into the high-order bits. */
7264 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7265 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7266 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7267 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7268 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7269 - bitsize),
7270 op0, 1);
7272 if (mode == BLKmode)
7274 rtx new = assign_temp (build_qualified_type
7275 ((*lang_hooks.types.type_for_mode)
7276 (ext_mode, 0),
7277 TYPE_QUAL_CONST), 0, 1, 1);
7279 emit_move_insn (new, op0);
7280 op0 = copy_rtx (new);
7281 PUT_MODE (op0, BLKmode);
7282 set_mem_attributes (op0, exp, 1);
7285 return op0;
7288 /* If the result is BLKmode, use that to access the object
7289 now as well. */
7290 if (mode == BLKmode)
7291 mode1 = BLKmode;
7293 /* Get a reference to just this component. */
7294 if (modifier == EXPAND_CONST_ADDRESS
7295 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7296 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7297 else
7298 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7300 if (op0 == orig_op0)
7301 op0 = copy_rtx (op0);
7303 set_mem_attributes (op0, exp, 0);
7304 if (GET_CODE (XEXP (op0, 0)) == REG)
7305 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7307 MEM_VOLATILE_P (op0) |= volatilep;
7308 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7309 || modifier == EXPAND_CONST_ADDRESS
7310 || modifier == EXPAND_INITIALIZER)
7311 return op0;
7312 else if (target == 0)
7313 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7315 convert_move (target, op0, unsignedp);
7316 return target;
7319 case VTABLE_REF:
7321 rtx insn, before = get_last_insn (), vtbl_ref;
7323 /* Evaluate the interior expression. */
7324 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7325 tmode, modifier);
7327 /* Get or create an instruction off which to hang a note. */
7328 if (REG_P (subtarget))
7330 target = subtarget;
7331 insn = get_last_insn ();
7332 if (insn == before)
7333 abort ();
7334 if (! INSN_P (insn))
7335 insn = prev_nonnote_insn (insn);
7337 else
7339 target = gen_reg_rtx (GET_MODE (subtarget));
7340 insn = emit_move_insn (target, subtarget);
7343 /* Collect the data for the note. */
7344 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7345 vtbl_ref = plus_constant (vtbl_ref,
7346 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7347 /* Discard the initial CONST that was added. */
7348 vtbl_ref = XEXP (vtbl_ref, 0);
7350 REG_NOTES (insn)
7351 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7353 return target;
7356 /* Intended for a reference to a buffer of a file-object in Pascal.
7357 But it's not certain that a special tree code will really be
7358 necessary for these. INDIRECT_REF might work for them. */
7359 case BUFFER_REF:
7360 abort ();
7362 case IN_EXPR:
7364 /* Pascal set IN expression.
7366 Algorithm:
7367 rlo = set_low - (set_low%bits_per_word);
7368 the_word = set [ (index - rlo)/bits_per_word ];
7369 bit_index = index % bits_per_word;
7370 bitmask = 1 << bit_index;
7371 return !!(the_word & bitmask); */
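/* A purely illustrative walk-through (numbers assumed): with
   bits_per_word == 8, set_low == 3 and index == 13, we get
   rlo = 3 - 3%8 = 0, the_word = set[(13 - 0)/8] = set[1],
   bit_index = 13%8 = 5 and bitmask = 1 << 5, so the test reads
   bit 5 of the second byte of the set. */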
7373 tree set = TREE_OPERAND (exp, 0);
7374 tree index = TREE_OPERAND (exp, 1);
7375 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7376 tree set_type = TREE_TYPE (set);
7377 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7378 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7379 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7380 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7381 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7382 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7383 rtx setaddr = XEXP (setval, 0);
7384 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7385 rtx rlow;
7386 rtx diff, quo, rem, addr, bit, result;
7388 /* If domain is empty, answer is no. Likewise if index is constant
7389 and out of bounds. */
7390 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7391 && TREE_CODE (set_low_bound) == INTEGER_CST
7392 && tree_int_cst_lt (set_high_bound, set_low_bound))
7393 || (TREE_CODE (index) == INTEGER_CST
7394 && TREE_CODE (set_low_bound) == INTEGER_CST
7395 && tree_int_cst_lt (index, set_low_bound))
7396 || (TREE_CODE (set_high_bound) == INTEGER_CST
7397 && TREE_CODE (index) == INTEGER_CST
7398 && tree_int_cst_lt (set_high_bound, index))))
7399 return const0_rtx;
7401 if (target == 0)
7402 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7404 /* If we get here, we have to generate the code for both cases
7405 (in range and out of range). */
7407 op0 = gen_label_rtx ();
7408 op1 = gen_label_rtx ();
7410 if (! (GET_CODE (index_val) == CONST_INT
7411 && GET_CODE (lo_r) == CONST_INT))
7412 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7413 GET_MODE (index_val), iunsignedp, op1);
7415 if (! (GET_CODE (index_val) == CONST_INT
7416 && GET_CODE (hi_r) == CONST_INT))
7417 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7418 GET_MODE (index_val), iunsignedp, op1);
7420 /* Calculate the element number of bit zero in the first word
7421 of the set. */
7422 if (GET_CODE (lo_r) == CONST_INT)
7423 rlow = GEN_INT (INTVAL (lo_r)
7424 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7425 else
7426 rlow = expand_binop (index_mode, and_optab, lo_r,
7427 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7428 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7430 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7431 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7433 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7434 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7435 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7436 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7438 addr = memory_address (byte_mode,
7439 expand_binop (index_mode, add_optab, diff,
7440 setaddr, NULL_RTX, iunsignedp,
7441 OPTAB_LIB_WIDEN));
7443 /* Extract the bit we want to examine. */
7444 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7445 gen_rtx_MEM (byte_mode, addr),
7446 make_tree (TREE_TYPE (index), rem),
7447 NULL_RTX, 1);
7448 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7449 GET_MODE (target) == byte_mode ? target : 0,
7450 1, OPTAB_LIB_WIDEN);
7452 if (result != target)
7453 convert_move (target, result, 1);
7455 /* Output the code to handle the out-of-range case. */
7456 emit_jump (op0);
7457 emit_label (op1);
7458 emit_move_insn (target, const0_rtx);
7459 emit_label (op0);
7460 return target;
7463 case WITH_CLEANUP_EXPR:
7464 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7466 WITH_CLEANUP_EXPR_RTL (exp)
7467 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7468 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7469 CLEANUP_EH_ONLY (exp));
7471 /* That's it for this cleanup. */
7472 TREE_OPERAND (exp, 1) = 0;
7474 return WITH_CLEANUP_EXPR_RTL (exp);
7476 case CLEANUP_POINT_EXPR:
7478 /* Start a new binding layer that will keep track of all cleanup
7479 actions to be performed. */
7480 expand_start_bindings (2);
7482 target_temp_slot_level = temp_slot_level;
7484 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7485 /* If we're going to use this value, load it up now. */
7486 if (! ignore)
7487 op0 = force_not_mem (op0);
7488 preserve_temp_slots (op0);
7489 expand_end_bindings (NULL_TREE, 0, 0);
7491 return op0;
7493 case CALL_EXPR:
7494 /* Check for a built-in function. */
7495 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7496 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7497 == FUNCTION_DECL)
7498 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7500 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7501 == BUILT_IN_FRONTEND)
7502 return (*lang_hooks.expand_expr) (exp, original_target,
7503 tmode, modifier);
7504 else
7505 return expand_builtin (exp, target, subtarget, tmode, ignore);
7508 return expand_call (exp, target, ignore);
7510 case NON_LVALUE_EXPR:
7511 case NOP_EXPR:
7512 case CONVERT_EXPR:
7513 case REFERENCE_EXPR:
7514 if (TREE_OPERAND (exp, 0) == error_mark_node)
7515 return const0_rtx;
7517 if (TREE_CODE (type) == UNION_TYPE)
7519 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7521 /* If both input and output are BLKmode, this conversion isn't doing
7522 anything except possibly changing the memory attributes.
7523 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7525 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7526 modifier);
7528 result = copy_rtx (result);
7529 set_mem_attributes (result, exp, 0);
7530 return result;
7533 if (target == 0)
7534 target = assign_temp (type, 0, 1, 1);
7536 if (GET_CODE (target) == MEM)
7537 /* Store data into beginning of memory target. */
7538 store_expr (TREE_OPERAND (exp, 0),
7539 adjust_address (target, TYPE_MODE (valtype), 0),
7540 modifier == EXPAND_STACK_PARM ? 2 : 0);
7542 else if (GET_CODE (target) == REG)
7543 /* Store this field into a union of the proper type. */
7544 store_field (target,
7545 MIN ((int_size_in_bytes (TREE_TYPE
7546 (TREE_OPERAND (exp, 0)))
7547 * BITS_PER_UNIT),
7548 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7549 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7550 VOIDmode, 0, type, 0);
7551 else
7552 abort ();
7554 /* Return the entire union. */
7555 return target;
7558 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7560 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7561 modifier);
7563 /* If the signedness of the conversion differs and OP0 is
7564 a promoted SUBREG, clear that indication since we now
7565 have to do the proper extension. */
7566 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7567 && GET_CODE (op0) == SUBREG)
7568 SUBREG_PROMOTED_VAR_P (op0) = 0;
7570 return op0;
7573 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7574 if (GET_MODE (op0) == mode)
7575 return op0;
7577 /* If OP0 is a constant, just convert it into the proper mode. */
7578 if (CONSTANT_P (op0))
7580 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7581 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7583 if (modifier == EXPAND_INITIALIZER)
7584 return simplify_gen_subreg (mode, op0, inner_mode,
7585 subreg_lowpart_offset (mode,
7586 inner_mode));
7587 else
7588 return convert_modes (mode, inner_mode, op0,
7589 TREE_UNSIGNED (inner_type));
7592 if (modifier == EXPAND_INITIALIZER)
7593 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7595 if (target == 0)
7596 return
7597 convert_to_mode (mode, op0,
7598 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7599 else
7600 convert_move (target, op0,
7601 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7602 return target;
7604 case VIEW_CONVERT_EXPR:
7605 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7607 /* If the input and output modes are both the same, we are done.
7608 Otherwise, if neither mode is BLKmode and both are integral and within
7609 a word, we can use gen_lowpart. If neither is true, make sure the
7610 operand is in memory and convert the MEM to the new mode. */
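/* Sketch (illustrative): reinterpreting a 32-bit float as a 32-bit
   int takes the last route here: the SFmode value is forced into a
   stack temporary and the MEM is then re-read with its mode changed
   to SImode, since the bits are reinterpreted rather than converted. */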
7611 if (TYPE_MODE (type) == GET_MODE (op0))
7613 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7614 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7615 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7616 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7617 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7618 op0 = gen_lowpart (TYPE_MODE (type), op0);
7619 else if (GET_CODE (op0) != MEM)
7621 /* If the operand is not a MEM, force it into memory. Since we
7622 are going to be changing the mode of the MEM, don't call
7623 force_const_mem for constants because we don't allow pool
7624 constants to change mode. */
7625 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7627 if (TREE_ADDRESSABLE (exp))
7628 abort ();
7630 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7631 target
7632 = assign_stack_temp_for_type
7633 (TYPE_MODE (inner_type),
7634 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7636 emit_move_insn (target, op0);
7637 op0 = target;
7640 /* At this point, OP0 is in the correct mode. If the output type is such
7641 that the operand is known to be aligned, indicate that it is.
7642 Otherwise, we need only be concerned about alignment for non-BLKmode
7643 results. */
7644 if (GET_CODE (op0) == MEM)
7646 op0 = copy_rtx (op0);
7648 if (TYPE_ALIGN_OK (type))
7649 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7650 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7651 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7653 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7654 HOST_WIDE_INT temp_size
7655 = MAX (int_size_in_bytes (inner_type),
7656 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7657 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7658 temp_size, 0, type);
7659 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7661 if (TREE_ADDRESSABLE (exp))
7662 abort ();
7664 if (GET_MODE (op0) == BLKmode)
7665 emit_block_move (new_with_op0_mode, op0,
7666 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7667 (modifier == EXPAND_STACK_PARM
7668 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7669 else
7670 emit_move_insn (new_with_op0_mode, op0);
7672 op0 = new;
7675 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7678 return op0;
7680 case PLUS_EXPR:
7681 this_optab = ! unsignedp && flag_trapv
7682 && (GET_MODE_CLASS (mode) == MODE_INT)
7683 ? addv_optab : add_optab;
7685 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7686 something else, make sure we add the register to the constant and
7687 then to the other thing. This case can occur during strength
7688 reduction and doing it this way will produce better code if the
7689 frame pointer or argument pointer is eliminated.
7691 fold-const.c will ensure that the constant is always in the inner
7692 PLUS_EXPR, so the only case we need to do anything about is if
7693 sp, ap, or fp is our second argument, in which case we must swap
7694 the innermost first argument and our second argument. */
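/* Shape being handled (hypothetical example): a tree like
   (PLUS_EXPR (PLUS_EXPR x 4) fp) is rewritten below as
   (PLUS_EXPR (PLUS_EXPR fp 4) x), keeping the constant next to the
   register so it can be folded once fp is eliminated. */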
7696 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7697 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7698 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7699 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7700 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7701 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7703 tree t = TREE_OPERAND (exp, 1);
7705 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7706 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7709 /* If the result is to be ptr_mode and we are adding an integer to
7710 something, we might be forming a constant. So try to use
7711 plus_constant. If it produces a sum and we can't accept it,
7712 use force_operand. This allows P = &ARR[const] to generate
7713 efficient code on machines where a SYMBOL_REF is not a valid
7714 address.
7716 If this is an EXPAND_SUM call, always return the sum. */
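/* Rough example of the intent (not from the original source): for
   "int *p = &arr[10];" the address can be formed as
   plus_constant ((symbol_ref "arr"), 40), avoiding an explicit add
   even on machines where the bare SYMBOL_REF is not a valid address. */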
7717 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7718 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7720 if (modifier == EXPAND_STACK_PARM)
7721 target = 0;
7722 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7723 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7724 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7726 rtx constant_part;
7728 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7729 EXPAND_SUM);
7730 /* Use immed_double_const to ensure that the constant is
7731 truncated according to the mode of OP1, then sign extended
7732 to a HOST_WIDE_INT. Using the constant directly can result
7733 in non-canonical RTL in a 64x32 cross compile. */
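/* Illustration (assumed widths): with a 64-bit HOST_WIDE_INT and an
   SImode constant whose low word is 0xffffffff, immed_double_const
   produces the sign-extended value -1, the canonical CONST_INT form;
   using the untruncated tree value directly would not be canonical. */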
7734 constant_part
7735 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7736 (HOST_WIDE_INT) 0,
7737 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7738 op1 = plus_constant (op1, INTVAL (constant_part));
7739 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7740 op1 = force_operand (op1, target);
7741 return op1;
7744 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7745 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7746 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7748 rtx constant_part;
7750 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7751 (modifier == EXPAND_INITIALIZER
7752 ? EXPAND_INITIALIZER : EXPAND_SUM));
7753 if (! CONSTANT_P (op0))
7755 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7756 VOIDmode, modifier);
7757 /* Return a PLUS if modifier says it's OK. */
7758 if (modifier == EXPAND_SUM
7759 || modifier == EXPAND_INITIALIZER)
7760 return simplify_gen_binary (PLUS, mode, op0, op1);
7761 goto binop2;
7763 /* Use immed_double_const to ensure that the constant is
7764 truncated according to the mode of OP1, then sign extended
7765 to a HOST_WIDE_INT. Using the constant directly can result
7766 in non-canonical RTL in a 64x32 cross compile. */
7767 constant_part
7768 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7769 (HOST_WIDE_INT) 0,
7770 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7771 op0 = plus_constant (op0, INTVAL (constant_part));
7772 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7773 op0 = force_operand (op0, target);
7774 return op0;
7778 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7779 subtarget = 0;
7781 /* No sense saving up arithmetic to be done
7782 if it's all in the wrong mode to form part of an address.
7783 And force_operand won't know whether to sign-extend or
7784 zero-extend. */
7785 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7786 || mode != ptr_mode)
7788 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7789 subtarget, &op0, &op1, 0);
7790 if (op0 == const0_rtx)
7791 return op1;
7792 if (op1 == const0_rtx)
7793 return op0;
7794 goto binop2;
7797 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7798 subtarget, &op0, &op1, modifier);
7799 return simplify_gen_binary (PLUS, mode, op0, op1);
7801 case MINUS_EXPR:
7802 /* For initializers, we are allowed to return a MINUS of two
7803 symbolic constants. Here we handle all cases when both operands
7804 are constant. */
7805 /* Handle difference of two symbolic constants,
7806 for the sake of an initializer. */
7807 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7808 && really_constant_p (TREE_OPERAND (exp, 0))
7809 && really_constant_p (TREE_OPERAND (exp, 1)))
7811 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7812 NULL_RTX, &op0, &op1, modifier);
7814 /* If the last operand is a CONST_INT, use plus_constant of
7815 the negated constant. Else make the MINUS. */
7816 if (GET_CODE (op1) == CONST_INT)
7817 return plus_constant (op0, - INTVAL (op1));
7818 else
7819 return gen_rtx_MINUS (mode, op0, op1);
7822 this_optab = ! unsignedp && flag_trapv
7823 && (GET_MODE_CLASS(mode) == MODE_INT)
7824 ? subv_optab : sub_optab;
7826 /* No sense saving up arithmetic to be done
7827 if it's all in the wrong mode to form part of an address.
7828 And force_operand won't know whether to sign-extend or
7829 zero-extend. */
7830 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7831 || mode != ptr_mode)
7832 goto binop;
7834 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7835 subtarget, &op0, &op1, modifier);
7837 /* Convert A - const to A + (-const). */
7838 if (GET_CODE (op1) == CONST_INT)
7840 op1 = negate_rtx (mode, op1);
7841 return simplify_gen_binary (PLUS, mode, op0, op1);
7844 goto binop2;
7846 case MULT_EXPR:
7847 /* If first operand is constant, swap them.
7848 Thus the following special case checks need only
7849 check the second operand. */
7850 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7852 tree t1 = TREE_OPERAND (exp, 0);
7853 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7854 TREE_OPERAND (exp, 1) = t1;
7857 /* Attempt to return something suitable for generating an
7858 indexed address, for machines that support that. */
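/* Loose example (hypothetical source): for "p[i]" with 4-byte
   elements, the multiplication "i * 4" arriving here with EXPAND_SUM
   is returned as (mult (reg i) (const_int 4)), letting the caller
   fold it into a scaled or indexed address when the machine has one. */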
7860 if (modifier == EXPAND_SUM && mode == ptr_mode
7861 && host_integerp (TREE_OPERAND (exp, 1), 0))
7863 tree exp1 = TREE_OPERAND (exp, 1);
7865 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7866 EXPAND_SUM);
7868 /* If we knew for certain that this is arithmetic for an array
7869 reference, and we knew the bounds of the array, then we could
7870 apply the distributive law across (PLUS X C) for constant C.
7871 Without such knowledge, we risk overflowing the computation
7872 when both X and C are large, but X+C isn't. */
7873 /* ??? Could perhaps special-case EXP being unsigned and C being
7874 positive. In that case we are certain that X+C is no smaller
7875 than X and so the transformed expression will overflow iff the
7876 original would have. */
7878 if (GET_CODE (op0) != REG)
7879 op0 = force_operand (op0, NULL_RTX);
7880 if (GET_CODE (op0) != REG)
7881 op0 = copy_to_mode_reg (mode, op0);
7883 return gen_rtx_MULT (mode, op0,
7884 gen_int_mode (tree_low_cst (exp1, 0),
7885 TYPE_MODE (TREE_TYPE (exp1))));
7888 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7889 subtarget = 0;
7891 if (modifier == EXPAND_STACK_PARM)
7892 target = 0;
7894 /* Check for multiplying things that have been extended
7895 from a narrower type. If this machine supports multiplying
7896 in that narrower type with a result in the desired type,
7897 do it that way, and avoid the explicit type-conversion. */
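/* Sketch of the case being matched (illustrative only): for
   "(int) a * (int) b" where a and b are 16-bit values, the product
   can be formed with a widening HImode x HImode -> SImode multiply
   instead of extending both operands to SImode first. */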
7898 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7899 && TREE_CODE (type) == INTEGER_TYPE
7900 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7901 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7902 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7903 && int_fits_type_p (TREE_OPERAND (exp, 1),
7904 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7905 /* Don't use a widening multiply if a shift will do. */
7906 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7907 > HOST_BITS_PER_WIDE_INT)
7908 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7909 ||
7910 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7911 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7912 ==
7913 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7914 /* If both operands are extended, they must either both
7915 be zero-extended or both be sign-extended. */
7916 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7917 ==
7918 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7920 enum machine_mode innermode
7921 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7922 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7923 ? smul_widen_optab : umul_widen_optab);
7924 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7925 ? umul_widen_optab : smul_widen_optab);
7926 if (mode == GET_MODE_WIDER_MODE (innermode))
7928 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7930 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7931 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7932 TREE_OPERAND (exp, 1),
7933 NULL_RTX, &op0, &op1, 0);
7934 else
7935 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7936 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7937 NULL_RTX, &op0, &op1, 0);
7938 goto binop2;
7940 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7941 && innermode == word_mode)
7943 rtx htem;
7944 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7945 NULL_RTX, VOIDmode, 0);
7946 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7947 op1 = convert_modes (innermode, mode,
7948 expand_expr (TREE_OPERAND (exp, 1),
7949 NULL_RTX, VOIDmode, 0),
7950 unsignedp);
7951 else
7952 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7953 NULL_RTX, VOIDmode, 0);
7954 temp = expand_binop (mode, other_optab, op0, op1, target,
7955 unsignedp, OPTAB_LIB_WIDEN);
7956 htem = expand_mult_highpart_adjust (innermode,
7957 gen_highpart (innermode, temp),
7958 op0, op1,
7959 gen_highpart (innermode, temp),
7960 unsignedp);
7961 emit_move_insn (gen_highpart (innermode, temp), htem);
7962 return temp;
7966 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7967 subtarget, &op0, &op1, 0);
7968 return expand_mult (mode, op0, op1, target, unsignedp);
7970 case TRUNC_DIV_EXPR:
7971 case FLOOR_DIV_EXPR:
7972 case CEIL_DIV_EXPR:
7973 case ROUND_DIV_EXPR:
7974 case EXACT_DIV_EXPR:
7975 if (modifier == EXPAND_STACK_PARM)
7976 target = 0;
7977 /* Possible optimization: compute the dividend with EXPAND_SUM;
7978 then, if the divisor is constant, we can optimize the case
7979 where some terms of the dividend have coefficients divisible by it. */
7980 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7981 subtarget, &op0, &op1, 0);
7982 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7984 case RDIV_EXPR:
7985 /* Emit a/b as a*(1/b). Later, CSE may manage to share the reciprocal,
7986 saving an expensive divide. If not, combine will rebuild the original
7987 computation. */
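/* Illustrative rewrite (only under -funsafe-math-optimizations):
   "x / y" becomes "x * (1.0 / y)", so code dividing several values
   by the same y can share a single reciprocal via CSE. */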
7988 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7989 && TREE_CODE (type) == REAL_TYPE
7990 && !real_onep (TREE_OPERAND (exp, 0)))
7991 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7992 build (RDIV_EXPR, type,
7993 build_real (type, dconst1),
7994 TREE_OPERAND (exp, 1))),
7995 target, tmode, modifier);
7996 this_optab = sdiv_optab;
7997 goto binop;
7999 case TRUNC_MOD_EXPR:
8000 case FLOOR_MOD_EXPR:
8001 case CEIL_MOD_EXPR:
8002 case ROUND_MOD_EXPR:
8003 if (modifier == EXPAND_STACK_PARM)
8004 target = 0;
8005 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8006 subtarget, &op0, &op1, 0);
8007 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8009 case FIX_ROUND_EXPR:
8010 case FIX_FLOOR_EXPR:
8011 case FIX_CEIL_EXPR:
8012 abort (); /* Not used for C. */
8014 case FIX_TRUNC_EXPR:
8015 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8016 if (target == 0 || modifier == EXPAND_STACK_PARM)
8017 target = gen_reg_rtx (mode);
8018 expand_fix (target, op0, unsignedp);
8019 return target;
8021 case FLOAT_EXPR:
8022 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8023 if (target == 0 || modifier == EXPAND_STACK_PARM)
8024 target = gen_reg_rtx (mode);
8025 /* expand_float can't figure out what to do if FROM has VOIDmode.
8026 So give it the correct mode. With -O, cse will optimize this. */
8027 if (GET_MODE (op0) == VOIDmode)
8028 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8029 op0);
8030 expand_float (target, op0,
8031 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8032 return target;
8034 case NEGATE_EXPR:
8035 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8036 if (modifier == EXPAND_STACK_PARM)
8037 target = 0;
8038 temp = expand_unop (mode,
8039 ! unsignedp && flag_trapv
8040 && (GET_MODE_CLASS(mode) == MODE_INT)
8041 ? negv_optab : neg_optab, op0, target, 0);
8042 if (temp == 0)
8043 abort ();
8044 return temp;
8046 case ABS_EXPR:
8047 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8048 if (modifier == EXPAND_STACK_PARM)
8049 target = 0;
8051 /* ABS_EXPR is not valid for complex arguments. */
8052 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8053 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8054 abort ();
8056 /* Unsigned abs is simply the operand. Testing here means we don't
8057 risk generating incorrect code below. */
8058 if (TREE_UNSIGNED (type))
8059 return op0;
8061 return expand_abs (mode, op0, target, unsignedp,
8062 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8064 case MAX_EXPR:
8065 case MIN_EXPR:
8066 target = original_target;
8067 if (target == 0
8068 || modifier == EXPAND_STACK_PARM
8069 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8070 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8071 || GET_MODE (target) != mode
8072 || (GET_CODE (target) == REG
8073 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8074 target = gen_reg_rtx (mode);
8075 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8076 target, &op0, &op1, 0);
8078 /* First try to do it with a special MIN or MAX instruction.
8079 If that does not win, use a conditional jump to select the proper
8080 value. */
8081 this_optab = (TREE_UNSIGNED (type)
8082 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8083 : (code == MIN_EXPR ? smin_optab : smax_optab));
8085 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8086 OPTAB_WIDEN);
8087 if (temp != 0)
8088 return temp;
8090 /* At this point, a MEM target is no longer useful; we will get better
8091 code without it. */
8093 if (GET_CODE (target) == MEM)
8094 target = gen_reg_rtx (mode);
8096 if (target != op0)
8097 emit_move_insn (target, op0);
8099 op0 = gen_label_rtx ();
8101 /* If this mode is an integer too wide to compare properly,
8102 compare word by word. Rely on cse to optimize constant cases. */
8103 if (GET_MODE_CLASS (mode) == MODE_INT
8104 && ! can_compare_p (GE, mode, ccp_jump))
8106 if (code == MAX_EXPR)
8107 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8108 target, op1, NULL_RTX, op0);
8109 else
8110 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8111 op1, target, NULL_RTX, op0);
8113 else
8115 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8116 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8117 unsignedp, mode, NULL_RTX, NULL_RTX,
8118 op0);
8120 emit_move_insn (target, op1);
8121 emit_label (op0);
8122 return target;
8124 case BIT_NOT_EXPR:
8125 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8126 if (modifier == EXPAND_STACK_PARM)
8127 target = 0;
8128 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8129 if (temp == 0)
8130 abort ();
8131 return temp;
8133 /* ??? Can optimize bitwise operations with one arg constant.
8134 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8135 and (a bitwise1 b) bitwise2 b (etc)
8136 but that is probably not worthwhile. */
8138 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8139 boolean values when we want in all cases to compute both of them. In
8140 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8141 as actual zero-or-1 values and then bitwise anding. In cases where
8142 there cannot be any side effects, better code would be made by
8143 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8144 how to recognize those cases. */
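/* Loose illustration: for TRUTH_AND_EXPR over two flags a and b,
   both are reduced to 0 or 1 and combined with a bitwise AND,
   whereas TRUTH_ANDIF_EXPR would branch around the evaluation of b
   whenever a is already zero. */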
8146 case TRUTH_AND_EXPR:
8147 case BIT_AND_EXPR:
8148 this_optab = and_optab;
8149 goto binop;
8151 case TRUTH_OR_EXPR:
8152 case BIT_IOR_EXPR:
8153 this_optab = ior_optab;
8154 goto binop;
8156 case TRUTH_XOR_EXPR:
8157 case BIT_XOR_EXPR:
8158 this_optab = xor_optab;
8159 goto binop;
8161 case LSHIFT_EXPR:
8162 case RSHIFT_EXPR:
8163 case LROTATE_EXPR:
8164 case RROTATE_EXPR:
8165 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8166 subtarget = 0;
8167 if (modifier == EXPAND_STACK_PARM)
8168 target = 0;
8169 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8170 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8171 unsignedp);
8173 /* Could determine the answer when only additive constants differ. Also,
8174 the addition of one can be handled by changing the condition. */
8175 case LT_EXPR:
8176 case LE_EXPR:
8177 case GT_EXPR:
8178 case GE_EXPR:
8179 case EQ_EXPR:
8180 case NE_EXPR:
8181 case UNORDERED_EXPR:
8182 case ORDERED_EXPR:
8183 case UNLT_EXPR:
8184 case UNLE_EXPR:
8185 case UNGT_EXPR:
8186 case UNGE_EXPR:
8187 case UNEQ_EXPR:
8188 temp = do_store_flag (exp,
8189 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8190 tmode != VOIDmode ? tmode : mode, 0);
8191 if (temp != 0)
8192 return temp;
8194 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8195 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8196 && original_target
8197 && GET_CODE (original_target) == REG
8198 && (GET_MODE (original_target)
8199 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8201 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8202 VOIDmode, 0);
8204 /* If temp is constant, we can just compute the result. */
8205 if (GET_CODE (temp) == CONST_INT)
8207 if (INTVAL (temp) != 0)
8208 emit_move_insn (target, const1_rtx);
8209 else
8210 emit_move_insn (target, const0_rtx);
8212 return target;
8215 if (temp != original_target)
8217 enum machine_mode mode1 = GET_MODE (temp);
8218 if (mode1 == VOIDmode)
8219 mode1 = tmode != VOIDmode ? tmode : mode;
8221 temp = copy_to_mode_reg (mode1, temp);
8224 op1 = gen_label_rtx ();
8225 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8226 GET_MODE (temp), unsignedp, op1);
8227 emit_move_insn (temp, const1_rtx);
8228 emit_label (op1);
8229 return temp;
8232 /* If no set-flag instruction, must generate a conditional
8233 store into a temporary variable. Drop through
8234 and handle this like && and ||. */
8236 case TRUTH_ANDIF_EXPR:
8237 case TRUTH_ORIF_EXPR:
8238 if (! ignore
8239 && (target == 0
8240 || modifier == EXPAND_STACK_PARM
8241 || ! safe_from_p (target, exp, 1)
8242 /* Make sure we don't have a hard reg (such as function's return
8243 value) live across basic blocks, if not optimizing. */
8244 || (!optimize && GET_CODE (target) == REG
8245 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8246 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8248 if (target)
8249 emit_clr_insn (target);
8251 op1 = gen_label_rtx ();
8252 jumpifnot (exp, op1);
8254 if (target)
8255 emit_0_to_1_insn (target);
8257 emit_label (op1);
8258 return ignore ? const0_rtx : target;
8260 case TRUTH_NOT_EXPR:
8261 if (modifier == EXPAND_STACK_PARM)
8262 target = 0;
8263 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8264 /* The parser is careful to generate TRUTH_NOT_EXPR
8265 only with operands that are always zero or one. */
8266 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8267 target, 1, OPTAB_LIB_WIDEN);
8268 if (temp == 0)
8269 abort ();
8270 return temp;
8272 case COMPOUND_EXPR:
8273 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8274 emit_queue ();
8275 return expand_expr (TREE_OPERAND (exp, 1),
8276 (ignore ? const0_rtx : target),
8277 VOIDmode, modifier);
8279 case COND_EXPR:
8280 /* If we would have a "singleton" (see below) were it not for a
8281 conversion in each arm, bring that conversion back out. */
8282 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8283 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8284 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8285 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8287 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8288 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8290 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8291 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8292 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8293 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8294 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8295 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8296 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8297 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8298 return expand_expr (build1 (NOP_EXPR, type,
8299 build (COND_EXPR, TREE_TYPE (iftrue),
8300 TREE_OPERAND (exp, 0),
8301 iftrue, iffalse)),
8302 target, tmode, modifier);
8306 /* Note that COND_EXPRs whose type is a structure or union
8307 are required to be constructed to contain assignments of
8308 a temporary variable, so that we can evaluate them here
8309 for side effect only. If type is void, we must do likewise. */
8311 /* If an arm of the branch requires a cleanup,
8312 only that cleanup is performed. */
8314 tree singleton = 0;
8315 tree binary_op = 0, unary_op = 0;
8317 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8318 convert it to our mode, if necessary. */
8319 if (integer_onep (TREE_OPERAND (exp, 1))
8320 && integer_zerop (TREE_OPERAND (exp, 2))
8321 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8323 if (ignore)
8325 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8326 modifier);
8327 return const0_rtx;
8330 if (modifier == EXPAND_STACK_PARM)
8331 target = 0;
8332 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8333 if (GET_MODE (op0) == mode)
8334 return op0;
8336 if (target == 0)
8337 target = gen_reg_rtx (mode);
8338 convert_move (target, op0, unsignedp);
8339 return target;
8342 /* Check for X ? A + B : A. If we have this, we can copy A to the
8343 output and conditionally add B. Similarly for unary operations.
8344 Don't do this if X has side-effects because those side effects
8345 might affect A or B and the "?" operation is a sequence point in
8346 ANSI. (operand_equal_p tests for side effects.) */
8348 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8349 && operand_equal_p (TREE_OPERAND (exp, 2),
8350 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8351 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8352 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8353 && operand_equal_p (TREE_OPERAND (exp, 1),
8354 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8355 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8356 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8357 && operand_equal_p (TREE_OPERAND (exp, 2),
8358 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8359 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8360 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8361 && operand_equal_p (TREE_OPERAND (exp, 1),
8362 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8363 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8365 /* If we are not to produce a result, we have no target. Otherwise,
8366 if a target was specified use it; it will not be used as an
8367 intermediate target unless it is safe. If no target, use a
8368 temporary. */
8370 if (ignore)
8371 temp = 0;
8372 else if (modifier == EXPAND_STACK_PARM)
8373 temp = assign_temp (type, 0, 0, 1);
8374 else if (original_target
8375 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8376 || (singleton && GET_CODE (original_target) == REG
8377 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8378 && original_target == var_rtx (singleton)))
8379 && GET_MODE (original_target) == mode
8380 #ifdef HAVE_conditional_move
8381 && (! can_conditionally_move_p (mode)
8382 || GET_CODE (original_target) == REG
8383 || TREE_ADDRESSABLE (type))
8384 #endif
8385 && (GET_CODE (original_target) != MEM
8386 || TREE_ADDRESSABLE (type)))
8387 temp = original_target;
8388 else if (TREE_ADDRESSABLE (type))
8389 abort ();
8390 else
8391 temp = assign_temp (type, 0, 0, 1);
8393 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8394 do the test of X as a store-flag operation, do this as
8395 A + ((X != 0) << log C). Similarly for other simple binary
8396 operators. Only do for C == 1 if BRANCH_COST is low. */
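/* Rough source-level picture (hypothetical): "x ? a + 4 : a" is
   emitted as "a + ((x != 0) << 2)", trading the branch for a
   store-flag and a shift when the comparison can be done as a
   store-flag operation. */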
8397 if (temp && singleton && binary_op
8398 && (TREE_CODE (binary_op) == PLUS_EXPR
8399 || TREE_CODE (binary_op) == MINUS_EXPR
8400 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8401 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8402 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8403 : integer_onep (TREE_OPERAND (binary_op, 1)))
8404 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8406 rtx result;
8407 tree cond;
8408 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8409 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8410 ? addv_optab : add_optab)
8411 : TREE_CODE (binary_op) == MINUS_EXPR
8412 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8413 ? subv_optab : sub_optab)
8414 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8415 : xor_optab);
8417 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8418 if (singleton == TREE_OPERAND (exp, 1))
8419 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8420 else
8421 cond = TREE_OPERAND (exp, 0);
8423 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8424 ? temp : NULL_RTX),
8425 mode, BRANCH_COST <= 1);
8427 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8428 result = expand_shift (LSHIFT_EXPR, mode, result,
8429 build_int_2 (tree_log2
8430 (TREE_OPERAND
8431 (binary_op, 1)),
8433 (safe_from_p (temp, singleton, 1)
8434 ? temp : NULL_RTX), 0);
8436 if (result)
8438 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8439 return expand_binop (mode, boptab, op1, result, temp,
8440 unsignedp, OPTAB_LIB_WIDEN);
8444 do_pending_stack_adjust ();
8445 NO_DEFER_POP;
8446 op0 = gen_label_rtx ();
8448 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8450 if (temp != 0)
8452 /* If the target conflicts with the other operand of the
8453 binary op, we can't use it. Also, we can't use the target
8454 if it is a hard register, because evaluating the condition
8455 might clobber it. */
8456 if ((binary_op
8457 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8458 || (GET_CODE (temp) == REG
8459 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8460 temp = gen_reg_rtx (mode);
8461 store_expr (singleton, temp,
8462 modifier == EXPAND_STACK_PARM ? 2 : 0);
8464 else
8465 expand_expr (singleton,
8466 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8467 if (singleton == TREE_OPERAND (exp, 1))
8468 jumpif (TREE_OPERAND (exp, 0), op0);
8469 else
8470 jumpifnot (TREE_OPERAND (exp, 0), op0);
8472 start_cleanup_deferral ();
8473 if (binary_op && temp == 0)
8474 /* Just touch the other operand. */
8475 expand_expr (TREE_OPERAND (binary_op, 1),
8476 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8477 else if (binary_op)
8478 store_expr (build (TREE_CODE (binary_op), type,
8479 make_tree (type, temp),
8480 TREE_OPERAND (binary_op, 1)),
8481 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8482 else
8483 store_expr (build1 (TREE_CODE (unary_op), type,
8484 make_tree (type, temp)),
8485 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8486 op1 = op0;
8488 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8489 comparison operator. If we have one of these cases, set the
8490 output to A, branch on A (cse will merge these two references),
8491 then set the output to FOO. */
8492 else if (temp
8493 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8494 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8495 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8496 TREE_OPERAND (exp, 1), 0)
8497 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8498 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8499 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8501 if (GET_CODE (temp) == REG
8502 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8503 temp = gen_reg_rtx (mode);
8504 store_expr (TREE_OPERAND (exp, 1), temp,
8505 modifier == EXPAND_STACK_PARM ? 2 : 0);
8506 jumpif (TREE_OPERAND (exp, 0), op0);
8508 start_cleanup_deferral ();
8509 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8510 store_expr (TREE_OPERAND (exp, 2), temp,
8511 modifier == EXPAND_STACK_PARM ? 2 : 0);
8512 else
8513 expand_expr (TREE_OPERAND (exp, 2),
8514 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8515 op1 = op0;
8517 else if (temp
8518 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8519 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8520 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8521 TREE_OPERAND (exp, 2), 0)
8522 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8523 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8524 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8526 if (GET_CODE (temp) == REG
8527 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8528 temp = gen_reg_rtx (mode);
8529 store_expr (TREE_OPERAND (exp, 2), temp,
8530 modifier == EXPAND_STACK_PARM ? 2 : 0);
8531 jumpifnot (TREE_OPERAND (exp, 0), op0);
8533 start_cleanup_deferral ();
8534 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8535 store_expr (TREE_OPERAND (exp, 1), temp,
8536 modifier == EXPAND_STACK_PARM ? 2 : 0);
8537 else
8538 expand_expr (TREE_OPERAND (exp, 1),
8539 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8540 op1 = op0;
8542 else
8544 op1 = gen_label_rtx ();
8545 jumpifnot (TREE_OPERAND (exp, 0), op0);
8547 start_cleanup_deferral ();
8549 /* One branch of the cond can be void, if it never returns. For
8550 example, A ? throw : E. */
8551 if (temp != 0
8552 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8553 store_expr (TREE_OPERAND (exp, 1), temp,
8554 modifier == EXPAND_STACK_PARM ? 2 : 0);
8555 else
8556 expand_expr (TREE_OPERAND (exp, 1),
8557 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8558 end_cleanup_deferral ();
8559 emit_queue ();
8560 emit_jump_insn (gen_jump (op1));
8561 emit_barrier ();
8562 emit_label (op0);
8563 start_cleanup_deferral ();
8564 if (temp != 0
8565 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8566 store_expr (TREE_OPERAND (exp, 2), temp,
8567 modifier == EXPAND_STACK_PARM ? 2 : 0);
8568 else
8569 expand_expr (TREE_OPERAND (exp, 2),
8570 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8573 end_cleanup_deferral ();
8575 emit_queue ();
8576 emit_label (op1);
8577 OK_DEFER_POP;
8579 return temp;
8582 case TARGET_EXPR:
8584 /* Something needs to be initialized, but we didn't know
8585 where that thing was when building the tree. For example,
8586 it could be the return value of a function, or a parameter
8587 to a function which is laid out on the stack, or a temporary
8588 variable which must be passed by reference.
8590 We guarantee that the expression will either be constructed
8591 or copied into our original target. */
8593 tree slot = TREE_OPERAND (exp, 0);
8594 tree cleanups = NULL_TREE;
8595 tree exp1;
8597 if (TREE_CODE (slot) != VAR_DECL)
8598 abort ();
8600 if (! ignore)
8601 target = original_target;
8603 /* Set this here so that if we get a target that refers to a
8604 register variable that's already been used, put_reg_into_stack
8605 knows that it should fix up those uses. */
8606 TREE_USED (slot) = 1;
8608 if (target == 0)
8610 if (DECL_RTL_SET_P (slot))
8612 target = DECL_RTL (slot);
8613 /* If we have already expanded the slot, don't do
8614 it again. (mrs) */
8615 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8616 return target;
8618 else
8620 target = assign_temp (type, 2, 0, 1);
8621 /* All temp slots at this level must not conflict. */
8622 preserve_temp_slots (target);
8623 SET_DECL_RTL (slot, target);
8624 if (TREE_ADDRESSABLE (slot))
8625 put_var_into_stack (slot, /*rescan=*/false);
8627 /* Since SLOT is not known to the called function
8628 to belong to its stack frame, we must build an explicit
8629 cleanup. This case occurs when we must build up a reference
8630 to pass the reference as an argument. In this case,
8631 it is very likely that such a reference need not be
8632 built here. */
8634 if (TREE_OPERAND (exp, 2) == 0)
8635 TREE_OPERAND (exp, 2)
8636 = (*lang_hooks.maybe_build_cleanup) (slot);
8637 cleanups = TREE_OPERAND (exp, 2);
8640 else
8642 /* This case does occur when expanding a parameter which
8643 needs to be constructed on the stack. The target
8644 is the actual stack address that we want to initialize.
8645 The function we call will perform the cleanup in this case. */
8647 /* If we have already assigned it space, use that space,
8648 not the target that we were passed, as our target
8649 parameter is only a hint. */
8650 if (DECL_RTL_SET_P (slot))
8652 target = DECL_RTL (slot);
8653 /* If we have already expanded the slot, don't do
8654 it again. (mrs) */
8655 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8656 return target;
8658 else
8660 SET_DECL_RTL (slot, target);
8661 /* If we must have an addressable slot, then make sure that
8662 the RTL that we just stored in slot is OK. */
8663 if (TREE_ADDRESSABLE (slot))
8664 put_var_into_stack (slot, /*rescan=*/true);
8668 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8669 /* Mark it as expanded. */
8670 TREE_OPERAND (exp, 1) = NULL_TREE;
8672 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8674 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8676 return target;
8679 case INIT_EXPR:
8681 tree lhs = TREE_OPERAND (exp, 0);
8682 tree rhs = TREE_OPERAND (exp, 1);
8684 temp = expand_assignment (lhs, rhs, ! ignore);
8685 return temp;
8688 case MODIFY_EXPR:
8690 /* If lhs is complex, expand calls in rhs before computing it.
8691 That's so we don't compute a pointer and save it over a
8692 call. If lhs is simple, compute it first so we can give it
8693 as a target if the rhs is just a call. This avoids an
8694 extra temp and copy and that prevents a partial-subsumption
8695 which makes bad code. Actually we could treat
8696 component_ref's of vars like vars. */
8698 tree lhs = TREE_OPERAND (exp, 0);
8699 tree rhs = TREE_OPERAND (exp, 1);
8701 temp = 0;
8703 /* Check for |= or &= of a bitfield of size one into another bitfield
8704 of size 1. In this case, (unless we need the result of the
8705 assignment) we can do this more efficiently with a
8706 test followed by an assignment, if necessary.
8708 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8709 things change so we do, this code should be enhanced to
8710 support it. */
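/* Hypothetical instance of the pattern: with 1-bit fields, an
   "s.a |= t.b;" whose value is unused is emitted roughly as
   "if (t.b) s.a = 1;", a test plus a conditional store instead of a
   read-modify-write of s.a. */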
8711 if (ignore
8712 && TREE_CODE (lhs) == COMPONENT_REF
8713 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8714 || TREE_CODE (rhs) == BIT_AND_EXPR)
8715 && TREE_OPERAND (rhs, 0) == lhs
8716 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8717 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8718 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8720 rtx label = gen_label_rtx ();
8722 do_jump (TREE_OPERAND (rhs, 1),
8723 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8724 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8725 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8726 (TREE_CODE (rhs) == BIT_IOR_EXPR
8727 ? integer_one_node
8728 : integer_zero_node)),
8730 do_pending_stack_adjust ();
8731 emit_label (label);
8732 return const0_rtx;
8735 temp = expand_assignment (lhs, rhs, ! ignore);
8737 return temp;
8740 case RETURN_EXPR:
8741 if (!TREE_OPERAND (exp, 0))
8742 expand_null_return ();
8743 else
8744 expand_return (TREE_OPERAND (exp, 0));
8745 return const0_rtx;
8747 case PREINCREMENT_EXPR:
8748 case PREDECREMENT_EXPR:
8749 return expand_increment (exp, 0, ignore);
8751 case POSTINCREMENT_EXPR:
8752 case POSTDECREMENT_EXPR:
8753 /* Faster to treat as pre-increment if result is not used. */
8754 return expand_increment (exp, ! ignore, ignore);
8756 case ADDR_EXPR:
8757 if (modifier == EXPAND_STACK_PARM)
8758 target = 0;
8759 /* Are we taking the address of a nested function? */
8760 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8761 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8762 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8763 && ! TREE_STATIC (exp))
8765 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8766 op0 = force_operand (op0, target);
8768 /* If we are taking the address of something erroneous, just
8769 return a zero. */
8770 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8771 return const0_rtx;
8772 /* If we are taking the address of a constant and are at the
8773 top level, we have to use output_constant_def since we can't
8774 call force_const_mem at top level. */
8775 else if (cfun == 0
8776 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8777 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8778 == 'c')))
8779 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8780 else
8782 /* We make sure to pass const0_rtx down if we came in with
8783 ignore set, to avoid doing the cleanups twice for something. */
8784 op0 = expand_expr (TREE_OPERAND (exp, 0),
8785 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8786 (modifier == EXPAND_INITIALIZER
8787 ? modifier : EXPAND_CONST_ADDRESS));
8789 /* If we are going to ignore the result, OP0 will have been set
8790 to const0_rtx, so just return it. Don't get confused and
8791 think we are taking the address of the constant. */
8792 if (ignore)
8793 return op0;
8795 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8796 clever and return a REG when given a MEM. */
8797 op0 = protect_from_queue (op0, 1);
8799 /* We would like the object in memory. If it is a constant, we can
8800 have it be statically allocated into memory. For a non-constant,
8801 we need to allocate some memory and store the value into it. */
8803 if (CONSTANT_P (op0))
8804 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8805 op0);
8806 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8807 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8808 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8810 /* If the operand is a SAVE_EXPR, we can deal with this by
8811 forcing the SAVE_EXPR into memory. */
8812 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8814 put_var_into_stack (TREE_OPERAND (exp, 0),
8815 /*rescan=*/true);
8816 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8818 else
8820 /* If this object is in a register, it can't be BLKmode. */
8821 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8822 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8824 if (GET_CODE (op0) == PARALLEL)
8825 /* Handle calls that pass values in multiple
8826 non-contiguous locations. The Irix 6 ABI has examples
8827 of this. */
8828 emit_group_store (memloc, op0, inner_type,
8829 int_size_in_bytes (inner_type));
8830 else
8831 emit_move_insn (memloc, op0);
8833 op0 = memloc;
8837 if (GET_CODE (op0) != MEM)
8838 abort ();
8840 mark_temp_addr_taken (op0);
8841 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8843 op0 = XEXP (op0, 0);
8844 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8845 op0 = convert_memory_address (ptr_mode, op0);
8846 return op0;
8849 /* If OP0 is not aligned at least as much as the type requires, we
8850 need to make a temporary, copy OP0 to it, and take the address of
8851 the temporary. We want to use the alignment of the type, not of
8852 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8853 the test for BLKmode means that can't happen. The test for
8854 BLKmode is because we never make mis-aligned MEMs with
8855 non-BLKmode.
8857 We don't need to do this at all if the machine doesn't have
8858 strict alignment. */
8859 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8860 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8861 > MEM_ALIGN (op0))
8862 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8864 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8865 rtx new;
8867 if (TYPE_ALIGN_OK (inner_type))
8868 abort ();
8870 if (TREE_ADDRESSABLE (inner_type))
8872 /* We can't make a bitwise copy of this object, so fail. */
8873 error ("cannot take the address of an unaligned member");
8874 return const0_rtx;
8877 new = assign_stack_temp_for_type
8878 (TYPE_MODE (inner_type),
8879 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8880 : int_size_in_bytes (inner_type),
8881 1, build_qualified_type (inner_type,
8882 (TYPE_QUALS (inner_type)
8883 | TYPE_QUAL_CONST)));
8885 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8886 (modifier == EXPAND_STACK_PARM
8887 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8889 op0 = new;
8892 op0 = force_operand (XEXP (op0, 0), target);
8895 if (flag_force_addr
8896 && GET_CODE (op0) != REG
8897 && modifier != EXPAND_CONST_ADDRESS
8898 && modifier != EXPAND_INITIALIZER
8899 && modifier != EXPAND_SUM)
8900 op0 = force_reg (Pmode, op0);
8902 if (GET_CODE (op0) == REG
8903 && ! REG_USERVAR_P (op0))
8904 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8906 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8907 op0 = convert_memory_address (ptr_mode, op0);
8909 return op0;
8911 case ENTRY_VALUE_EXPR:
8912 abort ();
8914 /* COMPLEX type for Extended Pascal & Fortran */
8915 case COMPLEX_EXPR:
8917 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8918 rtx insns;
8920 /* Get the rtx code of the operands. */
8921 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8922 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8924 if (! target)
8925 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8927 start_sequence ();
8929 /* Move the real (op0) and imaginary (op1) parts to their location. */
8930 emit_move_insn (gen_realpart (mode, target), op0);
8931 emit_move_insn (gen_imagpart (mode, target), op1);
8933 insns = get_insns ();
8934 end_sequence ();
8936 /* Complex construction should appear as a single unit. */
8937 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8938 each with a separate pseudo as destination.
8939 It's not correct for flow to treat them as a unit. */
8940 if (GET_CODE (target) != CONCAT)
8941 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8942 else
8943 emit_insn (insns);
8945 return target;
8948 case REALPART_EXPR:
8949 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8950 return gen_realpart (mode, op0);
8952 case IMAGPART_EXPR:
8953 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8954 return gen_imagpart (mode, op0);
8956 case CONJ_EXPR:
8958 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8959 rtx imag_t;
8960 rtx insns;
8962 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8964 if (! target)
8965 target = gen_reg_rtx (mode);
8967 start_sequence ();
8969 /* Store the realpart and the negated imagpart to target. */
8970 emit_move_insn (gen_realpart (partmode, target),
8971 gen_realpart (partmode, op0));
8973 imag_t = gen_imagpart (partmode, target);
8974 temp = expand_unop (partmode,
8975 ! unsignedp && flag_trapv
8976 && (GET_MODE_CLASS(partmode) == MODE_INT)
8977 ? negv_optab : neg_optab,
8978 gen_imagpart (partmode, op0), imag_t, 0);
8979 if (temp != imag_t)
8980 emit_move_insn (imag_t, temp);
8982 insns = get_insns ();
8983 end_sequence ();
8985 /* Conjugate should appear as a single unit.
8986 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8987 each with a separate pseudo as destination.
8988 It's not correct for flow to treat them as a unit. */
8989 if (GET_CODE (target) != CONCAT)
8990 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8991 else
8992 emit_insn (insns);
8994 return target;
8997 case TRY_CATCH_EXPR:
8999 tree handler = TREE_OPERAND (exp, 1);
9001 expand_eh_region_start ();
9003 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9005 expand_eh_region_end_cleanup (handler);
9007 return op0;
9010 case TRY_FINALLY_EXPR:
9012 tree try_block = TREE_OPERAND (exp, 0);
9013 tree finally_block = TREE_OPERAND (exp, 1);
9015 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9017 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9018 is not sufficient, so we cannot expand the block twice.
9019 So we play games with GOTO_SUBROUTINE_EXPR to let us
9020 expand the thing only once. */
9021 /* When not optimizing, we go ahead with this form since
9022 (1) user breakpoints operate more predictably without
9023 code duplication, and
9024 (2) we're not running any of the global optimizers
9025 that would explode in time/space with the highly
9026 connected CFG created by the indirect branching. */
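/* Schematically, the code emitted by this branch has the shape

       <try_block>
       return_link = &&resume; goto finally; resume:   (the cleanup,
                                                         run at each exit)
       jump done
     finally:
       <finally_block>
       jump *return_link
     done:

   where the labels are the FINALLY_LABEL and DONE_LABEL created below and
   the cleanup is the GOTO_SUBROUTINE_EXPR that records the resume address
   in RETURN_LINK; the &&resume notation is only shorthand for that.  */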
9028 rtx finally_label = gen_label_rtx ();
9029 rtx done_label = gen_label_rtx ();
9030 rtx return_link = gen_reg_rtx (Pmode);
9031 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9032 (tree) finally_label, (tree) return_link);
9033 TREE_SIDE_EFFECTS (cleanup) = 1;
9035 /* Start a new binding layer that will keep track of all cleanup
9036 actions to be performed. */
9037 expand_start_bindings (2);
9038 target_temp_slot_level = temp_slot_level;
9040 expand_decl_cleanup (NULL_TREE, cleanup);
9041 op0 = expand_expr (try_block, target, tmode, modifier);
9043 preserve_temp_slots (op0);
9044 expand_end_bindings (NULL_TREE, 0, 0);
9045 emit_jump (done_label);
9046 emit_label (finally_label);
9047 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9048 emit_indirect_jump (return_link);
9049 emit_label (done_label);
9051 else
9053 expand_start_bindings (2);
9054 target_temp_slot_level = temp_slot_level;
9056 expand_decl_cleanup (NULL_TREE, finally_block);
9057 op0 = expand_expr (try_block, target, tmode, modifier);
9059 preserve_temp_slots (op0);
9060 expand_end_bindings (NULL_TREE, 0, 0);
9063 return op0;
9066 case GOTO_SUBROUTINE_EXPR:
9068 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9069 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9070 rtx return_address = gen_label_rtx ();
9071 emit_move_insn (return_link,
9072 gen_rtx_LABEL_REF (Pmode, return_address));
9073 emit_jump (subr);
9074 emit_label (return_address);
9075 return const0_rtx;
9078 case VA_ARG_EXPR:
9079 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9081 case EXC_PTR_EXPR:
9082 return get_exception_pointer (cfun);
9084 case FDESC_EXPR:
9085 /* Function descriptors are not valid except as
9086 initialization constants, and should not be expanded. */
9087 abort ();
9089 default:
9090 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9093 /* Here to do an ordinary binary operator, generating an instruction
9094 from the optab already placed in `this_optab'. */
9095 binop:
9096 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9097 subtarget, &op0, &op1, 0);
9098 binop2:
9099 if (modifier == EXPAND_STACK_PARM)
9100 target = 0;
9101 temp = expand_binop (mode, this_optab, op0, op1, target,
9102 unsignedp, OPTAB_LIB_WIDEN);
9103 if (temp == 0)
9104 abort ();
9105 return temp;
9108 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9109 when applied to the address of EXP produces an address known to be
9110 aligned more than BIGGEST_ALIGNMENT. */
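/* In C terms, the offset recognized here looks roughly like

     (- (long) &EXP) & (ALIGN - 1)

   i.e. the number of bytes needed to round the address of EXP up to an
   ALIGN boundary, where ALIGN is a power of 2 greater than
   BIGGEST_ALIGNMENT.  ALIGN is an illustrative name, not an identifier
   used in this file.  */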
9112 static int
9113 is_aligning_offset (tree offset, tree exp)
9115 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9116 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9117 || TREE_CODE (offset) == NOP_EXPR
9118 || TREE_CODE (offset) == CONVERT_EXPR
9119 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9120 offset = TREE_OPERAND (offset, 0);
9122 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9123 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9124 if (TREE_CODE (offset) != BIT_AND_EXPR
9125 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9126 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9127 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9128 return 0;
9130 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9131 It must be NEGATE_EXPR. Then strip any more conversions. */
9132 offset = TREE_OPERAND (offset, 0);
9133 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9134 || TREE_CODE (offset) == NOP_EXPR
9135 || TREE_CODE (offset) == CONVERT_EXPR)
9136 offset = TREE_OPERAND (offset, 0);
9138 if (TREE_CODE (offset) != NEGATE_EXPR)
9139 return 0;
9141 offset = TREE_OPERAND (offset, 0);
9142 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9143 || TREE_CODE (offset) == NOP_EXPR
9144 || TREE_CODE (offset) == CONVERT_EXPR)
9145 offset = TREE_OPERAND (offset, 0);
9147 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9148 whose type is the same as EXP. */
9149 return (TREE_CODE (offset) == ADDR_EXPR
9150 && (TREE_OPERAND (offset, 0) == exp
9151 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9152 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9153 == TREE_TYPE (exp)))));
9156 /* Return the tree node if ARG corresponds to a string constant, or zero
9157 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9158 in bytes within the string that ARG is accessing. The type of the
9159 offset will be `sizetype'. */
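/* For example, for the C expressions

     "hello"        ->  the STRING_CST is returned, *PTR_OFFSET = 0
     "hello" + 2    ->  the STRING_CST is returned, *PTR_OFFSET = 2
     2 + "hello"    ->  likewise, with the operands the other way around

   which are the ADDR_EXPR and PLUS_EXPR shapes handled below; anything
   else simply returns 0.  */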
9161 tree
9162 string_constant (tree arg, tree *ptr_offset)
9164 STRIP_NOPS (arg);
9166 if (TREE_CODE (arg) == ADDR_EXPR
9167 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9169 *ptr_offset = size_zero_node;
9170 return TREE_OPERAND (arg, 0);
9172 else if (TREE_CODE (arg) == PLUS_EXPR)
9174 tree arg0 = TREE_OPERAND (arg, 0);
9175 tree arg1 = TREE_OPERAND (arg, 1);
9177 STRIP_NOPS (arg0);
9178 STRIP_NOPS (arg1);
9180 if (TREE_CODE (arg0) == ADDR_EXPR
9181 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9183 *ptr_offset = convert (sizetype, arg1);
9184 return TREE_OPERAND (arg0, 0);
9186 else if (TREE_CODE (arg1) == ADDR_EXPR
9187 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9189 *ptr_offset = convert (sizetype, arg0);
9190 return TREE_OPERAND (arg1, 0);
9194 return 0;
9197 /* Expand code for a post- or pre- increment or decrement
9198 and return the RTX for the result.
9199 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
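/* Broadly, the strategy below is: (1) if OP0 turns out to be only a copy
   of the lvalue, or storing into it directly could clobber bits we must
   preserve, or a preincrement cannot be done in a single insn, rebuild
   the operation as an explicit x = x +/- n assignment; (2) for a
   post-increment of the real lvalue, try to queue a single add/sub insn
   so the old value can be consumed first; (3) otherwise increment in
   place with expand_binop.  This is only a summary of the cases handled
   below, in the order they appear.  */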
9201 static rtx
9202 expand_increment (tree exp, int post, int ignore)
9204 rtx op0, op1;
9205 rtx temp, value;
9206 tree incremented = TREE_OPERAND (exp, 0);
9207 optab this_optab = add_optab;
9208 int icode;
9209 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9210 int op0_is_copy = 0;
9211 int single_insn = 0;
9212 /* 1 means we can't store into OP0 directly,
9213 because it is a subreg narrower than a word,
9214 and we don't dare clobber the rest of the word. */
9215 int bad_subreg = 0;
9217 /* Stabilize any component ref that might need to be
9218 evaluated more than once below. */
9219 if (!post
9220 || TREE_CODE (incremented) == BIT_FIELD_REF
9221 || (TREE_CODE (incremented) == COMPONENT_REF
9222 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9223 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9224 incremented = stabilize_reference (incremented);
9225 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9226 ones into save exprs so that they don't accidentally get evaluated
9227 more than once by the code below. */
9228 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9229 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9230 incremented = save_expr (incremented);
9232 /* Compute the operands as RTX.
9233 Note whether OP0 is the actual lvalue or a copy of it:
9234 I believe it is a copy iff it is a register or subreg
9235 and insns were generated in computing it. */
9237 temp = get_last_insn ();
9238 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9240 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9241 in place but instead must do sign- or zero-extension during assignment,
9242 so we copy it into a new register and let the code below use it as
9243 a copy.
9245 Note that we can safely modify this SUBREG since it is known not to be
9246 shared (it was made by the expand_expr call above). */
9248 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9250 if (post)
9251 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9252 else
9253 bad_subreg = 1;
9255 else if (GET_CODE (op0) == SUBREG
9256 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9258 /* We cannot increment this SUBREG in place. If we are
9259 post-incrementing, get a copy of the old value. Otherwise,
9260 just mark that we cannot increment in place. */
9261 if (post)
9262 op0 = copy_to_reg (op0);
9263 else
9264 bad_subreg = 1;
9267 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9268 && temp != get_last_insn ());
9269 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9271 /* Decide whether incrementing or decrementing. */
9272 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9273 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9274 this_optab = sub_optab;
9276 /* Convert decrement by a constant into a negative increment. */
9277 if (this_optab == sub_optab
9278 && GET_CODE (op1) == CONST_INT)
9280 op1 = GEN_INT (-INTVAL (op1));
9281 this_optab = add_optab;
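/* E.g. a decrement by 4 is treated from here on as an increment by -4,
   so only add_optab (or addv_optab below) needs to be queried.  */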
9284 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9285 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9287 /* For a preincrement, see if we can do this with a single instruction. */
9288 if (!post)
9290 icode = (int) this_optab->handlers[(int) mode].insn_code;
9291 if (icode != (int) CODE_FOR_nothing
9292 /* Make sure that OP0 is valid for operands 0 and 1
9293 of the insn we want to queue. */
9294 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9295 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9296 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9297 single_insn = 1;
9300 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9301 then we cannot just increment OP0. We must therefore contrive to
9302 increment the original value. Then, for postincrement, we can return
9303 OP0 since it is a copy of the old value. For preincrement, expand here
9304 unless we can do it with a single insn.
9306 Likewise if storing directly into OP0 would clobber high bits
9307 we need to preserve (bad_subreg). */
9308 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9310 /* This is the easiest way to increment the value wherever it is.
9311 Problems with multiple evaluation of INCREMENTED are prevented
9312 because either (1) it is a component_ref or preincrement,
9313 in which case it was stabilized above, or (2) it is an array_ref
9314 with constant index in an array in a register, which is
9315 safe to reevaluate. */
9316 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9317 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9318 ? MINUS_EXPR : PLUS_EXPR),
9319 TREE_TYPE (exp),
9320 incremented,
9321 TREE_OPERAND (exp, 1));
9323 while (TREE_CODE (incremented) == NOP_EXPR
9324 || TREE_CODE (incremented) == CONVERT_EXPR)
9326 newexp = convert (TREE_TYPE (incremented), newexp);
9327 incremented = TREE_OPERAND (incremented, 0);
9330 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9331 return post ? op0 : temp;
9334 if (post)
9336 /* We have a true reference to the value in OP0.
9337 If there is an insn to add or subtract in this mode, queue it.
9338 Queueing the increment insn avoids the register shuffling
9339 that often results if we must increment now and first save
9340 the old value for subsequent use. */
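/* E.g. for a use such as  y = x++;  queueing the add lets the old value
   in OP0 be consumed directly by the enclosing expression, with the add
   itself emitted only when the pending queue is flushed, so no separate
   copy of the old value is needed on this path.  */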
9342 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9343 op0 = stabilize (op0);
9344 #endif
9346 icode = (int) this_optab->handlers[(int) mode].insn_code;
9347 if (icode != (int) CODE_FOR_nothing
9348 /* Make sure that OP0 is valid for operands 0 and 1
9349 of the insn we want to queue. */
9350 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9351 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9353 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9354 op1 = force_reg (mode, op1);
9356 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9358 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9360 rtx addr = (general_operand (XEXP (op0, 0), mode)
9361 ? force_reg (Pmode, XEXP (op0, 0))
9362 : copy_to_reg (XEXP (op0, 0)));
9363 rtx temp, result;
9365 op0 = replace_equiv_address (op0, addr);
9366 temp = force_reg (GET_MODE (op0), op0);
9367 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9368 op1 = force_reg (mode, op1);
9370 /* The increment queue is LIFO, thus we have to `queue'
9371 the instructions in reverse order. */
9372 enqueue_insn (op0, gen_move_insn (op0, temp));
9373 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9374 return result;
9378 /* Preincrement, or we can't increment with one simple insn. */
9379 if (post)
9380 /* Save a copy of the value before inc or dec, to return it later. */
9381 temp = value = copy_to_reg (op0);
9382 else
9383 /* Arrange to return the incremented value. */
9384 /* Copy the rtx because expand_binop will protect from the queue,
9385 and the results of that would be invalid for us to return
9386 if our caller does emit_queue before using our result. */
9387 temp = copy_rtx (value = op0);
9389 /* Increment however we can. */
9390 op1 = expand_binop (mode, this_optab, value, op1, op0,
9391 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9393 /* Make sure the value is stored into OP0. */
9394 if (op1 != op0)
9395 emit_move_insn (op0, op1);
9397 return temp;
9400 /* Generate code to calculate EXP using a store-flag instruction
9401 and return an rtx for the result. EXP is either a comparison
9402 or a TRUTH_NOT_EXPR whose operand is a comparison.
9404 If TARGET is nonzero, store the result there if convenient.
9406 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9407 cheap.
9409 Return zero if there is no suitable set-flag instruction
9410 available on this machine.
9412 Once expand_expr has been called on the arguments of the comparison,
9413 we are committed to doing the store flag, since it is not safe to
9414 re-evaluate the expression. We emit the store-flag insn by calling
9415 emit_store_flag, but only expand the arguments if we have a reason
9416 to believe that emit_store_flag will be successful. If we think that
9417 it will, but it isn't, we have to simulate the store-flag with a
9418 set/jump/set sequence. */
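/* For instance, for  r = (a < b);  this tries to emit a single
   store-flag (scc) insn that sets R directly to 0 or 1, and only falls
   back to the explicit compare-and-branch sequence at the end of this
   function when that fails.  */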
9420 static rtx
9421 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9423 enum rtx_code code;
9424 tree arg0, arg1, type;
9425 tree tem;
9426 enum machine_mode operand_mode;
9427 int invert = 0;
9428 int unsignedp;
9429 rtx op0, op1;
9430 enum insn_code icode;
9431 rtx subtarget = target;
9432 rtx result, label;
9434 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9435 result at the end. We can't simply invert the test since it would
9436 have already been inverted if it were valid. This case occurs for
9437 some floating-point comparisons. */
9439 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9440 invert = 1, exp = TREE_OPERAND (exp, 0);
9442 arg0 = TREE_OPERAND (exp, 0);
9443 arg1 = TREE_OPERAND (exp, 1);
9445 /* Don't crash if the comparison was erroneous. */
9446 if (arg0 == error_mark_node || arg1 == error_mark_node)
9447 return const0_rtx;
9449 type = TREE_TYPE (arg0);
9450 operand_mode = TYPE_MODE (type);
9451 unsignedp = TREE_UNSIGNED (type);
9453 /* We won't bother with BLKmode store-flag operations because it would mean
9454 passing a lot of information to emit_store_flag. */
9455 if (operand_mode == BLKmode)
9456 return 0;
9458 /* We won't bother with store-flag operations involving function pointers
9459 when function pointers must be canonicalized before comparisons. */
9460 #ifdef HAVE_canonicalize_funcptr_for_compare
9461 if (HAVE_canonicalize_funcptr_for_compare
9462 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9463 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9464 == FUNCTION_TYPE))
9465 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9466 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9467 == FUNCTION_TYPE))))
9468 return 0;
9469 #endif
9471 STRIP_NOPS (arg0);
9472 STRIP_NOPS (arg1);
9474 /* Get the rtx comparison code to use. We know that EXP is a comparison
9475 operation of some type. Some comparisons against 1 and -1 can be
9476 converted to comparisons with zero. Do so here so that the tests
9477 below will be aware that we have a comparison with zero. These
9478 tests will not catch constants in the first operand, but constants
9479 are rarely passed as the first operand. */
9481 switch (TREE_CODE (exp))
9483 case EQ_EXPR:
9484 code = EQ;
9485 break;
9486 case NE_EXPR:
9487 code = NE;
9488 break;
9489 case LT_EXPR:
9490 if (integer_onep (arg1))
9491 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9492 else
9493 code = unsignedp ? LTU : LT;
9494 break;
9495 case LE_EXPR:
9496 if (! unsignedp && integer_all_onesp (arg1))
9497 arg1 = integer_zero_node, code = LT;
9498 else
9499 code = unsignedp ? LEU : LE;
9500 break;
9501 case GT_EXPR:
9502 if (! unsignedp && integer_all_onesp (arg1))
9503 arg1 = integer_zero_node, code = GE;
9504 else
9505 code = unsignedp ? GTU : GT;
9506 break;
9507 case GE_EXPR:
9508 if (integer_onep (arg1))
9509 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9510 else
9511 code = unsignedp ? GEU : GE;
9512 break;
9514 case UNORDERED_EXPR:
9515 code = UNORDERED;
9516 break;
9517 case ORDERED_EXPR:
9518 code = ORDERED;
9519 break;
9520 case UNLT_EXPR:
9521 code = UNLT;
9522 break;
9523 case UNLE_EXPR:
9524 code = UNLE;
9525 break;
9526 case UNGT_EXPR:
9527 code = UNGT;
9528 break;
9529 case UNGE_EXPR:
9530 code = UNGE;
9531 break;
9532 case UNEQ_EXPR:
9533 code = UNEQ;
9534 break;
9536 default:
9537 abort ();
9540 /* Put a constant second. */
9541 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9543 tem = arg0; arg0 = arg1; arg1 = tem;
9544 code = swap_condition (code);
9547 /* If this is an equality or inequality test of a single bit, we can
9548 do this by shifting the bit being tested to the low-order bit and
9549 masking the result with the constant 1. If the condition was EQ,
9550 we xor it with 1. This does not require an scc insn and is faster
9551 than an scc insn even if we have it.
9553 The code to make this transformation was moved into fold_single_bit_test,
9554 so we just call into the folder and expand its result. */
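/* For example,  (x & 8) != 0  becomes  (x >> 3) & 1,  and
   (x & 8) == 0  becomes  ((x >> 3) & 1) ^ 1;  fold_single_bit_test
   builds the corresponding tree and we simply expand it.  */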
9556 if ((code == NE || code == EQ)
9557 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9558 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9560 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9561 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9562 arg0, arg1, type),
9563 target, VOIDmode, EXPAND_NORMAL);
9566 /* Now see if we are likely to be able to do this. Return if not. */
9567 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9568 return 0;
9570 icode = setcc_gen_code[(int) code];
9571 if (icode == CODE_FOR_nothing
9572 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9574 /* We can only do this if it is one of the special cases that
9575 can be handled without an scc insn. */
9576 if ((code == LT && integer_zerop (arg1))
9577 || (! only_cheap && code == GE && integer_zerop (arg1)))
9579 else if (BRANCH_COST >= 0
9580 && ! only_cheap && (code == NE || code == EQ)
9581 && TREE_CODE (type) != REAL_TYPE
9582 && ((abs_optab->handlers[(int) operand_mode].insn_code
9583 != CODE_FOR_nothing)
9584 || (ffs_optab->handlers[(int) operand_mode].insn_code
9585 != CODE_FOR_nothing)))
9587 else
9588 return 0;
9591 if (! get_subtarget (target)
9592 || GET_MODE (subtarget) != operand_mode
9593 || ! safe_from_p (subtarget, arg1, 1))
9594 subtarget = 0;
9596 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9598 if (target == 0)
9599 target = gen_reg_rtx (mode);
9601 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9602 because, if emit_store_flag does anything it will succeed and
9603 OP0 and OP1 will not be used subsequently. */
9605 result = emit_store_flag (target, code,
9606 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9607 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9608 operand_mode, unsignedp, 1);
9610 if (result)
9612 if (invert)
9613 result = expand_binop (mode, xor_optab, result, const1_rtx,
9614 result, 0, OPTAB_LIB_WIDEN);
9615 return result;
9618 /* If this failed, we have to do this with set/compare/jump/set code. */
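/* The sequence emitted below is, schematically,

       target = 1;   (0 if INVERT)
       if (op0 <code> op1) goto label;
       target = 0;   (1 if INVERT)
     label:
*/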
9619 if (GET_CODE (target) != REG
9620 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9621 target = gen_reg_rtx (GET_MODE (target));
9623 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9624 result = compare_from_rtx (op0, op1, code, unsignedp,
9625 operand_mode, NULL_RTX);
9626 if (GET_CODE (result) == CONST_INT)
9627 return (((result == const0_rtx && ! invert)
9628 || (result != const0_rtx && invert))
9629 ? const0_rtx : const1_rtx);
9631 /* The code of RESULT may not match CODE if compare_from_rtx
9632 decided to swap its operands and reverse the original code.
9634 We know that compare_from_rtx returns either a CONST_INT or
9635 a new comparison code, so it is safe to just extract the
9636 code from RESULT. */
9637 code = GET_CODE (result);
9639 label = gen_label_rtx ();
9640 if (bcc_gen_fctn[(int) code] == 0)
9641 abort ();
9643 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9644 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9645 emit_label (label);
9647 return target;
9651 /* Stubs in case we haven't got a casesi insn. */
9652 #ifndef HAVE_casesi
9653 # define HAVE_casesi 0
9654 # define gen_casesi(a, b, c, d, e) (0)
9655 # define CODE_FOR_casesi CODE_FOR_nothing
9656 #endif
9658 /* If the machine does not have a case insn that compares the bounds,
9659 this means extra overhead for dispatch tables, which raises the
9660 threshold for using them. */
9661 #ifndef CASE_VALUES_THRESHOLD
9662 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9663 #endif /* CASE_VALUES_THRESHOLD */
9665 unsigned int
9666 case_values_threshold (void)
9668 return CASE_VALUES_THRESHOLD;
9671 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9672 0 otherwise (i.e. if there is no casesi instruction). */
9673 int
9674 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9675 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9677 enum machine_mode index_mode = SImode;
9678 int index_bits = GET_MODE_BITSIZE (index_mode);
9679 rtx op1, op2, index;
9680 enum machine_mode op_mode;
9682 if (! HAVE_casesi)
9683 return 0;
9685 /* Convert the index to SImode. */
9686 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9688 enum machine_mode omode = TYPE_MODE (index_type);
9689 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9691 /* We must handle the endpoints in the original mode. */
9692 index_expr = build (MINUS_EXPR, index_type,
9693 index_expr, minval);
9694 minval = integer_zero_node;
9695 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9696 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9697 omode, 1, default_label);
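/* Every index whose zero-based value exceeds RANGE has just been routed
   to DEFAULT_LABEL by the unsigned comparison above, which is what makes
   the truncation to the SImode index operand of casesi safe.  */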
9698 /* Now we can safely truncate. */
9699 index = convert_to_mode (index_mode, index, 0);
9701 else
9703 if (TYPE_MODE (index_type) != index_mode)
9705 index_expr = convert ((*lang_hooks.types.type_for_size)
9706 (index_bits, 0), index_expr);
9707 index_type = TREE_TYPE (index_expr);
9710 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9712 emit_queue ();
9713 index = protect_from_queue (index, 0);
9714 do_pending_stack_adjust ();
9716 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9717 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9718 (index, op_mode))
9719 index = copy_to_mode_reg (op_mode, index);
9721 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9723 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9724 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9725 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9726 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9727 (op1, op_mode))
9728 op1 = copy_to_mode_reg (op_mode, op1);
9730 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9732 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9733 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9734 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9735 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9736 (op2, op_mode))
9737 op2 = copy_to_mode_reg (op_mode, op2);
9739 emit_jump_insn (gen_casesi (index, op1, op2,
9740 table_label, default_label));
9741 return 1;
9744 /* Attempt to generate a tablejump instruction; same concept. */
9745 #ifndef HAVE_tablejump
9746 #define HAVE_tablejump 0
9747 #define gen_tablejump(x, y) (0)
9748 #endif
9750 /* Subroutine of the next function.
9752 INDEX is the value being switched on, with the lowest value
9753 in the table already subtracted.
9754 MODE is its expected mode (needed if INDEX is constant).
9755 RANGE is the length of the jump table.
9756 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9758 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9759 index value is out of range. */
9761 static void
9762 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9763 rtx default_label)
9765 rtx temp, vector;
9767 if (INTVAL (range) > cfun->max_jumptable_ents)
9768 cfun->max_jumptable_ents = INTVAL (range);
9770 /* Do an unsigned comparison (in the proper mode) between the index
9771 expression and the value which represents the length of the range.
9772 Since we just finished subtracting the lower bound of the range
9773 from the index expression, this comparison allows us to simultaneously
9774 check that the original index expression value is both greater than
9775 or equal to the minimum value of the range and less than or equal to
9776 the maximum value of the range. */
9778 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9779 default_label);
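/* E.g. for  switch (i)  with case labels LO..HI, INDEX arrives here as
   i - LO, and the single unsigned test  (unsigned) (i - LO) > HI - LO
   rejects both i < LO (which wrapped around to a huge unsigned value)
   and i > HI, so one branch covers both out-of-range directions.  */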
9781 /* If index is in range, it must fit in Pmode.
9782 Convert to Pmode so we can index with it. */
9783 if (mode != Pmode)
9784 index = convert_to_mode (Pmode, index, 1);
9786 /* Don't let a MEM slip through, because then the INDEX that comes
9787 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9788 and break_out_memory_refs will go to work on it and mess it up. */
9789 #ifdef PIC_CASE_VECTOR_ADDRESS
9790 if (flag_pic && GET_CODE (index) != REG)
9791 index = copy_to_mode_reg (Pmode, index);
9792 #endif
9794 /* If flag_force_addr were to affect this address
9795 it could interfere with the tricky assumptions made
9796 about addresses that contain label-refs,
9797 which may be valid only very near the tablejump itself. */
9798 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9799 GET_MODE_SIZE, because this indicates how large insns are. The other
9800 uses should all be Pmode, because they are addresses. This code
9801 could fail if addresses and insns are not the same size. */
9802 index = gen_rtx_PLUS (Pmode,
9803 gen_rtx_MULT (Pmode, index,
9804 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9805 gen_rtx_LABEL_REF (Pmode, table_label));
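/* At this point INDEX is the byte address
   TABLE_LABEL + index * GET_MODE_SIZE (CASE_VECTOR_MODE), i.e. the slot
   of the dispatch table holding the label (or, for a pc-relative table,
   the label difference) to jump to.  */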
9806 #ifdef PIC_CASE_VECTOR_ADDRESS
9807 if (flag_pic)
9808 index = PIC_CASE_VECTOR_ADDRESS (index);
9809 else
9810 #endif
9811 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9812 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9813 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9814 RTX_UNCHANGING_P (vector) = 1;
9815 MEM_NOTRAP_P (vector) = 1;
9816 convert_move (temp, vector, 0);
9818 emit_jump_insn (gen_tablejump (temp, table_label));
9820 /* If we are generating PIC code or if the table is PC-relative, the
9821 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9822 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9823 emit_barrier ();
9826 int
9827 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9828 rtx table_label, rtx default_label)
9830 rtx index;
9832 if (! HAVE_tablejump)
9833 return 0;
9835 index_expr = fold (build (MINUS_EXPR, index_type,
9836 convert (index_type, index_expr),
9837 convert (index_type, minval)));
9838 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9839 emit_queue ();
9840 index = protect_from_queue (index, 0);
9841 do_pending_stack_adjust ();
9843 do_tablejump (index, TYPE_MODE (index_type),
9844 convert_modes (TYPE_MODE (index_type),
9845 TYPE_MODE (TREE_TYPE (range)),
9846 expand_expr (range, NULL_RTX,
9847 VOIDmode, 0),
9848 TREE_UNSIGNED (TREE_TYPE (range))),
9849 table_label, default_label);
9850 return 1;
9853 /* Nonzero if the mode is a valid vector mode for this architecture.
9854 This returns nonzero even if there is no hardware support for the
9855 vector mode, but we can emulate with narrower modes. */
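/* So, for example, V4SImode is considered valid either when the target
   advertises it via VECTOR_MODE_SUPPORTED_P or merely when SImode moves
   exist, in which case the vector operations can be open-coded on the
   individual elements.  */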
9857 int
9858 vector_mode_valid_p (enum machine_mode mode)
9860 enum mode_class class = GET_MODE_CLASS (mode);
9861 enum machine_mode innermode;
9863 /* Doh! What's going on? */
9864 if (class != MODE_VECTOR_INT
9865 && class != MODE_VECTOR_FLOAT)
9866 return 0;
9868 /* Hardware support. Woo hoo! */
9869 if (VECTOR_MODE_SUPPORTED_P (mode))
9870 return 1;
9872 innermode = GET_MODE_INNER (mode);
9874 /* We should probably return 1 if requesting V4DI and we have no DI
9875 but do have V2DI, though that case is probably very unlikely. */
9877 /* If we have support for the inner mode, we can safely emulate it.
9878 We may not have V2DI, but we can emulate with a pair of DIs. */
9879 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9882 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
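/* E.g. a VECTOR_CST for  {1, 2, 3, 4}  of a V4SImode type becomes,
   roughly, (const_vector:V4SI [1 2 3 4]); trailing elements not present
   in the constant are filled with zero, and an all-zeros constant is
   returned as CONST0_RTX of the vector mode.  */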
9883 static rtx
9884 const_vector_from_tree (tree exp)
9886 rtvec v;
9887 int units, i;
9888 tree link, elt;
9889 enum machine_mode inner, mode;
9891 mode = TYPE_MODE (TREE_TYPE (exp));
9893 if (is_zeros_p (exp))
9894 return CONST0_RTX (mode);
9896 units = GET_MODE_NUNITS (mode);
9897 inner = GET_MODE_INNER (mode);
9899 v = rtvec_alloc (units);
9901 link = TREE_VECTOR_CST_ELTS (exp);
9902 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9904 elt = TREE_VALUE (link);
9906 if (TREE_CODE (elt) == REAL_CST)
9907 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9908 inner);
9909 else
9910 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9911 TREE_INT_CST_HIGH (elt),
9912 inner);
9915 /* Initialize remaining elements to 0. */
9916 for (; i < units; ++i)
9917 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9919 return gen_rtx_raw_CONST_VECTOR (mode, v);
9922 #include "gt-expr.h"