1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
129 void *constfundata;
130 int reverse;
133 static rtx enqueue_insn (rtx, rtx);
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
135 unsigned int);
136 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
137 struct move_by_pieces *);
138 static bool block_move_libcall_safe_for_call_parm (void);
139 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
140 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
146 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
147 struct store_by_pieces *);
148 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
149 static rtx clear_storage_via_libcall (rtx, rtx);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static int is_zeros_p (tree);
154 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, int, int);
157 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
158 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
159 tree, enum machine_mode, int, tree, int);
160 static rtx var_rtx (tree);
162 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
163 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
165 static int is_aligning_offset (tree, tree);
166 static rtx expand_increment (tree, int, int);
167 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
168 enum expand_modifier);
169 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
170 #ifdef PUSH_ROUNDING
171 static void emit_single_push_insn (enum machine_mode, rtx, tree);
172 #endif
173 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
174 static rtx const_vector_from_tree (tree);
176 /* Record for each mode whether we can move a register directly to or
177 from an object of that mode in memory. If we can't, we won't try
178 to use that mode directly when accessing a field of that mode. */
180 static char direct_load[NUM_MACHINE_MODES];
181 static char direct_store[NUM_MACHINE_MODES];
183 /* Record for each mode whether we can float-extend from memory. */
185 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
187 /* If a memory-to-memory move would take MOVE_RATIO or more simple
188 move-instruction sequences, we will do a movstr or libcall instead. */
190 #ifndef MOVE_RATIO
191 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
192 #define MOVE_RATIO 2
193 #else
194 /* If we are optimizing for space (-Os), cut down the default move ratio. */
195 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 #endif
197 #endif
199 /* This macro is used to determine whether move_by_pieces should be called
200 to perform a structure copy. */
201 #ifndef MOVE_BY_PIECES_P
202 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
204 #endif
206 /* If a clear memory operation would take CLEAR_RATIO or more simple
207 move-instruction sequences, we will do a clrstr or libcall instead. */
209 #ifndef CLEAR_RATIO
210 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
211 #define CLEAR_RATIO 2
212 #else
213 /* If we are optimizing for space, cut down the default clear ratio. */
214 #define CLEAR_RATIO (optimize_size ? 3 : 15)
215 #endif
216 #endif
218 /* This macro is used to determine whether clear_by_pieces should be
219 called to clear storage. */
220 #ifndef CLEAR_BY_PIECES_P
221 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
222 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
223 #endif
225 /* This macro is used to determine whether store_by_pieces should be
226 called to "memset" storage with byte values other than zero, or
227 to "memcpy" storage when the source is a constant string. */
228 #ifndef STORE_BY_PIECES_P
229 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
230 #endif
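/* Illustrative sketch only (hypothetical names, not part of this file) of the
   decision the MOVE_RATIO / MOVE_BY_PIECES_P family of macros above encodes:
   greedily count how many power-of-two-sized chunk moves a copy of SIZE bytes
   would take, given a largest usable chunk, and inline the copy only when
   that count stays below a target-tunable ratio.  Alignment constraints are
   ignored here for brevity; move_by_pieces_ninsns below handles them.  */
#if 0
static unsigned int
example_chunk_move_count (unsigned int size, unsigned int max_chunk)
{
  unsigned int n = 0, chunk;

  /* Largest chunks first, smaller ones mop up the remainder.  */
  for (chunk = max_chunk; chunk >= 1; chunk /= 2)
    {
      n += size / chunk;
      size %= chunk;
    }
  return n;
}

static int
example_use_move_by_pieces (unsigned int size, unsigned int max_chunk,
			    unsigned int move_ratio)
{
  return example_chunk_move_count (size, max_chunk) < move_ratio;
}
#endif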
232 /* This array records the insn_code of insns to perform block moves. */
233 enum insn_code movstr_optab[NUM_MACHINE_MODES];
235 /* This array records the insn_code of insns to perform block clears. */
236 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
238 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
239 struct file_stack *expr_wfl_stack;
241 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
243 #ifndef SLOW_UNALIGNED_ACCESS
244 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
245 #endif
247 /* This is run once per compilation to set up which modes can be used
248 directly in memory and to initialize the block move optab. */
250 void
251 init_expr_once (void)
253 rtx insn, pat;
254 enum machine_mode mode;
255 int num_clobbers;
256 rtx mem, mem1;
257 rtx reg;
259 /* Try indexing by frame ptr and try by stack ptr.
260 It is known that on the Convex the stack ptr isn't a valid index.
261 With luck, one or the other is valid on any machine. */
262 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
263 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
265 /* A scratch register we can modify in-place below to avoid
266 useless RTL allocations. */
267 reg = gen_rtx_REG (VOIDmode, -1);
269 insn = rtx_alloc (INSN);
270 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
271 PATTERN (insn) = pat;
273 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
274 mode = (enum machine_mode) ((int) mode + 1))
276 int regno;
278 direct_load[(int) mode] = direct_store[(int) mode] = 0;
279 PUT_MODE (mem, mode);
280 PUT_MODE (mem1, mode);
281 PUT_MODE (reg, mode);
283 /* See if there is some register that can be used in this mode and
284 directly loaded or stored from memory. */
286 if (mode != VOIDmode && mode != BLKmode)
287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
288 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
289 regno++)
291 if (! HARD_REGNO_MODE_OK (regno, mode))
292 continue;
294 REGNO (reg) = regno;
296 SET_SRC (pat) = mem;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
301 SET_SRC (pat) = mem1;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
306 SET_SRC (pat) = reg;
307 SET_DEST (pat) = mem;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem1;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
318 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
320 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
321 mode = GET_MODE_WIDER_MODE (mode))
323 enum machine_mode srcmode;
324 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
325 srcmode = GET_MODE_WIDER_MODE (srcmode))
327 enum insn_code ic;
329 ic = can_extend_p (mode, srcmode, 0);
330 if (ic == CODE_FOR_nothing)
331 continue;
333 PUT_MODE (mem, srcmode);
335 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
336 float_extend_from_mem[mode][srcmode] = true;
341 /* This is run at the start of compiling a function. */
343 void
344 init_expr (void)
346 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
349 /* Small sanity check that the queue is empty at the end of a function. */
351 void
352 finish_expr_for_function (void)
354 if (pending_chain)
355 abort ();
358 /* Manage the queue of increment instructions to be output
359 for POSTINCREMENT_EXPR expressions, etc. */
361 /* Queue up to increment (or change) VAR later. BODY says how:
362 BODY should be the same thing you would pass to emit_insn
363 to increment right away. It will go to emit_insn later on.
365 The value is a QUEUED expression to be used in place of VAR
366 where you want to guarantee the pre-incrementation value of VAR. */
368 static rtx
369 enqueue_insn (rtx var, rtx body)
371 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
372 body, pending_chain);
373 return pending_chain;
376 /* Use protect_from_queue to convert a QUEUED expression
377 into something that you can put immediately into an instruction.
378 If the queued incrementation has not happened yet,
379 protect_from_queue returns the variable itself.
380 If the incrementation has happened, protect_from_queue returns a temp
381 that contains a copy of the old value of the variable.
383 Any time an rtx which might possibly be a QUEUED is to be put
384 into an instruction, it must be passed through protect_from_queue first.
385 QUEUED expressions are not meaningful in instructions.
387 Do not pass a value through protect_from_queue and then hold
388 on to it for a while before putting it in an instruction!
389 If the queue is flushed in between, incorrect code will result. */
392 protect_from_queue (rtx x, int modify)
394 RTX_CODE code = GET_CODE (x);
396 #if 0 /* A QUEUED can hang around after the queue is forced out. */
397 /* Shortcut for most common case. */
398 if (pending_chain == 0)
399 return x;
400 #endif
402 if (code != QUEUED)
404 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
405 use of autoincrement. Make a copy of the contents of the memory
406 location rather than a copy of the address, but not if the value is
407 of mode BLKmode. Don't modify X in place since it might be
408 shared. */
409 if (code == MEM && GET_MODE (x) != BLKmode
410 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
412 rtx y = XEXP (x, 0);
413 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
415 if (QUEUED_INSN (y))
417 rtx temp = gen_reg_rtx (GET_MODE (x));
419 emit_insn_before (gen_move_insn (temp, new),
420 QUEUED_INSN (y));
421 return temp;
424 /* Copy the address into a pseudo, so that the returned value
425 remains correct across calls to emit_queue. */
426 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
429 /* Otherwise, recursively protect the subexpressions of all
430 the kinds of rtx's that can contain a QUEUED. */
431 if (code == MEM)
433 rtx tem = protect_from_queue (XEXP (x, 0), 0);
434 if (tem != XEXP (x, 0))
436 x = copy_rtx (x);
437 XEXP (x, 0) = tem;
440 else if (code == PLUS || code == MULT)
442 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
443 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
444 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
446 x = copy_rtx (x);
447 XEXP (x, 0) = new0;
448 XEXP (x, 1) = new1;
451 return x;
453 /* If the increment has not happened, use the variable itself. Copy it
454 into a new pseudo so that the value remains correct across calls to
455 emit_queue. */
456 if (QUEUED_INSN (x) == 0)
457 return copy_to_reg (QUEUED_VAR (x));
458 /* If the increment has happened and a pre-increment copy exists,
459 use that copy. */
460 if (QUEUED_COPY (x) != 0)
461 return QUEUED_COPY (x);
462 /* The increment has happened but we haven't set up a pre-increment copy.
463 Set one up now, and use it. */
464 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
465 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
466 QUEUED_INSN (x));
467 return QUEUED_COPY (x);
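/* Illustrative sketch only (example_emit_protected_move is a hypothetical
   helper, not part of this file) of the protocol documented above: protect
   each operand immediately before it goes into an insn, and never hold the
   protected value across a possible emit_queue.  */
#if 0
static void
example_emit_protected_move (rtx dest, rtx src)
{
  /* MODIFY is 1 for the operand being written, 0 for a read-only operand.  */
  dest = protect_from_queue (dest, 1);
  src = protect_from_queue (src, 0);

  /* Use the protected values right away; caching them across a queue
     flush would produce incorrect code, as noted above.  */
  emit_insn (gen_move_insn (dest, src));
}
#endif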
470 /* Return nonzero if X contains a QUEUED expression:
471 if it contains anything that will be altered by a queued increment.
472 We handle only combinations of MEM, PLUS, MINUS and MULT operators
473 since memory addresses generally contain only those. */
476 queued_subexp_p (rtx x)
478 enum rtx_code code = GET_CODE (x);
479 switch (code)
481 case QUEUED:
482 return 1;
483 case MEM:
484 return queued_subexp_p (XEXP (x, 0));
485 case MULT:
486 case PLUS:
487 case MINUS:
488 return (queued_subexp_p (XEXP (x, 0))
489 || queued_subexp_p (XEXP (x, 1)));
490 default:
491 return 0;
495 /* Perform all the pending incrementations. */
497 void
498 emit_queue (void)
500 rtx p;
501 while ((p = pending_chain))
503 rtx body = QUEUED_BODY (p);
505 switch (GET_CODE (body))
507 case INSN:
508 case JUMP_INSN:
509 case CALL_INSN:
510 case CODE_LABEL:
511 case BARRIER:
512 case NOTE:
513 QUEUED_INSN (p) = body;
514 emit_insn (body);
515 break;
517 #ifdef ENABLE_CHECKING
518 case SEQUENCE:
519 abort ();
520 break;
521 #endif
523 default:
524 QUEUED_INSN (p) = emit_insn (body);
525 break;
528 pending_chain = QUEUED_NEXT (p);
532 /* Copy data from FROM to TO, where the machine modes are not the same.
533 Both modes may be integer, or both may be floating.
534 UNSIGNEDP should be nonzero if FROM is an unsigned type.
535 This causes zero-extension instead of sign-extension. */
537 void
538 convert_move (rtx to, rtx from, int unsignedp)
540 enum machine_mode to_mode = GET_MODE (to);
541 enum machine_mode from_mode = GET_MODE (from);
542 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
543 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
544 enum insn_code code;
545 rtx libcall;
547 /* rtx code for making an equivalent value. */
548 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
549 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
551 to = protect_from_queue (to, 1);
552 from = protect_from_queue (from, 0);
554 if (to_real != from_real)
555 abort ();
557 /* If FROM is a SUBREG that indicates that we have already done at least
558 the required extension, strip it. We don't handle such SUBREGs as
559 TO here. */
561 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
562 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
563 >= GET_MODE_SIZE (to_mode))
564 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
565 from = gen_lowpart (to_mode, from), from_mode = to_mode;
567 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
568 abort ();
570 if (to_mode == from_mode
571 || (from_mode == VOIDmode && CONSTANT_P (from)))
573 emit_move_insn (to, from);
574 return;
577 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
579 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
580 abort ();
582 if (VECTOR_MODE_P (to_mode))
583 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
584 else
585 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
587 emit_move_insn (to, from);
588 return;
591 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
593 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
594 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
595 return;
598 if (to_real != from_real)
599 abort ();
601 if (to_real)
603 rtx value, insns;
605 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
607 /* Try converting directly if the insn is supported. */
608 if ((code = can_extend_p (to_mode, from_mode, 0))
609 != CODE_FOR_nothing)
611 emit_unop_insn (code, to, from, UNKNOWN);
612 return;
616 #ifdef HAVE_trunchfqf2
617 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
619 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
620 return;
622 #endif
623 #ifdef HAVE_trunctqfqf2
624 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
626 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
627 return;
629 #endif
630 #ifdef HAVE_truncsfqf2
631 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
633 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
634 return;
636 #endif
637 #ifdef HAVE_truncdfqf2
638 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
640 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
641 return;
643 #endif
644 #ifdef HAVE_truncxfqf2
645 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
647 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
648 return;
650 #endif
651 #ifdef HAVE_trunctfqf2
652 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
654 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
655 return;
657 #endif
659 #ifdef HAVE_trunctqfhf2
660 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
662 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
663 return;
665 #endif
666 #ifdef HAVE_truncsfhf2
667 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
669 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
670 return;
672 #endif
673 #ifdef HAVE_truncdfhf2
674 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
676 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
677 return;
679 #endif
680 #ifdef HAVE_truncxfhf2
681 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
683 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
684 return;
686 #endif
687 #ifdef HAVE_trunctfhf2
688 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
690 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
691 return;
693 #endif
695 #ifdef HAVE_truncsftqf2
696 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
698 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
699 return;
701 #endif
702 #ifdef HAVE_truncdftqf2
703 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
705 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
706 return;
708 #endif
709 #ifdef HAVE_truncxftqf2
710 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
712 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
713 return;
715 #endif
716 #ifdef HAVE_trunctftqf2
717 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
719 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
720 return;
722 #endif
724 #ifdef HAVE_truncdfsf2
725 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
727 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
728 return;
730 #endif
731 #ifdef HAVE_truncxfsf2
732 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
734 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
735 return;
737 #endif
738 #ifdef HAVE_trunctfsf2
739 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
741 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
742 return;
744 #endif
745 #ifdef HAVE_truncxfdf2
746 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
748 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
749 return;
751 #endif
752 #ifdef HAVE_trunctfdf2
753 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
755 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
756 return;
758 #endif
760 libcall = (rtx) 0;
761 switch (from_mode)
763 case SFmode:
764 switch (to_mode)
766 case DFmode:
767 libcall = extendsfdf2_libfunc;
768 break;
770 case XFmode:
771 libcall = extendsfxf2_libfunc;
772 break;
774 case TFmode:
775 libcall = extendsftf2_libfunc;
776 break;
778 default:
779 break;
781 break;
783 case DFmode:
784 switch (to_mode)
786 case SFmode:
787 libcall = truncdfsf2_libfunc;
788 break;
790 case XFmode:
791 libcall = extenddfxf2_libfunc;
792 break;
794 case TFmode:
795 libcall = extenddftf2_libfunc;
796 break;
798 default:
799 break;
801 break;
803 case XFmode:
804 switch (to_mode)
806 case SFmode:
807 libcall = truncxfsf2_libfunc;
808 break;
810 case DFmode:
811 libcall = truncxfdf2_libfunc;
812 break;
814 default:
815 break;
817 break;
819 case TFmode:
820 switch (to_mode)
822 case SFmode:
823 libcall = trunctfsf2_libfunc;
824 break;
826 case DFmode:
827 libcall = trunctfdf2_libfunc;
828 break;
830 default:
831 break;
833 break;
835 default:
836 break;
839 if (libcall == (rtx) 0)
840 /* This conversion is not implemented yet. */
841 abort ();
843 start_sequence ();
844 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
845 1, from, from_mode);
846 insns = get_insns ();
847 end_sequence ();
848 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
849 from));
850 return;
853 /* Now both modes are integers. */
855 /* Handle expanding beyond a word. */
856 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
857 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
859 rtx insns;
860 rtx lowpart;
861 rtx fill_value;
862 rtx lowfrom;
863 int i;
864 enum machine_mode lowpart_mode;
865 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
867 /* Try converting directly if the insn is supported. */
868 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
869 != CODE_FOR_nothing)
871 /* If FROM is a SUBREG, put it into a register. Do this
872 so that we always generate the same set of insns for
873 better cse'ing; if an intermediate assignment occurred,
874 we won't be doing the operation directly on the SUBREG. */
875 if (optimize > 0 && GET_CODE (from) == SUBREG)
876 from = force_reg (from_mode, from);
877 emit_unop_insn (code, to, from, equiv_code);
878 return;
880 /* Next, try converting via full word. */
881 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
882 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
883 != CODE_FOR_nothing))
885 if (GET_CODE (to) == REG)
886 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
887 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
888 emit_unop_insn (code, to,
889 gen_lowpart (word_mode, to), equiv_code);
890 return;
893 /* No special multiword conversion insn; do it by hand. */
894 start_sequence ();
896 /* Since we will turn this into a no conflict block, we must ensure
897 that the source does not overlap the target. */
899 if (reg_overlap_mentioned_p (to, from))
900 from = force_reg (from_mode, from);
902 /* Get a copy of FROM widened to a word, if necessary. */
903 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
904 lowpart_mode = word_mode;
905 else
906 lowpart_mode = from_mode;
908 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
910 lowpart = gen_lowpart (lowpart_mode, to);
911 emit_move_insn (lowpart, lowfrom);
913 /* Compute the value to put in each remaining word. */
914 if (unsignedp)
915 fill_value = const0_rtx;
916 else
918 #ifdef HAVE_slt
919 if (HAVE_slt
920 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
921 && STORE_FLAG_VALUE == -1)
923 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
924 lowpart_mode, 0);
925 fill_value = gen_reg_rtx (word_mode);
926 emit_insn (gen_slt (fill_value));
928 else
929 #endif
931 fill_value
932 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
933 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
934 NULL_RTX, 0);
935 fill_value = convert_to_mode (word_mode, fill_value, 1);
939 /* Fill the remaining words. */
940 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
942 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
943 rtx subword = operand_subword (to, index, 1, to_mode);
945 if (subword == 0)
946 abort ();
948 if (fill_value != subword)
949 emit_move_insn (subword, fill_value);
952 insns = get_insns ();
953 end_sequence ();
955 emit_no_conflict_block (insns, to, from, NULL_RTX,
956 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
957 return;
960 /* Truncating multi-word to a word or less. */
961 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
962 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
964 if (!((GET_CODE (from) == MEM
965 && ! MEM_VOLATILE_P (from)
966 && direct_load[(int) to_mode]
967 && ! mode_dependent_address_p (XEXP (from, 0)))
968 || GET_CODE (from) == REG
969 || GET_CODE (from) == SUBREG))
970 from = force_reg (from_mode, from);
971 convert_move (to, gen_lowpart (word_mode, from), 0);
972 return;
975 /* Handle pointer conversion. */ /* SPEE 900220. */
976 if (to_mode == PQImode)
978 if (from_mode != QImode)
979 from = convert_to_mode (QImode, from, unsignedp);
981 #ifdef HAVE_truncqipqi2
982 if (HAVE_truncqipqi2)
984 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
985 return;
987 #endif /* HAVE_truncqipqi2 */
988 abort ();
991 if (from_mode == PQImode)
993 if (to_mode != QImode)
995 from = convert_to_mode (QImode, from, unsignedp);
996 from_mode = QImode;
998 else
1000 #ifdef HAVE_extendpqiqi2
1001 if (HAVE_extendpqiqi2)
1003 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1004 return;
1006 #endif /* HAVE_extendpqiqi2 */
1007 abort ();
1011 if (to_mode == PSImode)
1013 if (from_mode != SImode)
1014 from = convert_to_mode (SImode, from, unsignedp);
1016 #ifdef HAVE_truncsipsi2
1017 if (HAVE_truncsipsi2)
1019 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1020 return;
1022 #endif /* HAVE_truncsipsi2 */
1023 abort ();
1026 if (from_mode == PSImode)
1028 if (to_mode != SImode)
1030 from = convert_to_mode (SImode, from, unsignedp);
1031 from_mode = SImode;
1033 else
1035 #ifdef HAVE_extendpsisi2
1036 if (! unsignedp && HAVE_extendpsisi2)
1038 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1039 return;
1041 #endif /* HAVE_extendpsisi2 */
1042 #ifdef HAVE_zero_extendpsisi2
1043 if (unsignedp && HAVE_zero_extendpsisi2)
1045 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1046 return;
1048 #endif /* HAVE_zero_extendpsisi2 */
1049 abort ();
1053 if (to_mode == PDImode)
1055 if (from_mode != DImode)
1056 from = convert_to_mode (DImode, from, unsignedp);
1058 #ifdef HAVE_truncdipdi2
1059 if (HAVE_truncdipdi2)
1061 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1062 return;
1064 #endif /* HAVE_truncdipdi2 */
1065 abort ();
1068 if (from_mode == PDImode)
1070 if (to_mode != DImode)
1072 from = convert_to_mode (DImode, from, unsignedp);
1073 from_mode = DImode;
1075 else
1077 #ifdef HAVE_extendpdidi2
1078 if (HAVE_extendpdidi2)
1080 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1081 return;
1083 #endif /* HAVE_extendpdidi2 */
1084 abort ();
1088 /* Now follow all the conversions between integers
1089 no more than a word long. */
1091 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1092 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1093 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1094 GET_MODE_BITSIZE (from_mode)))
1096 if (!((GET_CODE (from) == MEM
1097 && ! MEM_VOLATILE_P (from)
1098 && direct_load[(int) to_mode]
1099 && ! mode_dependent_address_p (XEXP (from, 0)))
1100 || GET_CODE (from) == REG
1101 || GET_CODE (from) == SUBREG))
1102 from = force_reg (from_mode, from);
1103 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1104 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1105 from = copy_to_reg (from);
1106 emit_move_insn (to, gen_lowpart (to_mode, from));
1107 return;
1110 /* Handle extension. */
1111 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1113 /* Convert directly if that works. */
1114 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1115 != CODE_FOR_nothing)
1117 if (flag_force_mem)
1118 from = force_not_mem (from);
1120 emit_unop_insn (code, to, from, equiv_code);
1121 return;
1123 else
1125 enum machine_mode intermediate;
1126 rtx tmp;
1127 tree shift_amount;
1129 /* Search for a mode to convert via. */
1130 for (intermediate = from_mode; intermediate != VOIDmode;
1131 intermediate = GET_MODE_WIDER_MODE (intermediate))
1132 if (((can_extend_p (to_mode, intermediate, unsignedp)
1133 != CODE_FOR_nothing)
1134 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1135 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1136 GET_MODE_BITSIZE (intermediate))))
1137 && (can_extend_p (intermediate, from_mode, unsignedp)
1138 != CODE_FOR_nothing))
1140 convert_move (to, convert_to_mode (intermediate, from,
1141 unsignedp), unsignedp);
1142 return;
1145 /* No suitable intermediate mode.
1146 Generate what we need with shifts. */
1147 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1148 - GET_MODE_BITSIZE (from_mode), 0);
1149 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1150 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1151 to, unsignedp);
1152 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1153 to, unsignedp);
1154 if (tmp != to)
1155 emit_move_insn (to, tmp);
1156 return;
1160 /* Support special truncate insns for certain modes. */
1162 if (from_mode == DImode && to_mode == SImode)
1164 #ifdef HAVE_truncdisi2
1165 if (HAVE_truncdisi2)
1167 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1168 return;
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1175 if (from_mode == DImode && to_mode == HImode)
1177 #ifdef HAVE_truncdihi2
1178 if (HAVE_truncdihi2)
1180 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1181 return;
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1188 if (from_mode == DImode && to_mode == QImode)
1190 #ifdef HAVE_truncdiqi2
1191 if (HAVE_truncdiqi2)
1193 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1194 return;
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1201 if (from_mode == SImode && to_mode == HImode)
1203 #ifdef HAVE_truncsihi2
1204 if (HAVE_truncsihi2)
1206 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1207 return;
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1214 if (from_mode == SImode && to_mode == QImode)
1216 #ifdef HAVE_truncsiqi2
1217 if (HAVE_truncsiqi2)
1219 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1220 return;
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1227 if (from_mode == HImode && to_mode == QImode)
1229 #ifdef HAVE_trunchiqi2
1230 if (HAVE_trunchiqi2)
1232 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1233 return;
1235 #endif
1236 convert_move (to, force_reg (from_mode, from), unsignedp);
1237 return;
1240 if (from_mode == TImode && to_mode == DImode)
1242 #ifdef HAVE_trunctidi2
1243 if (HAVE_trunctidi2)
1245 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1246 return;
1248 #endif
1249 convert_move (to, force_reg (from_mode, from), unsignedp);
1250 return;
1253 if (from_mode == TImode && to_mode == SImode)
1255 #ifdef HAVE_trunctisi2
1256 if (HAVE_trunctisi2)
1258 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1259 return;
1261 #endif
1262 convert_move (to, force_reg (from_mode, from), unsignedp);
1263 return;
1266 if (from_mode == TImode && to_mode == HImode)
1268 #ifdef HAVE_trunctihi2
1269 if (HAVE_trunctihi2)
1271 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1272 return;
1274 #endif
1275 convert_move (to, force_reg (from_mode, from), unsignedp);
1276 return;
1279 if (from_mode == TImode && to_mode == QImode)
1281 #ifdef HAVE_trunctiqi2
1282 if (HAVE_trunctiqi2)
1284 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1285 return;
1287 #endif
1288 convert_move (to, force_reg (from_mode, from), unsignedp);
1289 return;
1292 /* Handle truncation of volatile memrefs, and so on;
1293 the things that couldn't be truncated directly,
1294 and for which there was no special instruction. */
1295 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1297 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1298 emit_move_insn (to, temp);
1299 return;
1302 /* Mode combination is not recognized. */
1303 abort ();
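/* Illustrative plain-C sketch (hypothetical name) of the shift fallback used
   in convert_move above when no extension insn or intermediate mode exists:
   place the narrow value in the low bits of the wide container, shift it up
   to the top, then shift it back down logically (zero-extend) or
   arithmetically (sign-extend).  Assumes 1 <= FROM_BITS <= 64 and, like the
   expand_shift calls above, two's-complement sign-propagating right shifts
   (true for GCC targets).  */
#if 0
static long long
example_extend_low_bits (unsigned long long low_bits, int from_bits,
			 int unsignedp)
{
  int shift = 64 - from_bits;	/* the "to" mode is the full container */
  unsigned long long up = low_bits << shift;

  if (unsignedp)
    return (long long) (up >> shift);	/* logical shift: zero-extend */

  return (long long) up >> shift;	/* arithmetic shift: sign-extend */
}
#endif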
1306 /* Return an rtx for a value that would result
1307 from converting X to mode MODE.
1308 Both X and MODE may be floating, or both integer.
1309 UNSIGNEDP is nonzero if X is an unsigned value.
1310 This can be done by referring to a part of X in place
1311 or by copying to a new temporary with conversion.
1313 This function *must not* call protect_from_queue
1314 except when putting X into an insn (in which case convert_move does it). */
1317 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
1319 return convert_modes (mode, VOIDmode, x, unsignedp);
1322 /* Return an rtx for a value that would result
1323 from converting X from mode OLDMODE to mode MODE.
1324 Both modes may be floating, or both integer.
1325 UNSIGNEDP is nonzero if X is an unsigned value.
1327 This can be done by referring to a part of X in place
1328 or by copying to a new temporary with conversion.
1330 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1332 This function *must not* call protect_from_queue
1333 except when putting X into an insn (in which case convert_move does it). */
1336 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
1338 rtx temp;
1340 /* If FROM is a SUBREG that indicates that we have already done at least
1341 the required extension, strip it. */
1343 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1344 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1345 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1346 x = gen_lowpart (mode, x);
1348 if (GET_MODE (x) != VOIDmode)
1349 oldmode = GET_MODE (x);
1351 if (mode == oldmode)
1352 return x;
1354 /* There is one case that we must handle specially: If we are converting
1355 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1356 we are to interpret the constant as unsigned, gen_lowpart will do
1357 the wrong thing if the constant appears negative. What we want to do is
1358 make the high-order word of the constant zero, not all ones. */
1360 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1361 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1362 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1364 HOST_WIDE_INT val = INTVAL (x);
1366 if (oldmode != VOIDmode
1367 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1369 int width = GET_MODE_BITSIZE (oldmode);
1371 /* We need to zero extend VAL. */
1372 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1375 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1378 /* We can do this with a gen_lowpart if both desired and current modes
1379 are integer, and this is either a constant integer, a register, or a
1380 non-volatile MEM. Except for the constant case where MODE is no
1381 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1383 if ((GET_CODE (x) == CONST_INT
1384 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1385 || (GET_MODE_CLASS (mode) == MODE_INT
1386 && GET_MODE_CLASS (oldmode) == MODE_INT
1387 && (GET_CODE (x) == CONST_DOUBLE
1388 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1389 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1390 && direct_load[(int) mode])
1391 || (GET_CODE (x) == REG
1392 && (! HARD_REGISTER_P (x)
1393 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1394 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1395 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1397 /* ?? If we don't know OLDMODE, we have to assume here that
1398 X does not need sign- or zero-extension. This may not be
1399 the case, but it's the best we can do. */
1400 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1401 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1403 HOST_WIDE_INT val = INTVAL (x);
1404 int width = GET_MODE_BITSIZE (oldmode);
1406 /* We must sign or zero-extend in this case. Start by
1407 zero-extending, then sign extend if we need to. */
1408 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1409 if (! unsignedp
1410 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1411 val |= (HOST_WIDE_INT) (-1) << width;
1413 return gen_int_mode (val, mode);
1416 return gen_lowpart (mode, x);
1419 /* Converting an integer constant into a mode is always equivalent to a
1420 subreg operation. */
1421 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1423 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1424 abort ();
1425 return simplify_gen_subreg (mode, x, oldmode, 0);
1428 temp = gen_reg_rtx (mode);
1429 convert_move (temp, x, unsignedp);
1430 return temp;
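/* Hypothetical sketch of the CONST_INT special case handled in convert_modes
   above: a host word LOW interpreted in a mode twice as wide is represented
   as a (low, high) pair, and for an unsigned conversion the high word must be
   forced to zero rather than letting a negative LOW smear all-ones into it.
   The struct and names below are illustrative, not GCC types.  */
#if 0
struct example_double_int
{
  unsigned long long low;
  long long high;
};

static struct example_double_int
example_widen_host_word (long long low, int unsignedp)
{
  struct example_double_int d;

  d.low = (unsigned long long) low;
  d.high = (!unsignedp && low < 0) ? -1 : 0;	/* zero for unsigned */
  return d;
}
#endif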
1433 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1434 store efficiently. Due to internal GCC limitations, this is
1435 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1436 for an immediate constant. */
1438 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1440 /* Determine whether the LEN bytes can be moved by using several move
1441 instructions. Return nonzero if a call to move_by_pieces should
1442 succeed. */
1445 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1446 unsigned int align ATTRIBUTE_UNUSED)
1448 return MOVE_BY_PIECES_P (len, align);
1451 /* Generate several move instructions to copy LEN bytes from block FROM to
1452 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1453 and TO through protect_from_queue before calling.
1455 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1456 used to push FROM to the stack.
1458 ALIGN is maximum stack alignment we can assume.
1460 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1461 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1462 stpcpy. */
1465 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1466 unsigned int align, int endp)
1468 struct move_by_pieces data;
1469 rtx to_addr, from_addr = XEXP (from, 0);
1470 unsigned int max_size = MOVE_MAX_PIECES + 1;
1471 enum machine_mode mode = VOIDmode, tmode;
1472 enum insn_code icode;
1474 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1476 data.offset = 0;
1477 data.from_addr = from_addr;
1478 if (to)
1480 to_addr = XEXP (to, 0);
1481 data.to = to;
1482 data.autinc_to
1483 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1484 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1485 data.reverse
1486 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1488 else
1490 to_addr = NULL_RTX;
1491 data.to = NULL_RTX;
1492 data.autinc_to = 1;
1493 #ifdef STACK_GROWS_DOWNWARD
1494 data.reverse = 1;
1495 #else
1496 data.reverse = 0;
1497 #endif
1499 data.to_addr = to_addr;
1500 data.from = from;
1501 data.autinc_from
1502 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1503 || GET_CODE (from_addr) == POST_INC
1504 || GET_CODE (from_addr) == POST_DEC);
1506 data.explicit_inc_from = 0;
1507 data.explicit_inc_to = 0;
1508 if (data.reverse) data.offset = len;
1509 data.len = len;
1511 /* If copying requires more than two move insns,
1512 copy addresses to registers (to make displacements shorter)
1513 and use post-increment if available. */
1514 if (!(data.autinc_from && data.autinc_to)
1515 && move_by_pieces_ninsns (len, align) > 2)
1517 /* Find the mode of the largest move... */
1518 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1519 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1520 if (GET_MODE_SIZE (tmode) < max_size)
1521 mode = tmode;
1523 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1525 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1526 data.autinc_from = 1;
1527 data.explicit_inc_from = -1;
1529 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1531 data.from_addr = copy_addr_to_reg (from_addr);
1532 data.autinc_from = 1;
1533 data.explicit_inc_from = 1;
1535 if (!data.autinc_from && CONSTANT_P (from_addr))
1536 data.from_addr = copy_addr_to_reg (from_addr);
1537 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1539 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1540 data.autinc_to = 1;
1541 data.explicit_inc_to = -1;
1543 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1545 data.to_addr = copy_addr_to_reg (to_addr);
1546 data.autinc_to = 1;
1547 data.explicit_inc_to = 1;
1549 if (!data.autinc_to && CONSTANT_P (to_addr))
1550 data.to_addr = copy_addr_to_reg (to_addr);
1553 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1554 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1555 align = MOVE_MAX * BITS_PER_UNIT;
1557 /* First move what we can in the largest integer mode, then go to
1558 successively smaller modes. */
1560 while (max_size > 1)
1562 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1563 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1564 if (GET_MODE_SIZE (tmode) < max_size)
1565 mode = tmode;
1567 if (mode == VOIDmode)
1568 break;
1570 icode = mov_optab->handlers[(int) mode].insn_code;
1571 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1572 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1574 max_size = GET_MODE_SIZE (mode);
1577 /* The code above should have handled everything. */
1578 if (data.len > 0)
1579 abort ();
1581 if (endp)
1583 rtx to1;
1585 if (data.reverse)
1586 abort ();
1587 if (data.autinc_to)
1589 if (endp == 2)
1591 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1592 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1593 else
1594 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1595 -1));
1597 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1598 data.offset);
1600 else
1602 if (endp == 2)
1603 --data.offset;
1604 to1 = adjust_address (data.to, QImode, data.offset);
1606 return to1;
1608 else
1609 return data.to;
1612 /* Return number of insns required to move L bytes by pieces.
1613 ALIGN (in bits) is maximum alignment we can assume. */
1615 static unsigned HOST_WIDE_INT
1616 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1618 unsigned HOST_WIDE_INT n_insns = 0;
1619 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1621 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1622 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1623 align = MOVE_MAX * BITS_PER_UNIT;
1625 while (max_size > 1)
1627 enum machine_mode mode = VOIDmode, tmode;
1628 enum insn_code icode;
1630 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1631 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1632 if (GET_MODE_SIZE (tmode) < max_size)
1633 mode = tmode;
1635 if (mode == VOIDmode)
1636 break;
1638 icode = mov_optab->handlers[(int) mode].insn_code;
1639 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1640 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1642 max_size = GET_MODE_SIZE (mode);
1645 if (l)
1646 abort ();
1647 return n_insns;
1650 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1651 with move instructions for mode MODE. GENFUN is the gen_... function
1652 to make a move insn for that mode. DATA has all the other info. */
1654 static void
1655 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1656 struct move_by_pieces *data)
1658 unsigned int size = GET_MODE_SIZE (mode);
1659 rtx to1 = NULL_RTX, from1;
1661 while (data->len >= size)
1663 if (data->reverse)
1664 data->offset -= size;
1666 if (data->to)
1668 if (data->autinc_to)
1669 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1670 data->offset);
1671 else
1672 to1 = adjust_address (data->to, mode, data->offset);
1675 if (data->autinc_from)
1676 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1677 data->offset);
1678 else
1679 from1 = adjust_address (data->from, mode, data->offset);
1681 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1682 emit_insn (gen_add2_insn (data->to_addr,
1683 GEN_INT (-(HOST_WIDE_INT)size)));
1684 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1685 emit_insn (gen_add2_insn (data->from_addr,
1686 GEN_INT (-(HOST_WIDE_INT)size)));
1688 if (data->to)
1689 emit_insn ((*genfun) (to1, from1));
1690 else
1692 #ifdef PUSH_ROUNDING
1693 emit_single_push_insn (mode, from1, NULL);
1694 #else
1695 abort ();
1696 #endif
1699 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1700 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1701 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1702 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1704 if (! data->reverse)
1705 data->offset += size;
1707 data->len -= size;
1711 /* Emit code to move a block Y to a block X. This may be done with
1712 string-move instructions, with multiple scalar move instructions,
1713 or with a library call.
1715 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1716 SIZE is an rtx that says how long they are.
1717 ALIGN is the maximum alignment we can assume they have.
1718 METHOD describes what kind of copy this is, and what mechanisms may be used.
1720 Return the address of the new block, if memcpy is called and returns it,
1721 0 otherwise. */
1724 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1726 bool may_use_call;
1727 rtx retval = 0;
1728 unsigned int align;
1730 switch (method)
1732 case BLOCK_OP_NORMAL:
1733 may_use_call = true;
1734 break;
1736 case BLOCK_OP_CALL_PARM:
1737 may_use_call = block_move_libcall_safe_for_call_parm ();
1739 /* Make inhibit_defer_pop nonzero around the library call
1740 to force it to pop the arguments right away. */
1741 NO_DEFER_POP;
1742 break;
1744 case BLOCK_OP_NO_LIBCALL:
1745 may_use_call = false;
1746 break;
1748 default:
1749 abort ();
1752 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1754 if (GET_MODE (x) != BLKmode)
1755 abort ();
1756 if (GET_MODE (y) != BLKmode)
1757 abort ();
1759 x = protect_from_queue (x, 1);
1760 y = protect_from_queue (y, 0);
1761 size = protect_from_queue (size, 0);
1763 if (GET_CODE (x) != MEM)
1764 abort ();
1765 if (GET_CODE (y) != MEM)
1766 abort ();
1767 if (size == 0)
1768 abort ();
1770 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1771 can be incorrect is coming from __builtin_memcpy. */
1772 if (GET_CODE (size) == CONST_INT)
1774 if (INTVAL (size) == 0)
1775 return 0;
1777 x = shallow_copy_rtx (x);
1778 y = shallow_copy_rtx (y);
1779 set_mem_size (x, size);
1780 set_mem_size (y, size);
1783 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1784 move_by_pieces (x, y, INTVAL (size), align, 0);
1785 else if (emit_block_move_via_movstr (x, y, size, align))
1787 else if (may_use_call)
1788 retval = emit_block_move_via_libcall (x, y, size);
1789 else
1790 emit_block_move_via_loop (x, y, size, align);
1792 if (method == BLOCK_OP_CALL_PARM)
1793 OK_DEFER_POP;
1795 return retval;
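/* Hypothetical plain-C sketch of the dispatch strategy emit_block_move uses
   above: inline a by-pieces copy for small sizes, otherwise fall back to a
   library call when permitted, or to an explicit loop when calls are
   forbidden.  Names and the size threshold are illustrative, not GCC APIs.  */
#if 0
static void *
example_block_copy (void *dst, const void *src, unsigned long n,
		    int may_use_call)
{
  unsigned char *d = dst;
  const unsigned char *s = src;

  if (n <= 16)			/* "move by pieces" for tiny copies */
    {
      while (n--)
	*d++ = *s++;
      return dst;
    }

  if (may_use_call)		/* stand-in for the memcpy libcall */
    return __builtin_memcpy (dst, src, n);

  while (n--)			/* explicit loop, as in ..._via_loop */
    *d++ = *s++;
  return dst;
}
#endif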
1798 /* A subroutine of emit_block_move. Returns true if calling the
1799 block move libcall will not clobber any parameters which may have
1800 already been placed on the stack. */
1802 static bool
1803 block_move_libcall_safe_for_call_parm (void)
1805 if (PUSH_ARGS)
1806 return true;
1807 else
1809 /* Check to see whether memcpy takes all register arguments. */
1810 static enum {
1811 takes_regs_uninit, takes_regs_no, takes_regs_yes
1812 } takes_regs = takes_regs_uninit;
1814 switch (takes_regs)
1816 case takes_regs_uninit:
1818 CUMULATIVE_ARGS args_so_far;
1819 tree fn, arg;
1821 fn = emit_block_move_libcall_fn (false);
1822 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1824 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1825 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1827 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1828 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1829 if (!tmp || !REG_P (tmp))
1830 goto fail_takes_regs;
1831 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1832 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1833 NULL_TREE, 1))
1834 goto fail_takes_regs;
1835 #endif
1836 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1839 takes_regs = takes_regs_yes;
1840 /* FALLTHRU */
1842 case takes_regs_yes:
1843 return true;
1845 fail_takes_regs:
1846 takes_regs = takes_regs_no;
1847 /* FALLTHRU */
1848 case takes_regs_no:
1849 return false;
1851 default:
1852 abort ();
1857 /* A subroutine of emit_block_move. Expand a movstr pattern;
1858 return true if successful. */
1860 static bool
1861 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1863 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1864 enum machine_mode mode;
1866 /* Since this is a move insn, we don't care about volatility. */
1867 volatile_ok = 1;
1869 /* Try the most limited insn first, because there's no point
1870 including more than one in the machine description unless
1871 the more limited one has some advantage. */
1873 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1874 mode = GET_MODE_WIDER_MODE (mode))
1876 enum insn_code code = movstr_optab[(int) mode];
1877 insn_operand_predicate_fn pred;
1879 if (code != CODE_FOR_nothing
1880 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1881 here because if SIZE is less than the mode mask, as it is
1882 returned by the macro, it will definitely be less than the
1883 actual mode mask. */
1884 && ((GET_CODE (size) == CONST_INT
1885 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1886 <= (GET_MODE_MASK (mode) >> 1)))
1887 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1888 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1889 || (*pred) (x, BLKmode))
1890 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1891 || (*pred) (y, BLKmode))
1892 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1893 || (*pred) (opalign, VOIDmode)))
1895 rtx op2;
1896 rtx last = get_last_insn ();
1897 rtx pat;
1899 op2 = convert_to_mode (mode, size, 1);
1900 pred = insn_data[(int) code].operand[2].predicate;
1901 if (pred != 0 && ! (*pred) (op2, mode))
1902 op2 = copy_to_mode_reg (mode, op2);
1904 /* ??? When called via emit_block_move_for_call, it'd be
1905 nice if there were some way to inform the backend, so
1906 that it doesn't fail the expansion because it thinks
1907 emitting the libcall would be more efficient. */
1909 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1910 if (pat)
1912 emit_insn (pat);
1913 volatile_ok = 0;
1914 return true;
1916 else
1917 delete_insns_since (last);
1921 volatile_ok = 0;
1922 return false;
1925 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1926 Return the return value from memcpy, 0 otherwise. */
1928 static rtx
1929 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1931 rtx dst_addr, src_addr;
1932 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1933 enum machine_mode size_mode;
1934 rtx retval;
1936 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1938 It is unsafe to save the value generated by protect_from_queue and reuse
1939 it later. Consider what happens if emit_queue is called before the
1940 return value from protect_from_queue is used.
1942 Expansion of the CALL_EXPR below will call emit_queue before we are
1943 finished emitting RTL for argument setup. So if we are not careful we
1944 could get the wrong value for an argument.
1946 To avoid this problem we go ahead and emit code to copy the addresses of
1947 DST and SRC and SIZE into new pseudos. We can then place those new
1948 pseudos into an RTL_EXPR and use them later, even after a call to
1949 emit_queue.
1951 Note this is not strictly needed for library calls since they do not call
1952 emit_queue before loading their arguments. However, we may need to have
1953 library calls call emit_queue in the future since failing to do so could
1954 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1955 arguments in registers. */
1957 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1958 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1960 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1961 src_addr = convert_memory_address (ptr_mode, src_addr);
1963 dst_tree = make_tree (ptr_type_node, dst_addr);
1964 src_tree = make_tree (ptr_type_node, src_addr);
1966 if (TARGET_MEM_FUNCTIONS)
1967 size_mode = TYPE_MODE (sizetype);
1968 else
1969 size_mode = TYPE_MODE (unsigned_type_node);
1971 size = convert_to_mode (size_mode, size, 1);
1972 size = copy_to_mode_reg (size_mode, size);
1974 /* It is incorrect to use the libcall calling conventions to call
1975 memcpy in this context. This could be a user call to memcpy and
1976 the user may wish to examine the return value from memcpy. For
1977 targets where libcalls and normal calls have different conventions
1978 for returning pointers, we could end up generating incorrect code.
1980 For convenience, we generate the call to bcopy this way as well. */
1982 if (TARGET_MEM_FUNCTIONS)
1983 size_tree = make_tree (sizetype, size);
1984 else
1985 size_tree = make_tree (unsigned_type_node, size);
1987 fn = emit_block_move_libcall_fn (true);
1988 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1989 if (TARGET_MEM_FUNCTIONS)
1991 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1992 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1994 else
1996 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1997 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2000 /* Now we have to build up the CALL_EXPR itself. */
2001 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2002 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2003 call_expr, arg_list, NULL_TREE);
2005 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2007 /* If we are initializing a readonly value, show the above call clobbered
2008 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
2009 the delay slot scheduler might overlook conflicts and take nasty
2010 decisions. */
2011 if (RTX_UNCHANGING_P (dst))
2012 add_function_usage_to
2013 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
2014 gen_rtx_CLOBBER (VOIDmode, dst),
2015 NULL_RTX));
2017 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
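/* Illustrative sketch, not part of this file: the TREE_LIST above is consed
   back to front, so the expanded call ends up with the conventional argument
   order for whichever callee is selected.  Assuming ordinary C declarations,
   the two shapes correspond to the hypothetical helper below; only the
   memcpy form has a caller-visible return value, which is why RETVAL is
   discarded when !TARGET_MEM_FUNCTIONS.  */
#if 0
#include <string.h>   /* memcpy */
#include <strings.h>  /* bcopy */

static void *
block_copy_sketch (void *dst, const void *src, size_t size, int use_mem_funcs)
{
  if (use_mem_funcs)
    return memcpy (dst, src, size);    /* memcpy (dst, src, size) */
  bcopy (src, dst, size);              /* bcopy (src, dst, size), no result */
  return 0;
}
#endif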
2020 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2021 for the function we use for block copies. The first time FOR_CALL
2022 is true, we call assemble_external. */
2024 static GTY(()) tree block_move_fn;
2026 void
2027 init_block_move_fn (const char *asmspec)
2029 if (!block_move_fn)
2031 tree args, fn;
2033 if (TARGET_MEM_FUNCTIONS)
2035 fn = get_identifier ("memcpy");
2036 args = build_function_type_list (ptr_type_node, ptr_type_node,
2037 const_ptr_type_node, sizetype,
2038 NULL_TREE);
2040 else
2042 fn = get_identifier ("bcopy");
2043 args = build_function_type_list (void_type_node, const_ptr_type_node,
2044 ptr_type_node, unsigned_type_node,
2045 NULL_TREE);
2048 fn = build_decl (FUNCTION_DECL, fn, args);
2049 DECL_EXTERNAL (fn) = 1;
2050 TREE_PUBLIC (fn) = 1;
2051 DECL_ARTIFICIAL (fn) = 1;
2052 TREE_NOTHROW (fn) = 1;
2054 block_move_fn = fn;
2057 if (asmspec)
2059 SET_DECL_RTL (block_move_fn, NULL_RTX);
2060 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2064 static tree
2065 emit_block_move_libcall_fn (int for_call)
2067 static bool emitted_extern;
2069 if (!block_move_fn)
2070 init_block_move_fn (NULL);
2072 if (for_call && !emitted_extern)
2074 emitted_extern = true;
2075 make_decl_rtl (block_move_fn, NULL);
2076 assemble_external (block_move_fn);
2079 return block_move_fn;
2082 /* A subroutine of emit_block_move. Copy the data via an explicit
2083 loop. This is used only when libcalls are forbidden. */
2084 /* ??? It'd be nice to copy in hunks larger than QImode. */
2086 static void
2087 emit_block_move_via_loop (rtx x, rtx y, rtx size,
2088 unsigned int align ATTRIBUTE_UNUSED)
2090 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2091 enum machine_mode iter_mode;
2093 iter_mode = GET_MODE (size);
2094 if (iter_mode == VOIDmode)
2095 iter_mode = word_mode;
2097 top_label = gen_label_rtx ();
2098 cmp_label = gen_label_rtx ();
2099 iter = gen_reg_rtx (iter_mode);
2101 emit_move_insn (iter, const0_rtx);
2103 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2104 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2105 do_pending_stack_adjust ();
2107 emit_note (NOTE_INSN_LOOP_BEG);
2109 emit_jump (cmp_label);
2110 emit_label (top_label);
2112 tmp = convert_modes (Pmode, iter_mode, iter, true);
2113 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2114 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2115 x = change_address (x, QImode, x_addr);
2116 y = change_address (y, QImode, y_addr);
2118 emit_move_insn (x, y);
2120 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2121 true, OPTAB_LIB_WIDEN);
2122 if (tmp != iter)
2123 emit_move_insn (iter, tmp);
2125 emit_note (NOTE_INSN_LOOP_CONT);
2126 emit_label (cmp_label);
2128 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2129 true, top_label);
2131 emit_note (NOTE_INSN_LOOP_END);
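/* Illustrative sketch, not part of this file: the RTL emitted above is an
   explicit byte-at-a-time copy loop.  A hedged C approximation (ITER really
   lives in ITER_MODE and the addresses are built from x_addr/y_addr):  */
#if 0
static void
block_move_loop_sketch (unsigned char *x, const unsigned char *y,
                        unsigned long size)
{
  unsigned long iter = 0;        /* emit_move_insn (iter, const0_rtx) */
  goto cmp;                      /* emit_jump (cmp_label) */
 top:                            /* emit_label (top_label) */
  x[iter] = y[iter];             /* QImode move through PLUS (addr, iter) */
  iter += 1;                     /* expand_simple_binop (PLUS, iter, const1) */
 cmp:                            /* emit_label (cmp_label) */
  if (iter < size)               /* emit_cmp_and_jump_insns (.., LT, ..) */
    goto top;
}
#endif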
2134 /* Copy all or part of a value X into registers starting at REGNO.
2135 The number of registers to be filled is NREGS. */
2137 void
2138 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
2140 int i;
2141 #ifdef HAVE_load_multiple
2142 rtx pat;
2143 rtx last;
2144 #endif
2146 if (nregs == 0)
2147 return;
2149 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2150 x = validize_mem (force_const_mem (mode, x));
2152 /* See if the machine can do this with a load multiple insn. */
2153 #ifdef HAVE_load_multiple
2154 if (HAVE_load_multiple)
2156 last = get_last_insn ();
2157 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2158 GEN_INT (nregs));
2159 if (pat)
2161 emit_insn (pat);
2162 return;
2164 else
2165 delete_insns_since (last);
2167 #endif
2169 for (i = 0; i < nregs; i++)
2170 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2171 operand_subword_force (x, i, mode));
2174 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2175 The number of registers to be filled is NREGS. */
2177 void
2178 move_block_from_reg (int regno, rtx x, int nregs)
2180 int i;
2182 if (nregs == 0)
2183 return;
2185 /* See if the machine can do this with a store multiple insn. */
2186 #ifdef HAVE_store_multiple
2187 if (HAVE_store_multiple)
2189 rtx last = get_last_insn ();
2190 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2191 GEN_INT (nregs));
2192 if (pat)
2194 emit_insn (pat);
2195 return;
2197 else
2198 delete_insns_since (last);
2200 #endif
2202 for (i = 0; i < nregs; i++)
2204 rtx tem = operand_subword (x, i, 1, BLKmode);
2206 if (tem == 0)
2207 abort ();
2209 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2213 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2214 ORIG, where ORIG is a non-consecutive group of registers represented by
2215 a PARALLEL. The clone is identical to the original except in that the
2216 original set of registers is replaced by a new set of pseudo registers.
2217 The new set has the same modes as the original set. */
2220 gen_group_rtx (rtx orig)
2222 int i, length;
2223 rtx *tmps;
2225 if (GET_CODE (orig) != PARALLEL)
2226 abort ();
2228 length = XVECLEN (orig, 0);
2229 tmps = alloca (sizeof (rtx) * length);
2231 /* Skip a NULL entry in first slot. */
2232 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2234 if (i)
2235 tmps[0] = 0;
2237 for (; i < length; i++)
2239 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2240 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2242 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2245 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
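/* Illustrative sketch, not part of this file: a register group is a PARALLEL
   whose elements pair a register with its byte offset inside the value.  For
   a 16-byte value split over two DImode registers it would conceptually look
   like this (hypothetical register numbers):

       (parallel [(expr_list (reg:DI 10) (const_int 0))
                  (expr_list (reg:DI 11) (const_int 8))])

   gen_group_rtx above preserves the modes and offsets but replaces each
   register with a freshly allocated pseudo.  */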
2248 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
2249 where DST is non-consecutive registers represented by a PARALLEL.
2250 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2251 if not known. */
2253 void
2254 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
2256 rtx *tmps, src;
2257 int start, i;
2259 if (GET_CODE (dst) != PARALLEL)
2260 abort ();
2262 /* Check for a NULL entry, used to indicate that the parameter goes
2263 both on the stack and in registers. */
2264 if (XEXP (XVECEXP (dst, 0, 0), 0))
2265 start = 0;
2266 else
2267 start = 1;
2269 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
2271 /* Process the pieces. */
2272 for (i = start; i < XVECLEN (dst, 0); i++)
2274 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2275 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2276 unsigned int bytelen = GET_MODE_SIZE (mode);
2277 int shift = 0;
2279 /* Handle trailing fragments that run over the size of the struct. */
2280 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2282 /* Arrange to shift the fragment to where it belongs.
2283 extract_bit_field loads to the lsb of the reg. */
2284 if (
2285 #ifdef BLOCK_REG_PADDING
2286 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2287 == (BYTES_BIG_ENDIAN ? upward : downward)
2288 #else
2289 BYTES_BIG_ENDIAN
2290 #endif
2292 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2293 bytelen = ssize - bytepos;
2294 if (bytelen <= 0)
2295 abort ();
2298 /* If we won't be loading directly from memory, protect the real source
2299 from strange tricks we might play; but make sure that the source can
2300 be loaded directly into the destination. */
2301 src = orig_src;
2302 if (GET_CODE (orig_src) != MEM
2303 && (!CONSTANT_P (orig_src)
2304 || (GET_MODE (orig_src) != mode
2305 && GET_MODE (orig_src) != VOIDmode)))
2307 if (GET_MODE (orig_src) == VOIDmode)
2308 src = gen_reg_rtx (mode);
2309 else
2310 src = gen_reg_rtx (GET_MODE (orig_src));
2312 emit_move_insn (src, orig_src);
2315 /* Optimize the access just a bit. */
2316 if (GET_CODE (src) == MEM
2317 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
2318 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2319 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2320 && bytelen == GET_MODE_SIZE (mode))
2322 tmps[i] = gen_reg_rtx (mode);
2323 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2325 else if (GET_CODE (src) == CONCAT)
2327 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2328 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2330 if ((bytepos == 0 && bytelen == slen0)
2331 || (bytepos != 0 && bytepos + bytelen <= slen))
2333 /* The following assumes that the concatenated objects all
2334 have the same size. In this case, a simple calculation
2335 can be used to determine the object and the bit field
2336 to be extracted. */
2337 tmps[i] = XEXP (src, bytepos / slen0);
2338 if (! CONSTANT_P (tmps[i])
2339 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2340 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2341 (bytepos % slen0) * BITS_PER_UNIT,
2342 1, NULL_RTX, mode, mode, ssize);
2344 else if (bytepos == 0)
2346 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2347 emit_move_insn (mem, src);
2348 tmps[i] = adjust_address (mem, mode, 0);
2350 else
2351 abort ();
2353 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2354 SIMD register, which is currently broken. While we get GCC
2355 to emit proper RTL for these cases, let's dump to memory. */
2356 else if (VECTOR_MODE_P (GET_MODE (dst))
2357 && GET_CODE (src) == REG)
2359 int slen = GET_MODE_SIZE (GET_MODE (src));
2360 rtx mem;
2362 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2363 emit_move_insn (mem, src);
2364 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2366 else if (CONSTANT_P (src)
2367 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2368 tmps[i] = src;
2369 else
2370 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2371 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2372 mode, mode, ssize);
2374 if (shift)
2375 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2376 tmps[i], 0, OPTAB_WIDEN);
2379 emit_queue ();
2381 /* Copy the extracted pieces into the proper (probable) hard regs. */
2382 for (i = start; i < XVECLEN (dst, 0); i++)
2383 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2386 /* Emit code to move a block SRC to block DST, where SRC and DST are
2387 non-consecutive groups of registers, each represented by a PARALLEL. */
2389 void
2390 emit_group_move (rtx dst, rtx src)
2392 int i;
2394 if (GET_CODE (src) != PARALLEL
2395 || GET_CODE (dst) != PARALLEL
2396 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2397 abort ();
2399 /* Skip first entry if NULL. */
2400 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2401 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2402 XEXP (XVECEXP (src, 0, i), 0));
2405 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2406 where SRC is non-consecutive registers represented by a PARALLEL.
2407 SSIZE represents the total size of block ORIG_DST, or -1 if not
2408 known. */
2410 void
2411 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2413 rtx *tmps, dst;
2414 int start, i;
2416 if (GET_CODE (src) != PARALLEL)
2417 abort ();
2419 /* Check for a NULL entry, used to indicate that the parameter goes
2420 both on the stack and in registers. */
2421 if (XEXP (XVECEXP (src, 0, 0), 0))
2422 start = 0;
2423 else
2424 start = 1;
2426 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2428 /* Copy the (probable) hard regs into pseudos. */
2429 for (i = start; i < XVECLEN (src, 0); i++)
2431 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2432 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2433 emit_move_insn (tmps[i], reg);
2435 emit_queue ();
2437 /* If we won't be storing directly into memory, protect the real destination
2438 from strange tricks we might play. */
2439 dst = orig_dst;
2440 if (GET_CODE (dst) == PARALLEL)
2442 rtx temp;
2444 /* We can get a PARALLEL dst if there is a conditional expression in
2445 a return statement. In that case, the dst and src are the same,
2446 so no action is necessary. */
2447 if (rtx_equal_p (dst, src))
2448 return;
2450 /* It is unclear if we can ever reach here, but we may as well handle
2451 it. Allocate a temporary, and split this into a store/load to/from
2452 the temporary. */
2454 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2455 emit_group_store (temp, src, type, ssize);
2456 emit_group_load (dst, temp, type, ssize);
2457 return;
2459 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2461 dst = gen_reg_rtx (GET_MODE (orig_dst));
2462 /* Make life a bit easier for combine. */
2463 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2466 /* Process the pieces. */
2467 for (i = start; i < XVECLEN (src, 0); i++)
2469 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2470 enum machine_mode mode = GET_MODE (tmps[i]);
2471 unsigned int bytelen = GET_MODE_SIZE (mode);
2472 rtx dest = dst;
2474 /* Handle trailing fragments that run over the size of the struct. */
2475 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2477 /* store_bit_field always takes its value from the lsb.
2478 Move the fragment to the lsb if it's not already there. */
2479 if (
2480 #ifdef BLOCK_REG_PADDING
2481 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2482 == (BYTES_BIG_ENDIAN ? upward : downward)
2483 #else
2484 BYTES_BIG_ENDIAN
2485 #endif
2488 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2489 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2490 tmps[i], 0, OPTAB_WIDEN);
2492 bytelen = ssize - bytepos;
2495 if (GET_CODE (dst) == CONCAT)
2497 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2498 dest = XEXP (dst, 0);
2499 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2501 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2502 dest = XEXP (dst, 1);
2504 else if (bytepos == 0 && XVECLEN (src, 0))
2506 dest = assign_stack_temp (GET_MODE (dest),
2507 GET_MODE_SIZE (GET_MODE (dest)), 0);
2508 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2509 tmps[i]);
2510 dst = dest;
2511 break;
2513 else
2514 abort ();
2517 /* Optimize the access just a bit. */
2518 if (GET_CODE (dest) == MEM
2519 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2520 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2521 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2522 && bytelen == GET_MODE_SIZE (mode))
2523 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2524 else
2525 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2526 mode, tmps[i], ssize);
2529 emit_queue ();
2531 /* Copy from the pseudo into the (probable) hard reg. */
2532 if (orig_dst != dst)
2533 emit_move_insn (orig_dst, dst);
2536 /* Generate code to copy a BLKmode object of TYPE out of a
2537 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2538 is null, a stack temporary is created. TGTBLK is returned.
2540 The primary purpose of this routine is to handle functions
2541 that return BLKmode structures in registers. Some machines
2542 (the PA for example) want to return all small structures
2543 in registers regardless of the structure's alignment. */
2546 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2548 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2549 rtx src = NULL, dst = NULL;
2550 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2551 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2553 if (tgtblk == 0)
2555 tgtblk = assign_temp (build_qualified_type (type,
2556 (TYPE_QUALS (type)
2557 | TYPE_QUAL_CONST)),
2558 0, 1, 1);
2559 preserve_temp_slots (tgtblk);
2562 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2563 into a new pseudo which is a full word. */
2565 if (GET_MODE (srcreg) != BLKmode
2566 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2567 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2569 /* Structures whose size is not a multiple of a word are aligned
2570 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2571 machine, this means we must skip the empty high order bytes when
2572 calculating the bit offset. */
2573 if (BYTES_BIG_ENDIAN
2574 && bytes % UNITS_PER_WORD)
2575 big_endian_correction
2576 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2578 /* Copy the structure BITSIZE bits at a time.
2580 We could probably emit more efficient code for machines which do not use
2581 strict alignment, but it doesn't seem worth the effort at the current
2582 time. */
2583 for (bitpos = 0, xbitpos = big_endian_correction;
2584 bitpos < bytes * BITS_PER_UNIT;
2585 bitpos += bitsize, xbitpos += bitsize)
2587 /* We need a new source operand each time xbitpos is on a
2588 word boundary and when xbitpos == big_endian_correction
2589 (the first time through). */
2590 if (xbitpos % BITS_PER_WORD == 0
2591 || xbitpos == big_endian_correction)
2592 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2593 GET_MODE (srcreg));
2595 /* We need a new destination operand each time bitpos is on
2596 a word boundary. */
2597 if (bitpos % BITS_PER_WORD == 0)
2598 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2600 /* Use xbitpos for the source extraction (right justified) and
2601 bitpos for the destination store (left justified). */
2602 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2603 extract_bit_field (src, bitsize,
2604 xbitpos % BITS_PER_WORD, 1,
2605 NULL_RTX, word_mode, word_mode,
2606 BITS_PER_WORD),
2607 BITS_PER_WORD);
2610 return tgtblk;
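/* Illustrative sketch, not part of this file: worked example of the
   big-endian correction computed above.  On a hypothetical target with
   32-bit words and 8-bit units, a 6-byte structure gives

       bytes % UNITS_PER_WORD   = 6 % 4       = 2
       big_endian_correction    = 32 - 2 * 8  = 16

   so every source extraction is offset by 16 bits relative to the
   destination store, skipping the empty high-order bytes, while stores into
   TGTBLK still start at bit 0.  The same expression as a standalone helper:  */
#if 0
static unsigned int
big_endian_correction_sketch (unsigned int bytes,
                              unsigned int units_per_word,
                              unsigned int bits_per_unit)
{
  unsigned int bits_per_word = units_per_word * bits_per_unit;
  return (bytes % units_per_word
          ? bits_per_word - (bytes % units_per_word) * bits_per_unit
          : 0);
}
#endif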
2613 /* Add a USE expression for REG to the (possibly empty) list pointed
2614 to by CALL_FUSAGE. REG must denote a hard register. */
2616 void
2617 use_reg (rtx *call_fusage, rtx reg)
2619 if (GET_CODE (reg) != REG
2620 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2621 abort ();
2623 *call_fusage
2624 = gen_rtx_EXPR_LIST (VOIDmode,
2625 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2628 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2629 starting at REGNO. All of these registers must be hard registers. */
2631 void
2632 use_regs (rtx *call_fusage, int regno, int nregs)
2634 int i;
2636 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2637 abort ();
2639 for (i = 0; i < nregs; i++)
2640 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2643 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2644 PARALLEL REGS. This is for calls that pass values in multiple
2645 non-contiguous locations. The Irix 6 ABI has examples of this. */
2647 void
2648 use_group_regs (rtx *call_fusage, rtx regs)
2650 int i;
2652 for (i = 0; i < XVECLEN (regs, 0); i++)
2654 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2656 /* A NULL entry means the parameter goes both on the stack and in
2657 registers. This can also be a MEM for targets that pass values
2658 partially on the stack and partially in registers. */
2659 if (reg != 0 && GET_CODE (reg) == REG)
2660 use_reg (call_fusage, reg);
2665 /* Determine whether the LEN bytes generated by CONSTFUN can be
2666 stored to memory using several move instructions. CONSTFUNDATA is
2667 a pointer which will be passed as argument in every CONSTFUN call.
2668 ALIGN is maximum alignment we can assume. Return nonzero if a
2669 call to store_by_pieces should succeed. */
2672 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2673 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2674 void *constfundata, unsigned int align)
2676 unsigned HOST_WIDE_INT max_size, l;
2677 HOST_WIDE_INT offset = 0;
2678 enum machine_mode mode, tmode;
2679 enum insn_code icode;
2680 int reverse;
2681 rtx cst;
2683 if (len == 0)
2684 return 1;
2686 if (! STORE_BY_PIECES_P (len, align))
2687 return 0;
2689 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2690 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2691 align = MOVE_MAX * BITS_PER_UNIT;
2693 /* We would first store what we can in the largest integer mode, then go to
2694 successively smaller modes. */
2696 for (reverse = 0;
2697 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2698 reverse++)
2700 l = len;
2701 mode = VOIDmode;
2702 max_size = STORE_MAX_PIECES + 1;
2703 while (max_size > 1)
2705 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2706 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2707 if (GET_MODE_SIZE (tmode) < max_size)
2708 mode = tmode;
2710 if (mode == VOIDmode)
2711 break;
2713 icode = mov_optab->handlers[(int) mode].insn_code;
2714 if (icode != CODE_FOR_nothing
2715 && align >= GET_MODE_ALIGNMENT (mode))
2717 unsigned int size = GET_MODE_SIZE (mode);
2719 while (l >= size)
2721 if (reverse)
2722 offset -= size;
2724 cst = (*constfun) (constfundata, offset, mode);
2725 if (!LEGITIMATE_CONSTANT_P (cst))
2726 return 0;
2728 if (!reverse)
2729 offset += size;
2731 l -= size;
2735 max_size = GET_MODE_SIZE (mode);
2738 /* The code above should have handled everything. */
2739 if (l != 0)
2740 abort ();
2743 return 1;
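/* Illustrative sketch, not part of this file: CONSTFUN is called with
   CONSTFUNDATA, a byte OFFSET and a MODE, and must return an rtx constant
   for that piece.  A hypothetical callback that replays bytes from a host
   string (builtin_memcpy_read_str in builtins.c does essentially this via
   c_readstr):  */
#if 0
static rtx
read_str_constfun_sketch (void *data, HOST_WIDE_INT offset,
                          enum machine_mode mode)
{
  const char *str = (const char *) data;
  /* c_readstr packs GET_MODE_SIZE (mode) bytes into an immediate of MODE.  */
  return c_readstr (str + offset, mode);
}
#endif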
2746 /* Generate several move instructions to store LEN bytes generated by
2747 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2748 pointer which will be passed as argument in every CONSTFUN call.
2749 ALIGN is maximum alignment we can assume.
2750 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2751 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2752 stpcpy. */
2755 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2756 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2757 void *constfundata, unsigned int align, int endp)
2759 struct store_by_pieces data;
2761 if (len == 0)
2763 if (endp == 2)
2764 abort ();
2765 return to;
2768 if (! STORE_BY_PIECES_P (len, align))
2769 abort ();
2770 to = protect_from_queue (to, 1);
2771 data.constfun = constfun;
2772 data.constfundata = constfundata;
2773 data.len = len;
2774 data.to = to;
2775 store_by_pieces_1 (&data, align);
2776 if (endp)
2778 rtx to1;
2780 if (data.reverse)
2781 abort ();
2782 if (data.autinc_to)
2784 if (endp == 2)
2786 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2787 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2788 else
2789 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2790 -1));
2792 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2793 data.offset);
2795 else
2797 if (endp == 2)
2798 --data.offset;
2799 to1 = adjust_address (data.to, QImode, data.offset);
2801 return to1;
2803 else
2804 return data.to;
2807 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2808 rtx with BLKmode). The caller must pass TO through protect_from_queue
2809 before calling. ALIGN is maximum alignment we can assume. */
2811 static void
2812 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2814 struct store_by_pieces data;
2816 if (len == 0)
2817 return;
2819 data.constfun = clear_by_pieces_1;
2820 data.constfundata = NULL;
2821 data.len = len;
2822 data.to = to;
2823 store_by_pieces_1 (&data, align);
2826 /* Callback routine for clear_by_pieces.
2827 Return const0_rtx unconditionally. */
2829 static rtx
2830 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2831 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2832 enum machine_mode mode ATTRIBUTE_UNUSED)
2834 return const0_rtx;
2837 /* Subroutine of clear_by_pieces and store_by_pieces.
2838 Generate several move instructions to store LEN bytes of block TO. (A MEM
2839 rtx with BLKmode). The caller must pass TO through protect_from_queue
2840 before calling. ALIGN is maximum alignment we can assume. */
2842 static void
2843 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2844 unsigned int align ATTRIBUTE_UNUSED)
2846 rtx to_addr = XEXP (data->to, 0);
2847 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2848 enum machine_mode mode = VOIDmode, tmode;
2849 enum insn_code icode;
2851 data->offset = 0;
2852 data->to_addr = to_addr;
2853 data->autinc_to
2854 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2855 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2857 data->explicit_inc_to = 0;
2858 data->reverse
2859 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2860 if (data->reverse)
2861 data->offset = data->len;
2863 /* If storing requires more than two move insns,
2864 copy addresses to registers (to make displacements shorter)
2865 and use post-increment if available. */
2866 if (!data->autinc_to
2867 && move_by_pieces_ninsns (data->len, align) > 2)
2869 /* Determine the main mode we'll be using. */
2870 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2871 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2872 if (GET_MODE_SIZE (tmode) < max_size)
2873 mode = tmode;
2875 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2877 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2878 data->autinc_to = 1;
2879 data->explicit_inc_to = -1;
2882 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2883 && ! data->autinc_to)
2885 data->to_addr = copy_addr_to_reg (to_addr);
2886 data->autinc_to = 1;
2887 data->explicit_inc_to = 1;
2890 if ( !data->autinc_to && CONSTANT_P (to_addr))
2891 data->to_addr = copy_addr_to_reg (to_addr);
2894 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2895 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2896 align = MOVE_MAX * BITS_PER_UNIT;
2898 /* First store what we can in the largest integer mode, then go to
2899 successively smaller modes. */
2901 while (max_size > 1)
2903 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2904 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2905 if (GET_MODE_SIZE (tmode) < max_size)
2906 mode = tmode;
2908 if (mode == VOIDmode)
2909 break;
2911 icode = mov_optab->handlers[(int) mode].insn_code;
2912 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2913 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2915 max_size = GET_MODE_SIZE (mode);
2918 /* The code above should have handled everything. */
2919 if (data->len != 0)
2920 abort ();
2923 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2924 with move instructions for mode MODE. GENFUN is the gen_... function
2925 to make a move insn for that mode. DATA has all the other info. */
2927 static void
2928 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2929 struct store_by_pieces *data)
2931 unsigned int size = GET_MODE_SIZE (mode);
2932 rtx to1, cst;
2934 while (data->len >= size)
2936 if (data->reverse)
2937 data->offset -= size;
2939 if (data->autinc_to)
2940 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2941 data->offset);
2942 else
2943 to1 = adjust_address (data->to, mode, data->offset);
2945 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2946 emit_insn (gen_add2_insn (data->to_addr,
2947 GEN_INT (-(HOST_WIDE_INT) size)));
2949 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2950 emit_insn ((*genfun) (to1, cst));
2952 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2953 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2955 if (! data->reverse)
2956 data->offset += size;
2958 data->len -= size;
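/* Illustrative sketch, not part of this file: worked example of the
   widest-mode-first chunking performed by store_by_pieces_1/_2 above.  With
   4-byte words, STORE_MAX_PIECES of at least 4 and sufficient alignment,
   LEN = 7 comes out as three stores: one SImode (4 bytes), one HImode
   (2 bytes) and one QImode (1 byte), i.e. 4 + 2 + 1 = 7.  */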
2962 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2963 its length in bytes. */
2966 clear_storage (rtx object, rtx size)
2968 rtx retval = 0;
2969 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2970 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2972 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2973 just move a zero. Otherwise, do this a piece at a time. */
2974 if (GET_MODE (object) != BLKmode
2975 && GET_CODE (size) == CONST_INT
2976 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2977 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2978 else
2980 object = protect_from_queue (object, 1);
2981 size = protect_from_queue (size, 0);
2983 if (size == const0_rtx)
2985 else if (GET_CODE (size) == CONST_INT
2986 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2987 clear_by_pieces (object, INTVAL (size), align);
2988 else if (clear_storage_via_clrstr (object, size, align))
2990 else
2991 retval = clear_storage_via_libcall (object, size);
2994 return retval;
2997 /* A subroutine of clear_storage. Expand a clrstr pattern;
2998 return true if successful. */
3000 static bool
3001 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
3003 /* Try the most limited insn first, because there's no point
3004 including more than one in the machine description unless
3005 the more limited one has some advantage. */
3007 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3008 enum machine_mode mode;
3010 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3011 mode = GET_MODE_WIDER_MODE (mode))
3013 enum insn_code code = clrstr_optab[(int) mode];
3014 insn_operand_predicate_fn pred;
3016 if (code != CODE_FOR_nothing
3017 /* We don't need MODE to be narrower than
3018 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3019 the mode mask, as it is returned by the macro, it will
3020 definitely be less than the actual mode mask. */
3021 && ((GET_CODE (size) == CONST_INT
3022 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3023 <= (GET_MODE_MASK (mode) >> 1)))
3024 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3025 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3026 || (*pred) (object, BLKmode))
3027 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3028 || (*pred) (opalign, VOIDmode)))
3030 rtx op1;
3031 rtx last = get_last_insn ();
3032 rtx pat;
3034 op1 = convert_to_mode (mode, size, 1);
3035 pred = insn_data[(int) code].operand[1].predicate;
3036 if (pred != 0 && ! (*pred) (op1, mode))
3037 op1 = copy_to_mode_reg (mode, op1);
3039 pat = GEN_FCN ((int) code) (object, op1, opalign);
3040 if (pat)
3042 emit_insn (pat);
3043 return true;
3045 else
3046 delete_insns_since (last);
3050 return false;
3053 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3054 Return the return value of memset, 0 otherwise. */
3056 static rtx
3057 clear_storage_via_libcall (rtx object, rtx size)
3059 tree call_expr, arg_list, fn, object_tree, size_tree;
3060 enum machine_mode size_mode;
3061 rtx retval;
3063 /* OBJECT or SIZE may have been passed through protect_from_queue.
3065 It is unsafe to save the value generated by protect_from_queue
3066 and reuse it later. Consider what happens if emit_queue is
3067 called before the return value from protect_from_queue is used.
3069 Expansion of the CALL_EXPR below will call emit_queue before
3070 we are finished emitting RTL for argument setup. So if we are
3071 not careful we could get the wrong value for an argument.
3073 To avoid this problem we go ahead and emit code to copy OBJECT
3074 and SIZE into new pseudos. We can then place those new pseudos
3075 into an RTL_EXPR and use them later, even after a call to
3076 emit_queue.
3078 Note this is not strictly needed for library calls since they
3079 do not call emit_queue before loading their arguments. However,
3080 we may need to have library calls call emit_queue in the future
3081 since failing to do so could cause problems for targets which
3082 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3084 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3086 if (TARGET_MEM_FUNCTIONS)
3087 size_mode = TYPE_MODE (sizetype);
3088 else
3089 size_mode = TYPE_MODE (unsigned_type_node);
3090 size = convert_to_mode (size_mode, size, 1);
3091 size = copy_to_mode_reg (size_mode, size);
3093 /* It is incorrect to use the libcall calling conventions to call
3094 memset in this context. This could be a user call to memset and
3095 the user may wish to examine the return value from memset. For
3096 targets where libcalls and normal calls have different conventions
3097 for returning pointers, we could end up generating incorrect code.
3099 For convenience, we generate the call to bzero this way as well. */
3101 object_tree = make_tree (ptr_type_node, object);
3102 if (TARGET_MEM_FUNCTIONS)
3103 size_tree = make_tree (sizetype, size);
3104 else
3105 size_tree = make_tree (unsigned_type_node, size);
3107 fn = clear_storage_libcall_fn (true);
3108 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3109 if (TARGET_MEM_FUNCTIONS)
3110 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3111 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3113 /* Now we have to build up the CALL_EXPR itself. */
3114 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3115 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3116 call_expr, arg_list, NULL_TREE);
3118 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3120 /* If we are initializing a readonly value, show the above call
3121 clobbered it. Otherwise, a load from it may erroneously be
3122 hoisted from a loop. */
3123 if (RTX_UNCHANGING_P (object))
3124 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3126 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3129 /* A subroutine of clear_storage_via_libcall. Create the tree node
3130 for the function we use for block clears. The first time FOR_CALL
3131 is true, we call assemble_external. */
3133 static GTY(()) tree block_clear_fn;
3135 void
3136 init_block_clear_fn (const char *asmspec)
3138 if (!block_clear_fn)
3140 tree fn, args;
3142 if (TARGET_MEM_FUNCTIONS)
3144 fn = get_identifier ("memset");
3145 args = build_function_type_list (ptr_type_node, ptr_type_node,
3146 integer_type_node, sizetype,
3147 NULL_TREE);
3149 else
3151 fn = get_identifier ("bzero");
3152 args = build_function_type_list (void_type_node, ptr_type_node,
3153 unsigned_type_node, NULL_TREE);
3156 fn = build_decl (FUNCTION_DECL, fn, args);
3157 DECL_EXTERNAL (fn) = 1;
3158 TREE_PUBLIC (fn) = 1;
3159 DECL_ARTIFICIAL (fn) = 1;
3160 TREE_NOTHROW (fn) = 1;
3162 block_clear_fn = fn;
3165 if (asmspec)
3167 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3168 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3172 static tree
3173 clear_storage_libcall_fn (int for_call)
3175 static bool emitted_extern;
3177 if (!block_clear_fn)
3178 init_block_clear_fn (NULL);
3180 if (for_call && !emitted_extern)
3182 emitted_extern = true;
3183 make_decl_rtl (block_clear_fn, NULL);
3184 assemble_external (block_clear_fn);
3187 return block_clear_fn;
3190 /* Generate code to copy Y into X.
3191 Both Y and X must have the same mode, except that
3192 Y can be a constant with VOIDmode.
3193 This mode cannot be BLKmode; use emit_block_move for that.
3195 Return the last instruction emitted. */
3198 emit_move_insn (rtx x, rtx y)
3200 enum machine_mode mode = GET_MODE (x);
3201 rtx y_cst = NULL_RTX;
3202 rtx last_insn, set;
3204 x = protect_from_queue (x, 1);
3205 y = protect_from_queue (y, 0);
3207 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3208 abort ();
3210 /* Never force constant_p_rtx to memory. */
3211 if (GET_CODE (y) == CONSTANT_P_RTX)
3213 else if (CONSTANT_P (y))
3215 if (optimize
3216 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3217 && (last_insn = compress_float_constant (x, y)))
3218 return last_insn;
3220 y_cst = y;
3222 if (!LEGITIMATE_CONSTANT_P (y))
3224 y = force_const_mem (mode, y);
3226 /* If the target's cannot_force_const_mem prevented the spill,
3227 assume that the target's move expanders will also take care
3228 of the non-legitimate constant. */
3229 if (!y)
3230 y = y_cst;
3234 /* If X or Y are memory references, verify that their addresses are valid
3235 for the machine. */
3236 if (GET_CODE (x) == MEM
3237 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3238 && ! push_operand (x, GET_MODE (x)))
3239 || (flag_force_addr
3240 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3241 x = validize_mem (x);
3243 if (GET_CODE (y) == MEM
3244 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3245 || (flag_force_addr
3246 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3247 y = validize_mem (y);
3249 if (mode == BLKmode)
3250 abort ();
3252 last_insn = emit_move_insn_1 (x, y);
3254 if (y_cst && GET_CODE (x) == REG
3255 && (set = single_set (last_insn)) != NULL_RTX
3256 && SET_DEST (set) == x
3257 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3258 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3260 return last_insn;
3263 /* Low level part of emit_move_insn.
3264 Called just like emit_move_insn, but assumes X and Y
3265 are basically valid. */
3268 emit_move_insn_1 (rtx x, rtx y)
3270 enum machine_mode mode = GET_MODE (x);
3271 enum machine_mode submode;
3272 enum mode_class class = GET_MODE_CLASS (mode);
3274 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3275 abort ();
3277 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3278 return
3279 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3281 /* Expand complex moves by moving real part and imag part, if possible. */
3282 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3283 && BLKmode != (submode = GET_MODE_INNER (mode))
3284 && (mov_optab->handlers[(int) submode].insn_code
3285 != CODE_FOR_nothing))
3287 /* Don't split destination if it is a stack push. */
3288 int stack = push_operand (x, GET_MODE (x));
3290 #ifdef PUSH_ROUNDING
3291 /* In case we output to the stack, but the size is smaller than the
3292 machine can push exactly, we need to use move instructions. */
3293 if (stack
3294 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3295 != GET_MODE_SIZE (submode)))
3297 rtx temp;
3298 HOST_WIDE_INT offset1, offset2;
3300 /* Do not use anti_adjust_stack, since we don't want to update
3301 stack_pointer_delta. */
3302 temp = expand_binop (Pmode,
3303 #ifdef STACK_GROWS_DOWNWARD
3304 sub_optab,
3305 #else
3306 add_optab,
3307 #endif
3308 stack_pointer_rtx,
3309 GEN_INT
3310 (PUSH_ROUNDING
3311 (GET_MODE_SIZE (GET_MODE (x)))),
3312 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3314 if (temp != stack_pointer_rtx)
3315 emit_move_insn (stack_pointer_rtx, temp);
3317 #ifdef STACK_GROWS_DOWNWARD
3318 offset1 = 0;
3319 offset2 = GET_MODE_SIZE (submode);
3320 #else
3321 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3322 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3323 + GET_MODE_SIZE (submode));
3324 #endif
3326 emit_move_insn (change_address (x, submode,
3327 gen_rtx_PLUS (Pmode,
3328 stack_pointer_rtx,
3329 GEN_INT (offset1))),
3330 gen_realpart (submode, y));
3331 emit_move_insn (change_address (x, submode,
3332 gen_rtx_PLUS (Pmode,
3333 stack_pointer_rtx,
3334 GEN_INT (offset2))),
3335 gen_imagpart (submode, y));
3337 else
3338 #endif
3339 /* If this is a stack, push the highpart first, so it
3340 will be in the argument order.
3342 In that case, change_address is used only to convert
3343 the mode, not to change the address. */
3344 if (stack)
3346 /* Note that the real part always precedes the imag part in memory
3347 regardless of machine's endianness. */
3348 #ifdef STACK_GROWS_DOWNWARD
3349 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3350 gen_imagpart (submode, y));
3351 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3352 gen_realpart (submode, y));
3353 #else
3354 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3355 gen_realpart (submode, y));
3356 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3357 gen_imagpart (submode, y));
3358 #endif
3360 else
3362 rtx realpart_x, realpart_y;
3363 rtx imagpart_x, imagpart_y;
3365 /* If this is a complex value with each part being smaller than a
3366 word, the usual calling sequence will likely pack the pieces into
3367 a single register. Unfortunately, SUBREG of hard registers only
3368 deals in terms of words, so we have a problem converting input
3369 arguments to the CONCAT of two registers that is used elsewhere
3370 for complex values. If this is before reload, we can copy it into
3371 memory and reload. FIXME, we should see about using extract and
3372 insert on integer registers, but complex short and complex char
3373 variables should be rarely used. */
3374 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3375 && (reload_in_progress | reload_completed) == 0)
3377 int packed_dest_p
3378 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3379 int packed_src_p
3380 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3382 if (packed_dest_p || packed_src_p)
3384 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3385 ? MODE_FLOAT : MODE_INT);
3387 enum machine_mode reg_mode
3388 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3390 if (reg_mode != BLKmode)
3392 rtx mem = assign_stack_temp (reg_mode,
3393 GET_MODE_SIZE (mode), 0);
3394 rtx cmem = adjust_address (mem, mode, 0);
3396 cfun->cannot_inline
3397 = N_("function using short complex types cannot be inline");
3399 if (packed_dest_p)
3401 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3403 emit_move_insn_1 (cmem, y);
3404 return emit_move_insn_1 (sreg, mem);
3406 else
3408 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3410 emit_move_insn_1 (mem, sreg);
3411 return emit_move_insn_1 (x, cmem);
3417 realpart_x = gen_realpart (submode, x);
3418 realpart_y = gen_realpart (submode, y);
3419 imagpart_x = gen_imagpart (submode, x);
3420 imagpart_y = gen_imagpart (submode, y);
3422 /* Show the output dies here. This is necessary for SUBREGs
3423 of pseudos since we cannot track their lifetimes correctly;
3424 hard regs shouldn't appear here except as return values.
3425 We never want to emit such a clobber after reload. */
3426 if (x != y
3427 && ! (reload_in_progress || reload_completed)
3428 && (GET_CODE (realpart_x) == SUBREG
3429 || GET_CODE (imagpart_x) == SUBREG))
3430 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3432 emit_move_insn (realpart_x, realpart_y);
3433 emit_move_insn (imagpart_x, imagpart_y);
3436 return get_last_insn ();
3439 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3440 find a mode to do it in. If we have a movcc, use it. Otherwise,
3441 find the MODE_INT mode of the same width. */
3442 else if (GET_MODE_CLASS (mode) == MODE_CC
3443 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3445 enum insn_code insn_code;
3446 enum machine_mode tmode = VOIDmode;
3447 rtx x1 = x, y1 = y;
3449 if (mode != CCmode
3450 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3451 tmode = CCmode;
3452 else
3453 for (tmode = QImode; tmode != VOIDmode;
3454 tmode = GET_MODE_WIDER_MODE (tmode))
3455 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3456 break;
3458 if (tmode == VOIDmode)
3459 abort ();
3461 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3462 may call change_address which is not appropriate if we were
3463 called when a reload was in progress. We don't have to worry
3464 about changing the address since the size in bytes is supposed to
3465 be the same. Copy the MEM to change the mode and move any
3466 substitutions from the old MEM to the new one. */
3468 if (reload_in_progress)
3470 x = gen_lowpart_common (tmode, x1);
3471 if (x == 0 && GET_CODE (x1) == MEM)
3473 x = adjust_address_nv (x1, tmode, 0);
3474 copy_replacements (x1, x);
3477 y = gen_lowpart_common (tmode, y1);
3478 if (y == 0 && GET_CODE (y1) == MEM)
3480 y = adjust_address_nv (y1, tmode, 0);
3481 copy_replacements (y1, y);
3484 else
3486 x = gen_lowpart (tmode, x);
3487 y = gen_lowpart (tmode, y);
3490 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3491 return emit_insn (GEN_FCN (insn_code) (x, y));
3494 /* This will handle any multi-word or full-word mode that lacks a move_insn
3495 pattern. However, you will get better code if you define such patterns,
3496 even if they must turn into multiple assembler instructions. */
3497 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3499 rtx last_insn = 0;
3500 rtx seq, inner;
3501 int need_clobber;
3502 int i;
3504 #ifdef PUSH_ROUNDING
3506 /* If X is a push on the stack, do the push now and replace
3507 X with a reference to the stack pointer. */
3508 if (push_operand (x, GET_MODE (x)))
3510 rtx temp;
3511 enum rtx_code code;
3513 /* Do not use anti_adjust_stack, since we don't want to update
3514 stack_pointer_delta. */
3515 temp = expand_binop (Pmode,
3516 #ifdef STACK_GROWS_DOWNWARD
3517 sub_optab,
3518 #else
3519 add_optab,
3520 #endif
3521 stack_pointer_rtx,
3522 GEN_INT
3523 (PUSH_ROUNDING
3524 (GET_MODE_SIZE (GET_MODE (x)))),
3525 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3527 if (temp != stack_pointer_rtx)
3528 emit_move_insn (stack_pointer_rtx, temp);
3530 code = GET_CODE (XEXP (x, 0));
3532 /* Just hope that small offsets off SP are OK. */
3533 if (code == POST_INC)
3534 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3535 GEN_INT (-((HOST_WIDE_INT)
3536 GET_MODE_SIZE (GET_MODE (x)))));
3537 else if (code == POST_DEC)
3538 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3539 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3540 else
3541 temp = stack_pointer_rtx;
3543 x = change_address (x, VOIDmode, temp);
3545 #endif
3547 /* If we are in reload, see if either operand is a MEM whose address
3548 is scheduled for replacement. */
3549 if (reload_in_progress && GET_CODE (x) == MEM
3550 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3551 x = replace_equiv_address_nv (x, inner);
3552 if (reload_in_progress && GET_CODE (y) == MEM
3553 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3554 y = replace_equiv_address_nv (y, inner);
3556 start_sequence ();
3558 need_clobber = 0;
3559 for (i = 0;
3560 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3561 i++)
3563 rtx xpart = operand_subword (x, i, 1, mode);
3564 rtx ypart = operand_subword (y, i, 1, mode);
3566 /* If we can't get a part of Y, put Y into memory if it is a
3567 constant. Otherwise, force it into a register. If we still
3568 can't get a part of Y, abort. */
3569 if (ypart == 0 && CONSTANT_P (y))
3571 y = force_const_mem (mode, y);
3572 ypart = operand_subword (y, i, 1, mode);
3574 else if (ypart == 0)
3575 ypart = operand_subword_force (y, i, mode);
3577 if (xpart == 0 || ypart == 0)
3578 abort ();
3580 need_clobber |= (GET_CODE (xpart) == SUBREG);
3582 last_insn = emit_move_insn (xpart, ypart);
3585 seq = get_insns ();
3586 end_sequence ();
3588 /* Show the output dies here. This is necessary for SUBREGs
3589 of pseudos since we cannot track their lifetimes correctly;
3590 hard regs shouldn't appear here except as return values.
3591 We never want to emit such a clobber after reload. */
3592 if (x != y
3593 && ! (reload_in_progress || reload_completed)
3594 && need_clobber != 0)
3595 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3597 emit_insn (seq);
3599 return last_insn;
3601 else
3602 abort ();
3605 /* If Y is representable exactly in a narrower mode, and the target can
3606 perform the extension directly from constant or memory, then emit the
3607 move as an extension. */
3609 static rtx
3610 compress_float_constant (rtx x, rtx y)
3612 enum machine_mode dstmode = GET_MODE (x);
3613 enum machine_mode orig_srcmode = GET_MODE (y);
3614 enum machine_mode srcmode;
3615 REAL_VALUE_TYPE r;
3617 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3619 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3620 srcmode != orig_srcmode;
3621 srcmode = GET_MODE_WIDER_MODE (srcmode))
3623 enum insn_code ic;
3624 rtx trunc_y, last_insn;
3626 /* Skip if the target can't extend this way. */
3627 ic = can_extend_p (dstmode, srcmode, 0);
3628 if (ic == CODE_FOR_nothing)
3629 continue;
3631 /* Skip if the narrowed value isn't exact. */
3632 if (! exact_real_truncate (srcmode, &r))
3633 continue;
3635 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3637 if (LEGITIMATE_CONSTANT_P (trunc_y))
3639 /* Skip if the target needs extra instructions to perform
3640 the extension. */
3641 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3642 continue;
3644 else if (float_extend_from_mem[dstmode][srcmode])
3645 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3646 else
3647 continue;
3649 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3650 last_insn = get_last_insn ();
3652 if (GET_CODE (x) == REG)
3653 set_unique_reg_note (last_insn, REG_EQUAL, y);
3655 return last_insn;
3658 return NULL_RTX;
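/* Illustrative sketch, not part of this file: the exactness test above plays
   the role of the host-arithmetic check below; only constants that survive
   the narrowing unchanged (e.g. 1.5, but not 0.1) are rewritten as a
   narrower constant plus a float extension.  */
#if 0
#include <stdbool.h>
static bool
narrows_exactly_sketch (double d)
{
  float f = (float) d;          /* analogous to exact_real_truncate */
  return (double) f == d;       /* true for 1.5, false for 0.1 */
}
#endif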
3661 /* Pushing data onto the stack. */
3663 /* Push a block of length SIZE (perhaps variable)
3664 and return an rtx to address the beginning of the block.
3665 Note that it is not possible for the value returned to be a QUEUED.
3666 The value may be virtual_outgoing_args_rtx.
3668 EXTRA is the number of bytes of padding to push in addition to SIZE.
3669 BELOW nonzero means this padding comes at low addresses;
3670 otherwise, the padding comes at high addresses. */
3673 push_block (rtx size, int extra, int below)
3675 rtx temp;
3677 size = convert_modes (Pmode, ptr_mode, size, 1);
3678 if (CONSTANT_P (size))
3679 anti_adjust_stack (plus_constant (size, extra));
3680 else if (GET_CODE (size) == REG && extra == 0)
3681 anti_adjust_stack (size);
3682 else
3684 temp = copy_to_mode_reg (Pmode, size);
3685 if (extra != 0)
3686 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3687 temp, 0, OPTAB_LIB_WIDEN);
3688 anti_adjust_stack (temp);
3691 #ifndef STACK_GROWS_DOWNWARD
3692 if (0)
3693 #else
3694 if (1)
3695 #endif
3697 temp = virtual_outgoing_args_rtx;
3698 if (extra != 0 && below)
3699 temp = plus_constant (temp, extra);
3701 else
3703 if (GET_CODE (size) == CONST_INT)
3704 temp = plus_constant (virtual_outgoing_args_rtx,
3705 -INTVAL (size) - (below ? 0 : extra));
3706 else if (extra != 0 && !below)
3707 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3708 negate_rtx (Pmode, plus_constant (size, extra)));
3709 else
3710 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3711 negate_rtx (Pmode, size));
3714 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3717 #ifdef PUSH_ROUNDING
3719 /* Emit single push insn. */
3721 static void
3722 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3724 rtx dest_addr;
3725 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3726 rtx dest;
3727 enum insn_code icode;
3728 insn_operand_predicate_fn pred;
3730 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3731 /* If there is a push pattern, use it. Otherwise fall back to the old way
3732 of handing a MEM representing the push operation to the move expander. */
3733 icode = push_optab->handlers[(int) mode].insn_code;
3734 if (icode != CODE_FOR_nothing)
3736 if (((pred = insn_data[(int) icode].operand[0].predicate)
3737 && !((*pred) (x, mode))))
3738 x = force_reg (mode, x);
3739 emit_insn (GEN_FCN (icode) (x));
3740 return;
3742 if (GET_MODE_SIZE (mode) == rounded_size)
3743 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3744 /* If we are to pad downward, adjust the stack pointer first and
3745 then store X into the stack location using an offset. This is
3746 because emit_move_insn does not know how to pad; it does not have
3747 access to type. */
3748 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3750 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3751 HOST_WIDE_INT offset;
3753 emit_move_insn (stack_pointer_rtx,
3754 expand_binop (Pmode,
3755 #ifdef STACK_GROWS_DOWNWARD
3756 sub_optab,
3757 #else
3758 add_optab,
3759 #endif
3760 stack_pointer_rtx,
3761 GEN_INT (rounded_size),
3762 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3764 offset = (HOST_WIDE_INT) padding_size;
3765 #ifdef STACK_GROWS_DOWNWARD
3766 if (STACK_PUSH_CODE == POST_DEC)
3767 /* We have already decremented the stack pointer, so get the
3768 previous value. */
3769 offset += (HOST_WIDE_INT) rounded_size;
3770 #else
3771 if (STACK_PUSH_CODE == POST_INC)
3772 /* We have already incremented the stack pointer, so get the
3773 previous value. */
3774 offset -= (HOST_WIDE_INT) rounded_size;
3775 #endif
3776 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3778 else
3780 #ifdef STACK_GROWS_DOWNWARD
3781 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3782 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3783 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3784 #else
3785 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3786 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3787 GEN_INT (rounded_size));
3788 #endif
3789 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3792 dest = gen_rtx_MEM (mode, dest_addr);
3794 if (type != 0)
3796 set_mem_attributes (dest, type, 1);
3798 if (flag_optimize_sibling_calls)
3799 /* Function incoming arguments may overlap with sibling call
3800 outgoing arguments and we cannot allow reordering of reads
3801 from function arguments with stores to outgoing arguments
3802 of sibling calls. */
3803 set_mem_alias_set (dest, 0);
3805 emit_move_insn (dest, x);
3807 #endif
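/* Illustrative sketch, not part of this file: worked example of the
   downward-padding offset above.  Suppose GET_MODE_SIZE (mode) is 2 and
   PUSH_ROUNDING rounds it to 4, so rounded_size = 4 and padding_size = 2.
   On a STACK_GROWS_DOWNWARD target whose STACK_PUSH_CODE is PRE_DEC, the
   stack pointer is first lowered by 4 and the value is then stored at
   sp + 2, i.e. in the high-address half of the slot, with the two padding
   bytes below it.  */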
3809 /* Generate code to push X onto the stack, assuming it has mode MODE and
3810 type TYPE.
3811 MODE is redundant except when X is a CONST_INT (since they don't
3812 carry mode info).
3813 SIZE is an rtx for the size of data to be copied (in bytes),
3814 needed only if X is BLKmode.
3816 ALIGN (in bits) is maximum alignment we can assume.
3818 If PARTIAL and REG are both nonzero, then copy that many of the first
3819 words of X into registers starting with REG, and push the rest of X.
3820 The amount of space pushed is decreased by PARTIAL words,
3821 rounded *down* to a multiple of PARM_BOUNDARY.
3822 REG must be a hard register in this case.
3823 If REG is zero but PARTIAL is not, take all other actions for an
3824 argument partially in registers, but do not actually load any
3825 registers.
3827 EXTRA is the amount in bytes of extra space to leave next to this arg.
3828 This is ignored if an argument block has already been allocated.
3830 On a machine that lacks real push insns, ARGS_ADDR is the address of
3831 the bottom of the argument block for this call. We use indexing off there
3832 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3833 argument block has not been preallocated.
3835 ARGS_SO_FAR is the size of args previously pushed for this call.
3837 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3838 for arguments passed in registers. If nonzero, it will be the number
3839 of bytes required. */
3841 void
3842 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3843 unsigned int align, int partial, rtx reg, int extra,
3844 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3845 rtx alignment_pad)
3847 rtx xinner;
3848 enum direction stack_direction
3849 #ifdef STACK_GROWS_DOWNWARD
3850 = downward;
3851 #else
3852 = upward;
3853 #endif
3855 /* Decide where to pad the argument: `downward' for below,
3856 `upward' for above, or `none' for don't pad it.
3857 Default is below for small data on big-endian machines; else above. */
3858 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3860 /* Invert direction if stack is post-decrement.
3861 FIXME: why? */
3862 if (STACK_PUSH_CODE == POST_DEC)
3863 if (where_pad != none)
3864 where_pad = (where_pad == downward ? upward : downward);
3866 xinner = x = protect_from_queue (x, 0);
3868 if (mode == BLKmode)
3870 /* Copy a block into the stack, entirely or partially. */
3872 rtx temp;
3873 int used = partial * UNITS_PER_WORD;
3874 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3875 int skip;
3877 if (size == 0)
3878 abort ();
3880 used -= offset;
3882 /* USED is now the # of bytes we need not copy to the stack
3883 because registers will take care of them. */
3885 if (partial != 0)
3886 xinner = adjust_address (xinner, BLKmode, used);
3888 /* If the partial register-part of the arg counts in its stack size,
3889 skip the part of stack space corresponding to the registers.
3890 Otherwise, start copying to the beginning of the stack space,
3891 by setting SKIP to 0. */
3892 skip = (reg_parm_stack_space == 0) ? 0 : used;
3894 #ifdef PUSH_ROUNDING
3895 /* Do it with several push insns if that doesn't take lots of insns
3896 and if there is no difficulty with push insns that skip bytes
3897 on the stack for alignment purposes. */
3898 if (args_addr == 0
3899 && PUSH_ARGS
3900 && GET_CODE (size) == CONST_INT
3901 && skip == 0
3902 && MEM_ALIGN (xinner) >= align
3903 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3904 /* Here we avoid the case of a structure whose weak alignment
3905 forces many pushes of a small amount of data,
3906 and such small pushes do rounding that causes trouble. */
3907 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3908 || align >= BIGGEST_ALIGNMENT
3909 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3910 == (align / BITS_PER_UNIT)))
3911 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3913 /* Push padding now if padding above and stack grows down,
3914 or if padding below and stack grows up.
3915 But if space already allocated, this has already been done. */
3916 if (extra && args_addr == 0
3917 && where_pad != none && where_pad != stack_direction)
3918 anti_adjust_stack (GEN_INT (extra));
3920 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3922 else
3923 #endif /* PUSH_ROUNDING */
3925 rtx target;
3927 /* Otherwise make space on the stack and copy the data
3928 to the address of that space. */
3930 /* Deduct words put into registers from the size we must copy. */
3931 if (partial != 0)
3933 if (GET_CODE (size) == CONST_INT)
3934 size = GEN_INT (INTVAL (size) - used);
3935 else
3936 size = expand_binop (GET_MODE (size), sub_optab, size,
3937 GEN_INT (used), NULL_RTX, 0,
3938 OPTAB_LIB_WIDEN);
3941 /* Get the address of the stack space.
3942 In this case, we do not deal with EXTRA separately.
3943 A single stack adjust will do. */
3944 if (! args_addr)
3946 temp = push_block (size, extra, where_pad == downward);
3947 extra = 0;
3949 else if (GET_CODE (args_so_far) == CONST_INT)
3950 temp = memory_address (BLKmode,
3951 plus_constant (args_addr,
3952 skip + INTVAL (args_so_far)));
3953 else
3954 temp = memory_address (BLKmode,
3955 plus_constant (gen_rtx_PLUS (Pmode,
3956 args_addr,
3957 args_so_far),
3958 skip));
3960 if (!ACCUMULATE_OUTGOING_ARGS)
3962 /* If the source is referenced relative to the stack pointer,
3963 copy it to another register to stabilize it. We do not need
3964 to do this if we know that we won't be changing sp. */
3966 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3967 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3968 temp = copy_to_reg (temp);
3971 target = gen_rtx_MEM (BLKmode, temp);
3973 if (type != 0)
3975 set_mem_attributes (target, type, 1);
3976 /* Function incoming arguments may overlap with sibling call
3977 outgoing arguments and we cannot allow reordering of reads
3978 from function arguments with stores to outgoing arguments
3979 of sibling calls. */
3980 set_mem_alias_set (target, 0);
3983 /* ALIGN may well be better aligned than TYPE, e.g. due to
3984 PARM_BOUNDARY. Assume the caller isn't lying. */
3985 set_mem_align (target, align);
3987 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3990 else if (partial > 0)
3992 /* Scalar partly in registers. */
3994 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3995 int i;
3996 int not_stack;
3997 /* # words of start of argument
3998 that we must make space for but need not store. */
3999 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
4000 int args_offset = INTVAL (args_so_far);
4001 int skip;
4003 /* Push padding now if padding above and stack grows down,
4004 or if padding below and stack grows up.
4005 But if space already allocated, this has already been done. */
4006 if (extra && args_addr == 0
4007 && where_pad != none && where_pad != stack_direction)
4008 anti_adjust_stack (GEN_INT (extra));
4010 /* If we make space by pushing it, we might as well push
4011 the real data. Otherwise, we can leave OFFSET nonzero
4012 and leave the space uninitialized. */
4013 if (args_addr == 0)
4014 offset = 0;
4016 /* Now NOT_STACK gets the number of words that we don't need to
4017 allocate on the stack. */
4018 not_stack = partial - offset;
4020 /* If the partial register-part of the arg counts in its stack size,
4021 skip the part of stack space corresponding to the registers.
4022 Otherwise, start copying to the beginning of the stack space,
4023 by setting SKIP to 0. */
4024 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4026 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4027 x = validize_mem (force_const_mem (mode, x));
4029 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4030 SUBREGs of such registers are not allowed. */
4031 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4032 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4033 x = copy_to_reg (x);
4035 /* Loop over all the words allocated on the stack for this arg. */
4036 /* We can do it by words, because any scalar bigger than a word
4037       has a size that is a multiple of a word.  */
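/* Sketch of the loop below, with assumed values: for a 12-byte scalar
   (SIZE == 3 words) of which PARTIAL == 1 word is already in REG and
   REG_PARM_STACK_SPACE == 0, NOT_STACK == 1 and words 1 and 2 are pushed
   (in stack-dependent order), each by a recursive call that pushes exactly
   one word_mode value.  */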
4038 #ifndef PUSH_ARGS_REVERSED
4039 for (i = not_stack; i < size; i++)
4040 #else
4041 for (i = size - 1; i >= not_stack; i--)
4042 #endif
4043 if (i >= not_stack + offset)
4044 emit_push_insn (operand_subword_force (x, i, mode),
4045 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4046 0, args_addr,
4047 GEN_INT (args_offset + ((i - not_stack + skip)
4048 * UNITS_PER_WORD)),
4049 reg_parm_stack_space, alignment_pad);
4051 else
4053 rtx addr;
4054 rtx dest;
4056 /* Push padding now if padding above and stack grows down,
4057 or if padding below and stack grows up.
4058 But if space already allocated, this has already been done. */
4059 if (extra && args_addr == 0
4060 && where_pad != none && where_pad != stack_direction)
4061 anti_adjust_stack (GEN_INT (extra));
4063 #ifdef PUSH_ROUNDING
4064 if (args_addr == 0 && PUSH_ARGS)
4065 emit_single_push_insn (mode, x, type);
4066 else
4067 #endif
4069 if (GET_CODE (args_so_far) == CONST_INT)
4070 addr
4071 = memory_address (mode,
4072 plus_constant (args_addr,
4073 INTVAL (args_so_far)));
4074 else
4075 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4076 args_so_far));
4077 dest = gen_rtx_MEM (mode, addr);
4078 if (type != 0)
4080 set_mem_attributes (dest, type, 1);
4081 /* Function incoming arguments may overlap with sibling call
4082 outgoing arguments and we cannot allow reordering of reads
4083 from function arguments with stores to outgoing arguments
4084 of sibling calls. */
4085 set_mem_alias_set (dest, 0);
4088 emit_move_insn (dest, x);
4092 /* If part should go in registers, copy that part
4093 into the appropriate registers. Do this now, at the end,
4094 since mem-to-mem copies above may do function calls. */
4095 if (partial > 0 && reg != 0)
4097 /* Handle calls that pass values in multiple non-contiguous locations.
4098 The Irix 6 ABI has examples of this. */
4099 if (GET_CODE (reg) == PARALLEL)
4100 emit_group_load (reg, x, type, -1);
4101 else
4102 move_block_to_reg (REGNO (reg), x, partial, mode);
4105 if (extra && args_addr == 0 && where_pad == stack_direction)
4106 anti_adjust_stack (GEN_INT (extra));
4108 if (alignment_pad && args_addr == 0)
4109 anti_adjust_stack (alignment_pad);
4112 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4113 operations. */
4115 static rtx
4116 get_subtarget (rtx x)
4118 return ((x == 0
4119 /* Only registers can be subtargets. */
4120 || GET_CODE (x) != REG
4121 /* If the register is readonly, it can't be set more than once. */
4122 || RTX_UNCHANGING_P (x)
4123 /* Don't use hard regs to avoid extending their life. */
4124 || REGNO (x) < FIRST_PSEUDO_REGISTER
4125 /* Avoid subtargets inside loops,
4126 since they hide some invariant expressions. */
4127 || preserve_subexpressions_p ())
4128 ? 0 : x);
4131 /* Expand an assignment that stores the value of FROM into TO.
4132 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4133 (This may contain a QUEUED rtx;
4134 if the value is constant, this rtx is a constant.)
4135 Otherwise, the returned value is NULL_RTX. */
4137 rtx
4138 expand_assignment (tree to, tree from, int want_value)
4140 rtx to_rtx = 0;
4141 rtx result;
4143 /* Don't crash if the lhs of the assignment was erroneous. */
4145 if (TREE_CODE (to) == ERROR_MARK)
4147 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4148 return want_value ? result : NULL_RTX;
4151 /* Assignment of a structure component needs special treatment
4152 if the structure component's rtx is not simply a MEM.
4153 Assignment of an array element at a constant index, and assignment of
4154 an array element in an unaligned packed structure field, has the same
4155 problem. */
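/* Example of an assignment that takes this path (for illustration): given
   struct S { int f : 3; } s;  the store  s.f = v  has a COMPONENT_REF on
   the left whose rtx is a bit-field inside a MEM, so it must go through
   get_inner_reference and store_field rather than through a plain move.  */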
4157 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4158 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4159 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4161 enum machine_mode mode1;
4162 HOST_WIDE_INT bitsize, bitpos;
4163 rtx orig_to_rtx;
4164 tree offset;
4165 int unsignedp;
4166 int volatilep = 0;
4167 tree tem;
4169 push_temp_slots ();
4170 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4171 &unsignedp, &volatilep);
4173 /* If we are going to use store_bit_field and extract_bit_field,
4174 make sure to_rtx will be safe for multiple use. */
4176 if (mode1 == VOIDmode && want_value)
4177 tem = stabilize_reference (tem);
4179 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4181 if (offset != 0)
4183 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4185 if (GET_CODE (to_rtx) != MEM)
4186 abort ();
4188 #ifdef POINTERS_EXTEND_UNSIGNED
4189 if (GET_MODE (offset_rtx) != Pmode)
4190 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4191 #else
4192 if (GET_MODE (offset_rtx) != ptr_mode)
4193 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4194 #endif
4196 /* A constant address in TO_RTX can have VOIDmode, we must not try
4197 to call force_reg for that case. Avoid that case. */
4198 if (GET_CODE (to_rtx) == MEM
4199 && GET_MODE (to_rtx) == BLKmode
4200 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4201 && bitsize > 0
4202 && (bitpos % bitsize) == 0
4203 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4204 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4206 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4207 bitpos = 0;
4210 to_rtx = offset_address (to_rtx, offset_rtx,
4211 highest_pow2_factor_for_type (TREE_TYPE (to),
4212 offset));
4215 if (GET_CODE (to_rtx) == MEM)
4217 /* If the field is at offset zero, we could have been given the
4218 DECL_RTX of the parent struct. Don't munge it. */
4219 to_rtx = shallow_copy_rtx (to_rtx);
4221 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4224 /* Deal with volatile and readonly fields. The former is only done
4225 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4226 if (volatilep && GET_CODE (to_rtx) == MEM)
4228 if (to_rtx == orig_to_rtx)
4229 to_rtx = copy_rtx (to_rtx);
4230 MEM_VOLATILE_P (to_rtx) = 1;
4233 if (TREE_CODE (to) == COMPONENT_REF
4234 && TREE_READONLY (TREE_OPERAND (to, 1)))
4236 if (to_rtx == orig_to_rtx)
4237 to_rtx = copy_rtx (to_rtx);
4238 RTX_UNCHANGING_P (to_rtx) = 1;
4241 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4243 if (to_rtx == orig_to_rtx)
4244 to_rtx = copy_rtx (to_rtx);
4245 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4248 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4249 (want_value
4250 /* Spurious cast for HPUX compiler. */
4251 ? ((enum machine_mode)
4252 TYPE_MODE (TREE_TYPE (to)))
4253 : VOIDmode),
4254 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4256 preserve_temp_slots (result);
4257 free_temp_slots ();
4258 pop_temp_slots ();
4260 /* If the value is meaningful, convert RESULT to the proper mode.
4261 Otherwise, return nothing. */
4262 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4263 TYPE_MODE (TREE_TYPE (from)),
4264 result,
4265 TREE_UNSIGNED (TREE_TYPE (to)))
4266 : NULL_RTX);
4269 /* If the rhs is a function call and its value is not an aggregate,
4270 call the function before we start to compute the lhs.
4271 This is needed for correct code for cases such as
4272 val = setjmp (buf) on machines where reference to val
4273 requires loading up part of an address in a separate insn.
4275 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4276 since it might be a promoted variable where the zero- or sign- extension
4277 needs to be done. Handling this in the normal way is safe because no
4278 computation is done before the call. */
4279 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4280 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4281 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4282 && GET_CODE (DECL_RTL (to)) == REG))
4284 rtx value;
4286 push_temp_slots ();
4287 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4288 if (to_rtx == 0)
4289 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4291 /* Handle calls that return values in multiple non-contiguous locations.
4292 The Irix 6 ABI has examples of this. */
4293 if (GET_CODE (to_rtx) == PARALLEL)
4294 emit_group_load (to_rtx, value, TREE_TYPE (from),
4295 int_size_in_bytes (TREE_TYPE (from)));
4296 else if (GET_MODE (to_rtx) == BLKmode)
4297 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4298 else
4300 if (POINTER_TYPE_P (TREE_TYPE (to)))
4301 value = convert_memory_address (GET_MODE (to_rtx), value);
4302 emit_move_insn (to_rtx, value);
4304 preserve_temp_slots (to_rtx);
4305 free_temp_slots ();
4306 pop_temp_slots ();
4307 return want_value ? to_rtx : NULL_RTX;
4310 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4311 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4313 if (to_rtx == 0)
4314 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4316 /* Don't move directly into a return register. */
4317 if (TREE_CODE (to) == RESULT_DECL
4318 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4320 rtx temp;
4322 push_temp_slots ();
4323 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4325 if (GET_CODE (to_rtx) == PARALLEL)
4326 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4327 int_size_in_bytes (TREE_TYPE (from)));
4328 else
4329 emit_move_insn (to_rtx, temp);
4331 preserve_temp_slots (to_rtx);
4332 free_temp_slots ();
4333 pop_temp_slots ();
4334 return want_value ? to_rtx : NULL_RTX;
4337 /* In case we are returning the contents of an object which overlaps
4338 the place the value is being stored, use a safe function when copying
4339 a value through a pointer into a structure value return block. */
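/* Illustrative case: for  struct S f (struct S *p) { return *p; }
   called as  s = f (&s),  P may point into the very block in which the
   returned value is being built, so the copy is done with memmove/bcopy
   rather than with a straight block move.  */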
4340 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4341 && current_function_returns_struct
4342 && !current_function_returns_pcc_struct)
4344 rtx from_rtx, size;
4346 push_temp_slots ();
4347 size = expr_size (from);
4348 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4350 if (TARGET_MEM_FUNCTIONS)
4351 emit_library_call (memmove_libfunc, LCT_NORMAL,
4352 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4353 XEXP (from_rtx, 0), Pmode,
4354 convert_to_mode (TYPE_MODE (sizetype),
4355 size, TREE_UNSIGNED (sizetype)),
4356 TYPE_MODE (sizetype));
4357 else
4358 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4359 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4360 XEXP (to_rtx, 0), Pmode,
4361 convert_to_mode (TYPE_MODE (integer_type_node),
4362 size,
4363 TREE_UNSIGNED (integer_type_node)),
4364 TYPE_MODE (integer_type_node));
4366 preserve_temp_slots (to_rtx);
4367 free_temp_slots ();
4368 pop_temp_slots ();
4369 return want_value ? to_rtx : NULL_RTX;
4372 /* Compute FROM and store the value in the rtx we got. */
4374 push_temp_slots ();
4375 result = store_expr (from, to_rtx, want_value);
4376 preserve_temp_slots (result);
4377 free_temp_slots ();
4378 pop_temp_slots ();
4379 return want_value ? result : NULL_RTX;
4382 /* Generate code for computing expression EXP,
4383 and storing the value into TARGET.
4384 TARGET may contain a QUEUED rtx.
4386 If WANT_VALUE & 1 is nonzero, return a copy of the value
4387 not in TARGET, so that we can be sure to use the proper
4388 value in a containing expression even if TARGET has something
4389 else stored in it. If possible, we copy the value through a pseudo
4390 and return that pseudo. Or, if the value is constant, we try to
4391 return the constant. In some cases, we return a pseudo
4392 copied *from* TARGET.
4394 If the mode is BLKmode then we may return TARGET itself.
4395    It turns out that in BLKmode it doesn't cause a problem,
4396 because C has no operators that could combine two different
4397 assignments into the same BLKmode object with different values
4398 with no sequence point. Will other languages need this to
4399 be more thorough?
4401 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4402 to catch quickly any cases where the caller uses the value
4403 and fails to set WANT_VALUE.
4405 If WANT_VALUE & 2 is set, this is a store into a call param on the
4406 stack, and block moves may need to be treated specially. */
4408 rtx
4409 store_expr (tree exp, rtx target, int want_value)
4411 rtx temp;
4412 int dont_return_target = 0;
4413 int dont_store_target = 0;
4415 if (VOID_TYPE_P (TREE_TYPE (exp)))
4417 /* C++ can generate ?: expressions with a throw expression in one
4418 branch and an rvalue in the other. Here, we resolve attempts to
4419 store the throw expression's nonexistent result. */
4420 if (want_value)
4421 abort ();
4422 expand_expr (exp, const0_rtx, VOIDmode, 0);
4423 return NULL_RTX;
4425 if (TREE_CODE (exp) == COMPOUND_EXPR)
4427 /* Perform first part of compound expression, then assign from second
4428 part. */
4429 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4430 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4431 emit_queue ();
4432 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4434 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4436 /* For conditional expression, get safe form of the target. Then
4437 test the condition, doing the appropriate assignment on either
4438 side. This avoids the creation of unnecessary temporaries.
4439 For non-BLKmode, it is more efficient not to do this. */
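/* Illustration: for a BLKmode store such as  s = flag ? a : b  where S, A
   and B are structures, the code below tests FLAG once and runs store_expr
   on A or B directly into TARGET, instead of materializing the selected
   value in a temporary and copying it afterwards.  */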
4441 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4443 emit_queue ();
4444 target = protect_from_queue (target, 1);
4446 do_pending_stack_adjust ();
4447 NO_DEFER_POP;
4448 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4449 start_cleanup_deferral ();
4450 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4451 end_cleanup_deferral ();
4452 emit_queue ();
4453 emit_jump_insn (gen_jump (lab2));
4454 emit_barrier ();
4455 emit_label (lab1);
4456 start_cleanup_deferral ();
4457 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4458 end_cleanup_deferral ();
4459 emit_queue ();
4460 emit_label (lab2);
4461 OK_DEFER_POP;
4463 return want_value & 1 ? target : NULL_RTX;
4465 else if (queued_subexp_p (target))
4466 /* If target contains a postincrement, let's not risk
4467 using it as the place to generate the rhs. */
4469 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4471 /* Expand EXP into a new pseudo. */
4472 temp = gen_reg_rtx (GET_MODE (target));
4473 temp = expand_expr (exp, temp, GET_MODE (target),
4474 (want_value & 2
4475 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4477 else
4478 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4479 (want_value & 2
4480 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4482 /* If target is volatile, ANSI requires accessing the value
4483 *from* the target, if it is accessed. So make that happen.
4484 In no case return the target itself. */
4485 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4486 dont_return_target = 1;
4488 else if ((want_value & 1) != 0
4489 && GET_CODE (target) == MEM
4490 && ! MEM_VOLATILE_P (target)
4491 && GET_MODE (target) != BLKmode)
4492 /* If target is in memory and caller wants value in a register instead,
4493 arrange that. Pass TARGET as target for expand_expr so that,
4494 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4495 We know expand_expr will not use the target in that case.
4496 Don't do this if TARGET is volatile because we are supposed
4497 to write it and then read it. */
4499 temp = expand_expr (exp, target, GET_MODE (target),
4500 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4501 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4503 /* If TEMP is already in the desired TARGET, only copy it from
4504 memory and don't store it there again. */
4505 if (temp == target
4506 || (rtx_equal_p (temp, target)
4507 && ! side_effects_p (temp) && ! side_effects_p (target)))
4508 dont_store_target = 1;
4509 temp = copy_to_reg (temp);
4511 dont_return_target = 1;
4513 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4514 /* If this is a scalar in a register that is stored in a wider mode
4515 than the declared mode, compute the result into its declared mode
4516 and then convert to the wider mode. Our value is the computed
4517 expression. */
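/* Example (target-dependent, for illustration): on a machine that promotes
   QImode variables to SImode registers, TARGET is a QImode SUBREG of an
   SImode pseudo with SUBREG_PROMOTED_VAR_P set; EXP is computed in its
   declared narrow type and the result is then sign- or zero-extended into
   the SImode register below.  */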
4519 rtx inner_target = 0;
4521 /* If we don't want a value, we can do the conversion inside EXP,
4522 which will often result in some optimizations. Do the conversion
4523 in two steps: first change the signedness, if needed, then
4524 the extend. But don't do this if the type of EXP is a subtype
4525 of something else since then the conversion might involve
4526 more than just converting modes. */
4527 if ((want_value & 1) == 0
4528 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4529 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4531 if (TREE_UNSIGNED (TREE_TYPE (exp))
4532 != SUBREG_PROMOTED_UNSIGNED_P (target))
4533 exp = convert
4534 ((*lang_hooks.types.signed_or_unsigned_type)
4535 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4537 exp = convert ((*lang_hooks.types.type_for_mode)
4538 (GET_MODE (SUBREG_REG (target)),
4539 SUBREG_PROMOTED_UNSIGNED_P (target)),
4540 exp);
4542 inner_target = SUBREG_REG (target);
4545 temp = expand_expr (exp, inner_target, VOIDmode,
4546 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4548 /* If TEMP is a MEM and we want a result value, make the access
4549 now so it gets done only once. Strictly speaking, this is
4550 only necessary if the MEM is volatile, or if the address
4551 overlaps TARGET. But not performing the load twice also
4552 reduces the amount of rtl we generate and then have to CSE. */
4553 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4554 temp = copy_to_reg (temp);
4556 /* If TEMP is a VOIDmode constant, use convert_modes to make
4557 sure that we properly convert it. */
4558 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4560 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4561 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4562 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4563 GET_MODE (target), temp,
4564 SUBREG_PROMOTED_UNSIGNED_P (target));
4567 convert_move (SUBREG_REG (target), temp,
4568 SUBREG_PROMOTED_UNSIGNED_P (target));
4570 /* If we promoted a constant, change the mode back down to match
4571 target. Otherwise, the caller might get confused by a result whose
4572 mode is larger than expected. */
4574 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4576 if (GET_MODE (temp) != VOIDmode)
4578 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4579 SUBREG_PROMOTED_VAR_P (temp) = 1;
4580 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4581 SUBREG_PROMOTED_UNSIGNED_P (target));
4583 else
4584 temp = convert_modes (GET_MODE (target),
4585 GET_MODE (SUBREG_REG (target)),
4586 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4589 return want_value & 1 ? temp : NULL_RTX;
4591 else
4593 temp = expand_expr (exp, target, GET_MODE (target),
4594 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4595 /* Return TARGET if it's a specified hardware register.
4596 If TARGET is a volatile mem ref, either return TARGET
4597 or return a reg copied *from* TARGET; ANSI requires this.
4599 Otherwise, if TEMP is not TARGET, return TEMP
4600 if it is constant (for efficiency),
4601 or if we really want the correct value. */
4602 if (!(target && GET_CODE (target) == REG
4603 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4604 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4605 && ! rtx_equal_p (temp, target)
4606 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4607 dont_return_target = 1;
4610 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4611 the same as that of TARGET, adjust the constant. This is needed, for
4612 example, in case it is a CONST_DOUBLE and we want only a word-sized
4613 value. */
4614 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4615 && TREE_CODE (exp) != ERROR_MARK
4616 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4617 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4618 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4620 /* If value was not generated in the target, store it there.
4621 Convert the value to TARGET's type first if necessary.
4622 If TEMP and TARGET compare equal according to rtx_equal_p, but
4623 one or both of them are volatile memory refs, we have to distinguish
4624 two cases:
4625 - expand_expr has used TARGET. In this case, we must not generate
4626 another copy. This can be detected by TARGET being equal according
4627 to == .
4628 - expand_expr has not used TARGET - that means that the source just
4629 happens to have the same RTX form. Since temp will have been created
4630 by expand_expr, it will compare unequal according to == .
4631 We must generate a copy in this case, to reach the correct number
4632 of volatile memory references. */
4634 if ((! rtx_equal_p (temp, target)
4635 || (temp != target && (side_effects_p (temp)
4636 || side_effects_p (target))))
4637 && TREE_CODE (exp) != ERROR_MARK
4638 && ! dont_store_target
4639 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4640 but TARGET is not valid memory reference, TEMP will differ
4641 from TARGET although it is really the same location. */
4642 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4643 || target != DECL_RTL_IF_SET (exp))
4644 /* If there's nothing to copy, don't bother. Don't call expr_size
4645       unless necessary, because the expr_size hook of some front ends (C++)
4646 aborts on objects that are not supposed to be bit-copied or
4647 bit-initialized. */
4648 && expr_size (exp) != const0_rtx)
4650 target = protect_from_queue (target, 1);
4651 if (GET_MODE (temp) != GET_MODE (target)
4652 && GET_MODE (temp) != VOIDmode)
4654 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4655 if (dont_return_target)
4657 /* In this case, we will return TEMP,
4658 so make sure it has the proper mode.
4659 But don't forget to store the value into TARGET. */
4660 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4661 emit_move_insn (target, temp);
4663 else
4664 convert_move (target, temp, unsignedp);
4667 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4669 /* Handle copying a string constant into an array. The string
4670 constant may be shorter than the array. So copy just the string's
4671 actual length, and clear the rest. First get the size of the data
4672 type of the string, which is actually the size of the target. */
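/* Worked example: for  char buf[8] = "hi";  the target is 8 bytes but
   TREE_STRING_LENGTH is 3 (including the terminating NUL), so 3 bytes are
   block-moved and the remaining 5 bytes are cleared by the code below.  */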
4673 rtx size = expr_size (exp);
4675 if (GET_CODE (size) == CONST_INT
4676 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4677 emit_block_move (target, temp, size,
4678 (want_value & 2
4679 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4680 else
4682 /* Compute the size of the data to copy from the string. */
4683 tree copy_size
4684 = size_binop (MIN_EXPR,
4685 make_tree (sizetype, size),
4686 size_int (TREE_STRING_LENGTH (exp)));
4687 rtx copy_size_rtx
4688 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4689 (want_value & 2
4690 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4691 rtx label = 0;
4693 /* Copy that much. */
4694 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4695 TREE_UNSIGNED (sizetype));
4696 emit_block_move (target, temp, copy_size_rtx,
4697 (want_value & 2
4698 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4700 /* Figure out how much is left in TARGET that we have to clear.
4701 Do all calculations in ptr_mode. */
4702 if (GET_CODE (copy_size_rtx) == CONST_INT)
4704 size = plus_constant (size, -INTVAL (copy_size_rtx));
4705 target = adjust_address (target, BLKmode,
4706 INTVAL (copy_size_rtx));
4708 else
4710 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4711 copy_size_rtx, NULL_RTX, 0,
4712 OPTAB_LIB_WIDEN);
4714 #ifdef POINTERS_EXTEND_UNSIGNED
4715 if (GET_MODE (copy_size_rtx) != Pmode)
4716 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4717 TREE_UNSIGNED (sizetype));
4718 #endif
4720 target = offset_address (target, copy_size_rtx,
4721 highest_pow2_factor (copy_size));
4722 label = gen_label_rtx ();
4723 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4724 GET_MODE (size), 0, label);
4727 if (size != const0_rtx)
4728 clear_storage (target, size);
4730 if (label)
4731 emit_label (label);
4734 /* Handle calls that return values in multiple non-contiguous locations.
4735 The Irix 6 ABI has examples of this. */
4736 else if (GET_CODE (target) == PARALLEL)
4737 emit_group_load (target, temp, TREE_TYPE (exp),
4738 int_size_in_bytes (TREE_TYPE (exp)));
4739 else if (GET_MODE (temp) == BLKmode)
4740 emit_block_move (target, temp, expr_size (exp),
4741 (want_value & 2
4742 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4743 else
4744 emit_move_insn (target, temp);
4747 /* If we don't want a value, return NULL_RTX. */
4748 if ((want_value & 1) == 0)
4749 return NULL_RTX;
4751 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4752 ??? The latter test doesn't seem to make sense. */
4753 else if (dont_return_target && GET_CODE (temp) != MEM)
4754 return temp;
4756 /* Return TARGET itself if it is a hard register. */
4757 else if ((want_value & 1) != 0
4758 && GET_MODE (target) != BLKmode
4759 && ! (GET_CODE (target) == REG
4760 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4761 return copy_to_reg (target);
4763 else
4764 return target;
4767 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4769 static int
4770 is_zeros_p (tree exp)
4772 tree elt;
4774 switch (TREE_CODE (exp))
4776 case CONVERT_EXPR:
4777 case NOP_EXPR:
4778 case NON_LVALUE_EXPR:
4779 case VIEW_CONVERT_EXPR:
4780 return is_zeros_p (TREE_OPERAND (exp, 0));
4782 case INTEGER_CST:
4783 return integer_zerop (exp);
4785 case COMPLEX_CST:
4786 return
4787 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4789 case REAL_CST:
4790 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4792 case VECTOR_CST:
4793 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4794 elt = TREE_CHAIN (elt))
4795 if (!is_zeros_p (TREE_VALUE (elt)))
4796 return 0;
4798 return 1;
4800 case CONSTRUCTOR:
4801 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4802 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4803 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4804 if (! is_zeros_p (TREE_VALUE (elt)))
4805 return 0;
4807 return 1;
4809 default:
4810 return 0;
4814 /* Return 1 if EXP contains mostly (3/4) zeros. */
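/* The 3/4 test below is just  zeros/elts >= 3/4  done in integers: e.g. a
   CONSTRUCTOR with 16 elements of which 12 are (mostly) zero gives
   4*12 >= 3*16, i.e. 48 >= 48, so it counts as mostly zero.  */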
4816 static int
4817 mostly_zeros_p (tree exp)
4819 if (TREE_CODE (exp) == CONSTRUCTOR)
4821 int elts = 0, zeros = 0;
4822 tree elt = CONSTRUCTOR_ELTS (exp);
4823 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4825 /* If there are no ranges of true bits, it is all zero. */
4826 return elt == NULL_TREE;
4828 for (; elt; elt = TREE_CHAIN (elt))
4830 /* We do not handle the case where the index is a RANGE_EXPR,
4831 so the statistic will be somewhat inaccurate.
4832 We do make a more accurate count in store_constructor itself,
4833          and since this function is only used for nested array elements,
4834 this should be close enough. */
4835 if (mostly_zeros_p (TREE_VALUE (elt)))
4836 zeros++;
4837 elts++;
4840 return 4 * zeros >= 3 * elts;
4843 return is_zeros_p (exp);
4846 /* Helper function for store_constructor.
4847 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4848 TYPE is the type of the CONSTRUCTOR, not the element type.
4849 CLEARED is as for store_constructor.
4850 ALIAS_SET is the alias set to use for any stores.
4852 This provides a recursive shortcut back to store_constructor when it isn't
4853 necessary to go through store_field. This is so that we can pass through
4854 the cleared field to let store_constructor know that we may not have to
4855 clear a substructure if the outer structure has already been cleared. */
4857 static void
4858 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4859 HOST_WIDE_INT bitpos, enum machine_mode mode,
4860 tree exp, tree type, int cleared, int alias_set)
4862 if (TREE_CODE (exp) == CONSTRUCTOR
4863 && bitpos % BITS_PER_UNIT == 0
4864 /* If we have a nonzero bitpos for a register target, then we just
4865 let store_field do the bitfield handling. This is unlikely to
4866 generate unnecessary clear instructions anyways. */
4867 && (bitpos == 0 || GET_CODE (target) == MEM))
4869 if (GET_CODE (target) == MEM)
4870 target
4871 = adjust_address (target,
4872 GET_MODE (target) == BLKmode
4873 || 0 != (bitpos
4874 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4875 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4878 /* Update the alias set, if required. */
4879 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4880 && MEM_ALIAS_SET (target) != 0)
4882 target = copy_rtx (target);
4883 set_mem_alias_set (target, alias_set);
4886 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4888 else
4889 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4890 alias_set);
4893 /* Store the value of constructor EXP into the rtx TARGET.
4894 TARGET is either a REG or a MEM; we know it cannot conflict, since
4895 safe_from_p has been called.
4896 CLEARED is true if TARGET is known to have been zero'd.
4897 SIZE is the number of bytes of TARGET we are allowed to modify: this
4898 may not be the same as the size of EXP if we are assigning to a field
4899 which has been packed to exclude padding bits. */
4901 static void
4902 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4904 tree type = TREE_TYPE (exp);
4905 #ifdef WORD_REGISTER_OPERATIONS
4906 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4907 #endif
4909 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4910 || TREE_CODE (type) == QUAL_UNION_TYPE)
4912 tree elt;
4914 /* If size is zero or the target is already cleared, do nothing. */
4915 if (size == 0 || cleared)
4916 cleared = 1;
4917 /* We either clear the aggregate or indicate the value is dead. */
4918 else if ((TREE_CODE (type) == UNION_TYPE
4919 || TREE_CODE (type) == QUAL_UNION_TYPE)
4920 && ! CONSTRUCTOR_ELTS (exp))
4921 /* If the constructor is empty, clear the union. */
4923 clear_storage (target, expr_size (exp));
4924 cleared = 1;
4927 /* If we are building a static constructor into a register,
4928 set the initial value as zero so we can fold the value into
4929 a constant. But if more than one register is involved,
4930 this probably loses. */
4931 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4932 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4934 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4935 cleared = 1;
4938 /* If the constructor has fewer fields than the structure
4939 or if we are initializing the structure to mostly zeros,
4940 clear the whole structure first. Don't do this if TARGET is a
4941 register whose mode size isn't equal to SIZE since clear_storage
4942 can't handle this case. */
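/* Example: for  struct { int a, b, c; } x = { 1 };  the constructor
   supplies fewer elements than the type has fields, so the whole object is
   cleared first and only the field A is stored explicitly afterwards.  */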
4943 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4944 || mostly_zeros_p (exp))
4945 && (GET_CODE (target) != REG
4946 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4947 == size)))
4949 rtx xtarget = target;
4951 if (readonly_fields_p (type))
4953 xtarget = copy_rtx (xtarget);
4954 RTX_UNCHANGING_P (xtarget) = 1;
4957 clear_storage (xtarget, GEN_INT (size));
4958 cleared = 1;
4961 if (! cleared)
4962 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4964 /* Store each element of the constructor into
4965 the corresponding field of TARGET. */
4967 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4969 tree field = TREE_PURPOSE (elt);
4970 tree value = TREE_VALUE (elt);
4971 enum machine_mode mode;
4972 HOST_WIDE_INT bitsize;
4973 HOST_WIDE_INT bitpos = 0;
4974 tree offset;
4975 rtx to_rtx = target;
4977 /* Just ignore missing fields.
4978 We cleared the whole structure, above,
4979 if any fields are missing. */
4980 if (field == 0)
4981 continue;
4983 if (cleared && is_zeros_p (value))
4984 continue;
4986 if (host_integerp (DECL_SIZE (field), 1))
4987 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4988 else
4989 bitsize = -1;
4991 mode = DECL_MODE (field);
4992 if (DECL_BIT_FIELD (field))
4993 mode = VOIDmode;
4995 offset = DECL_FIELD_OFFSET (field);
4996 if (host_integerp (offset, 0)
4997 && host_integerp (bit_position (field), 0))
4999 bitpos = int_bit_position (field);
5000 offset = 0;
5002 else
5003 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5005 if (offset)
5007 rtx offset_rtx;
5009 if (CONTAINS_PLACEHOLDER_P (offset))
5010 offset = build (WITH_RECORD_EXPR, sizetype,
5011 offset, make_tree (TREE_TYPE (exp), target));
5013 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5014 if (GET_CODE (to_rtx) != MEM)
5015 abort ();
5017 #ifdef POINTERS_EXTEND_UNSIGNED
5018 if (GET_MODE (offset_rtx) != Pmode)
5019 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5020 #else
5021 if (GET_MODE (offset_rtx) != ptr_mode)
5022 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5023 #endif
5025 to_rtx = offset_address (to_rtx, offset_rtx,
5026 highest_pow2_factor (offset));
5029 if (TREE_READONLY (field))
5031 if (GET_CODE (to_rtx) == MEM)
5032 to_rtx = copy_rtx (to_rtx);
5034 RTX_UNCHANGING_P (to_rtx) = 1;
5037 #ifdef WORD_REGISTER_OPERATIONS
5038 /* If this initializes a field that is smaller than a word, at the
5039 start of a word, try to widen it to a full word.
5040 This special case allows us to output C++ member function
5041 initializations in a form that the optimizers can understand. */
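/* Worked example (assuming BITS_PER_WORD == 32): a 16-bit integer field at
   bit position 0 of a register target, initialized with the constant 5, is
   widened to a full word_mode store; on a big-endian target the value is
   first shifted left by 32 - 16 == 16 bits so it lands in the correct half
   of the word.  */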
5042 if (GET_CODE (target) == REG
5043 && bitsize < BITS_PER_WORD
5044 && bitpos % BITS_PER_WORD == 0
5045 && GET_MODE_CLASS (mode) == MODE_INT
5046 && TREE_CODE (value) == INTEGER_CST
5047 && exp_size >= 0
5048 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5050 tree type = TREE_TYPE (value);
5052 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5054 type = (*lang_hooks.types.type_for_size)
5055 (BITS_PER_WORD, TREE_UNSIGNED (type));
5056 value = convert (type, value);
5059 if (BYTES_BIG_ENDIAN)
5060 value
5061 = fold (build (LSHIFT_EXPR, type, value,
5062 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5063 bitsize = BITS_PER_WORD;
5064 mode = word_mode;
5066 #endif
5068 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5069 && DECL_NONADDRESSABLE_P (field))
5071 to_rtx = copy_rtx (to_rtx);
5072 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5075 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5076 value, type, cleared,
5077 get_alias_set (TREE_TYPE (field)));
5080 else if (TREE_CODE (type) == ARRAY_TYPE
5081 || TREE_CODE (type) == VECTOR_TYPE)
5083 tree elt;
5084 int i;
5085 int need_to_clear;
5086 tree domain = TYPE_DOMAIN (type);
5087 tree elttype = TREE_TYPE (type);
5088 int const_bounds_p;
5089 HOST_WIDE_INT minelt = 0;
5090 HOST_WIDE_INT maxelt = 0;
5092 /* Vectors are like arrays, but the domain is stored via an array
5093 type indirectly. */
5094 if (TREE_CODE (type) == VECTOR_TYPE)
5096 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5097 the same field as TYPE_DOMAIN, we are not guaranteed that
5098 it always will. */
5099 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5100 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5103 const_bounds_p = (TYPE_MIN_VALUE (domain)
5104 && TYPE_MAX_VALUE (domain)
5105 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5106 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5108 /* If we have constant bounds for the range of the type, get them. */
5109 if (const_bounds_p)
5111 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5112 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5115 /* If the constructor has fewer elements than the array,
5116 clear the whole array first. Similarly if this is
5117 static constructor of a non-BLKmode object. */
5118 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5119 need_to_clear = 1;
5120 else
5122 HOST_WIDE_INT count = 0, zero_count = 0;
5123 need_to_clear = ! const_bounds_p;
5125 /* This loop is a more accurate version of the loop in
5126 mostly_zeros_p (it handles RANGE_EXPR in an index).
5127 It is also needed to check for missing elements. */
5128 for (elt = CONSTRUCTOR_ELTS (exp);
5129 elt != NULL_TREE && ! need_to_clear;
5130 elt = TREE_CHAIN (elt))
5132 tree index = TREE_PURPOSE (elt);
5133 HOST_WIDE_INT this_node_count;
5135 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5137 tree lo_index = TREE_OPERAND (index, 0);
5138 tree hi_index = TREE_OPERAND (index, 1);
5140 if (! host_integerp (lo_index, 1)
5141 || ! host_integerp (hi_index, 1))
5143 need_to_clear = 1;
5144 break;
5147 this_node_count = (tree_low_cst (hi_index, 1)
5148 - tree_low_cst (lo_index, 1) + 1);
5150 else
5151 this_node_count = 1;
5153 count += this_node_count;
5154 if (mostly_zeros_p (TREE_VALUE (elt)))
5155 zero_count += this_node_count;
5158 /* Clear the entire array first if there are any missing elements,
5159 or if the incidence of zero elements is >= 75%. */
5160 if (! need_to_clear
5161 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5162 need_to_clear = 1;
5165 if (need_to_clear && size > 0)
5167 if (! cleared)
5169 if (REG_P (target))
5170 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5171 else
5172 clear_storage (target, GEN_INT (size));
5174 cleared = 1;
5176 else if (REG_P (target))
5177 /* Inform later passes that the old value is dead. */
5178 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5180 /* Store each element of the constructor into
5181 the corresponding element of TARGET, determined
5182 by counting the elements. */
5183 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5184 elt;
5185 elt = TREE_CHAIN (elt), i++)
5187 enum machine_mode mode;
5188 HOST_WIDE_INT bitsize;
5189 HOST_WIDE_INT bitpos;
5190 int unsignedp;
5191 tree value = TREE_VALUE (elt);
5192 tree index = TREE_PURPOSE (elt);
5193 rtx xtarget = target;
5195 if (cleared && is_zeros_p (value))
5196 continue;
5198 unsignedp = TREE_UNSIGNED (elttype);
5199 mode = TYPE_MODE (elttype);
5200 if (mode == BLKmode)
5201 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5202 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5203 : -1);
5204 else
5205 bitsize = GET_MODE_BITSIZE (mode);
5207 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5209 tree lo_index = TREE_OPERAND (index, 0);
5210 tree hi_index = TREE_OPERAND (index, 1);
5211 rtx index_r, pos_rtx, loop_end;
5212 struct nesting *loop;
5213 HOST_WIDE_INT lo, hi, count;
5214 tree position;
5216 /* If the range is constant and "small", unroll the loop. */
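/* Illustration: a GNU C range initializer such as
   int a[8] = { [2 ... 4] = 7 };  has COUNT == 3, so the range is unrolled
   into three stores, whereas  [0 ... 999] = 7  into a MEM target would
   exceed the size limit below and be emitted as a run-time loop instead.  */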
5217 if (const_bounds_p
5218 && host_integerp (lo_index, 0)
5219 && host_integerp (hi_index, 0)
5220 && (lo = tree_low_cst (lo_index, 0),
5221 hi = tree_low_cst (hi_index, 0),
5222 count = hi - lo + 1,
5223 (GET_CODE (target) != MEM
5224 || count <= 2
5225 || (host_integerp (TYPE_SIZE (elttype), 1)
5226 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5227 <= 40 * 8)))))
5229 lo -= minelt; hi -= minelt;
5230 for (; lo <= hi; lo++)
5232 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5234 if (GET_CODE (target) == MEM
5235 && !MEM_KEEP_ALIAS_SET_P (target)
5236 && TREE_CODE (type) == ARRAY_TYPE
5237 && TYPE_NONALIASED_COMPONENT (type))
5239 target = copy_rtx (target);
5240 MEM_KEEP_ALIAS_SET_P (target) = 1;
5243 store_constructor_field
5244 (target, bitsize, bitpos, mode, value, type, cleared,
5245 get_alias_set (elttype));
5248 else
5250 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5251 loop_end = gen_label_rtx ();
5253 unsignedp = TREE_UNSIGNED (domain);
5255 index = build_decl (VAR_DECL, NULL_TREE, domain);
5257 index_r
5258 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5259 &unsignedp, 0));
5260 SET_DECL_RTL (index, index_r);
5261 if (TREE_CODE (value) == SAVE_EXPR
5262 && SAVE_EXPR_RTL (value) == 0)
5264 /* Make sure value gets expanded once before the
5265 loop. */
5266 expand_expr (value, const0_rtx, VOIDmode, 0);
5267 emit_queue ();
5269 store_expr (lo_index, index_r, 0);
5270 loop = expand_start_loop (0);
5272 /* Assign value to element index. */
5273 position
5274 = convert (ssizetype,
5275 fold (build (MINUS_EXPR, TREE_TYPE (index),
5276 index, TYPE_MIN_VALUE (domain))));
5277 position = size_binop (MULT_EXPR, position,
5278 convert (ssizetype,
5279 TYPE_SIZE_UNIT (elttype)));
5281 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5282 xtarget = offset_address (target, pos_rtx,
5283 highest_pow2_factor (position));
5284 xtarget = adjust_address (xtarget, mode, 0);
5285 if (TREE_CODE (value) == CONSTRUCTOR)
5286 store_constructor (value, xtarget, cleared,
5287 bitsize / BITS_PER_UNIT);
5288 else
5289 store_expr (value, xtarget, 0);
5291 expand_exit_loop_if_false (loop,
5292 build (LT_EXPR, integer_type_node,
5293 index, hi_index));
5295 expand_increment (build (PREINCREMENT_EXPR,
5296 TREE_TYPE (index),
5297 index, integer_one_node), 0, 0);
5298 expand_end_loop ();
5299 emit_label (loop_end);
5302 else if ((index != 0 && ! host_integerp (index, 0))
5303 || ! host_integerp (TYPE_SIZE (elttype), 1))
5305 tree position;
5307 if (index == 0)
5308               index = ssize_int (i);
5310 if (minelt)
5311 index = convert (ssizetype,
5312 fold (build (MINUS_EXPR, index,
5313 TYPE_MIN_VALUE (domain))));
5315 position = size_binop (MULT_EXPR, index,
5316 convert (ssizetype,
5317 TYPE_SIZE_UNIT (elttype)));
5318 xtarget = offset_address (target,
5319 expand_expr (position, 0, VOIDmode, 0),
5320 highest_pow2_factor (position));
5321 xtarget = adjust_address (xtarget, mode, 0);
5322 store_expr (value, xtarget, 0);
5324 else
5326 if (index != 0)
5327 bitpos = ((tree_low_cst (index, 0) - minelt)
5328 * tree_low_cst (TYPE_SIZE (elttype), 1));
5329 else
5330 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5332 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5333 && TREE_CODE (type) == ARRAY_TYPE
5334 && TYPE_NONALIASED_COMPONENT (type))
5336 target = copy_rtx (target);
5337 MEM_KEEP_ALIAS_SET_P (target) = 1;
5340 store_constructor_field (target, bitsize, bitpos, mode, value,
5341 type, cleared, get_alias_set (elttype));
5347 /* Set constructor assignments. */
5348 else if (TREE_CODE (type) == SET_TYPE)
5350 tree elt = CONSTRUCTOR_ELTS (exp);
5351 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5352 tree domain = TYPE_DOMAIN (type);
5353 tree domain_min, domain_max, bitlength;
5355 /* The default implementation strategy is to extract the constant
5356 parts of the constructor, use that to initialize the target,
5357 and then "or" in whatever non-constant ranges we need in addition.
5359 If a large set is all zero or all ones, it is
5360 probably better to set it using memset (if available) or bzero.
5361 Also, if a large set has just a single range, it may also be
5362       better to first clear the whole set (using
5363       bzero/memset) and then set the bits we want.  */
5365 /* Check for all zeros. */
5366 if (elt == NULL_TREE && size > 0)
5368 if (!cleared)
5369 clear_storage (target, GEN_INT (size));
5370 return;
5373 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5374 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5375 bitlength = size_binop (PLUS_EXPR,
5376 size_diffop (domain_max, domain_min),
5377 ssize_int (1));
5379 nbits = tree_low_cst (bitlength, 1);
5381 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5382 are "complicated" (more than one range), initialize (the
5383 constant parts) by copying from a constant. */
5384 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5385 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5387 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5388 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5389 char *bit_buffer = alloca (nbits);
5390 HOST_WIDE_INT word = 0;
5391 unsigned int bit_pos = 0;
5392 unsigned int ibit = 0;
5393 unsigned int offset = 0; /* In bytes from beginning of set. */
5395 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5396 for (;;)
5398 if (bit_buffer[ibit])
5400 if (BYTES_BIG_ENDIAN)
5401 word |= (1 << (set_word_size - 1 - bit_pos));
5402 else
5403 word |= 1 << bit_pos;
5406 bit_pos++; ibit++;
5407 if (bit_pos >= set_word_size || ibit == nbits)
5409 if (word != 0 || ! cleared)
5411 rtx datum = GEN_INT (word);
5412 rtx to_rtx;
5414 /* The assumption here is that it is safe to use
5415 XEXP if the set is multi-word, but not if
5416 it's single-word. */
5417 if (GET_CODE (target) == MEM)
5418 to_rtx = adjust_address (target, mode, offset);
5419 else if (offset == 0)
5420 to_rtx = target;
5421 else
5422 abort ();
5423 emit_move_insn (to_rtx, datum);
5426 if (ibit == nbits)
5427 break;
5428 word = 0;
5429 bit_pos = 0;
5430 offset += set_word_size / BITS_PER_UNIT;
5434 else if (!cleared)
5435 /* Don't bother clearing storage if the set is all ones. */
5436 if (TREE_CHAIN (elt) != NULL_TREE
5437 || (TREE_PURPOSE (elt) == NULL_TREE
5438 ? nbits != 1
5439 : ( ! host_integerp (TREE_VALUE (elt), 0)
5440 || ! host_integerp (TREE_PURPOSE (elt), 0)
5441 || (tree_low_cst (TREE_VALUE (elt), 0)
5442 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5443 != (HOST_WIDE_INT) nbits))))
5444 clear_storage (target, expr_size (exp));
5446 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5448 /* Start of range of element or NULL. */
5449 tree startbit = TREE_PURPOSE (elt);
5450 /* End of range of element, or element value. */
5451 tree endbit = TREE_VALUE (elt);
5452 HOST_WIDE_INT startb, endb;
5453 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5455 bitlength_rtx = expand_expr (bitlength,
5456 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5458 /* Handle non-range tuple element like [ expr ]. */
5459 if (startbit == NULL_TREE)
5461 startbit = save_expr (endbit);
5462 endbit = startbit;
5465 startbit = convert (sizetype, startbit);
5466 endbit = convert (sizetype, endbit);
5467 if (! integer_zerop (domain_min))
5469 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5470 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5472 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5473 EXPAND_CONST_ADDRESS);
5474 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5475 EXPAND_CONST_ADDRESS);
5477 if (REG_P (target))
5479 targetx
5480 = assign_temp
5481 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5482 (GET_MODE (target), 0),
5483 TYPE_QUAL_CONST)),
5484 0, 1, 1);
5485 emit_move_insn (targetx, target);
5488 else if (GET_CODE (target) == MEM)
5489 targetx = target;
5490 else
5491 abort ();
5493 /* Optimization: If startbit and endbit are constants divisible
5494 by BITS_PER_UNIT, call memset instead. */
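/* Worked example: with BITS_PER_UNIT == 8, a constant range whose STARTBIT
   is 8 and whose ENDBIT is 23 gives STARTB == 8 and ENDB == 24, both
   divisible by 8, so the two bytes at offset 1 are set with a single
   memset of (24 - 8) / 8 == 2 bytes of all-ones.  */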
5495 if (TARGET_MEM_FUNCTIONS
5496 && TREE_CODE (startbit) == INTEGER_CST
5497 && TREE_CODE (endbit) == INTEGER_CST
5498 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5499 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5501 emit_library_call (memset_libfunc, LCT_NORMAL,
5502 VOIDmode, 3,
5503 plus_constant (XEXP (targetx, 0),
5504 startb / BITS_PER_UNIT),
5505 Pmode,
5506 constm1_rtx, TYPE_MODE (integer_type_node),
5507 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5508 TYPE_MODE (sizetype));
5510 else
5511 emit_library_call (setbits_libfunc, LCT_NORMAL,
5512 VOIDmode, 4, XEXP (targetx, 0),
5513 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5514 startbit_rtx, TYPE_MODE (sizetype),
5515 endbit_rtx, TYPE_MODE (sizetype));
5517 if (REG_P (target))
5518 emit_move_insn (target, targetx);
5522 else
5523 abort ();
5526 /* Store the value of EXP (an expression tree)
5527 into a subfield of TARGET which has mode MODE and occupies
5528 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5529 If MODE is VOIDmode, it means that we are storing into a bit-field.
5531 If VALUE_MODE is VOIDmode, return nothing in particular.
5532 UNSIGNEDP is not used in this case.
5534 Otherwise, return an rtx for the value stored. This rtx
5535 has mode VALUE_MODE if that is convenient to do.
5536 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5538 TYPE is the type of the underlying object,
5540 ALIAS_SET is the alias set for the destination. This value will
5541 (in general) be different from that for TARGET, since TARGET is a
5542 reference to the containing structure. */
5544 static rtx
5545 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5546 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5547 int unsignedp, tree type, int alias_set)
5549 HOST_WIDE_INT width_mask = 0;
5551 if (TREE_CODE (exp) == ERROR_MARK)
5552 return const0_rtx;
5554 /* If we have nothing to store, do nothing unless the expression has
5555 side-effects. */
5556 if (bitsize == 0)
5557 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5558 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5559 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
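/* E.g. for a 5-bit field, WIDTH_MASK becomes (1 << 5) - 1 == 0x1f; it is
   used near the end of this function to re-extract the stored value with
   an AND (or a shift pair for signed values) instead of reading the
   bit-field back from memory.  */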
5561 /* If we are storing into an unaligned field of an aligned union that is
5562 in a register, we may have the mode of TARGET being an integer mode but
5563 MODE == BLKmode. In that case, get an aligned object whose size and
5564 alignment are the same as TARGET and store TARGET into it (we can avoid
5565 the store if the field being stored is the entire width of TARGET). Then
5566 call ourselves recursively to store the field into a BLKmode version of
5567 that object. Finally, load from the object into TARGET. This is not
5568 very efficient in general, but should only be slightly more expensive
5569 than the otherwise-required unaligned accesses. Perhaps this can be
5570 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5571 twice, once with emit_move_insn and once via store_field. */
5573 if (mode == BLKmode
5574 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5576 rtx object = assign_temp (type, 0, 1, 1);
5577 rtx blk_object = adjust_address (object, BLKmode, 0);
5579 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5580 emit_move_insn (object, target);
5582 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5583 alias_set);
5585 emit_move_insn (target, object);
5587 /* We want to return the BLKmode version of the data. */
5588 return blk_object;
5591 if (GET_CODE (target) == CONCAT)
5593 /* We're storing into a struct containing a single __complex. */
5595 if (bitpos != 0)
5596 abort ();
5597 return store_expr (exp, target, 0);
5600 /* If the structure is in a register or if the component
5601 is a bit field, we cannot use addressing to access it.
5602 Use bit-field techniques or SUBREG to store in it. */
5604 if (mode == VOIDmode
5605 || (mode != BLKmode && ! direct_store[(int) mode]
5606 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5607 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5608 || GET_CODE (target) == REG
5609 || GET_CODE (target) == SUBREG
5610 /* If the field isn't aligned enough to store as an ordinary memref,
5611 store it as a bit field. */
5612 || (mode != BLKmode
5613 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5614 || bitpos % GET_MODE_ALIGNMENT (mode))
5615 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5616 || (bitpos % BITS_PER_UNIT != 0)))
5617 /* If the RHS and field are a constant size and the size of the
5618 RHS isn't the same size as the bitfield, we must use bitfield
5619 operations. */
5620 || (bitsize >= 0
5621 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5622 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5624 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5626 /* If BITSIZE is narrower than the size of the type of EXP
5627 we will be narrowing TEMP. Normally, what's wanted are the
5628 low-order bits. However, if EXP's type is a record and this is
5629       a big-endian machine, we want the upper BITSIZE bits.  */
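/* Worked example (values assumed): storing a 12-bit record field from a
   32-bit TEMP on a big-endian machine shifts TEMP right by 32 - 12 == 20
   bits, so that the upper 12 bits of TEMP become the low-order bits
   actually inserted by store_bit_field below.  */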
5630 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5631 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5632 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5633 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5634 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5635 - bitsize),
5636 NULL_RTX, 1);
5638 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5639 MODE. */
5640 if (mode != VOIDmode && mode != BLKmode
5641 && mode != TYPE_MODE (TREE_TYPE (exp)))
5642 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5644 /* If the modes of TARGET and TEMP are both BLKmode, both
5645 must be in memory and BITPOS must be aligned on a byte
5646 boundary. If so, we simply do a block copy. */
5647 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5649 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5650 || bitpos % BITS_PER_UNIT != 0)
5651 abort ();
5653 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5654 emit_block_move (target, temp,
5655 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5656 / BITS_PER_UNIT),
5657 BLOCK_OP_NORMAL);
5659 return value_mode == VOIDmode ? const0_rtx : target;
5662 /* Store the value in the bitfield. */
5663 store_bit_field (target, bitsize, bitpos, mode, temp,
5664 int_size_in_bytes (type));
5666 if (value_mode != VOIDmode)
5668 /* The caller wants an rtx for the value.
5669 If possible, avoid refetching from the bitfield itself. */
5670 if (width_mask != 0
5671 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5673 tree count;
5674 enum machine_mode tmode;
5676 tmode = GET_MODE (temp);
5677 if (tmode == VOIDmode)
5678 tmode = value_mode;
5680 if (unsignedp)
5681 return expand_and (tmode, temp,
5682 gen_int_mode (width_mask, tmode),
5683 NULL_RTX);
5685 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5686 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5687 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5690 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5691 NULL_RTX, value_mode, VOIDmode,
5692 int_size_in_bytes (type));
5694 return const0_rtx;
5696 else
5698 rtx addr = XEXP (target, 0);
5699 rtx to_rtx = target;
5701 /* If a value is wanted, it must be the lhs;
5702 so make the address stable for multiple use. */
5704 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5705 && ! CONSTANT_ADDRESS_P (addr)
5706 /* A frame-pointer reference is already stable. */
5707 && ! (GET_CODE (addr) == PLUS
5708 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5709 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5710 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5711 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5713 /* Now build a reference to just the desired component. */
5715 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5717 if (to_rtx == target)
5718 to_rtx = copy_rtx (to_rtx);
5720 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5721 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5722 set_mem_alias_set (to_rtx, alias_set);
5724 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5728 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5729 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5730 codes and find the ultimate containing object, which we return.
5732 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5733 bit position, and *PUNSIGNEDP to the signedness of the field.
5734 If the position of the field is variable, we store a tree
5735 giving the variable offset (in units) in *POFFSET.
5736 This offset is in addition to the bit position.
5737 If the position is not variable, we store 0 in *POFFSET.
5739 If any of the extraction expressions is volatile,
5740 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5742 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5743 is a mode that can be used to access the field. In that case, *PBITSIZE
5744 is redundant.
5746 If the field describes a variable-sized object, *PMODE is set to
5747 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5748 this case, but the address of the object can be found. */
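/* Editorial illustration (not part of the original interface comment; the
   variable names below are hypothetical): a caller typically decomposes a
   reference like this:

	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	enum machine_mode mode1;
	int unsignedp, volatilep = 0;
	tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					  &mode1, &unsignedp, &volatilep);

   For a COMPONENT_REF such as `s.f' where `f' is a 3-bit bit-field at bit
   offset 5 inside `s', this returns the tree for `s' with bitsize == 3,
   bitpos == 5, offset == 0 and mode1 == VOIDmode (VOIDmode because the
   field is a bit-field).  */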
5750 tree
5751 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5752 HOST_WIDE_INT *pbitpos, tree *poffset,
5753 enum machine_mode *pmode, int *punsignedp,
5754 int *pvolatilep)
5756 tree size_tree = 0;
5757 enum machine_mode mode = VOIDmode;
5758 tree offset = size_zero_node;
5759 tree bit_offset = bitsize_zero_node;
5760 tree placeholder_ptr = 0;
5761 tree tem;
5763 /* First get the mode, signedness, and size. We do this from just the
5764 outermost expression. */
5765 if (TREE_CODE (exp) == COMPONENT_REF)
5767 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5768 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5769 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5771 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5773 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5775 size_tree = TREE_OPERAND (exp, 1);
5776 *punsignedp = TREE_UNSIGNED (exp);
5778 else
5780 mode = TYPE_MODE (TREE_TYPE (exp));
5781 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5783 if (mode == BLKmode)
5784 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5785 else
5786 *pbitsize = GET_MODE_BITSIZE (mode);
5789 if (size_tree != 0)
5791 if (! host_integerp (size_tree, 1))
5792 mode = BLKmode, *pbitsize = -1;
5793 else
5794 *pbitsize = tree_low_cst (size_tree, 1);
5797 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5798 and find the ultimate containing object. */
5799 while (1)
5801 if (TREE_CODE (exp) == BIT_FIELD_REF)
5802 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5803 else if (TREE_CODE (exp) == COMPONENT_REF)
5805 tree field = TREE_OPERAND (exp, 1);
5806 tree this_offset = DECL_FIELD_OFFSET (field);
5808 /* If this field hasn't been filled in yet, don't go
5809 past it. This should only happen when folding expressions
5810 made during type construction. */
5811 if (this_offset == 0)
5812 break;
5813 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5814 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5816 offset = size_binop (PLUS_EXPR, offset, this_offset);
5817 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5818 DECL_FIELD_BIT_OFFSET (field));
5820 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5823 else if (TREE_CODE (exp) == ARRAY_REF
5824 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5826 tree index = TREE_OPERAND (exp, 1);
5827 tree array = TREE_OPERAND (exp, 0);
5828 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5829 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5830 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5832 /* We assume all arrays have sizes that are a multiple of a byte.
5833 First subtract the lower bound, if any, in the type of the
5834 index, then convert to sizetype and multiply by the size of the
5835 array element. */
5836 if (low_bound != 0 && ! integer_zerop (low_bound))
5837 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5838 index, low_bound));
5840 /* If the index has a self-referential type, pass it to a
5841 WITH_RECORD_EXPR; if the component size is self-referential,
5842 pass the containing array to one. */
5843 if (CONTAINS_PLACEHOLDER_P (index))
5844 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5845 if (CONTAINS_PLACEHOLDER_P (unit_size))
5846 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5848 offset = size_binop (PLUS_EXPR, offset,
5849 size_binop (MULT_EXPR,
5850 convert (sizetype, index),
5851 unit_size));
5854 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5856 tree new = find_placeholder (exp, &placeholder_ptr);
5858 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5859 We might have been called from tree optimization where we
5860 haven't set up an object yet. */
5861 if (new == 0)
5862 break;
5863 else
5864 exp = new;
5866 continue;
5869 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5870 conversions that don't change the mode, and all view conversions
5871 except those that need to "step up" the alignment. */
5872 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5873 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5874 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5875 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5876 && STRICT_ALIGNMENT
5877 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5878 < BIGGEST_ALIGNMENT)
5879 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5880 || TYPE_ALIGN_OK (TREE_TYPE
5881 (TREE_OPERAND (exp, 0))))))
5882 && ! ((TREE_CODE (exp) == NOP_EXPR
5883 || TREE_CODE (exp) == CONVERT_EXPR)
5884 && (TYPE_MODE (TREE_TYPE (exp))
5885 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5886 break;
5888 /* If any reference in the chain is volatile, the effect is volatile. */
5889 if (TREE_THIS_VOLATILE (exp))
5890 *pvolatilep = 1;
5892 exp = TREE_OPERAND (exp, 0);
5895 /* If OFFSET is constant, see if we can return the whole thing as a
5896 constant bit position. Otherwise, split it up. */
5897 if (host_integerp (offset, 0)
5898 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5899 bitsize_unit_node))
5900 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5901 && host_integerp (tem, 0))
5902 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5903 else
5904 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5906 *pmode = mode;
5907 return exp;
5910 /* Return 1 if T is an expression that get_inner_reference handles. */
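/* Editorial note: for example, `a.b[i]' is an ARRAY_REF wrapped around a
   COMPONENT_REF, and both codes are handled here; a bare VAR_DECL or a
   mode-changing NOP_EXPR is not.  */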
5913 handled_component_p (tree t)
5915 switch (TREE_CODE (t))
5917 case BIT_FIELD_REF:
5918 case COMPONENT_REF:
5919 case ARRAY_REF:
5920 case ARRAY_RANGE_REF:
5921 case NON_LVALUE_EXPR:
5922 case VIEW_CONVERT_EXPR:
5923 return 1;
5925 /* ??? Sure they are handled, but get_inner_reference may return
5926 a different PBITSIZE, depending upon whether the expression is
5927 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5928 case NOP_EXPR:
5929 case CONVERT_EXPR:
5930 return (TYPE_MODE (TREE_TYPE (t))
5931 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5933 default:
5934 return 0;
5938 /* Given an rtx VALUE that may contain additions and multiplications, return
5939 an equivalent value that just refers to a register, memory, or constant.
5940 This is done by generating instructions to perform the arithmetic and
5941 returning a pseudo-register containing the value.
5943 The returned value may be a REG, SUBREG, MEM or constant. */
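/* A minimal usage sketch (editorial; `reg' is a hypothetical Pmode pseudo
   register, not a name from this file):

	rtx sum = force_operand (gen_rtx_PLUS (Pmode, reg, GEN_INT (4)),
				 NULL_RTX);

   This emits whatever instructions are needed to compute reg + 4 and
   returns an operand (typically a new pseudo) holding the result, for use
   in contexts that require a simple operand rather than an arbitrary
   arithmetic expression.  */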
5946 force_operand (rtx value, rtx target)
5948 rtx op1, op2;
5949 /* Use subtarget as the target for operand 0 of a binary operation. */
5950 rtx subtarget = get_subtarget (target);
5951 enum rtx_code code = GET_CODE (value);
5953 /* Check for a PIC address load. */
5954 if ((code == PLUS || code == MINUS)
5955 && XEXP (value, 0) == pic_offset_table_rtx
5956 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5957 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5958 || GET_CODE (XEXP (value, 1)) == CONST))
5960 if (!subtarget)
5961 subtarget = gen_reg_rtx (GET_MODE (value));
5962 emit_move_insn (subtarget, value);
5963 return subtarget;
5966 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5968 if (!target)
5969 target = gen_reg_rtx (GET_MODE (value));
5970 convert_move (target, force_operand (XEXP (value, 0), NULL),
5971 code == ZERO_EXTEND);
5972 return target;
5975 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5977 op2 = XEXP (value, 1);
5978 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5979 subtarget = 0;
5980 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5982 code = PLUS;
5983 op2 = negate_rtx (GET_MODE (value), op2);
5986 /* Check for an addition with OP2 a constant integer and our first
5987 operand a PLUS of a virtual register and something else. In that
5988 case, we want to emit the sum of the virtual register and the
5989 constant first and then add the other value. This allows virtual
5990 register instantiation to simply modify the constant rather than
5991 creating another one around this addition. */
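	  /* Editorial illustration: given
		 (plus (plus (reg virtual-stack-vars) (const_int 8)) (reg R))
	     we first emit virtual-stack-vars + 8, so that virtual register
	     instantiation can later fold the 8 into the frame pointer
	     offset, and only then add R.  */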
5992 if (code == PLUS && GET_CODE (op2) == CONST_INT
5993 && GET_CODE (XEXP (value, 0)) == PLUS
5994 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5995 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5996 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5998 rtx temp = expand_simple_binop (GET_MODE (value), code,
5999 XEXP (XEXP (value, 0), 0), op2,
6000 subtarget, 0, OPTAB_LIB_WIDEN);
6001 return expand_simple_binop (GET_MODE (value), code, temp,
6002 force_operand (XEXP (XEXP (value,
6003 0), 1), 0),
6004 target, 0, OPTAB_LIB_WIDEN);
6007 op1 = force_operand (XEXP (value, 0), subtarget);
6008 op2 = force_operand (op2, NULL_RTX);
6009 switch (code)
6011 case MULT:
6012 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6013 case DIV:
6014 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6015 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6016 target, 1, OPTAB_LIB_WIDEN);
6017 else
6018 return expand_divmod (0,
6019 FLOAT_MODE_P (GET_MODE (value))
6020 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6021 GET_MODE (value), op1, op2, target, 0);
6022 break;
6023 case MOD:
6024 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6025 target, 0);
6026 break;
6027 case UDIV:
6028 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6029 target, 1);
6030 break;
6031 case UMOD:
6032 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6033 target, 1);
6034 break;
6035 case ASHIFTRT:
6036 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6037 target, 0, OPTAB_LIB_WIDEN);
6038 break;
6039 default:
6040 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6041 target, 1, OPTAB_LIB_WIDEN);
6044 if (GET_RTX_CLASS (code) == '1')
6046 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6047 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6050 #ifdef INSN_SCHEDULING
6051 /* On machines that have insn scheduling, we want all memory references to be
6052 explicit, so we need to deal with such paradoxical SUBREGs. */
6053 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6054 && (GET_MODE_SIZE (GET_MODE (value))
6055 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6056 value
6057 = simplify_gen_subreg (GET_MODE (value),
6058 force_reg (GET_MODE (SUBREG_REG (value)),
6059 force_operand (SUBREG_REG (value),
6060 NULL_RTX)),
6061 GET_MODE (SUBREG_REG (value)),
6062 SUBREG_BYTE (value));
6063 #endif
6065 return value;
6068 /* Subroutine of expand_expr: return nonzero iff there is no way that
6069 EXP can reference X, which is being modified. TOP_P is nonzero if this
6070 call is going to be used to determine whether we need a temporary
6071 for EXP, as opposed to a recursive call to this function.
6073 It is always safe for this routine to return zero since it merely
6074 searches for optimization opportunities. */
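/* Editorial example: when expanding an assignment such as `a = a + b',
   the expander may ask safe_from_p (DECL_RTL (a), <the tree for a + b>, 1).
   Since the right-hand side references `a', the result is 0 and the value
   must be computed into a temporary before being stored into `a'.  Because
   returning 0 only forces a temporary, it is always a safe (if
   conservative) answer.  */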
6077 safe_from_p (rtx x, tree exp, int top_p)
6079 rtx exp_rtl = 0;
6080 int i, nops;
6081 static tree save_expr_list;
6083 if (x == 0
6084 /* If EXP has varying size, we MUST use a target since we currently
6085 have no way of allocating temporaries of variable size
6086 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6087 So we assume here that something at a higher level has prevented a
6088 clash. This is somewhat bogus, but the best we can do. Only
6089 do this when X is BLKmode and when we are at the top level. */
6090 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6091 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6092 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6093 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6094 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6095 != INTEGER_CST)
6096 && GET_MODE (x) == BLKmode)
6097 /* If X is in the outgoing argument area, it is always safe. */
6098 || (GET_CODE (x) == MEM
6099 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6100 || (GET_CODE (XEXP (x, 0)) == PLUS
6101 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6102 return 1;
6104 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6105 find the underlying pseudo. */
6106 if (GET_CODE (x) == SUBREG)
6108 x = SUBREG_REG (x);
6109 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6110 return 0;
6113 /* A SAVE_EXPR might appear many times in the expression passed to the
6114 top-level safe_from_p call, and if it has a complex subexpression,
6115 examining it multiple times could result in a combinatorial explosion.
6116 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6117 with optimization took about 28 minutes to compile -- even though it was
6118 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6119 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6120 we have processed. Note that the only test of top_p was above. */
6122 if (top_p)
6124 int rtn;
6125 tree t;
6127 save_expr_list = 0;
6129 rtn = safe_from_p (x, exp, 0);
6131 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6132 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6134 return rtn;
6137 /* Now look at our tree code and possibly recurse. */
6138 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6140 case 'd':
6141 exp_rtl = DECL_RTL_IF_SET (exp);
6142 break;
6144 case 'c':
6145 return 1;
6147 case 'x':
6148 if (TREE_CODE (exp) == TREE_LIST)
6150 while (1)
6152 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6153 return 0;
6154 exp = TREE_CHAIN (exp);
6155 if (!exp)
6156 return 1;
6157 if (TREE_CODE (exp) != TREE_LIST)
6158 return safe_from_p (x, exp, 0);
6161 else if (TREE_CODE (exp) == ERROR_MARK)
6162 return 1; /* An already-visited SAVE_EXPR? */
6163 else
6164 return 0;
6166 case '2':
6167 case '<':
6168 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6169 return 0;
6170 /* FALLTHRU */
6172 case '1':
6173 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6175 case 'e':
6176 case 'r':
6177 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6178 the expression. If it is set, we conflict iff we are that rtx or
6179 both are in memory. Otherwise, we check all operands of the
6180 expression recursively. */
6182 switch (TREE_CODE (exp))
6184 case ADDR_EXPR:
6185 /* If the operand is static or we are static, we can't conflict.
6186 Likewise if we don't conflict with the operand at all. */
6187 if (staticp (TREE_OPERAND (exp, 0))
6188 || TREE_STATIC (exp)
6189 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6190 return 1;
6192 /* Otherwise, the only way this can conflict is if we are taking
6193 the address of a DECL whose address is part of X, which is
6194 very rare. */
6195 exp = TREE_OPERAND (exp, 0);
6196 if (DECL_P (exp))
6198 if (!DECL_RTL_SET_P (exp)
6199 || GET_CODE (DECL_RTL (exp)) != MEM)
6200 return 0;
6201 else
6202 exp_rtl = XEXP (DECL_RTL (exp), 0);
6204 break;
6206 case INDIRECT_REF:
6207 if (GET_CODE (x) == MEM
6208 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6209 get_alias_set (exp)))
6210 return 0;
6211 break;
6213 case CALL_EXPR:
6214 /* Assume that the call will clobber all hard registers and
6215 all of memory. */
6216 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6217 || GET_CODE (x) == MEM)
6218 return 0;
6219 break;
6221 case RTL_EXPR:
6222 /* If a sequence exists, we would have to scan every instruction
6223 in the sequence to see if it was safe. This is probably not
6224 worthwhile. */
6225 if (RTL_EXPR_SEQUENCE (exp))
6226 return 0;
6228 exp_rtl = RTL_EXPR_RTL (exp);
6229 break;
6231 case WITH_CLEANUP_EXPR:
6232 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6233 break;
6235 case CLEANUP_POINT_EXPR:
6236 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6238 case SAVE_EXPR:
6239 exp_rtl = SAVE_EXPR_RTL (exp);
6240 if (exp_rtl)
6241 break;
6243 /* If we've already scanned this, don't do it again. Otherwise,
6244 show we've scanned it and record for clearing the flag if we're
6245 going on. */
6246 if (TREE_PRIVATE (exp))
6247 return 1;
6249 TREE_PRIVATE (exp) = 1;
6250 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6252 TREE_PRIVATE (exp) = 0;
6253 return 0;
6256 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6257 return 1;
6259 case BIND_EXPR:
6260 /* The only operand we look at is operand 1. The rest aren't
6261 part of the expression. */
6262 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6264 default:
6265 break;
6268 /* If we have an rtx, we do not need to scan our operands. */
6269 if (exp_rtl)
6270 break;
6272 nops = first_rtl_op (TREE_CODE (exp));
6273 for (i = 0; i < nops; i++)
6274 if (TREE_OPERAND (exp, i) != 0
6275 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6276 return 0;
6278 /* If this is a language-specific tree code, it may require
6279 special handling. */
6280 if ((unsigned int) TREE_CODE (exp)
6281 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6282 && !(*lang_hooks.safe_from_p) (x, exp))
6283 return 0;
6286 /* If we have an rtl, find any enclosed object. Then see if we conflict
6287 with it. */
6288 if (exp_rtl)
6290 if (GET_CODE (exp_rtl) == SUBREG)
6292 exp_rtl = SUBREG_REG (exp_rtl);
6293 if (GET_CODE (exp_rtl) == REG
6294 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6295 return 0;
6298 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6299 are memory and they conflict. */
6300 return ! (rtx_equal_p (x, exp_rtl)
6301 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6302 && true_dependence (exp_rtl, VOIDmode, x,
6303 rtx_addr_varies_p)));
6306 /* If we reach here, it is safe. */
6307 return 1;
6310 /* Subroutine of expand_expr: return rtx if EXP is a
6311 variable or parameter; else return 0. */
6313 static rtx
6314 var_rtx (tree exp)
6316 STRIP_NOPS (exp);
6317 switch (TREE_CODE (exp))
6319 case PARM_DECL:
6320 case VAR_DECL:
6321 return DECL_RTL (exp);
6322 default:
6323 return 0;
6327 #ifdef MAX_INTEGER_COMPUTATION_MODE
6329 void
6330 check_max_integer_computation_mode (tree exp)
6332 enum tree_code code;
6333 enum machine_mode mode;
6335 /* Strip any NOPs that don't change the mode. */
6336 STRIP_NOPS (exp);
6337 code = TREE_CODE (exp);
6339 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6340 if (code == NOP_EXPR
6341 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6342 return;
6344 /* First check the type of the overall operation. We need only look at
6345 unary, binary and relational operations. */
6346 if (TREE_CODE_CLASS (code) == '1'
6347 || TREE_CODE_CLASS (code) == '2'
6348 || TREE_CODE_CLASS (code) == '<')
6350 mode = TYPE_MODE (TREE_TYPE (exp));
6351 if (GET_MODE_CLASS (mode) == MODE_INT
6352 && mode > MAX_INTEGER_COMPUTATION_MODE)
6353 internal_error ("unsupported wide integer operation");
6356 /* Check operand of a unary op. */
6357 if (TREE_CODE_CLASS (code) == '1')
6359 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6360 if (GET_MODE_CLASS (mode) == MODE_INT
6361 && mode > MAX_INTEGER_COMPUTATION_MODE)
6362 internal_error ("unsupported wide integer operation");
6365 /* Check operands of a binary/comparison op. */
6366 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6368 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6369 if (GET_MODE_CLASS (mode) == MODE_INT
6370 && mode > MAX_INTEGER_COMPUTATION_MODE)
6371 internal_error ("unsupported wide integer operation");
6373 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6374 if (GET_MODE_CLASS (mode) == MODE_INT
6375 && mode > MAX_INTEGER_COMPUTATION_MODE)
6376 internal_error ("unsupported wide integer operation");
6379 #endif
6381 /* Return the highest power of two that EXP is known to be a multiple of.
6382 This is used in updating alignment of MEMs in array references. */
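/* Editorial worked examples: for the tree `i * 12' the result is 4
   (nothing is known about `i', so its factor is 1, and the largest power
   of two dividing 12 is 4); for `i * 8 + 16' it is 8, the minimum of the
   factors of the two addends; for the constant 48 it is 16.  */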
6384 static unsigned HOST_WIDE_INT
6385 highest_pow2_factor (tree exp)
6387 unsigned HOST_WIDE_INT c0, c1;
6389 switch (TREE_CODE (exp))
6391 case INTEGER_CST:
6392 /* We can find the lowest bit that's a one. If the low
6393 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6394 We need to handle this case since we can find it in a COND_EXPR,
6395 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6396 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6397 later ICE. */
6398 if (TREE_CONSTANT_OVERFLOW (exp))
6399 return BIGGEST_ALIGNMENT;
6400 else
6402 /* Note: tree_low_cst is intentionally not used here,
6403 we don't care about the upper bits. */
6404 c0 = TREE_INT_CST_LOW (exp);
6405 c0 &= -c0;
6406 return c0 ? c0 : BIGGEST_ALIGNMENT;
6408 break;
6410 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6411 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6412 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6413 return MIN (c0, c1);
6415 case MULT_EXPR:
6416 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6417 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6418 return c0 * c1;
6420 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6421 case CEIL_DIV_EXPR:
6422 if (integer_pow2p (TREE_OPERAND (exp, 1))
6423 && host_integerp (TREE_OPERAND (exp, 1), 1))
6425 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6426 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6427 return MAX (1, c0 / c1);
6429 break;
6431 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6432 case SAVE_EXPR: case WITH_RECORD_EXPR:
6433 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6435 case COMPOUND_EXPR:
6436 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6438 case COND_EXPR:
6439 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6440 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6441 return MIN (c0, c1);
6443 default:
6444 break;
6447 return 1;
6450 /* Similar, except that it is known that the expression must be a multiple
6451 of the alignment of TYPE. */
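/* Editorial example: if TYPE is aligned to 64 bits (8 bytes) and EXP is
   `i * 2', highest_pow2_factor reports only 2, but the type's alignment
   lets us report 8 here.  */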
6453 static unsigned HOST_WIDE_INT
6454 highest_pow2_factor_for_type (tree type, tree exp)
6456 unsigned HOST_WIDE_INT type_align, factor;
6458 factor = highest_pow2_factor (exp);
6459 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6460 return MAX (factor, type_align);
6463 /* Return an object on the placeholder list that matches EXP, a
6464 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6465 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6466 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6467 points to a starting position in the placeholder list (zero meaning
6468 the start of the list); on return it is updated to point to the
6469 placeholder list entry at which the object was found. */
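/* Editorial illustration: a PLACEHOLDER_EXPR of type `struct S' stands for
   "the object of type S currently being manipulated" (this arises chiefly
   for self-referential types such as Ada's).  If placeholder_list holds an
   object of type `struct S' (pushed by the WITH_RECORD_EXPR case of
   expand_expr below), that object is returned; if it holds only a pointer
   to `struct S', an INDIRECT_REF of that pointer is built and returned
   instead.  */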
6471 tree
6472 find_placeholder (tree exp, tree *plist)
6474 tree type = TREE_TYPE (exp);
6475 tree placeholder_expr;
6477 for (placeholder_expr
6478 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6479 placeholder_expr != 0;
6480 placeholder_expr = TREE_CHAIN (placeholder_expr))
6482 tree need_type = TYPE_MAIN_VARIANT (type);
6483 tree elt;
6485 /* Find the outermost reference that is of the type we want. If none,
6486 see if any object has a type that is a pointer to the type we
6487 want. */
6488 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6489 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6490 || TREE_CODE (elt) == COND_EXPR)
6491 ? TREE_OPERAND (elt, 1)
6492 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6493 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6494 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6495 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6496 ? TREE_OPERAND (elt, 0) : 0))
6497 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6499 if (plist)
6500 *plist = placeholder_expr;
6501 return elt;
6504 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6506 = ((TREE_CODE (elt) == COMPOUND_EXPR
6507 || TREE_CODE (elt) == COND_EXPR)
6508 ? TREE_OPERAND (elt, 1)
6509 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6510 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6511 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6512 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6513 ? TREE_OPERAND (elt, 0) : 0))
6514 if (POINTER_TYPE_P (TREE_TYPE (elt))
6515 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6516 == need_type))
6518 if (plist)
6519 *plist = placeholder_expr;
6520 return build1 (INDIRECT_REF, need_type, elt);
6524 return 0;
6527 /* Subroutine of expand_expr. Expand the two operands of a binary
6528 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6529 The value may be stored in TARGET if TARGET is nonzero. The
6530 MODIFIER argument is as documented by expand_expr. */
6532 static void
6533 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6534 enum expand_modifier modifier)
6536 if (! safe_from_p (target, exp1, 1))
6537 target = 0;
6538 if (operand_equal_p (exp0, exp1, 0))
6540 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6541 *op1 = copy_rtx (*op0);
6543 else
6545 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6546 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6551 /* expand_expr: generate code for computing expression EXP.
6552 An rtx for the computed value is returned. The value is never null.
6553 In the case of a void EXP, const0_rtx is returned.
6555 The value may be stored in TARGET if TARGET is nonzero.
6556 TARGET is just a suggestion; callers must assume that
6557 the rtx returned may not be the same as TARGET.
6559 If TARGET is CONST0_RTX, it means that the value will be ignored.
6561 If TMODE is not VOIDmode, it suggests generating the
6562 result in mode TMODE. But this is done only when convenient.
6563 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6564 TMODE is just a suggestion; callers must assume that
6565 the rtx returned may not have mode TMODE.
6567 Note that TARGET may have neither TMODE nor MODE. In that case, it
6568 probably will not be used.
6570 If MODIFIER is EXPAND_SUM then when EXP is an addition
6571 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6572 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6573 products as above, or REG or MEM, or constant.
6574 Ordinarily in such cases we would output mul or add instructions
6575 and then return a pseudo reg containing the sum.
6577 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6578 it also marks a label as absolutely required (it can't be dead).
6579 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6580 This is used for outputting expressions used in initializers.
6582 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6583 with a constant address even if that address is not normally legitimate.
6584 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6586 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6587 a call parameter. Such targets require special care as we haven't yet
6588 marked TARGET so that it's safe from being trashed by libcalls. We
6589 don't want to use TARGET for anything but the final result;
6590 intermediate values must go elsewhere. Additionally, calls to
6591 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
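/* Editorial caller sketch (names hypothetical):

	rtx op = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   expands EXP with no target or mode preference and returns some rtx
   holding the value, while

	expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

   evaluates EXP only for its side effects, per the CONST0_RTX convention
   described above.  */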
6594 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6595 enum expand_modifier modifier)
6597 rtx op0, op1, temp;
6598 tree type = TREE_TYPE (exp);
6599 int unsignedp = TREE_UNSIGNED (type);
6600 enum machine_mode mode;
6601 enum tree_code code = TREE_CODE (exp);
6602 optab this_optab;
6603 rtx subtarget, original_target;
6604 int ignore;
6605 tree context;
6607 /* Handle ERROR_MARK before anybody tries to access its type. */
6608 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6610 op0 = CONST0_RTX (tmode);
6611 if (op0 != 0)
6612 return op0;
6613 return const0_rtx;
6616 mode = TYPE_MODE (type);
6617 /* Use subtarget as the target for operand 0 of a binary operation. */
6618 subtarget = get_subtarget (target);
6619 original_target = target;
6620 ignore = (target == const0_rtx
6621 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6622 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6623 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6624 && TREE_CODE (type) == VOID_TYPE));
6626 /* If we are going to ignore this result, we need only do something
6627 if there is a side-effect somewhere in the expression. If there
6628 is, short-circuit the most common cases here. Note that we must
6629 not call expand_expr with anything but const0_rtx in case this
6630 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6632 if (ignore)
6634 if (! TREE_SIDE_EFFECTS (exp))
6635 return const0_rtx;
6637 /* Ensure we reference a volatile object even if value is ignored, but
6638 don't do this if all we are doing is taking its address. */
6639 if (TREE_THIS_VOLATILE (exp)
6640 && TREE_CODE (exp) != FUNCTION_DECL
6641 && mode != VOIDmode && mode != BLKmode
6642 && modifier != EXPAND_CONST_ADDRESS)
6644 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6645 if (GET_CODE (temp) == MEM)
6646 temp = copy_to_reg (temp);
6647 return const0_rtx;
6650 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6651 || code == INDIRECT_REF || code == BUFFER_REF)
6652 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6653 modifier);
6655 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6656 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6658 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6659 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6660 return const0_rtx;
6662 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6663 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6664 /* If the second operand has no side effects, just evaluate
6665 the first. */
6666 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6667 modifier);
6668 else if (code == BIT_FIELD_REF)
6670 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6671 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6672 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6673 return const0_rtx;
6676 target = 0;
6679 #ifdef MAX_INTEGER_COMPUTATION_MODE
6680 /* Only check stuff here if the mode we want is different from the mode
6681 of the expression; if it's the same, check_max_integer_computation_mode
6682 will handle it. Do we really need to check this stuff at all? */
6684 if (target
6685 && GET_MODE (target) != mode
6686 && TREE_CODE (exp) != INTEGER_CST
6687 && TREE_CODE (exp) != PARM_DECL
6688 && TREE_CODE (exp) != ARRAY_REF
6689 && TREE_CODE (exp) != ARRAY_RANGE_REF
6690 && TREE_CODE (exp) != COMPONENT_REF
6691 && TREE_CODE (exp) != BIT_FIELD_REF
6692 && TREE_CODE (exp) != INDIRECT_REF
6693 && TREE_CODE (exp) != CALL_EXPR
6694 && TREE_CODE (exp) != VAR_DECL
6695 && TREE_CODE (exp) != RTL_EXPR)
6697 enum machine_mode mode = GET_MODE (target);
6699 if (GET_MODE_CLASS (mode) == MODE_INT
6700 && mode > MAX_INTEGER_COMPUTATION_MODE)
6701 internal_error ("unsupported wide integer operation");
6704 if (tmode != mode
6705 && TREE_CODE (exp) != INTEGER_CST
6706 && TREE_CODE (exp) != PARM_DECL
6707 && TREE_CODE (exp) != ARRAY_REF
6708 && TREE_CODE (exp) != ARRAY_RANGE_REF
6709 && TREE_CODE (exp) != COMPONENT_REF
6710 && TREE_CODE (exp) != BIT_FIELD_REF
6711 && TREE_CODE (exp) != INDIRECT_REF
6712 && TREE_CODE (exp) != VAR_DECL
6713 && TREE_CODE (exp) != CALL_EXPR
6714 && TREE_CODE (exp) != RTL_EXPR
6715 && GET_MODE_CLASS (tmode) == MODE_INT
6716 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6717 internal_error ("unsupported wide integer operation");
6719 check_max_integer_computation_mode (exp);
6720 #endif
6722 /* If we will do cse, generate all results into pseudo registers
6723 since 1) that allows cse to find more things
6724 and 2) otherwise cse could produce an insn the machine
6725 cannot support. An exception is a CONSTRUCTOR into a multi-word
6726 MEM: that's much more likely to be most efficient into the MEM.
6727 Another is a CALL_EXPR which must return in memory. */
6729 if (! cse_not_expected && mode != BLKmode && target
6730 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6731 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6732 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6733 target = 0;
6735 switch (code)
6737 case LABEL_DECL:
6739 tree function = decl_function_context (exp);
6740 /* Labels in containing functions, or labels used from initializers,
6741 must be forced. */
6742 if (modifier == EXPAND_INITIALIZER
6743 || (function != current_function_decl
6744 && function != inline_function_decl
6745 && function != 0))
6746 temp = force_label_rtx (exp);
6747 else
6748 temp = label_rtx (exp);
6750 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6751 if (function != current_function_decl
6752 && function != inline_function_decl && function != 0)
6753 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6754 return temp;
6757 case PARM_DECL:
6758 if (!DECL_RTL_SET_P (exp))
6760 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6761 return CONST0_RTX (mode);
6764 /* ... fall through ... */
6766 case VAR_DECL:
6767 /* If a static var's type was incomplete when the decl was written,
6768 but the type is complete now, lay out the decl now. */
6769 if (DECL_SIZE (exp) == 0
6770 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6771 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6772 layout_decl (exp, 0);
6774 /* ... fall through ... */
6776 case FUNCTION_DECL:
6777 case RESULT_DECL:
6778 if (DECL_RTL (exp) == 0)
6779 abort ();
6781 /* Ensure the variable is marked as used even if it doesn't go through
6782 a parser. If it hasn't been used yet, write out an external
6783 definition. */
6784 if (! TREE_USED (exp))
6786 assemble_external (exp);
6787 TREE_USED (exp) = 1;
6790 /* Show we haven't gotten RTL for this yet. */
6791 temp = 0;
6793 /* Handle variables inherited from containing functions. */
6794 context = decl_function_context (exp);
6796 /* We treat inline_function_decl as an alias for the current function
6797 because that is the inline function whose vars, types, etc.
6798 are being merged into the current function.
6799 See expand_inline_function. */
6801 if (context != 0 && context != current_function_decl
6802 && context != inline_function_decl
6803 /* If var is static, we don't need a static chain to access it. */
6804 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6805 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6807 rtx addr;
6809 /* Mark as non-local and addressable. */
6810 DECL_NONLOCAL (exp) = 1;
6811 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6812 abort ();
6813 (*lang_hooks.mark_addressable) (exp);
6814 if (GET_CODE (DECL_RTL (exp)) != MEM)
6815 abort ();
6816 addr = XEXP (DECL_RTL (exp), 0);
6817 if (GET_CODE (addr) == MEM)
6818 addr
6819 = replace_equiv_address (addr,
6820 fix_lexical_addr (XEXP (addr, 0), exp));
6821 else
6822 addr = fix_lexical_addr (addr, exp);
6824 temp = replace_equiv_address (DECL_RTL (exp), addr);
6827 /* This is the case of an array whose size is to be determined
6828 from its initializer, while the initializer is still being parsed.
6829 See expand_decl. */
6831 else if (GET_CODE (DECL_RTL (exp)) == MEM
6832 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6833 temp = validize_mem (DECL_RTL (exp));
6835 /* If DECL_RTL is memory, we are in the normal case; if either the
6836 address is not valid, or it is not a register and -fforce-addr
6837 is specified, get the address into a register. */
6839 else if (GET_CODE (DECL_RTL (exp)) == MEM
6840 && modifier != EXPAND_CONST_ADDRESS
6841 && modifier != EXPAND_SUM
6842 && modifier != EXPAND_INITIALIZER
6843 && (! memory_address_p (DECL_MODE (exp),
6844 XEXP (DECL_RTL (exp), 0))
6845 || (flag_force_addr
6846 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6847 temp = replace_equiv_address (DECL_RTL (exp),
6848 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6850 /* If we got something, return it. But first, set the alignment
6851 if the address is a register. */
6852 if (temp != 0)
6854 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6855 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6857 return temp;
6860 /* If the mode of DECL_RTL does not match that of the decl, it
6861 must be a promoted value. We return a SUBREG of the wanted mode,
6862 but mark it so that we know that it was already extended. */
6864 if (GET_CODE (DECL_RTL (exp)) == REG
6865 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6867 /* Get the signedness used for this variable. Ensure we get the
6868 same mode we got when the variable was declared. */
6869 if (GET_MODE (DECL_RTL (exp))
6870 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6871 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6872 abort ();
6874 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6875 SUBREG_PROMOTED_VAR_P (temp) = 1;
6876 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6877 return temp;
6880 return DECL_RTL (exp);
6882 case INTEGER_CST:
6883 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6884 TREE_INT_CST_HIGH (exp), mode);
6886 /* ??? If overflow is set, fold will have done an incomplete job,
6887 which can result in (plus xx (const_int 0)), which can get
6888 simplified by validate_replace_rtx during virtual register
6889 instantiation, which can result in unrecognizable insns.
6890 Avoid this by forcing all overflows into registers. */
6891 if (TREE_CONSTANT_OVERFLOW (exp)
6892 && modifier != EXPAND_INITIALIZER)
6893 temp = force_reg (mode, temp);
6895 return temp;
6897 case VECTOR_CST:
6898 return const_vector_from_tree (exp);
6900 case CONST_DECL:
6901 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6903 case REAL_CST:
6904 /* If optimized, generate immediate CONST_DOUBLE
6905 which will be turned into memory by reload if necessary.
6907 We used to force a register so that loop.c could see it. But
6908 this does not allow gen_* patterns to perform optimizations with
6909 the constants. It also produces two insns in cases like "x = 1.0;".
6910 On most machines, floating-point constants are not permitted in
6911 many insns, so we'd end up copying it to a register in any case.
6913 Now, we do the copying in expand_binop, if appropriate. */
6914 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6915 TYPE_MODE (TREE_TYPE (exp)));
6917 case COMPLEX_CST:
6918 /* Handle evaluating a complex constant in a CONCAT target. */
6919 if (original_target && GET_CODE (original_target) == CONCAT)
6921 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6922 rtx rtarg, itarg;
6924 rtarg = XEXP (original_target, 0);
6925 itarg = XEXP (original_target, 1);
6927 /* Move the real and imaginary parts separately. */
6928 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6929 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6931 if (op0 != rtarg)
6932 emit_move_insn (rtarg, op0);
6933 if (op1 != itarg)
6934 emit_move_insn (itarg, op1);
6936 return original_target;
6939 /* ... fall through ... */
6941 case STRING_CST:
6942 temp = output_constant_def (exp, 1);
6944 /* temp contains a constant address.
6945 On RISC machines where a constant address isn't valid,
6946 make some insns to get that address into a register. */
6947 if (modifier != EXPAND_CONST_ADDRESS
6948 && modifier != EXPAND_INITIALIZER
6949 && modifier != EXPAND_SUM
6950 && (! memory_address_p (mode, XEXP (temp, 0))
6951 || flag_force_addr))
6952 return replace_equiv_address (temp,
6953 copy_rtx (XEXP (temp, 0)));
6954 return temp;
6956 case EXPR_WITH_FILE_LOCATION:
6958 rtx to_return;
6959 struct file_stack fs;
6961 fs.location = input_location;
6962 fs.next = expr_wfl_stack;
6963 input_filename = EXPR_WFL_FILENAME (exp);
6964 input_line = EXPR_WFL_LINENO (exp);
6965 expr_wfl_stack = &fs;
6966 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6967 emit_line_note (input_location);
6968 /* Possibly avoid switching back and forth here. */
6969 to_return = expand_expr (EXPR_WFL_NODE (exp),
6970 (ignore ? const0_rtx : target),
6971 tmode, modifier);
6972 if (expr_wfl_stack != &fs)
6973 abort ();
6974 input_location = fs.location;
6975 expr_wfl_stack = fs.next;
6976 return to_return;
6979 case SAVE_EXPR:
6980 context = decl_function_context (exp);
6982 /* If this SAVE_EXPR was at global context, assume we are an
6983 initialization function and move it into our context. */
6984 if (context == 0)
6985 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6987 /* We treat inline_function_decl as an alias for the current function
6988 because that is the inline function whose vars, types, etc.
6989 are being merged into the current function.
6990 See expand_inline_function. */
6991 if (context == current_function_decl || context == inline_function_decl)
6992 context = 0;
6994 /* If this is non-local, handle it. */
6995 if (context)
6997 /* The following call just exists to abort if the context is
6998 not of a containing function. */
6999 find_function_data (context);
7001 temp = SAVE_EXPR_RTL (exp);
7002 if (temp && GET_CODE (temp) == REG)
7004 put_var_into_stack (exp, /*rescan=*/true);
7005 temp = SAVE_EXPR_RTL (exp);
7007 if (temp == 0 || GET_CODE (temp) != MEM)
7008 abort ();
7009 return
7010 replace_equiv_address (temp,
7011 fix_lexical_addr (XEXP (temp, 0), exp));
7013 if (SAVE_EXPR_RTL (exp) == 0)
7015 if (mode == VOIDmode)
7016 temp = const0_rtx;
7017 else
7018 temp = assign_temp (build_qualified_type (type,
7019 (TYPE_QUALS (type)
7020 | TYPE_QUAL_CONST)),
7021 3, 0, 0);
7023 SAVE_EXPR_RTL (exp) = temp;
7024 if (!optimize && GET_CODE (temp) == REG)
7025 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7026 save_expr_regs);
7028 /* If the mode of TEMP does not match that of the expression, it
7029 must be a promoted value. We pass store_expr a SUBREG of the
7030 wanted mode but mark it so that we know that it was already
7031 extended. */
7033 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7035 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7036 promote_mode (type, mode, &unsignedp, 0);
7037 SUBREG_PROMOTED_VAR_P (temp) = 1;
7038 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7041 if (temp == const0_rtx)
7042 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7043 else
7044 store_expr (TREE_OPERAND (exp, 0), temp,
7045 modifier == EXPAND_STACK_PARM ? 2 : 0);
7047 TREE_USED (exp) = 1;
7050 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7051 must be a promoted value. We return a SUBREG of the wanted mode,
7052 but mark it so that we know that it was already extended. */
7054 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7055 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7057 /* Compute the signedness and make the proper SUBREG. */
7058 promote_mode (type, mode, &unsignedp, 0);
7059 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7060 SUBREG_PROMOTED_VAR_P (temp) = 1;
7061 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7062 return temp;
7065 return SAVE_EXPR_RTL (exp);
7067 case UNSAVE_EXPR:
7069 rtx temp;
7070 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7071 TREE_OPERAND (exp, 0)
7072 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7073 return temp;
7076 case PLACEHOLDER_EXPR:
7078 tree old_list = placeholder_list;
7079 tree placeholder_expr = 0;
7081 exp = find_placeholder (exp, &placeholder_expr);
7082 if (exp == 0)
7083 abort ();
7085 placeholder_list = TREE_CHAIN (placeholder_expr);
7086 temp = expand_expr (exp, original_target, tmode, modifier);
7087 placeholder_list = old_list;
7088 return temp;
7091 case WITH_RECORD_EXPR:
7092 /* Put the object on the placeholder list, expand our first operand,
7093 and pop the list. */
7094 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7095 placeholder_list);
7096 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7097 modifier);
7098 placeholder_list = TREE_CHAIN (placeholder_list);
7099 return target;
7101 case GOTO_EXPR:
7102 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7103 expand_goto (TREE_OPERAND (exp, 0));
7104 else
7105 expand_computed_goto (TREE_OPERAND (exp, 0));
7106 return const0_rtx;
7108 case EXIT_EXPR:
7109 expand_exit_loop_if_false (NULL,
7110 invert_truthvalue (TREE_OPERAND (exp, 0)));
7111 return const0_rtx;
7113 case LABELED_BLOCK_EXPR:
7114 if (LABELED_BLOCK_BODY (exp))
7115 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7116 /* Should perhaps use expand_label, but this is simpler and safer. */
7117 do_pending_stack_adjust ();
7118 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7119 return const0_rtx;
7121 case EXIT_BLOCK_EXPR:
7122 if (EXIT_BLOCK_RETURN (exp))
7123 sorry ("returned value in block_exit_expr");
7124 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7125 return const0_rtx;
7127 case LOOP_EXPR:
7128 push_temp_slots ();
7129 expand_start_loop (1);
7130 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7131 expand_end_loop ();
7132 pop_temp_slots ();
7134 return const0_rtx;
7136 case BIND_EXPR:
7138 tree vars = TREE_OPERAND (exp, 0);
7140 /* Need to open a binding contour here because
7141 if there are any cleanups they must be contained here. */
7142 expand_start_bindings (2);
7144 /* Mark the corresponding BLOCK for output in its proper place. */
7145 if (TREE_OPERAND (exp, 2) != 0
7146 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7147 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7149 /* If VARS have not yet been expanded, expand them now. */
7150 while (vars)
7152 if (!DECL_RTL_SET_P (vars))
7153 expand_decl (vars);
7154 expand_decl_init (vars);
7155 vars = TREE_CHAIN (vars);
7158 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7160 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7162 return temp;
7165 case RTL_EXPR:
7166 if (RTL_EXPR_SEQUENCE (exp))
7168 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7169 abort ();
7170 emit_insn (RTL_EXPR_SEQUENCE (exp));
7171 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7173 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7174 free_temps_for_rtl_expr (exp);
7175 return RTL_EXPR_RTL (exp);
7177 case CONSTRUCTOR:
7178 /* If we don't need the result, just ensure we evaluate any
7179 subexpressions. */
7180 if (ignore)
7182 tree elt;
7184 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7185 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7187 return const0_rtx;
7190 /* All elts simple constants => refer to a constant in memory. But
7191 if this is a non-BLKmode mode, let it store a field at a time
7192 since that should make a CONST_INT or CONST_DOUBLE when we
7193 fold. Likewise, if we have a target we can use, it is best to
7194 store directly into the target unless the type is large enough
7195 that memcpy will be used. If we are making an initializer and
7196 all operands are constant, put it in memory as well.
7198 FIXME: Avoid trying to fill vector constructors piece-meal.
7199 Output them with output_constant_def below unless we're sure
7200 they're zeros. This should go away when vector initializers
7201 are treated like VECTOR_CST instead of arrays.
7203 else if ((TREE_STATIC (exp)
7204 && ((mode == BLKmode
7205 && ! (target != 0 && safe_from_p (target, exp, 1)))
7206 || TREE_ADDRESSABLE (exp)
7207 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7208 && (! MOVE_BY_PIECES_P
7209 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7210 TYPE_ALIGN (type)))
7211 && ((TREE_CODE (type) == VECTOR_TYPE
7212 && !is_zeros_p (exp))
7213 || ! mostly_zeros_p (exp)))))
7214 || ((modifier == EXPAND_INITIALIZER
7215 || modifier == EXPAND_CONST_ADDRESS)
7216 && TREE_CONSTANT (exp)))
7218 rtx constructor = output_constant_def (exp, 1);
7220 if (modifier != EXPAND_CONST_ADDRESS
7221 && modifier != EXPAND_INITIALIZER
7222 && modifier != EXPAND_SUM)
7223 constructor = validize_mem (constructor);
7225 return constructor;
7227 else
7229 /* Handle calls that pass values in multiple non-contiguous
7230 locations. The Irix 6 ABI has examples of this. */
7231 if (target == 0 || ! safe_from_p (target, exp, 1)
7232 || GET_CODE (target) == PARALLEL
7233 || modifier == EXPAND_STACK_PARM)
7234 target
7235 = assign_temp (build_qualified_type (type,
7236 (TYPE_QUALS (type)
7237 | (TREE_READONLY (exp)
7238 * TYPE_QUAL_CONST))),
7239 0, TREE_ADDRESSABLE (exp), 1);
7241 store_constructor (exp, target, 0, int_expr_size (exp));
7242 return target;
7245 case INDIRECT_REF:
7247 tree exp1 = TREE_OPERAND (exp, 0);
7248 tree index;
7249 tree string = string_constant (exp1, &index);
7251 /* Try to optimize reads from const strings. */
7252 if (string
7253 && TREE_CODE (string) == STRING_CST
7254 && TREE_CODE (index) == INTEGER_CST
7255 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7256 && GET_MODE_CLASS (mode) == MODE_INT
7257 && GET_MODE_SIZE (mode) == 1
7258 && modifier != EXPAND_WRITE)
7259 return gen_int_mode (TREE_STRING_POINTER (string)
7260 [TREE_INT_CST_LOW (index)], mode);
7262 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7263 op0 = memory_address (mode, op0);
7264 temp = gen_rtx_MEM (mode, op0);
7265 set_mem_attributes (temp, exp, 0);
7267 /* If we are writing to this object and its type is a record with
7268 readonly fields, we must mark it as readonly so it will
7269 conflict with readonly references to those fields. */
7270 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7271 RTX_UNCHANGING_P (temp) = 1;
7273 return temp;
7276 case ARRAY_REF:
7277 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7278 abort ();
7281 tree array = TREE_OPERAND (exp, 0);
7282 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7283 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7284 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7285 HOST_WIDE_INT i;
7287 /* Optimize the special-case of a zero lower bound.
7289 We convert the low_bound to sizetype to avoid some problems
7290 with constant folding. (E.g. suppose the lower bound is 1,
7291 and its mode is QI. Without the conversion, (ARRAY
7292 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7293 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7295 if (! integer_zerop (low_bound))
7296 index = size_diffop (index, convert (sizetype, low_bound));
7298 /* Fold an expression like: "foo"[2].
7299 This is not done in fold so it won't happen inside &.
7300 Don't fold if this is for wide characters since it's too
7301 difficult to do correctly and this is a very rare case. */
7303 if (modifier != EXPAND_CONST_ADDRESS
7304 && modifier != EXPAND_INITIALIZER
7305 && modifier != EXPAND_MEMORY
7306 && TREE_CODE (array) == STRING_CST
7307 && TREE_CODE (index) == INTEGER_CST
7308 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7309 && GET_MODE_CLASS (mode) == MODE_INT
7310 && GET_MODE_SIZE (mode) == 1)
7311 return gen_int_mode (TREE_STRING_POINTER (array)
7312 [TREE_INT_CST_LOW (index)], mode);
7314 /* If this is a constant index into a constant array,
7315 just get the value from the array. Handle both the cases when
7316 we have an explicit constructor and when our operand is a variable
7317 that was declared const. */
7319 if (modifier != EXPAND_CONST_ADDRESS
7320 && modifier != EXPAND_INITIALIZER
7321 && modifier != EXPAND_MEMORY
7322 && TREE_CODE (array) == CONSTRUCTOR
7323 && ! TREE_SIDE_EFFECTS (array)
7324 && TREE_CODE (index) == INTEGER_CST
7325 && 0 > compare_tree_int (index,
7326 list_length (CONSTRUCTOR_ELTS
7327 (TREE_OPERAND (exp, 0)))))
7329 tree elem;
7331 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7332 i = TREE_INT_CST_LOW (index);
7333 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7336 if (elem)
7337 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7338 modifier);
7341 else if (optimize >= 1
7342 && modifier != EXPAND_CONST_ADDRESS
7343 && modifier != EXPAND_INITIALIZER
7344 && modifier != EXPAND_MEMORY
7345 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7346 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7347 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7349 if (TREE_CODE (index) == INTEGER_CST)
7351 tree init = DECL_INITIAL (array);
7353 if (TREE_CODE (init) == CONSTRUCTOR)
7355 tree elem;
7357 for (elem = CONSTRUCTOR_ELTS (init);
7358 (elem
7359 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7360 elem = TREE_CHAIN (elem))
7363 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7364 return expand_expr (fold (TREE_VALUE (elem)), target,
7365 tmode, modifier);
7367 else if (TREE_CODE (init) == STRING_CST
7368 && 0 > compare_tree_int (index,
7369 TREE_STRING_LENGTH (init)))
7371 tree type = TREE_TYPE (TREE_TYPE (init));
7372 enum machine_mode mode = TYPE_MODE (type);
7374 if (GET_MODE_CLASS (mode) == MODE_INT
7375 && GET_MODE_SIZE (mode) == 1)
7376 return gen_int_mode (TREE_STRING_POINTER (init)
7377 [TREE_INT_CST_LOW (index)], mode);
7382 goto normal_inner_ref;
7384 case COMPONENT_REF:
7385 /* If the operand is a CONSTRUCTOR, we can just extract the
7386 appropriate field if it is present. */
7387 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7389 tree elt;
7391 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7392 elt = TREE_CHAIN (elt))
7393 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7394 /* We can normally use the value of the field in the
7395 CONSTRUCTOR. However, if this is a bitfield in
7396 an integral mode that we can fit in a HOST_WIDE_INT,
7397 we must mask only the number of bits in the bitfield,
7398 since this is done implicitly by the constructor. If
7399 the bitfield does not meet either of those conditions,
7400 we can't do this optimization. */
7401 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7402 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7403 == MODE_INT)
7404 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7405 <= HOST_BITS_PER_WIDE_INT))))
7407 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7408 && modifier == EXPAND_STACK_PARM)
7409 target = 0;
7410 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7411 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7413 HOST_WIDE_INT bitsize
7414 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7415 enum machine_mode imode
7416 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7418 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7420 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7421 op0 = expand_and (imode, op0, op1, target);
7423 else
7425 tree count
7426 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7429 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7430 target, 0);
7431 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7432 target, 0);
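/* At this point OP0 holds the bitfield value: unsigned fields were masked
   with (1 << bitsize) - 1 above, and signed fields were sign-extended by
   the shift pair (e.g. a 3-bit field in a 32-bit mode is shifted left and
   then arithmetically right by 29). */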
7436 return op0;
7439 goto normal_inner_ref;
7441 case BIT_FIELD_REF:
7442 case ARRAY_RANGE_REF:
7443 normal_inner_ref:
7445 enum machine_mode mode1;
7446 HOST_WIDE_INT bitsize, bitpos;
7447 tree offset;
7448 int volatilep = 0;
7449 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7450 &mode1, &unsignedp, &volatilep);
7451 rtx orig_op0;
7453 /* If we got back the original object, something is wrong. Perhaps
7454 we are evaluating an expression too early. In any event, don't
7455 infinitely recurse. */
7456 if (tem == exp)
7457 abort ();
7459 /* If TEM's type is a union of variable size, pass TARGET to the inner
7460 computation, since it will need a temporary and TARGET is known
7461 to have to do. This occurs in unchecked conversion in Ada. */
7463 orig_op0 = op0
7464 = expand_expr (tem,
7465 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7466 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7467 != INTEGER_CST)
7468 && modifier != EXPAND_STACK_PARM
7469 ? target : NULL_RTX),
7470 VOIDmode,
7471 (modifier == EXPAND_INITIALIZER
7472 || modifier == EXPAND_CONST_ADDRESS
7473 || modifier == EXPAND_STACK_PARM)
7474 ? modifier : EXPAND_NORMAL);
7476 /* If this is a constant, put it into a register if it is a
7477 legitimate constant and OFFSET is 0; otherwise put it into memory. */
7478 if (CONSTANT_P (op0))
7480 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7481 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7482 && offset == 0)
7483 op0 = force_reg (mode, op0);
7484 else
7485 op0 = validize_mem (force_const_mem (mode, op0));
7488 /* Otherwise, if this object is not in memory and we either have an
7489 offset or a BLKmode result, put it there. This case can't occur in
7490 C, but can in Ada if we have unchecked conversion of an expression
7491 from a scalar type to an array or record type or for an
7492 ARRAY_RANGE_REF whose type is BLKmode. */
7493 else if (GET_CODE (op0) != MEM
7494 && (offset != 0
7495 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7497 /* If the operand is a SAVE_EXPR, we can deal with this by
7498 forcing the SAVE_EXPR into memory. */
7499 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7501 put_var_into_stack (TREE_OPERAND (exp, 0),
7502 /*rescan=*/true);
7503 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7505 else
7507 tree nt
7508 = build_qualified_type (TREE_TYPE (tem),
7509 (TYPE_QUALS (TREE_TYPE (tem))
7510 | TYPE_QUAL_CONST));
7511 rtx memloc = assign_temp (nt, 1, 1, 1);
7513 emit_move_insn (memloc, op0);
7514 op0 = memloc;
7518 if (offset != 0)
7520 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7521 EXPAND_SUM);
7523 if (GET_CODE (op0) != MEM)
7524 abort ();
7526 #ifdef POINTERS_EXTEND_UNSIGNED
7527 if (GET_MODE (offset_rtx) != Pmode)
7528 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7529 #else
7530 if (GET_MODE (offset_rtx) != ptr_mode)
7531 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7532 #endif
7534 /* A constant address in OP0 can have VOIDmode, so we must not try
7535 to call force_reg for that case. Avoid that case. */
7536 if (GET_CODE (op0) == MEM
7537 && GET_MODE (op0) == BLKmode
7538 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7539 && bitsize != 0
7540 && (bitpos % bitsize) == 0
7541 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7542 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7544 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7545 bitpos = 0;
7548 op0 = offset_address (op0, offset_rtx,
7549 highest_pow2_factor (offset));
7552 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7553 record its alignment as BIGGEST_ALIGNMENT. */
7554 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7555 && is_aligning_offset (offset, tem))
7556 set_mem_align (op0, BIGGEST_ALIGNMENT);
7558 /* Don't forget about volatility even if this is a bitfield. */
7559 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7561 if (op0 == orig_op0)
7562 op0 = copy_rtx (op0);
7564 MEM_VOLATILE_P (op0) = 1;
7567 /* The following code doesn't handle CONCAT.
7568 Assume only bitpos == 0 can be used for CONCAT, due to
7569 one-element arrays having the same mode as their element. */
7570 if (GET_CODE (op0) == CONCAT)
7572 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7573 abort ();
7574 return op0;
7577 /* In cases where an aligned union has an unaligned object
7578 as a field, we might be extracting a BLKmode value from
7579 an integer-mode (e.g., SImode) object. Handle this case
7580 by doing the extract into an object as wide as the field
7581 (which we know to be the width of a basic mode), then
7582 storing into memory, and changing the mode to BLKmode. */
7583 if (mode1 == VOIDmode
7584 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7585 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7586 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7587 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7588 && modifier != EXPAND_CONST_ADDRESS
7589 && modifier != EXPAND_INITIALIZER)
7590 /* If the field isn't aligned enough to fetch as a memref,
7591 fetch it as a bit field. */
7592 || (mode1 != BLKmode
7593 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7594 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7595 && ((modifier == EXPAND_CONST_ADDRESS
7596 || modifier == EXPAND_INITIALIZER)
7597 ? STRICT_ALIGNMENT
7598 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7599 || (bitpos % BITS_PER_UNIT != 0)))
7600 /* If the type and the field are a constant size and the
7601 size of the type isn't the same size as the bitfield,
7602 we must use bitfield operations. */
7603 || (bitsize >= 0
7604 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7605 == INTEGER_CST)
7606 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7607 bitsize)))
7609 enum machine_mode ext_mode = mode;
7611 if (ext_mode == BLKmode
7612 && ! (target != 0 && GET_CODE (op0) == MEM
7613 && GET_CODE (target) == MEM
7614 && bitpos % BITS_PER_UNIT == 0))
7615 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7617 if (ext_mode == BLKmode)
7619 /* In this case, BITPOS must start at a byte boundary and
7620 TARGET, if specified, must be a MEM. */
7621 if (GET_CODE (op0) != MEM
7622 || (target != 0 && GET_CODE (target) != MEM)
7623 || bitpos % BITS_PER_UNIT != 0)
7624 abort ();
7626 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7627 if (target == 0)
7628 target = assign_temp (type, 0, 1, 1);
7630 emit_block_move (target, op0,
7631 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7632 / BITS_PER_UNIT),
7633 (modifier == EXPAND_STACK_PARM
7634 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7636 return target;
7639 op0 = validize_mem (op0);
7641 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7642 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7644 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7645 (modifier == EXPAND_STACK_PARM
7646 ? NULL_RTX : target),
7647 ext_mode, ext_mode,
7648 int_size_in_bytes (TREE_TYPE (tem)));
7650 /* If the result is a record type and BITSIZE is narrower than
7651 the mode of OP0, an integral mode, and this is a big endian
7652 machine, we must put the field into the high-order bits. */
7653 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7654 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7655 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7656 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7657 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7658 - bitsize),
7659 op0, 1);
7661 if (mode == BLKmode)
7663 rtx new = assign_temp (build_qualified_type
7664 ((*lang_hooks.types.type_for_mode)
7665 (ext_mode, 0),
7666 TYPE_QUAL_CONST), 0, 1, 1);
7668 emit_move_insn (new, op0);
7669 op0 = copy_rtx (new);
7670 PUT_MODE (op0, BLKmode);
7671 set_mem_attributes (op0, exp, 1);
7674 return op0;
7677 /* If the result is BLKmode, use that to access the object
7678 now as well. */
7679 if (mode == BLKmode)
7680 mode1 = BLKmode;
7682 /* Get a reference to just this component. */
7683 if (modifier == EXPAND_CONST_ADDRESS
7684 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7685 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7686 else
7687 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7689 if (op0 == orig_op0)
7690 op0 = copy_rtx (op0);
7692 set_mem_attributes (op0, exp, 0);
7693 if (GET_CODE (XEXP (op0, 0)) == REG)
7694 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7696 MEM_VOLATILE_P (op0) |= volatilep;
7697 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7698 || modifier == EXPAND_CONST_ADDRESS
7699 || modifier == EXPAND_INITIALIZER)
7700 return op0;
7701 else if (target == 0)
7702 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7704 convert_move (target, op0, unsignedp);
7705 return target;
7708 case VTABLE_REF:
7710 rtx insn, before = get_last_insn (), vtbl_ref;
7712 /* Evaluate the interior expression. */
7713 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7714 tmode, modifier);
7716 /* Get or create an instruction off which to hang a note. */
7717 if (REG_P (subtarget))
7719 target = subtarget;
7720 insn = get_last_insn ();
7721 if (insn == before)
7722 abort ();
7723 if (! INSN_P (insn))
7724 insn = prev_nonnote_insn (insn);
7726 else
7728 target = gen_reg_rtx (GET_MODE (subtarget));
7729 insn = emit_move_insn (target, subtarget);
7732 /* Collect the data for the note. */
7733 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7734 vtbl_ref = plus_constant (vtbl_ref,
7735 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7736 /* Discard the initial CONST that was added. */
7737 vtbl_ref = XEXP (vtbl_ref, 0);
7739 REG_NOTES (insn)
7740 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7742 return target;
7745 /* Intended for a reference to a buffer of a file-object in Pascal.
7746 But it's not certain that a special tree code will really be
7747 necessary for these. INDIRECT_REF might work for them. */
7748 case BUFFER_REF:
7749 abort ();
7751 case IN_EXPR:
7753 /* Pascal set IN expression.
7755 Algorithm:
7756 rlo = set_low - (set_low%bits_per_word);
7757 the_word = set [ (index - rlo)/bits_per_word ];
7758 bit_index = index % bits_per_word;
7759 bitmask = 1 << bit_index;
7760 return !!(the_word & bitmask); */
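/* The code below follows that outline, but works in BITS_PER_UNIT sized
   chunks addressed byte by byte: it range-checks INDEX against the set
   bounds at run time (jumping to the out-of-range label), computes the
   address of the byte holding the bit, shifts that bit down to position
   zero, and masks the result with 1. */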
7762 tree set = TREE_OPERAND (exp, 0);
7763 tree index = TREE_OPERAND (exp, 1);
7764 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7765 tree set_type = TREE_TYPE (set);
7766 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7767 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7768 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7769 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7770 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7771 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7772 rtx setaddr = XEXP (setval, 0);
7773 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7774 rtx rlow;
7775 rtx diff, quo, rem, addr, bit, result;
7777 /* If domain is empty, answer is no. Likewise if index is constant
7778 and out of bounds. */
7779 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7780 && TREE_CODE (set_low_bound) == INTEGER_CST
7781 && tree_int_cst_lt (set_high_bound, set_low_bound))
7782 || (TREE_CODE (index) == INTEGER_CST
7783 && TREE_CODE (set_low_bound) == INTEGER_CST
7784 && tree_int_cst_lt (index, set_low_bound))
7785 || (TREE_CODE (set_high_bound) == INTEGER_CST
7786 && TREE_CODE (index) == INTEGER_CST
7787 && tree_int_cst_lt (set_high_bound, index))))
7788 return const0_rtx;
7790 if (target == 0)
7791 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7793 /* If we get here, we have to generate the code for both cases
7794 (in range and out of range). */
7796 op0 = gen_label_rtx ();
7797 op1 = gen_label_rtx ();
7799 if (! (GET_CODE (index_val) == CONST_INT
7800 && GET_CODE (lo_r) == CONST_INT))
7801 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7802 GET_MODE (index_val), iunsignedp, op1);
7804 if (! (GET_CODE (index_val) == CONST_INT
7805 && GET_CODE (hi_r) == CONST_INT))
7806 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7807 GET_MODE (index_val), iunsignedp, op1);
7809 /* Calculate the element number of bit zero in the first word
7810 of the set. */
7811 if (GET_CODE (lo_r) == CONST_INT)
7812 rlow = GEN_INT (INTVAL (lo_r)
7813 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7814 else
7815 rlow = expand_binop (index_mode, and_optab, lo_r,
7816 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7817 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7819 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7820 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7822 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7823 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7824 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7825 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7827 addr = memory_address (byte_mode,
7828 expand_binop (index_mode, add_optab, diff,
7829 setaddr, NULL_RTX, iunsignedp,
7830 OPTAB_LIB_WIDEN));
7832 /* Extract the bit we want to examine. */
7833 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7834 gen_rtx_MEM (byte_mode, addr),
7835 make_tree (TREE_TYPE (index), rem),
7836 NULL_RTX, 1);
7837 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7838 GET_MODE (target) == byte_mode ? target : 0,
7839 1, OPTAB_LIB_WIDEN);
7841 if (result != target)
7842 convert_move (target, result, 1);
7844 /* Output the code to handle the out-of-range case. */
7845 emit_jump (op0);
7846 emit_label (op1);
7847 emit_move_insn (target, const0_rtx);
7848 emit_label (op0);
7849 return target;
7852 case WITH_CLEANUP_EXPR:
7853 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7855 WITH_CLEANUP_EXPR_RTL (exp)
7856 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7857 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7858 CLEANUP_EH_ONLY (exp));
7860 /* That's it for this cleanup. */
7861 TREE_OPERAND (exp, 1) = 0;
7863 return WITH_CLEANUP_EXPR_RTL (exp);
7865 case CLEANUP_POINT_EXPR:
7867 /* Start a new binding layer that will keep track of all cleanup
7868 actions to be performed. */
7869 expand_start_bindings (2);
7871 target_temp_slot_level = temp_slot_level;
7873 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7874 /* If we're going to use this value, load it up now. */
7875 if (! ignore)
7876 op0 = force_not_mem (op0);
7877 preserve_temp_slots (op0);
7878 expand_end_bindings (NULL_TREE, 0, 0);
7880 return op0;
7882 case CALL_EXPR:
7883 /* Check for a built-in function. */
7884 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7885 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7886 == FUNCTION_DECL)
7887 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7889 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7890 == BUILT_IN_FRONTEND)
7891 return (*lang_hooks.expand_expr) (exp, original_target,
7892 tmode, modifier);
7893 else
7894 return expand_builtin (exp, target, subtarget, tmode, ignore);
7897 return expand_call (exp, target, ignore);
7899 case NON_LVALUE_EXPR:
7900 case NOP_EXPR:
7901 case CONVERT_EXPR:
7902 case REFERENCE_EXPR:
7903 if (TREE_OPERAND (exp, 0) == error_mark_node)
7904 return const0_rtx;
7906 if (TREE_CODE (type) == UNION_TYPE)
7908 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7910 /* If both input and output are BLKmode, this conversion isn't doing
7911 anything except possibly changing memory attributes. */
7912 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7914 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7915 modifier);
7917 result = copy_rtx (result);
7918 set_mem_attributes (result, exp, 0);
7919 return result;
7922 if (target == 0)
7923 target = assign_temp (type, 0, 1, 1);
7925 if (GET_CODE (target) == MEM)
7926 /* Store data into beginning of memory target. */
7927 store_expr (TREE_OPERAND (exp, 0),
7928 adjust_address (target, TYPE_MODE (valtype), 0),
7929 modifier == EXPAND_STACK_PARM ? 2 : 0);
7931 else if (GET_CODE (target) == REG)
7932 /* Store this field into a union of the proper type. */
7933 store_field (target,
7934 MIN ((int_size_in_bytes (TREE_TYPE
7935 (TREE_OPERAND (exp, 0)))
7936 * BITS_PER_UNIT),
7937 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7938 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7939 VOIDmode, 0, type, 0);
7940 else
7941 abort ();
7943 /* Return the entire union. */
7944 return target;
7947 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7949 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7950 modifier);
7952 /* If the signedness of the conversion differs and OP0 is
7953 a promoted SUBREG, clear that indication since we now
7954 have to do the proper extension. */
7955 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7956 && GET_CODE (op0) == SUBREG)
7957 SUBREG_PROMOTED_VAR_P (op0) = 0;
7959 return op0;
7962 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7963 if (GET_MODE (op0) == mode)
7964 return op0;
7966 /* If OP0 is a constant, just convert it into the proper mode. */
7967 if (CONSTANT_P (op0))
7969 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7970 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7972 if (modifier == EXPAND_INITIALIZER)
7973 return simplify_gen_subreg (mode, op0, inner_mode,
7974 subreg_lowpart_offset (mode,
7975 inner_mode));
7976 else
7977 return convert_modes (mode, inner_mode, op0,
7978 TREE_UNSIGNED (inner_type));
7981 if (modifier == EXPAND_INITIALIZER)
7982 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7984 if (target == 0)
7985 return
7986 convert_to_mode (mode, op0,
7987 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7988 else
7989 convert_move (target, op0,
7990 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7991 return target;
7993 case VIEW_CONVERT_EXPR:
7994 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7996 /* If the input and output modes are both the same, we are done.
7997 Otherwise, if neither mode is BLKmode and both are integral and within
7998 a word, we can use gen_lowpart. If neither is true, make sure the
7999 operand is in memory and convert the MEM to the new mode. */
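/* For instance, viewing a 32-bit integer as an enumeration of the same
   width needs only gen_lowpart, while viewing a float as a 32-bit integer
   takes the memory path: unless the operand is already a MEM, it is
   spilled to a stack temporary and reread in the new mode. */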
8000 if (TYPE_MODE (type) == GET_MODE (op0))
8002 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8003 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8004 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
8005 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
8006 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
8007 op0 = gen_lowpart (TYPE_MODE (type), op0);
8008 else if (GET_CODE (op0) != MEM)
8010 /* If the operand is not a MEM, force it into memory. Since we
8011 are going to be changing the mode of the MEM, don't call
8012 force_const_mem for constants because we don't allow pool
8013 constants to change mode. */
8014 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8016 if (TREE_ADDRESSABLE (exp))
8017 abort ();
8019 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8020 target
8021 = assign_stack_temp_for_type
8022 (TYPE_MODE (inner_type),
8023 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8025 emit_move_insn (target, op0);
8026 op0 = target;
8029 /* At this point, OP0 is in the correct mode. If the output type is such
8030 that the operand is known to be aligned, indicate that it is.
8031 Otherwise, we need only be concerned about alignment for non-BLKmode
8032 results. */
8033 if (GET_CODE (op0) == MEM)
8035 op0 = copy_rtx (op0);
8037 if (TYPE_ALIGN_OK (type))
8038 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8039 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8040 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8042 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8043 HOST_WIDE_INT temp_size
8044 = MAX (int_size_in_bytes (inner_type),
8045 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8046 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8047 temp_size, 0, type);
8048 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8050 if (TREE_ADDRESSABLE (exp))
8051 abort ();
8053 if (GET_MODE (op0) == BLKmode)
8054 emit_block_move (new_with_op0_mode, op0,
8055 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8056 (modifier == EXPAND_STACK_PARM
8057 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8058 else
8059 emit_move_insn (new_with_op0_mode, op0);
8061 op0 = new;
8064 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8067 return op0;
8069 case PLUS_EXPR:
8070 this_optab = ! unsignedp && flag_trapv
8071 && (GET_MODE_CLASS (mode) == MODE_INT)
8072 ? addv_optab : add_optab;
8074 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8075 something else, make sure we add the register to the constant and
8076 then to the other thing. This case can occur during strength
8077 reduction and doing it this way will produce better code if the
8078 frame pointer or argument pointer is eliminated.
8080 fold-const.c will ensure that the constant is always in the inner
8081 PLUS_EXPR, so the only case we need to do anything about is if
8082 sp, ap, or fp is our second argument, in which case we must swap
8083 the innermost first argument and our second argument. */
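/* Concretely, (x + 4) + fp is rearranged into (fp + 4) + x, so the
   register and the constant are combined first; this folds cleanly when
   the frame or argument pointer is later eliminated. */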
8085 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8086 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8087 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8088 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8089 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8090 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8092 tree t = TREE_OPERAND (exp, 1);
8094 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8095 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8098 /* If the result is to be ptr_mode and we are adding an integer to
8099 something, we might be forming a constant. So try to use
8100 plus_constant. If it produces a sum and we can't accept it,
8101 use force_operand. This allows P = &ARR[const] to generate
8102 efficient code on machines where a SYMBOL_REF is not a valid
8103 address.
8105 If this is an EXPAND_SUM call, always return the sum. */
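/* For example, &arr[10] with 4-byte elements can be expanded under
   EXPAND_SUM as arr plus a byte offset of 40; plus_constant keeps that
   sum a compile-time constant (a CONST wrapping the SYMBOL_REF and the
   offset) instead of emitting an add instruction. */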
8106 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8107 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8109 if (modifier == EXPAND_STACK_PARM)
8110 target = 0;
8111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8112 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8113 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8115 rtx constant_part;
8117 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8118 EXPAND_SUM);
8119 /* Use immed_double_const to ensure that the constant is
8120 truncated according to the mode of OP1, then sign extended
8121 to a HOST_WIDE_INT. Using the constant directly can result
8122 in non-canonical RTL in a 64x32 cross compile. */
8123 constant_part
8124 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8125 (HOST_WIDE_INT) 0,
8126 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8127 op1 = plus_constant (op1, INTVAL (constant_part));
8128 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8129 op1 = force_operand (op1, target);
8130 return op1;
8133 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8134 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8135 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8137 rtx constant_part;
8139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8140 (modifier == EXPAND_INITIALIZER
8141 ? EXPAND_INITIALIZER : EXPAND_SUM));
8142 if (! CONSTANT_P (op0))
8144 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8145 VOIDmode, modifier);
8146 /* Return a PLUS if modifier says it's OK. */
8147 if (modifier == EXPAND_SUM
8148 || modifier == EXPAND_INITIALIZER)
8149 return simplify_gen_binary (PLUS, mode, op0, op1);
8150 goto binop2;
8152 /* Use immed_double_const to ensure that the constant is
8153 truncated according to the mode of OP1, then sign extended
8154 to a HOST_WIDE_INT. Using the constant directly can result
8155 in non-canonical RTL in a 64x32 cross compile. */
8156 constant_part
8157 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8158 (HOST_WIDE_INT) 0,
8159 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8160 op0 = plus_constant (op0, INTVAL (constant_part));
8161 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8162 op0 = force_operand (op0, target);
8163 return op0;
8167 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8168 subtarget = 0;
8170 /* No sense saving up arithmetic to be done
8171 if it's all in the wrong mode to form part of an address.
8172 And force_operand won't know whether to sign-extend or
8173 zero-extend. */
8174 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8175 || mode != ptr_mode)
8177 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8178 subtarget, &op0, &op1, 0);
8179 if (op0 == const0_rtx)
8180 return op1;
8181 if (op1 == const0_rtx)
8182 return op0;
8183 goto binop2;
8186 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8187 subtarget, &op0, &op1, modifier);
8188 return simplify_gen_binary (PLUS, mode, op0, op1);
8190 case MINUS_EXPR:
8191 /* For initializers, we are allowed to return a MINUS of two
8192 symbolic constants. Here we handle all cases when both operands
8193 are constant. */
8194 /* Handle difference of two symbolic constants,
8195 for the sake of an initializer. */
8196 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8197 && really_constant_p (TREE_OPERAND (exp, 0))
8198 && really_constant_p (TREE_OPERAND (exp, 1)))
8200 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8201 NULL_RTX, &op0, &op1, modifier);
8203 /* If the last operand is a CONST_INT, use plus_constant of
8204 the negated constant. Else make the MINUS. */
8205 if (GET_CODE (op1) == CONST_INT)
8206 return plus_constant (op0, - INTVAL (op1));
8207 else
8208 return gen_rtx_MINUS (mode, op0, op1);
8211 this_optab = ! unsignedp && flag_trapv
8212 && (GET_MODE_CLASS(mode) == MODE_INT)
8213 ? subv_optab : sub_optab;
8215 /* No sense saving up arithmetic to be done
8216 if it's all in the wrong mode to form part of an address.
8217 And force_operand won't know whether to sign-extend or
8218 zero-extend. */
8219 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8220 || mode != ptr_mode)
8221 goto binop;
8223 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8224 subtarget, &op0, &op1, modifier);
8226 /* Convert A - const to A + (-const). */
8227 if (GET_CODE (op1) == CONST_INT)
8229 op1 = negate_rtx (mode, op1);
8230 return simplify_gen_binary (PLUS, mode, op0, op1);
8233 goto binop2;
8235 case MULT_EXPR:
8236 /* If first operand is constant, swap them.
8237 Thus the following special case checks need only
8238 check the second operand. */
8239 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8241 tree t1 = TREE_OPERAND (exp, 0);
8242 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8243 TREE_OPERAND (exp, 1) = t1;
8246 /* Attempt to return something suitable for generating an
8247 indexed address, for machines that support that. */
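/* E.g. under EXPAND_SUM a ptr_mode product i * 4 is returned as
   (mult (reg i) (const_int 4)), which the caller can fold into an indexed
   address such as (plus (reg base) (mult (reg i) (const_int 4))) instead
   of materializing the product in a register. */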
8249 if (modifier == EXPAND_SUM && mode == ptr_mode
8250 && host_integerp (TREE_OPERAND (exp, 1), 0))
8252 tree exp1 = TREE_OPERAND (exp, 1);
8254 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8255 EXPAND_SUM);
8257 /* If we knew for certain that this is arithmetic for an array
8258 reference, and we knew the bounds of the array, then we could
8259 apply the distributive law across (PLUS X C) for constant C.
8260 Without such knowledge, we risk overflowing the computation
8261 when both X and C are large, but X+C isn't. */
8262 /* ??? Could perhaps special-case EXP being unsigned and C being
8263 positive. In that case we are certain that X+C is no smaller
8264 than X and so the transformed expression will overflow iff the
8265 original would have. */
8267 if (GET_CODE (op0) != REG)
8268 op0 = force_operand (op0, NULL_RTX);
8269 if (GET_CODE (op0) != REG)
8270 op0 = copy_to_mode_reg (mode, op0);
8272 return gen_rtx_MULT (mode, op0,
8273 gen_int_mode (tree_low_cst (exp1, 0),
8274 TYPE_MODE (TREE_TYPE (exp1))));
8277 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8278 subtarget = 0;
8280 if (modifier == EXPAND_STACK_PARM)
8281 target = 0;
8283 /* Check for multiplying things that have been extended
8284 from a narrower type. If this machine supports multiplying
8285 in that narrower type with a result in the desired type,
8286 do it that way, and avoid the explicit type-conversion. */
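/* For instance, (int) (short) a * (int) (short) b can become a single
   HImode x HImode -> SImode widening multiply on a target that provides
   such a pattern, skipping the two explicit extensions. */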
8287 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8288 && TREE_CODE (type) == INTEGER_TYPE
8289 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8290 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8291 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8292 && int_fits_type_p (TREE_OPERAND (exp, 1),
8293 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8294 /* Don't use a widening multiply if a shift will do. */
8295 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8296 > HOST_BITS_PER_WIDE_INT)
8297 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8299 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8300 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8302 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8303 /* If both operands are extended, they must either both
8304 be zero-extended or both be sign-extended. */
8305 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8307 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8309 enum machine_mode innermode
8310 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8311 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8312 ? smul_widen_optab : umul_widen_optab);
8313 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8314 ? umul_widen_optab : smul_widen_optab);
8315 if (mode == GET_MODE_WIDER_MODE (innermode))
8317 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8319 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8320 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8321 TREE_OPERAND (exp, 1),
8322 NULL_RTX, &op0, &op1, 0);
8323 else
8324 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8325 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8326 NULL_RTX, &op0, &op1, 0);
8327 goto binop2;
8329 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8330 && innermode == word_mode)
8332 rtx htem;
8333 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8334 NULL_RTX, VOIDmode, 0);
8335 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8336 op1 = convert_modes (innermode, mode,
8337 expand_expr (TREE_OPERAND (exp, 1),
8338 NULL_RTX, VOIDmode, 0),
8339 unsignedp);
8340 else
8341 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8342 NULL_RTX, VOIDmode, 0);
8343 temp = expand_binop (mode, other_optab, op0, op1, target,
8344 unsignedp, OPTAB_LIB_WIDEN);
8345 htem = expand_mult_highpart_adjust (innermode,
8346 gen_highpart (innermode, temp),
8347 op0, op1,
8348 gen_highpart (innermode, temp),
8349 unsignedp);
8350 emit_move_insn (gen_highpart (innermode, temp), htem);
8351 return temp;
8355 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8356 subtarget, &op0, &op1, 0);
8357 return expand_mult (mode, op0, op1, target, unsignedp);
8359 case TRUNC_DIV_EXPR:
8360 case FLOOR_DIV_EXPR:
8361 case CEIL_DIV_EXPR:
8362 case ROUND_DIV_EXPR:
8363 case EXACT_DIV_EXPR:
8364 if (modifier == EXPAND_STACK_PARM)
8365 target = 0;
8366 /* Possible optimization: compute the dividend with EXPAND_SUM;
8367 then, if the divisor is constant, we can optimize the case
8368 where some terms of the dividend have coefficients divisible by it. */
8369 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8370 subtarget, &op0, &op1, 0);
8371 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8373 case RDIV_EXPR:
8374 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8375 saving an expensive divide. If not, combine will rebuild the original
8376 computation. */
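/* E.g. several x[i] / y divisions by the same y become x[i] * (1.0 / y),
   leaving the reciprocal as a common subexpression to be shared. */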
8377 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8378 && TREE_CODE (type) == REAL_TYPE
8379 && !real_onep (TREE_OPERAND (exp, 0)))
8380 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8381 build (RDIV_EXPR, type,
8382 build_real (type, dconst1),
8383 TREE_OPERAND (exp, 1))),
8384 target, tmode, modifier);
8385 this_optab = sdiv_optab;
8386 goto binop;
8388 case TRUNC_MOD_EXPR:
8389 case FLOOR_MOD_EXPR:
8390 case CEIL_MOD_EXPR:
8391 case ROUND_MOD_EXPR:
8392 if (modifier == EXPAND_STACK_PARM)
8393 target = 0;
8394 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8395 subtarget, &op0, &op1, 0);
8396 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8398 case FIX_ROUND_EXPR:
8399 case FIX_FLOOR_EXPR:
8400 case FIX_CEIL_EXPR:
8401 abort (); /* Not used for C. */
8403 case FIX_TRUNC_EXPR:
8404 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8405 if (target == 0 || modifier == EXPAND_STACK_PARM)
8406 target = gen_reg_rtx (mode);
8407 expand_fix (target, op0, unsignedp);
8408 return target;
8410 case FLOAT_EXPR:
8411 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8412 if (target == 0 || modifier == EXPAND_STACK_PARM)
8413 target = gen_reg_rtx (mode);
8414 /* expand_float can't figure out what to do if FROM has VOIDmode.
8415 So give it the correct mode. With -O, cse will optimize this. */
8416 if (GET_MODE (op0) == VOIDmode)
8417 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8418 op0);
8419 expand_float (target, op0,
8420 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8421 return target;
8423 case NEGATE_EXPR:
8424 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8425 if (modifier == EXPAND_STACK_PARM)
8426 target = 0;
8427 temp = expand_unop (mode,
8428 ! unsignedp && flag_trapv
8429 && (GET_MODE_CLASS(mode) == MODE_INT)
8430 ? negv_optab : neg_optab, op0, target, 0);
8431 if (temp == 0)
8432 abort ();
8433 return temp;
8435 case ABS_EXPR:
8436 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8437 if (modifier == EXPAND_STACK_PARM)
8438 target = 0;
8440 /* ABS_EXPR is not valid for complex arguments. */
8441 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8442 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8443 abort ();
8445 /* Unsigned abs is simply the operand. Testing here means we don't
8446 risk generating incorrect code below. */
8447 if (TREE_UNSIGNED (type))
8448 return op0;
8450 return expand_abs (mode, op0, target, unsignedp,
8451 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8453 case MAX_EXPR:
8454 case MIN_EXPR:
8455 target = original_target;
8456 if (target == 0
8457 || modifier == EXPAND_STACK_PARM
8458 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8459 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8460 || GET_MODE (target) != mode
8461 || (GET_CODE (target) == REG
8462 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8463 target = gen_reg_rtx (mode);
8464 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8465 target, &op0, &op1, 0);
8467 /* First try to do it with a special MIN or MAX instruction.
8468 If that does not win, use a conditional jump to select the proper
8469 value. */
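/* The fallback sequence is roughly:
     target = op0;
     if (target >= op1)   (<= for MIN_EXPR)
       goto done;
     target = op1;
   done:
   with a word-by-word comparison used when the mode is too wide to
   compare in one instruction. */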
8470 this_optab = (TREE_UNSIGNED (type)
8471 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8472 : (code == MIN_EXPR ? smin_optab : smax_optab));
8474 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8475 OPTAB_WIDEN);
8476 if (temp != 0)
8477 return temp;
8479 /* At this point, a MEM target is no longer useful; we will get better
8480 code without it. */
8482 if (GET_CODE (target) == MEM)
8483 target = gen_reg_rtx (mode);
8485 if (target != op0)
8486 emit_move_insn (target, op0);
8488 op0 = gen_label_rtx ();
8490 /* If this mode is an integer too wide to compare properly,
8491 compare word by word. Rely on cse to optimize constant cases. */
8492 if (GET_MODE_CLASS (mode) == MODE_INT
8493 && ! can_compare_p (GE, mode, ccp_jump))
8495 if (code == MAX_EXPR)
8496 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8497 target, op1, NULL_RTX, op0);
8498 else
8499 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8500 op1, target, NULL_RTX, op0);
8502 else
8504 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8505 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8506 unsignedp, mode, NULL_RTX, NULL_RTX,
8507 op0);
8509 emit_move_insn (target, op1);
8510 emit_label (op0);
8511 return target;
8513 case BIT_NOT_EXPR:
8514 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8515 if (modifier == EXPAND_STACK_PARM)
8516 target = 0;
8517 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8518 if (temp == 0)
8519 abort ();
8520 return temp;
8522 /* ??? Can optimize bitwise operations with one arg constant.
8523 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8524 and (a bitwise1 b) bitwise2 b (etc)
8525 but that is probably not worthwhile. */
8527 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8528 boolean values when we want in all cases to compute both of them. In
8529 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8530 as actual zero-or-1 values and then bitwise anding. In cases where
8531 there cannot be any side effects, better code would be made by
8532 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8533 how to recognize those cases. */
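/* So (a != 0) & (b != 0), or its TRUTH_AND_EXPR form, evaluates both
   comparisons to 0-or-1 values and ANDs them, whereas TRUTH_ANDIF_EXPR
   (a && b) would branch around the evaluation of the second operand. */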
8535 case TRUTH_AND_EXPR:
8536 case BIT_AND_EXPR:
8537 this_optab = and_optab;
8538 goto binop;
8540 case TRUTH_OR_EXPR:
8541 case BIT_IOR_EXPR:
8542 this_optab = ior_optab;
8543 goto binop;
8545 case TRUTH_XOR_EXPR:
8546 case BIT_XOR_EXPR:
8547 this_optab = xor_optab;
8548 goto binop;
8550 case LSHIFT_EXPR:
8551 case RSHIFT_EXPR:
8552 case LROTATE_EXPR:
8553 case RROTATE_EXPR:
8554 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8555 subtarget = 0;
8556 if (modifier == EXPAND_STACK_PARM)
8557 target = 0;
8558 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8559 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8560 unsignedp);
8562 /* Could determine the answer when only additive constants differ. Also,
8563 the addition of one can be handled by changing the condition. */
8564 case LT_EXPR:
8565 case LE_EXPR:
8566 case GT_EXPR:
8567 case GE_EXPR:
8568 case EQ_EXPR:
8569 case NE_EXPR:
8570 case UNORDERED_EXPR:
8571 case ORDERED_EXPR:
8572 case UNLT_EXPR:
8573 case UNLE_EXPR:
8574 case UNGT_EXPR:
8575 case UNGE_EXPR:
8576 case UNEQ_EXPR:
8577 temp = do_store_flag (exp,
8578 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8579 tmode != VOIDmode ? tmode : mode, 0);
8580 if (temp != 0)
8581 return temp;
8583 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8584 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8585 && original_target
8586 && GET_CODE (original_target) == REG
8587 && (GET_MODE (original_target)
8588 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8590 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8591 VOIDmode, 0);
8593 /* If temp is constant, we can just compute the result. */
8594 if (GET_CODE (temp) == CONST_INT)
8596 if (INTVAL (temp) != 0)
8597 emit_move_insn (target, const1_rtx);
8598 else
8599 emit_move_insn (target, const0_rtx);
8601 return target;
8604 if (temp != original_target)
8606 enum machine_mode mode1 = GET_MODE (temp);
8607 if (mode1 == VOIDmode)
8608 mode1 = tmode != VOIDmode ? tmode : mode;
8610 temp = copy_to_mode_reg (mode1, temp);
8613 op1 = gen_label_rtx ();
8614 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8615 GET_MODE (temp), unsignedp, op1);
8616 emit_move_insn (temp, const1_rtx);
8617 emit_label (op1);
8618 return temp;
8621 /* If no set-flag instruction, must generate a conditional
8622 store into a temporary variable. Drop through
8623 and handle this like && and ||. */
8625 case TRUTH_ANDIF_EXPR:
8626 case TRUTH_ORIF_EXPR:
8627 if (! ignore
8628 && (target == 0
8629 || modifier == EXPAND_STACK_PARM
8630 || ! safe_from_p (target, exp, 1)
8631 /* Make sure we don't have a hard reg (such as function's return
8632 value) live across basic blocks, if not optimizing. */
8633 || (!optimize && GET_CODE (target) == REG
8634 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8635 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8637 if (target)
8638 emit_clr_insn (target);
8640 op1 = gen_label_rtx ();
8641 jumpifnot (exp, op1);
8643 if (target)
8644 emit_0_to_1_insn (target);
8646 emit_label (op1);
8647 return ignore ? const0_rtx : target;
8649 case TRUTH_NOT_EXPR:
8650 if (modifier == EXPAND_STACK_PARM)
8651 target = 0;
8652 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8653 /* The parser is careful to generate TRUTH_NOT_EXPR
8654 only with operands that are always zero or one. */
8655 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8656 target, 1, OPTAB_LIB_WIDEN);
8657 if (temp == 0)
8658 abort ();
8659 return temp;
8661 case COMPOUND_EXPR:
8662 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8663 emit_queue ();
8664 return expand_expr (TREE_OPERAND (exp, 1),
8665 (ignore ? const0_rtx : target),
8666 VOIDmode, modifier);
8668 case COND_EXPR:
8669 /* If we would have a "singleton" (see below) were it not for a
8670 conversion in each arm, bring that conversion back out. */
8671 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8672 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8673 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8674 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8676 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8677 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8679 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8680 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8681 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8682 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8683 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8684 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8685 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8686 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8687 return expand_expr (build1 (NOP_EXPR, type,
8688 build (COND_EXPR, TREE_TYPE (iftrue),
8689 TREE_OPERAND (exp, 0),
8690 iftrue, iffalse)),
8691 target, tmode, modifier);
8695 /* Note that COND_EXPRs whose type is a structure or union
8696 are required to be constructed to contain assignments of
8697 a temporary variable, so that we can evaluate them here
8698 for side effect only. If type is void, we must do likewise. */
8700 /* If an arm of the branch requires a cleanup,
8701 only that cleanup is performed. */
8703 tree singleton = 0;
8704 tree binary_op = 0, unary_op = 0;
8706 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8707 convert it to our mode, if necessary. */
8708 if (integer_onep (TREE_OPERAND (exp, 1))
8709 && integer_zerop (TREE_OPERAND (exp, 2))
8710 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8712 if (ignore)
8714 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8715 modifier);
8716 return const0_rtx;
8719 if (modifier == EXPAND_STACK_PARM)
8720 target = 0;
8721 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8722 if (GET_MODE (op0) == mode)
8723 return op0;
8725 if (target == 0)
8726 target = gen_reg_rtx (mode);
8727 convert_move (target, op0, unsignedp);
8728 return target;
8731 /* Check for X ? A + B : A. If we have this, we can copy A to the
8732 output and conditionally add B. Similarly for unary operations.
8733 Don't do this if X has side-effects because those side effects
8734 might affect A or B and the "?" operation is a sequence point in
8735 ANSI. (operand_equal_p tests for side effects.) */
8737 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8738 && operand_equal_p (TREE_OPERAND (exp, 2),
8739 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8740 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8741 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8742 && operand_equal_p (TREE_OPERAND (exp, 1),
8743 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8744 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8745 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8746 && operand_equal_p (TREE_OPERAND (exp, 2),
8747 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8748 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8749 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8750 && operand_equal_p (TREE_OPERAND (exp, 1),
8751 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8752 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8754 /* If we are not to produce a result, we have no target. Otherwise,
8755 if a target was specified use it; it will not be used as an
8756 intermediate target unless it is safe. If no target, use a
8757 temporary. */
8759 if (ignore)
8760 temp = 0;
8761 else if (modifier == EXPAND_STACK_PARM)
8762 temp = assign_temp (type, 0, 0, 1);
8763 else if (original_target
8764 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8765 || (singleton && GET_CODE (original_target) == REG
8766 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8767 && original_target == var_rtx (singleton)))
8768 && GET_MODE (original_target) == mode
8769 #ifdef HAVE_conditional_move
8770 && (! can_conditionally_move_p (mode)
8771 || GET_CODE (original_target) == REG
8772 || TREE_ADDRESSABLE (type))
8773 #endif
8774 && (GET_CODE (original_target) != MEM
8775 || TREE_ADDRESSABLE (type)))
8776 temp = original_target;
8777 else if (TREE_ADDRESSABLE (type))
8778 abort ();
8779 else
8780 temp = assign_temp (type, 0, 0, 1);
8782 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8783 do the test of X as a store-flag operation, do this as
8784 A + ((X != 0) << log C). Similarly for other simple binary
8785 operators. Only do for C == 1 if BRANCH_COST is low. */
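/* E.g. x ? a + 8 : a becomes a + ((x != 0) << 3), trading the branch for
   a store-flag and a shift. */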
8786 if (temp && singleton && binary_op
8787 && (TREE_CODE (binary_op) == PLUS_EXPR
8788 || TREE_CODE (binary_op) == MINUS_EXPR
8789 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8790 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8791 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8792 : integer_onep (TREE_OPERAND (binary_op, 1)))
8793 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8795 rtx result;
8796 tree cond;
8797 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8798 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8799 ? addv_optab : add_optab)
8800 : TREE_CODE (binary_op) == MINUS_EXPR
8801 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8802 ? subv_optab : sub_optab)
8803 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8804 : xor_optab);
8806 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8807 if (singleton == TREE_OPERAND (exp, 1))
8808 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8809 else
8810 cond = TREE_OPERAND (exp, 0);
8812 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8813 ? temp : NULL_RTX),
8814 mode, BRANCH_COST <= 1);
8816 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8817 result = expand_shift (LSHIFT_EXPR, mode, result,
8818 build_int_2 (tree_log2
8819 (TREE_OPERAND
8820 (binary_op, 1)),
8822 (safe_from_p (temp, singleton, 1)
8823 ? temp : NULL_RTX), 0);
8825 if (result)
8827 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8828 return expand_binop (mode, boptab, op1, result, temp,
8829 unsignedp, OPTAB_LIB_WIDEN);
8833 do_pending_stack_adjust ();
8834 NO_DEFER_POP;
8835 op0 = gen_label_rtx ();
8837 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8839 if (temp != 0)
8841 /* If the target conflicts with the other operand of the
8842 binary op, we can't use it. Also, we can't use the target
8843 if it is a hard register, because evaluating the condition
8844 might clobber it. */
8845 if ((binary_op
8846 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8847 || (GET_CODE (temp) == REG
8848 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8849 temp = gen_reg_rtx (mode);
8850 store_expr (singleton, temp,
8851 modifier == EXPAND_STACK_PARM ? 2 : 0);
8853 else
8854 expand_expr (singleton,
8855 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8856 if (singleton == TREE_OPERAND (exp, 1))
8857 jumpif (TREE_OPERAND (exp, 0), op0);
8858 else
8859 jumpifnot (TREE_OPERAND (exp, 0), op0);
8861 start_cleanup_deferral ();
8862 if (binary_op && temp == 0)
8863 /* Just touch the other operand. */
8864 expand_expr (TREE_OPERAND (binary_op, 1),
8865 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8866 else if (binary_op)
8867 store_expr (build (TREE_CODE (binary_op), type,
8868 make_tree (type, temp),
8869 TREE_OPERAND (binary_op, 1)),
8870 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8871 else
8872 store_expr (build1 (TREE_CODE (unary_op), type,
8873 make_tree (type, temp)),
8874 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8875 op1 = op0;
8877 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8878 comparison operator. If we have one of these cases, set the
8879 output to A, branch on A (cse will merge these two references),
8880 then set the output to FOO. */
8881 else if (temp
8882 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8883 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8884 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8885 TREE_OPERAND (exp, 1), 0)
8886 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8887 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8888 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8890 if (GET_CODE (temp) == REG
8891 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8892 temp = gen_reg_rtx (mode);
8893 store_expr (TREE_OPERAND (exp, 1), temp,
8894 modifier == EXPAND_STACK_PARM ? 2 : 0);
8895 jumpif (TREE_OPERAND (exp, 0), op0);
8897 start_cleanup_deferral ();
8898 store_expr (TREE_OPERAND (exp, 2), temp,
8899 modifier == EXPAND_STACK_PARM ? 2 : 0);
8900 op1 = op0;
8902 else if (temp
8903 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8904 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8905 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8906 TREE_OPERAND (exp, 2), 0)
8907 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8908 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8909 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8911 if (GET_CODE (temp) == REG
8912 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8913 temp = gen_reg_rtx (mode);
8914 store_expr (TREE_OPERAND (exp, 2), temp,
8915 modifier == EXPAND_STACK_PARM ? 2 : 0);
8916 jumpifnot (TREE_OPERAND (exp, 0), op0);
8918 start_cleanup_deferral ();
8919 store_expr (TREE_OPERAND (exp, 1), temp,
8920 modifier == EXPAND_STACK_PARM ? 2 : 0);
8921 op1 = op0;
8923 else
8925 op1 = gen_label_rtx ();
8926 jumpifnot (TREE_OPERAND (exp, 0), op0);
8928 start_cleanup_deferral ();
8930 /* One branch of the cond can be void, if it never returns. For
8931 example, A ? throw : E. */
8932 if (temp != 0
8933 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8934 store_expr (TREE_OPERAND (exp, 1), temp,
8935 modifier == EXPAND_STACK_PARM ? 2 : 0);
8936 else
8937 expand_expr (TREE_OPERAND (exp, 1),
8938 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8939 end_cleanup_deferral ();
8940 emit_queue ();
8941 emit_jump_insn (gen_jump (op1));
8942 emit_barrier ();
8943 emit_label (op0);
8944 start_cleanup_deferral ();
8945 if (temp != 0
8946 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8947 store_expr (TREE_OPERAND (exp, 2), temp,
8948 modifier == EXPAND_STACK_PARM ? 2 : 0);
8949 else
8950 expand_expr (TREE_OPERAND (exp, 2),
8951 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8954 end_cleanup_deferral ();
8956 emit_queue ();
8957 emit_label (op1);
8958 OK_DEFER_POP;
8960 return temp;
8963 case TARGET_EXPR:
8965 /* Something needs to be initialized, but we didn't know
8966 where that thing was when building the tree. For example,
8967 it could be the return value of a function, or a parameter
8968 to a function which is laid down on the stack, or a temporary
8969 variable which must be passed by reference.
8971 We guarantee that the expression will either be constructed
8972 or copied into our original target. */
8974 tree slot = TREE_OPERAND (exp, 0);
8975 tree cleanups = NULL_TREE;
8976 tree exp1;
8978 if (TREE_CODE (slot) != VAR_DECL)
8979 abort ();
8981 if (! ignore)
8982 target = original_target;
8984 /* Set this here so that if we get a target that refers to a
8985 register variable that's already been used, put_reg_into_stack
8986 knows that it should fix up those uses. */
8987 TREE_USED (slot) = 1;
8989 if (target == 0)
8991 if (DECL_RTL_SET_P (slot))
8993 target = DECL_RTL (slot);
8994 /* If we have already expanded the slot, don't do
8995 it again. (mrs) */
8996 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8997 return target;
8999 else
9001 target = assign_temp (type, 2, 0, 1);
9002 /* All temp slots at this level must not conflict. */
9003 preserve_temp_slots (target);
9004 SET_DECL_RTL (slot, target);
9005 if (TREE_ADDRESSABLE (slot))
9006 put_var_into_stack (slot, /*rescan=*/false);
9008 /* Since SLOT is not known to the called function
9009 to belong to its stack frame, we must build an explicit
9010 cleanup. This case occurs when we must build up a reference
9011 to pass the reference as an argument. In this case,
9012 it is very likely that such a reference need not be
9013 built here. */
9015 if (TREE_OPERAND (exp, 2) == 0)
9016 TREE_OPERAND (exp, 2)
9017 = (*lang_hooks.maybe_build_cleanup) (slot);
9018 cleanups = TREE_OPERAND (exp, 2);
9021 else
9023 /* This case does occur, when expanding a parameter which
9024 needs to be constructed on the stack. The target
9025 is the actual stack address that we want to initialize.
9026 The function we call will perform the cleanup in this case. */
9028 /* If we have already assigned it space, use that space,
9029 not the target that we were passed in, as our target
9030 parameter is only a hint. */
9031 if (DECL_RTL_SET_P (slot))
9033 target = DECL_RTL (slot);
9034 /* If we have already expanded the slot, don't do
9035 it again. (mrs) */
9036 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9037 return target;
9039 else
9041 SET_DECL_RTL (slot, target);
9042 /* If we must have an addressable slot, then make sure that
9043 the RTL that we just stored in slot is OK. */
9044 if (TREE_ADDRESSABLE (slot))
9045 put_var_into_stack (slot, /*rescan=*/true);
9049 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9050 /* Mark it as expanded. */
9051 TREE_OPERAND (exp, 1) = NULL_TREE;
9053 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9055 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9057 return target;
9060 case INIT_EXPR:
9062 tree lhs = TREE_OPERAND (exp, 0);
9063 tree rhs = TREE_OPERAND (exp, 1);
9065 temp = expand_assignment (lhs, rhs, ! ignore);
9066 return temp;
9069 case MODIFY_EXPR:
9071 /* If lhs is complex, expand calls in rhs before computing it.
9072 That's so we don't compute a pointer and save it over a
9073 call. If lhs is simple, compute it first so we can give it
9074 as a target if the rhs is just a call. This avoids an
9075 extra temp and copy, and prevents a partial subsumption
9076 which makes bad code. Actually we could treat
9077 component_ref's of vars like vars. */
9079 tree lhs = TREE_OPERAND (exp, 0);
9080 tree rhs = TREE_OPERAND (exp, 1);
9082 temp = 0;
9084 /* Check for |= or &= of a bitfield of size one into another bitfield
9085 of size 1. In this case, (unless we need the result of the
9086 assignment) we can do this more efficiently with a
9087 test followed by an assignment, if necessary.
9089 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9090 things change so we do, this code should be enhanced to
9091 support it. */
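/* For example, for `x.a |= y.b' where A and B are both one-bit fields, we
   jump around the store when Y.B leaves X.A unchanged (Y.B == 0 for `|=',
   Y.B == 1 for `&=') and otherwise store the constant 1 or 0 directly
   into X.A.  */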
9092 if (ignore
9093 && TREE_CODE (lhs) == COMPONENT_REF
9094 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9095 || TREE_CODE (rhs) == BIT_AND_EXPR)
9096 && TREE_OPERAND (rhs, 0) == lhs
9097 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9098 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9099 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9101 rtx label = gen_label_rtx ();
9103 do_jump (TREE_OPERAND (rhs, 1),
9104 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9105 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9106 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9107 (TREE_CODE (rhs) == BIT_IOR_EXPR
9108 ? integer_one_node
9109 : integer_zero_node)),
9111 do_pending_stack_adjust ();
9112 emit_label (label);
9113 return const0_rtx;
9116 temp = expand_assignment (lhs, rhs, ! ignore);
9118 return temp;
9121 case RETURN_EXPR:
9122 if (!TREE_OPERAND (exp, 0))
9123 expand_null_return ();
9124 else
9125 expand_return (TREE_OPERAND (exp, 0));
9126 return const0_rtx;
9128 case PREINCREMENT_EXPR:
9129 case PREDECREMENT_EXPR:
9130 return expand_increment (exp, 0, ignore);
9132 case POSTINCREMENT_EXPR:
9133 case POSTDECREMENT_EXPR:
9134 /* Faster to treat as pre-increment if result is not used. */
9135 return expand_increment (exp, ! ignore, ignore);
9137 case ADDR_EXPR:
9138 if (modifier == EXPAND_STACK_PARM)
9139 target = 0;
9140 /* Are we taking the address of a nested function? */
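/* For such a function we cannot simply use its entry point; we take the
   address of a trampoline, a small piece of code built at run time that
   loads the function's static chain and then jumps to the real code.  */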
9141 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9142 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9143 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9144 && ! TREE_STATIC (exp))
9146 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9147 op0 = force_operand (op0, target);
9149 /* If we are taking the address of something erroneous, just
9150 return a zero. */
9151 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9152 return const0_rtx;
9153 /* If we are taking the address of a constant and are at the
9154 top level, we have to use output_constant_def since we can't
9155 call force_const_mem at top level. */
9156 else if (cfun == 0
9157 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9158 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9159 == 'c')))
9160 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9161 else
9163 /* We make sure to pass const0_rtx down if we came in with
9164 ignore set, to avoid doing the cleanups twice for something. */
9165 op0 = expand_expr (TREE_OPERAND (exp, 0),
9166 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9167 (modifier == EXPAND_INITIALIZER
9168 ? modifier : EXPAND_CONST_ADDRESS));
9170 /* If we are going to ignore the result, OP0 will have been set
9171 to const0_rtx, so just return it. Don't get confused and
9172 think we are taking the address of the constant. */
9173 if (ignore)
9174 return op0;
9176 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9177 clever and return a REG when given a MEM. */
9178 op0 = protect_from_queue (op0, 1);
9180 /* We would like the object in memory. If it is a constant, we can
9181 have it be statically allocated into memory. For a non-constant,
9182 we need to allocate some memory and store the value into it. */
9184 if (CONSTANT_P (op0))
9185 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9186 op0);
9187 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9188 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9189 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9191 /* If the operand is a SAVE_EXPR, we can deal with this by
9192 forcing the SAVE_EXPR into memory. */
9193 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9195 put_var_into_stack (TREE_OPERAND (exp, 0),
9196 /*rescan=*/true);
9197 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9199 else
9201 /* If this object is in a register, it can't be BLKmode. */
9202 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9203 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9205 if (GET_CODE (op0) == PARALLEL)
9206 /* Handle calls that pass values in multiple
9207 non-contiguous locations. The Irix 6 ABI has examples
9208 of this. */
9209 emit_group_store (memloc, op0, inner_type,
9210 int_size_in_bytes (inner_type));
9211 else
9212 emit_move_insn (memloc, op0);
9214 op0 = memloc;
9218 if (GET_CODE (op0) != MEM)
9219 abort ();
9221 mark_temp_addr_taken (op0);
9222 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9224 op0 = XEXP (op0, 0);
9225 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9226 op0 = convert_memory_address (ptr_mode, op0);
9227 return op0;
9230 /* If OP0 is not aligned at least as much as the type requires, we
9231 need to make a temporary, copy OP0 to it, and take the address of
9232 the temporary. We want to use the alignment of the type, not of
9233 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9234 the test for BLKmode means that can't happen. The test for
9235 BLKmode is because we never make mis-aligned MEMs with
9236 non-BLKmode.
9238 We don't need to do this at all if the machine doesn't have
9239 strict alignment. */
9240 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9241 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9242 > MEM_ALIGN (op0))
9243 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9245 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9246 rtx new;
9248 if (TYPE_ALIGN_OK (inner_type))
9249 abort ();
9251 if (TREE_ADDRESSABLE (inner_type))
9253 /* We can't make a bitwise copy of this object, so fail. */
9254 error ("cannot take the address of an unaligned member");
9255 return const0_rtx;
9258 new = assign_stack_temp_for_type
9259 (TYPE_MODE (inner_type),
9260 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9261 : int_size_in_bytes (inner_type),
9262 1, build_qualified_type (inner_type,
9263 (TYPE_QUALS (inner_type)
9264 | TYPE_QUAL_CONST)));
9266 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9267 (modifier == EXPAND_STACK_PARM
9268 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9270 op0 = new;
9273 op0 = force_operand (XEXP (op0, 0), target);
9276 if (flag_force_addr
9277 && GET_CODE (op0) != REG
9278 && modifier != EXPAND_CONST_ADDRESS
9279 && modifier != EXPAND_INITIALIZER
9280 && modifier != EXPAND_SUM)
9281 op0 = force_reg (Pmode, op0);
9283 if (GET_CODE (op0) == REG
9284 && ! REG_USERVAR_P (op0))
9285 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9287 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9288 op0 = convert_memory_address (ptr_mode, op0);
9290 return op0;
9292 case ENTRY_VALUE_EXPR:
9293 abort ();
9295 /* COMPLEX type for Extended Pascal & Fortran */
9296 case COMPLEX_EXPR:
9298 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9299 rtx insns;
9301 /* Get the rtx code of the operands. */
9302 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9303 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9305 if (! target)
9306 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9308 start_sequence ();
9310 /* Move the real (op0) and imaginary (op1) parts to their location. */
9311 emit_move_insn (gen_realpart (mode, target), op0);
9312 emit_move_insn (gen_imagpart (mode, target), op1);
9314 insns = get_insns ();
9315 end_sequence ();
9317 /* Complex construction should appear as a single unit. */
9318 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9319 each with a separate pseudo as destination.
9320 It's not correct for flow to treat them as a unit. */
9321 if (GET_CODE (target) != CONCAT)
9322 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9323 else
9324 emit_insn (insns);
9326 return target;
9329 case REALPART_EXPR:
9330 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9331 return gen_realpart (mode, op0);
9333 case IMAGPART_EXPR:
9334 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9335 return gen_imagpart (mode, op0);
9337 case CONJ_EXPR:
9339 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9340 rtx imag_t;
9341 rtx insns;
9343 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9345 if (! target)
9346 target = gen_reg_rtx (mode);
9348 start_sequence ();
9350 /* Store the realpart and the negated imagpart to target. */
9351 emit_move_insn (gen_realpart (partmode, target),
9352 gen_realpart (partmode, op0));
9354 imag_t = gen_imagpart (partmode, target);
9355 temp = expand_unop (partmode,
9356 ! unsignedp && flag_trapv
9357 && (GET_MODE_CLASS(partmode) == MODE_INT)
9358 ? negv_optab : neg_optab,
9359 gen_imagpart (partmode, op0), imag_t, 0);
9360 if (temp != imag_t)
9361 emit_move_insn (imag_t, temp);
9363 insns = get_insns ();
9364 end_sequence ();
9366 /* Conjugate should appear as a single unit.
9367 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9368 each with a separate pseudo as destination.
9369 It's not correct for flow to treat them as a unit. */
9370 if (GET_CODE (target) != CONCAT)
9371 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9372 else
9373 emit_insn (insns);
9375 return target;
9378 case TRY_CATCH_EXPR:
9380 tree handler = TREE_OPERAND (exp, 1);
9382 expand_eh_region_start ();
9384 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9386 expand_eh_region_end_cleanup (handler);
9388 return op0;
9391 case TRY_FINALLY_EXPR:
9393 tree try_block = TREE_OPERAND (exp, 0);
9394 tree finally_block = TREE_OPERAND (exp, 1);
9396 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9398 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9399 is not sufficient, so we cannot expand the block twice.
9400 So we play games with GOTO_SUBROUTINE_EXPR to let us
9401 expand the thing only once. */
9402 /* When not optimizing, we go ahead with this form since
9403 (1) user breakpoints operate more predictably without
9404 code duplication, and
9405 (2) we're not running any of the global optimizers
9406 that would explode in time/space with the highly
9407 connected CFG created by the indirect branching. */
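/* The cleanup registered below is a GOTO_SUBROUTINE_EXPR: every path that
   must run the finally code jumps to FINALLY_LABEL with RETURN_LINK set to
   the address at which to resume, so the finally block is emitted only once
   and `returns' through an indirect jump.  */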
9409 rtx finally_label = gen_label_rtx ();
9410 rtx done_label = gen_label_rtx ();
9411 rtx return_link = gen_reg_rtx (Pmode);
9412 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9413 (tree) finally_label, (tree) return_link);
9414 TREE_SIDE_EFFECTS (cleanup) = 1;
9416 /* Start a new binding layer that will keep track of all cleanup
9417 actions to be performed. */
9418 expand_start_bindings (2);
9419 target_temp_slot_level = temp_slot_level;
9421 expand_decl_cleanup (NULL_TREE, cleanup);
9422 op0 = expand_expr (try_block, target, tmode, modifier);
9424 preserve_temp_slots (op0);
9425 expand_end_bindings (NULL_TREE, 0, 0);
9426 emit_jump (done_label);
9427 emit_label (finally_label);
9428 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9429 emit_indirect_jump (return_link);
9430 emit_label (done_label);
9432 else
9434 expand_start_bindings (2);
9435 target_temp_slot_level = temp_slot_level;
9437 expand_decl_cleanup (NULL_TREE, finally_block);
9438 op0 = expand_expr (try_block, target, tmode, modifier);
9440 preserve_temp_slots (op0);
9441 expand_end_bindings (NULL_TREE, 0, 0);
9444 return op0;
9447 case GOTO_SUBROUTINE_EXPR:
9449 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9450 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9451 rtx return_address = gen_label_rtx ();
9452 emit_move_insn (return_link,
9453 gen_rtx_LABEL_REF (Pmode, return_address));
9454 emit_jump (subr);
9455 emit_label (return_address);
9456 return const0_rtx;
9459 case VA_ARG_EXPR:
9460 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9462 case EXC_PTR_EXPR:
9463 return get_exception_pointer (cfun);
9465 case FDESC_EXPR:
9466 /* Function descriptors are not valid except for as
9467 initialization constants, and should not be expanded. */
9468 abort ();
9470 default:
9471 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9474 /* Here to do an ordinary binary operator, generating an instruction
9475 from the optab already placed in `this_optab'. */
9476 binop:
9477 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9478 subtarget, &op0, &op1, 0);
9479 binop2:
9480 if (modifier == EXPAND_STACK_PARM)
9481 target = 0;
9482 temp = expand_binop (mode, this_optab, op0, op1, target,
9483 unsignedp, OPTAB_LIB_WIDEN);
9484 if (temp == 0)
9485 abort ();
9486 return temp;
9489 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9490 when applied to the address of EXP produces an address known to be
9491 aligned more than BIGGEST_ALIGNMENT. */
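/* The offset we are looking for has the form (-EXP_ADDR) & (N - 1), where
   EXP_ADDR is the address of EXP (or of a PLACEHOLDER_EXPR of the same type)
   and N is a power of 2 larger than BIGGEST_ALIGNMENT; adding such an offset
   to EXP_ADDR rounds it up to an N-byte boundary.  */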
9493 static int
9494 is_aligning_offset (tree offset, tree exp)
9496 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9497 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9498 || TREE_CODE (offset) == NOP_EXPR
9499 || TREE_CODE (offset) == CONVERT_EXPR
9500 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9501 offset = TREE_OPERAND (offset, 0);
9503 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9504 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9505 if (TREE_CODE (offset) != BIT_AND_EXPR
9506 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9507 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9508 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9509 return 0;
9511 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9512 It must be NEGATE_EXPR. Then strip any more conversions. */
9513 offset = TREE_OPERAND (offset, 0);
9514 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9515 || TREE_CODE (offset) == NOP_EXPR
9516 || TREE_CODE (offset) == CONVERT_EXPR)
9517 offset = TREE_OPERAND (offset, 0);
9519 if (TREE_CODE (offset) != NEGATE_EXPR)
9520 return 0;
9522 offset = TREE_OPERAND (offset, 0);
9523 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9524 || TREE_CODE (offset) == NOP_EXPR
9525 || TREE_CODE (offset) == CONVERT_EXPR)
9526 offset = TREE_OPERAND (offset, 0);
9528 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9529 whose type is the same as EXP. */
9530 return (TREE_CODE (offset) == ADDR_EXPR
9531 && (TREE_OPERAND (offset, 0) == exp
9532 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9533 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9534 == TREE_TYPE (exp)))));
9537 /* Return the tree node if an ARG corresponds to a string constant or zero
9538 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9539 in bytes within the string that ARG is accessing. The type of the
9540 offset will be `sizetype'. */
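/* For example, for the argument `"foobar" + 2' this returns the STRING_CST
   node for "foobar" and sets *PTR_OFFSET to a sizetype constant 2.  */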
9542 tree
9543 string_constant (tree arg, tree *ptr_offset)
9545 STRIP_NOPS (arg);
9547 if (TREE_CODE (arg) == ADDR_EXPR
9548 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9550 *ptr_offset = size_zero_node;
9551 return TREE_OPERAND (arg, 0);
9553 else if (TREE_CODE (arg) == PLUS_EXPR)
9555 tree arg0 = TREE_OPERAND (arg, 0);
9556 tree arg1 = TREE_OPERAND (arg, 1);
9558 STRIP_NOPS (arg0);
9559 STRIP_NOPS (arg1);
9561 if (TREE_CODE (arg0) == ADDR_EXPR
9562 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9564 *ptr_offset = convert (sizetype, arg1);
9565 return TREE_OPERAND (arg0, 0);
9567 else if (TREE_CODE (arg1) == ADDR_EXPR
9568 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9570 *ptr_offset = convert (sizetype, arg0);
9571 return TREE_OPERAND (arg1, 0);
9575 return 0;
9578 /* Expand code for a post- or pre- increment or decrement
9579 and return the RTX for the result.
9580 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9582 static rtx
9583 expand_increment (tree exp, int post, int ignore)
9585 rtx op0, op1;
9586 rtx temp, value;
9587 tree incremented = TREE_OPERAND (exp, 0);
9588 optab this_optab = add_optab;
9589 int icode;
9590 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9591 int op0_is_copy = 0;
9592 int single_insn = 0;
9593 /* 1 means we can't store into OP0 directly,
9594 because it is a subreg narrower than a word,
9595 and we don't dare clobber the rest of the word. */
9596 int bad_subreg = 0;
9598 /* Stabilize any component ref that might need to be
9599 evaluated more than once below. */
9600 if (!post
9601 || TREE_CODE (incremented) == BIT_FIELD_REF
9602 || (TREE_CODE (incremented) == COMPONENT_REF
9603 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9604 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9605 incremented = stabilize_reference (incremented);
9606 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9607 ones into save exprs so that they don't accidentally get evaluated
9608 more than once by the code below. */
9609 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9610 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9611 incremented = save_expr (incremented);
9613 /* Compute the operands as RTX.
9614 Note whether OP0 is the actual lvalue or a copy of it:
9615 I believe it is a copy iff it is a register or subreg
9616 and insns were generated in computing it. */
9618 temp = get_last_insn ();
9619 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9621 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9622 in place but instead must do sign- or zero-extension during assignment,
9623 so we copy it into a new register and let the code below use it as
9624 a copy.
9626 Note that we can safely modify this SUBREG since it is known not to be
9627 shared (it was made by the expand_expr call above). */
9629 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9631 if (post)
9632 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9633 else
9634 bad_subreg = 1;
9636 else if (GET_CODE (op0) == SUBREG
9637 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9639 /* We cannot increment this SUBREG in place. If we are
9640 post-incrementing, get a copy of the old value. Otherwise,
9641 just mark that we cannot increment in place. */
9642 if (post)
9643 op0 = copy_to_reg (op0);
9644 else
9645 bad_subreg = 1;
9648 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9649 && temp != get_last_insn ());
9650 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9652 /* Decide whether incrementing or decrementing. */
9653 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9654 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9655 this_optab = sub_optab;
9657 /* Convert decrement by a constant into a negative increment. */
9658 if (this_optab == sub_optab
9659 && GET_CODE (op1) == CONST_INT)
9661 op1 = GEN_INT (-INTVAL (op1));
9662 this_optab = add_optab;
9665 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9666 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9668 /* For a preincrement, see if we can do this with a single instruction. */
9669 if (!post)
9671 icode = (int) this_optab->handlers[(int) mode].insn_code;
9672 if (icode != (int) CODE_FOR_nothing
9673 /* Make sure that OP0 is valid for operands 0 and 1
9674 of the insn we want to queue. */
9675 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9676 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9677 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9678 single_insn = 1;
9681 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9682 then we cannot just increment OP0. We must therefore contrive to
9683 increment the original value. Then, for postincrement, we can return
9684 OP0 since it is a copy of the old value. For preincrement, expand here
9685 unless we can do it with a single insn.
9687 Likewise if storing directly into OP0 would clobber high bits
9688 we need to preserve (bad_subreg). */
9689 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9691 /* This is the easiest way to increment the value wherever it is.
9692 Problems with multiple evaluation of INCREMENTED are prevented
9693 because either (1) it is a component_ref or preincrement,
9694 in which case it was stabilized above, or (2) it is an array_ref
9695 with constant index in an array in a register, which is
9696 safe to reevaluate. */
9697 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9698 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9699 ? MINUS_EXPR : PLUS_EXPR),
9700 TREE_TYPE (exp),
9701 incremented,
9702 TREE_OPERAND (exp, 1));
9704 while (TREE_CODE (incremented) == NOP_EXPR
9705 || TREE_CODE (incremented) == CONVERT_EXPR)
9707 newexp = convert (TREE_TYPE (incremented), newexp);
9708 incremented = TREE_OPERAND (incremented, 0);
9711 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9712 return post ? op0 : temp;
9715 if (post)
9717 /* We have a true reference to the value in OP0.
9718 If there is an insn to add or subtract in this mode, queue it.
9719 Queueing the increment insn avoids the register shuffling
9720 that often results if we must increment now and first save
9721 the old value for subsequent use. */
9723 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9724 op0 = stabilize (op0);
9725 #endif
9727 icode = (int) this_optab->handlers[(int) mode].insn_code;
9728 if (icode != (int) CODE_FOR_nothing
9729 /* Make sure that OP0 is valid for operands 0 and 1
9730 of the insn we want to queue. */
9731 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9732 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9734 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9735 op1 = force_reg (mode, op1);
9737 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9739 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9741 rtx addr = (general_operand (XEXP (op0, 0), mode)
9742 ? force_reg (Pmode, XEXP (op0, 0))
9743 : copy_to_reg (XEXP (op0, 0)));
9744 rtx temp, result;
9746 op0 = replace_equiv_address (op0, addr);
9747 temp = force_reg (GET_MODE (op0), op0);
9748 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9749 op1 = force_reg (mode, op1);
9751 /* The increment queue is LIFO, thus we have to `queue'
9752 the instructions in reverse order. */
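/* The copy back into OP0 is enqueued first and the increment of TEMP second,
   so that when the LIFO queue is emitted the increment runs before the
   store.  */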
9753 enqueue_insn (op0, gen_move_insn (op0, temp));
9754 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9755 return result;
9759 /* Preincrement, or we can't increment with one simple insn. */
9760 if (post)
9761 /* Save a copy of the value before inc or dec, to return it later. */
9762 temp = value = copy_to_reg (op0);
9763 else
9764 /* Arrange to return the incremented value. */
9765 /* Copy the rtx because expand_binop will protect from the queue,
9766 and the results of that would be invalid for us to return
9767 if our caller does emit_queue before using our result. */
9768 temp = copy_rtx (value = op0);
9770 /* Increment however we can. */
9771 op1 = expand_binop (mode, this_optab, value, op1, op0,
9772 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9774 /* Make sure the value is stored into OP0. */
9775 if (op1 != op0)
9776 emit_move_insn (op0, op1);
9778 return temp;
9781 /* Generate code to calculate EXP using a store-flag instruction
9782 and return an rtx for the result. EXP is either a comparison
9783 or a TRUTH_NOT_EXPR whose operand is a comparison.
9785 If TARGET is nonzero, store the result there if convenient.
9787 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9788 cheap.
9790 Return zero if there is no suitable set-flag instruction
9791 available on this machine.
9793 Once expand_expr has been called on the arguments of the comparison,
9794 we are committed to doing the store flag, since it is not safe to
9795 re-evaluate the expression. We emit the store-flag insn by calling
9796 emit_store_flag, but only expand the arguments if we have a reason
9797 to believe that emit_store_flag will be successful. If we think that
9798 it will, but it isn't, we have to simulate the store-flag with a
9799 set/jump/set sequence. */
9801 static rtx
9802 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9804 enum rtx_code code;
9805 tree arg0, arg1, type;
9806 tree tem;
9807 enum machine_mode operand_mode;
9808 int invert = 0;
9809 int unsignedp;
9810 rtx op0, op1;
9811 enum insn_code icode;
9812 rtx subtarget = target;
9813 rtx result, label;
9815 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9816 result at the end. We can't simply invert the test since it would
9817 have already been inverted if it were valid. This case occurs for
9818 some floating-point comparisons. */
9820 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9821 invert = 1, exp = TREE_OPERAND (exp, 0);
9823 arg0 = TREE_OPERAND (exp, 0);
9824 arg1 = TREE_OPERAND (exp, 1);
9826 /* Don't crash if the comparison was erroneous. */
9827 if (arg0 == error_mark_node || arg1 == error_mark_node)
9828 return const0_rtx;
9830 type = TREE_TYPE (arg0);
9831 operand_mode = TYPE_MODE (type);
9832 unsignedp = TREE_UNSIGNED (type);
9834 /* We won't bother with BLKmode store-flag operations because it would mean
9835 passing a lot of information to emit_store_flag. */
9836 if (operand_mode == BLKmode)
9837 return 0;
9839 /* We won't bother with store-flag operations involving function pointers
9840 when function pointers must be canonicalized before comparisons. */
9841 #ifdef HAVE_canonicalize_funcptr_for_compare
9842 if (HAVE_canonicalize_funcptr_for_compare
9843 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9844 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9845 == FUNCTION_TYPE))
9846 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9847 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9848 == FUNCTION_TYPE))))
9849 return 0;
9850 #endif
9852 STRIP_NOPS (arg0);
9853 STRIP_NOPS (arg1);
9855 /* Get the rtx comparison code to use. We know that EXP is a comparison
9856 operation of some type. Some comparisons against 1 and -1 can be
9857 converted to comparisons with zero. Do so here so that the tests
9858 below will be aware that we have a comparison with zero. These
9859 tests will not catch constants in the first operand, but constants
9860 are rarely passed as the first operand. */
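/* For instance, `x < 1' becomes `x <= 0', and for signed operands
   `x <= -1' becomes `x < 0' and `x > -1' becomes `x >= 0'.  */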
9862 switch (TREE_CODE (exp))
9864 case EQ_EXPR:
9865 code = EQ;
9866 break;
9867 case NE_EXPR:
9868 code = NE;
9869 break;
9870 case LT_EXPR:
9871 if (integer_onep (arg1))
9872 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9873 else
9874 code = unsignedp ? LTU : LT;
9875 break;
9876 case LE_EXPR:
9877 if (! unsignedp && integer_all_onesp (arg1))
9878 arg1 = integer_zero_node, code = LT;
9879 else
9880 code = unsignedp ? LEU : LE;
9881 break;
9882 case GT_EXPR:
9883 if (! unsignedp && integer_all_onesp (arg1))
9884 arg1 = integer_zero_node, code = GE;
9885 else
9886 code = unsignedp ? GTU : GT;
9887 break;
9888 case GE_EXPR:
9889 if (integer_onep (arg1))
9890 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9891 else
9892 code = unsignedp ? GEU : GE;
9893 break;
9895 case UNORDERED_EXPR:
9896 code = UNORDERED;
9897 break;
9898 case ORDERED_EXPR:
9899 code = ORDERED;
9900 break;
9901 case UNLT_EXPR:
9902 code = UNLT;
9903 break;
9904 case UNLE_EXPR:
9905 code = UNLE;
9906 break;
9907 case UNGT_EXPR:
9908 code = UNGT;
9909 break;
9910 case UNGE_EXPR:
9911 code = UNGE;
9912 break;
9913 case UNEQ_EXPR:
9914 code = UNEQ;
9915 break;
9917 default:
9918 abort ();
9921 /* Put a constant second. */
9922 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9924 tem = arg0; arg0 = arg1; arg1 = tem;
9925 code = swap_condition (code);
9928 /* If this is an equality or inequality test of a single bit, we can
9929 do this by shifting the bit being tested to the low-order bit and
9930 masking the result with the constant 1. If the condition was EQ,
9931 we xor it with 1. This does not require an scc insn and is faster
9932 than an scc insn even if we have it.
9934 The code to make this transformation was moved into fold_single_bit_test,
9935 so we just call into the folder and expand its result. */
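/* For instance, `(x & 4) != 0' becomes `(x >> 2) & 1', and for `=='
   the result is additionally XORed with 1.  */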
9937 if ((code == NE || code == EQ)
9938 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9939 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9941 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9942 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9943 arg0, arg1, type),
9944 target, VOIDmode, EXPAND_NORMAL);
9947 /* Now see if we are likely to be able to do this. Return if not. */
9948 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9949 return 0;
9951 icode = setcc_gen_code[(int) code];
9952 if (icode == CODE_FOR_nothing
9953 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9955 /* We can only do this if it is one of the special cases that
9956 can be handled without an scc insn. */
9957 if ((code == LT && integer_zerop (arg1))
9958 || (! only_cheap && code == GE && integer_zerop (arg1)))
9960 else if (BRANCH_COST >= 0
9961 && ! only_cheap && (code == NE || code == EQ)
9962 && TREE_CODE (type) != REAL_TYPE
9963 && ((abs_optab->handlers[(int) operand_mode].insn_code
9964 != CODE_FOR_nothing)
9965 || (ffs_optab->handlers[(int) operand_mode].insn_code
9966 != CODE_FOR_nothing)))
9968 else
9969 return 0;
9972 if (! get_subtarget (target)
9973 || GET_MODE (subtarget) != operand_mode
9974 || ! safe_from_p (subtarget, arg1, 1))
9975 subtarget = 0;
9977 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9979 if (target == 0)
9980 target = gen_reg_rtx (mode);
9982 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9983 because, if emit_store_flag does anything, it will succeed and
9984 OP0 and OP1 will not be used subsequently. */
9986 result = emit_store_flag (target, code,
9987 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9988 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9989 operand_mode, unsignedp, 1);
9991 if (result)
9993 if (invert)
9994 result = expand_binop (mode, xor_optab, result, const1_rtx,
9995 result, 0, OPTAB_LIB_WIDEN);
9996 return result;
9999 /* If this failed, we have to do this with set/compare/jump/set code. */
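/* That is: store the value for "comparison true" into TARGET, emit the
   comparison, branch over the final move when it holds, and otherwise
   overwrite TARGET with the value for "comparison false".  */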
10000 if (GET_CODE (target) != REG
10001 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10002 target = gen_reg_rtx (GET_MODE (target));
10004 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10005 result = compare_from_rtx (op0, op1, code, unsignedp,
10006 operand_mode, NULL_RTX);
10007 if (GET_CODE (result) == CONST_INT)
10008 return (((result == const0_rtx && ! invert)
10009 || (result != const0_rtx && invert))
10010 ? const0_rtx : const1_rtx);
10012 /* The code of RESULT may not match CODE if compare_from_rtx
10013 decided to swap its operands and reverse the original code.
10015 We know that compare_from_rtx returns either a CONST_INT or
10016 a new comparison code, so it is safe to just extract the
10017 code from RESULT. */
10018 code = GET_CODE (result);
10020 label = gen_label_rtx ();
10021 if (bcc_gen_fctn[(int) code] == 0)
10022 abort ();
10024 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10025 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10026 emit_label (label);
10028 return target;
10032 /* Stubs in case we haven't got a casesi insn. */
10033 #ifndef HAVE_casesi
10034 # define HAVE_casesi 0
10035 # define gen_casesi(a, b, c, d, e) (0)
10036 # define CODE_FOR_casesi CODE_FOR_nothing
10037 #endif
10039 /* If the machine does not have a case insn that compares the bounds,
10040 this means extra overhead for dispatch tables, which raises the
10041 threshold for using them. */
10042 #ifndef CASE_VALUES_THRESHOLD
10043 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10044 #endif /* CASE_VALUES_THRESHOLD */
10046 unsigned int
10047 case_values_threshold (void)
10049 return CASE_VALUES_THRESHOLD;
10052 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10053 0 otherwise (i.e. if there is no casesi instruction). */
10054 int
10055 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10056 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
10058 enum machine_mode index_mode = SImode;
10059 int index_bits = GET_MODE_BITSIZE (index_mode);
10060 rtx op1, op2, index;
10061 enum machine_mode op_mode;
10063 if (! HAVE_casesi)
10064 return 0;
10066 /* Convert the index to SImode. */
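/* If the index is wider than SImode, do the range check in the original
   mode first, so that out-of-range values jump to DEFAULT_LABEL before the
   truncation below can discard significant bits.  */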
10067 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10069 enum machine_mode omode = TYPE_MODE (index_type);
10070 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10072 /* We must handle the endpoints in the original mode. */
10073 index_expr = build (MINUS_EXPR, index_type,
10074 index_expr, minval);
10075 minval = integer_zero_node;
10076 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10077 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10078 omode, 1, default_label);
10079 /* Now we can safely truncate. */
10080 index = convert_to_mode (index_mode, index, 0);
10082 else
10084 if (TYPE_MODE (index_type) != index_mode)
10086 index_expr = convert ((*lang_hooks.types.type_for_size)
10087 (index_bits, 0), index_expr);
10088 index_type = TREE_TYPE (index_expr);
10091 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10093 emit_queue ();
10094 index = protect_from_queue (index, 0);
10095 do_pending_stack_adjust ();
10097 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10098 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10099 (index, op_mode))
10100 index = copy_to_mode_reg (op_mode, index);
10102 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10104 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10105 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10106 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10107 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10108 (op1, op_mode))
10109 op1 = copy_to_mode_reg (op_mode, op1);
10111 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10113 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10114 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10115 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10116 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10117 (op2, op_mode))
10118 op2 = copy_to_mode_reg (op_mode, op2);
10120 emit_jump_insn (gen_casesi (index, op1, op2,
10121 table_label, default_label));
10122 return 1;
10125 /* Attempt to generate a tablejump instruction; same concept. */
10126 #ifndef HAVE_tablejump
10127 #define HAVE_tablejump 0
10128 #define gen_tablejump(x, y) (0)
10129 #endif
10131 /* Subroutine of the next function.
10133 INDEX is the value being switched on, with the lowest value
10134 in the table already subtracted.
10135 MODE is its expected mode (needed if INDEX is constant).
10136 RANGE is the length of the jump table.
10137 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10139 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10140 index value is out of range. */
10142 static void
10143 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10144 rtx default_label)
10146 rtx temp, vector;
10148 if (INTVAL (range) > cfun->max_jumptable_ents)
10149 cfun->max_jumptable_ents = INTVAL (range);
10151 /* Do an unsigned comparison (in the proper mode) between the index
10152 expression and the value which represents the length of the range.
10153 Since we just finished subtracting the lower bound of the range
10154 from the index expression, this comparison allows us to simultaneously
10155 check that the original index expression value is both greater than
10156 or equal to the minimum value of the range and less than or equal to
10157 the maximum value of the range. */
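/* For example, with cases 5 through 10 and RANGE equal to 5, an original
   value of 3 has wrapped around to a huge unsigned number and an original
   value of 12 has become 7, so the single unsigned test INDEX > RANGE
   rejects both kinds of out-of-range value.  */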
10159 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10160 default_label);
10162 /* If index is in range, it must fit in Pmode.
10163 Convert to Pmode so we can index with it. */
10164 if (mode != Pmode)
10165 index = convert_to_mode (Pmode, index, 1);
10167 /* Don't let a MEM slip through, because then INDEX that comes
10168 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10169 and break_out_memory_refs will go to work on it and mess it up. */
10170 #ifdef PIC_CASE_VECTOR_ADDRESS
10171 if (flag_pic && GET_CODE (index) != REG)
10172 index = copy_to_mode_reg (Pmode, index);
10173 #endif
10175 /* If flag_force_addr were to affect this address
10176 it could interfere with the tricky assumptions made
10177 about addresses that contain label-refs,
10178 which may be valid only very near the tablejump itself. */
10179 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10180 GET_MODE_SIZE, because this indicates how large insns are. The other
10181 uses should all be Pmode, because they are addresses. This code
10182 could fail if addresses and insns are not the same size. */
10183 index = gen_rtx_PLUS (Pmode,
10184 gen_rtx_MULT (Pmode, index,
10185 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10186 gen_rtx_LABEL_REF (Pmode, table_label));
10187 #ifdef PIC_CASE_VECTOR_ADDRESS
10188 if (flag_pic)
10189 index = PIC_CASE_VECTOR_ADDRESS (index);
10190 else
10191 #endif
10192 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10193 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10194 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10195 RTX_UNCHANGING_P (vector) = 1;
10196 MEM_NOTRAP_P (vector) = 1;
10197 convert_move (temp, vector, 0);
10199 emit_jump_insn (gen_tablejump (temp, table_label));
10201 /* If we are generating PIC code or if the table is PC-relative, the
10202 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10203 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10204 emit_barrier ();
10207 int
10208 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10209 rtx table_label, rtx default_label)
10211 rtx index;
10213 if (! HAVE_tablejump)
10214 return 0;
10216 index_expr = fold (build (MINUS_EXPR, index_type,
10217 convert (index_type, index_expr),
10218 convert (index_type, minval)));
10219 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10220 emit_queue ();
10221 index = protect_from_queue (index, 0);
10222 do_pending_stack_adjust ();
10224 do_tablejump (index, TYPE_MODE (index_type),
10225 convert_modes (TYPE_MODE (index_type),
10226 TYPE_MODE (TREE_TYPE (range)),
10227 expand_expr (range, NULL_RTX,
10228 VOIDmode, 0),
10229 TREE_UNSIGNED (TREE_TYPE (range))),
10230 table_label, default_label);
10231 return 1;
10234 /* Nonzero if the mode is a valid vector mode for this architecture.
10235 This returns nonzero even if there is no hardware support for the
10236 vector mode, but we can emulate with narrower modes. */
10238 int
10239 vector_mode_valid_p (enum machine_mode mode)
10241 enum mode_class class = GET_MODE_CLASS (mode);
10242 enum machine_mode innermode;
10244 /* Doh! What's going on? */
10245 if (class != MODE_VECTOR_INT
10246 && class != MODE_VECTOR_FLOAT)
10247 return 0;
10249 /* Hardware support. Woo hoo! */
10250 if (VECTOR_MODE_SUPPORTED_P (mode))
10251 return 1;
10253 innermode = GET_MODE_INNER (mode);
10255 /* We should probably return 1 if requesting V4DI and we have no DI
10256 but do have V2DI; however, that case is probably very unlikely. */
10258 /* If we have support for the inner mode, we can safely emulate it.
10259 We may not have V2DI, but we can emulate with a pair of DIs. */
10260 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10263 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10264 static rtx
10265 const_vector_from_tree (tree exp)
10267 rtvec v;
10268 int units, i;
10269 tree link, elt;
10270 enum machine_mode inner, mode;
10272 mode = TYPE_MODE (TREE_TYPE (exp));
10274 if (is_zeros_p (exp))
10275 return CONST0_RTX (mode);
10277 units = GET_MODE_NUNITS (mode);
10278 inner = GET_MODE_INNER (mode);
10280 v = rtvec_alloc (units);
10282 link = TREE_VECTOR_CST_ELTS (exp);
10283 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10285 elt = TREE_VALUE (link);
10287 if (TREE_CODE (elt) == REAL_CST)
10288 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10289 inner);
10290 else
10291 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10292 TREE_INT_CST_HIGH (elt),
10293 inner);
10296 /* Initialize remaining elements to 0. */
10297 for (; i < units; ++i)
10298 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10300 return gen_rtx_raw_CONST_VECTOR (mode, v);
10303 #include "gt-expr.h"