gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
104 };
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
110 {
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
120 };
122 static rtx enqueue_insn (rtx, rtx);
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, enum machine_mode, int, tree, int);
148 static rtx var_rtx (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
151 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
153 static int is_aligning_offset (tree, tree);
154 static rtx expand_increment (tree, int, int);
155 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
156 enum expand_modifier);
157 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
158 #ifdef PUSH_ROUNDING
159 static void emit_single_push_insn (enum machine_mode, rtx, tree);
160 #endif
161 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
162 static rtx const_vector_from_tree (tree);
164 /* Record for each mode whether we can move a register directly to or
165 from an object of that mode in memory. If we can't, we won't try
166 to use that mode directly when accessing a field of that mode. */
168 static char direct_load[NUM_MACHINE_MODES];
169 static char direct_store[NUM_MACHINE_MODES];
171 /* Record for each mode whether we can float-extend from memory. */
173 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
175 /* This macro is used to determine whether move_by_pieces should be called
176 to perform a structure copy. */
177 #ifndef MOVE_BY_PIECES_P
178 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
180 #endif
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
187 #endif
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero, or
191 to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
194 #endif
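/* Illustrative note (not part of the original file): these three macros
   are only defaults.  A target that wants piecewise copies for, say, all
   constant sizes up to 16 bytes could override the first one in its
   target header along the lines of

       #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
         ((SIZE) <= 16 \
          || move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)

   The 16-byte threshold is a made-up value used purely for illustration.  */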
196 /* This array records the insn_code of insns to perform block moves. */
197 enum insn_code movmem_optab[NUM_MACHINE_MODES];
199 /* This array records the insn_code of insns to perform block clears. */
200 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
202 /* These arrays record the insn_code of two different kinds of insns
203 to perform block compares. */
204 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
205 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
207 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
209 #ifndef SLOW_UNALIGNED_ACCESS
210 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
211 #endif
213 /* This is run once per compilation to set up which modes can be used
214 directly in memory and to initialize the block move optab. */
216 void
217 init_expr_once (void)
219 rtx insn, pat;
220 enum machine_mode mode;
221 int num_clobbers;
222 rtx mem, mem1;
223 rtx reg;
225 /* Try indexing by frame ptr and try by stack ptr.
226 It is known that on the Convex the stack ptr isn't a valid index.
227 With luck, one or the other is valid on any machine. */
228 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
229 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
231 /* A scratch register we can modify in-place below to avoid
232 useless RTL allocations. */
233 reg = gen_rtx_REG (VOIDmode, -1);
235 insn = rtx_alloc (INSN);
236 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
237 PATTERN (insn) = pat;
239 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
240 mode = (enum machine_mode) ((int) mode + 1))
242 int regno;
244 direct_load[(int) mode] = direct_store[(int) mode] = 0;
245 PUT_MODE (mem, mode);
246 PUT_MODE (mem1, mode);
247 PUT_MODE (reg, mode);
249 /* See if there is some register that can be used in this mode and
250 directly loaded or stored from memory. */
252 if (mode != VOIDmode && mode != BLKmode)
253 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
254 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
255 regno++)
257 if (! HARD_REGNO_MODE_OK (regno, mode))
258 continue;
260 REGNO (reg) = regno;
262 SET_SRC (pat) = mem;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
267 SET_SRC (pat) = mem1;
268 SET_DEST (pat) = reg;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_load[(int) mode] = 1;
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
277 SET_SRC (pat) = reg;
278 SET_DEST (pat) = mem1;
279 if (recog (pat, insn, &num_clobbers) >= 0)
280 direct_store[(int) mode] = 1;
284 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
286 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
287 mode = GET_MODE_WIDER_MODE (mode))
289 enum machine_mode srcmode;
290 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
291 srcmode = GET_MODE_WIDER_MODE (srcmode))
293 enum insn_code ic;
295 ic = can_extend_p (mode, srcmode, 0);
296 if (ic == CODE_FOR_nothing)
297 continue;
299 PUT_MODE (mem, srcmode);
301 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
302 float_extend_from_mem[mode][srcmode] = true;
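/* Illustrative note (not part of the original file): the direct_load,
   direct_store and float_extend_from_mem tables filled in above are
   consulted by the conversion and move code later in this file, e.g.

       if (MEM_P (from) && ! MEM_VOLATILE_P (from)
           && direct_load[(int) to_mode])
         ... FROM may be referenced in TO_MODE directly ...

   so that a memory operand is forced into a register only when the
   target really cannot access it in the desired mode.  */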
307 /* This is run at the start of compiling a function. */
309 void
310 init_expr (void)
312 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
315 /* Small sanity check that the queue is empty at the end of a function. */
317 void
318 finish_expr_for_function (void)
320 if (pending_chain)
321 abort ();
324 /* Manage the queue of increment instructions to be output
325 for POSTINCREMENT_EXPR expressions, etc. */
327 /* Queue up to increment (or change) VAR later. BODY says how:
328 BODY should be the same thing you would pass to emit_insn
329 to increment right away. It will go to emit_insn later on.
331 The value is a QUEUED expression to be used in place of VAR
332 where you want to guarantee the pre-incrementation value of VAR. */
334 static rtx
335 enqueue_insn (rtx var, rtx body)
337 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
338 body, pending_chain);
339 return pending_chain;
342 /* Use protect_from_queue to convert a QUEUED expression
343 into something that you can put immediately into an instruction.
344 If the queued incrementation has not happened yet,
345 protect_from_queue returns the variable itself.
346 If the incrementation has happened, protect_from_queue returns a temp
347 that contains a copy of the old value of the variable.
349 Any time an rtx which might possibly be a QUEUED is to be put
350 into an instruction, it must be passed through protect_from_queue first.
351 QUEUED expressions are not meaningful in instructions.
353 Do not pass a value through protect_from_queue and then hold
354 on to it for a while before putting it in an instruction!
355 If the queue is flushed in between, incorrect code will result. */
357 rtx
358 protect_from_queue (rtx x, int modify)
360 RTX_CODE code = GET_CODE (x);
362 #if 0 /* A QUEUED can hang around after the queue is forced out. */
363 /* Shortcut for most common case. */
364 if (pending_chain == 0)
365 return x;
366 #endif
368 if (code != QUEUED)
370 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
371 use of autoincrement. Make a copy of the contents of the memory
372 location rather than a copy of the address, but not if the value is
373 of mode BLKmode. Don't modify X in place since it might be
374 shared. */
375 if (code == MEM && GET_MODE (x) != BLKmode
376 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
378 rtx y = XEXP (x, 0);
379 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
381 if (QUEUED_INSN (y))
383 rtx temp = gen_reg_rtx (GET_MODE (x));
385 emit_insn_before (gen_move_insn (temp, new),
386 QUEUED_INSN (y));
387 return temp;
390 /* Copy the address into a pseudo, so that the returned value
391 remains correct across calls to emit_queue. */
392 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
395 /* Otherwise, recursively protect the subexpressions of all
396 the kinds of rtx's that can contain a QUEUED. */
397 if (code == MEM)
399 rtx tem = protect_from_queue (XEXP (x, 0), 0);
400 if (tem != XEXP (x, 0))
402 x = copy_rtx (x);
403 XEXP (x, 0) = tem;
406 else if (code == PLUS || code == MULT)
408 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
409 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
410 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
412 x = copy_rtx (x);
413 XEXP (x, 0) = new0;
414 XEXP (x, 1) = new1;
417 return x;
419 /* If the increment has not happened, use the variable itself. Copy it
420 into a new pseudo so that the value remains correct across calls to
421 emit_queue. */
422 if (QUEUED_INSN (x) == 0)
423 return copy_to_reg (QUEUED_VAR (x));
424 /* If the increment has happened and a pre-increment copy exists,
425 use that copy. */
426 if (QUEUED_COPY (x) != 0)
427 return QUEUED_COPY (x);
428 /* The increment has happened but we haven't set up a pre-increment copy.
429 Set one up now, and use it. */
430 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
431 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
432 QUEUED_INSN (x));
433 return QUEUED_COPY (x);
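/* Illustrative usage sketch (not part of the original file): any rtx that
   might contain a QUEUED must be filtered before being placed in an insn.
   A hypothetical caller reading SRC and writing DST would do

       src = protect_from_queue (src, 0);    (read access)
       dst = protect_from_queue (dst, 1);    (destination, MODIFY == 1)
       emit_move_insn (dst, src);

   and, as the comment above warns, must not hold on to the protected
   values across a later call to emit_queue.  */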
436 /* Return nonzero if X contains a QUEUED expression:
437 if it contains anything that will be altered by a queued increment.
438 We handle only combinations of MEM, PLUS, MINUS and MULT operators
439 since memory addresses generally contain only those. */
441 int
442 queued_subexp_p (rtx x)
444 enum rtx_code code = GET_CODE (x);
445 switch (code)
447 case QUEUED:
448 return 1;
449 case MEM:
450 return queued_subexp_p (XEXP (x, 0));
451 case MULT:
452 case PLUS:
453 case MINUS:
454 return (queued_subexp_p (XEXP (x, 0))
455 || queued_subexp_p (XEXP (x, 1)));
456 default:
457 return 0;
461 /* Retrieve a mark on the queue. */
463 static rtx
464 mark_queue (void)
466 return pending_chain;
469 /* Perform all the pending incrementations that have been enqueued
470 after MARK was retrieved. If MARK is null, perform all the
471 pending incrementations. */
473 static void
474 emit_insns_enqueued_after_mark (rtx mark)
476 rtx p;
478 /* The marked incrementation may have been emitted in the meantime
479 through a call to emit_queue. In this case, the mark is not valid
480 anymore so do nothing. */
481 if (mark && ! QUEUED_BODY (mark))
482 return;
484 while ((p = pending_chain) != mark)
486 rtx body = QUEUED_BODY (p);
488 switch (GET_CODE (body))
490 case INSN:
491 case JUMP_INSN:
492 case CALL_INSN:
493 case CODE_LABEL:
494 case BARRIER:
495 case NOTE:
496 QUEUED_INSN (p) = body;
497 emit_insn (body);
498 break;
500 #ifdef ENABLE_CHECKING
501 case SEQUENCE:
502 abort ();
503 break;
504 #endif
506 default:
507 QUEUED_INSN (p) = emit_insn (body);
508 break;
511 QUEUED_BODY (p) = 0;
512 pending_chain = QUEUED_NEXT (p);
516 /* Perform all the pending incrementations. */
518 void
519 emit_queue (void)
521 emit_insns_enqueued_after_mark (NULL_RTX);
524 /* Copy data from FROM to TO, where the machine modes are not the same.
525 Both modes may be integer, or both may be floating.
526 UNSIGNEDP should be nonzero if FROM is an unsigned type.
527 This causes zero-extension instead of sign-extension. */
529 void
530 convert_move (rtx to, rtx from, int unsignedp)
532 enum machine_mode to_mode = GET_MODE (to);
533 enum machine_mode from_mode = GET_MODE (from);
534 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
535 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
536 enum insn_code code;
537 rtx libcall;
539 /* rtx code for making an equivalent value. */
540 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
541 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
543 to = protect_from_queue (to, 1);
544 from = protect_from_queue (from, 0);
546 if (to_real != from_real)
547 abort ();
549 /* If the source and destination are already the same, then there's
550 nothing to do. */
551 if (to == from)
552 return;
554 /* If FROM is a SUBREG that indicates that we have already done at least
555 the required extension, strip it. We don't handle such SUBREGs as
556 TO here. */
558 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
559 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
560 >= GET_MODE_SIZE (to_mode))
561 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
562 from = gen_lowpart (to_mode, from), from_mode = to_mode;
564 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
565 abort ();
567 if (to_mode == from_mode
568 || (from_mode == VOIDmode && CONSTANT_P (from)))
570 emit_move_insn (to, from);
571 return;
574 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
576 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
577 abort ();
579 if (VECTOR_MODE_P (to_mode))
580 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
581 else
582 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
584 emit_move_insn (to, from);
585 return;
588 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
590 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
591 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
592 return;
595 if (to_real)
597 rtx value, insns;
598 convert_optab tab;
600 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
601 tab = sext_optab;
602 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
603 tab = trunc_optab;
604 else
605 abort ();
607 /* Try converting directly if the insn is supported. */
609 code = tab->handlers[to_mode][from_mode].insn_code;
610 if (code != CODE_FOR_nothing)
612 emit_unop_insn (code, to, from,
613 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
614 return;
617 /* Otherwise use a libcall. */
618 libcall = tab->handlers[to_mode][from_mode].libfunc;
620 if (!libcall)
621 /* This conversion is not implemented yet. */
622 abort ();
624 start_sequence ();
625 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
626 1, from, from_mode);
627 insns = get_insns ();
628 end_sequence ();
629 emit_libcall_block (insns, to, value,
630 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
631 from)
632 : gen_rtx_FLOAT_EXTEND (to_mode, from));
633 return;
636 /* Handle pointer conversion. */ /* SPEE 900220. */
637 /* Targets are expected to provide conversion insns between PxImode and
638 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
639 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
641 enum machine_mode full_mode
642 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
644 if (trunc_optab->handlers[to_mode][full_mode].insn_code
645 == CODE_FOR_nothing)
646 abort ();
648 if (full_mode != from_mode)
649 from = convert_to_mode (full_mode, from, unsignedp);
650 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
651 to, from, UNKNOWN);
652 return;
654 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
656 enum machine_mode full_mode
657 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
659 if (sext_optab->handlers[full_mode][from_mode].insn_code
660 == CODE_FOR_nothing)
661 abort ();
663 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
664 to, from, UNKNOWN);
665 if (to_mode == full_mode)
666 return;
668 /* else proceed to integer conversions below. */
669 from_mode = full_mode;
672 /* Now both modes are integers. */
674 /* Handle expanding beyond a word. */
675 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
676 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
678 rtx insns;
679 rtx lowpart;
680 rtx fill_value;
681 rtx lowfrom;
682 int i;
683 enum machine_mode lowpart_mode;
684 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
686 /* Try converting directly if the insn is supported. */
687 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
688 != CODE_FOR_nothing)
690 /* If FROM is a SUBREG, put it into a register. Do this
691 so that we always generate the same set of insns for
692 better cse'ing; if an intermediate assignment occurred,
693 we won't be doing the operation directly on the SUBREG. */
694 if (optimize > 0 && GET_CODE (from) == SUBREG)
695 from = force_reg (from_mode, from);
696 emit_unop_insn (code, to, from, equiv_code);
697 return;
699 /* Next, try converting via full word. */
700 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
701 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
702 != CODE_FOR_nothing))
704 if (REG_P (to))
706 if (reg_overlap_mentioned_p (to, from))
707 from = force_reg (from_mode, from);
708 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
710 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
711 emit_unop_insn (code, to,
712 gen_lowpart (word_mode, to), equiv_code);
713 return;
716 /* No special multiword conversion insn; do it by hand. */
717 start_sequence ();
719 /* Since we will turn this into a no conflict block, we must ensure
720 that the source does not overlap the target. */
722 if (reg_overlap_mentioned_p (to, from))
723 from = force_reg (from_mode, from);
725 /* Get a copy of FROM widened to a word, if necessary. */
726 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
727 lowpart_mode = word_mode;
728 else
729 lowpart_mode = from_mode;
731 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
733 lowpart = gen_lowpart (lowpart_mode, to);
734 emit_move_insn (lowpart, lowfrom);
736 /* Compute the value to put in each remaining word. */
737 if (unsignedp)
738 fill_value = const0_rtx;
739 else
741 #ifdef HAVE_slt
742 if (HAVE_slt
743 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
744 && STORE_FLAG_VALUE == -1)
746 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
747 lowpart_mode, 0);
748 fill_value = gen_reg_rtx (word_mode);
749 emit_insn (gen_slt (fill_value));
751 else
752 #endif
754 fill_value
755 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
756 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
757 NULL_RTX, 0);
758 fill_value = convert_to_mode (word_mode, fill_value, 1);
762 /* Fill the remaining words. */
763 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
765 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
766 rtx subword = operand_subword (to, index, 1, to_mode);
768 if (subword == 0)
769 abort ();
771 if (fill_value != subword)
772 emit_move_insn (subword, fill_value);
775 insns = get_insns ();
776 end_sequence ();
778 emit_no_conflict_block (insns, to, from, NULL_RTX,
779 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
780 return;
783 /* Truncating multi-word to a word or less. */
784 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
785 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
787 if (!((MEM_P (from)
788 && ! MEM_VOLATILE_P (from)
789 && direct_load[(int) to_mode]
790 && ! mode_dependent_address_p (XEXP (from, 0)))
791 || REG_P (from)
792 || GET_CODE (from) == SUBREG))
793 from = force_reg (from_mode, from);
794 convert_move (to, gen_lowpart (word_mode, from), 0);
795 return;
798 /* Now follow all the conversions between integers
799 no more than a word long. */
801 /* For truncation, usually we can just refer to FROM in a narrower mode. */
802 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
803 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
804 GET_MODE_BITSIZE (from_mode)))
806 if (!((MEM_P (from)
807 && ! MEM_VOLATILE_P (from)
808 && direct_load[(int) to_mode]
809 && ! mode_dependent_address_p (XEXP (from, 0)))
810 || REG_P (from)
811 || GET_CODE (from) == SUBREG))
812 from = force_reg (from_mode, from);
813 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
814 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
815 from = copy_to_reg (from);
816 emit_move_insn (to, gen_lowpart (to_mode, from));
817 return;
820 /* Handle extension. */
821 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
823 /* Convert directly if that works. */
824 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
825 != CODE_FOR_nothing)
827 if (flag_force_mem)
828 from = force_not_mem (from);
830 emit_unop_insn (code, to, from, equiv_code);
831 return;
833 else
835 enum machine_mode intermediate;
836 rtx tmp;
837 tree shift_amount;
839 /* Search for a mode to convert via. */
840 for (intermediate = from_mode; intermediate != VOIDmode;
841 intermediate = GET_MODE_WIDER_MODE (intermediate))
842 if (((can_extend_p (to_mode, intermediate, unsignedp)
843 != CODE_FOR_nothing)
844 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
845 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
846 GET_MODE_BITSIZE (intermediate))))
847 && (can_extend_p (intermediate, from_mode, unsignedp)
848 != CODE_FOR_nothing))
850 convert_move (to, convert_to_mode (intermediate, from,
851 unsignedp), unsignedp);
852 return;
855 /* No suitable intermediate mode.
856 Generate what we need with shifts. */
857 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
858 - GET_MODE_BITSIZE (from_mode), 0);
859 from = gen_lowpart (to_mode, force_reg (from_mode, from));
860 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
861 to, unsignedp);
862 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
863 to, unsignedp);
864 if (tmp != to)
865 emit_move_insn (to, tmp);
866 return;
870 /* Support special truncate insns for certain modes. */
871 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
873 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
874 to, from, UNKNOWN);
875 return;
878 /* Handle truncation of volatile memrefs, and so on;
879 the things that couldn't be truncated directly,
880 and for which there was no special instruction.
882 ??? Code above formerly short-circuited this, for most integer
883 mode pairs, with a force_reg in from_mode followed by a recursive
884 call to this routine. Appears always to have been wrong. */
885 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
887 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
888 emit_move_insn (to, temp);
889 return;
892 /* Mode combination is not recognized. */
893 abort ();
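/* Illustrative usage sketch (not part of the original file): widening a
   SImode pseudo into a fresh DImode pseudo.  The helper below is purely
   hypothetical and compiled out; it only shows how TO, FROM and UNSIGNEDP
   relate.  */
#if 0
static rtx
example_widen_to_dimode (rtx src_si, int treat_as_unsigned)
{
  rtx dst = gen_reg_rtx (DImode);
  /* Nonzero UNSIGNEDP requests zero extension, zero requests sign
     extension, exactly as documented for convert_move above.  */
  convert_move (dst, src_si, treat_as_unsigned);
  return dst;
}
#endif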
896 /* Return an rtx for a value that would result
897 from converting X to mode MODE.
898 Both X and MODE may be floating, or both integer.
899 UNSIGNEDP is nonzero if X is an unsigned value.
900 This can be done by referring to a part of X in place
901 or by copying to a new temporary with conversion.
903 This function *must not* call protect_from_queue
904 except when putting X into an insn (in which case convert_move does it). */
906 rtx
907 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
909 return convert_modes (mode, VOIDmode, x, unsignedp);
912 /* Return an rtx for a value that would result
913 from converting X from mode OLDMODE to mode MODE.
914 Both modes may be floating, or both integer.
915 UNSIGNEDP is nonzero if X is an unsigned value.
917 This can be done by referring to a part of X in place
918 or by copying to a new temporary with conversion.
920 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
922 This function *must not* call protect_from_queue
923 except when putting X into an insn (in which case convert_move does it). */
925 rtx
926 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
928 rtx temp;
930 /* If FROM is a SUBREG that indicates that we have already done at least
931 the required extension, strip it. */
933 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
934 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
935 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
936 x = gen_lowpart (mode, x);
938 if (GET_MODE (x) != VOIDmode)
939 oldmode = GET_MODE (x);
941 if (mode == oldmode)
942 return x;
944 /* There is one case that we must handle specially: If we are converting
945 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
946 we are to interpret the constant as unsigned, gen_lowpart will do
947 the wrong thing if the constant appears negative. What we want to do is
948 make the high-order word of the constant zero, not all ones. */
950 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
951 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
952 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
954 HOST_WIDE_INT val = INTVAL (x);
956 if (oldmode != VOIDmode
957 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
959 int width = GET_MODE_BITSIZE (oldmode);
961 /* We need to zero extend VAL. */
962 val &= ((HOST_WIDE_INT) 1 << width) - 1;
965 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
968 /* We can do this with a gen_lowpart if both desired and current modes
969 are integer, and this is either a constant integer, a register, or a
970 non-volatile MEM. Except for the constant case where MODE is no
971 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
973 if ((GET_CODE (x) == CONST_INT
974 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
975 || (GET_MODE_CLASS (mode) == MODE_INT
976 && GET_MODE_CLASS (oldmode) == MODE_INT
977 && (GET_CODE (x) == CONST_DOUBLE
978 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
979 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
980 && direct_load[(int) mode])
981 || (REG_P (x)
982 && (! HARD_REGISTER_P (x)
983 || HARD_REGNO_MODE_OK (REGNO (x), mode))
984 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
985 GET_MODE_BITSIZE (GET_MODE (x)))))))))
987 /* ?? If we don't know OLDMODE, we have to assume here that
988 X does not need sign- or zero-extension. This may not be
989 the case, but it's the best we can do. */
990 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
991 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
993 HOST_WIDE_INT val = INTVAL (x);
994 int width = GET_MODE_BITSIZE (oldmode);
996 /* We must sign or zero-extend in this case. Start by
997 zero-extending, then sign extend if we need to. */
998 val &= ((HOST_WIDE_INT) 1 << width) - 1;
999 if (! unsignedp
1000 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1001 val |= (HOST_WIDE_INT) (-1) << width;
1003 return gen_int_mode (val, mode);
1006 return gen_lowpart (mode, x);
1009 /* Converting from an integer constant into a mode is always equivalent to a
1010 subreg operation. */
1011 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1013 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1014 abort ();
1015 return simplify_gen_subreg (mode, x, oldmode, 0);
1018 temp = gen_reg_rtx (mode);
1019 convert_move (temp, x, unsignedp);
1020 return temp;
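/* Illustrative usage sketch (not part of the original file): unlike
   convert_move, convert_to_mode and convert_modes return an rtx instead
   of storing into an existing one, so a typical call is

       rtx wide = convert_to_mode (word_mode, narrow, unsignedp);

   where NARROW keeps its own mode and WIDE is either a piece of NARROW
   referenced in place or a fresh pseudo holding the converted value.  */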
1023 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1024 store efficiently. Due to internal GCC limitations, this is
1025 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1026 for an immediate constant. */
1028 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1030 /* Determine whether the LEN bytes can be moved by using several move
1031 instructions. Return nonzero if a call to move_by_pieces should
1032 succeed. */
1034 int
1035 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1036 unsigned int align ATTRIBUTE_UNUSED)
1038 return MOVE_BY_PIECES_P (len, align);
1041 /* Generate several move instructions to copy LEN bytes from block FROM to
1042 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1043 and TO through protect_from_queue before calling.
1045 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1046 used to push FROM to the stack.
1048 ALIGN is maximum stack alignment we can assume.
1050 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1051 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1052 stpcpy. */
1054 rtx
1055 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1056 unsigned int align, int endp)
1058 struct move_by_pieces data;
1059 rtx to_addr, from_addr = XEXP (from, 0);
1060 unsigned int max_size = MOVE_MAX_PIECES + 1;
1061 enum machine_mode mode = VOIDmode, tmode;
1062 enum insn_code icode;
1064 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1066 data.offset = 0;
1067 data.from_addr = from_addr;
1068 if (to)
1070 to_addr = XEXP (to, 0);
1071 data.to = to;
1072 data.autinc_to
1073 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1074 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1075 data.reverse
1076 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1078 else
1080 to_addr = NULL_RTX;
1081 data.to = NULL_RTX;
1082 data.autinc_to = 1;
1083 #ifdef STACK_GROWS_DOWNWARD
1084 data.reverse = 1;
1085 #else
1086 data.reverse = 0;
1087 #endif
1089 data.to_addr = to_addr;
1090 data.from = from;
1091 data.autinc_from
1092 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1093 || GET_CODE (from_addr) == POST_INC
1094 || GET_CODE (from_addr) == POST_DEC);
1096 data.explicit_inc_from = 0;
1097 data.explicit_inc_to = 0;
1098 if (data.reverse) data.offset = len;
1099 data.len = len;
1101 /* If copying requires more than two move insns,
1102 copy addresses to registers (to make displacements shorter)
1103 and use post-increment if available. */
1104 if (!(data.autinc_from && data.autinc_to)
1105 && move_by_pieces_ninsns (len, align) > 2)
1107 /* Find the mode of the largest move... */
1108 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1109 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1110 if (GET_MODE_SIZE (tmode) < max_size)
1111 mode = tmode;
1113 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1115 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1116 data.autinc_from = 1;
1117 data.explicit_inc_from = -1;
1119 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1121 data.from_addr = copy_addr_to_reg (from_addr);
1122 data.autinc_from = 1;
1123 data.explicit_inc_from = 1;
1125 if (!data.autinc_from && CONSTANT_P (from_addr))
1126 data.from_addr = copy_addr_to_reg (from_addr);
1127 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1129 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1130 data.autinc_to = 1;
1131 data.explicit_inc_to = -1;
1133 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1135 data.to_addr = copy_addr_to_reg (to_addr);
1136 data.autinc_to = 1;
1137 data.explicit_inc_to = 1;
1139 if (!data.autinc_to && CONSTANT_P (to_addr))
1140 data.to_addr = copy_addr_to_reg (to_addr);
1143 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1144 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1145 align = MOVE_MAX * BITS_PER_UNIT;
1147 /* First move what we can in the largest integer mode, then go to
1148 successively smaller modes. */
1150 while (max_size > 1)
1152 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1153 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1154 if (GET_MODE_SIZE (tmode) < max_size)
1155 mode = tmode;
1157 if (mode == VOIDmode)
1158 break;
1160 icode = mov_optab->handlers[(int) mode].insn_code;
1161 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1162 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1164 max_size = GET_MODE_SIZE (mode);
1167 /* The code above should have handled everything. */
1168 if (data.len > 0)
1169 abort ();
1171 if (endp)
1173 rtx to1;
1175 if (data.reverse)
1176 abort ();
1177 if (data.autinc_to)
1179 if (endp == 2)
1181 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1182 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1183 else
1184 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1185 -1));
1187 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1188 data.offset);
1190 else
1192 if (endp == 2)
1193 --data.offset;
1194 to1 = adjust_address (data.to, QImode, data.offset);
1196 return to1;
1198 else
1199 return data.to;
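/* Illustrative note (not part of the original file): emit_block_move
   below reaches this routine only for small constant sizes, roughly

       if (GET_CODE (size) == CONST_INT
           && MOVE_BY_PIECES_P (INTVAL (size), align))
         move_by_pieces (x, y, INTVAL (size), align, 0);

   with ENDP == 0 giving the plain memcpy-style behavior described in the
   comment before move_by_pieces.  */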
1202 /* Return number of insns required to move L bytes by pieces.
1203 ALIGN (in bits) is maximum alignment we can assume. */
1205 static unsigned HOST_WIDE_INT
1206 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1208 unsigned HOST_WIDE_INT n_insns = 0;
1209 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1211 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1212 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1213 align = MOVE_MAX * BITS_PER_UNIT;
1215 while (max_size > 1)
1217 enum machine_mode mode = VOIDmode, tmode;
1218 enum insn_code icode;
1220 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1221 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1222 if (GET_MODE_SIZE (tmode) < max_size)
1223 mode = tmode;
1225 if (mode == VOIDmode)
1226 break;
1228 icode = mov_optab->handlers[(int) mode].insn_code;
1229 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1230 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1232 max_size = GET_MODE_SIZE (mode);
1235 if (l)
1236 abort ();
1237 return n_insns;
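/* Worked example (illustrative, assuming MOVE_MAX == 4 and alignment good
   enough for word access): for L == 7 the loop above picks one SImode move
   (4 bytes), then one HImode move (2 bytes), then one QImode move (1 byte),
   so move_by_pieces_ninsns returns 3.  */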
1240 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1241 with move instructions for mode MODE. GENFUN is the gen_... function
1242 to make a move insn for that mode. DATA has all the other info. */
1244 static void
1245 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1246 struct move_by_pieces *data)
1248 unsigned int size = GET_MODE_SIZE (mode);
1249 rtx to1 = NULL_RTX, from1;
1251 while (data->len >= size)
1253 if (data->reverse)
1254 data->offset -= size;
1256 if (data->to)
1258 if (data->autinc_to)
1259 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1260 data->offset);
1261 else
1262 to1 = adjust_address (data->to, mode, data->offset);
1265 if (data->autinc_from)
1266 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1267 data->offset);
1268 else
1269 from1 = adjust_address (data->from, mode, data->offset);
1271 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1272 emit_insn (gen_add2_insn (data->to_addr,
1273 GEN_INT (-(HOST_WIDE_INT)size)));
1274 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1275 emit_insn (gen_add2_insn (data->from_addr,
1276 GEN_INT (-(HOST_WIDE_INT)size)));
1278 if (data->to)
1279 emit_insn ((*genfun) (to1, from1));
1280 else
1282 #ifdef PUSH_ROUNDING
1283 emit_single_push_insn (mode, from1, NULL);
1284 #else
1285 abort ();
1286 #endif
1289 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1290 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1291 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1292 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1294 if (! data->reverse)
1295 data->offset += size;
1297 data->len -= size;
1301 /* Emit code to move a block Y to a block X. This may be done with
1302 string-move instructions, with multiple scalar move instructions,
1303 or with a library call.
1305 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1306 SIZE is an rtx that says how long they are.
1307 ALIGN is the maximum alignment we can assume they have.
1308 METHOD describes what kind of copy this is, and what mechanisms may be used.
1310 Return the address of the new block, if memcpy is called and returns it,
1311 0 otherwise. */
1313 rtx
1314 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1316 bool may_use_call;
1317 rtx retval = 0;
1318 unsigned int align;
1320 switch (method)
1322 case BLOCK_OP_NORMAL:
1323 may_use_call = true;
1324 break;
1326 case BLOCK_OP_CALL_PARM:
1327 may_use_call = block_move_libcall_safe_for_call_parm ();
1329 /* Make inhibit_defer_pop nonzero around the library call
1330 to force it to pop the arguments right away. */
1331 NO_DEFER_POP;
1332 break;
1334 case BLOCK_OP_NO_LIBCALL:
1335 may_use_call = false;
1336 break;
1338 default:
1339 abort ();
1342 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1344 x = protect_from_queue (x, 1);
1345 y = protect_from_queue (y, 0);
1346 size = protect_from_queue (size, 0);
1348 if (!MEM_P (x))
1349 abort ();
1350 if (!MEM_P (y))
1351 abort ();
1352 if (size == 0)
1353 abort ();
1355 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1356 block copy is more efficient for other large modes, e.g. DCmode. */
1357 x = adjust_address (x, BLKmode, 0);
1358 y = adjust_address (y, BLKmode, 0);
1360 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1361 can be incorrect is coming from __builtin_memcpy. */
1362 if (GET_CODE (size) == CONST_INT)
1364 if (INTVAL (size) == 0)
1365 return 0;
1367 x = shallow_copy_rtx (x);
1368 y = shallow_copy_rtx (y);
1369 set_mem_size (x, size);
1370 set_mem_size (y, size);
1373 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1374 move_by_pieces (x, y, INTVAL (size), align, 0);
1375 else if (emit_block_move_via_movmem (x, y, size, align))
1377 else if (may_use_call)
1378 retval = emit_block_move_via_libcall (x, y, size);
1379 else
1380 emit_block_move_via_loop (x, y, size, align);
1382 if (method == BLOCK_OP_CALL_PARM)
1383 OK_DEFER_POP;
1385 return retval;
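/* Illustrative usage sketch (not part of the original file): copying a
   BLKmode aggregate whose size is known from its tree type.  The helper
   below is hypothetical and compiled out.  */
#if 0
static void
example_copy_aggregate (rtx dst, rtx src, tree type)
{
  rtx size = GEN_INT (int_size_in_bytes (type));
  /* BLOCK_OP_NORMAL allows any strategy, including a memcpy libcall;
     callers expanding memcpy itself pass BLOCK_OP_NO_LIBCALL instead.  */
  emit_block_move (dst, src, size, BLOCK_OP_NORMAL);
}
#endif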
1388 /* A subroutine of emit_block_move. Returns true if calling the
1389 block move libcall will not clobber any parameters which may have
1390 already been placed on the stack. */
1392 static bool
1393 block_move_libcall_safe_for_call_parm (void)
1395 /* If arguments are pushed on the stack, then they're safe. */
1396 if (PUSH_ARGS)
1397 return true;
1399 /* If registers go on the stack anyway, any argument is sure to clobber
1400 an outgoing argument. */
1401 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1403 tree fn = emit_block_move_libcall_fn (false);
1404 (void) fn;
1405 if (REG_PARM_STACK_SPACE (fn) != 0)
1406 return false;
1408 #endif
1410 /* If any argument goes in memory, then it might clobber an outgoing
1411 argument. */
1413 CUMULATIVE_ARGS args_so_far;
1414 tree fn, arg;
1416 fn = emit_block_move_libcall_fn (false);
1417 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1419 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1420 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1422 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1423 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1424 if (!tmp || !REG_P (tmp))
1425 return false;
1426 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1427 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1428 NULL_TREE, 1))
1429 return false;
1430 #endif
1431 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1434 return true;
1437 /* A subroutine of emit_block_move. Expand a movmem pattern;
1438 return true if successful. */
1440 static bool
1441 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1443 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1444 int save_volatile_ok = volatile_ok;
1445 enum machine_mode mode;
1447 /* Since this is a move insn, we don't care about volatility. */
1448 volatile_ok = 1;
1450 /* Try the most limited insn first, because there's no point
1451 including more than one in the machine description unless
1452 the more limited one has some advantage. */
1454 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1455 mode = GET_MODE_WIDER_MODE (mode))
1457 enum insn_code code = movmem_optab[(int) mode];
1458 insn_operand_predicate_fn pred;
1460 if (code != CODE_FOR_nothing
1461 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1462 here because if SIZE is less than the mode mask, as it is
1463 returned by the macro, it will definitely be less than the
1464 actual mode mask. */
1465 && ((GET_CODE (size) == CONST_INT
1466 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1467 <= (GET_MODE_MASK (mode) >> 1)))
1468 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1469 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1470 || (*pred) (x, BLKmode))
1471 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1472 || (*pred) (y, BLKmode))
1473 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1474 || (*pred) (opalign, VOIDmode)))
1476 rtx op2;
1477 rtx last = get_last_insn ();
1478 rtx pat;
1480 op2 = convert_to_mode (mode, size, 1);
1481 pred = insn_data[(int) code].operand[2].predicate;
1482 if (pred != 0 && ! (*pred) (op2, mode))
1483 op2 = copy_to_mode_reg (mode, op2);
1485 /* ??? When called via emit_block_move_for_call, it'd be
1486 nice if there were some way to inform the backend, so
1487 that it doesn't fail the expansion because it thinks
1488 emitting the libcall would be more efficient. */
1490 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1491 if (pat)
1493 emit_insn (pat);
1494 volatile_ok = save_volatile_ok;
1495 return true;
1497 else
1498 delete_insns_since (last);
1502 volatile_ok = save_volatile_ok;
1503 return false;
1506 /* A subroutine of emit_block_move. Expand a call to memcpy.
1507 Return the return value from memcpy, 0 otherwise. */
1509 static rtx
1510 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1512 rtx dst_addr, src_addr;
1513 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1514 enum machine_mode size_mode;
1515 rtx retval;
1517 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1519 It is unsafe to save the value generated by protect_from_queue and reuse
1520 it later. Consider what happens if emit_queue is called before the
1521 return value from protect_from_queue is used.
1523 Expansion of the CALL_EXPR below will call emit_queue before we are
1524 finished emitting RTL for argument setup. So if we are not careful we
1525 could get the wrong value for an argument.
1527 To avoid this problem we go ahead and emit code to copy the addresses of
1528 DST and SRC and SIZE into new pseudos.
1530 Note this is not strictly needed for library calls since they do not call
1531 emit_queue before loading their arguments. However, we may need to have
1532 library calls call emit_queue in the future since failing to do so could
1533 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1534 arguments in registers. */
1536 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1537 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1539 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1540 src_addr = convert_memory_address (ptr_mode, src_addr);
1542 dst_tree = make_tree (ptr_type_node, dst_addr);
1543 src_tree = make_tree (ptr_type_node, src_addr);
1545 size_mode = TYPE_MODE (sizetype);
1547 size = convert_to_mode (size_mode, size, 1);
1548 size = copy_to_mode_reg (size_mode, size);
1550 /* It is incorrect to use the libcall calling conventions to call
1551 memcpy in this context. This could be a user call to memcpy and
1552 the user may wish to examine the return value from memcpy. For
1553 targets where libcalls and normal calls have different conventions
1554 for returning pointers, we could end up generating incorrect code. */
1556 size_tree = make_tree (sizetype, size);
1558 fn = emit_block_move_libcall_fn (true);
1559 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1560 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1561 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1563 /* Now we have to build up the CALL_EXPR itself. */
1564 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1565 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1566 call_expr, arg_list, NULL_TREE);
1568 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1570 /* If we are initializing a readonly value, show the above call clobbered
1571 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1572 the delay slot scheduler might overlook conflicts and take nasty
1573 decisions. */
1574 if (RTX_UNCHANGING_P (dst))
1575 add_function_usage_to
1576 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1577 gen_rtx_CLOBBER (VOIDmode, dst),
1578 NULL_RTX));
1580 return retval;
1583 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1584 for the function we use for block copies. The first time FOR_CALL
1585 is true, we call assemble_external. */
1587 static GTY(()) tree block_move_fn;
1589 void
1590 init_block_move_fn (const char *asmspec)
1592 if (!block_move_fn)
1594 tree args, fn;
1596 fn = get_identifier ("memcpy");
1597 args = build_function_type_list (ptr_type_node, ptr_type_node,
1598 const_ptr_type_node, sizetype,
1599 NULL_TREE);
1601 fn = build_decl (FUNCTION_DECL, fn, args);
1602 DECL_EXTERNAL (fn) = 1;
1603 TREE_PUBLIC (fn) = 1;
1604 DECL_ARTIFICIAL (fn) = 1;
1605 TREE_NOTHROW (fn) = 1;
1607 block_move_fn = fn;
1610 if (asmspec)
1612 SET_DECL_RTL (block_move_fn, NULL_RTX);
1613 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1617 static tree
1618 emit_block_move_libcall_fn (int for_call)
1620 static bool emitted_extern;
1622 if (!block_move_fn)
1623 init_block_move_fn (NULL);
1625 if (for_call && !emitted_extern)
1627 emitted_extern = true;
1628 make_decl_rtl (block_move_fn, NULL);
1629 assemble_external (block_move_fn);
1632 return block_move_fn;
1635 /* A subroutine of emit_block_move. Copy the data via an explicit
1636 loop. This is used only when libcalls are forbidden. */
1637 /* ??? It'd be nice to copy in hunks larger than QImode. */
1639 static void
1640 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1641 unsigned int align ATTRIBUTE_UNUSED)
1643 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1644 enum machine_mode iter_mode;
1646 iter_mode = GET_MODE (size);
1647 if (iter_mode == VOIDmode)
1648 iter_mode = word_mode;
1650 top_label = gen_label_rtx ();
1651 cmp_label = gen_label_rtx ();
1652 iter = gen_reg_rtx (iter_mode);
1654 emit_move_insn (iter, const0_rtx);
1656 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1657 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1658 do_pending_stack_adjust ();
1660 emit_jump (cmp_label);
1661 emit_label (top_label);
1663 tmp = convert_modes (Pmode, iter_mode, iter, true);
1664 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1665 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1666 x = change_address (x, QImode, x_addr);
1667 y = change_address (y, QImode, y_addr);
1669 emit_move_insn (x, y);
1671 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1672 true, OPTAB_LIB_WIDEN);
1673 if (tmp != iter)
1674 emit_move_insn (iter, tmp);
1676 emit_label (cmp_label);
1678 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1679 true, top_label);
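/* Illustrative note (not part of the original file): the RTL emitted above
   behaves like the C fragment

       for (iter = 0; iter < size; iter++)
         ((char *) x)[iter] = ((char *) y)[iter];

   with the comparison placed after the body and entered through the
   initial jump to CMP_LABEL, so a zero SIZE copies nothing.  */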
1682 /* Copy all or part of a value X into registers starting at REGNO.
1683 The number of registers to be filled is NREGS. */
1685 void
1686 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1688 int i;
1689 #ifdef HAVE_load_multiple
1690 rtx pat;
1691 rtx last;
1692 #endif
1694 if (nregs == 0)
1695 return;
1697 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1698 x = validize_mem (force_const_mem (mode, x));
1700 /* See if the machine can do this with a load multiple insn. */
1701 #ifdef HAVE_load_multiple
1702 if (HAVE_load_multiple)
1704 last = get_last_insn ();
1705 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1706 GEN_INT (nregs));
1707 if (pat)
1709 emit_insn (pat);
1710 return;
1712 else
1713 delete_insns_since (last);
1715 #endif
1717 for (i = 0; i < nregs; i++)
1718 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1719 operand_subword_force (x, i, mode));
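/* Illustrative usage sketch (not part of the original file): a caller
   passing part of an argument in registers might load NREGS consecutive
   hard registers starting at REGNO from a stack slot X with

       move_block_to_reg (regno, x, nregs, TYPE_MODE (type));

   The use of TYPE_MODE here is only an example of where the mode argument
   might come from.  */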
1722 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1723 The number of registers to be filled is NREGS. */
1725 void
1726 move_block_from_reg (int regno, rtx x, int nregs)
1728 int i;
1730 if (nregs == 0)
1731 return;
1733 /* See if the machine can do this with a store multiple insn. */
1734 #ifdef HAVE_store_multiple
1735 if (HAVE_store_multiple)
1737 rtx last = get_last_insn ();
1738 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1739 GEN_INT (nregs));
1740 if (pat)
1742 emit_insn (pat);
1743 return;
1745 else
1746 delete_insns_since (last);
1748 #endif
1750 for (i = 0; i < nregs; i++)
1752 rtx tem = operand_subword (x, i, 1, BLKmode);
1754 if (tem == 0)
1755 abort ();
1757 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1761 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1762 ORIG, where ORIG is a non-consecutive group of registers represented by
1763 a PARALLEL. The clone is identical to the original except in that the
1764 original set of registers is replaced by a new set of pseudo registers.
1765 The new set has the same modes as the original set. */
1767 rtx
1768 gen_group_rtx (rtx orig)
1770 int i, length;
1771 rtx *tmps;
1773 if (GET_CODE (orig) != PARALLEL)
1774 abort ();
1776 length = XVECLEN (orig, 0);
1777 tmps = alloca (sizeof (rtx) * length);
1779 /* Skip a NULL entry in first slot. */
1780 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1782 if (i)
1783 tmps[0] = 0;
1785 for (; i < length; i++)
1787 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1788 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1790 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1793 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1796 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1797 where DST is non-consecutive registers represented by a PARALLEL.
1798 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1799 if not known. */
1801 void
1802 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1804 rtx *tmps, src;
1805 int start, i;
1807 if (GET_CODE (dst) != PARALLEL)
1808 abort ();
1810 /* Check for a NULL entry, used to indicate that the parameter goes
1811 both on the stack and in registers. */
1812 if (XEXP (XVECEXP (dst, 0, 0), 0))
1813 start = 0;
1814 else
1815 start = 1;
1817 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1819 /* Process the pieces. */
1820 for (i = start; i < XVECLEN (dst, 0); i++)
1822 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1823 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1824 unsigned int bytelen = GET_MODE_SIZE (mode);
1825 int shift = 0;
1827 /* Handle trailing fragments that run over the size of the struct. */
1828 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1830 /* Arrange to shift the fragment to where it belongs.
1831 extract_bit_field loads to the lsb of the reg. */
1832 if (
1833 #ifdef BLOCK_REG_PADDING
1834 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1835 == (BYTES_BIG_ENDIAN ? upward : downward)
1836 #else
1837 BYTES_BIG_ENDIAN
1838 #endif
1840 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1841 bytelen = ssize - bytepos;
1842 if (bytelen <= 0)
1843 abort ();
1846 /* If we won't be loading directly from memory, protect the real source
1847 from strange tricks we might play; but make sure that the source can
1848 be loaded directly into the destination. */
1849 src = orig_src;
1850 if (!MEM_P (orig_src)
1851 && (!CONSTANT_P (orig_src)
1852 || (GET_MODE (orig_src) != mode
1853 && GET_MODE (orig_src) != VOIDmode)))
1855 if (GET_MODE (orig_src) == VOIDmode)
1856 src = gen_reg_rtx (mode);
1857 else
1858 src = gen_reg_rtx (GET_MODE (orig_src));
1860 emit_move_insn (src, orig_src);
1863 /* Optimize the access just a bit. */
1864 if (MEM_P (src)
1865 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1866 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1867 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1868 && bytelen == GET_MODE_SIZE (mode))
1870 tmps[i] = gen_reg_rtx (mode);
1871 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1873 else if (GET_CODE (src) == CONCAT)
1875 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1876 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1878 if ((bytepos == 0 && bytelen == slen0)
1879 || (bytepos != 0 && bytepos + bytelen <= slen))
1881 /* The following assumes that the concatenated objects all
1882 have the same size. In this case, a simple calculation
1883 can be used to determine the object and the bit field
1884 to be extracted. */
1885 tmps[i] = XEXP (src, bytepos / slen0);
1886 if (! CONSTANT_P (tmps[i])
1887 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1888 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1889 (bytepos % slen0) * BITS_PER_UNIT,
1890 1, NULL_RTX, mode, mode, ssize);
1892 else if (bytepos == 0)
1894 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1895 emit_move_insn (mem, src);
1896 tmps[i] = adjust_address (mem, mode, 0);
1898 else
1899 abort ();
1901 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1902 SIMD register, which is currently broken.  Until we get GCC
1903 to emit proper RTL for these cases, just dump to memory. */
1904 else if (VECTOR_MODE_P (GET_MODE (dst))
1905 && REG_P (src))
1907 int slen = GET_MODE_SIZE (GET_MODE (src));
1908 rtx mem;
1910 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1911 emit_move_insn (mem, src);
1912 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1914 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1915 && XVECLEN (dst, 0) > 1)
1916 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1917 else if (CONSTANT_P (src)
1918 || (REG_P (src) && GET_MODE (src) == mode))
1919 tmps[i] = src;
1920 else
1921 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1922 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1923 mode, mode, ssize);
1925 if (shift)
1926 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1927 build_int_2 (shift, 0), tmps[i], 0);
1930 emit_queue ();
1932 /* Copy the extracted pieces into the proper (probable) hard regs. */
1933 for (i = start; i < XVECLEN (dst, 0); i++)
1934 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
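/* A minimal usage sketch, not taken from any real target: the register
   numbers, modes and the 16-byte size below are assumptions.  A PARALLEL
   describing a value split across two DImode hard registers can be loaded
   from a BLKmode MEM like so:

     rtx par = gen_rtx_PARALLEL
       (BLKmode,
        gen_rtvec (2,
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 0),
                                      GEN_INT (0)),
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 1),
                                      GEN_INT (8))));
     emit_group_load (par, src_mem, valtype, 16);

   SRC_MEM and VALTYPE are assumed to be supplied by the caller.  */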
1937 /* Emit code to move a block SRC to block DST, where SRC and DST are
1938 non-consecutive groups of registers, each represented by a PARALLEL. */
1940 void
1941 emit_group_move (rtx dst, rtx src)
1943 int i;
1945 if (GET_CODE (src) != PARALLEL
1946 || GET_CODE (dst) != PARALLEL
1947 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1948 abort ();
1950 /* Skip first entry if NULL. */
1951 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1952 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1953 XEXP (XVECEXP (src, 0, i), 0));
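/* A usage sketch, illustrative only: given two PARALLELs with identical
   layout, say one naming hard registers and one naming pseudos of the
   same modes at the same byte offsets, the entries are copied pairwise
   with

     emit_group_move (dst_par, src_par);

   The names DST_PAR and SRC_PAR are hypothetical.  */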
1956 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1957 where SRC is non-consecutive registers represented by a PARALLEL.
1958 SSIZE represents the total size of block ORIG_DST, or -1 if not
1959 known. */
1961 void
1962 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1964 rtx *tmps, dst;
1965 int start, i;
1967 if (GET_CODE (src) != PARALLEL)
1968 abort ();
1970 /* Check for a NULL entry, used to indicate that the parameter goes
1971 both on the stack and in registers. */
1972 if (XEXP (XVECEXP (src, 0, 0), 0))
1973 start = 0;
1974 else
1975 start = 1;
1977 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1979 /* Copy the (probable) hard regs into pseudos. */
1980 for (i = start; i < XVECLEN (src, 0); i++)
1982 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1983 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1984 emit_move_insn (tmps[i], reg);
1986 emit_queue ();
1988 /* If we won't be storing directly into memory, protect the real destination
1989 from strange tricks we might play. */
1990 dst = orig_dst;
1991 if (GET_CODE (dst) == PARALLEL)
1993 rtx temp;
1995 /* We can get a PARALLEL dst if there is a conditional expression in
1996 a return statement. In that case, the dst and src are the same,
1997 so no action is necessary. */
1998 if (rtx_equal_p (dst, src))
1999 return;
2001 /* It is unclear if we can ever reach here, but we may as well handle
2002 it. Allocate a temporary, and split this into a store/load to/from
2003 the temporary. */
2005 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2006 emit_group_store (temp, src, type, ssize);
2007 emit_group_load (dst, temp, type, ssize);
2008 return;
2010 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2012 dst = gen_reg_rtx (GET_MODE (orig_dst));
2013 /* Make life a bit easier for combine. */
2014 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2017 /* Process the pieces. */
2018 for (i = start; i < XVECLEN (src, 0); i++)
2020 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2021 enum machine_mode mode = GET_MODE (tmps[i]);
2022 unsigned int bytelen = GET_MODE_SIZE (mode);
2023 rtx dest = dst;
2025 /* Handle trailing fragments that run over the size of the struct. */
2026 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2028 /* store_bit_field always takes its value from the lsb.
2029 Move the fragment to the lsb if it's not already there. */
2030 if (
2031 #ifdef BLOCK_REG_PADDING
2032 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2033 == (BYTES_BIG_ENDIAN ? upward : downward)
2034 #else
2035 BYTES_BIG_ENDIAN
2036 #endif
2039 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2040 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2041 build_int_2 (shift, 0), tmps[i], 0);
2043 bytelen = ssize - bytepos;
2046 if (GET_CODE (dst) == CONCAT)
2048 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2049 dest = XEXP (dst, 0);
2050 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2052 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2053 dest = XEXP (dst, 1);
2055 else if (bytepos == 0 && XVECLEN (src, 0))
2057 dest = assign_stack_temp (GET_MODE (dest),
2058 GET_MODE_SIZE (GET_MODE (dest)), 0);
2059 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2060 tmps[i]);
2061 dst = dest;
2062 break;
2064 else
2065 abort ();
2068 /* Optimize the access just a bit. */
2069 if (MEM_P (dest)
2070 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2071 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2072 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2073 && bytelen == GET_MODE_SIZE (mode))
2074 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2075 else
2076 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2077 mode, tmps[i], ssize);
2080 emit_queue ();
2082 /* Copy from the pseudo into the (probable) hard reg. */
2083 if (orig_dst != dst)
2084 emit_move_insn (orig_dst, dst);
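/* A usage sketch, illustrative only and mirroring the load case above:
   assuming PAR is a PARALLEL of two DImode registers at byte offsets 0
   and 8, and DST_MEM is a BLKmode MEM of at least 16 bytes,

     emit_group_store (dst_mem, par, valtype, 16);

   copies the registers back into memory.  The SSIZE argument (16 here)
   tells the trailing-fragment code how many bytes really exist.  */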
2087 /* Generate code to copy a BLKmode object of TYPE out of a
2088 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2089 is null, a stack temporary is created. TGTBLK is returned.
2091 The purpose of this routine is to handle functions that return
2092 BLKmode structures in registers. Some machines (the PA for example)
2093 want to return all small structures in registers regardless of the
2094 structure's alignment. */
2097 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2099 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2100 rtx src = NULL, dst = NULL;
2101 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2102 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2104 if (tgtblk == 0)
2106 tgtblk = assign_temp (build_qualified_type (type,
2107 (TYPE_QUALS (type)
2108 | TYPE_QUAL_CONST)),
2109 0, 1, 1);
2110 preserve_temp_slots (tgtblk);
2113 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2114 into a new pseudo which is a full word. */
2116 if (GET_MODE (srcreg) != BLKmode
2117 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2118 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2120 /* If the structure doesn't take up a whole number of words, see whether
2121 SRCREG is padded on the left or on the right. If it's on the left,
2122 set PADDING_CORRECTION to the number of bits to skip.
2124 In most ABIs, the structure will be returned at the least significant end of
2125 the register, which translates to right padding on little-endian
2126 targets and left padding on big-endian targets. The opposite
2127 holds if the structure is returned at the most significant
2128 end of the register. */
2129 if (bytes % UNITS_PER_WORD != 0
2130 && (targetm.calls.return_in_msb (type)
2131 ? !BYTES_BIG_ENDIAN
2132 : BYTES_BIG_ENDIAN))
2133 padding_correction
2134 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2136 /* Copy the structure BITSIZE bits at a time.
2138 We could probably emit more efficient code for machines which do not use
2139 strict alignment, but it doesn't seem worth the effort at the current
2140 time. */
2141 for (bitpos = 0, xbitpos = padding_correction;
2142 bitpos < bytes * BITS_PER_UNIT;
2143 bitpos += bitsize, xbitpos += bitsize)
2145 /* We need a new source operand each time xbitpos is on a
2146 word boundary and when xbitpos == padding_correction
2147 (the first time through). */
2148 if (xbitpos % BITS_PER_WORD == 0
2149 || xbitpos == padding_correction)
2150 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2151 GET_MODE (srcreg));
2153 /* We need a new destination operand each time bitpos is on
2154 a word boundary. */
2155 if (bitpos % BITS_PER_WORD == 0)
2156 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2158 /* Use xbitpos for the source extraction (right justified) and
2159 bitpos for the destination store (left justified). */
2160 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2161 extract_bit_field (src, bitsize,
2162 xbitpos % BITS_PER_WORD, 1,
2163 NULL_RTX, word_mode, word_mode,
2164 BITS_PER_WORD),
2165 BITS_PER_WORD);
2168 return tgtblk;
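/* A usage sketch, illustrative only: a caller expanding a call to a
   function returning a small BLKmode structure in a register might do

     rtx value_reg = hard_function_value (rettype, fndecl, 0);
     rtx blk = copy_blkmode_from_reg (NULL_RTX, value_reg, rettype);

   Passing NULL_RTX for TGTBLK lets this routine allocate the stack
   temporary itself.  RETTYPE and FNDECL are assumed to come from the
   caller.  */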
2171 /* Add a USE expression for REG to the (possibly empty) list pointed
2172 to by CALL_FUSAGE. REG must denote a hard register. */
2174 void
2175 use_reg (rtx *call_fusage, rtx reg)
2177 if (!REG_P (reg)
2178 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2179 abort ();
2181 *call_fusage
2182 = gen_rtx_EXPR_LIST (VOIDmode,
2183 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2186 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
2189 void
2190 use_regs (rtx *call_fusage, int regno, int nregs)
2192 int i;
2194 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2195 abort ();
2197 for (i = 0; i < nregs; i++)
2198 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2201 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2202 PARALLEL REGS. This is for calls that pass values in multiple
2203 non-contiguous locations. The Irix 6 ABI has examples of this. */
2205 void
2206 use_group_regs (rtx *call_fusage, rtx regs)
2208 int i;
2210 for (i = 0; i < XVECLEN (regs, 0); i++)
2212 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2214 /* A NULL entry means the parameter goes both on the stack and in
2215 registers. This can also be a MEM for targets that pass values
2216 partially on the stack and partially in registers. */
2217 if (reg != 0 && REG_P (reg))
2218 use_reg (call_fusage, reg);
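/* A usage sketch, illustrative only; the register numbers are
   assumptions.  A caller collecting the hard registers referenced by a
   call builds the list roughly as

     rtx call_fusage = 0;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
     use_regs (&call_fusage, 4, 2);

   and later attaches CALL_FUSAGE to the CALL_INSN so that the register
   uses are visible to the rest of the compiler.  */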
2223 /* Determine whether the LEN bytes generated by CONSTFUN can be
2224 stored to memory using several move instructions. CONSTFUNDATA is
2225 a pointer which will be passed as an argument in every CONSTFUN call.
2226 ALIGN is the maximum alignment we can assume.  Return nonzero if a
2227 call to store_by_pieces should succeed. */
2230 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2231 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2232 void *constfundata, unsigned int align)
2234 unsigned HOST_WIDE_INT max_size, l;
2235 HOST_WIDE_INT offset = 0;
2236 enum machine_mode mode, tmode;
2237 enum insn_code icode;
2238 int reverse;
2239 rtx cst;
2241 if (len == 0)
2242 return 1;
2244 if (! STORE_BY_PIECES_P (len, align))
2245 return 0;
2247 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2248 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2249 align = MOVE_MAX * BITS_PER_UNIT;
2251 /* We would first store what we can in the largest integer mode, then go to
2252 successively smaller modes. */
2254 for (reverse = 0;
2255 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2256 reverse++)
2258 l = len;
2259 mode = VOIDmode;
2260 max_size = STORE_MAX_PIECES + 1;
2261 while (max_size > 1)
2263 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2264 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2265 if (GET_MODE_SIZE (tmode) < max_size)
2266 mode = tmode;
2268 if (mode == VOIDmode)
2269 break;
2271 icode = mov_optab->handlers[(int) mode].insn_code;
2272 if (icode != CODE_FOR_nothing
2273 && align >= GET_MODE_ALIGNMENT (mode))
2275 unsigned int size = GET_MODE_SIZE (mode);
2277 while (l >= size)
2279 if (reverse)
2280 offset -= size;
2282 cst = (*constfun) (constfundata, offset, mode);
2283 if (!LEGITIMATE_CONSTANT_P (cst))
2284 return 0;
2286 if (!reverse)
2287 offset += size;
2289 l -= size;
2293 max_size = GET_MODE_SIZE (mode);
2296 /* The code above should have handled everything. */
2297 if (l != 0)
2298 abort ();
2301 return 1;
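/* A sketch of a CONSTFUN callback and its use, illustrative only; the
   name EXAMPLE_ZERO_CST is hypothetical.  The callback must return, for
   each OFFSET, an rtx of mode MODE holding the bytes to store there;
   returning const0_rtx yields an all-zero block, just as
   clear_by_pieces_1 below does:

     static rtx
     example_zero_cst (void *data ATTRIBUTE_UNUSED,
                       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                       enum machine_mode mode ATTRIBUTE_UNUSED)
     {
       return const0_rtx;
     }

     ...
     if (can_store_by_pieces (16, example_zero_cst, NULL, 32))
       store_by_pieces (to, 16, example_zero_cst, NULL, 32, 0);

   The length of 16 bytes and the 32-bit alignment are assumptions.  */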
2304 /* Generate several move instructions to store LEN bytes generated by
2305 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2306 pointer which will be passed as an argument in every CONSTFUN call.
2307 ALIGN is the maximum alignment we can assume.
2308 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
2309 as mempcpy does; and if ENDP is 2, return the memory at the end minus
2310 one byte, as stpcpy does. */
2313 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2314 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2315 void *constfundata, unsigned int align, int endp)
2317 struct store_by_pieces data;
2319 if (len == 0)
2321 if (endp == 2)
2322 abort ();
2323 return to;
2326 if (! STORE_BY_PIECES_P (len, align))
2327 abort ();
2328 to = protect_from_queue (to, 1);
2329 data.constfun = constfun;
2330 data.constfundata = constfundata;
2331 data.len = len;
2332 data.to = to;
2333 store_by_pieces_1 (&data, align);
2334 if (endp)
2336 rtx to1;
2338 if (data.reverse)
2339 abort ();
2340 if (data.autinc_to)
2342 if (endp == 2)
2344 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2345 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2346 else
2347 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2348 -1));
2350 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2351 data.offset);
2353 else
2355 if (endp == 2)
2356 --data.offset;
2357 to1 = adjust_address (data.to, QImode, data.offset);
2359 return to1;
2361 else
2362 return data.to;
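/* A follow-on sketch, illustrative only: with a nonzero ENDP the return
   value addresses the end of the stored block, which is how mempcpy and
   stpcpy style expansions use this routine:

     rtx past_end = store_by_pieces (to, 16, example_zero_cst, NULL, 32, 1);

   PAST_END is then a QImode MEM addressing the first byte after the
   block; with ENDP == 2 it would address the last byte written.  */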
2365 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2366 rtx with BLKmode). The caller must pass TO through protect_from_queue
2367 before calling. ALIGN is maximum alignment we can assume. */
2369 static void
2370 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2372 struct store_by_pieces data;
2374 if (len == 0)
2375 return;
2377 data.constfun = clear_by_pieces_1;
2378 data.constfundata = NULL;
2379 data.len = len;
2380 data.to = to;
2381 store_by_pieces_1 (&data, align);
2384 /* Callback routine for clear_by_pieces.
2385 Return const0_rtx unconditionally. */
2387 static rtx
2388 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2389 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2390 enum machine_mode mode ATTRIBUTE_UNUSED)
2392 return const0_rtx;
2395 /* Subroutine of clear_by_pieces and store_by_pieces.
2396 Generate several move instructions to store LEN bytes of block TO. (A MEM
2397 rtx with BLKmode). The caller must pass TO through protect_from_queue
2398 before calling. ALIGN is maximum alignment we can assume. */
2400 static void
2401 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2402 unsigned int align ATTRIBUTE_UNUSED)
2404 rtx to_addr = XEXP (data->to, 0);
2405 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2406 enum machine_mode mode = VOIDmode, tmode;
2407 enum insn_code icode;
2409 data->offset = 0;
2410 data->to_addr = to_addr;
2411 data->autinc_to
2412 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2413 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2415 data->explicit_inc_to = 0;
2416 data->reverse
2417 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2418 if (data->reverse)
2419 data->offset = data->len;
2421 /* If storing requires more than two move insns,
2422 copy addresses to registers (to make displacements shorter)
2423 and use post-increment if available. */
2424 if (!data->autinc_to
2425 && move_by_pieces_ninsns (data->len, align) > 2)
2427 /* Determine the main mode we'll be using. */
2428 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2429 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2430 if (GET_MODE_SIZE (tmode) < max_size)
2431 mode = tmode;
2433 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2435 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2436 data->autinc_to = 1;
2437 data->explicit_inc_to = -1;
2440 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2441 && ! data->autinc_to)
2443 data->to_addr = copy_addr_to_reg (to_addr);
2444 data->autinc_to = 1;
2445 data->explicit_inc_to = 1;
2448 if ( !data->autinc_to && CONSTANT_P (to_addr))
2449 data->to_addr = copy_addr_to_reg (to_addr);
2452 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2453 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2454 align = MOVE_MAX * BITS_PER_UNIT;
2456 /* First store what we can in the largest integer mode, then go to
2457 successively smaller modes. */
2459 while (max_size > 1)
2461 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2462 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2463 if (GET_MODE_SIZE (tmode) < max_size)
2464 mode = tmode;
2466 if (mode == VOIDmode)
2467 break;
2469 icode = mov_optab->handlers[(int) mode].insn_code;
2470 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2471 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2473 max_size = GET_MODE_SIZE (mode);
2476 /* The code above should have handled everything. */
2477 if (data->len != 0)
2478 abort ();
2481 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2482 with move instructions for mode MODE. GENFUN is the gen_... function
2483 to make a move insn for that mode. DATA has all the other info. */
2485 static void
2486 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2487 struct store_by_pieces *data)
2489 unsigned int size = GET_MODE_SIZE (mode);
2490 rtx to1, cst;
2492 while (data->len >= size)
2494 if (data->reverse)
2495 data->offset -= size;
2497 if (data->autinc_to)
2498 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2499 data->offset);
2500 else
2501 to1 = adjust_address (data->to, mode, data->offset);
2503 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2504 emit_insn (gen_add2_insn (data->to_addr,
2505 GEN_INT (-(HOST_WIDE_INT) size)));
2507 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2508 emit_insn ((*genfun) (to1, cst));
2510 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2511 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2513 if (! data->reverse)
2514 data->offset += size;
2516 data->len -= size;
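/* A worked example of the decomposition above, illustrative only and
   assuming a target with 4-byte words, STORE_MAX_PIECES == 4 and no
   slow unaligned accesses: storing LEN == 7 bytes uses one SImode piece
   for the first 4 bytes, then one HImode piece for the next 2, then one
   QImode piece for the final byte, so three move insns in total.  */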
2520 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2521 its length in bytes. */
2524 clear_storage (rtx object, rtx size)
2526 rtx retval = 0;
2527 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2528 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2530 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2531 just move a zero. Otherwise, do this a piece at a time. */
2532 if (GET_MODE (object) != BLKmode
2533 && GET_CODE (size) == CONST_INT
2534 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2535 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2536 else
2538 object = protect_from_queue (object, 1);
2539 size = protect_from_queue (size, 0);
2541 if (size == const0_rtx)
2543 else if (GET_CODE (size) == CONST_INT
2544 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2545 clear_by_pieces (object, INTVAL (size), align);
2546 else if (clear_storage_via_clrmem (object, size, align))
2548 else
2549 retval = clear_storage_via_libcall (object, size);
2552 return retval;
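/* A usage sketch, illustrative only; the 64-byte size is an assumption:
   zeroing a BLKmode stack temporary goes through this routine as

     rtx buf = assign_stack_temp (BLKmode, 64, 0);
     clear_storage (buf, GEN_INT (64));

   and will use clear_by_pieces, a clrmem pattern or a memset libcall
   depending on the size, the alignment and the target support checked
   above.  */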
2555 /* A subroutine of clear_storage. Expand a clrmem pattern;
2556 return true if successful. */
2558 static bool
2559 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2561 /* Try the most limited insn first, because there's no point
2562 including more than one in the machine description unless
2563 the more limited one has some advantage. */
2565 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2566 enum machine_mode mode;
2568 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2569 mode = GET_MODE_WIDER_MODE (mode))
2571 enum insn_code code = clrmem_optab[(int) mode];
2572 insn_operand_predicate_fn pred;
2574 if (code != CODE_FOR_nothing
2575 /* We don't need MODE to be narrower than
2576 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2577 the mode mask, as it is returned by the macro, it will
2578 definitely be less than the actual mode mask. */
2579 && ((GET_CODE (size) == CONST_INT
2580 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2581 <= (GET_MODE_MASK (mode) >> 1)))
2582 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2583 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2584 || (*pred) (object, BLKmode))
2585 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2586 || (*pred) (opalign, VOIDmode)))
2588 rtx op1;
2589 rtx last = get_last_insn ();
2590 rtx pat;
2592 op1 = convert_to_mode (mode, size, 1);
2593 pred = insn_data[(int) code].operand[1].predicate;
2594 if (pred != 0 && ! (*pred) (op1, mode))
2595 op1 = copy_to_mode_reg (mode, op1);
2597 pat = GEN_FCN ((int) code) (object, op1, opalign);
2598 if (pat)
2600 emit_insn (pat);
2601 return true;
2603 else
2604 delete_insns_since (last);
2608 return false;
2611 /* A subroutine of clear_storage. Expand a call to memset.
2612 Return the return value of memset, 0 otherwise. */
2614 static rtx
2615 clear_storage_via_libcall (rtx object, rtx size)
2617 tree call_expr, arg_list, fn, object_tree, size_tree;
2618 enum machine_mode size_mode;
2619 rtx retval;
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos.
2634 Note this is not strictly needed for library calls since they
2635 do not call emit_queue before loading their arguments. However,
2636 we may need to have library calls call emit_queue in the future
2637 since failing to do so could cause problems for targets which
2638 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2640 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2642 size_mode = TYPE_MODE (sizetype);
2643 size = convert_to_mode (size_mode, size, 1);
2644 size = copy_to_mode_reg (size_mode, size);
2646 /* It is incorrect to use the libcall calling conventions to call
2647 memset in this context. This could be a user call to memset and
2648 the user may wish to examine the return value from memset. For
2649 targets where libcalls and normal calls have different conventions
2650 for returning pointers, we could end up generating incorrect code. */
2652 object_tree = make_tree (ptr_type_node, object);
2653 size_tree = make_tree (sizetype, size);
2655 fn = clear_storage_libcall_fn (true);
2656 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2657 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2658 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2660 /* Now we have to build up the CALL_EXPR itself. */
2661 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2662 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2663 call_expr, arg_list, NULL_TREE);
2665 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2667 /* If we are initializing a readonly value, show the above call
2668 clobbered it. Otherwise, a load from it may erroneously be
2669 hoisted from a loop. */
2670 if (RTX_UNCHANGING_P (object))
2671 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2673 return retval;
2676 /* A subroutine of clear_storage_via_libcall. Create the tree node
2677 for the function we use for block clears. The first time FOR_CALL
2678 is true, we call assemble_external. */
2680 static GTY(()) tree block_clear_fn;
2682 void
2683 init_block_clear_fn (const char *asmspec)
2685 if (!block_clear_fn)
2687 tree fn, args;
2689 fn = get_identifier ("memset");
2690 args = build_function_type_list (ptr_type_node, ptr_type_node,
2691 integer_type_node, sizetype,
2692 NULL_TREE);
2694 fn = build_decl (FUNCTION_DECL, fn, args);
2695 DECL_EXTERNAL (fn) = 1;
2696 TREE_PUBLIC (fn) = 1;
2697 DECL_ARTIFICIAL (fn) = 1;
2698 TREE_NOTHROW (fn) = 1;
2700 block_clear_fn = fn;
2703 if (asmspec)
2705 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2706 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2710 static tree
2711 clear_storage_libcall_fn (int for_call)
2713 static bool emitted_extern;
2715 if (!block_clear_fn)
2716 init_block_clear_fn (NULL);
2718 if (for_call && !emitted_extern)
2720 emitted_extern = true;
2721 make_decl_rtl (block_clear_fn, NULL);
2722 assemble_external (block_clear_fn);
2725 return block_clear_fn;
2728 /* Generate code to copy Y into X.
2729 Both Y and X must have the same mode, except that
2730 Y can be a constant with VOIDmode.
2731 This mode cannot be BLKmode; use emit_block_move for that.
2733 Return the last instruction emitted. */
2736 emit_move_insn (rtx x, rtx y)
2738 enum machine_mode mode = GET_MODE (x);
2739 rtx y_cst = NULL_RTX;
2740 rtx last_insn, set;
2742 x = protect_from_queue (x, 1);
2743 y = protect_from_queue (y, 0);
2745 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2746 abort ();
2748 if (CONSTANT_P (y))
2750 if (optimize
2751 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2752 && (last_insn = compress_float_constant (x, y)))
2753 return last_insn;
2755 y_cst = y;
2757 if (!LEGITIMATE_CONSTANT_P (y))
2759 y = force_const_mem (mode, y);
2761 /* If the target's cannot_force_const_mem prevented the spill,
2762 assume that the target's move expanders will also take care
2763 of the non-legitimate constant. */
2764 if (!y)
2765 y = y_cst;
2769 /* If X or Y are memory references, verify that their addresses are valid
2770 for the machine. */
2771 if (MEM_P (x)
2772 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2773 && ! push_operand (x, GET_MODE (x)))
2774 || (flag_force_addr
2775 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2776 x = validize_mem (x);
2778 if (MEM_P (y)
2779 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2780 || (flag_force_addr
2781 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2782 y = validize_mem (y);
2784 if (mode == BLKmode)
2785 abort ();
2787 last_insn = emit_move_insn_1 (x, y);
2789 if (y_cst && REG_P (x)
2790 && (set = single_set (last_insn)) != NULL_RTX
2791 && SET_DEST (set) == x
2792 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2793 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2795 return last_insn;
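/* A usage sketch, illustrative only: the common pattern is simply

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   If the constant is not legitimate and has to be loaded in some
   rewritten form, the code above records the original value on the
   insn with a REG_EQUAL note.  */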
2798 /* Low level part of emit_move_insn.
2799 Called just like emit_move_insn, but assumes X and Y
2800 are basically valid. */
2803 emit_move_insn_1 (rtx x, rtx y)
2805 enum machine_mode mode = GET_MODE (x);
2806 enum machine_mode submode;
2807 enum mode_class class = GET_MODE_CLASS (mode);
2809 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2810 abort ();
2812 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2813 return
2814 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2816 /* Expand complex moves by moving real part and imag part, if possible. */
2817 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2818 && BLKmode != (submode = GET_MODE_INNER (mode))
2819 && (mov_optab->handlers[(int) submode].insn_code
2820 != CODE_FOR_nothing))
2822 /* Don't split destination if it is a stack push. */
2823 int stack = push_operand (x, GET_MODE (x));
2825 #ifdef PUSH_ROUNDING
2826 /* In case we output to the stack, but the size is smaller than the
2827 machine can push exactly, we need to use move instructions. */
2828 if (stack
2829 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2830 != GET_MODE_SIZE (submode)))
2832 rtx temp;
2833 HOST_WIDE_INT offset1, offset2;
2835 /* Do not use anti_adjust_stack, since we don't want to update
2836 stack_pointer_delta. */
2837 temp = expand_binop (Pmode,
2838 #ifdef STACK_GROWS_DOWNWARD
2839 sub_optab,
2840 #else
2841 add_optab,
2842 #endif
2843 stack_pointer_rtx,
2844 GEN_INT
2845 (PUSH_ROUNDING
2846 (GET_MODE_SIZE (GET_MODE (x)))),
2847 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2849 if (temp != stack_pointer_rtx)
2850 emit_move_insn (stack_pointer_rtx, temp);
2852 #ifdef STACK_GROWS_DOWNWARD
2853 offset1 = 0;
2854 offset2 = GET_MODE_SIZE (submode);
2855 #else
2856 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2857 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2858 + GET_MODE_SIZE (submode));
2859 #endif
2861 emit_move_insn (change_address (x, submode,
2862 gen_rtx_PLUS (Pmode,
2863 stack_pointer_rtx,
2864 GEN_INT (offset1))),
2865 gen_realpart (submode, y));
2866 emit_move_insn (change_address (x, submode,
2867 gen_rtx_PLUS (Pmode,
2868 stack_pointer_rtx,
2869 GEN_INT (offset2))),
2870 gen_imagpart (submode, y));
2872 else
2873 #endif
2874 /* If this is a stack push, push the highpart first, so it
2875 will be in the argument order.
2877 In that case, change_address is used only to convert
2878 the mode, not to change the address. */
2879 if (stack)
2881 /* Note that the real part always precedes the imag part in memory
2882 regardless of machine's endianness. */
2883 #ifdef STACK_GROWS_DOWNWARD
2884 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2885 gen_imagpart (submode, y));
2886 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2887 gen_realpart (submode, y));
2888 #else
2889 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2890 gen_realpart (submode, y));
2891 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2892 gen_imagpart (submode, y));
2893 #endif
2895 else
2897 rtx realpart_x, realpart_y;
2898 rtx imagpart_x, imagpart_y;
2900 /* If this is a complex value with each part being smaller than a
2901 word, the usual calling sequence will likely pack the pieces into
2902 a single register. Unfortunately, SUBREG of hard registers only
2903 deals in terms of words, so we have a problem converting input
2904 arguments to the CONCAT of two registers that is used elsewhere
2905 for complex values. If this is before reload, we can copy it into
2906 memory and reload. FIXME, we should see about using extract and
2907 insert on integer registers, but complex short and complex char
2908 variables should be rarely used. */
2909 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2910 && (reload_in_progress | reload_completed) == 0)
2912 int packed_dest_p
2913 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2914 int packed_src_p
2915 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2917 if (packed_dest_p || packed_src_p)
2919 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2920 ? MODE_FLOAT : MODE_INT);
2922 enum machine_mode reg_mode
2923 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2925 if (reg_mode != BLKmode)
2927 rtx mem = assign_stack_temp (reg_mode,
2928 GET_MODE_SIZE (mode), 0);
2929 rtx cmem = adjust_address (mem, mode, 0);
2931 if (packed_dest_p)
2933 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2935 emit_move_insn_1 (cmem, y);
2936 return emit_move_insn_1 (sreg, mem);
2938 else
2940 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2942 emit_move_insn_1 (mem, sreg);
2943 return emit_move_insn_1 (x, cmem);
2949 realpart_x = gen_realpart (submode, x);
2950 realpart_y = gen_realpart (submode, y);
2951 imagpart_x = gen_imagpart (submode, x);
2952 imagpart_y = gen_imagpart (submode, y);
2954 /* Show the output dies here. This is necessary for SUBREGs
2955 of pseudos since we cannot track their lifetimes correctly;
2956 hard regs shouldn't appear here except as return values.
2957 We never want to emit such a clobber after reload. */
2958 if (x != y
2959 && ! (reload_in_progress || reload_completed)
2960 && (GET_CODE (realpart_x) == SUBREG
2961 || GET_CODE (imagpart_x) == SUBREG))
2962 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2964 emit_move_insn (realpart_x, realpart_y);
2965 emit_move_insn (imagpart_x, imagpart_y);
2968 return get_last_insn ();
2971 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2972 find a mode to do it in. If we have a movcc, use it. Otherwise,
2973 find the MODE_INT mode of the same width. */
2974 else if (GET_MODE_CLASS (mode) == MODE_CC
2975 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2977 enum insn_code insn_code;
2978 enum machine_mode tmode = VOIDmode;
2979 rtx x1 = x, y1 = y;
2981 if (mode != CCmode
2982 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2983 tmode = CCmode;
2984 else
2985 for (tmode = QImode; tmode != VOIDmode;
2986 tmode = GET_MODE_WIDER_MODE (tmode))
2987 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2988 break;
2990 if (tmode == VOIDmode)
2991 abort ();
2993 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2994 may call change_address which is not appropriate if we were
2995 called when a reload was in progress. We don't have to worry
2996 about changing the address since the size in bytes is supposed to
2997 be the same. Copy the MEM to change the mode and move any
2998 substitutions from the old MEM to the new one. */
3000 if (reload_in_progress)
3002 x = gen_lowpart_common (tmode, x1);
3003 if (x == 0 && MEM_P (x1))
3005 x = adjust_address_nv (x1, tmode, 0);
3006 copy_replacements (x1, x);
3009 y = gen_lowpart_common (tmode, y1);
3010 if (y == 0 && MEM_P (y1))
3012 y = adjust_address_nv (y1, tmode, 0);
3013 copy_replacements (y1, y);
3016 else
3018 x = gen_lowpart (tmode, x);
3019 y = gen_lowpart (tmode, y);
3022 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3023 return emit_insn (GEN_FCN (insn_code) (x, y));
3026 /* Try using a move pattern for the corresponding integer mode. This is
3027 only safe when simplify_subreg can convert MODE constants into integer
3028 constants. At present, it can only do this reliably if the value
3029 fits within a HOST_WIDE_INT. */
3030 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3031 && (submode = int_mode_for_mode (mode)) != BLKmode
3032 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3033 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3034 (simplify_gen_subreg (submode, x, mode, 0),
3035 simplify_gen_subreg (submode, y, mode, 0)));
3037 /* This will handle any multi-word or full-word mode that lacks a move_insn
3038 pattern. However, you will get better code if you define such patterns,
3039 even if they must turn into multiple assembler instructions. */
3040 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3042 rtx last_insn = 0;
3043 rtx seq, inner;
3044 int need_clobber;
3045 int i;
3047 #ifdef PUSH_ROUNDING
3049 /* If X is a push on the stack, do the push now and replace
3050 X with a reference to the stack pointer. */
3051 if (push_operand (x, GET_MODE (x)))
3053 rtx temp;
3054 enum rtx_code code;
3056 /* Do not use anti_adjust_stack, since we don't want to update
3057 stack_pointer_delta. */
3058 temp = expand_binop (Pmode,
3059 #ifdef STACK_GROWS_DOWNWARD
3060 sub_optab,
3061 #else
3062 add_optab,
3063 #endif
3064 stack_pointer_rtx,
3065 GEN_INT
3066 (PUSH_ROUNDING
3067 (GET_MODE_SIZE (GET_MODE (x)))),
3068 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3070 if (temp != stack_pointer_rtx)
3071 emit_move_insn (stack_pointer_rtx, temp);
3073 code = GET_CODE (XEXP (x, 0));
3075 /* Just hope that small offsets off SP are OK. */
3076 if (code == POST_INC)
3077 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3078 GEN_INT (-((HOST_WIDE_INT)
3079 GET_MODE_SIZE (GET_MODE (x)))));
3080 else if (code == POST_DEC)
3081 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3082 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3083 else
3084 temp = stack_pointer_rtx;
3086 x = change_address (x, VOIDmode, temp);
3088 #endif
3090 /* If we are in reload, see if either operand is a MEM whose address
3091 is scheduled for replacement. */
3092 if (reload_in_progress && MEM_P (x)
3093 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3094 x = replace_equiv_address_nv (x, inner);
3095 if (reload_in_progress && MEM_P (y)
3096 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3097 y = replace_equiv_address_nv (y, inner);
3099 start_sequence ();
3101 need_clobber = 0;
3102 for (i = 0;
3103 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3104 i++)
3106 rtx xpart = operand_subword (x, i, 1, mode);
3107 rtx ypart = operand_subword (y, i, 1, mode);
3109 /* If we can't get a part of Y, put Y into memory if it is a
3110 constant. Otherwise, force it into a register. If we still
3111 can't get a part of Y, abort. */
3112 if (ypart == 0 && CONSTANT_P (y))
3114 y = force_const_mem (mode, y);
3115 ypart = operand_subword (y, i, 1, mode);
3117 else if (ypart == 0)
3118 ypart = operand_subword_force (y, i, mode);
3120 if (xpart == 0 || ypart == 0)
3121 abort ();
3123 need_clobber |= (GET_CODE (xpart) == SUBREG);
3125 last_insn = emit_move_insn (xpart, ypart);
3128 seq = get_insns ();
3129 end_sequence ();
3131 /* Show the output dies here. This is necessary for SUBREGs
3132 of pseudos since we cannot track their lifetimes correctly;
3133 hard regs shouldn't appear here except as return values.
3134 We never want to emit such a clobber after reload. */
3135 if (x != y
3136 && ! (reload_in_progress || reload_completed)
3137 && need_clobber != 0)
3138 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3140 emit_insn (seq);
3142 return last_insn;
3144 else
3145 abort ();
3148 /* If Y is representable exactly in a narrower mode, and the target can
3149 perform the extension directly from constant or memory, then emit the
3150 move as an extension. */
3152 static rtx
3153 compress_float_constant (rtx x, rtx y)
3155 enum machine_mode dstmode = GET_MODE (x);
3156 enum machine_mode orig_srcmode = GET_MODE (y);
3157 enum machine_mode srcmode;
3158 REAL_VALUE_TYPE r;
3160 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3162 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3163 srcmode != orig_srcmode;
3164 srcmode = GET_MODE_WIDER_MODE (srcmode))
3166 enum insn_code ic;
3167 rtx trunc_y, last_insn;
3169 /* Skip if the target can't extend this way. */
3170 ic = can_extend_p (dstmode, srcmode, 0);
3171 if (ic == CODE_FOR_nothing)
3172 continue;
3174 /* Skip if the narrowed value isn't exact. */
3175 if (! exact_real_truncate (srcmode, &r))
3176 continue;
3178 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3180 if (LEGITIMATE_CONSTANT_P (trunc_y))
3182 /* Skip if the target needs extra instructions to perform
3183 the extension. */
3184 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3185 continue;
3187 else if (float_extend_from_mem[dstmode][srcmode])
3188 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3189 else
3190 continue;
3192 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3193 last_insn = get_last_insn ();
3195 if (REG_P (x))
3196 set_unique_reg_note (last_insn, REG_EQUAL, y);
3198 return last_insn;
3201 return NULL_RTX;
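/* A worked example, illustrative only and assuming the target has a
   usable SFmode-to-DFmode extension pattern: moving the DFmode constant
   1.0 into a register can be emitted as

     (set (reg:DF d) (float_extend:DF (mem/u:SF (symbol_ref pool))))

   because 1.0 is exactly representable in SFmode.  A constant such as
   the DFmode approximation of 0.1 does not truncate exactly and falls
   through to a plain DFmode load.  */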
3204 /* Pushing data onto the stack. */
3206 /* Push a block of length SIZE (perhaps variable)
3207 and return an rtx to address the beginning of the block.
3208 Note that it is not possible for the value returned to be a QUEUED.
3209 The value may be virtual_outgoing_args_rtx.
3211 EXTRA is the number of bytes of padding to push in addition to SIZE.
3212 BELOW nonzero means this padding comes at low addresses;
3213 otherwise, the padding comes at high addresses. */
3216 push_block (rtx size, int extra, int below)
3218 rtx temp;
3220 size = convert_modes (Pmode, ptr_mode, size, 1);
3221 if (CONSTANT_P (size))
3222 anti_adjust_stack (plus_constant (size, extra));
3223 else if (REG_P (size) && extra == 0)
3224 anti_adjust_stack (size);
3225 else
3227 temp = copy_to_mode_reg (Pmode, size);
3228 if (extra != 0)
3229 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3230 temp, 0, OPTAB_LIB_WIDEN);
3231 anti_adjust_stack (temp);
3234 #ifndef STACK_GROWS_DOWNWARD
3235 if (0)
3236 #else
3237 if (1)
3238 #endif
3240 temp = virtual_outgoing_args_rtx;
3241 if (extra != 0 && below)
3242 temp = plus_constant (temp, extra);
3244 else
3246 if (GET_CODE (size) == CONST_INT)
3247 temp = plus_constant (virtual_outgoing_args_rtx,
3248 -INTVAL (size) - (below ? 0 : extra));
3249 else if (extra != 0 && !below)
3250 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3251 negate_rtx (Pmode, plus_constant (size, extra)));
3252 else
3253 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3254 negate_rtx (Pmode, size));
3257 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
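/* A usage sketch, illustrative only; the 32-byte size is an assumption:
   to reserve 32 bytes of outgoing argument space and obtain the address
   of the new block, a caller can do

     rtx block = push_block (GEN_INT (32), 0, 0);

   BLOCK is then usable as the base address for storing a BLKmode
   argument, which is how emit_push_insn below uses it when no argument
   block has been preallocated.  */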
3260 #ifdef PUSH_ROUNDING
3262 /* Emit single push insn. */
3264 static void
3265 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3267 rtx dest_addr;
3268 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3269 rtx dest;
3270 enum insn_code icode;
3271 insn_operand_predicate_fn pred;
3273 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3274 /* If there is a push pattern, use it.  Otherwise try the old way of
3275 throwing a MEM representing the push operation to the move expander. */
3276 icode = push_optab->handlers[(int) mode].insn_code;
3277 if (icode != CODE_FOR_nothing)
3279 if (((pred = insn_data[(int) icode].operand[0].predicate)
3280 && !((*pred) (x, mode))))
3281 x = force_reg (mode, x);
3282 emit_insn (GEN_FCN (icode) (x));
3283 return;
3285 if (GET_MODE_SIZE (mode) == rounded_size)
3286 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3287 /* If we are to pad downward, adjust the stack pointer first and
3288 then store X into the stack location using an offset. This is
3289 because emit_move_insn does not know how to pad; it does not have
3290 access to type. */
3291 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3293 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3294 HOST_WIDE_INT offset;
3296 emit_move_insn (stack_pointer_rtx,
3297 expand_binop (Pmode,
3298 #ifdef STACK_GROWS_DOWNWARD
3299 sub_optab,
3300 #else
3301 add_optab,
3302 #endif
3303 stack_pointer_rtx,
3304 GEN_INT (rounded_size),
3305 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3307 offset = (HOST_WIDE_INT) padding_size;
3308 #ifdef STACK_GROWS_DOWNWARD
3309 if (STACK_PUSH_CODE == POST_DEC)
3310 /* We have already decremented the stack pointer, so get the
3311 previous value. */
3312 offset += (HOST_WIDE_INT) rounded_size;
3313 #else
3314 if (STACK_PUSH_CODE == POST_INC)
3315 /* We have already incremented the stack pointer, so get the
3316 previous value. */
3317 offset -= (HOST_WIDE_INT) rounded_size;
3318 #endif
3319 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3321 else
3323 #ifdef STACK_GROWS_DOWNWARD
3324 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3325 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3326 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3327 #else
3328 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3329 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3330 GEN_INT (rounded_size));
3331 #endif
3332 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3335 dest = gen_rtx_MEM (mode, dest_addr);
3337 if (type != 0)
3339 set_mem_attributes (dest, type, 1);
3341 if (flag_optimize_sibling_calls)
3342 /* Function incoming arguments may overlap with sibling call
3343 outgoing arguments and we cannot allow reordering of reads
3344 from function arguments with stores to outgoing arguments
3345 of sibling calls. */
3346 set_mem_alias_set (dest, 0);
3348 emit_move_insn (dest, x);
3350 #endif
3352 /* Generate code to push X onto the stack, assuming it has mode MODE and
3353 type TYPE.
3354 MODE is redundant except when X is a CONST_INT (since they don't
3355 carry mode info).
3356 SIZE is an rtx for the size of data to be copied (in bytes),
3357 needed only if X is BLKmode.
3359 ALIGN (in bits) is maximum alignment we can assume.
3361 If PARTIAL and REG are both nonzero, then copy that many of the first
3362 words of X into registers starting with REG, and push the rest of X.
3363 The amount of space pushed is decreased by PARTIAL words,
3364 rounded *down* to a multiple of PARM_BOUNDARY.
3365 REG must be a hard register in this case.
3366 If REG is zero but PARTIAL is not, take all other actions for an
3367 argument partially in registers, but do not actually load any
3368 registers.
3370 EXTRA is the amount in bytes of extra space to leave next to this arg.
3371 This is ignored if an argument block has already been allocated.
3373 On a machine that lacks real push insns, ARGS_ADDR is the address of
3374 the bottom of the argument block for this call. We use indexing off there
3375 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3376 argument block has not been preallocated.
3378 ARGS_SO_FAR is the size of args previously pushed for this call.
3380 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3381 for arguments passed in registers. If nonzero, it will be the number
3382 of bytes required. */
3384 void
3385 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3386 unsigned int align, int partial, rtx reg, int extra,
3387 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3388 rtx alignment_pad)
3390 rtx xinner;
3391 enum direction stack_direction
3392 #ifdef STACK_GROWS_DOWNWARD
3393 = downward;
3394 #else
3395 = upward;
3396 #endif
3398 /* Decide where to pad the argument: `downward' for below,
3399 `upward' for above, or `none' for don't pad it.
3400 Default is below for small data on big-endian machines; else above. */
3401 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3403 /* Invert direction if stack is post-decrement.
3404 FIXME: why? */
3405 if (STACK_PUSH_CODE == POST_DEC)
3406 if (where_pad != none)
3407 where_pad = (where_pad == downward ? upward : downward);
3409 xinner = x = protect_from_queue (x, 0);
3411 if (mode == BLKmode)
3413 /* Copy a block into the stack, entirely or partially. */
3415 rtx temp;
3416 int used = partial * UNITS_PER_WORD;
3417 int offset;
3418 int skip;
3420 if (reg && GET_CODE (reg) == PARALLEL)
3422 /* Use the size of the elt to compute offset. */
3423 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3424 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3425 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3427 else
3428 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3430 if (size == 0)
3431 abort ();
3433 used -= offset;
3435 /* USED is now the # of bytes we need not copy to the stack
3436 because registers will take care of them. */
3438 if (partial != 0)
3439 xinner = adjust_address (xinner, BLKmode, used);
3441 /* If the partial register-part of the arg counts in its stack size,
3442 skip the part of stack space corresponding to the registers.
3443 Otherwise, start copying to the beginning of the stack space,
3444 by setting SKIP to 0. */
3445 skip = (reg_parm_stack_space == 0) ? 0 : used;
3447 #ifdef PUSH_ROUNDING
3448 /* Do it with several push insns if that doesn't take lots of insns
3449 and if there is no difficulty with push insns that skip bytes
3450 on the stack for alignment purposes. */
3451 if (args_addr == 0
3452 && PUSH_ARGS
3453 && GET_CODE (size) == CONST_INT
3454 && skip == 0
3455 && MEM_ALIGN (xinner) >= align
3456 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3457 /* Here we avoid the case of a structure whose weak alignment
3458 forces many pushes of a small amount of data,
3459 and such small pushes do rounding that causes trouble. */
3460 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3461 || align >= BIGGEST_ALIGNMENT
3462 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3463 == (align / BITS_PER_UNIT)))
3464 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3466 /* Push padding now if padding above and stack grows down,
3467 or if padding below and stack grows up.
3468 But if space already allocated, this has already been done. */
3469 if (extra && args_addr == 0
3470 && where_pad != none && where_pad != stack_direction)
3471 anti_adjust_stack (GEN_INT (extra));
3473 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3475 else
3476 #endif /* PUSH_ROUNDING */
3478 rtx target;
3480 /* Otherwise make space on the stack and copy the data
3481 to the address of that space. */
3483 /* Deduct words put into registers from the size we must copy. */
3484 if (partial != 0)
3486 if (GET_CODE (size) == CONST_INT)
3487 size = GEN_INT (INTVAL (size) - used);
3488 else
3489 size = expand_binop (GET_MODE (size), sub_optab, size,
3490 GEN_INT (used), NULL_RTX, 0,
3491 OPTAB_LIB_WIDEN);
3494 /* Get the address of the stack space.
3495 In this case, we do not deal with EXTRA separately.
3496 A single stack adjust will do. */
3497 if (! args_addr)
3499 temp = push_block (size, extra, where_pad == downward);
3500 extra = 0;
3502 else if (GET_CODE (args_so_far) == CONST_INT)
3503 temp = memory_address (BLKmode,
3504 plus_constant (args_addr,
3505 skip + INTVAL (args_so_far)));
3506 else
3507 temp = memory_address (BLKmode,
3508 plus_constant (gen_rtx_PLUS (Pmode,
3509 args_addr,
3510 args_so_far),
3511 skip));
3513 if (!ACCUMULATE_OUTGOING_ARGS)
3515 /* If the source is referenced relative to the stack pointer,
3516 copy it to another register to stabilize it. We do not need
3517 to do this if we know that we won't be changing sp. */
3519 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3520 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3521 temp = copy_to_reg (temp);
3524 target = gen_rtx_MEM (BLKmode, temp);
3526 if (type != 0)
3528 set_mem_attributes (target, type, 1);
3529 /* Function incoming arguments may overlap with sibling call
3530 outgoing arguments and we cannot allow reordering of reads
3531 from function arguments with stores to outgoing arguments
3532 of sibling calls. */
3533 set_mem_alias_set (target, 0);
3536 /* ALIGN may well be better aligned than TYPE, e.g. due to
3537 PARM_BOUNDARY. Assume the caller isn't lying. */
3538 set_mem_align (target, align);
3540 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3543 else if (partial > 0)
3545 /* Scalar partly in registers. */
3547 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3548 int i;
3549 int not_stack;
3550 /* # words of start of argument
3551 that we must make space for but need not store. */
3552 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3553 int args_offset = INTVAL (args_so_far);
3554 int skip;
3556 /* Push padding now if padding above and stack grows down,
3557 or if padding below and stack grows up.
3558 But if space already allocated, this has already been done. */
3559 if (extra && args_addr == 0
3560 && where_pad != none && where_pad != stack_direction)
3561 anti_adjust_stack (GEN_INT (extra));
3563 /* If we make space by pushing it, we might as well push
3564 the real data. Otherwise, we can leave OFFSET nonzero
3565 and leave the space uninitialized. */
3566 if (args_addr == 0)
3567 offset = 0;
3569 /* Now NOT_STACK gets the number of words that we don't need to
3570 allocate on the stack. */
3571 not_stack = partial - offset;
3573 /* If the partial register-part of the arg counts in its stack size,
3574 skip the part of stack space corresponding to the registers.
3575 Otherwise, start copying to the beginning of the stack space,
3576 by setting SKIP to 0. */
3577 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3579 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3580 x = validize_mem (force_const_mem (mode, x));
3582 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3583 SUBREGs of such registers are not allowed. */
3584 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3585 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3586 x = copy_to_reg (x);
3588 /* Loop over all the words allocated on the stack for this arg. */
3589 /* We can do it by words, because any scalar bigger than a word
3590 has a size that is a multiple of a word. */
3591 #ifndef PUSH_ARGS_REVERSED
3592 for (i = not_stack; i < size; i++)
3593 #else
3594 for (i = size - 1; i >= not_stack; i--)
3595 #endif
3596 if (i >= not_stack + offset)
3597 emit_push_insn (operand_subword_force (x, i, mode),
3598 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3599 0, args_addr,
3600 GEN_INT (args_offset + ((i - not_stack + skip)
3601 * UNITS_PER_WORD)),
3602 reg_parm_stack_space, alignment_pad);
3604 else
3606 rtx addr;
3607 rtx dest;
3609 /* Push padding now if padding above and stack grows down,
3610 or if padding below and stack grows up.
3611 But if space already allocated, this has already been done. */
3612 if (extra && args_addr == 0
3613 && where_pad != none && where_pad != stack_direction)
3614 anti_adjust_stack (GEN_INT (extra));
3616 #ifdef PUSH_ROUNDING
3617 if (args_addr == 0 && PUSH_ARGS)
3618 emit_single_push_insn (mode, x, type);
3619 else
3620 #endif
3622 if (GET_CODE (args_so_far) == CONST_INT)
3623 addr
3624 = memory_address (mode,
3625 plus_constant (args_addr,
3626 INTVAL (args_so_far)));
3627 else
3628 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3629 args_so_far));
3630 dest = gen_rtx_MEM (mode, addr);
3631 if (type != 0)
3633 set_mem_attributes (dest, type, 1);
3634 /* Function incoming arguments may overlap with sibling call
3635 outgoing arguments and we cannot allow reordering of reads
3636 from function arguments with stores to outgoing arguments
3637 of sibling calls. */
3638 set_mem_alias_set (dest, 0);
3641 emit_move_insn (dest, x);
3645 /* If part should go in registers, copy that part
3646 into the appropriate registers. Do this now, at the end,
3647 since mem-to-mem copies above may do function calls. */
3648 if (partial > 0 && reg != 0)
3650 /* Handle calls that pass values in multiple non-contiguous locations.
3651 The Irix 6 ABI has examples of this. */
3652 if (GET_CODE (reg) == PARALLEL)
3653 emit_group_load (reg, x, type, -1);
3654 else
3655 move_block_to_reg (REGNO (reg), x, partial, mode);
3658 if (extra && args_addr == 0 && where_pad == stack_direction)
3659 anti_adjust_stack (GEN_INT (extra));
3661 if (alignment_pad && args_addr == 0)
3662 anti_adjust_stack (alignment_pad);
3665 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3666 operations. */
3668 static rtx
3669 get_subtarget (rtx x)
3671 return ((x == 0
3672 /* Only registers can be subtargets. */
3673 || !REG_P (x)
3674 /* If the register is readonly, it can't be set more than once. */
3675 || RTX_UNCHANGING_P (x)
3676 /* Don't use hard regs to avoid extending their life. */
3677 || REGNO (x) < FIRST_PSEUDO_REGISTER
3678 /* Avoid subtargets inside loops,
3679 since they hide some invariant expressions. */
3680 || preserve_subexpressions_p ())
3681 ? 0 : x);
3684 /* Expand an assignment that stores the value of FROM into TO.
3685 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3686 (This may contain a QUEUED rtx;
3687 if the value is constant, this rtx is a constant.)
3688 Otherwise, the returned value is NULL_RTX. */
3691 expand_assignment (tree to, tree from, int want_value)
3693 rtx to_rtx = 0;
3694 rtx result;
3696 /* Don't crash if the lhs of the assignment was erroneous. */
3698 if (TREE_CODE (to) == ERROR_MARK)
3700 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3701 return want_value ? result : NULL_RTX;
3704 /* Assignment of a structure component needs special treatment
3705 if the structure component's rtx is not simply a MEM.
3706 Assignment of an array element at a constant index, and assignment of
3707 an array element in an unaligned packed structure field, have the same
3708 problem. */
3710 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3711 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3712 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3714 enum machine_mode mode1;
3715 HOST_WIDE_INT bitsize, bitpos;
3716 rtx orig_to_rtx;
3717 tree offset;
3718 int unsignedp;
3719 int volatilep = 0;
3720 tree tem;
3722 push_temp_slots ();
3723 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3724 &unsignedp, &volatilep);
3726 /* If we are going to use store_bit_field and extract_bit_field,
3727 make sure to_rtx will be safe for multiple use. */
3729 if (mode1 == VOIDmode && want_value)
3730 tem = stabilize_reference (tem);
3732 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3734 if (offset != 0)
3736 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3738 if (!MEM_P (to_rtx))
3739 abort ();
3741 #ifdef POINTERS_EXTEND_UNSIGNED
3742 if (GET_MODE (offset_rtx) != Pmode)
3743 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3744 #else
3745 if (GET_MODE (offset_rtx) != ptr_mode)
3746 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3747 #endif
3749 /* A constant address in TO_RTX can have VOIDmode; we must not try
3750 to call force_reg for that case, so avoid it. */
3751 if (MEM_P (to_rtx)
3752 && GET_MODE (to_rtx) == BLKmode
3753 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3754 && bitsize > 0
3755 && (bitpos % bitsize) == 0
3756 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3757 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3759 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3760 bitpos = 0;
3763 to_rtx = offset_address (to_rtx, offset_rtx,
3764 highest_pow2_factor_for_target (to,
3765 offset));
3768 if (MEM_P (to_rtx))
3770 /* If the field is at offset zero, we could have been given the
3771 DECL_RTX of the parent struct. Don't munge it. */
3772 to_rtx = shallow_copy_rtx (to_rtx);
3774 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3777 /* Deal with volatile and readonly fields. The former is only done
3778 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3779 if (volatilep && MEM_P (to_rtx))
3781 if (to_rtx == orig_to_rtx)
3782 to_rtx = copy_rtx (to_rtx);
3783 MEM_VOLATILE_P (to_rtx) = 1;
3786 if (TREE_CODE (to) == COMPONENT_REF
3787 && TREE_READONLY (TREE_OPERAND (to, 1))
3788 /* We can't assert that a MEM won't be set more than once
3789 if the component is not addressable because another
3790 non-addressable component may be referenced by the same MEM. */
3791 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3793 if (to_rtx == orig_to_rtx)
3794 to_rtx = copy_rtx (to_rtx);
3795 RTX_UNCHANGING_P (to_rtx) = 1;
3798 if (MEM_P (to_rtx) && ! can_address_p (to))
3800 if (to_rtx == orig_to_rtx)
3801 to_rtx = copy_rtx (to_rtx);
3802 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3805 /* Disabled temporarily. GET_MODE (to_rtx) is often not the right
3806 mode. */
3807 while (0 && mode1 == VOIDmode && !want_value
3808 && bitpos + bitsize <= BITS_PER_WORD
3809 && bitsize < BITS_PER_WORD
3810 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3811 && !TREE_SIDE_EFFECTS (to)
3812 && !TREE_THIS_VOLATILE (to))
3814 tree src, op0, op1;
3815 rtx value;
3816 HOST_WIDE_INT count = bitpos;
3817 optab binop;
3819 src = from;
3820 STRIP_NOPS (src);
3821 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3822 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3823 break;
3825 op0 = TREE_OPERAND (src, 0);
3826 op1 = TREE_OPERAND (src, 1);
3827 STRIP_NOPS (op0);
3829 if (! operand_equal_p (to, op0, 0))
3830 break;
3832 if (BYTES_BIG_ENDIAN)
3833 count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;
3835 /* Special case some bitfield op= exp. */
3836 switch (TREE_CODE (src))
3838 case PLUS_EXPR:
3839 case MINUS_EXPR:
3840 if (count <= 0)
3841 break;
3843 /* For now, just optimize the case of the topmost bitfield
3844 where we don't need to do any masking and also
3845 1 bit bitfields where xor can be used.
3846 We might win by one instruction for the other bitfields
3847 too if insv/extv instructions aren't used, so that
3848 can be added later. */
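/* Illustrative source patterns (an editor's sketch of the cases meant
   above, assuming a typical little-endian bit layout): given
     struct s { unsigned low : 28, top : 4; unsigned f : 1; } x;
   "x.top += 3" is the topmost-bitfield case, since carries out of the
   field simply fall off the end of the word and no masking is needed,
   while "x.f += 1" is the 1-bit case, where adding an odd constant is
   equivalent to xor with 1.  */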
3849 if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
3850 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3851 break;
3852 value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
3853 value = protect_from_queue (value, 0);
3854 to_rtx = protect_from_queue (to_rtx, 1);
3855 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3856 if (bitsize == 1
3857 && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
3859 value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
3860 NULL_RTX);
3861 binop = xor_optab;
3863 value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
3864 value, build_int_2 (count, 0),
3865 NULL_RTX, 1);
3866 result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
3867 value, to_rtx, 1, OPTAB_WIDEN);
3868 if (result != to_rtx)
3869 emit_move_insn (to_rtx, result);
3870 free_temp_slots ();
3871 pop_temp_slots ();
3872 return NULL_RTX;
3873 default:
3874 break;
3877 break;
3880 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3881 (want_value
3882 /* Spurious cast for HPUX compiler. */
3883 ? ((enum machine_mode)
3884 TYPE_MODE (TREE_TYPE (to)))
3885 : VOIDmode),
3886 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3888 preserve_temp_slots (result);
3889 free_temp_slots ();
3890 pop_temp_slots ();
3892 /* If the value is meaningful, convert RESULT to the proper mode.
3893 Otherwise, return nothing. */
3894 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3895 TYPE_MODE (TREE_TYPE (from)),
3896 result,
3897 TYPE_UNSIGNED (TREE_TYPE (to)))
3898 : NULL_RTX);
3901 /* If the rhs is a function call and its value is not an aggregate,
3902 call the function before we start to compute the lhs.
3903 This is needed for correct code for cases such as
3904 val = setjmp (buf) on machines where reference to val
3905 requires loading up part of an address in a separate insn.
3907 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3908 since it might be a promoted variable where the zero- or sign- extension
3909 needs to be done. Handling this in the normal way is safe because no
3910 computation is done before the call. */
3911 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3912 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3913 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3914 && REG_P (DECL_RTL (to))))
3916 rtx value;
3918 push_temp_slots ();
3919 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3920 if (to_rtx == 0)
3921 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3923 /* Handle calls that return values in multiple non-contiguous locations.
3924 The Irix 6 ABI has examples of this. */
3925 if (GET_CODE (to_rtx) == PARALLEL)
3926 emit_group_load (to_rtx, value, TREE_TYPE (from),
3927 int_size_in_bytes (TREE_TYPE (from)));
3928 else if (GET_MODE (to_rtx) == BLKmode)
3929 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3930 else
3932 if (POINTER_TYPE_P (TREE_TYPE (to)))
3933 value = convert_memory_address (GET_MODE (to_rtx), value);
3934 emit_move_insn (to_rtx, value);
3936 preserve_temp_slots (to_rtx);
3937 free_temp_slots ();
3938 pop_temp_slots ();
3939 return want_value ? to_rtx : NULL_RTX;
3942 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3943 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3945 if (to_rtx == 0)
3946 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3948 /* Don't move directly into a return register. */
3949 if (TREE_CODE (to) == RESULT_DECL
3950 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3952 rtx temp;
3954 push_temp_slots ();
3955 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3957 if (GET_CODE (to_rtx) == PARALLEL)
3958 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3959 int_size_in_bytes (TREE_TYPE (from)));
3960 else
3961 emit_move_insn (to_rtx, temp);
3963 preserve_temp_slots (to_rtx);
3964 free_temp_slots ();
3965 pop_temp_slots ();
3966 return want_value ? to_rtx : NULL_RTX;
3969 /* In case we are returning the contents of an object which overlaps
3970 the place the value is being stored, use a safe function when copying
3971 a value through a pointer into a structure value return block. */
3972 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3973 && current_function_returns_struct
3974 && !current_function_returns_pcc_struct)
3976 rtx from_rtx, size;
3978 push_temp_slots ();
3979 size = expr_size (from);
3980 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3982 emit_library_call (memmove_libfunc, LCT_NORMAL,
3983 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3984 XEXP (from_rtx, 0), Pmode,
3985 convert_to_mode (TYPE_MODE (sizetype),
3986 size, TYPE_UNSIGNED (sizetype)),
3987 TYPE_MODE (sizetype));
3989 preserve_temp_slots (to_rtx);
3990 free_temp_slots ();
3991 pop_temp_slots ();
3992 return want_value ? to_rtx : NULL_RTX;
3995 /* Compute FROM and store the value in the rtx we got. */
3997 push_temp_slots ();
3998 result = store_expr (from, to_rtx, want_value);
3999 preserve_temp_slots (result);
4000 free_temp_slots ();
4001 pop_temp_slots ();
4002 return want_value ? result : NULL_RTX;
4005 /* Generate code for computing expression EXP,
4006 and storing the value into TARGET.
4007 TARGET may contain a QUEUED rtx.
4009 If WANT_VALUE & 1 is nonzero, return a copy of the value
4010 not in TARGET, so that we can be sure to use the proper
4011 value in a containing expression even if TARGET has something
4012 else stored in it. If possible, we copy the value through a pseudo
4013 and return that pseudo. Or, if the value is constant, we try to
4014 return the constant. In some cases, we return a pseudo
4015 copied *from* TARGET.
4017 If the mode is BLKmode then we may return TARGET itself.
4018 It turns out that in BLKmode it doesn't cause a problem,
4019 because C has no operators that could combine two different
4020 assignments into the same BLKmode object with different values
4021 with no sequence point. Will other languages need this to
4022 be more thorough?
4024 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4025 to catch quickly any cases where the caller uses the value
4026 and fails to set WANT_VALUE.
4028 If WANT_VALUE & 2 is set, this is a store into a call param on the
4029 stack, and block moves may need to be treated specially. */
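/* Usage sketch (editor's illustration, not from the original source):
     store_expr (rhs, target, 0);          plain store, no value wanted
     temp = store_expr (rhs, target, 1);   store and get a usable copy back
     store_expr (arg, arg_slot, 2);        store of a call parameter pushed
                                           on the stack
   Bit 0 selects whether an rtx value is returned; bit 1 marks the
   call-parameter case so block moves can use BLOCK_OP_CALL_PARM.  */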
4032 store_expr (tree exp, rtx target, int want_value)
4034 rtx temp;
4035 rtx alt_rtl = NULL_RTX;
4036 rtx mark = mark_queue ();
4037 int dont_return_target = 0;
4038 int dont_store_target = 0;
4040 if (VOID_TYPE_P (TREE_TYPE (exp)))
4042 /* C++ can generate ?: expressions with a throw expression in one
4043 branch and an rvalue in the other. Here, we resolve attempts to
4044 store the throw expression's nonexistent result. */
4045 if (want_value)
4046 abort ();
4047 expand_expr (exp, const0_rtx, VOIDmode, 0);
4048 return NULL_RTX;
4050 if (TREE_CODE (exp) == COMPOUND_EXPR)
4052 /* Perform first part of compound expression, then assign from second
4053 part. */
4054 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4055 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4056 emit_queue ();
4057 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4059 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4061 /* For conditional expression, get safe form of the target. Then
4062 test the condition, doing the appropriate assignment on either
4063 side. This avoids the creation of unnecessary temporaries.
4064 For non-BLKmode, it is more efficient not to do this. */
4066 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4068 emit_queue ();
4069 target = protect_from_queue (target, 1);
4071 do_pending_stack_adjust ();
4072 NO_DEFER_POP;
4073 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4074 start_cleanup_deferral ();
4075 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4076 end_cleanup_deferral ();
4077 emit_queue ();
4078 emit_jump_insn (gen_jump (lab2));
4079 emit_barrier ();
4080 emit_label (lab1);
4081 start_cleanup_deferral ();
4082 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4083 end_cleanup_deferral ();
4084 emit_queue ();
4085 emit_label (lab2);
4086 OK_DEFER_POP;
4088 return want_value & 1 ? target : NULL_RTX;
4090 else if (queued_subexp_p (target))
4091 /* If target contains a postincrement, let's not risk
4092 using it as the place to generate the rhs. */
4094 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4096 /* Expand EXP into a new pseudo. */
4097 temp = gen_reg_rtx (GET_MODE (target));
4098 temp = expand_expr (exp, temp, GET_MODE (target),
4099 (want_value & 2
4100 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4102 else
4103 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4104 (want_value & 2
4105 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4107 /* If target is volatile, ANSI requires accessing the value
4108 *from* the target, if it is accessed. So make that happen.
4109 In no case return the target itself. */
4110 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4111 dont_return_target = 1;
4113 else if ((want_value & 1) != 0
4114 && MEM_P (target)
4115 && ! MEM_VOLATILE_P (target)
4116 && GET_MODE (target) != BLKmode)
4117 /* If target is in memory and caller wants value in a register instead,
4118 arrange that. Pass TARGET as target for expand_expr so that,
4119 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4120 We know expand_expr will not use the target in that case.
4121 Don't do this if TARGET is volatile because we are supposed
4122 to write it and then read it. */
4124 temp = expand_expr (exp, target, GET_MODE (target),
4125 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4126 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4128 /* If TEMP is already in the desired TARGET, only copy it from
4129 memory and don't store it there again. */
4130 if (temp == target
4131 || (rtx_equal_p (temp, target)
4132 && ! side_effects_p (temp) && ! side_effects_p (target)))
4133 dont_store_target = 1;
4134 temp = copy_to_reg (temp);
4136 dont_return_target = 1;
4138 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4139 /* If this is a scalar in a register that is stored in a wider mode
4140 than the declared mode, compute the result into its declared mode
4141 and then convert to the wider mode. Our value is the computed
4142 expression. */
4144 rtx inner_target = 0;
4146 /* If we don't want a value, we can do the conversion inside EXP,
4147 which will often result in some optimizations. Do the conversion
4148 in two steps: first change the signedness, if needed, then
4149 the extend. But don't do this if the type of EXP is a subtype
4150 of something else since then the conversion might involve
4151 more than just converting modes. */
4152 if ((want_value & 1) == 0
4153 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4154 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4156 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4157 != SUBREG_PROMOTED_UNSIGNED_P (target))
4158 exp = convert
4159 (lang_hooks.types.signed_or_unsigned_type
4160 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4162 exp = convert (lang_hooks.types.type_for_mode
4163 (GET_MODE (SUBREG_REG (target)),
4164 SUBREG_PROMOTED_UNSIGNED_P (target)),
4165 exp);
4167 inner_target = SUBREG_REG (target);
4170 temp = expand_expr (exp, inner_target, VOIDmode,
4171 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4173 /* If TEMP is a MEM and we want a result value, make the access
4174 now so it gets done only once. Strictly speaking, this is
4175 only necessary if the MEM is volatile, or if the address
4176 overlaps TARGET. But not performing the load twice also
4177 reduces the amount of rtl we generate and then have to CSE. */
4178 if (MEM_P (temp) && (want_value & 1) != 0)
4179 temp = copy_to_reg (temp);
4181 /* If TEMP is a VOIDmode constant, use convert_modes to make
4182 sure that we properly convert it. */
4183 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4185 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4186 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4187 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4188 GET_MODE (target), temp,
4189 SUBREG_PROMOTED_UNSIGNED_P (target));
4192 convert_move (SUBREG_REG (target), temp,
4193 SUBREG_PROMOTED_UNSIGNED_P (target));
4195 /* If we promoted a constant, change the mode back down to match
4196 target. Otherwise, the caller might get confused by a result whose
4197 mode is larger than expected. */
4199 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4201 if (GET_MODE (temp) != VOIDmode)
4203 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4204 SUBREG_PROMOTED_VAR_P (temp) = 1;
4205 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4206 SUBREG_PROMOTED_UNSIGNED_P (target));
4208 else
4209 temp = convert_modes (GET_MODE (target),
4210 GET_MODE (SUBREG_REG (target)),
4211 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4214 return want_value & 1 ? temp : NULL_RTX;
4216 else
4218 temp = expand_expr_real (exp, target, GET_MODE (target),
4219 (want_value & 2
4220 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4221 &alt_rtl);
4222 /* Return TARGET if it's a specified hardware register.
4223 If TARGET is a volatile mem ref, either return TARGET
4224 or return a reg copied *from* TARGET; ANSI requires this.
4226 Otherwise, if TEMP is not TARGET, return TEMP
4227 if it is constant (for efficiency),
4228 or if we really want the correct value. */
4229 if (!(target && REG_P (target)
4230 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4231 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4232 && ! rtx_equal_p (temp, target)
4233 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4234 dont_return_target = 1;
4237 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4238 the same as that of TARGET, adjust the constant. This is needed, for
4239 example, in case it is a CONST_DOUBLE and we want only a word-sized
4240 value. */
4241 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4242 && TREE_CODE (exp) != ERROR_MARK
4243 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4244 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4245 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4247 /* If value was not generated in the target, store it there.
4248 Convert the value to TARGET's type first if necessary and emit the
4249 pending incrementations that have been queued when expanding EXP.
4250 Note that we cannot emit the whole queue blindly because this will
4251 effectively disable the POST_INC optimization later.
4253 If TEMP and TARGET compare equal according to rtx_equal_p, but
4254 one or both of them are volatile memory refs, we have to distinguish
4255 two cases:
4256 - expand_expr has used TARGET. In this case, we must not generate
4257 another copy. This can be detected by TARGET being equal according
4258 to == .
4259 - expand_expr has not used TARGET - that means that the source just
4260 happens to have the same RTX form. Since temp will have been created
4261 by expand_expr, it will compare unequal according to == .
4262 We must generate a copy in this case, to reach the correct number
4263 of volatile memory references. */
4265 if ((! rtx_equal_p (temp, target)
4266 || (temp != target && (side_effects_p (temp)
4267 || side_effects_p (target))))
4268 && TREE_CODE (exp) != ERROR_MARK
4269 && ! dont_store_target
4270 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4271 but TARGET is not a valid memory reference, TEMP will differ
4272 from TARGET although it is really the same location. */
4273 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4274 /* If there's nothing to copy, don't bother. Don't call expr_size
4275 unless necessary, because some front ends' expr_size hook (e.g. C++'s)
4276 aborts on objects that are not supposed to be bit-copied or
4277 bit-initialized. */
4278 && expr_size (exp) != const0_rtx)
4280 emit_insns_enqueued_after_mark (mark);
4281 target = protect_from_queue (target, 1);
4282 temp = protect_from_queue (temp, 0);
4283 if (GET_MODE (temp) != GET_MODE (target)
4284 && GET_MODE (temp) != VOIDmode)
4286 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4287 if (dont_return_target)
4289 /* In this case, we will return TEMP,
4290 so make sure it has the proper mode.
4291 But don't forget to store the value into TARGET. */
4292 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4293 emit_move_insn (target, temp);
4295 else
4296 convert_move (target, temp, unsignedp);
4299 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4301 /* Handle copying a string constant into an array. The string
4302 constant may be shorter than the array. So copy just the string's
4303 actual length, and clear the rest. First get the size of the data
4304 type of the string, which is actually the size of the target. */
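/* Worked example (illustrative, not from the original source): for
     char buf[8] = "abc";
   SIZE is 8 and TREE_STRING_LENGTH (exp) is 4 (the terminating NUL is
   counted), so COPY_SIZE below becomes min (8, 4) == 4: four bytes are
   block-copied and the remaining four bytes of BUF are cleared via
   clear_storage.  */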
4305 rtx size = expr_size (exp);
4307 if (GET_CODE (size) == CONST_INT
4308 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4309 emit_block_move (target, temp, size,
4310 (want_value & 2
4311 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4312 else
4314 /* Compute the size of the data to copy from the string. */
4315 tree copy_size
4316 = size_binop (MIN_EXPR,
4317 make_tree (sizetype, size),
4318 size_int (TREE_STRING_LENGTH (exp)));
4319 rtx copy_size_rtx
4320 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4321 (want_value & 2
4322 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4323 rtx label = 0;
4325 /* Copy that much. */
4326 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4327 TYPE_UNSIGNED (sizetype));
4328 emit_block_move (target, temp, copy_size_rtx,
4329 (want_value & 2
4330 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4332 /* Figure out how much is left in TARGET that we have to clear.
4333 Do all calculations in ptr_mode. */
4334 if (GET_CODE (copy_size_rtx) == CONST_INT)
4336 size = plus_constant (size, -INTVAL (copy_size_rtx));
4337 target = adjust_address (target, BLKmode,
4338 INTVAL (copy_size_rtx));
4340 else
4342 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4343 copy_size_rtx, NULL_RTX, 0,
4344 OPTAB_LIB_WIDEN);
4346 #ifdef POINTERS_EXTEND_UNSIGNED
4347 if (GET_MODE (copy_size_rtx) != Pmode)
4348 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4349 TYPE_UNSIGNED (sizetype));
4350 #endif
4352 target = offset_address (target, copy_size_rtx,
4353 highest_pow2_factor (copy_size));
4354 label = gen_label_rtx ();
4355 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4356 GET_MODE (size), 0, label);
4359 if (size != const0_rtx)
4360 clear_storage (target, size);
4362 if (label)
4363 emit_label (label);
4366 /* Handle calls that return values in multiple non-contiguous locations.
4367 The Irix 6 ABI has examples of this. */
4368 else if (GET_CODE (target) == PARALLEL)
4369 emit_group_load (target, temp, TREE_TYPE (exp),
4370 int_size_in_bytes (TREE_TYPE (exp)));
4371 else if (GET_MODE (temp) == BLKmode)
4372 emit_block_move (target, temp, expr_size (exp),
4373 (want_value & 2
4374 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4375 else
4377 temp = force_operand (temp, target);
4378 if (temp != target)
4379 emit_move_insn (target, temp);
4383 /* If we don't want a value, return NULL_RTX. */
4384 if ((want_value & 1) == 0)
4385 return NULL_RTX;
4387 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4388 ??? The latter test doesn't seem to make sense. */
4389 else if (dont_return_target && !MEM_P (temp))
4390 return temp;
4392 /* Return TARGET itself if it is a hard register. */
4393 else if ((want_value & 1) != 0
4394 && GET_MODE (target) != BLKmode
4395 && ! (REG_P (target)
4396 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4397 return copy_to_reg (target);
4399 else
4400 return target;
4403 /* Examine CTOR. Count how many scalar fields are set to nonzero
4404 values and place that count in *P_NZ_ELTS. Count how many scalar fields
4405 are set to non-constant values and place that count in *P_NC_ELTS. */
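/* For example (editor's sketch): for the initializer { 0, 5, 0, f (x) }
   the result is *P_NZ_ELTS == 2 (the 5 and the call) and *P_NC_ELTS == 1
   (only the call fails initializer_constant_valid_p).  */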
4407 static void
4408 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4409 HOST_WIDE_INT *p_nc_elts)
4411 HOST_WIDE_INT nz_elts, nc_elts;
4412 tree list;
4414 nz_elts = 0;
4415 nc_elts = 0;
4417 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4419 tree value = TREE_VALUE (list);
4420 tree purpose = TREE_PURPOSE (list);
4421 HOST_WIDE_INT mult;
4423 mult = 1;
4424 if (TREE_CODE (purpose) == RANGE_EXPR)
4426 tree lo_index = TREE_OPERAND (purpose, 0);
4427 tree hi_index = TREE_OPERAND (purpose, 1);
4429 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4430 mult = (tree_low_cst (hi_index, 1)
4431 - tree_low_cst (lo_index, 1) + 1);
4434 switch (TREE_CODE (value))
4436 case CONSTRUCTOR:
4438 HOST_WIDE_INT nz = 0, nc = 0;
4439 categorize_ctor_elements_1 (value, &nz, &nc);
4440 nz_elts += mult * nz;
4441 nc_elts += mult * nc;
4443 break;
4445 case INTEGER_CST:
4446 case REAL_CST:
4447 if (!initializer_zerop (value))
4448 nz_elts += mult;
4449 break;
4450 case COMPLEX_CST:
4451 if (!initializer_zerop (TREE_REALPART (value)))
4452 nz_elts += mult;
4453 if (!initializer_zerop (TREE_IMAGPART (value)))
4454 nz_elts += mult;
4455 break;
4456 case VECTOR_CST:
4458 tree v;
4459 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4460 if (!initializer_zerop (TREE_VALUE (v)))
4461 nz_elts += mult;
4463 break;
4465 default:
4466 nz_elts += mult;
4467 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4468 nc_elts += mult;
4469 break;
4473 *p_nz_elts += nz_elts;
4474 *p_nc_elts += nc_elts;
4477 void
4478 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4479 HOST_WIDE_INT *p_nc_elts)
4481 *p_nz_elts = 0;
4482 *p_nc_elts = 0;
4483 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4486 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4487 TYPE is variable-sized. */
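/* For example (editor's sketch): "struct { int a; int b[4]; }" counts as
   1 + 4 == 5 scalars, a complex type counts as 2, and an array type whose
   bounds are not compile-time constants yields -1.  */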
4489 HOST_WIDE_INT
4490 count_type_elements (tree type)
4492 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4493 switch (TREE_CODE (type))
4495 case ARRAY_TYPE:
4497 tree telts = array_type_nelts (type);
4498 if (telts && host_integerp (telts, 1))
4500 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4501 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4502 if (n == 0)
4503 return 0;
4504 else if (max / n > m)
4505 return n * m;
4507 return -1;
4510 case RECORD_TYPE:
4512 HOST_WIDE_INT n = 0, t;
4513 tree f;
4515 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4516 if (TREE_CODE (f) == FIELD_DECL)
4518 t = count_type_elements (TREE_TYPE (f));
4519 if (t < 0)
4520 return -1;
4521 n += t;
4524 return n;
4527 case UNION_TYPE:
4528 case QUAL_UNION_TYPE:
4530 /* Ho hum. How in the world do we guess here? Clearly it isn't
4531 right to count the fields. Guess based on the number of words. */
4532 HOST_WIDE_INT n = int_size_in_bytes (type);
4533 if (n < 0)
4534 return -1;
4535 return n / UNITS_PER_WORD;
4538 case COMPLEX_TYPE:
4539 return 2;
4541 case VECTOR_TYPE:
4542 /* ??? This is broken. We should encode the vector width in the tree. */
4543 return GET_MODE_NUNITS (TYPE_MODE (type));
4545 case INTEGER_TYPE:
4546 case REAL_TYPE:
4547 case ENUMERAL_TYPE:
4548 case BOOLEAN_TYPE:
4549 case CHAR_TYPE:
4550 case POINTER_TYPE:
4551 case OFFSET_TYPE:
4552 case REFERENCE_TYPE:
4553 return 1;
4555 case VOID_TYPE:
4556 case METHOD_TYPE:
4557 case FILE_TYPE:
4558 case SET_TYPE:
4559 case FUNCTION_TYPE:
4560 case LANG_TYPE:
4561 default:
4562 abort ();
4566 /* Return 1 if EXP contains mostly (more than 3/4) zeros. */
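/* E.g. (illustrative): an 8-element array initialized as
   { 0, 0, 0, 0, 0, 0, 0, 1 } is mostly zeros (1 nonzero element out of 8),
   whereas { 1, 1, 0, 0 } is not.  */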
4569 mostly_zeros_p (tree exp)
4571 if (TREE_CODE (exp) == CONSTRUCTOR)
4574 HOST_WIDE_INT nz_elts, nc_elts, elts;
4576 /* If there are no ranges of true bits, it is all zero. */
4577 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4578 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4580 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4581 elts = count_type_elements (TREE_TYPE (exp));
4583 return nz_elts < elts / 4;
4586 return initializer_zerop (exp);
4589 /* Helper function for store_constructor.
4590 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4591 TYPE is the type of the CONSTRUCTOR, not the element type.
4592 CLEARED is as for store_constructor.
4593 ALIAS_SET is the alias set to use for any stores.
4595 This provides a recursive shortcut back to store_constructor when it isn't
4596 necessary to go through store_field. This is so that we can pass through
4597 the cleared field to let store_constructor know that we may not have to
4598 clear a substructure if the outer structure has already been cleared. */
4600 static void
4601 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4602 HOST_WIDE_INT bitpos, enum machine_mode mode,
4603 tree exp, tree type, int cleared, int alias_set)
4605 if (TREE_CODE (exp) == CONSTRUCTOR
4606 /* We can only call store_constructor recursively if the size and
4607 bit position are on a byte boundary. */
4608 && bitpos % BITS_PER_UNIT == 0
4609 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4610 /* If we have a nonzero bitpos for a register target, then we just
4611 let store_field do the bitfield handling. This is unlikely to
4612 generate unnecessary clear instructions anyway. */
4613 && (bitpos == 0 || MEM_P (target)))
4615 if (MEM_P (target))
4616 target
4617 = adjust_address (target,
4618 GET_MODE (target) == BLKmode
4619 || 0 != (bitpos
4620 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4621 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4624 /* Update the alias set, if required. */
4625 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4626 && MEM_ALIAS_SET (target) != 0)
4628 target = copy_rtx (target);
4629 set_mem_alias_set (target, alias_set);
4632 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4634 else
4635 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4636 alias_set);
4639 /* Store the value of constructor EXP into the rtx TARGET.
4640 TARGET is either a REG or a MEM; we know it cannot conflict, since
4641 safe_from_p has been called.
4642 CLEARED is true if TARGET is known to have been zeroed.
4643 SIZE is the number of bytes of TARGET we are allowed to modify: this
4644 may not be the same as the size of EXP if we are assigning to a field
4645 which has been packed to exclude padding bits. */
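/* Illustrative case (editor's sketch, not from the original source): for
     struct { int a, b, c, d; } x = { 1 };
   the constructor has fewer elements than the type has fields, so the code
   below clears the whole of X first and then stores just the 1 into A;
   once CLEARED is set, elements that are explicitly zero are skipped as
   well.  */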
4647 static void
4648 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4650 tree type = TREE_TYPE (exp);
4651 #ifdef WORD_REGISTER_OPERATIONS
4652 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4653 #endif
4655 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4656 || TREE_CODE (type) == QUAL_UNION_TYPE)
4658 tree elt;
4660 /* If size is zero or the target is already cleared, do nothing. */
4661 if (size == 0 || cleared)
4662 cleared = 1;
4663 /* We either clear the aggregate or indicate the value is dead. */
4664 else if ((TREE_CODE (type) == UNION_TYPE
4665 || TREE_CODE (type) == QUAL_UNION_TYPE)
4666 && ! CONSTRUCTOR_ELTS (exp))
4667 /* If the constructor is empty, clear the union. */
4669 clear_storage (target, expr_size (exp));
4670 cleared = 1;
4673 /* If we are building a static constructor into a register,
4674 set the initial value as zero so we can fold the value into
4675 a constant. But if more than one register is involved,
4676 this probably loses. */
4677 else if (REG_P (target) && TREE_STATIC (exp)
4678 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4680 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4681 cleared = 1;
4684 /* If the constructor has fewer fields than the structure
4685 or if we are initializing the structure to mostly zeros,
4686 clear the whole structure first. Don't do this if TARGET is a
4687 register whose mode size isn't equal to SIZE since clear_storage
4688 can't handle this case. */
4689 else if (size > 0
4690 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4691 || mostly_zeros_p (exp))
4692 && (!REG_P (target)
4693 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4694 == size)))
4696 rtx xtarget = target;
4698 if (readonly_fields_p (type))
4700 xtarget = copy_rtx (xtarget);
4701 RTX_UNCHANGING_P (xtarget) = 1;
4704 clear_storage (xtarget, GEN_INT (size));
4705 cleared = 1;
4708 if (! cleared)
4709 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4711 /* Store each element of the constructor into
4712 the corresponding field of TARGET. */
4714 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4716 tree field = TREE_PURPOSE (elt);
4717 tree value = TREE_VALUE (elt);
4718 enum machine_mode mode;
4719 HOST_WIDE_INT bitsize;
4720 HOST_WIDE_INT bitpos = 0;
4721 tree offset;
4722 rtx to_rtx = target;
4724 /* Just ignore missing fields.
4725 We cleared the whole structure, above,
4726 if any fields are missing. */
4727 if (field == 0)
4728 continue;
4730 if (cleared && initializer_zerop (value))
4731 continue;
4733 if (host_integerp (DECL_SIZE (field), 1))
4734 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4735 else
4736 bitsize = -1;
4738 mode = DECL_MODE (field);
4739 if (DECL_BIT_FIELD (field))
4740 mode = VOIDmode;
4742 offset = DECL_FIELD_OFFSET (field);
4743 if (host_integerp (offset, 0)
4744 && host_integerp (bit_position (field), 0))
4746 bitpos = int_bit_position (field);
4747 offset = 0;
4749 else
4750 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4752 if (offset)
4754 rtx offset_rtx;
4756 offset
4757 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4758 make_tree (TREE_TYPE (exp),
4759 target));
4761 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4762 if (!MEM_P (to_rtx))
4763 abort ();
4765 #ifdef POINTERS_EXTEND_UNSIGNED
4766 if (GET_MODE (offset_rtx) != Pmode)
4767 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4768 #else
4769 if (GET_MODE (offset_rtx) != ptr_mode)
4770 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4771 #endif
4773 to_rtx = offset_address (to_rtx, offset_rtx,
4774 highest_pow2_factor (offset));
4777 if (TREE_READONLY (field))
4779 if (MEM_P (to_rtx))
4780 to_rtx = copy_rtx (to_rtx);
4782 RTX_UNCHANGING_P (to_rtx) = 1;
4785 #ifdef WORD_REGISTER_OPERATIONS
4786 /* If this initializes a field that is smaller than a word, at the
4787 start of a word, try to widen it to a full word.
4788 This special case allows us to output C++ member function
4789 initializations in a form that the optimizers can understand. */
4790 if (REG_P (target)
4791 && bitsize < BITS_PER_WORD
4792 && bitpos % BITS_PER_WORD == 0
4793 && GET_MODE_CLASS (mode) == MODE_INT
4794 && TREE_CODE (value) == INTEGER_CST
4795 && exp_size >= 0
4796 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4798 tree type = TREE_TYPE (value);
4800 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4802 type = lang_hooks.types.type_for_size
4803 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4804 value = convert (type, value);
4807 if (BYTES_BIG_ENDIAN)
4808 value
4809 = fold (build (LSHIFT_EXPR, type, value,
4810 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4811 bitsize = BITS_PER_WORD;
4812 mode = word_mode;
4814 #endif
4816 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4817 && DECL_NONADDRESSABLE_P (field))
4819 to_rtx = copy_rtx (to_rtx);
4820 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4823 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4824 value, type, cleared,
4825 get_alias_set (TREE_TYPE (field)));
4828 else if (TREE_CODE (type) == ARRAY_TYPE
4829 || TREE_CODE (type) == VECTOR_TYPE)
4831 tree elt;
4832 int i;
4833 int need_to_clear;
4834 tree domain;
4835 tree elttype = TREE_TYPE (type);
4836 int const_bounds_p;
4837 HOST_WIDE_INT minelt = 0;
4838 HOST_WIDE_INT maxelt = 0;
4839 int icode = 0;
4840 rtx *vector = NULL;
4841 int elt_size = 0;
4842 unsigned n_elts = 0;
4844 if (TREE_CODE (type) == ARRAY_TYPE)
4845 domain = TYPE_DOMAIN (type);
4846 else
4847 /* Vectors do not have domains; look up the domain of
4848 the array embedded in the debug representation type.
4849 FIXME Would probably be more efficient to treat vectors
4850 separately from arrays. */
4852 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4853 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4854 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4856 enum machine_mode mode = GET_MODE (target);
4858 icode = (int) vec_init_optab->handlers[mode].insn_code;
4859 if (icode != CODE_FOR_nothing)
4861 unsigned int i;
4863 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4864 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4865 vector = alloca (n_elts);
4866 for (i = 0; i < n_elts; i++)
4867 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4872 const_bounds_p = (TYPE_MIN_VALUE (domain)
4873 && TYPE_MAX_VALUE (domain)
4874 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4875 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4877 /* If we have constant bounds for the range of the type, get them. */
4878 if (const_bounds_p)
4880 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4881 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4884 /* If the constructor has fewer elements than the array,
4885 clear the whole array first. Similarly if this is a
4886 static constructor of a non-BLKmode object. */
4887 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4888 need_to_clear = 1;
4889 else
4891 HOST_WIDE_INT count = 0, zero_count = 0;
4892 need_to_clear = ! const_bounds_p;
4894 /* This loop is a more accurate version of the loop in
4895 mostly_zeros_p (it handles RANGE_EXPR in an index).
4896 It is also needed to check for missing elements. */
4897 for (elt = CONSTRUCTOR_ELTS (exp);
4898 elt != NULL_TREE && ! need_to_clear;
4899 elt = TREE_CHAIN (elt))
4901 tree index = TREE_PURPOSE (elt);
4902 HOST_WIDE_INT this_node_count;
4904 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4906 tree lo_index = TREE_OPERAND (index, 0);
4907 tree hi_index = TREE_OPERAND (index, 1);
4909 if (! host_integerp (lo_index, 1)
4910 || ! host_integerp (hi_index, 1))
4912 need_to_clear = 1;
4913 break;
4916 this_node_count = (tree_low_cst (hi_index, 1)
4917 - tree_low_cst (lo_index, 1) + 1);
4919 else
4920 this_node_count = 1;
4922 count += this_node_count;
4923 if (mostly_zeros_p (TREE_VALUE (elt)))
4924 zero_count += this_node_count;
4927 /* Clear the entire array first if there are any missing elements,
4928 or if the incidence of zero elements is >= 75%. */
4929 if (! need_to_clear
4930 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4931 need_to_clear = 1;
4934 if (need_to_clear && size > 0 && !vector)
4936 if (! cleared)
4938 if (REG_P (target))
4939 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4940 else
4941 clear_storage (target, GEN_INT (size));
4943 cleared = 1;
4945 else if (REG_P (target))
4946 /* Inform later passes that the old value is dead. */
4947 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4949 /* Store each element of the constructor into
4950 the corresponding element of TARGET, determined
4951 by counting the elements. */
4952 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4953 elt;
4954 elt = TREE_CHAIN (elt), i++)
4956 enum machine_mode mode;
4957 HOST_WIDE_INT bitsize;
4958 HOST_WIDE_INT bitpos;
4959 int unsignedp;
4960 tree value = TREE_VALUE (elt);
4961 tree index = TREE_PURPOSE (elt);
4962 rtx xtarget = target;
4964 if (cleared && initializer_zerop (value))
4965 continue;
4967 unsignedp = TYPE_UNSIGNED (elttype);
4968 mode = TYPE_MODE (elttype);
4969 if (mode == BLKmode)
4970 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4971 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4972 : -1);
4973 else
4974 bitsize = GET_MODE_BITSIZE (mode);
4976 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4978 tree lo_index = TREE_OPERAND (index, 0);
4979 tree hi_index = TREE_OPERAND (index, 1);
4980 rtx index_r, pos_rtx;
4981 HOST_WIDE_INT lo, hi, count;
4982 tree position;
4984 if (vector)
4985 abort ();
4987 /* If the range is constant and "small", unroll the loop. */
4988 if (const_bounds_p
4989 && host_integerp (lo_index, 0)
4990 && host_integerp (hi_index, 0)
4991 && (lo = tree_low_cst (lo_index, 0),
4992 hi = tree_low_cst (hi_index, 0),
4993 count = hi - lo + 1,
4994 (!MEM_P (target)
4995 || count <= 2
4996 || (host_integerp (TYPE_SIZE (elttype), 1)
4997 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4998 <= 40 * 8)))))
5000 lo -= minelt; hi -= minelt;
5001 for (; lo <= hi; lo++)
5003 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5005 if (MEM_P (target)
5006 && !MEM_KEEP_ALIAS_SET_P (target)
5007 && TREE_CODE (type) == ARRAY_TYPE
5008 && TYPE_NONALIASED_COMPONENT (type))
5010 target = copy_rtx (target);
5011 MEM_KEEP_ALIAS_SET_P (target) = 1;
5014 store_constructor_field
5015 (target, bitsize, bitpos, mode, value, type, cleared,
5016 get_alias_set (elttype));
5019 else
5021 rtx loop_start = gen_label_rtx ();
5022 rtx loop_end = gen_label_rtx ();
5023 tree exit_cond;
5025 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5026 unsignedp = TYPE_UNSIGNED (domain);
5028 index = build_decl (VAR_DECL, NULL_TREE, domain);
5030 index_r
5031 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5032 &unsignedp, 0));
5033 SET_DECL_RTL (index, index_r);
5034 store_expr (lo_index, index_r, 0);
5036 /* Build the head of the loop. */
5037 do_pending_stack_adjust ();
5038 emit_queue ();
5039 emit_label (loop_start);
5041 /* Assign value to element index. */
5042 position
5043 = convert (ssizetype,
5044 fold (build (MINUS_EXPR, TREE_TYPE (index),
5045 index, TYPE_MIN_VALUE (domain))));
5046 position = size_binop (MULT_EXPR, position,
5047 convert (ssizetype,
5048 TYPE_SIZE_UNIT (elttype)));
5050 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5051 xtarget = offset_address (target, pos_rtx,
5052 highest_pow2_factor (position));
5053 xtarget = adjust_address (xtarget, mode, 0);
5054 if (TREE_CODE (value) == CONSTRUCTOR)
5055 store_constructor (value, xtarget, cleared,
5056 bitsize / BITS_PER_UNIT);
5057 else
5058 store_expr (value, xtarget, 0);
5060 /* Generate a conditional jump to exit the loop. */
5061 exit_cond = build (LT_EXPR, integer_type_node,
5062 index, hi_index);
5063 jumpif (exit_cond, loop_end);
5065 /* Update the loop counter, and jump to the head of
5066 the loop. */
5067 expand_increment (build (PREINCREMENT_EXPR,
5068 TREE_TYPE (index),
5069 index, integer_one_node), 0, 0);
5070 emit_jump (loop_start);
5072 /* Build the end of the loop. */
5073 emit_label (loop_end);
5076 else if ((index != 0 && ! host_integerp (index, 0))
5077 || ! host_integerp (TYPE_SIZE (elttype), 1))
5079 tree position;
5081 if (vector)
5082 abort ();
5084 if (index == 0)
5085 index = ssize_int (1);
5087 if (minelt)
5088 index = convert (ssizetype,
5089 fold (build (MINUS_EXPR, index,
5090 TYPE_MIN_VALUE (domain))));
5092 position = size_binop (MULT_EXPR, index,
5093 convert (ssizetype,
5094 TYPE_SIZE_UNIT (elttype)));
5095 xtarget = offset_address (target,
5096 expand_expr (position, 0, VOIDmode, 0),
5097 highest_pow2_factor (position));
5098 xtarget = adjust_address (xtarget, mode, 0);
5099 store_expr (value, xtarget, 0);
5101 else if (vector)
5103 int pos;
5105 if (index != 0)
5106 pos = tree_low_cst (index, 0) - minelt;
5107 else
5108 pos = i;
5109 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5111 else
5113 if (index != 0)
5114 bitpos = ((tree_low_cst (index, 0) - minelt)
5115 * tree_low_cst (TYPE_SIZE (elttype), 1));
5116 else
5117 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5119 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5120 && TREE_CODE (type) == ARRAY_TYPE
5121 && TYPE_NONALIASED_COMPONENT (type))
5123 target = copy_rtx (target);
5124 MEM_KEEP_ALIAS_SET_P (target) = 1;
5126 store_constructor_field (target, bitsize, bitpos, mode, value,
5127 type, cleared, get_alias_set (elttype));
5130 if (vector)
5132 emit_insn (GEN_FCN (icode) (target,
5133 gen_rtx_PARALLEL (GET_MODE (target),
5134 gen_rtvec_v (n_elts, vector))));
5138 /* Set constructor assignments. */
5139 else if (TREE_CODE (type) == SET_TYPE)
5141 tree elt = CONSTRUCTOR_ELTS (exp);
5142 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5143 tree domain = TYPE_DOMAIN (type);
5144 tree domain_min, domain_max, bitlength;
5146 /* The default implementation strategy is to extract the constant
5147 parts of the constructor, use that to initialize the target,
5148 and then "or" in whatever non-constant ranges we need in addition.
5150 If a large set is all zero or all ones, it is
5151 probably better to set it using memset.
5152 Also, if a large set has just a single range, it may be
5153 better to first clear the whole set (using
5154 memset), and then set the bits we want. */
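/* Illustrative case (editor's sketch; SET_TYPE constructors come from
   Pascal-like front ends): a 64-bit set with the constant members
   [1, 5..8] is assembled a word at a time from the bit buffer below and
   stored with ordinary moves, while a member range whose bounds are only
   known at run time is handled afterwards through setbits_libfunc.  */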
5156 /* Check for all zeros. */
5157 if (elt == NULL_TREE && size > 0)
5159 if (!cleared)
5160 clear_storage (target, GEN_INT (size));
5161 return;
5164 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5165 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5166 bitlength = size_binop (PLUS_EXPR,
5167 size_diffop (domain_max, domain_min),
5168 ssize_int (1));
5170 nbits = tree_low_cst (bitlength, 1);
5172 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5173 are "complicated" (more than one range), initialize (the
5174 constant parts) by copying from a constant. */
5175 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5176 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5178 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5179 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5180 char *bit_buffer = alloca (nbits);
5181 HOST_WIDE_INT word = 0;
5182 unsigned int bit_pos = 0;
5183 unsigned int ibit = 0;
5184 unsigned int offset = 0; /* In bytes from beginning of set. */
5186 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5187 for (;;)
5189 if (bit_buffer[ibit])
5191 if (BYTES_BIG_ENDIAN)
5192 word |= (1 << (set_word_size - 1 - bit_pos));
5193 else
5194 word |= 1 << bit_pos;
5197 bit_pos++; ibit++;
5198 if (bit_pos >= set_word_size || ibit == nbits)
5200 if (word != 0 || ! cleared)
5202 rtx datum = gen_int_mode (word, mode);
5203 rtx to_rtx;
5205 /* The assumption here is that it is safe to use
5206 XEXP if the set is multi-word, but not if
5207 it's single-word. */
5208 if (MEM_P (target))
5209 to_rtx = adjust_address (target, mode, offset);
5210 else if (offset == 0)
5211 to_rtx = target;
5212 else
5213 abort ();
5214 emit_move_insn (to_rtx, datum);
5217 if (ibit == nbits)
5218 break;
5219 word = 0;
5220 bit_pos = 0;
5221 offset += set_word_size / BITS_PER_UNIT;
5225 else if (!cleared)
5226 /* Don't bother clearing storage if the set is all ones. */
5227 if (TREE_CHAIN (elt) != NULL_TREE
5228 || (TREE_PURPOSE (elt) == NULL_TREE
5229 ? nbits != 1
5230 : ( ! host_integerp (TREE_VALUE (elt), 0)
5231 || ! host_integerp (TREE_PURPOSE (elt), 0)
5232 || (tree_low_cst (TREE_VALUE (elt), 0)
5233 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5234 != (HOST_WIDE_INT) nbits))))
5235 clear_storage (target, expr_size (exp));
5237 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5239 /* Start of range of element or NULL. */
5240 tree startbit = TREE_PURPOSE (elt);
5241 /* End of range of element, or element value. */
5242 tree endbit = TREE_VALUE (elt);
5243 HOST_WIDE_INT startb, endb;
5244 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5246 bitlength_rtx = expand_expr (bitlength,
5247 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5249 /* Handle non-range tuple element like [ expr ]. */
5250 if (startbit == NULL_TREE)
5252 startbit = save_expr (endbit);
5253 endbit = startbit;
5256 startbit = convert (sizetype, startbit);
5257 endbit = convert (sizetype, endbit);
5258 if (! integer_zerop (domain_min))
5260 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5261 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5263 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5264 EXPAND_CONST_ADDRESS);
5265 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5266 EXPAND_CONST_ADDRESS);
5268 if (REG_P (target))
5270 targetx
5271 = assign_temp
5272 ((build_qualified_type (lang_hooks.types.type_for_mode
5273 (GET_MODE (target), 0),
5274 TYPE_QUAL_CONST)),
5275 0, 1, 1);
5276 emit_move_insn (targetx, target);
5279 else if (MEM_P (target))
5280 targetx = target;
5281 else
5282 abort ();
5284 /* Optimization: If startbit and endbit are constants divisible
5285 by BITS_PER_UNIT, call memset instead. */
5286 if (TREE_CODE (startbit) == INTEGER_CST
5287 && TREE_CODE (endbit) == INTEGER_CST
5288 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5289 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5291 emit_library_call (memset_libfunc, LCT_NORMAL,
5292 VOIDmode, 3,
5293 plus_constant (XEXP (targetx, 0),
5294 startb / BITS_PER_UNIT),
5295 Pmode,
5296 constm1_rtx, TYPE_MODE (integer_type_node),
5297 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5298 TYPE_MODE (sizetype));
5300 else
5301 emit_library_call (setbits_libfunc, LCT_NORMAL,
5302 VOIDmode, 4, XEXP (targetx, 0),
5303 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5304 startbit_rtx, TYPE_MODE (sizetype),
5305 endbit_rtx, TYPE_MODE (sizetype));
5307 if (REG_P (target))
5308 emit_move_insn (target, targetx);
5312 else
5313 abort ();
5316 /* Store the value of EXP (an expression tree)
5317 into a subfield of TARGET which has mode MODE and occupies
5318 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5319 If MODE is VOIDmode, it means that we are storing into a bit-field.
5321 If VALUE_MODE is VOIDmode, return nothing in particular.
5322 UNSIGNEDP is not used in this case.
5324 Otherwise, return an rtx for the value stored. This rtx
5325 has mode VALUE_MODE if that is convenient to do.
5326 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5328 TYPE is the type of the underlying object,
5330 ALIAS_SET is the alias set for the destination. This value will
5331 (in general) be different from that for TARGET, since TARGET is a
5332 reference to the containing structure. */
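/* Illustrative call (editor's sketch, not from the original source):
   storing RHS into the member "unsigned f : 5;" that starts 3 bits into a
   structure S reaches this function roughly as
     store_field (s_rtx, 5, 3, VOIDmode, rhs_tree, VOIDmode, 1,
                  TREE_TYPE (s_tree), get_alias_set (lhs_tree));
   where MODE == VOIDmode selects the bit-field path below and the
   VOIDmode VALUE_MODE means no rtx result is wanted.  */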
5334 static rtx
5335 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5336 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5337 int unsignedp, tree type, int alias_set)
5339 HOST_WIDE_INT width_mask = 0;
5341 if (TREE_CODE (exp) == ERROR_MARK)
5342 return const0_rtx;
5344 /* If we have nothing to store, do nothing unless the expression has
5345 side-effects. */
5346 if (bitsize == 0)
5347 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5348 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5349 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5351 /* If we are storing into an unaligned field of an aligned union that is
5352 in a register, we may have the mode of TARGET being an integer mode but
5353 MODE == BLKmode. In that case, get an aligned object whose size and
5354 alignment are the same as TARGET and store TARGET into it (we can avoid
5355 the store if the field being stored is the entire width of TARGET). Then
5356 call ourselves recursively to store the field into a BLKmode version of
5357 that object. Finally, load from the object into TARGET. This is not
5358 very efficient in general, but should only be slightly more expensive
5359 than the otherwise-required unaligned accesses. Perhaps this can be
5360 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5361 twice, once with emit_move_insn and once via store_field. */
5363 if (mode == BLKmode
5364 && (REG_P (target) || GET_CODE (target) == SUBREG))
5366 rtx object = assign_temp (type, 0, 1, 1);
5367 rtx blk_object = adjust_address (object, BLKmode, 0);
5369 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5370 emit_move_insn (object, target);
5372 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5373 alias_set);
5375 emit_move_insn (target, object);
5377 /* We want to return the BLKmode version of the data. */
5378 return blk_object;
5381 if (GET_CODE (target) == CONCAT)
5383 /* We're storing into a struct containing a single __complex. */
5385 if (bitpos != 0)
5386 abort ();
5387 return store_expr (exp, target, value_mode != VOIDmode);
5390 /* If the structure is in a register or if the component
5391 is a bit field, we cannot use addressing to access it.
5392 Use bit-field techniques or SUBREG to store in it. */
5394 if (mode == VOIDmode
5395 || (mode != BLKmode && ! direct_store[(int) mode]
5396 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5397 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5398 || REG_P (target)
5399 || GET_CODE (target) == SUBREG
5400 /* If the field isn't aligned enough to store as an ordinary memref,
5401 store it as a bit field. */
5402 || (mode != BLKmode
5403 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5404 || bitpos % GET_MODE_ALIGNMENT (mode))
5405 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5406 || (bitpos % BITS_PER_UNIT != 0)))
5407 /* If the RHS and field are a constant size and the size of the
5408 RHS isn't the same size as the bitfield, we must use bitfield
5409 operations. */
5410 || (bitsize >= 0
5411 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5412 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5414 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5416 /* If BITSIZE is narrower than the size of the type of EXP
5417 we will be narrowing TEMP. Normally, what's wanted are the
5418 low-order bits. However, if EXP's type is a record and this is a
5419 big-endian machine, we want the upper BITSIZE bits.
5420 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5421 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5422 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5423 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5424 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5425 - bitsize),
5426 NULL_RTX, 1);
5428 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5429 MODE. */
5430 if (mode != VOIDmode && mode != BLKmode
5431 && mode != TYPE_MODE (TREE_TYPE (exp)))
5432 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5434 /* If the modes of TARGET and TEMP are both BLKmode, both
5435 must be in memory and BITPOS must be aligned on a byte
5436 boundary. If so, we simply do a block copy. */
5437 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5439 if (!MEM_P (target) || !MEM_P (temp)
5440 || bitpos % BITS_PER_UNIT != 0)
5441 abort ();
5443 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5444 emit_block_move (target, temp,
5445 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5446 / BITS_PER_UNIT),
5447 BLOCK_OP_NORMAL);
5449 return value_mode == VOIDmode ? const0_rtx : target;
5452 /* Store the value in the bitfield. */
5453 store_bit_field (target, bitsize, bitpos, mode, temp,
5454 int_size_in_bytes (type));
5456 if (value_mode != VOIDmode)
5458 /* The caller wants an rtx for the value.
5459 If possible, avoid refetching from the bitfield itself. */
5460 if (width_mask != 0
5461 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5463 tree count;
5464 enum machine_mode tmode;
5466 tmode = GET_MODE (temp);
5467 if (tmode == VOIDmode)
5468 tmode = value_mode;
5470 if (unsignedp)
5471 return expand_and (tmode, temp,
5472 gen_int_mode (width_mask, tmode),
5473 NULL_RTX);
5475 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5476 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5477 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5480 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5481 NULL_RTX, value_mode, VOIDmode,
5482 int_size_in_bytes (type));
5484 return const0_rtx;
5486 else
5488 rtx addr = XEXP (target, 0);
5489 rtx to_rtx = target;
5491 /* If a value is wanted, it must be the lhs;
5492 so make the address stable for multiple use. */
5494 if (value_mode != VOIDmode && !REG_P (addr)
5495 && ! CONSTANT_ADDRESS_P (addr)
5496 /* A frame-pointer reference is already stable. */
5497 && ! (GET_CODE (addr) == PLUS
5498 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5499 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5500 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5501 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5503 /* Now build a reference to just the desired component. */
5505 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5507 if (to_rtx == target)
5508 to_rtx = copy_rtx (to_rtx);
5510 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5511 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5512 set_mem_alias_set (to_rtx, alias_set);
5514 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5518 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5519 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5520 codes and find the ultimate containing object, which we return.
5522 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5523 bit position, and *PUNSIGNEDP to the signedness of the field.
5524 If the position of the field is variable, we store a tree
5525 giving the variable offset (in units) in *POFFSET.
5526 This offset is in addition to the bit position.
5527 If the position is not variable, we store 0 in *POFFSET.
5529 If any of the extraction expressions is volatile,
5530 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5532 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5533 is a mode that can be used to access the field. In that case, *PBITSIZE
5534 is redundant.
5536 If the field describes a variable-sized object, *PMODE is set to
5537 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5538 this case, but the address of the object can be found. */
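/* For illustration, assuming 8-bit units and a 32-bit SImode "int":
   for a COMPONENT_REF S.F where F is a non-bit-field "int" laid out
   4 bytes into S, this returns S with *PBITSIZE == 32, *PBITPOS == 32,
   *POFFSET == 0 and *PMODE == SImode.  */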
5540 tree
5541 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5542 HOST_WIDE_INT *pbitpos, tree *poffset,
5543 enum machine_mode *pmode, int *punsignedp,
5544 int *pvolatilep)
5546 tree size_tree = 0;
5547 enum machine_mode mode = VOIDmode;
5548 tree offset = size_zero_node;
5549 tree bit_offset = bitsize_zero_node;
5550 tree tem;
5552 /* First get the mode, signedness, and size. We do this from just the
5553 outermost expression. */
5554 if (TREE_CODE (exp) == COMPONENT_REF)
5556 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5557 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5558 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5560 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5562 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5564 size_tree = TREE_OPERAND (exp, 1);
5565 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5567 else
5569 mode = TYPE_MODE (TREE_TYPE (exp));
5570 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5572 if (mode == BLKmode)
5573 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5574 else
5575 *pbitsize = GET_MODE_BITSIZE (mode);
5578 if (size_tree != 0)
5580 if (! host_integerp (size_tree, 1))
5581 mode = BLKmode, *pbitsize = -1;
5582 else
5583 *pbitsize = tree_low_cst (size_tree, 1);
5586 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5587 and find the ultimate containing object. */
5588 while (1)
5590 if (TREE_CODE (exp) == BIT_FIELD_REF)
5591 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5592 else if (TREE_CODE (exp) == COMPONENT_REF)
5594 tree field = TREE_OPERAND (exp, 1);
5595 tree this_offset = component_ref_field_offset (exp);
5597 /* If this field hasn't been filled in yet, don't go
5598 past it. This should only happen when folding expressions
5599 made during type construction. */
5600 if (this_offset == 0)
5601 break;
5603 offset = size_binop (PLUS_EXPR, offset, this_offset);
5604 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5605 DECL_FIELD_BIT_OFFSET (field));
5607 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5610 else if (TREE_CODE (exp) == ARRAY_REF
5611 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5613 tree index = TREE_OPERAND (exp, 1);
5614 tree low_bound = array_ref_low_bound (exp);
5615 tree unit_size = array_ref_element_size (exp);
5617 /* We assume all arrays have sizes that are a multiple of a byte.
5618 First subtract the lower bound, if any, in the type of the
5619 index, then convert to sizetype and multiply by the size of the
5620 array element. */
5621 if (! integer_zerop (low_bound))
5622 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5623 index, low_bound));
5625 offset = size_binop (PLUS_EXPR, offset,
5626 size_binop (MULT_EXPR,
5627 convert (sizetype, index),
5628 unit_size));
5631 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5632 conversions that don't change the mode, and all view conversions
5633 except those that need to "step up" the alignment. */
5634 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5635 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5636 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5637 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5638 && STRICT_ALIGNMENT
5639 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5640 < BIGGEST_ALIGNMENT)
5641 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5642 || TYPE_ALIGN_OK (TREE_TYPE
5643 (TREE_OPERAND (exp, 0))))))
5644 && ! ((TREE_CODE (exp) == NOP_EXPR
5645 || TREE_CODE (exp) == CONVERT_EXPR)
5646 && (TYPE_MODE (TREE_TYPE (exp))
5647 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5648 break;
5650 /* If any reference in the chain is volatile, the effect is volatile. */
5651 if (TREE_THIS_VOLATILE (exp))
5652 *pvolatilep = 1;
5654 exp = TREE_OPERAND (exp, 0);
5657 /* If OFFSET is constant, see if we can return the whole thing as a
5658 constant bit position. Otherwise, split it up. */
5659 if (host_integerp (offset, 0)
5660 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5661 bitsize_unit_node))
5662 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5663 && host_integerp (tem, 0))
5664 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5665 else
5666 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5668 *pmode = mode;
5669 return exp;
5672 /* Return a tree of sizetype representing the size, in bytes, of the element
5673 of EXP, an ARRAY_REF. */
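/* For illustration: for A[I] with ordinary "int" elements and no
   operand 3, this is simply TYPE_SIZE_UNIT of the element type,
   i.e. the sizetype constant 4, assuming a 32-bit "int".  */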
5675 tree
5676 array_ref_element_size (tree exp)
5678 tree aligned_size = TREE_OPERAND (exp, 3);
5679 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5681 /* If a size was specified in the ARRAY_REF, it's the size measured
5682 in alignment units of the element type. So multiply by that value. */
5683 if (aligned_size)
5684 return size_binop (MULT_EXPR, aligned_size,
5685 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5687 /* Otherwise, take the size from that of the element type. Substitute
5688 any PLACEHOLDER_EXPR that we have. */
5689 else
5690 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5693 /* Return a tree representing the lower bound of the array mentioned in
5694 EXP, an ARRAY_REF. */
5696 tree
5697 array_ref_low_bound (tree exp)
5699 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5701 /* If a lower bound is specified in EXP, use it. */
5702 if (TREE_OPERAND (exp, 2))
5703 return TREE_OPERAND (exp, 2);
5705 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5706 substituting for a PLACEHOLDER_EXPR as needed. */
5707 if (domain_type && TYPE_MIN_VALUE (domain_type))
5708 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5710 /* Otherwise, return a zero of the appropriate type. */
5711 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5714 /* Return a tree representing the offset, in bytes, of the field referenced
5715 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
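/* For illustration: with no operand 2 this is DECL_FIELD_OFFSET of
   the field, e.g. typically the sizetype constant 8 for a field
   placed 8 bytes into its structure.  */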
5717 tree
5718 component_ref_field_offset (tree exp)
5720 tree aligned_offset = TREE_OPERAND (exp, 2);
5721 tree field = TREE_OPERAND (exp, 1);
5723 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5724 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5725 value. */
5726 if (aligned_offset)
5727 return size_binop (MULT_EXPR, aligned_offset,
5728 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5730 /* Otherwise, take the offset from that of the field. Substitute
5731 any PLACEHOLDER_EXPR that we have. */
5732 else
5733 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5736 /* Return 1 if T is an expression that get_inner_reference handles. */
5738 int
5739 handled_component_p (tree t)
5741 switch (TREE_CODE (t))
5743 case BIT_FIELD_REF:
5744 case COMPONENT_REF:
5745 case ARRAY_REF:
5746 case ARRAY_RANGE_REF:
5747 case NON_LVALUE_EXPR:
5748 case VIEW_CONVERT_EXPR:
5749 return 1;
5751 /* ??? Sure they are handled, but get_inner_reference may return
5752 a different PBITSIZE, depending upon whether the expression is
5753 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5754 case NOP_EXPR:
5755 case CONVERT_EXPR:
5756 return (TYPE_MODE (TREE_TYPE (t))
5757 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5759 default:
5760 return 0;
5764 /* Given an rtx VALUE that may contain additions and multiplications, return
5765 an equivalent value that just refers to a register, memory, or constant.
5766 This is done by generating instructions to perform the arithmetic and
5767 returning a pseudo-register containing the value.
5769 The returned value may be a REG, SUBREG, MEM or constant. */
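/* For illustration: given (plus (reg:SI 100) (const_int 4)), where
   register 100 is just a made-up pseudo, this emits an add via
   expand_simple_binop and returns the register holding the sum.  */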
5771 rtx
5772 force_operand (rtx value, rtx target)
5774 rtx op1, op2;
5775 /* Use subtarget as the target for operand 0 of a binary operation. */
5776 rtx subtarget = get_subtarget (target);
5777 enum rtx_code code = GET_CODE (value);
5779 /* Check for subreg applied to an expression produced by the loop optimizer. */
5780 if (code == SUBREG
5781 && !REG_P (SUBREG_REG (value))
5782 && !MEM_P (SUBREG_REG (value)))
5784 value = simplify_gen_subreg (GET_MODE (value),
5785 force_reg (GET_MODE (SUBREG_REG (value)),
5786 force_operand (SUBREG_REG (value),
5787 NULL_RTX)),
5788 GET_MODE (SUBREG_REG (value)),
5789 SUBREG_BYTE (value));
5790 code = GET_CODE (value);
5793 /* Check for a PIC address load. */
5794 if ((code == PLUS || code == MINUS)
5795 && XEXP (value, 0) == pic_offset_table_rtx
5796 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5797 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5798 || GET_CODE (XEXP (value, 1)) == CONST))
5800 if (!subtarget)
5801 subtarget = gen_reg_rtx (GET_MODE (value));
5802 emit_move_insn (subtarget, value);
5803 return subtarget;
5806 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5808 if (!target)
5809 target = gen_reg_rtx (GET_MODE (value));
5810 convert_move (target, force_operand (XEXP (value, 0), NULL),
5811 code == ZERO_EXTEND);
5812 return target;
5815 if (ARITHMETIC_P (value))
5817 op2 = XEXP (value, 1);
5818 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5819 subtarget = 0;
5820 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5822 code = PLUS;
5823 op2 = negate_rtx (GET_MODE (value), op2);
5826 /* Check for an addition with OP2 a constant integer and our first
5827 operand a PLUS of a virtual register and something else. In that
5828 case, we want to emit the sum of the virtual register and the
5829 constant first and then add the other value. This allows virtual
5830 register instantiation to simply modify the constant rather than
5831 creating another one around this addition. */
5832 if (code == PLUS && GET_CODE (op2) == CONST_INT
5833 && GET_CODE (XEXP (value, 0)) == PLUS
5834 && REG_P (XEXP (XEXP (value, 0), 0))
5835 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5836 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5838 rtx temp = expand_simple_binop (GET_MODE (value), code,
5839 XEXP (XEXP (value, 0), 0), op2,
5840 subtarget, 0, OPTAB_LIB_WIDEN);
5841 return expand_simple_binop (GET_MODE (value), code, temp,
5842 force_operand (XEXP (XEXP (value,
5843 0), 1), 0),
5844 target, 0, OPTAB_LIB_WIDEN);
5847 op1 = force_operand (XEXP (value, 0), subtarget);
5848 op2 = force_operand (op2, NULL_RTX);
5849 switch (code)
5851 case MULT:
5852 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5853 case DIV:
5854 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5855 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5856 target, 1, OPTAB_LIB_WIDEN);
5857 else
5858 return expand_divmod (0,
5859 FLOAT_MODE_P (GET_MODE (value))
5860 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5861 GET_MODE (value), op1, op2, target, 0);
5862 break;
5863 case MOD:
5864 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5865 target, 0);
5866 break;
5867 case UDIV:
5868 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5869 target, 1);
5870 break;
5871 case UMOD:
5872 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5873 target, 1);
5874 break;
5875 case ASHIFTRT:
5876 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5877 target, 0, OPTAB_LIB_WIDEN);
5878 break;
5879 default:
5880 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5881 target, 1, OPTAB_LIB_WIDEN);
5884 if (UNARY_P (value))
5886 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5887 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5890 #ifdef INSN_SCHEDULING
5891 /* On machines that have insn scheduling, we want all memory references to be
5892 explicit, so we need to deal with such paradoxical SUBREGs. */
5893 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5894 && (GET_MODE_SIZE (GET_MODE (value))
5895 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5896 value
5897 = simplify_gen_subreg (GET_MODE (value),
5898 force_reg (GET_MODE (SUBREG_REG (value)),
5899 force_operand (SUBREG_REG (value),
5900 NULL_RTX)),
5901 GET_MODE (SUBREG_REG (value)),
5902 SUBREG_BYTE (value));
5903 #endif
5905 return value;
5908 /* Subroutine of expand_expr: return nonzero iff there is no way that
5909 EXP can reference X, which is being modified. TOP_P is nonzero if this
5910 call is going to be used to determine whether we need a temporary
5911 for EXP, as opposed to a recursive call to this function.
5913 It is always safe for this routine to return zero since it merely
5914 searches for optimization opportunities. */
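/* In practice, a nonzero result lets callers such as expand_operands
   keep using X as a target; a zero result merely makes them fall back
   to a fresh temporary.  */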
5916 static int
5917 safe_from_p (rtx x, tree exp, int top_p)
5919 rtx exp_rtl = 0;
5920 int i, nops;
5922 if (x == 0
5923 /* If EXP has varying size, we MUST use a target since we currently
5924 have no way of allocating temporaries of variable size
5925 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5926 So we assume here that something at a higher level has prevented a
5927 clash. This is somewhat bogus, but the best we can do. Only
5928 do this when X is BLKmode and when we are at the top level. */
5929 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5930 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5931 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5932 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5933 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5934 != INTEGER_CST)
5935 && GET_MODE (x) == BLKmode)
5936 /* If X is in the outgoing argument area, it is always safe. */
5937 || (MEM_P (x)
5938 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5939 || (GET_CODE (XEXP (x, 0)) == PLUS
5940 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5941 return 1;
5943 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5944 find the underlying pseudo. */
5945 if (GET_CODE (x) == SUBREG)
5947 x = SUBREG_REG (x);
5948 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5949 return 0;
5952 /* Now look at our tree code and possibly recurse. */
5953 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5955 case 'd':
5956 exp_rtl = DECL_RTL_IF_SET (exp);
5957 break;
5959 case 'c':
5960 return 1;
5962 case 'x':
5963 if (TREE_CODE (exp) == TREE_LIST)
5965 while (1)
5967 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5968 return 0;
5969 exp = TREE_CHAIN (exp);
5970 if (!exp)
5971 return 1;
5972 if (TREE_CODE (exp) != TREE_LIST)
5973 return safe_from_p (x, exp, 0);
5976 else if (TREE_CODE (exp) == ERROR_MARK)
5977 return 1; /* An already-visited SAVE_EXPR? */
5978 else
5979 return 0;
5981 case 's':
5982 /* The only case we look at here is the DECL_INITIAL inside a
5983 DECL_EXPR. */
5984 return (TREE_CODE (exp) != DECL_EXPR
5985 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5986 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5987 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5989 case '2':
5990 case '<':
5991 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5992 return 0;
5993 /* Fall through. */
5995 case '1':
5996 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5998 case 'e':
5999 case 'r':
6000 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6001 the expression. If it is set, we conflict iff we are that rtx or
6002 both are in memory. Otherwise, we check all operands of the
6003 expression recursively. */
6005 switch (TREE_CODE (exp))
6007 case ADDR_EXPR:
6008 /* If the operand is static or we are static, we can't conflict.
6009 Likewise if we don't conflict with the operand at all. */
6010 if (staticp (TREE_OPERAND (exp, 0))
6011 || TREE_STATIC (exp)
6012 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6013 return 1;
6015 /* Otherwise, the only way this can conflict is if we are taking
6016 the address of a DECL whose address is part of X, which is
6017 very rare. */
6018 exp = TREE_OPERAND (exp, 0);
6019 if (DECL_P (exp))
6021 if (!DECL_RTL_SET_P (exp)
6022 || !MEM_P (DECL_RTL (exp)))
6023 return 0;
6024 else
6025 exp_rtl = XEXP (DECL_RTL (exp), 0);
6027 break;
6029 case INDIRECT_REF:
6030 if (MEM_P (x)
6031 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6032 get_alias_set (exp)))
6033 return 0;
6034 break;
6036 case CALL_EXPR:
6037 /* Assume that the call will clobber all hard registers and
6038 all of memory. */
6039 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6040 || MEM_P (x))
6041 return 0;
6042 break;
6044 case WITH_CLEANUP_EXPR:
6045 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6046 break;
6048 case CLEANUP_POINT_EXPR:
6049 case SAVE_EXPR:
6050 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6052 case BIND_EXPR:
6053 /* The only operand we look at is operand 1. The rest aren't
6054 part of the expression. */
6055 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6057 default:
6058 break;
6061 /* If we have an rtx, we do not need to scan our operands. */
6062 if (exp_rtl)
6063 break;
6065 nops = first_rtl_op (TREE_CODE (exp));
6066 for (i = 0; i < nops; i++)
6067 if (TREE_OPERAND (exp, i) != 0
6068 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6069 return 0;
6071 /* If this is a language-specific tree code, it may require
6072 special handling. */
6073 if ((unsigned int) TREE_CODE (exp)
6074 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6075 && !lang_hooks.safe_from_p (x, exp))
6076 return 0;
6079 /* If we have an rtl, find any enclosed object. Then see if we conflict
6080 with it. */
6081 if (exp_rtl)
6083 if (GET_CODE (exp_rtl) == SUBREG)
6085 exp_rtl = SUBREG_REG (exp_rtl);
6086 if (REG_P (exp_rtl)
6087 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6088 return 0;
6091 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6092 are memory and they conflict. */
6093 return ! (rtx_equal_p (x, exp_rtl)
6094 || (MEM_P (x) && MEM_P (exp_rtl)
6095 && true_dependence (exp_rtl, VOIDmode, x,
6096 rtx_addr_varies_p)));
6099 /* If we reach here, it is safe. */
6100 return 1;
6103 /* Subroutine of expand_expr: return rtx if EXP is a
6104 variable or parameter; else return 0. */
6106 static rtx
6107 var_rtx (tree exp)
6109 STRIP_NOPS (exp);
6110 switch (TREE_CODE (exp))
6112 case PARM_DECL:
6113 case VAR_DECL:
6114 return DECL_RTL (exp);
6115 default:
6116 return 0;
6120 /* Return the highest power of two that EXP is known to be a multiple of.
6121 This is used in updating alignment of MEMs in array references. */
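/* For illustration: for the expression I * 12 this returns 4, since
   12 contributes a factor of 4 and nothing is known about I, which
   contributes only 1.  */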
6123 static unsigned HOST_WIDE_INT
6124 highest_pow2_factor (tree exp)
6126 unsigned HOST_WIDE_INT c0, c1;
6128 switch (TREE_CODE (exp))
6130 case INTEGER_CST:
6131 /* We can find the lowest bit that's a one. If the low
6132 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6133 We need to handle this case since we can find it in a COND_EXPR,
6134 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6135 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6136 later ICE. */
6137 if (TREE_CONSTANT_OVERFLOW (exp))
6138 return BIGGEST_ALIGNMENT;
6139 else
6141 /* Note: tree_low_cst is intentionally not used here, since
6142 we don't care about the upper bits. */
6143 c0 = TREE_INT_CST_LOW (exp);
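/* c0 &= -c0 isolates the lowest set bit, e.g. 24 & -24 == 8.  */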
6144 c0 &= -c0;
6145 return c0 ? c0 : BIGGEST_ALIGNMENT;
6147 break;
6149 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6150 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6151 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6152 return MIN (c0, c1);
6154 case MULT_EXPR:
6155 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6156 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6157 return c0 * c1;
6159 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6160 case CEIL_DIV_EXPR:
6161 if (integer_pow2p (TREE_OPERAND (exp, 1))
6162 && host_integerp (TREE_OPERAND (exp, 1), 1))
6164 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6165 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6166 return MAX (1, c0 / c1);
6168 break;
6170 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6171 case SAVE_EXPR:
6172 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6174 case COMPOUND_EXPR:
6175 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6177 case COND_EXPR:
6178 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6179 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6180 return MIN (c0, c1);
6182 default:
6183 break;
6186 return 1;
6189 /* Similar, except that the alignment requirements of TARGET are
6190 taken into account. Assume it is at least as aligned as its
6191 type, unless it is a COMPONENT_REF in which case the layout of
6192 the structure gives the alignment. */
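/* For illustration: if EXP's known factor is 4 but TARGET is a field
   or object aligned to 8 bytes, the result is 8.  */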
6194 static unsigned HOST_WIDE_INT
6195 highest_pow2_factor_for_target (tree target, tree exp)
6197 unsigned HOST_WIDE_INT target_align, factor;
6199 factor = highest_pow2_factor (exp);
6200 if (TREE_CODE (target) == COMPONENT_REF)
6201 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6202 else
6203 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6204 return MAX (factor, target_align);
6207 /* Expands variable VAR. */
6209 void
6210 expand_var (tree var)
6212 if (DECL_EXTERNAL (var))
6213 return;
6215 if (TREE_STATIC (var))
6216 /* If this is an inlined copy of a static local variable,
6217 look up the original decl. */
6218 var = DECL_ORIGIN (var);
6220 if (TREE_STATIC (var)
6221 ? !TREE_ASM_WRITTEN (var)
6222 : !DECL_RTL_SET_P (var))
6224 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6226 /* Prepare a mem & address for the decl. */
6227 rtx x;
6229 if (TREE_STATIC (var))
6230 abort ();
6232 x = gen_rtx_MEM (DECL_MODE (var),
6233 gen_reg_rtx (Pmode));
6235 set_mem_attributes (x, var, 1);
6236 SET_DECL_RTL (var, x);
6238 else if (lang_hooks.expand_decl (var))
6239 /* OK. */;
6240 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6241 expand_decl (var);
6242 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6243 rest_of_decl_compilation (var, NULL, 0, 0);
6244 else if (TREE_CODE (var) == TYPE_DECL
6245 || TREE_CODE (var) == CONST_DECL
6246 || TREE_CODE (var) == FUNCTION_DECL
6247 || TREE_CODE (var) == LABEL_DECL)
6248 /* No expansion needed. */;
6249 else
6250 abort ();
6254 /* Expands declarations of variables in list VARS. */
6256 static void
6257 expand_vars (tree vars)
6259 for (; vars; vars = TREE_CHAIN (vars))
6261 tree var = vars;
6263 if (DECL_EXTERNAL (var))
6264 continue;
6266 expand_var (var);
6267 expand_decl_init (var);
6271 /* Subroutine of expand_expr. Expand the two operands of a binary
6272 expression, EXP0 and EXP1, placing the results in OP0 and OP1.
6273 The value may be stored in TARGET if TARGET is nonzero. The
6274 MODIFIER argument is as documented by expand_expr. */
6276 static void
6277 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6278 enum expand_modifier modifier)
6280 if (! safe_from_p (target, exp1, 1))
6281 target = 0;
6282 if (operand_equal_p (exp0, exp1, 0))
6284 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6285 *op1 = copy_rtx (*op0);
6287 else
6289 /* If we need to preserve evaluation order, copy exp0 into its own
6290 temporary variable so that it can't be clobbered by exp1. */
6291 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6292 exp0 = save_expr (exp0);
6293 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6294 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6299 /* expand_expr: generate code for computing expression EXP.
6300 An rtx for the computed value is returned. The value is never null.
6301 In the case of a void EXP, const0_rtx is returned.
6303 The value may be stored in TARGET if TARGET is nonzero.
6304 TARGET is just a suggestion; callers must assume that
6305 the rtx returned may not be the same as TARGET.
6307 If TARGET is CONST0_RTX, it means that the value will be ignored.
6309 If TMODE is not VOIDmode, it suggests generating the
6310 result in mode TMODE. But this is done only when convenient.
6311 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6312 TMODE is just a suggestion; callers must assume that
6313 the rtx returned may not have mode TMODE.
6315 Note that TARGET may have neither TMODE nor MODE. In that case, it
6316 probably will not be used.
6318 If MODIFIER is EXPAND_SUM then when EXP is an addition
6319 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6320 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6321 products as above, or REG or MEM, or constant.
6322 Ordinarily in such cases we would output mul or add instructions
6323 and then return a pseudo reg containing the sum.
6325 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6326 it also marks a label as absolutely required (it can't be dead).
6327 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6328 This is used for outputting expressions used in initializers.
6330 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6331 with a constant address even if that address is not normally legitimate.
6332 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6334 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6335 a call parameter. Such targets require special care as we haven't yet
6336 marked TARGET so that it's safe from being trashed by libcalls. We
6337 don't want to use TARGET for anything but the final result;
6338 intermediate values must go elsewhere. Additionally, calls to
6339 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6341 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6342 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6343 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6344 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6345 recursively. */
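/* For illustration: under EXPAND_SUM, the address computation for a
   global array element A[I] may come back as something like
   (plus (mult (reg) (const_int 4)) (symbol_ref "A")) rather than
   being forced into a single pseudo; the element size is made up.  */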
6347 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6348 enum expand_modifier, rtx *);
6350 rtx
6351 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6352 enum expand_modifier modifier, rtx *alt_rtl)
6354 int rn = -1;
6355 rtx ret, last = NULL;
6357 /* Handle ERROR_MARK before anybody tries to access its type. */
6358 if (TREE_CODE (exp) == ERROR_MARK
6359 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6361 ret = CONST0_RTX (tmode);
6362 return ret ? ret : const0_rtx;
6365 if (flag_non_call_exceptions)
6367 rn = lookup_stmt_eh_region (exp);
6368 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6369 if (rn >= 0)
6370 last = get_last_insn ();
6373 /* If this is an expression of some kind and it has an associated line
6374 number, then emit the line number before expanding the expression.
6376 We need to save and restore the file and line information so that
6377 errors discovered during expansion are emitted with the right
6378 information. It would be better if the diagnostic routines
6379 used the file/line information embedded in the tree nodes rather
6380 than globals. */
6381 if (cfun && EXPR_HAS_LOCATION (exp))
6383 location_t saved_location = input_location;
6384 input_location = EXPR_LOCATION (exp);
6385 emit_line_note (input_location);
6387 /* Record where the insns produced belong. */
6388 record_block_change (TREE_BLOCK (exp));
6390 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6392 input_location = saved_location;
6394 else
6396 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6399 /* If using non-call exceptions, mark all insns that may trap.
6400 expand_call() will mark CALL_INSNs before we get to this code,
6401 but it doesn't handle libcalls, and these may trap. */
6402 if (rn >= 0)
6404 rtx insn;
6405 for (insn = next_real_insn (last); insn;
6406 insn = next_real_insn (insn))
6408 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6409 /* If we want exceptions for non-call insns, any
6410 may_trap_p instruction may throw. */
6411 && GET_CODE (PATTERN (insn)) != CLOBBER
6412 && GET_CODE (PATTERN (insn)) != USE
6413 && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6415 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6416 REG_NOTES (insn));
6421 return ret;
6424 static rtx
6425 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6426 enum expand_modifier modifier, rtx *alt_rtl)
6428 rtx op0, op1, temp;
6429 tree type = TREE_TYPE (exp);
6430 int unsignedp;
6431 enum machine_mode mode;
6432 enum tree_code code = TREE_CODE (exp);
6433 optab this_optab;
6434 rtx subtarget, original_target;
6435 int ignore;
6436 tree context;
6438 mode = TYPE_MODE (type);
6439 unsignedp = TYPE_UNSIGNED (type);
6441 /* Use subtarget as the target for operand 0 of a binary operation. */
6442 subtarget = get_subtarget (target);
6443 original_target = target;
6444 ignore = (target == const0_rtx
6445 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6446 || code == CONVERT_EXPR || code == COND_EXPR
6447 || code == VIEW_CONVERT_EXPR)
6448 && TREE_CODE (type) == VOID_TYPE));
6450 /* If we are going to ignore this result, we need only do something
6451 if there is a side-effect somewhere in the expression. If there
6452 is, short-circuit the most common cases here. Note that we must
6453 not call expand_expr with anything but const0_rtx in case this
6454 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6456 if (ignore)
6458 if (! TREE_SIDE_EFFECTS (exp))
6459 return const0_rtx;
6461 /* Ensure we reference a volatile object even if value is ignored, but
6462 don't do this if all we are doing is taking its address. */
6463 if (TREE_THIS_VOLATILE (exp)
6464 && TREE_CODE (exp) != FUNCTION_DECL
6465 && mode != VOIDmode && mode != BLKmode
6466 && modifier != EXPAND_CONST_ADDRESS)
6468 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6469 if (MEM_P (temp))
6470 temp = copy_to_reg (temp);
6471 return const0_rtx;
6474 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6475 || code == INDIRECT_REF || code == BUFFER_REF)
6476 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6477 modifier);
6479 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6480 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6482 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6483 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6484 return const0_rtx;
6486 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6487 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6488 /* If the second operand has no side effects, just evaluate
6489 the first. */
6490 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6491 modifier);
6492 else if (code == BIT_FIELD_REF)
6494 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6495 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6496 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6497 return const0_rtx;
6500 target = 0;
6503 /* If we will do cse, generate all results into pseudo registers
6504 since 1) that allows cse to find more things
6505 and 2) otherwise cse could produce an insn the machine
6506 cannot support. An exception is a CONSTRUCTOR into a multi-word
6507 MEM: that's much more likely to be most efficient into the MEM.
6508 Another is a CALL_EXPR which must return in memory. */
6510 if (! cse_not_expected && mode != BLKmode && target
6511 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6512 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6513 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6514 target = 0;
6516 switch (code)
6518 case LABEL_DECL:
6520 tree function = decl_function_context (exp);
6522 temp = label_rtx (exp);
6523 temp = gen_rtx_LABEL_REF (Pmode, temp);
6525 if (function != current_function_decl
6526 && function != 0)
6527 LABEL_REF_NONLOCAL_P (temp) = 1;
6529 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6530 return temp;
6533 case PARM_DECL:
6534 if (!DECL_RTL_SET_P (exp))
6536 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6537 return CONST0_RTX (mode);
6540 /* ... fall through ... */
6542 case VAR_DECL:
6543 /* If a static var's type was incomplete when the decl was written,
6544 but the type is complete now, lay out the decl now. */
6545 if (DECL_SIZE (exp) == 0
6546 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6547 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6548 layout_decl (exp, 0);
6550 /* ... fall through ... */
6552 case FUNCTION_DECL:
6553 case RESULT_DECL:
6554 if (DECL_RTL (exp) == 0)
6555 abort ();
6557 /* Ensure the variable is marked as used even if it doesn't go through
6558 a parser. If it hasn't been used yet, write out an external
6559 definition. */
6560 if (! TREE_USED (exp))
6562 assemble_external (exp);
6563 TREE_USED (exp) = 1;
6566 /* Show we haven't gotten RTL for this yet. */
6567 temp = 0;
6569 /* Handle variables inherited from containing functions. */
6570 context = decl_function_context (exp);
6572 if (context != 0 && context != current_function_decl
6573 /* If var is static, we don't need a static chain to access it. */
6574 && ! (MEM_P (DECL_RTL (exp))
6575 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6577 rtx addr;
6579 /* Mark as non-local and addressable. */
6580 DECL_NONLOCAL (exp) = 1;
6581 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6582 abort ();
6583 lang_hooks.mark_addressable (exp);
6584 if (!MEM_P (DECL_RTL (exp)))
6585 abort ();
6586 addr = XEXP (DECL_RTL (exp), 0);
6587 if (MEM_P (addr))
6588 addr
6589 = replace_equiv_address (addr,
6590 fix_lexical_addr (XEXP (addr, 0), exp));
6591 else
6592 addr = fix_lexical_addr (addr, exp);
6594 temp = replace_equiv_address (DECL_RTL (exp), addr);
6597 /* This is the case of an array whose size is to be determined
6598 from its initializer, while the initializer is still being parsed.
6599 See expand_decl. */
6601 else if (MEM_P (DECL_RTL (exp))
6602 && REG_P (XEXP (DECL_RTL (exp), 0)))
6603 temp = validize_mem (DECL_RTL (exp));
6605 /* If DECL_RTL is memory, we are in the normal case; if either
6606 the address is not valid, or it is not a register and -fforce-addr
6607 is specified, get the address into a register. */
6609 else if (MEM_P (DECL_RTL (exp))
6610 && modifier != EXPAND_CONST_ADDRESS
6611 && modifier != EXPAND_SUM
6612 && modifier != EXPAND_INITIALIZER
6613 && (! memory_address_p (DECL_MODE (exp),
6614 XEXP (DECL_RTL (exp), 0))
6615 || (flag_force_addr
6616 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6618 if (alt_rtl)
6619 *alt_rtl = DECL_RTL (exp);
6620 temp = replace_equiv_address (DECL_RTL (exp),
6621 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6624 /* If we got something, return it. But first, set the alignment
6625 if the address is a register. */
6626 if (temp != 0)
6628 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6629 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6631 return temp;
6634 /* If the mode of DECL_RTL does not match that of the decl, it
6635 must be a promoted value. We return a SUBREG of the wanted mode,
6636 but mark it so that we know that it was already extended. */
6638 if (REG_P (DECL_RTL (exp))
6639 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6641 /* Get the signedness used for this variable. Ensure we get the
6642 same mode we got when the variable was declared. */
6643 if (GET_MODE (DECL_RTL (exp))
6644 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6645 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6646 abort ();
6648 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6649 SUBREG_PROMOTED_VAR_P (temp) = 1;
6650 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6651 return temp;
6654 return DECL_RTL (exp);
6656 case INTEGER_CST:
6657 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6658 TREE_INT_CST_HIGH (exp), mode);
6660 /* ??? If overflow is set, fold will have done an incomplete job,
6661 which can result in (plus xx (const_int 0)), which can get
6662 simplified by validate_replace_rtx during virtual register
6663 instantiation, which can result in unrecognizable insns.
6664 Avoid this by forcing all overflows into registers. */
6665 if (TREE_CONSTANT_OVERFLOW (exp)
6666 && modifier != EXPAND_INITIALIZER)
6667 temp = force_reg (mode, temp);
6669 return temp;
6671 case VECTOR_CST:
6672 return const_vector_from_tree (exp);
6674 case CONST_DECL:
6675 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6677 case REAL_CST:
6678 /* If optimized, generate immediate CONST_DOUBLE
6679 which will be turned into memory by reload if necessary.
6681 We used to force a register so that loop.c could see it. But
6682 this does not allow gen_* patterns to perform optimizations with
6683 the constants. It also produces two insns in cases like "x = 1.0;".
6684 On most machines, floating-point constants are not permitted in
6685 many insns, so we'd end up copying it to a register in any case.
6687 Now, we do the copying in expand_binop, if appropriate. */
6688 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6689 TYPE_MODE (TREE_TYPE (exp)));
6691 case COMPLEX_CST:
6692 /* Handle evaluating a complex constant in a CONCAT target. */
6693 if (original_target && GET_CODE (original_target) == CONCAT)
6695 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6696 rtx rtarg, itarg;
6698 rtarg = XEXP (original_target, 0);
6699 itarg = XEXP (original_target, 1);
6701 /* Move the real and imaginary parts separately. */
6702 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6703 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6705 if (op0 != rtarg)
6706 emit_move_insn (rtarg, op0);
6707 if (op1 != itarg)
6708 emit_move_insn (itarg, op1);
6710 return original_target;
6713 /* ... fall through ... */
6715 case STRING_CST:
6716 temp = output_constant_def (exp, 1);
6718 /* temp contains a constant address.
6719 On RISC machines where a constant address isn't valid,
6720 make some insns to get that address into a register. */
6721 if (modifier != EXPAND_CONST_ADDRESS
6722 && modifier != EXPAND_INITIALIZER
6723 && modifier != EXPAND_SUM
6724 && (! memory_address_p (mode, XEXP (temp, 0))
6725 || flag_force_addr))
6726 return replace_equiv_address (temp,
6727 copy_rtx (XEXP (temp, 0)));
6728 return temp;
6730 case SAVE_EXPR:
6732 tree val = TREE_OPERAND (exp, 0);
6733 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6735 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6737 /* We can indeed still hit this case, typically via builtin
6738 expanders calling save_expr immediately before expanding
6739 something. Assume this means that we only have to deal
6740 with non-BLKmode values. */
6741 if (GET_MODE (ret) == BLKmode)
6742 abort ();
6744 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6745 DECL_ARTIFICIAL (val) = 1;
6746 TREE_OPERAND (exp, 0) = val;
6748 if (!CONSTANT_P (ret))
6749 ret = copy_to_reg (ret);
6750 SET_DECL_RTL (val, ret);
6753 return ret;
6756 case UNSAVE_EXPR:
6758 rtx temp;
6759 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6760 TREE_OPERAND (exp, 0)
6761 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6762 return temp;
6765 case GOTO_EXPR:
6766 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6767 expand_goto (TREE_OPERAND (exp, 0));
6768 else
6769 expand_computed_goto (TREE_OPERAND (exp, 0));
6770 return const0_rtx;
6772 /* These are lowered during gimplification, so we should never ever
6773 see them here. */
6774 case LOOP_EXPR:
6775 case EXIT_EXPR:
6776 abort ();
6778 case LABELED_BLOCK_EXPR:
6779 if (LABELED_BLOCK_BODY (exp))
6780 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6781 /* Should perhaps use expand_label, but this is simpler and safer. */
6782 do_pending_stack_adjust ();
6783 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6784 return const0_rtx;
6786 case EXIT_BLOCK_EXPR:
6787 if (EXIT_BLOCK_RETURN (exp))
6788 sorry ("returned value in block_exit_expr");
6789 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6790 return const0_rtx;
6792 case BIND_EXPR:
6794 tree block = BIND_EXPR_BLOCK (exp);
6795 int mark_ends;
6797 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6798 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6799 mark_ends = (block != NULL_TREE);
6800 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6802 /* If VARS have not yet been expanded, expand them now. */
6803 expand_vars (BIND_EXPR_VARS (exp));
6805 /* TARGET was clobbered early in this function. The correct
6806 indicator of whether or not we need the value of this
6807 expression is the IGNORE variable. */
6808 temp = expand_expr (BIND_EXPR_BODY (exp),
6809 ignore ? const0_rtx : target,
6810 tmode, modifier);
6812 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
6814 return temp;
6817 case CONSTRUCTOR:
6818 /* If we don't need the result, just ensure we evaluate any
6819 subexpressions. */
6820 if (ignore)
6822 tree elt;
6824 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6825 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6827 return const0_rtx;
6830 /* All elts simple constants => refer to a constant in memory. But
6831 if this is a non-BLKmode mode, let it store a field at a time
6832 since that should make a CONST_INT or CONST_DOUBLE when we
6833 fold. Likewise, if we have a target we can use, it is best to
6834 store directly into the target unless the type is large enough
6835 that memcpy will be used. If we are making an initializer and
6836 all operands are constant, put it in memory as well.
6838 FIXME: Avoid trying to fill vector constructors piece-meal.
6839 Output them with output_constant_def below unless we're sure
6840 they're zeros. This should go away when vector initializers
6841 are treated like VECTOR_CST instead of arrays.
6843 else if ((TREE_STATIC (exp)
6844 && ((mode == BLKmode
6845 && ! (target != 0 && safe_from_p (target, exp, 1)))
6846 || TREE_ADDRESSABLE (exp)
6847 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6848 && (! MOVE_BY_PIECES_P
6849 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6850 TYPE_ALIGN (type)))
6851 && ! mostly_zeros_p (exp))))
6852 || ((modifier == EXPAND_INITIALIZER
6853 || modifier == EXPAND_CONST_ADDRESS)
6854 && TREE_CONSTANT (exp)))
6856 rtx constructor = output_constant_def (exp, 1);
6858 if (modifier != EXPAND_CONST_ADDRESS
6859 && modifier != EXPAND_INITIALIZER
6860 && modifier != EXPAND_SUM)
6861 constructor = validize_mem (constructor);
6863 return constructor;
6865 else
6867 /* Handle calls that pass values in multiple non-contiguous
6868 locations. The Irix 6 ABI has examples of this. */
6869 if (target == 0 || ! safe_from_p (target, exp, 1)
6870 || GET_CODE (target) == PARALLEL
6871 || modifier == EXPAND_STACK_PARM)
6872 target
6873 = assign_temp (build_qualified_type (type,
6874 (TYPE_QUALS (type)
6875 | (TREE_READONLY (exp)
6876 * TYPE_QUAL_CONST))),
6877 0, TREE_ADDRESSABLE (exp), 1);
6879 store_constructor (exp, target, 0, int_expr_size (exp));
6880 return target;
6883 case INDIRECT_REF:
6885 tree exp1 = TREE_OPERAND (exp, 0);
6887 if (modifier != EXPAND_WRITE)
6889 tree t;
6891 t = fold_read_from_constant_string (exp);
6892 if (t)
6893 return expand_expr (t, target, tmode, modifier);
6896 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6897 op0 = memory_address (mode, op0);
6898 temp = gen_rtx_MEM (mode, op0);
6899 set_mem_attributes (temp, exp, 0);
6901 /* If we are writing to this object and its type is a record with
6902 readonly fields, we must mark it as readonly so it will
6903 conflict with readonly references to those fields. */
6904 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6905 RTX_UNCHANGING_P (temp) = 1;
6907 return temp;
6910 case ARRAY_REF:
6912 #ifdef ENABLE_CHECKING
6913 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6914 abort ();
6915 #endif
6918 tree array = TREE_OPERAND (exp, 0);
6919 tree low_bound = array_ref_low_bound (exp);
6920 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6921 HOST_WIDE_INT i;
6923 /* Optimize the special-case of a zero lower bound.
6925 We convert the low_bound to sizetype to avoid some problems
6926 with constant folding. (E.g. suppose the lower bound is 1,
6927 and its mode is QI. Without the conversion, (ARRAY
6928 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6929 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6931 if (! integer_zerop (low_bound))
6932 index = size_diffop (index, convert (sizetype, low_bound));
6934 /* Fold an expression like: "foo"[2].
6935 This is not done in fold so it won't happen inside &.
6936 Don't fold if this is for wide characters since it's too
6937 difficult to do correctly and this is a very rare case. */
6939 if (modifier != EXPAND_CONST_ADDRESS
6940 && modifier != EXPAND_INITIALIZER
6941 && modifier != EXPAND_MEMORY)
6943 tree t = fold_read_from_constant_string (exp);
6945 if (t)
6946 return expand_expr (t, target, tmode, modifier);
6949 /* If this is a constant index into a constant array,
6950 just get the value from the array. Handle both the cases when
6951 we have an explicit constructor and when our operand is a variable
6952 that was declared const. */
6954 if (modifier != EXPAND_CONST_ADDRESS
6955 && modifier != EXPAND_INITIALIZER
6956 && modifier != EXPAND_MEMORY
6957 && TREE_CODE (array) == CONSTRUCTOR
6958 && ! TREE_SIDE_EFFECTS (array)
6959 && TREE_CODE (index) == INTEGER_CST
6960 && 0 > compare_tree_int (index,
6961 list_length (CONSTRUCTOR_ELTS
6962 (TREE_OPERAND (exp, 0)))))
6964 tree elem;
6966 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6967 i = TREE_INT_CST_LOW (index);
6968 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6971 if (elem)
6972 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6973 modifier);
6976 else if (optimize >= 1
6977 && modifier != EXPAND_CONST_ADDRESS
6978 && modifier != EXPAND_INITIALIZER
6979 && modifier != EXPAND_MEMORY
6980 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6981 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6982 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6983 && targetm.binds_local_p (array))
6985 if (TREE_CODE (index) == INTEGER_CST)
6987 tree init = DECL_INITIAL (array);
6989 if (TREE_CODE (init) == CONSTRUCTOR)
6991 tree elem;
6993 for (elem = CONSTRUCTOR_ELTS (init);
6994 (elem
6995 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6996 elem = TREE_CHAIN (elem))
6999 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7000 return expand_expr (fold (TREE_VALUE (elem)), target,
7001 tmode, modifier);
7003 else if (TREE_CODE (init) == STRING_CST
7004 && 0 > compare_tree_int (index,
7005 TREE_STRING_LENGTH (init)))
7007 tree type = TREE_TYPE (TREE_TYPE (init));
7008 enum machine_mode mode = TYPE_MODE (type);
7010 if (GET_MODE_CLASS (mode) == MODE_INT
7011 && GET_MODE_SIZE (mode) == 1)
7012 return gen_int_mode (TREE_STRING_POINTER (init)
7013 [TREE_INT_CST_LOW (index)], mode);
7018 goto normal_inner_ref;
7020 case COMPONENT_REF:
7021 /* If the operand is a CONSTRUCTOR, we can just extract the
7022 appropriate field if it is present. */
7023 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7025 tree elt;
7027 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7028 elt = TREE_CHAIN (elt))
7029 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7030 /* We can normally use the value of the field in the
7031 CONSTRUCTOR. However, if this is a bitfield in
7032 an integral mode that we can fit in a HOST_WIDE_INT,
7033 we must mask only the number of bits in the bitfield,
7034 since this is done implicitly by the constructor. If
7035 the bitfield does not meet either of those conditions,
7036 we can't do this optimization. */
7037 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7038 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7039 == MODE_INT)
7040 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7041 <= HOST_BITS_PER_WIDE_INT))))
7043 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7044 && modifier == EXPAND_STACK_PARM)
7045 target = 0;
7046 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7047 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7049 HOST_WIDE_INT bitsize
7050 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7051 enum machine_mode imode
7052 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7054 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7056 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7057 op0 = expand_and (imode, op0, op1, target);
7059 else
7061 tree count
7062 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7065 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7066 target, 0);
7067 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7068 target, 0);
7072 return op0;
7075 goto normal_inner_ref;
7077 case BIT_FIELD_REF:
7078 case ARRAY_RANGE_REF:
7079 normal_inner_ref:
7081 enum machine_mode mode1;
7082 HOST_WIDE_INT bitsize, bitpos;
7083 tree offset;
7084 int volatilep = 0;
7085 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7086 &mode1, &unsignedp, &volatilep);
7087 rtx orig_op0;
7089 /* If we got back the original object, something is wrong. Perhaps
7090 we are evaluating an expression too early. In any event, don't
7091 infinitely recurse. */
7092 if (tem == exp)
7093 abort ();
7095 /* If TEM's type is a union of variable size, pass TARGET to the inner
7096 computation, since it will need a temporary and TARGET is known
7097 to have to serve as one. This occurs in unchecked conversion in Ada. */
7099 orig_op0 = op0
7100 = expand_expr (tem,
7101 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7102 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7103 != INTEGER_CST)
7104 && modifier != EXPAND_STACK_PARM
7105 ? target : NULL_RTX),
7106 VOIDmode,
7107 (modifier == EXPAND_INITIALIZER
7108 || modifier == EXPAND_CONST_ADDRESS
7109 || modifier == EXPAND_STACK_PARM)
7110 ? modifier : EXPAND_NORMAL);
7112 /* If this is a constant, put it into a register if it is a
7113 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7114 if (CONSTANT_P (op0))
7116 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7117 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7118 && offset == 0)
7119 op0 = force_reg (mode, op0);
7120 else
7121 op0 = validize_mem (force_const_mem (mode, op0));
7124 /* Otherwise, if this object is not in memory and we either have an
7125 offset or a BLKmode result, put it there. This case can't occur in
7126 C, but can in Ada if we have unchecked conversion of an expression
7127 from a scalar type to an array or record type or for an
7128 ARRAY_RANGE_REF whose type is BLKmode. */
7129 else if (!MEM_P (op0)
7130 && (offset != 0
7131 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7133 tree nt = build_qualified_type (TREE_TYPE (tem),
7134 (TYPE_QUALS (TREE_TYPE (tem))
7135 | TYPE_QUAL_CONST));
7136 rtx memloc = assign_temp (nt, 1, 1, 1);
7138 emit_move_insn (memloc, op0);
7139 op0 = memloc;
7142 if (offset != 0)
7144 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7145 EXPAND_SUM);
7147 if (!MEM_P (op0))
7148 abort ();
7150 #ifdef POINTERS_EXTEND_UNSIGNED
7151 if (GET_MODE (offset_rtx) != Pmode)
7152 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7153 #else
7154 if (GET_MODE (offset_rtx) != ptr_mode)
7155 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7156 #endif
7158 if (GET_MODE (op0) == BLKmode
7159 /* A constant address in OP0 can have VOIDmode; we must
7160 not try to call force_reg in that case. */
7161 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7162 && bitsize != 0
7163 && (bitpos % bitsize) == 0
7164 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7165 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7167 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7168 bitpos = 0;
7171 op0 = offset_address (op0, offset_rtx,
7172 highest_pow2_factor (offset));
7175 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7176 record its alignment as BIGGEST_ALIGNMENT. */
7177 if (MEM_P (op0) && bitpos == 0 && offset != 0
7178 && is_aligning_offset (offset, tem))
7179 set_mem_align (op0, BIGGEST_ALIGNMENT);
7181 /* Don't forget about volatility even if this is a bitfield. */
7182 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7184 if (op0 == orig_op0)
7185 op0 = copy_rtx (op0);
7187 MEM_VOLATILE_P (op0) = 1;
7190 /* The following code doesn't handle CONCAT.
7191 Assume only bitpos == 0 can be used for CONCAT, due to
7192 one element arrays having the same mode as their element. */
7193 if (GET_CODE (op0) == CONCAT)
7195 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7196 abort ();
7197 return op0;
7200 /* In cases where an aligned union has an unaligned object
7201 as a field, we might be extracting a BLKmode value from
7202 an integer-mode (e.g., SImode) object. Handle this case
7203 by doing the extract into an object as wide as the field
7204 (which we know to be the width of a basic mode), then
7205 storing into memory, and changing the mode to BLKmode. */
7206 if (mode1 == VOIDmode
7207 || REG_P (op0) || GET_CODE (op0) == SUBREG
7208 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7209 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7210 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7211 && modifier != EXPAND_CONST_ADDRESS
7212 && modifier != EXPAND_INITIALIZER)
7213 /* If the field isn't aligned enough to fetch as a memref,
7214 fetch it as a bit field. */
7215 || (mode1 != BLKmode
7216 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7217 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7218 || (MEM_P (op0)
7219 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7220 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7221 && ((modifier == EXPAND_CONST_ADDRESS
7222 || modifier == EXPAND_INITIALIZER)
7223 ? STRICT_ALIGNMENT
7224 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7225 || (bitpos % BITS_PER_UNIT != 0)))
7226 /* If the type and the field are a constant size and the
7227 size of the type isn't the same size as the bitfield,
7228 we must use bitfield operations. */
7229 || (bitsize >= 0
7230 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7231 == INTEGER_CST)
7232 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7233 bitsize)))
7235 enum machine_mode ext_mode = mode;
7237 if (ext_mode == BLKmode
7238 && ! (target != 0 && MEM_P (op0)
7239 && MEM_P (target)
7240 && bitpos % BITS_PER_UNIT == 0))
7241 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7243 if (ext_mode == BLKmode)
7245 if (target == 0)
7246 target = assign_temp (type, 0, 1, 1);
7248 if (bitsize == 0)
7249 return target;
7251 /* In this case, BITPOS must start at a byte boundary and
7252 TARGET, if specified, must be a MEM. */
7253 if (!MEM_P (op0)
7254 || (target != 0 && !MEM_P (target))
7255 || bitpos % BITS_PER_UNIT != 0)
7256 abort ();
7258 emit_block_move (target,
7259 adjust_address (op0, VOIDmode,
7260 bitpos / BITS_PER_UNIT),
7261 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7262 / BITS_PER_UNIT),
7263 (modifier == EXPAND_STACK_PARM
7264 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7266 return target;
7269 op0 = validize_mem (op0);
7271 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7272 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7274 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7275 (modifier == EXPAND_STACK_PARM
7276 ? NULL_RTX : target),
7277 ext_mode, ext_mode,
7278 int_size_in_bytes (TREE_TYPE (tem)));
7280 /* If the result is a record type and BITSIZE is narrower than
7281 the mode of OP0, an integral mode, and this is a big endian
7282 machine, we must put the field into the high-order bits. */
7283 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7284 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7285 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7286 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7287 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7288 - bitsize),
7289 op0, 1);
7291 /* If the result type is BLKmode, store the data into a temporary
7292 of the appropriate type, but with the mode corresponding to the
7293 mode for the data we have (op0's mode). It's tempting to make
7294 this a constant type, since we know it's only being stored once,
7295 but that can cause problems if we are taking the address of this
7296 COMPONENT_REF because the MEM of any reference via that address
7297 will have flags corresponding to the type, which will not
7298 necessarily be constant. */
7299 if (mode == BLKmode)
7301 rtx new
7302 = assign_stack_temp_for_type
7303 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7305 emit_move_insn (new, op0);
7306 op0 = copy_rtx (new);
7307 PUT_MODE (op0, BLKmode);
7308 set_mem_attributes (op0, exp, 1);
7311 return op0;
7314 /* If the result is BLKmode, use that to access the object
7315 now as well. */
7316 if (mode == BLKmode)
7317 mode1 = BLKmode;
7319 /* Get a reference to just this component. */
7320 if (modifier == EXPAND_CONST_ADDRESS
7321 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7322 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7323 else
7324 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7326 if (op0 == orig_op0)
7327 op0 = copy_rtx (op0);
7329 set_mem_attributes (op0, exp, 0);
7330 if (REG_P (XEXP (op0, 0)))
7331 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7333 MEM_VOLATILE_P (op0) |= volatilep;
7334 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7335 || modifier == EXPAND_CONST_ADDRESS
7336 || modifier == EXPAND_INITIALIZER)
7337 return op0;
7338 else if (target == 0)
7339 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7341 convert_move (target, op0, unsignedp);
7342 return target;
7345 case OBJ_TYPE_REF:
7346 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7348 /* Intended for a reference to a buffer of a file-object in Pascal.
7349 But it's not certain that a special tree code will really be
7350 necessary for these. INDIRECT_REF might work for them. */
7351 case BUFFER_REF:
7352 abort ();
7354 case IN_EXPR:
7356 /* Pascal set IN expression.
7358 Algorithm:
7359 rlo = set_low - (set_low%bits_per_word);
7360 the_word = set [ (index - rlo)/bits_per_word ];
7361 bit_index = index % bits_per_word;
7362 bitmask = 1 << bit_index;
7363 return !!(the_word & bitmask); */
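/* Worked example of the algorithm above, with purely illustrative
numbers: set_low = 10, index = 21, bits_per_word = 8.  Then
rlo = 10 - (10 % 8) = 8, the_word = set[(21 - 8) / 8] = set[1],
bit_index = 21 % 8 = 5, bitmask = 1 << 5, and the result is
!!(set[1] & 0x20). */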
7365 tree set = TREE_OPERAND (exp, 0);
7366 tree index = TREE_OPERAND (exp, 1);
7367 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7368 tree set_type = TREE_TYPE (set);
7369 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7370 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7371 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7372 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7373 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7374 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7375 rtx setaddr = XEXP (setval, 0);
7376 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7377 rtx rlow;
7378 rtx diff, quo, rem, addr, bit, result;
7380 /* If domain is empty, answer is no. Likewise if index is constant
7381 and out of bounds. */
7382 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7383 && TREE_CODE (set_low_bound) == INTEGER_CST
7384 && tree_int_cst_lt (set_high_bound, set_low_bound))
7385 || (TREE_CODE (index) == INTEGER_CST
7386 && TREE_CODE (set_low_bound) == INTEGER_CST
7387 && tree_int_cst_lt (index, set_low_bound))
7388 || (TREE_CODE (set_high_bound) == INTEGER_CST
7389 && TREE_CODE (index) == INTEGER_CST
7390 && tree_int_cst_lt (set_high_bound, index))))
7391 return const0_rtx;
7393 if (target == 0)
7394 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7396 /* If we get here, we have to generate the code for both cases
7397 (in range and out of range). */
7399 op0 = gen_label_rtx ();
7400 op1 = gen_label_rtx ();
7402 if (! (GET_CODE (index_val) == CONST_INT
7403 && GET_CODE (lo_r) == CONST_INT))
7404 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7405 GET_MODE (index_val), iunsignedp, op1);
7407 if (! (GET_CODE (index_val) == CONST_INT
7408 && GET_CODE (hi_r) == CONST_INT))
7409 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7410 GET_MODE (index_val), iunsignedp, op1);
7412 /* Calculate the element number of bit zero in the first word
7413 of the set. */
7414 if (GET_CODE (lo_r) == CONST_INT)
7415 rlow = GEN_INT (INTVAL (lo_r)
7416 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7417 else
7418 rlow = expand_binop (index_mode, and_optab, lo_r,
7419 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7420 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7422 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7423 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7425 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7426 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7427 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7428 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7430 addr = memory_address (byte_mode,
7431 expand_binop (index_mode, add_optab, diff,
7432 setaddr, NULL_RTX, iunsignedp,
7433 OPTAB_LIB_WIDEN));
7435 /* Extract the bit we want to examine. */
7436 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7437 gen_rtx_MEM (byte_mode, addr),
7438 make_tree (TREE_TYPE (index), rem),
7439 NULL_RTX, 1);
7440 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7441 GET_MODE (target) == byte_mode ? target : 0,
7442 1, OPTAB_LIB_WIDEN);
7444 if (result != target)
7445 convert_move (target, result, 1);
7447 /* Output the code to handle the out-of-range case. */
7448 emit_jump (op0);
7449 emit_label (op1);
7450 emit_move_insn (target, const0_rtx);
7451 emit_label (op0);
7452 return target;
7455 case WITH_CLEANUP_EXPR:
7456 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7458 WITH_CLEANUP_EXPR_RTL (exp)
7459 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7460 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7461 CLEANUP_EH_ONLY (exp));
7463 /* That's it for this cleanup. */
7464 TREE_OPERAND (exp, 1) = 0;
7466 return WITH_CLEANUP_EXPR_RTL (exp);
7468 case CLEANUP_POINT_EXPR:
7470 /* Start a new binding layer that will keep track of all cleanup
7471 actions to be performed. */
7472 expand_start_bindings (2);
7474 target_temp_slot_level = temp_slot_level;
7476 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7477 /* If we're going to use this value, load it up now. */
7478 if (! ignore)
7479 op0 = force_not_mem (op0);
7480 preserve_temp_slots (op0);
7481 expand_end_bindings (NULL_TREE, 0, 0);
7483 return op0;
7485 case CALL_EXPR:
7486 /* Check for a built-in function. */
7487 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7488 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7489 == FUNCTION_DECL)
7490 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7492 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7493 == BUILT_IN_FRONTEND)
7494 return lang_hooks.expand_expr (exp, original_target,
7495 tmode, modifier,
7496 alt_rtl);
7497 else
7498 return expand_builtin (exp, target, subtarget, tmode, ignore);
7501 return expand_call (exp, target, ignore);
7503 case NON_LVALUE_EXPR:
7504 case NOP_EXPR:
7505 case CONVERT_EXPR:
7506 if (TREE_OPERAND (exp, 0) == error_mark_node)
7507 return const0_rtx;
7509 if (TREE_CODE (type) == UNION_TYPE)
7511 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7513 /* If both input and output are BLKmode, this conversion isn't doing
7514 anything except possibly changing memory attribute. */
7515 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7517 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7518 modifier);
7520 result = copy_rtx (result);
7521 set_mem_attributes (result, exp, 0);
7522 return result;
7525 if (target == 0)
7527 if (TYPE_MODE (type) != BLKmode)
7528 target = gen_reg_rtx (TYPE_MODE (type));
7529 else
7530 target = assign_temp (type, 0, 1, 1);
7533 if (MEM_P (target))
7534 /* Store data into beginning of memory target. */
7535 store_expr (TREE_OPERAND (exp, 0),
7536 adjust_address (target, TYPE_MODE (valtype), 0),
7537 modifier == EXPAND_STACK_PARM ? 2 : 0);
7539 else if (REG_P (target))
7540 /* Store this field into a union of the proper type. */
7541 store_field (target,
7542 MIN ((int_size_in_bytes (TREE_TYPE
7543 (TREE_OPERAND (exp, 0)))
7544 * BITS_PER_UNIT),
7545 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7546 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7547 VOIDmode, 0, type, 0);
7548 else
7549 abort ();
7551 /* Return the entire union. */
7552 return target;
7555 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7557 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7558 modifier);
7560 /* If the signedness of the conversion differs and OP0 is
7561 a promoted SUBREG, clear that indication since we now
7562 have to do the proper extension. */
7563 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7564 && GET_CODE (op0) == SUBREG)
7565 SUBREG_PROMOTED_VAR_P (op0) = 0;
7567 return op0;
7570 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7571 if (GET_MODE (op0) == mode)
7572 return op0;
7574 /* If OP0 is a constant, just convert it into the proper mode. */
7575 if (CONSTANT_P (op0))
7577 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7578 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7580 if (modifier == EXPAND_INITIALIZER)
7581 return simplify_gen_subreg (mode, op0, inner_mode,
7582 subreg_lowpart_offset (mode,
7583 inner_mode));
7584 else
7585 return convert_modes (mode, inner_mode, op0,
7586 TYPE_UNSIGNED (inner_type));
7589 if (modifier == EXPAND_INITIALIZER)
7590 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7592 if (target == 0)
7593 return
7594 convert_to_mode (mode, op0,
7595 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7596 else
7597 convert_move (target, op0,
7598 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7599 return target;
7601 case VIEW_CONVERT_EXPR:
7602 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7604 /* If the input and output modes are both the same, we are done.
7605 Otherwise, if neither mode is BLKmode and both are integral and within
7606 a word, we can use gen_lowpart. If neither is true, make sure the
7607 operand is in memory and convert the MEM to the new mode. */
7608 if (TYPE_MODE (type) == GET_MODE (op0))
7610 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7611 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7612 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7613 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7614 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7615 op0 = gen_lowpart (TYPE_MODE (type), op0);
7616 else if (!MEM_P (op0))
7618 /* If the operand is not a MEM, force it into memory. Since we
7619 are going to be changing the mode of the MEM, don't call
7620 force_const_mem for constants because we don't allow pool
7621 constants to change mode. */
7622 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7624 if (TREE_ADDRESSABLE (exp))
7625 abort ();
7627 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7628 target
7629 = assign_stack_temp_for_type
7630 (TYPE_MODE (inner_type),
7631 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7633 emit_move_insn (target, op0);
7634 op0 = target;
7637 /* At this point, OP0 is in the correct mode. If the output type is such
7638 that the operand is known to be aligned, indicate that it is.
7639 Otherwise, we need only be concerned about alignment for non-BLKmode
7640 results. */
7641 if (MEM_P (op0))
7643 op0 = copy_rtx (op0);
7645 if (TYPE_ALIGN_OK (type))
7646 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7647 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7648 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7650 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7651 HOST_WIDE_INT temp_size
7652 = MAX (int_size_in_bytes (inner_type),
7653 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7654 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7655 temp_size, 0, type);
7656 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7658 if (TREE_ADDRESSABLE (exp))
7659 abort ();
7661 if (GET_MODE (op0) == BLKmode)
7662 emit_block_move (new_with_op0_mode, op0,
7663 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7664 (modifier == EXPAND_STACK_PARM
7665 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7666 else
7667 emit_move_insn (new_with_op0_mode, op0);
7669 op0 = new;
7672 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7675 return op0;
7677 case PLUS_EXPR:
7678 this_optab = ! unsignedp && flag_trapv
7679 && (GET_MODE_CLASS (mode) == MODE_INT)
7680 ? addv_optab : add_optab;
7682 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7683 something else, make sure we add the register to the constant and
7684 then to the other thing. This case can occur during strength
7685 reduction and doing it this way will produce better code if the
7686 frame pointer or argument pointer is eliminated.
7688 fold-const.c will ensure that the constant is always in the inner
7689 PLUS_EXPR, so the only case we need to do anything about is if
7690 sp, ap, or fp is our second argument, in which case we must swap
7691 the innermost first argument and our second argument. */
7693 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7694 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7695 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7696 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7697 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7698 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7700 tree t = TREE_OPERAND (exp, 1);
7702 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7703 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7706 /* If the result is to be ptr_mode and we are adding an integer to
7707 something, we might be forming a constant. So try to use
7708 plus_constant. If it produces a sum and we can't accept it,
7709 use force_operand. This allows P = &ARR[const] to generate
7710 efficient code on machines where a SYMBOL_REF is not a valid
7711 address.
7713 If this is an EXPAND_SUM call, always return the sum. */
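/* Illustrative sketch (hypothetical names): for P = &ARR[4] with
4-byte elements, the addition below can fold to the constant address
(plus (symbol_ref "ARR") (const_int 16)) via plus_constant instead
of emitting a run-time add. */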
7714 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7715 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7717 if (modifier == EXPAND_STACK_PARM)
7718 target = 0;
7719 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7720 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7721 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7723 rtx constant_part;
7725 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7726 EXPAND_SUM);
7727 /* Use immed_double_const to ensure that the constant is
7728 truncated according to the mode of OP1, then sign extended
7729 to a HOST_WIDE_INT. Using the constant directly can result
7730 in non-canonical RTL in a 64x32 cross compile. */
7731 constant_part
7732 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7733 (HOST_WIDE_INT) 0,
7734 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7735 op1 = plus_constant (op1, INTVAL (constant_part));
7736 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7737 op1 = force_operand (op1, target);
7738 return op1;
7741 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7742 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7743 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7745 rtx constant_part;
7747 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7748 (modifier == EXPAND_INITIALIZER
7749 ? EXPAND_INITIALIZER : EXPAND_SUM));
7750 if (! CONSTANT_P (op0))
7752 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7753 VOIDmode, modifier);
7754 /* Return a PLUS if modifier says it's OK. */
7755 if (modifier == EXPAND_SUM
7756 || modifier == EXPAND_INITIALIZER)
7757 return simplify_gen_binary (PLUS, mode, op0, op1);
7758 goto binop2;
7760 /* Use immed_double_const to ensure that the constant is
7761 truncated according to the mode of OP0, then sign extended
7762 to a HOST_WIDE_INT. Using the constant directly can result
7763 in non-canonical RTL in a 64x32 cross compile. */
7764 constant_part
7765 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7766 (HOST_WIDE_INT) 0,
7767 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7768 op0 = plus_constant (op0, INTVAL (constant_part));
7769 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7770 op0 = force_operand (op0, target);
7771 return op0;
7775 /* No sense saving up arithmetic to be done
7776 if it's all in the wrong mode to form part of an address.
7777 And force_operand won't know whether to sign-extend or
7778 zero-extend. */
7779 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7780 || mode != ptr_mode)
7782 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7783 subtarget, &op0, &op1, 0);
7784 if (op0 == const0_rtx)
7785 return op1;
7786 if (op1 == const0_rtx)
7787 return op0;
7788 goto binop2;
7791 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7792 subtarget, &op0, &op1, modifier);
7793 return simplify_gen_binary (PLUS, mode, op0, op1);
7795 case MINUS_EXPR:
7796 /* For initializers, we are allowed to return a MINUS of two
7797 symbolic constants. Here we handle all cases when both operands
7798 are constant. */
7799 /* Handle difference of two symbolic constants,
7800 for the sake of an initializer. */
7801 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7802 && really_constant_p (TREE_OPERAND (exp, 0))
7803 && really_constant_p (TREE_OPERAND (exp, 1)))
7805 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7806 NULL_RTX, &op0, &op1, modifier);
7808 /* If the last operand is a CONST_INT, use plus_constant of
7809 the negated constant. Else make the MINUS. */
7810 if (GET_CODE (op1) == CONST_INT)
7811 return plus_constant (op0, - INTVAL (op1));
7812 else
7813 return gen_rtx_MINUS (mode, op0, op1);
7816 this_optab = ! unsignedp && flag_trapv
7817 && (GET_MODE_CLASS(mode) == MODE_INT)
7818 ? subv_optab : sub_optab;
7820 /* No sense saving up arithmetic to be done
7821 if it's all in the wrong mode to form part of an address.
7822 And force_operand won't know whether to sign-extend or
7823 zero-extend. */
7824 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7825 || mode != ptr_mode)
7826 goto binop;
7828 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7829 subtarget, &op0, &op1, modifier);
7831 /* Convert A - const to A + (-const). */
7832 if (GET_CODE (op1) == CONST_INT)
7834 op1 = negate_rtx (mode, op1);
7835 return simplify_gen_binary (PLUS, mode, op0, op1);
7838 goto binop2;
7840 case MULT_EXPR:
7841 /* If first operand is constant, swap them.
7842 Thus the following special case checks need only
7843 check the second operand. */
7844 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7846 tree t1 = TREE_OPERAND (exp, 0);
7847 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7848 TREE_OPERAND (exp, 1) = t1;
7851 /* Attempt to return something suitable for generating an
7852 indexed address, for machines that support that. */
7854 if (modifier == EXPAND_SUM && mode == ptr_mode
7855 && host_integerp (TREE_OPERAND (exp, 1), 0))
7857 tree exp1 = TREE_OPERAND (exp, 1);
7859 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7860 EXPAND_SUM);
7862 if (!REG_P (op0))
7863 op0 = force_operand (op0, NULL_RTX);
7864 if (!REG_P (op0))
7865 op0 = copy_to_mode_reg (mode, op0);
7867 return gen_rtx_MULT (mode, op0,
7868 gen_int_mode (tree_low_cst (exp1, 0),
7869 TYPE_MODE (TREE_TYPE (exp1))));
7872 if (modifier == EXPAND_STACK_PARM)
7873 target = 0;
7875 /* Check for multiplying things that have been extended
7876 from a narrower type. If this machine supports multiplying
7877 in that narrower type with a result in the desired type,
7878 do it that way, and avoid the explicit type-conversion. */
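/* Hedged example: (int) S1 * (int) S2, where S1 and S2 have a
narrower signed type, can use smul_widen_optab directly on the
narrow operands (umul_widen_optab if they are unsigned), avoiding
the explicit extensions. */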
7879 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7880 && TREE_CODE (type) == INTEGER_TYPE
7881 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7882 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7883 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7884 && int_fits_type_p (TREE_OPERAND (exp, 1),
7885 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7886 /* Don't use a widening multiply if a shift will do. */
7887 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7888 > HOST_BITS_PER_WIDE_INT)
7889 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7890 ||
7891 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7892 && (TYPE_PRECISION (TREE_TYPE
7893 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7894 == TYPE_PRECISION (TREE_TYPE
7895 (TREE_OPERAND
7896 (TREE_OPERAND (exp, 0), 0))))
7897 /* If both operands are extended, they must either both
7898 be zero-extended or both be sign-extended. */
7899 && (TYPE_UNSIGNED (TREE_TYPE
7900 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7901 == TYPE_UNSIGNED (TREE_TYPE
7902 (TREE_OPERAND
7903 (TREE_OPERAND (exp, 0), 0)))))))
7905 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7906 enum machine_mode innermode = TYPE_MODE (op0type);
7907 bool zextend_p = TYPE_UNSIGNED (op0type);
7908 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7909 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7911 if (mode == GET_MODE_WIDER_MODE (innermode))
7913 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7915 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7916 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7917 TREE_OPERAND (exp, 1),
7918 NULL_RTX, &op0, &op1, 0);
7919 else
7920 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7921 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7922 NULL_RTX, &op0, &op1, 0);
7923 goto binop2;
7925 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7926 && innermode == word_mode)
7928 rtx htem, hipart;
7929 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7930 NULL_RTX, VOIDmode, 0);
7931 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7932 op1 = convert_modes (innermode, mode,
7933 expand_expr (TREE_OPERAND (exp, 1),
7934 NULL_RTX, VOIDmode, 0),
7935 unsignedp);
7936 else
7937 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7938 NULL_RTX, VOIDmode, 0);
7939 temp = expand_binop (mode, other_optab, op0, op1, target,
7940 unsignedp, OPTAB_LIB_WIDEN);
7941 hipart = gen_highpart (innermode, temp);
7942 htem = expand_mult_highpart_adjust (innermode, hipart,
7943 op0, op1, hipart,
7944 zextend_p);
7945 if (htem != hipart)
7946 emit_move_insn (hipart, htem);
7947 return temp;
7951 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7952 subtarget, &op0, &op1, 0);
7953 return expand_mult (mode, op0, op1, target, unsignedp);
7955 case TRUNC_DIV_EXPR:
7956 case FLOOR_DIV_EXPR:
7957 case CEIL_DIV_EXPR:
7958 case ROUND_DIV_EXPR:
7959 case EXACT_DIV_EXPR:
7960 if (modifier == EXPAND_STACK_PARM)
7961 target = 0;
7962 /* Possible optimization: compute the dividend with EXPAND_SUM;
7963 then, if the divisor is constant, we can optimize the case
7964 where some terms of the dividend have coefficients divisible by it. */
7965 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7966 subtarget, &op0, &op1, 0);
7967 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7969 case RDIV_EXPR:
7970 /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
7971 saving an expensive divide.  If not, combine will rebuild the
7972 original computation. */
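/* Illustrative sketch: with -funsafe-math-optimizations, X / Y is
rewritten as X * (1/Y); if several divisions share the divisor,
say X / Y and Z / Y, CSE may compute 1/Y only once. */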
7973 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7974 && TREE_CODE (type) == REAL_TYPE
7975 && !real_onep (TREE_OPERAND (exp, 0)))
7976 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7977 build (RDIV_EXPR, type,
7978 build_real (type, dconst1),
7979 TREE_OPERAND (exp, 1))),
7980 target, tmode, modifier);
7981 this_optab = sdiv_optab;
7982 goto binop;
7984 case TRUNC_MOD_EXPR:
7985 case FLOOR_MOD_EXPR:
7986 case CEIL_MOD_EXPR:
7987 case ROUND_MOD_EXPR:
7988 if (modifier == EXPAND_STACK_PARM)
7989 target = 0;
7990 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7991 subtarget, &op0, &op1, 0);
7992 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7994 case FIX_ROUND_EXPR:
7995 case FIX_FLOOR_EXPR:
7996 case FIX_CEIL_EXPR:
7997 abort (); /* Not used for C. */
7999 case FIX_TRUNC_EXPR:
8000 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8001 if (target == 0 || modifier == EXPAND_STACK_PARM)
8002 target = gen_reg_rtx (mode);
8003 expand_fix (target, op0, unsignedp);
8004 return target;
8006 case FLOAT_EXPR:
8007 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8008 if (target == 0 || modifier == EXPAND_STACK_PARM)
8009 target = gen_reg_rtx (mode);
8010 /* expand_float can't figure out what to do if FROM has VOIDmode.
8011 So give it the correct mode. With -O, cse will optimize this. */
8012 if (GET_MODE (op0) == VOIDmode)
8013 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8014 op0);
8015 expand_float (target, op0,
8016 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8017 return target;
8019 case NEGATE_EXPR:
8020 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8021 if (modifier == EXPAND_STACK_PARM)
8022 target = 0;
8023 temp = expand_unop (mode,
8024 ! unsignedp && flag_trapv
8025 && (GET_MODE_CLASS(mode) == MODE_INT)
8026 ? negv_optab : neg_optab, op0, target, 0);
8027 if (temp == 0)
8028 abort ();
8029 return temp;
8031 case ABS_EXPR:
8032 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8033 if (modifier == EXPAND_STACK_PARM)
8034 target = 0;
8036 /* ABS_EXPR is not valid for complex arguments. */
8037 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8038 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8039 abort ();
8041 /* Unsigned abs is simply the operand. Testing here means we don't
8042 risk generating incorrect code below. */
8043 if (TYPE_UNSIGNED (type))
8044 return op0;
8046 return expand_abs (mode, op0, target, unsignedp,
8047 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8049 case MAX_EXPR:
8050 case MIN_EXPR:
8051 target = original_target;
8052 if (target == 0
8053 || modifier == EXPAND_STACK_PARM
8054 || (MEM_P (target) && MEM_VOLATILE_P (target))
8055 || GET_MODE (target) != mode
8056 || (REG_P (target)
8057 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8058 target = gen_reg_rtx (mode);
8059 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8060 target, &op0, &op1, 0);
8062 /* First try to do it with a special MIN or MAX instruction.
8063 If that does not win, use a conditional jump to select the proper
8064 value. */
8065 this_optab = (unsignedp
8066 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8067 : (code == MIN_EXPR ? smin_optab : smax_optab));
8069 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8070 OPTAB_WIDEN);
8071 if (temp != 0)
8072 return temp;
8074 /* At this point, a MEM target is no longer useful; we will get better
8075 code without it. */
8077 if (MEM_P (target))
8078 target = gen_reg_rtx (mode);
8080 /* If op1 was placed in target, swap op0 and op1. */
8081 if (target != op0 && target == op1)
8083 rtx tem = op0;
8084 op0 = op1;
8085 op1 = tem;
8088 if (target != op0)
8089 emit_move_insn (target, op0);
8091 op0 = gen_label_rtx ();
8093 /* If this mode is an integer too wide to compare properly,
8094 compare word by word. Rely on cse to optimize constant cases. */
8095 if (GET_MODE_CLASS (mode) == MODE_INT
8096 && ! can_compare_p (GE, mode, ccp_jump))
8098 if (code == MAX_EXPR)
8099 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8100 NULL_RTX, op0);
8101 else
8102 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8103 NULL_RTX, op0);
8105 else
8107 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8108 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8110 emit_move_insn (target, op1);
8111 emit_label (op0);
8112 return target;
8114 case BIT_NOT_EXPR:
8115 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8116 if (modifier == EXPAND_STACK_PARM)
8117 target = 0;
8118 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8119 if (temp == 0)
8120 abort ();
8121 return temp;
8123 /* ??? Can optimize bitwise operations with one arg constant.
8124 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8125 and (a bitwise1 b) bitwise2 b (etc)
8126 but that is probably not worth while. */
8128 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8129 boolean values when we want in all cases to compute both of them. In
8130 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8131 as actual zero-or-1 values and then bitwise anding. In cases where
8132 there cannot be any side effects, better code would be made by
8133 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8134 how to recognize those cases. */
8136 case TRUTH_AND_EXPR:
8137 case BIT_AND_EXPR:
8138 this_optab = and_optab;
8139 goto binop;
8141 case TRUTH_OR_EXPR:
8142 case BIT_IOR_EXPR:
8143 this_optab = ior_optab;
8144 goto binop;
8146 case TRUTH_XOR_EXPR:
8147 case BIT_XOR_EXPR:
8148 this_optab = xor_optab;
8149 goto binop;
8151 case LSHIFT_EXPR:
8152 case RSHIFT_EXPR:
8153 case LROTATE_EXPR:
8154 case RROTATE_EXPR:
8155 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8156 subtarget = 0;
8157 if (modifier == EXPAND_STACK_PARM)
8158 target = 0;
8159 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8160 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8161 unsignedp);
8163 /* Could determine the answer when only additive constants differ. Also,
8164 the addition of one can be handled by changing the condition. */
8165 case LT_EXPR:
8166 case LE_EXPR:
8167 case GT_EXPR:
8168 case GE_EXPR:
8169 case EQ_EXPR:
8170 case NE_EXPR:
8171 case UNORDERED_EXPR:
8172 case ORDERED_EXPR:
8173 case UNLT_EXPR:
8174 case UNLE_EXPR:
8175 case UNGT_EXPR:
8176 case UNGE_EXPR:
8177 case UNEQ_EXPR:
8178 case LTGT_EXPR:
8179 temp = do_store_flag (exp,
8180 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8181 tmode != VOIDmode ? tmode : mode, 0);
8182 if (temp != 0)
8183 return temp;
8185 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8186 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8187 && original_target
8188 && REG_P (original_target)
8189 && (GET_MODE (original_target)
8190 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8192 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8193 VOIDmode, 0);
8195 /* If temp is constant, we can just compute the result. */
8196 if (GET_CODE (temp) == CONST_INT)
8198 if (INTVAL (temp) != 0)
8199 emit_move_insn (target, const1_rtx);
8200 else
8201 emit_move_insn (target, const0_rtx);
8203 return target;
8206 if (temp != original_target)
8208 enum machine_mode mode1 = GET_MODE (temp);
8209 if (mode1 == VOIDmode)
8210 mode1 = tmode != VOIDmode ? tmode : mode;
8212 temp = copy_to_mode_reg (mode1, temp);
8215 op1 = gen_label_rtx ();
8216 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8217 GET_MODE (temp), unsignedp, op1);
8218 emit_move_insn (temp, const1_rtx);
8219 emit_label (op1);
8220 return temp;
8223 /* If no set-flag instruction, must generate a conditional
8224 store into a temporary variable. Drop through
8225 and handle this like && and ||. */
8227 case TRUTH_ANDIF_EXPR:
8228 case TRUTH_ORIF_EXPR:
8229 if (! ignore
8230 && (target == 0
8231 || modifier == EXPAND_STACK_PARM
8232 || ! safe_from_p (target, exp, 1)
8233 /* Make sure we don't have a hard reg (such as function's return
8234 value) live across basic blocks, if not optimizing. */
8235 || (!optimize && REG_P (target)
8236 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8237 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8239 if (target)
8240 emit_clr_insn (target);
8242 op1 = gen_label_rtx ();
8243 jumpifnot (exp, op1);
8245 if (target)
8246 emit_0_to_1_insn (target);
8248 emit_label (op1);
8249 return ignore ? const0_rtx : target;
8251 case TRUTH_NOT_EXPR:
8252 if (modifier == EXPAND_STACK_PARM)
8253 target = 0;
8254 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8255 /* The parser is careful to generate TRUTH_NOT_EXPR
8256 only with operands that are always zero or one. */
8257 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8258 target, 1, OPTAB_LIB_WIDEN);
8259 if (temp == 0)
8260 abort ();
8261 return temp;
8263 case COMPOUND_EXPR:
8264 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8265 emit_queue ();
8266 return expand_expr_real (TREE_OPERAND (exp, 1),
8267 (ignore ? const0_rtx : target),
8268 VOIDmode, modifier, alt_rtl);
8270 case STATEMENT_LIST:
8272 tree_stmt_iterator iter;
8274 if (!ignore)
8275 abort ();
8277 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8278 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8280 return const0_rtx;
8282 case COND_EXPR:
8283 /* If it's void, we don't need to worry about computing a value. */
8284 if (VOID_TYPE_P (TREE_TYPE (exp)))
8286 tree pred = TREE_OPERAND (exp, 0);
8287 tree then_ = TREE_OPERAND (exp, 1);
8288 tree else_ = TREE_OPERAND (exp, 2);
8290 /* If we do not have any pending cleanups or stack_levels
8291 to restore, and at least one arm of the COND_EXPR is a
8292 GOTO_EXPR to a local label, then we can emit more efficient
8293 code by using jumpif/jumpifnot instead of the 'if' machinery. */
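/* Hedged example: a void COND_EXPR of the form
     if (pred) goto local_label;
has a GOTO_EXPR arm, so the code below can emit a single
jumpif (pred, label) instead of the general 'if' machinery. */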
8294 if (! optimize
8295 || containing_blocks_have_cleanups_or_stack_level ())
8296 ;
8297 else if (TREE_CODE (then_) == GOTO_EXPR
8298 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8300 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8301 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8303 else if (TREE_CODE (else_) == GOTO_EXPR
8304 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8306 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8307 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8310 /* Just use the 'if' machinery. */
8311 expand_start_cond (pred, 0);
8312 start_cleanup_deferral ();
8313 expand_expr (then_, const0_rtx, VOIDmode, 0);
8315 exp = else_;
8317 /* Iterate over 'else if's instead of recursing. */
8318 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8320 expand_start_else ();
8321 if (EXPR_HAS_LOCATION (exp))
8323 emit_line_note (EXPR_LOCATION (exp));
8324 record_block_change (TREE_BLOCK (exp));
8326 expand_elseif (TREE_OPERAND (exp, 0));
8327 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8329 /* Don't emit the jump and label if there's no 'else' clause. */
8330 if (TREE_SIDE_EFFECTS (exp))
8332 expand_start_else ();
8333 expand_expr (exp, const0_rtx, VOIDmode, 0);
8335 end_cleanup_deferral ();
8336 expand_end_cond ();
8337 return const0_rtx;
8340 /* If we would have a "singleton" (see below) were it not for a
8341 conversion in each arm, bring that conversion back out. */
8342 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8343 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8344 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8345 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8347 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8348 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8350 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8351 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8352 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8353 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8354 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8355 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8356 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8357 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8358 return expand_expr (build1 (NOP_EXPR, type,
8359 build (COND_EXPR, TREE_TYPE (iftrue),
8360 TREE_OPERAND (exp, 0),
8361 iftrue, iffalse)),
8362 target, tmode, modifier);
8366 /* Note that COND_EXPRs whose type is a structure or union
8367 are required to be constructed to contain assignments of
8368 a temporary variable, so that we can evaluate them here
8369 for side effect only. If type is void, we must do likewise. */
8371 /* If an arm of the branch requires a cleanup,
8372 only that cleanup is performed. */
8374 tree singleton = 0;
8375 tree binary_op = 0, unary_op = 0;
8377 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8378 convert it to our mode, if necessary. */
8379 if (integer_onep (TREE_OPERAND (exp, 1))
8380 && integer_zerop (TREE_OPERAND (exp, 2))
8381 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8383 if (ignore)
8385 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8386 modifier);
8387 return const0_rtx;
8390 if (modifier == EXPAND_STACK_PARM)
8391 target = 0;
8392 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8393 if (GET_MODE (op0) == mode)
8394 return op0;
8396 if (target == 0)
8397 target = gen_reg_rtx (mode);
8398 convert_move (target, op0, unsignedp);
8399 return target;
8402 /* Check for X ? A + B : A. If we have this, we can copy A to the
8403 output and conditionally add B. Similarly for unary operations.
8404 Don't do this if X has side-effects because those side effects
8405 might affect A or B and the "?" operation is a sequence point in
8406 ANSI. (operand_equal_p tests for side effects.) */
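/* Illustrative case: for X ? A + B : A, with X free of side effects,
SINGLETON is A and BINARY_OP is A + B, so A can be copied to the
output and B conditionally added. */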
8408 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8409 && operand_equal_p (TREE_OPERAND (exp, 2),
8410 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8411 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8412 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8413 && operand_equal_p (TREE_OPERAND (exp, 1),
8414 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8415 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8416 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8417 && operand_equal_p (TREE_OPERAND (exp, 2),
8418 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8419 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8420 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8421 && operand_equal_p (TREE_OPERAND (exp, 1),
8422 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8423 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8425 /* If we are not to produce a result, we have no target. Otherwise,
8426 if a target was specified use it; it will not be used as an
8427 intermediate target unless it is safe. If no target, use a
8428 temporary. */
8430 if (ignore)
8431 temp = 0;
8432 else if (modifier == EXPAND_STACK_PARM)
8433 temp = assign_temp (type, 0, 0, 1);
8434 else if (original_target
8435 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8436 || (singleton && REG_P (original_target)
8437 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8438 && original_target == var_rtx (singleton)))
8439 && GET_MODE (original_target) == mode
8440 #ifdef HAVE_conditional_move
8441 && (! can_conditionally_move_p (mode)
8442 || REG_P (original_target)
8443 || TREE_ADDRESSABLE (type))
8444 #endif
8445 && (!MEM_P (original_target)
8446 || TREE_ADDRESSABLE (type)))
8447 temp = original_target;
8448 else if (TREE_ADDRESSABLE (type))
8449 abort ();
8450 else
8451 temp = assign_temp (type, 0, 0, 1);
8453 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8454 do the test of X as a store-flag operation, do this as
8455 A + ((X != 0) << log C). Similarly for other simple binary
8456 operators. Only do for C == 1 if BRANCH_COST is low. */
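/* Hedged example: when BRANCH_COST >= 3, X ? A + 8 : A can be emitted
branch-free as A + ((X != 0) << 3), since log2 (8) = 3; with cheap
branches only the C == 1 form A + (X != 0) is used. */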
8457 if (temp && singleton && binary_op
8458 && (TREE_CODE (binary_op) == PLUS_EXPR
8459 || TREE_CODE (binary_op) == MINUS_EXPR
8460 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8461 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8462 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8463 : integer_onep (TREE_OPERAND (binary_op, 1)))
8464 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8466 rtx result;
8467 tree cond;
8468 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8469 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8470 ? addv_optab : add_optab)
8471 : TREE_CODE (binary_op) == MINUS_EXPR
8472 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8473 ? subv_optab : sub_optab)
8474 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8475 : xor_optab);
8477 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8478 if (singleton == TREE_OPERAND (exp, 1))
8479 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8480 else
8481 cond = TREE_OPERAND (exp, 0);
8483 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8484 ? temp : NULL_RTX),
8485 mode, BRANCH_COST <= 1);
8487 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8488 result = expand_shift (LSHIFT_EXPR, mode, result,
8489 build_int_2 (tree_log2
8490 (TREE_OPERAND
8491 (binary_op, 1)),
8492 0),
8493 (safe_from_p (temp, singleton, 1)
8494 ? temp : NULL_RTX), 0);
8496 if (result)
8498 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8499 return expand_binop (mode, boptab, op1, result, temp,
8500 unsignedp, OPTAB_LIB_WIDEN);
8504 do_pending_stack_adjust ();
8505 NO_DEFER_POP;
8506 op0 = gen_label_rtx ();
8508 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8510 if (temp != 0)
8512 /* If the target conflicts with the other operand of the
8513 binary op, we can't use it. Also, we can't use the target
8514 if it is a hard register, because evaluating the condition
8515 might clobber it. */
8516 if ((binary_op
8517 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8518 || (REG_P (temp)
8519 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8520 temp = gen_reg_rtx (mode);
8521 store_expr (singleton, temp,
8522 modifier == EXPAND_STACK_PARM ? 2 : 0);
8524 else
8525 expand_expr (singleton,
8526 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8527 if (singleton == TREE_OPERAND (exp, 1))
8528 jumpif (TREE_OPERAND (exp, 0), op0);
8529 else
8530 jumpifnot (TREE_OPERAND (exp, 0), op0);
8532 start_cleanup_deferral ();
8533 if (binary_op && temp == 0)
8534 /* Just touch the other operand. */
8535 expand_expr (TREE_OPERAND (binary_op, 1),
8536 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8537 else if (binary_op)
8538 store_expr (build (TREE_CODE (binary_op), type,
8539 make_tree (type, temp),
8540 TREE_OPERAND (binary_op, 1)),
8541 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8542 else
8543 store_expr (build1 (TREE_CODE (unary_op), type,
8544 make_tree (type, temp)),
8545 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8546 op1 = op0;
8548 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8549 comparison operator. If we have one of these cases, set the
8550 output to A, branch on A (cse will merge these two references),
8551 then set the output to FOO. */
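/* Illustrative case: for X != 0 ? X : Y, store X into the output,
branch past the else arm when X != 0 (cse merges the two references
to X), and otherwise store Y. */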
8552 else if (temp
8553 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8554 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8555 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8556 TREE_OPERAND (exp, 1), 0)
8557 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8558 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8559 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8561 if (REG_P (temp)
8562 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8563 temp = gen_reg_rtx (mode);
8564 store_expr (TREE_OPERAND (exp, 1), temp,
8565 modifier == EXPAND_STACK_PARM ? 2 : 0);
8566 jumpif (TREE_OPERAND (exp, 0), op0);
8568 start_cleanup_deferral ();
8569 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8570 store_expr (TREE_OPERAND (exp, 2), temp,
8571 modifier == EXPAND_STACK_PARM ? 2 : 0);
8572 else
8573 expand_expr (TREE_OPERAND (exp, 2),
8574 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8575 op1 = op0;
8577 else if (temp
8578 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8579 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8580 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8581 TREE_OPERAND (exp, 2), 0)
8582 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8583 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8584 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8586 if (REG_P (temp)
8587 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8588 temp = gen_reg_rtx (mode);
8589 store_expr (TREE_OPERAND (exp, 2), temp,
8590 modifier == EXPAND_STACK_PARM ? 2 : 0);
8591 jumpifnot (TREE_OPERAND (exp, 0), op0);
8593 start_cleanup_deferral ();
8594 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8595 store_expr (TREE_OPERAND (exp, 1), temp,
8596 modifier == EXPAND_STACK_PARM ? 2 : 0);
8597 else
8598 expand_expr (TREE_OPERAND (exp, 1),
8599 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8600 op1 = op0;
8602 else
8604 op1 = gen_label_rtx ();
8605 jumpifnot (TREE_OPERAND (exp, 0), op0);
8607 start_cleanup_deferral ();
8609 /* One branch of the cond can be void, if it never returns. For
8610 example A ? throw : E */
8611 if (temp != 0
8612 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8613 store_expr (TREE_OPERAND (exp, 1), temp,
8614 modifier == EXPAND_STACK_PARM ? 2 : 0);
8615 else
8616 expand_expr (TREE_OPERAND (exp, 1),
8617 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8618 end_cleanup_deferral ();
8619 emit_queue ();
8620 emit_jump_insn (gen_jump (op1));
8621 emit_barrier ();
8622 emit_label (op0);
8623 start_cleanup_deferral ();
8624 if (temp != 0
8625 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8626 store_expr (TREE_OPERAND (exp, 2), temp,
8627 modifier == EXPAND_STACK_PARM ? 2 : 0);
8628 else
8629 expand_expr (TREE_OPERAND (exp, 2),
8630 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8633 end_cleanup_deferral ();
8635 emit_queue ();
8636 emit_label (op1);
8637 OK_DEFER_POP;
8639 return temp;
8642 case TARGET_EXPR:
8644 /* Something needs to be initialized, but we didn't know
8645 where that thing was when building the tree. For example,
8646 it could be the return value of a function, or a parameter
8647 to a function that is laid out on the stack, or a temporary
8648 variable which must be passed by reference.
8650 We guarantee that the expression will either be constructed
8651 or copied into our original target. */
8653 tree slot = TREE_OPERAND (exp, 0);
8654 tree cleanups = NULL_TREE;
8655 tree exp1;
8657 if (TREE_CODE (slot) != VAR_DECL)
8658 abort ();
8660 if (! ignore)
8661 target = original_target;
8663 /* Set this here so that if we get a target that refers to a
8664 register variable that's already been used, put_reg_into_stack
8665 knows that it should fix up those uses. */
8666 TREE_USED (slot) = 1;
8668 if (target == 0)
8670 if (DECL_RTL_SET_P (slot))
8672 target = DECL_RTL (slot);
8673 /* If we have already expanded the slot, don't do
8674 it again. (mrs) */
8675 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8676 return target;
8678 else
8680 target = assign_temp (type, 2, 0, 1);
8681 SET_DECL_RTL (slot, target);
8683 /* Since SLOT is not known to the called function
8684 to belong to its stack frame, we must build an explicit
8685 cleanup. This case occurs when we must build up a reference
8686 to pass the reference as an argument. In this case,
8687 it is very likely that such a reference need not be
8688 built here. */
8690 if (TREE_OPERAND (exp, 2) == 0)
8691 TREE_OPERAND (exp, 2)
8692 = lang_hooks.maybe_build_cleanup (slot);
8693 cleanups = TREE_OPERAND (exp, 2);
8696 else
8698 /* This case does occur, when expanding a parameter which
8699 needs to be constructed on the stack. The target
8700 is the actual stack address that we want to initialize.
8701 The function we call will perform the cleanup in this case. */
8703 /* If we have already assigned it space, use that space,
8704 not target that we were passed in, as our target
8705 parameter is only a hint. */
8706 if (DECL_RTL_SET_P (slot))
8708 target = DECL_RTL (slot);
8709 /* If we have already expanded the slot, don't do
8710 it again. (mrs) */
8711 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8712 return target;
8714 else
8715 SET_DECL_RTL (slot, target);
8718 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8719 /* Mark it as expanded. */
8720 TREE_OPERAND (exp, 1) = NULL_TREE;
8722 if (VOID_TYPE_P (TREE_TYPE (exp1)))
8723 /* If the initializer is void, just expand it; it will initialize
8724 the object directly. */
8725 expand_expr (exp1, const0_rtx, VOIDmode, 0);
8726 else
8727 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8729 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8731 return target;
8734 case INIT_EXPR:
8736 tree lhs = TREE_OPERAND (exp, 0);
8737 tree rhs = TREE_OPERAND (exp, 1);
8739 temp = expand_assignment (lhs, rhs, ! ignore);
8740 return temp;
8743 case MODIFY_EXPR:
8745 /* If lhs is complex, expand calls in rhs before computing it.
8746 That's so we don't compute a pointer and save it over a
8747 call. If lhs is simple, compute it first so we can give it
8748 as a target if the rhs is just a call. This avoids an
8749 extra temp and copy and that prevents a partial-subsumption
8750 which makes bad code. Actually we could treat
8751 component_ref's of vars like vars. */
8753 tree lhs = TREE_OPERAND (exp, 0);
8754 tree rhs = TREE_OPERAND (exp, 1);
8756 temp = 0;
8758 /* Check for |= or &= of a bitfield of size one into another bitfield
8759 of size 1. In this case, (unless we need the result of the
8760 assignment) we can do this more efficiently with a
8761 test followed by an assignment, if necessary.
8763 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8764 things change so we do, this code should be enhanced to
8765 support it. */
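/* Hedged sketch of the transformation below: with one-bit bitfields
and an unused result, S.A |= T.B becomes roughly
  if (T.B) S.A = 1;
and S.A &= T.B becomes
  if (!T.B) S.A = 0;
avoiding a read-modify-write of S.A. */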
8766 if (ignore
8767 && TREE_CODE (lhs) == COMPONENT_REF
8768 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8769 || TREE_CODE (rhs) == BIT_AND_EXPR)
8770 && TREE_OPERAND (rhs, 0) == lhs
8771 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8772 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8773 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8775 rtx label = gen_label_rtx ();
8777 do_jump (TREE_OPERAND (rhs, 1),
8778 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8779 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8780 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8781 (TREE_CODE (rhs) == BIT_IOR_EXPR
8782 ? integer_one_node
8783 : integer_zero_node)),
8784 0);
8785 do_pending_stack_adjust ();
8786 emit_label (label);
8787 return const0_rtx;
8790 temp = expand_assignment (lhs, rhs, ! ignore);
8792 return temp;
8795 case RETURN_EXPR:
8796 if (!TREE_OPERAND (exp, 0))
8797 expand_null_return ();
8798 else
8799 expand_return (TREE_OPERAND (exp, 0));
8800 return const0_rtx;
8802 case PREINCREMENT_EXPR:
8803 case PREDECREMENT_EXPR:
8804 return expand_increment (exp, 0, ignore);
8806 case POSTINCREMENT_EXPR:
8807 case POSTDECREMENT_EXPR:
8808 /* Faster to treat as pre-increment if result is not used. */
8809 return expand_increment (exp, ! ignore, ignore);
8811 case ADDR_EXPR:
8812 if (modifier == EXPAND_STACK_PARM)
8813 target = 0;
8814 /* If we are taking the address of something erroneous, just
8815 return a zero. */
8816 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8817 return const0_rtx;
8818 /* If we are taking the address of a constant and are at the
8819 top level, we have to use output_constant_def since we can't
8820 call force_const_mem at top level. */
8821 else if (cfun == 0
8822 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8823 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8824 == 'c')))
8825 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8826 else
8828 /* We make sure to pass const0_rtx down if we came in with
8829 ignore set, to avoid doing the cleanups twice for something. */
8830 op0 = expand_expr (TREE_OPERAND (exp, 0),
8831 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8832 (modifier == EXPAND_INITIALIZER
8833 ? modifier : EXPAND_CONST_ADDRESS));
8835 /* If we are going to ignore the result, OP0 will have been set
8836 to const0_rtx, so just return it. Don't get confused and
8837 think we are taking the address of the constant. */
8838 if (ignore)
8839 return op0;
8841 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8842 clever and return a REG when given a MEM. */
8843 op0 = protect_from_queue (op0, 1);
8845 /* We would like the object in memory. If it is a constant, we can
8846 have it be statically allocated into memory. For a non-constant,
8847 we need to allocate some memory and store the value into it. */
8849 if (CONSTANT_P (op0))
8850 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8851 op0);
8852 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8853 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8854 || GET_CODE (op0) == LO_SUM)
8856 /* If this object is in a register, it can't be BLKmode. */
8857 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8858 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8860 if (GET_CODE (op0) == PARALLEL)
8861 /* Handle calls that pass values in multiple
8862 non-contiguous locations. The Irix 6 ABI has examples
8863 of this. */
8864 emit_group_store (memloc, op0, inner_type,
8865 int_size_in_bytes (inner_type));
8866 else
8867 emit_move_insn (memloc, op0);
8869 op0 = memloc;
8872 if (!MEM_P (op0))
8873 abort ();
8875 mark_temp_addr_taken (op0);
8876 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8878 op0 = XEXP (op0, 0);
8879 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8880 op0 = convert_memory_address (ptr_mode, op0);
8881 return op0;
8884 /* If OP0 is not aligned at least as much as the type requires, we
8885 need to make a temporary, copy OP0 to it, and take the address of
8886 the temporary. We want to use the alignment of the type, not of
8887 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8888 the test for BLKmode means that can't happen. The test for
8889 BLKmode is because we never make mis-aligned MEMs with
8890 non-BLKmode.
8892 We don't need to do this at all if the machine doesn't have
8893 strict alignment. */
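/* Illustration under the assumption of a strict-alignment target:
a BLKmode OP0 whose MEM_ALIGN is below the type's alignment is
copied into a suitably aligned stack temporary below, and the
address of that temporary is returned; if the type cannot be
bitwise-copied (TREE_ADDRESSABLE), an error is emitted instead. */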
8894 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8895 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8896 > MEM_ALIGN (op0))
8897 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8899 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8900 rtx new;
8902 if (TYPE_ALIGN_OK (inner_type))
8903 abort ();
8905 if (TREE_ADDRESSABLE (inner_type))
8907 /* We can't make a bitwise copy of this object, so fail. */
8908 error ("cannot take the address of an unaligned member");
8909 return const0_rtx;
8912 new = assign_stack_temp_for_type
8913 (TYPE_MODE (inner_type),
8914 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8915 : int_size_in_bytes (inner_type),
8916 1, build_qualified_type (inner_type,
8917 (TYPE_QUALS (inner_type)
8918 | TYPE_QUAL_CONST)));
8920 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8921 (modifier == EXPAND_STACK_PARM
8922 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8924 op0 = new;
8927 op0 = force_operand (XEXP (op0, 0), target);
8930 if (flag_force_addr
8931 && !REG_P (op0)
8932 && modifier != EXPAND_CONST_ADDRESS
8933 && modifier != EXPAND_INITIALIZER
8934 && modifier != EXPAND_SUM)
8935 op0 = force_reg (Pmode, op0);
8937 if (REG_P (op0)
8938 && ! REG_USERVAR_P (op0))
8939 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8941 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8942 op0 = convert_memory_address (ptr_mode, op0);
8944 return op0;
8946 case ENTRY_VALUE_EXPR:
8947 abort ();
8949 /* COMPLEX type for Extended Pascal & Fortran */
8950 case COMPLEX_EXPR:
8952 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8953 rtx insns;
8955 /* Get the rtx code of the operands. */
8956 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8957 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8959 if (! target)
8960 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8962 start_sequence ();
8964 /* Move the real (op0) and imaginary (op1) parts to their location. */
8965 emit_move_insn (gen_realpart (mode, target), op0);
8966 emit_move_insn (gen_imagpart (mode, target), op1);
8968 insns = get_insns ();
8969 end_sequence ();
8971 /* Complex construction should appear as a single unit. */
8972 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8973 each with a separate pseudo as destination.
8974 It's not correct for flow to treat them as a unit. */
8975 if (GET_CODE (target) != CONCAT)
8976 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8977 else
8978 emit_insn (insns);
8980 return target;
8983 case REALPART_EXPR:
8984 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8985 return gen_realpart (mode, op0);
8987 case IMAGPART_EXPR:
8988 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8989 return gen_imagpart (mode, op0);
8991 case CONJ_EXPR:
8993 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8994 rtx imag_t;
8995 rtx insns;
8997 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8999 if (! target)
9000 target = gen_reg_rtx (mode);
9002 start_sequence ();
9004 /* Store the realpart and the negated imagpart to target. */
9005 emit_move_insn (gen_realpart (partmode, target),
9006 gen_realpart (partmode, op0));
9008 imag_t = gen_imagpart (partmode, target);
9009 temp = expand_unop (partmode,
9010 ! unsignedp && flag_trapv
9011 && (GET_MODE_CLASS(partmode) == MODE_INT)
9012 ? negv_optab : neg_optab,
9013 gen_imagpart (partmode, op0), imag_t, 0);
9014 if (temp != imag_t)
9015 emit_move_insn (imag_t, temp);
9017 insns = get_insns ();
9018 end_sequence ();
9020 /* Conjugate should appear as a single unit.
9021 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9022 each with a separate pseudo as destination.
9023 It's not correct for flow to treat them as a unit. */
9024 if (GET_CODE (target) != CONCAT)
9025 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9026 else
9027 emit_insn (insns);
9029 return target;
9032 case RESX_EXPR:
9033 expand_resx_expr (exp);
9034 return const0_rtx;
9036 case TRY_CATCH_EXPR:
9038 tree handler = TREE_OPERAND (exp, 1);
9040 expand_eh_region_start ();
9041 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9042 expand_eh_handler (handler);
9044 return op0;
9047 case CATCH_EXPR:
9048 expand_start_catch (CATCH_TYPES (exp));
9049 expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
9050 expand_end_catch ();
9051 return const0_rtx;
9053 case EH_FILTER_EXPR:
9054 /* Should have been handled in expand_eh_handler. */
9055 abort ();
9057 case TRY_FINALLY_EXPR:
9059 tree try_block = TREE_OPERAND (exp, 0);
9060 tree finally_block = TREE_OPERAND (exp, 1);
9062 if ((!optimize && lang_protect_cleanup_actions == NULL)
9063 || unsafe_for_reeval (finally_block) > 1)
9065 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9066 is not sufficient to let us expand the block twice, so
9067 instead we play games with GOTO_SUBROUTINE_EXPR to expand
9068 the thing only once. */
9069 /* When not optimizing, we go ahead with this form since
9070 (1) user breakpoints operate more predictably without
9071 code duplication, and
9072 (2) we're not running any of the global optimizers
9073 that would explode in time/space with the highly
9074 connected CFG created by the indirect branching. */
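/* A sketch of what is emitted below (the cleanup registered here is
   the GOTO_SUBROUTINE_EXPR, expanded when the bindings end):
       <try block>
       return_link = &&resume; goto finally;
     resume:
       goto done;
     finally:
       <finally block>
       goto *return_link;
     done:
   so FINALLY_BLOCK is expanded only once.  */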
9076 rtx finally_label = gen_label_rtx ();
9077 rtx done_label = gen_label_rtx ();
9078 rtx return_link = gen_reg_rtx (Pmode);
9079 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9080 (tree) finally_label, (tree) return_link);
9081 TREE_SIDE_EFFECTS (cleanup) = 1;
9083 /* Start a new binding layer that will keep track of all cleanup
9084 actions to be performed. */
9085 expand_start_bindings (2);
9086 target_temp_slot_level = temp_slot_level;
9088 expand_decl_cleanup (NULL_TREE, cleanup);
9089 op0 = expand_expr (try_block, target, tmode, modifier);
9091 preserve_temp_slots (op0);
9092 expand_end_bindings (NULL_TREE, 0, 0);
9093 emit_jump (done_label);
9094 emit_label (finally_label);
9095 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9096 emit_indirect_jump (return_link);
9097 emit_label (done_label);
9099 else
9101 expand_start_bindings (2);
9102 target_temp_slot_level = temp_slot_level;
9104 expand_decl_cleanup (NULL_TREE, finally_block);
9105 op0 = expand_expr (try_block, target, tmode, modifier);
9107 preserve_temp_slots (op0);
9108 expand_end_bindings (NULL_TREE, 0, 0);
9111 return op0;
9114 case GOTO_SUBROUTINE_EXPR:
9116 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9117 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9118 rtx return_address = gen_label_rtx ();
9119 emit_move_insn (return_link,
9120 gen_rtx_LABEL_REF (Pmode, return_address));
9121 emit_jump (subr);
9122 emit_label (return_address);
9123 return const0_rtx;
9126 case VA_ARG_EXPR:
9127 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9129 case EXC_PTR_EXPR:
9130 return get_exception_pointer (cfun);
9132 case FILTER_EXPR:
9133 return get_exception_filter (cfun);
9135 case FDESC_EXPR:
9136 /* Function descriptors are not valid except as
9137 initialization constants, and should not be expanded. */
9138 abort ();
9140 case SWITCH_EXPR:
9141 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
9142 "switch");
9143 if (SWITCH_BODY (exp))
9144 expand_expr_stmt (SWITCH_BODY (exp));
9145 if (SWITCH_LABELS (exp))
9147 tree duplicate = 0;
9148 tree vec = SWITCH_LABELS (exp);
9149 size_t i, n = TREE_VEC_LENGTH (vec);
9151 for (i = 0; i < n; ++i)
9153 tree elt = TREE_VEC_ELT (vec, i);
9154 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
9155 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
9156 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
9158 tree case_low = CASE_LOW (elt);
9159 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
9160 if (case_low && case_high)
9162 /* Case label is less than minimum for type. */
9163 if (TREE_CODE (min_value) == INTEGER_CST
9164 && tree_int_cst_compare (case_low, min_value) < 0
9165 && tree_int_cst_compare (case_high, min_value) < 0)
9167 warning ("case label value %d is less than minimum value for type",
9168 (int) TREE_INT_CST_LOW (case_low));
9169 continue;
9172 /* Case value is greater than maximum for type. */
9173 if (TREE_CODE (max_value) == INTEGER_CST
9174 && tree_int_cst_compare (case_low, max_value) > 0
9175 && tree_int_cst_compare (case_high, max_value) > 0)
9177 warning ("case label value %d exceeds maximum value for type",
9178 (int) TREE_INT_CST_LOW (case_high));
9179 continue;
9182 /* Saturate lower case label value to minimum. */
9183 if (TREE_CODE (min_value) == INTEGER_CST
9184 && tree_int_cst_compare (case_high, min_value) >= 0
9185 && tree_int_cst_compare (case_low, min_value) < 0)
9187 warning ("lower value %d in case label range less than minimum value for type",
9188 (int) TREE_INT_CST_LOW (case_low));
9189 case_low = min_value;
9192 /* Saturate upper case label value to maximum. */
9193 if (TREE_CODE (max_value) == INTEGER_CST
9194 && tree_int_cst_compare (case_low, max_value) <= 0
9195 && tree_int_cst_compare (case_high, max_value) > 0)
9197 warning ("upper value %d in case label range exceeds maximum value for type",
9198 (int) TREE_INT_CST_LOW (case_high));
9199 case_high = max_value;
9203 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
9204 if (duplicate)
9205 abort ();
9208 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
9209 return const0_rtx;
9211 case LABEL_EXPR:
9212 expand_label (TREE_OPERAND (exp, 0));
9213 return const0_rtx;
9215 case CASE_LABEL_EXPR:
9217 tree duplicate = 0;
9218 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
9219 &duplicate, false);
9220 if (duplicate)
9221 abort ();
9222 return const0_rtx;
9225 case ASM_EXPR:
9226 expand_asm_expr (exp);
9227 return const0_rtx;
9229 default:
9230 return lang_hooks.expand_expr (exp, original_target, tmode,
9231 modifier, alt_rtl);
9234 /* Here to do an ordinary binary operator, generating an instruction
9235 from the optab already placed in `this_optab'. */
9236 binop:
9237 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9238 subtarget, &op0, &op1, 0);
9239 binop2:
9240 if (modifier == EXPAND_STACK_PARM)
9241 target = 0;
9242 temp = expand_binop (mode, this_optab, op0, op1, target,
9243 unsignedp, OPTAB_LIB_WIDEN);
9244 if (temp == 0)
9245 abort ();
9246 return temp;
9249 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9250 when applied to the address of EXP produces an address known to be
9251 aligned more than BIGGEST_ALIGNMENT. */
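/* For example, code that rounds the address of EXP up to a C+1 byte
   boundary, (addr + C) & ~C, is the same as addr + ((-addr) & C), so
   the offset recognized here has the form (- &EXP) & C, where C is
   one less than a power of two and exceeds BIGGEST_ALIGNMENT in
   bytes.  */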
9253 static int
9254 is_aligning_offset (tree offset, tree exp)
9256 /* Strip off any conversions. */
9257 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9258 || TREE_CODE (offset) == NOP_EXPR
9259 || TREE_CODE (offset) == CONVERT_EXPR)
9260 offset = TREE_OPERAND (offset, 0);
9262 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9263 a power of 2 and which is larger than BIGGEST_ALIGNMENT in bytes. */
9264 if (TREE_CODE (offset) != BIT_AND_EXPR
9265 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9266 || compare_tree_int (TREE_OPERAND (offset, 1),
9267 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9268 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9269 return 0;
9271 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9272 It must be NEGATE_EXPR. Then strip any more conversions. */
9273 offset = TREE_OPERAND (offset, 0);
9274 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9275 || TREE_CODE (offset) == NOP_EXPR
9276 || TREE_CODE (offset) == CONVERT_EXPR)
9277 offset = TREE_OPERAND (offset, 0);
9279 if (TREE_CODE (offset) != NEGATE_EXPR)
9280 return 0;
9282 offset = TREE_OPERAND (offset, 0);
9283 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9284 || TREE_CODE (offset) == NOP_EXPR
9285 || TREE_CODE (offset) == CONVERT_EXPR)
9286 offset = TREE_OPERAND (offset, 0);
9288 /* This must now be the address of EXP. */
9289 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9292 /* Return the tree node if ARG corresponds to a string constant, or zero
9293 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9294 in bytes within the string that ARG is accessing. The type of the
9295 offset will be `sizetype'. */
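/* For example, for ARG corresponding to &"hello"[2] this returns the
   STRING_CST "hello" and sets *PTR_OFFSET to 2; for "hello" + N it
   returns "hello" and sets *PTR_OFFSET to N.  */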
9297 tree
9298 string_constant (tree arg, tree *ptr_offset)
9300 STRIP_NOPS (arg);
9302 if (TREE_CODE (arg) == ADDR_EXPR
9303 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9305 *ptr_offset = size_zero_node;
9306 return TREE_OPERAND (arg, 0);
9308 if (TREE_CODE (arg) == ADDR_EXPR
9309 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9310 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9312 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9313 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9315 else if (TREE_CODE (arg) == PLUS_EXPR)
9317 tree arg0 = TREE_OPERAND (arg, 0);
9318 tree arg1 = TREE_OPERAND (arg, 1);
9320 STRIP_NOPS (arg0);
9321 STRIP_NOPS (arg1);
9323 if (TREE_CODE (arg0) == ADDR_EXPR
9324 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9326 *ptr_offset = convert (sizetype, arg1);
9327 return TREE_OPERAND (arg0, 0);
9329 else if (TREE_CODE (arg1) == ADDR_EXPR
9330 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9332 *ptr_offset = convert (sizetype, arg0);
9333 return TREE_OPERAND (arg1, 0);
9337 return 0;
9340 /* Expand code for a post- or pre- increment or decrement
9341 and return the RTX for the result.
9342 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
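/* For example, for i++ (POST is 1) the RTX returned holds the value
   of i before the increment, while for ++i (POST is 0) it holds the
   incremented value.  IGNORE is nonzero if the value is not going to
   be used.  */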
9344 static rtx
9345 expand_increment (tree exp, int post, int ignore)
9347 rtx op0, op1;
9348 rtx temp, value;
9349 tree incremented = TREE_OPERAND (exp, 0);
9350 optab this_optab = add_optab;
9351 int icode;
9352 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9353 int op0_is_copy = 0;
9354 int single_insn = 0;
9355 /* 1 means we can't store into OP0 directly,
9356 because it is a subreg narrower than a word,
9357 and we don't dare clobber the rest of the word. */
9358 int bad_subreg = 0;
9360 /* Stabilize any component ref that might need to be
9361 evaluated more than once below. */
9362 if (!post
9363 || TREE_CODE (incremented) == BIT_FIELD_REF
9364 || (TREE_CODE (incremented) == COMPONENT_REF
9365 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9366 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9367 incremented = stabilize_reference (incremented);
9368 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9369 ones into save exprs so that they don't accidentally get evaluated
9370 more than once by the code below. */
9371 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9372 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9373 incremented = save_expr (incremented);
9375 /* Compute the operands as RTX.
9376 Note whether OP0 is the actual lvalue or a copy of it:
9377 I believe it is a copy iff it is a register or subreg
9378 and insns were generated in computing it. */
9380 temp = get_last_insn ();
9381 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9383 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9384 in place but instead must do sign- or zero-extension during assignment,
9385 so we copy it into a new register and let the code below use it as
9386 a copy.
9388 Note that we can safely modify this SUBREG since it is known not to be
9389 shared (it was made by the expand_expr call above). */
9391 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9393 if (post)
9394 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9395 else
9396 bad_subreg = 1;
9398 else if (GET_CODE (op0) == SUBREG
9399 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9401 /* We cannot increment this SUBREG in place. If we are
9402 post-incrementing, get a copy of the old value. Otherwise,
9403 just mark that we cannot increment in place. */
9404 if (post)
9405 op0 = copy_to_reg (op0);
9406 else
9407 bad_subreg = 1;
9410 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
9411 && temp != get_last_insn ());
9412 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9414 /* Decide whether incrementing or decrementing. */
9415 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9416 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9417 this_optab = sub_optab;
9419 /* Convert decrement by a constant into a negative increment. */
9420 if (this_optab == sub_optab
9421 && GET_CODE (op1) == CONST_INT)
9423 op1 = GEN_INT (-INTVAL (op1));
9424 this_optab = add_optab;
9427 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9428 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9430 /* For a preincrement, see if we can do this with a single instruction. */
9431 if (!post)
9433 icode = (int) this_optab->handlers[(int) mode].insn_code;
9434 if (icode != (int) CODE_FOR_nothing
9435 /* Make sure that OP0 is valid for operands 0 and 1
9436 of the insn we want to queue. */
9437 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9438 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9439 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9440 single_insn = 1;
9443 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9444 then we cannot just increment OP0. We must therefore contrive to
9445 increment the original value. Then, for postincrement, we can return
9446 OP0 since it is a copy of the old value. For preincrement, expand here
9447 unless we can do it with a single insn.
9449 Likewise if storing directly into OP0 would clobber high bits
9450 we need to preserve (bad_subreg). */
9451 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9453 /* This is the easiest way to increment the value wherever it is.
9454 Problems with multiple evaluation of INCREMENTED are prevented
9455 because either (1) it is a component_ref or preincrement,
9456 in which case it was stabilized above, or (2) it is an array_ref
9457 with constant index in an array in a register, which is
9458 safe to reevaluate. */
9459 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9460 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9461 ? MINUS_EXPR : PLUS_EXPR),
9462 TREE_TYPE (exp),
9463 incremented,
9464 TREE_OPERAND (exp, 1));
9466 while (TREE_CODE (incremented) == NOP_EXPR
9467 || TREE_CODE (incremented) == CONVERT_EXPR)
9469 newexp = convert (TREE_TYPE (incremented), newexp);
9470 incremented = TREE_OPERAND (incremented, 0);
9473 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9474 return post ? op0 : temp;
9477 if (post)
9479 /* We have a true reference to the value in OP0.
9480 If there is an insn to add or subtract in this mode, queue it.
9481 Queuing the increment insn avoids the register shuffling
9482 that often results if we must increment now and first save
9483 the old value for subsequent use. */
9485 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9486 op0 = stabilize (op0);
9487 #endif
9489 icode = (int) this_optab->handlers[(int) mode].insn_code;
9490 if (icode != (int) CODE_FOR_nothing
9491 /* Make sure that OP0 is valid for operands 0 and 1
9492 of the insn we want to queue. */
9493 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9494 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9496 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9497 op1 = force_reg (mode, op1);
9499 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9501 if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
9503 rtx addr = (general_operand (XEXP (op0, 0), mode)
9504 ? force_reg (Pmode, XEXP (op0, 0))
9505 : copy_to_reg (XEXP (op0, 0)));
9506 rtx temp, result;
9508 op0 = replace_equiv_address (op0, addr);
9509 temp = force_reg (GET_MODE (op0), op0);
9510 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9511 op1 = force_reg (mode, op1);
9513 /* The increment queue is LIFO, thus we have to `queue'
9514 the instructions in reverse order. */
9515 enqueue_insn (op0, gen_move_insn (op0, temp));
9516 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9517 return result;
9521 /* Preincrement, or we can't increment with one simple insn. */
9522 if (post)
9523 /* Save a copy of the value before inc or dec, to return it later. */
9524 temp = value = copy_to_reg (op0);
9525 else
9526 /* Arrange to return the incremented value. */
9527 /* Copy the rtx because expand_binop will protect from the queue,
9528 and the results of that would be invalid for us to return
9529 if our caller does emit_queue before using our result. */
9530 temp = copy_rtx (value = op0);
9532 /* Increment however we can. */
9533 op1 = expand_binop (mode, this_optab, value, op1, op0,
9534 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9536 /* Make sure the value is stored into OP0. */
9537 if (op1 != op0)
9538 emit_move_insn (op0, op1);
9540 return temp;
9543 /* Generate code to calculate EXP using a store-flag instruction
9544 and return an rtx for the result. EXP is either a comparison
9545 or a TRUTH_NOT_EXPR whose operand is a comparison.
9547 If TARGET is nonzero, store the result there if convenient.
9549 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9550 cheap.
9552 Return zero if there is no suitable set-flag instruction
9553 available on this machine.
9555 Once expand_expr has been called on the arguments of the comparison,
9556 we are committed to doing the store flag, since it is not safe to
9557 re-evaluate the expression. We emit the store-flag insn by calling
9558 emit_store_flag, but only expand the arguments if we have a reason
9559 to believe that emit_store_flag will be successful. If we think that
9560 it will, but it isn't, we have to simulate the store-flag with a
9561 set/jump/set sequence. */
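/* For example, for EXP == (x > y) on a machine with a suitable scc
   instruction we emit roughly TARGET = (x > y); the fallback at the
   end of this function instead emits
       TARGET = 1; if (x > y) goto L; TARGET = 0; L:  */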
9563 static rtx
9564 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9566 enum rtx_code code;
9567 tree arg0, arg1, type;
9568 tree tem;
9569 enum machine_mode operand_mode;
9570 int invert = 0;
9571 int unsignedp;
9572 rtx op0, op1;
9573 enum insn_code icode;
9574 rtx subtarget = target;
9575 rtx result, label;
9577 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9578 result at the end. We can't simply invert the test since it would
9579 have already been inverted if it were valid. This case occurs for
9580 some floating-point comparisons. */
9582 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9583 invert = 1, exp = TREE_OPERAND (exp, 0);
9585 arg0 = TREE_OPERAND (exp, 0);
9586 arg1 = TREE_OPERAND (exp, 1);
9588 /* Don't crash if the comparison was erroneous. */
9589 if (arg0 == error_mark_node || arg1 == error_mark_node)
9590 return const0_rtx;
9592 type = TREE_TYPE (arg0);
9593 operand_mode = TYPE_MODE (type);
9594 unsignedp = TYPE_UNSIGNED (type);
9596 /* We won't bother with BLKmode store-flag operations because it would mean
9597 passing a lot of information to emit_store_flag. */
9598 if (operand_mode == BLKmode)
9599 return 0;
9601 /* We won't bother with store-flag operations involving function pointers
9602 when function pointers must be canonicalized before comparisons. */
9603 #ifdef HAVE_canonicalize_funcptr_for_compare
9604 if (HAVE_canonicalize_funcptr_for_compare
9605 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9606 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9607 == FUNCTION_TYPE))
9608 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9609 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9610 == FUNCTION_TYPE))))
9611 return 0;
9612 #endif
9614 STRIP_NOPS (arg0);
9615 STRIP_NOPS (arg1);
9617 /* Get the rtx comparison code to use. We know that EXP is a comparison
9618 operation of some type. Some comparisons against 1 and -1 can be
9619 converted to comparisons with zero. Do so here so that the tests
9620 below will be aware that we have a comparison with zero. These
9621 tests will not catch constants in the first operand, but constants
9622 are rarely passed as the first operand. */
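/* For example, x < 1 is rewritten as x <= 0, and for signed x,
   x > -1 is rewritten as x >= 0.  */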
9624 switch (TREE_CODE (exp))
9626 case EQ_EXPR:
9627 code = EQ;
9628 break;
9629 case NE_EXPR:
9630 code = NE;
9631 break;
9632 case LT_EXPR:
9633 if (integer_onep (arg1))
9634 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9635 else
9636 code = unsignedp ? LTU : LT;
9637 break;
9638 case LE_EXPR:
9639 if (! unsignedp && integer_all_onesp (arg1))
9640 arg1 = integer_zero_node, code = LT;
9641 else
9642 code = unsignedp ? LEU : LE;
9643 break;
9644 case GT_EXPR:
9645 if (! unsignedp && integer_all_onesp (arg1))
9646 arg1 = integer_zero_node, code = GE;
9647 else
9648 code = unsignedp ? GTU : GT;
9649 break;
9650 case GE_EXPR:
9651 if (integer_onep (arg1))
9652 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9653 else
9654 code = unsignedp ? GEU : GE;
9655 break;
9657 case UNORDERED_EXPR:
9658 code = UNORDERED;
9659 break;
9660 case ORDERED_EXPR:
9661 code = ORDERED;
9662 break;
9663 case UNLT_EXPR:
9664 code = UNLT;
9665 break;
9666 case UNLE_EXPR:
9667 code = UNLE;
9668 break;
9669 case UNGT_EXPR:
9670 code = UNGT;
9671 break;
9672 case UNGE_EXPR:
9673 code = UNGE;
9674 break;
9675 case UNEQ_EXPR:
9676 code = UNEQ;
9677 break;
9678 case LTGT_EXPR:
9679 code = LTGT;
9680 break;
9682 default:
9683 abort ();
9686 /* Put a constant second. */
9687 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9689 tem = arg0; arg0 = arg1; arg1 = tem;
9690 code = swap_condition (code);
9693 /* If this is an equality or inequality test of a single bit, we can
9694 do this by shifting the bit being tested to the low-order bit and
9695 masking the result with the constant 1. If the condition was EQ,
9696 we xor it with 1. This does not require an scc insn and is faster
9697 than an scc insn even if we have it.
9699 The code to make this transformation was moved into fold_single_bit_test,
9700 so we just call into the folder and expand its result. */
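/* For example, (x & 4) != 0 expands as (x >> 2) & 1, and
   (x & 4) == 0 as ((x >> 2) & 1) ^ 1.  */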
9702 if ((code == NE || code == EQ)
9703 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9704 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9706 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9707 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9708 arg0, arg1, type),
9709 target, VOIDmode, EXPAND_NORMAL);
9712 /* Now see if we are likely to be able to do this. Return if not. */
9713 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9714 return 0;
9716 icode = setcc_gen_code[(int) code];
9717 if (icode == CODE_FOR_nothing
9718 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9720 /* We can only do this if it is one of the special cases that
9721 can be handled without an scc insn. */
9722 if ((code == LT && integer_zerop (arg1))
9723 || (! only_cheap && code == GE && integer_zerop (arg1)))
9725 else if (BRANCH_COST >= 0
9726 && ! only_cheap && (code == NE || code == EQ)
9727 && TREE_CODE (type) != REAL_TYPE
9728 && ((abs_optab->handlers[(int) operand_mode].insn_code
9729 != CODE_FOR_nothing)
9730 || (ffs_optab->handlers[(int) operand_mode].insn_code
9731 != CODE_FOR_nothing)))
9733 else
9734 return 0;
9737 if (! get_subtarget (target)
9738 || GET_MODE (subtarget) != operand_mode)
9739 subtarget = 0;
9741 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9743 if (target == 0)
9744 target = gen_reg_rtx (mode);
9746 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9747 because, if emit_store_flag does anything, it will succeed and
9748 OP0 and OP1 will not be used subsequently. */
9750 result = emit_store_flag (target, code,
9751 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9752 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9753 operand_mode, unsignedp, 1);
9755 if (result)
9757 if (invert)
9758 result = expand_binop (mode, xor_optab, result, const1_rtx,
9759 result, 0, OPTAB_LIB_WIDEN);
9760 return result;
9763 /* If this failed, we have to do this with set/compare/jump/set code. */
9764 if (!REG_P (target)
9765 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9766 target = gen_reg_rtx (GET_MODE (target));
9768 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9769 result = compare_from_rtx (op0, op1, code, unsignedp,
9770 operand_mode, NULL_RTX);
9771 if (GET_CODE (result) == CONST_INT)
9772 return (((result == const0_rtx && ! invert)
9773 || (result != const0_rtx && invert))
9774 ? const0_rtx : const1_rtx);
9776 /* The code of RESULT may not match CODE if compare_from_rtx
9777 decided to swap its operands and reverse the original code.
9779 We know that compare_from_rtx returns either a CONST_INT or
9780 a new comparison code, so it is safe to just extract the
9781 code from RESULT. */
9782 code = GET_CODE (result);
9784 label = gen_label_rtx ();
9785 if (bcc_gen_fctn[(int) code] == 0)
9786 abort ();
9788 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9789 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9790 emit_label (label);
9792 return target;
9796 /* Stubs in case we haven't got a casesi insn. */
9797 #ifndef HAVE_casesi
9798 # define HAVE_casesi 0
9799 # define gen_casesi(a, b, c, d, e) (0)
9800 # define CODE_FOR_casesi CODE_FOR_nothing
9801 #endif
9803 /* If the machine does not have a case insn that compares the bounds,
9804 this means extra overhead for dispatch tables, which raises the
9805 threshold for using them. */
9806 #ifndef CASE_VALUES_THRESHOLD
9807 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9808 #endif /* CASE_VALUES_THRESHOLD */
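/* For example, with a casesi pattern the threshold is 4 case values;
   without one it is 5.  */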
9810 unsigned int
9811 case_values_threshold (void)
9813 return CASE_VALUES_THRESHOLD;
9816 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9817 0 otherwise (i.e. if there is no casesi instruction). */
9818 int
9819 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9820 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9822 enum machine_mode index_mode = SImode;
9823 int index_bits = GET_MODE_BITSIZE (index_mode);
9824 rtx op1, op2, index;
9825 enum machine_mode op_mode;
9827 if (! HAVE_casesi)
9828 return 0;
9830 /* Convert the index to SImode. */
9831 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9833 enum machine_mode omode = TYPE_MODE (index_type);
9834 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9836 /* We must handle the endpoints in the original mode. */
9837 index_expr = build (MINUS_EXPR, index_type,
9838 index_expr, minval);
9839 minval = integer_zero_node;
9840 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9841 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9842 omode, 1, default_label);
9843 /* Now we can safely truncate. */
9844 index = convert_to_mode (index_mode, index, 0);
9846 else
9848 if (TYPE_MODE (index_type) != index_mode)
9850 index_expr = convert (lang_hooks.types.type_for_size
9851 (index_bits, 0), index_expr);
9852 index_type = TREE_TYPE (index_expr);
9855 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9857 emit_queue ();
9858 index = protect_from_queue (index, 0);
9859 do_pending_stack_adjust ();
9861 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9862 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9863 (index, op_mode))
9864 index = copy_to_mode_reg (op_mode, index);
9866 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9868 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9869 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9870 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9871 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9872 (op1, op_mode))
9873 op1 = copy_to_mode_reg (op_mode, op1);
9875 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9877 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9878 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9879 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9880 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9881 (op2, op_mode))
9882 op2 = copy_to_mode_reg (op_mode, op2);
9884 emit_jump_insn (gen_casesi (index, op1, op2,
9885 table_label, default_label));
9886 return 1;
9889 /* Attempt to generate a tablejump instruction; same concept. */
9890 #ifndef HAVE_tablejump
9891 #define HAVE_tablejump 0
9892 #define gen_tablejump(x, y) (0)
9893 #endif
9895 /* Subroutine of the next function.
9897 INDEX is the value being switched on, with the lowest value
9898 in the table already subtracted.
9899 MODE is its expected mode (needed if INDEX is constant).
9900 RANGE is the length of the jump table.
9901 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9903 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9904 index value is out of range. */
9906 static void
9907 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9908 rtx default_label)
9910 rtx temp, vector;
9912 if (INTVAL (range) > cfun->max_jumptable_ents)
9913 cfun->max_jumptable_ents = INTVAL (range);
9915 /* Do an unsigned comparison (in the proper mode) between the index
9916 expression and the value which represents the length of the range.
9917 Since we just finished subtracting the lower bound of the range
9918 from the index expression, this comparison allows us to simultaneously
9919 check that the original index expression value is both greater than
9920 or equal to the minimum value of the range and less than or equal to
9921 the maximum value of the range. */
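/* For example, for case values 5 through 12 the caller passes
   INDEX = x - 5 and RANGE = 7, and the single unsigned test
   (unsigned) (x - 5) > 7 rejects both x < 5 and x > 12.  */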
9923 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9924 default_label);
9926 /* If index is in range, it must fit in Pmode.
9927 Convert to Pmode so we can index with it. */
9928 if (mode != Pmode)
9929 index = convert_to_mode (Pmode, index, 1);
9931 /* Don't let a MEM slip through, because then the INDEX that comes
9932 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9933 and break_out_memory_refs will go to work on it and mess it up. */
9934 #ifdef PIC_CASE_VECTOR_ADDRESS
9935 if (flag_pic && !REG_P (index))
9936 index = copy_to_mode_reg (Pmode, index);
9937 #endif
9939 /* If flag_force_addr were to affect this address
9940 it could interfere with the tricky assumptions made
9941 about addresses that contain label-refs,
9942 which may be valid only very near the tablejump itself. */
9943 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9944 GET_MODE_SIZE, because this indicates how large insns are. The other
9945 uses should all be Pmode, because they are addresses. This code
9946 could fail if addresses and insns are not the same size. */
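/* For example, with 4-byte case-vector entries this forms the
   address TABLE_LABEL + INDEX * 4 of the selected entry.  */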
9947 index = gen_rtx_PLUS (Pmode,
9948 gen_rtx_MULT (Pmode, index,
9949 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9950 gen_rtx_LABEL_REF (Pmode, table_label));
9951 #ifdef PIC_CASE_VECTOR_ADDRESS
9952 if (flag_pic)
9953 index = PIC_CASE_VECTOR_ADDRESS (index);
9954 else
9955 #endif
9956 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9957 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9958 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9959 RTX_UNCHANGING_P (vector) = 1;
9960 MEM_NOTRAP_P (vector) = 1;
9961 convert_move (temp, vector, 0);
9963 emit_jump_insn (gen_tablejump (temp, table_label));
9965 /* If we are generating PIC code or if the table is PC-relative, the
9966 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9967 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9968 emit_barrier ();
9971 int
9972 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9973 rtx table_label, rtx default_label)
9975 rtx index;
9977 if (! HAVE_tablejump)
9978 return 0;
9980 index_expr = fold (build (MINUS_EXPR, index_type,
9981 convert (index_type, index_expr),
9982 convert (index_type, minval)));
9983 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9984 emit_queue ();
9985 index = protect_from_queue (index, 0);
9986 do_pending_stack_adjust ();
9988 do_tablejump (index, TYPE_MODE (index_type),
9989 convert_modes (TYPE_MODE (index_type),
9990 TYPE_MODE (TREE_TYPE (range)),
9991 expand_expr (range, NULL_RTX,
9992 VOIDmode, 0),
9993 TYPE_UNSIGNED (TREE_TYPE (range))),
9994 table_label, default_label);
9995 return 1;
9998 /* Nonzero if the mode is a valid vector mode for this architecture.
9999 This returns nonzero even if there is no hardware support for the
10000 vector mode, but we can emulate with narrower modes. */
10002 int
10003 vector_mode_valid_p (enum machine_mode mode)
10005 enum mode_class class = GET_MODE_CLASS (mode);
10006 enum machine_mode innermode;
10008 /* Doh! What's going on? */
10009 if (class != MODE_VECTOR_INT
10010 && class != MODE_VECTOR_FLOAT)
10011 return 0;
10013 /* Hardware support. Woo hoo! */
10014 if (VECTOR_MODE_SUPPORTED_P (mode))
10015 return 1;
10017 innermode = GET_MODE_INNER (mode);
10019 /* We should probably return 1 if requesting V4DI and we have no DI
10020 but do have V2DI; however, that case is probably very unlikely. */
10022 /* If we have support for the inner mode, we can safely emulate it.
10023 We may not have V2DI, but we can emulate with a pair of DIs. */
10024 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10027 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10028 static rtx
10029 const_vector_from_tree (tree exp)
10031 rtvec v;
10032 int units, i;
10033 tree link, elt;
10034 enum machine_mode inner, mode;
10036 mode = TYPE_MODE (TREE_TYPE (exp));
10038 if (initializer_zerop (exp))
10039 return CONST0_RTX (mode);
10041 units = GET_MODE_NUNITS (mode);
10042 inner = GET_MODE_INNER (mode);
10044 v = rtvec_alloc (units);
10046 link = TREE_VECTOR_CST_ELTS (exp);
10047 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10049 elt = TREE_VALUE (link);
10051 if (TREE_CODE (elt) == REAL_CST)
10052 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10053 inner);
10054 else
10055 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10056 TREE_INT_CST_HIGH (elt),
10057 inner);
10060 /* Initialize remaining elements to 0. */
10061 for (; i < units; ++i)
10062 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10064 return gen_rtx_raw_CONST_VECTOR (mode, v);
10066 #include "gt-expr.h"