[official-gcc.git] / gcc / expr.c (blob 7ee692ba5442a6ed267ffcf97427df99b2fd0e8d)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
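/* Editor's illustration (not part of the original file): with
   STACK_GROWS_DOWNWARD defined, STACK_PUSH_CODE is PRE_DEC, so a push of
   X conceptually expands to

       *(--stack_pointer) = X;     that is, (set (mem (pre_dec sp)) X)

   whereas on an upward-growing stack it becomes *(stack_pointer++) = X.
   PUSH_ARGS_REVERSED is defined exactly when the stack and the argument
   area grow in opposite directions; e.g. a downward-growing stack with an
   upward-growing argument area means the last argument must be pushed
   first.  */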
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, int);
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
180 #endif
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
188 #endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
197 #endif
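/* Editor's worked example (illustrative values, not taken from any
   particular target): suppose MOVE_MAX_PIECES is 8 and MOVE_RATIO is 15.
   For a 32-byte, well-aligned copy, move_by_pieces_ninsns returns
   32 / 8 = 4, and 4 < 15, so MOVE_BY_PIECES_P is true and the copy is
   open-coded as four word-sized moves.  For a 4096-byte copy the count is
   512, which is not below the ratio, so a movmem pattern or a memcpy
   libcall is preferred instead.  CLEAR_BY_PIECES_P and STORE_BY_PIECES_P
   apply the same counting heuristic against CLEAR_RATIO and MOVE_RATIO
   respectively.  */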
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
202 /* This array records the insn_code of insns to perform block clears. */
203 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
205 /* These arrays record the insn_code of two different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
212 #ifndef SLOW_UNALIGNED_ACCESS
213 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
214 #endif
216 /* This is run once per compilation to set up which modes can be used
217 directly in memory and to initialize the block move optab. */
219 void
220 init_expr_once (void)
222 rtx insn, pat;
223 enum machine_mode mode;
224 int num_clobbers;
225 rtx mem, mem1;
226 rtx reg;
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 /* A scratch register we can modify in-place below to avoid
235 useless RTL allocations. */
236 reg = gen_rtx_REG (VOIDmode, -1);
238 insn = rtx_alloc (INSN);
239 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
240 PATTERN (insn) = pat;
242 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
243 mode = (enum machine_mode) ((int) mode + 1))
245 int regno;
247 direct_load[(int) mode] = direct_store[(int) mode] = 0;
248 PUT_MODE (mem, mode);
249 PUT_MODE (mem1, mode);
250 PUT_MODE (reg, mode);
252 /* See if there is some register that can be used in this mode and
253 directly loaded or stored from memory. */
255 if (mode != VOIDmode && mode != BLKmode)
256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
257 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
258 regno++)
260 if (! HARD_REGNO_MODE_OK (regno, mode))
261 continue;
263 REGNO (reg) = regno;
265 SET_SRC (pat) = mem;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = mem1;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem1;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
287 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
290 mode = GET_MODE_WIDER_MODE (mode))
292 enum machine_mode srcmode;
293 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
294 srcmode = GET_MODE_WIDER_MODE (srcmode))
296 enum insn_code ic;
298 ic = can_extend_p (mode, srcmode, 0);
299 if (ic == CODE_FOR_nothing)
300 continue;
302 PUT_MODE (mem, srcmode);
304 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
305 float_extend_from_mem[mode][srcmode] = true;
310 /* This is run at the start of compiling a function. */
312 void
313 init_expr (void)
315 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
318 /* Copy data from FROM to TO, where the machine modes are not the same.
319 Both modes may be integer, or both may be floating.
320 UNSIGNEDP should be nonzero if FROM is an unsigned type.
321 This causes zero-extension instead of sign-extension. */
323 void
324 convert_move (rtx to, rtx from, int unsignedp)
326 enum machine_mode to_mode = GET_MODE (to);
327 enum machine_mode from_mode = GET_MODE (from);
328 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
329 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
330 enum insn_code code;
331 rtx libcall;
333 /* rtx code for making an equivalent value. */
334 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
335 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
338 gcc_assert (to_real == from_real);
340 /* If the source and destination are already the same, then there's
341 nothing to do. */
342 if (to == from)
343 return;
345 /* If FROM is a SUBREG that indicates that we have already done at least
346 the required extension, strip it. We don't handle such SUBREGs as
347 TO here. */
349 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
350 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
351 >= GET_MODE_SIZE (to_mode))
352 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
353 from = gen_lowpart (to_mode, from), from_mode = to_mode;
355 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
357 if (to_mode == from_mode
358 || (from_mode == VOIDmode && CONSTANT_P (from)))
360 emit_move_insn (to, from);
361 return;
364 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
366 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
368 if (VECTOR_MODE_P (to_mode))
369 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
370 else
371 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
373 emit_move_insn (to, from);
374 return;
377 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
379 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
380 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
381 return;
384 if (to_real)
386 rtx value, insns;
387 convert_optab tab;
389 gcc_assert (GET_MODE_PRECISION (from_mode)
390 != GET_MODE_PRECISION (to_mode));
392 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 tab = sext_optab;
394 else
395 tab = trunc_optab;
397 /* Try converting directly if the insn is supported. */
399 code = tab->handlers[to_mode][from_mode].insn_code;
400 if (code != CODE_FOR_nothing)
402 emit_unop_insn (code, to, from,
403 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
404 return;
407 /* Otherwise use a libcall. */
408 libcall = tab->handlers[to_mode][from_mode].libfunc;
410 /* Is this conversion implemented yet? */
411 gcc_assert (libcall);
413 start_sequence ();
414 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
415 1, from, from_mode);
416 insns = get_insns ();
417 end_sequence ();
418 emit_libcall_block (insns, to, value,
419 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
420 from)
421 : gen_rtx_FLOAT_EXTEND (to_mode, from));
422 return;
425 /* Handle pointer conversion. */ /* SPEE 900220. */
426 /* Targets are expected to provide conversion insns between PxImode and
427 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
428 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
433 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
434 != CODE_FOR_nothing);
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
439 to, from, UNKNOWN);
440 return;
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
444 enum machine_mode full_mode
445 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
447 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
448 != CODE_FOR_nothing);
450 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
451 to, from, UNKNOWN);
452 if (to_mode == full_mode)
453 return;
455 /* else proceed to integer conversions below. */
456 from_mode = full_mode;
459 /* Now both modes are integers. */
461 /* Handle expanding beyond a word. */
462 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
463 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
465 rtx insns;
466 rtx lowpart;
467 rtx fill_value;
468 rtx lowfrom;
469 int i;
470 enum machine_mode lowpart_mode;
471 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
473 /* Try converting directly if the insn is supported. */
474 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
475 != CODE_FOR_nothing)
477 /* If FROM is a SUBREG, put it into a register. Do this
478 so that we always generate the same set of insns for
479 better cse'ing; if an intermediate assignment occurred,
480 we won't be doing the operation directly on the SUBREG. */
481 if (optimize > 0 && GET_CODE (from) == SUBREG)
482 from = force_reg (from_mode, from);
483 emit_unop_insn (code, to, from, equiv_code);
484 return;
486 /* Next, try converting via full word. */
487 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
488 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
489 != CODE_FOR_nothing))
491 if (REG_P (to))
493 if (reg_overlap_mentioned_p (to, from))
494 from = force_reg (from_mode, from);
495 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
497 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
498 emit_unop_insn (code, to,
499 gen_lowpart (word_mode, to), equiv_code);
500 return;
503 /* No special multiword conversion insn; do it by hand. */
504 start_sequence ();
506 /* Since we will turn this into a no conflict block, we must ensure
507 that the source does not overlap the target. */
509 if (reg_overlap_mentioned_p (to, from))
510 from = force_reg (from_mode, from);
512 /* Get a copy of FROM widened to a word, if necessary. */
513 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
514 lowpart_mode = word_mode;
515 else
516 lowpart_mode = from_mode;
518 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
520 lowpart = gen_lowpart (lowpart_mode, to);
521 emit_move_insn (lowpart, lowfrom);
523 /* Compute the value to put in each remaining word. */
524 if (unsignedp)
525 fill_value = const0_rtx;
526 else
528 #ifdef HAVE_slt
529 if (HAVE_slt
530 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
531 && STORE_FLAG_VALUE == -1)
533 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
534 lowpart_mode, 0);
535 fill_value = gen_reg_rtx (word_mode);
536 emit_insn (gen_slt (fill_value));
538 else
539 #endif
541 fill_value
542 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
543 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
544 NULL_RTX, 0);
545 fill_value = convert_to_mode (word_mode, fill_value, 1);
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
555 gcc_assert (subword);
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
561 insns = get_insns ();
562 end_sequence ();
564 emit_no_conflict_block (insns, to, from, NULL_RTX,
565 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
566 return;
569 /* Truncating multi-word to a word or less. */
570 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
571 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
573 if (!((MEM_P (from)
574 && ! MEM_VOLATILE_P (from)
575 && direct_load[(int) to_mode]
576 && ! mode_dependent_address_p (XEXP (from, 0)))
577 || REG_P (from)
578 || GET_CODE (from) == SUBREG))
579 from = force_reg (from_mode, from);
580 convert_move (to, gen_lowpart (word_mode, from), 0);
581 return;
584 /* Now follow all the conversions between integers
585 no more than a word long. */
587 /* For truncation, usually we can just refer to FROM in a narrower mode. */
588 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
589 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
590 GET_MODE_BITSIZE (from_mode)))
592 if (!((MEM_P (from)
593 && ! MEM_VOLATILE_P (from)
594 && direct_load[(int) to_mode]
595 && ! mode_dependent_address_p (XEXP (from, 0)))
596 || REG_P (from)
597 || GET_CODE (from) == SUBREG))
598 from = force_reg (from_mode, from);
599 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
600 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
601 from = copy_to_reg (from);
602 emit_move_insn (to, gen_lowpart (to_mode, from));
603 return;
606 /* Handle extension. */
607 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
609 /* Convert directly if that works. */
610 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
611 != CODE_FOR_nothing)
613 if (flag_force_mem)
614 from = force_not_mem (from);
616 emit_unop_insn (code, to, from, equiv_code);
617 return;
619 else
621 enum machine_mode intermediate;
622 rtx tmp;
623 tree shift_amount;
625 /* Search for a mode to convert via. */
626 for (intermediate = from_mode; intermediate != VOIDmode;
627 intermediate = GET_MODE_WIDER_MODE (intermediate))
628 if (((can_extend_p (to_mode, intermediate, unsignedp)
629 != CODE_FOR_nothing)
630 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
631 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
632 GET_MODE_BITSIZE (intermediate))))
633 && (can_extend_p (intermediate, from_mode, unsignedp)
634 != CODE_FOR_nothing))
636 convert_move (to, convert_to_mode (intermediate, from,
637 unsignedp), unsignedp);
638 return;
641 /* No suitable intermediate mode.
642 Generate what we need with shifts. */
643 shift_amount = build_int_cst (NULL_TREE,
644 GET_MODE_BITSIZE (to_mode)
645 - GET_MODE_BITSIZE (from_mode));
646 from = gen_lowpart (to_mode, force_reg (from_mode, from));
647 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
648 to, unsignedp);
649 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
650 to, unsignedp);
651 if (tmp != to)
652 emit_move_insn (to, tmp);
653 return;
657 /* Support special truncate insns for certain modes. */
658 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
660 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
661 to, from, UNKNOWN);
662 return;
665 /* Handle truncation of volatile memrefs, and so on;
666 the things that couldn't be truncated directly,
667 and for which there was no special instruction.
669 ??? Code above formerly short-circuited this, for most integer
670 mode pairs, with a force_reg in from_mode followed by a recursive
671 call to this routine. Appears always to have been wrong. */
672 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
674 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
675 emit_move_insn (to, temp);
676 return;
679 /* Mode combination is not recognized. */
680 gcc_unreachable ();
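/* Editor's illustrative caller (hypothetical and compiled out; the
   function name below is made up for the example):  */
#if 0
static void
illustrate_convert_move (void)
{
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);

  /* Widen SRC into DST with zero extension; convert_move picks a direct
     extend pattern, a word-by-word expansion, or a libcall, depending on
     what the target provides.  */
  convert_move (dst, src, 1);
}
#endif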
683 /* Return an rtx for a value that would result
684 from converting X to mode MODE.
685 Both X and MODE may be floating, or both integer.
686 UNSIGNEDP is nonzero if X is an unsigned value.
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion. */
691 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
693 return convert_modes (mode, VOIDmode, x, unsignedp);
696 /* Return an rtx for a value that would result
697 from converting X from mode OLDMODE to mode MODE.
698 Both modes may be floating, or both integer.
699 UNSIGNEDP is nonzero if X is an unsigned value.
701 This can be done by referring to a part of X in place
702 or by copying to a new temporary with conversion.
704 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
707 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
709 rtx temp;
711 /* If FROM is a SUBREG that indicates that we have already done at least
712 the required extension, strip it. */
714 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
715 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
716 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
717 x = gen_lowpart (mode, x);
719 if (GET_MODE (x) != VOIDmode)
720 oldmode = GET_MODE (x);
722 if (mode == oldmode)
723 return x;
725 /* There is one case that we must handle specially: If we are converting
726 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
727 we are to interpret the constant as unsigned, gen_lowpart will do
728 the wrong thing if the constant appears negative. What we want to do is
729 make the high-order word of the constant zero, not all ones. */
731 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
732 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
733 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
735 HOST_WIDE_INT val = INTVAL (x);
737 if (oldmode != VOIDmode
738 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
740 int width = GET_MODE_BITSIZE (oldmode);
742 /* We need to zero extend VAL. */
743 val &= ((HOST_WIDE_INT) 1 << width) - 1;
746 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
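      /* Editor's worked example (illustrative): with a 64-bit
	 HOST_WIDE_INT, converting the QImode constant -1 to an unsigned
	 TImode value takes this path: VAL is -1 and the old width is 8,
	 so VAL &= 0xff yields 255 and immed_double_const (255, 0, TImode)
	 is returned.  A plain gen_lowpart would instead have produced an
	 all-ones high word.  */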
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
754 if ((GET_CODE (x) == CONST_INT
755 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
756 || (GET_MODE_CLASS (mode) == MODE_INT
757 && GET_MODE_CLASS (oldmode) == MODE_INT
758 && (GET_CODE (x) == CONST_DOUBLE
759 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
760 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 && direct_load[(int) mode])
762 || (REG_P (x)
763 && (! HARD_REGISTER_P (x)
764 || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
766 GET_MODE_BITSIZE (GET_MODE (x)))))))))
768 /* ?? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
772 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
774 HOST_WIDE_INT val = INTVAL (x);
775 int width = GET_MODE_BITSIZE (oldmode);
777 /* We must sign or zero-extend in this case. Start by
778 zero-extending, then sign extend if we need to. */
779 val &= ((HOST_WIDE_INT) 1 << width) - 1;
780 if (! unsignedp
781 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
782 val |= (HOST_WIDE_INT) (-1) << width;
784 return gen_int_mode (val, mode);
787 return gen_lowpart (mode, x);
 790   /* Converting from an integer constant into a vector mode is always equivalent
 791      to a subreg operation.  */
792 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
794 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
795 return simplify_gen_subreg (mode, x, oldmode, 0);
798 temp = gen_reg_rtx (mode);
799 convert_move (temp, x, unsignedp);
800 return temp;
803 /* STORE_MAX_PIECES is the number of bytes at a time that we can
804 store efficiently. Due to internal GCC limitations, this is
805 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
806 for an immediate constant. */
808 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
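/* Editor's note (illustrative arithmetic): on a host with a 64-bit
   HOST_WIDE_INT and a target where MOVE_MAX_PIECES is 8, this is
   MIN (8, 16) == 8, so store_by_pieces never tries to build an immediate
   constant wider than the pieces the move machinery handles.  */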
810 /* Determine whether the LEN bytes can be moved by using several move
811 instructions. Return nonzero if a call to move_by_pieces should
812 succeed. */
815 can_move_by_pieces (unsigned HOST_WIDE_INT len,
816 unsigned int align ATTRIBUTE_UNUSED)
818 return MOVE_BY_PIECES_P (len, align);
821 /* Generate several move instructions to copy LEN bytes from block FROM to
822 block TO. (These are MEM rtx's with BLKmode).
824 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
825 used to push FROM to the stack.
827 ALIGN is maximum stack alignment we can assume.
 829    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
 830    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
 831    stpcpy.  */
834 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
835 unsigned int align, int endp)
837 struct move_by_pieces data;
838 rtx to_addr, from_addr = XEXP (from, 0);
839 unsigned int max_size = MOVE_MAX_PIECES + 1;
840 enum machine_mode mode = VOIDmode, tmode;
841 enum insn_code icode;
843 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
845 data.offset = 0;
846 data.from_addr = from_addr;
847 if (to)
849 to_addr = XEXP (to, 0);
850 data.to = to;
851 data.autinc_to
852 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
853 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
854 data.reverse
855 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
857 else
859 to_addr = NULL_RTX;
860 data.to = NULL_RTX;
861 data.autinc_to = 1;
862 #ifdef STACK_GROWS_DOWNWARD
863 data.reverse = 1;
864 #else
865 data.reverse = 0;
866 #endif
868 data.to_addr = to_addr;
869 data.from = from;
870 data.autinc_from
871 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
872 || GET_CODE (from_addr) == POST_INC
873 || GET_CODE (from_addr) == POST_DEC);
875 data.explicit_inc_from = 0;
876 data.explicit_inc_to = 0;
877 if (data.reverse) data.offset = len;
878 data.len = len;
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data.autinc_from && data.autinc_to)
884 && move_by_pieces_ninsns (len, align, max_size) > 2)
886 /* Find the mode of the largest move... */
887 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
888 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
889 if (GET_MODE_SIZE (tmode) < max_size)
890 mode = tmode;
892 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
894 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
895 data.autinc_from = 1;
896 data.explicit_inc_from = -1;
898 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
900 data.from_addr = copy_addr_to_reg (from_addr);
901 data.autinc_from = 1;
902 data.explicit_inc_from = 1;
904 if (!data.autinc_from && CONSTANT_P (from_addr))
905 data.from_addr = copy_addr_to_reg (from_addr);
906 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
908 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
909 data.autinc_to = 1;
910 data.explicit_inc_to = -1;
912 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
914 data.to_addr = copy_addr_to_reg (to_addr);
915 data.autinc_to = 1;
916 data.explicit_inc_to = 1;
918 if (!data.autinc_to && CONSTANT_P (to_addr))
919 data.to_addr = copy_addr_to_reg (to_addr);
922 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
923 if (align >= GET_MODE_ALIGNMENT (tmode))
924 align = GET_MODE_ALIGNMENT (tmode);
925 else
927 enum machine_mode xmode;
929 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
930 tmode != VOIDmode;
931 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
932 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
933 || SLOW_UNALIGNED_ACCESS (tmode, align))
934 break;
936 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
939 /* First move what we can in the largest integer mode, then go to
940 successively smaller modes. */
942 while (max_size > 1)
944 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
945 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
946 if (GET_MODE_SIZE (tmode) < max_size)
947 mode = tmode;
949 if (mode == VOIDmode)
950 break;
952 icode = mov_optab->handlers[(int) mode].insn_code;
953 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
954 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
956 max_size = GET_MODE_SIZE (mode);
959 /* The code above should have handled everything. */
960 gcc_assert (!data.len);
962 if (endp)
964 rtx to1;
966 gcc_assert (!data.reverse);
967 if (data.autinc_to)
969 if (endp == 2)
971 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
972 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
973 else
974 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
975 -1));
977 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
978 data.offset);
980 else
982 if (endp == 2)
983 --data.offset;
984 to1 = adjust_address (data.to, QImode, data.offset);
986 return to1;
988 else
989 return data.to;
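/* Editor's worked example for the ENDP convention above (illustrative):
   for a 5-byte copy into block X, ENDP == 0 returns X itself, ENDP == 1
   returns a QImode MEM addressing X + 5 (the mempcpy convention), and
   ENDP == 2 returns one addressing X + 4 (the stpcpy convention), which
   is why the offset or the auto-incremented address is backed up by one
   byte in that case.  */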
992 /* Return number of insns required to move L bytes by pieces.
993 ALIGN (in bits) is maximum alignment we can assume. */
995 static unsigned HOST_WIDE_INT
996 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
997 unsigned int max_size)
999 unsigned HOST_WIDE_INT n_insns = 0;
1000 enum machine_mode tmode;
1002 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1003 if (align >= GET_MODE_ALIGNMENT (tmode))
1004 align = GET_MODE_ALIGNMENT (tmode);
1005 else
1007 enum machine_mode tmode, xmode;
1009 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1010 tmode != VOIDmode;
1011 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1012 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1013 || SLOW_UNALIGNED_ACCESS (tmode, align))
1014 break;
1016 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1019 while (max_size > 1)
1021 enum machine_mode mode = VOIDmode;
1022 enum insn_code icode;
1024 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1025 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1026 if (GET_MODE_SIZE (tmode) < max_size)
1027 mode = tmode;
1029 if (mode == VOIDmode)
1030 break;
1032 icode = mov_optab->handlers[(int) mode].insn_code;
1033 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1034 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1036 max_size = GET_MODE_SIZE (mode);
1039 gcc_assert (!l);
1040 return n_insns;
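/* Editor's worked example (illustrative, assuming MOVE_MAX_PIECES == 8,
   MAX_SIZE == 9 and alignment good enough for every mode): for L == 11
   the loop first uses DImode, 11 / 8 = 1 insn with 3 bytes left; SImode
   and HImode then contribute 3 / 4 = 0 and 3 / 2 = 1 insns with 1 byte
   left; QImode adds the final insn, so the routine returns 3.  */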
1043 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1044 with move instructions for mode MODE. GENFUN is the gen_... function
1045 to make a move insn for that mode. DATA has all the other info. */
1047 static void
1048 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1049 struct move_by_pieces *data)
1051 unsigned int size = GET_MODE_SIZE (mode);
1052 rtx to1 = NULL_RTX, from1;
1054 while (data->len >= size)
1056 if (data->reverse)
1057 data->offset -= size;
1059 if (data->to)
1061 if (data->autinc_to)
1062 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1063 data->offset);
1064 else
1065 to1 = adjust_address (data->to, mode, data->offset);
1068 if (data->autinc_from)
1069 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1070 data->offset);
1071 else
1072 from1 = adjust_address (data->from, mode, data->offset);
1074 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1075 emit_insn (gen_add2_insn (data->to_addr,
1076 GEN_INT (-(HOST_WIDE_INT)size)));
1077 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1078 emit_insn (gen_add2_insn (data->from_addr,
1079 GEN_INT (-(HOST_WIDE_INT)size)));
1081 if (data->to)
1082 emit_insn ((*genfun) (to1, from1));
1083 else
1085 #ifdef PUSH_ROUNDING
1086 emit_single_push_insn (mode, from1, NULL);
1087 #else
1088 gcc_unreachable ();
1089 #endif
1092 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1093 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1094 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1095 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1097 if (! data->reverse)
1098 data->offset += size;
1100 data->len -= size;
1104 /* Emit code to move a block Y to a block X. This may be done with
1105 string-move instructions, with multiple scalar move instructions,
1106 or with a library call.
1108 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1109 SIZE is an rtx that says how long they are.
1110 ALIGN is the maximum alignment we can assume they have.
1111 METHOD describes what kind of copy this is, and what mechanisms may be used.
1113 Return the address of the new block, if memcpy is called and returns it,
1114 0 otherwise. */
1117 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1119 bool may_use_call;
1120 rtx retval = 0;
1121 unsigned int align;
1123 switch (method)
1125 case BLOCK_OP_NORMAL:
1126 may_use_call = true;
1127 break;
1129 case BLOCK_OP_CALL_PARM:
1130 may_use_call = block_move_libcall_safe_for_call_parm ();
1132 /* Make inhibit_defer_pop nonzero around the library call
1133 to force it to pop the arguments right away. */
1134 NO_DEFER_POP;
1135 break;
1137 case BLOCK_OP_NO_LIBCALL:
1138 may_use_call = false;
1139 break;
1141 default:
1142 gcc_unreachable ();
1145 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1147 gcc_assert (MEM_P (x));
1148 gcc_assert (MEM_P (y));
1149 gcc_assert (size);
1151 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1152 block copy is more efficient for other large modes, e.g. DCmode. */
1153 x = adjust_address (x, BLKmode, 0);
1154 y = adjust_address (y, BLKmode, 0);
1156 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1157 can be incorrect is coming from __builtin_memcpy. */
1158 if (GET_CODE (size) == CONST_INT)
1160 if (INTVAL (size) == 0)
1161 return 0;
1163 x = shallow_copy_rtx (x);
1164 y = shallow_copy_rtx (y);
1165 set_mem_size (x, size);
1166 set_mem_size (y, size);
1169 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1170 move_by_pieces (x, y, INTVAL (size), align, 0);
1171 else if (emit_block_move_via_movmem (x, y, size, align))
1173 else if (may_use_call)
1174 retval = emit_block_move_via_libcall (x, y, size);
1175 else
1176 emit_block_move_via_loop (x, y, size, align);
1178 if (method == BLOCK_OP_CALL_PARM)
1179 OK_DEFER_POP;
1181 return retval;
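/* Editor's illustrative caller (hypothetical and compiled out; the
   function name below is made up for the example):  */
#if 0
static void
illustrate_emit_block_move (rtx dst_mem, rtx src_mem)
{
  /* Copy 64 bytes; emit_block_move chooses move_by_pieces, a movmem
     pattern, a memcpy libcall, or a byte-at-a-time loop, and returns the
     memcpy return value only when the libcall is actually emitted.  */
  rtx ret = emit_block_move (dst_mem, src_mem, GEN_INT (64),
                             BLOCK_OP_NORMAL);
  (void) ret;
}
#endif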
1184 /* A subroutine of emit_block_move. Returns true if calling the
1185 block move libcall will not clobber any parameters which may have
1186 already been placed on the stack. */
1188 static bool
1189 block_move_libcall_safe_for_call_parm (void)
1191 /* If arguments are pushed on the stack, then they're safe. */
1192 if (PUSH_ARGS)
1193 return true;
1195 /* If registers go on the stack anyway, any argument is sure to clobber
1196 an outgoing argument. */
1197 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1199 tree fn = emit_block_move_libcall_fn (false);
1200 (void) fn;
1201 if (REG_PARM_STACK_SPACE (fn) != 0)
1202 return false;
1204 #endif
1206 /* If any argument goes in memory, then it might clobber an outgoing
1207 argument. */
1209 CUMULATIVE_ARGS args_so_far;
1210 tree fn, arg;
1212 fn = emit_block_move_libcall_fn (false);
1213 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1215 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1216 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1218 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1219 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1220 if (!tmp || !REG_P (tmp))
1221 return false;
1222 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1223 NULL_TREE, 1))
1224 return false;
1225 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1228 return true;
1231 /* A subroutine of emit_block_move. Expand a movmem pattern;
1232 return true if successful. */
1234 static bool
1235 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1237 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1238 int save_volatile_ok = volatile_ok;
1239 enum machine_mode mode;
1241 /* Since this is a move insn, we don't care about volatility. */
1242 volatile_ok = 1;
1244 /* Try the most limited insn first, because there's no point
1245 including more than one in the machine description unless
1246 the more limited one has some advantage. */
1248 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1249 mode = GET_MODE_WIDER_MODE (mode))
1251 enum insn_code code = movmem_optab[(int) mode];
1252 insn_operand_predicate_fn pred;
1254 if (code != CODE_FOR_nothing
1255 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1256 here because if SIZE is less than the mode mask, as it is
1257 returned by the macro, it will definitely be less than the
1258 actual mode mask. */
1259 && ((GET_CODE (size) == CONST_INT
1260 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1261 <= (GET_MODE_MASK (mode) >> 1)))
1262 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1263 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1264 || (*pred) (x, BLKmode))
1265 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1266 || (*pred) (y, BLKmode))
1267 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1268 || (*pred) (opalign, VOIDmode)))
1270 rtx op2;
1271 rtx last = get_last_insn ();
1272 rtx pat;
1274 op2 = convert_to_mode (mode, size, 1);
1275 pred = insn_data[(int) code].operand[2].predicate;
1276 if (pred != 0 && ! (*pred) (op2, mode))
1277 op2 = copy_to_mode_reg (mode, op2);
1279 /* ??? When called via emit_block_move_for_call, it'd be
1280 nice if there were some way to inform the backend, so
1281 that it doesn't fail the expansion because it thinks
1282 emitting the libcall would be more efficient. */
1284 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1285 if (pat)
1287 emit_insn (pat);
1288 volatile_ok = save_volatile_ok;
1289 return true;
1291 else
1292 delete_insns_since (last);
1296 volatile_ok = save_volatile_ok;
1297 return false;
1300 /* A subroutine of emit_block_move. Expand a call to memcpy.
1301 Return the return value from memcpy, 0 otherwise. */
1303 static rtx
1304 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1306 rtx dst_addr, src_addr;
1307 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1308 enum machine_mode size_mode;
1309 rtx retval;
1311 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1312 pseudos. We can then place those new pseudos into a VAR_DECL and
1313 use them later. */
1315 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1316 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1318 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1319 src_addr = convert_memory_address (ptr_mode, src_addr);
1321 dst_tree = make_tree (ptr_type_node, dst_addr);
1322 src_tree = make_tree (ptr_type_node, src_addr);
1324 size_mode = TYPE_MODE (sizetype);
1326 size = convert_to_mode (size_mode, size, 1);
1327 size = copy_to_mode_reg (size_mode, size);
1329 /* It is incorrect to use the libcall calling conventions to call
1330 memcpy in this context. This could be a user call to memcpy and
1331 the user may wish to examine the return value from memcpy. For
1332 targets where libcalls and normal calls have different conventions
1333 for returning pointers, we could end up generating incorrect code. */
1335 size_tree = make_tree (sizetype, size);
1337 fn = emit_block_move_libcall_fn (true);
1338 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1339 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1340 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1342 /* Now we have to build up the CALL_EXPR itself. */
1343 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1344 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1345 call_expr, arg_list, NULL_TREE);
1347 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1349 return retval;
1352 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1353 for the function we use for block copies. The first time FOR_CALL
1354 is true, we call assemble_external. */
1356 static GTY(()) tree block_move_fn;
1358 void
1359 init_block_move_fn (const char *asmspec)
1361 if (!block_move_fn)
1363 tree args, fn;
1365 fn = get_identifier ("memcpy");
1366 args = build_function_type_list (ptr_type_node, ptr_type_node,
1367 const_ptr_type_node, sizetype,
1368 NULL_TREE);
1370 fn = build_decl (FUNCTION_DECL, fn, args);
1371 DECL_EXTERNAL (fn) = 1;
1372 TREE_PUBLIC (fn) = 1;
1373 DECL_ARTIFICIAL (fn) = 1;
1374 TREE_NOTHROW (fn) = 1;
1376 block_move_fn = fn;
1379 if (asmspec)
1380 set_user_assembler_name (block_move_fn, asmspec);
1383 static tree
1384 emit_block_move_libcall_fn (int for_call)
1386 static bool emitted_extern;
1388 if (!block_move_fn)
1389 init_block_move_fn (NULL);
1391 if (for_call && !emitted_extern)
1393 emitted_extern = true;
1394 make_decl_rtl (block_move_fn);
1395 assemble_external (block_move_fn);
1398 return block_move_fn;
1401 /* A subroutine of emit_block_move. Copy the data via an explicit
1402 loop. This is used only when libcalls are forbidden. */
1403 /* ??? It'd be nice to copy in hunks larger than QImode. */
1405 static void
1406 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1407 unsigned int align ATTRIBUTE_UNUSED)
1409 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1410 enum machine_mode iter_mode;
1412 iter_mode = GET_MODE (size);
1413 if (iter_mode == VOIDmode)
1414 iter_mode = word_mode;
1416 top_label = gen_label_rtx ();
1417 cmp_label = gen_label_rtx ();
1418 iter = gen_reg_rtx (iter_mode);
1420 emit_move_insn (iter, const0_rtx);
1422 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1423 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1424 do_pending_stack_adjust ();
1426 emit_jump (cmp_label);
1427 emit_label (top_label);
1429 tmp = convert_modes (Pmode, iter_mode, iter, true);
1430 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1431 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1432 x = change_address (x, QImode, x_addr);
1433 y = change_address (y, QImode, y_addr);
1435 emit_move_insn (x, y);
1437 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1438 true, OPTAB_LIB_WIDEN);
1439 if (tmp != iter)
1440 emit_move_insn (iter, tmp);
1442 emit_label (cmp_label);
1444 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1445 true, top_label);
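/* Editor's sketch of the control flow emitted above (illustrative C-like
   pseudocode, not the generated RTL):

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;
 */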
1448 /* Copy all or part of a value X into registers starting at REGNO.
1449 The number of registers to be filled is NREGS. */
1451 void
1452 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1454 int i;
1455 #ifdef HAVE_load_multiple
1456 rtx pat;
1457 rtx last;
1458 #endif
1460 if (nregs == 0)
1461 return;
1463 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1464 x = validize_mem (force_const_mem (mode, x));
1466 /* See if the machine can do this with a load multiple insn. */
1467 #ifdef HAVE_load_multiple
1468 if (HAVE_load_multiple)
1470 last = get_last_insn ();
1471 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1472 GEN_INT (nregs));
1473 if (pat)
1475 emit_insn (pat);
1476 return;
1478 else
1479 delete_insns_since (last);
1481 #endif
1483 for (i = 0; i < nregs; i++)
1484 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1485 operand_subword_force (x, i, mode));
1488 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1489 The number of registers to be filled is NREGS. */
1491 void
1492 move_block_from_reg (int regno, rtx x, int nregs)
1494 int i;
1496 if (nregs == 0)
1497 return;
1499 /* See if the machine can do this with a store multiple insn. */
1500 #ifdef HAVE_store_multiple
1501 if (HAVE_store_multiple)
1503 rtx last = get_last_insn ();
1504 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1505 GEN_INT (nregs));
1506 if (pat)
1508 emit_insn (pat);
1509 return;
1511 else
1512 delete_insns_since (last);
1514 #endif
1516 for (i = 0; i < nregs; i++)
1518 rtx tem = operand_subword (x, i, 1, BLKmode);
1520 gcc_assert (tem);
1522 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1526 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1527 ORIG, where ORIG is a non-consecutive group of registers represented by
1528 a PARALLEL. The clone is identical to the original except in that the
1529 original set of registers is replaced by a new set of pseudo registers.
1530 The new set has the same modes as the original set. */
1533 gen_group_rtx (rtx orig)
1535 int i, length;
1536 rtx *tmps;
1538 gcc_assert (GET_CODE (orig) == PARALLEL);
1540 length = XVECLEN (orig, 0);
1541 tmps = alloca (sizeof (rtx) * length);
1543 /* Skip a NULL entry in first slot. */
1544 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1546 if (i)
1547 tmps[0] = 0;
1549 for (; i < length; i++)
1551 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1552 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1554 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1557 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1560 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1561 where DST is non-consecutive registers represented by a PARALLEL.
1562 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1563 if not known. */
1565 void
1566 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1568 rtx *tmps, src;
1569 int start, i;
1571 gcc_assert (GET_CODE (dst) == PARALLEL);
1573 /* Check for a NULL entry, used to indicate that the parameter goes
1574 both on the stack and in registers. */
1575 if (XEXP (XVECEXP (dst, 0, 0), 0))
1576 start = 0;
1577 else
1578 start = 1;
1580 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1582 /* Process the pieces. */
1583 for (i = start; i < XVECLEN (dst, 0); i++)
1585 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1586 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1587 unsigned int bytelen = GET_MODE_SIZE (mode);
1588 int shift = 0;
1590 /* Handle trailing fragments that run over the size of the struct. */
1591 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1593 /* Arrange to shift the fragment to where it belongs.
1594 extract_bit_field loads to the lsb of the reg. */
1595 if (
1596 #ifdef BLOCK_REG_PADDING
1597 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1598 == (BYTES_BIG_ENDIAN ? upward : downward)
1599 #else
1600 BYTES_BIG_ENDIAN
1601 #endif
1603 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1604 bytelen = ssize - bytepos;
1605 gcc_assert (bytelen > 0);
1608 /* If we won't be loading directly from memory, protect the real source
1609 from strange tricks we might play; but make sure that the source can
1610 be loaded directly into the destination. */
1611 src = orig_src;
1612 if (!MEM_P (orig_src)
1613 && (!CONSTANT_P (orig_src)
1614 || (GET_MODE (orig_src) != mode
1615 && GET_MODE (orig_src) != VOIDmode)))
1617 if (GET_MODE (orig_src) == VOIDmode)
1618 src = gen_reg_rtx (mode);
1619 else
1620 src = gen_reg_rtx (GET_MODE (orig_src));
1622 emit_move_insn (src, orig_src);
1625 /* Optimize the access just a bit. */
1626 if (MEM_P (src)
1627 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1628 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1629 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1630 && bytelen == GET_MODE_SIZE (mode))
1632 tmps[i] = gen_reg_rtx (mode);
1633 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1635 else if (GET_CODE (src) == CONCAT)
1637 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1638 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1640 if ((bytepos == 0 && bytelen == slen0)
1641 || (bytepos != 0 && bytepos + bytelen <= slen))
1643 /* The following assumes that the concatenated objects all
1644 have the same size. In this case, a simple calculation
1645 can be used to determine the object and the bit field
1646 to be extracted. */
1647 tmps[i] = XEXP (src, bytepos / slen0);
1648 if (! CONSTANT_P (tmps[i])
1649 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1650 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1651 (bytepos % slen0) * BITS_PER_UNIT,
1652 1, NULL_RTX, mode, mode);
1654 else
1656 rtx mem;
1658 gcc_assert (!bytepos);
1659 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1660 emit_move_insn (mem, src);
1661 tmps[i] = adjust_address (mem, mode, 0);
1664 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1665 SIMD register, which is currently broken. While we get GCC
1666 to emit proper RTL for these cases, let's dump to memory. */
1667 else if (VECTOR_MODE_P (GET_MODE (dst))
1668 && REG_P (src))
1670 int slen = GET_MODE_SIZE (GET_MODE (src));
1671 rtx mem;
1673 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1674 emit_move_insn (mem, src);
1675 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1677 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1678 && XVECLEN (dst, 0) > 1)
1679 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1680 else if (CONSTANT_P (src)
1681 || (REG_P (src) && GET_MODE (src) == mode))
1682 tmps[i] = src;
1683 else
1684 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1685 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1686 mode, mode);
1688 if (shift)
1689 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1690 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1693 /* Copy the extracted pieces into the proper (probable) hard regs. */
1694 for (i = start; i < XVECLEN (dst, 0); i++)
1695 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
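/* Editor's worked example for the trailing-fragment handling above
   (illustrative): when loading a 6-byte structure (SSIZE == 6) into a
   PARALLEL of two SImode registers, the second piece has BYTEPOS == 4 and
   BYTELEN == 4, which overruns the struct; BYTELEN is trimmed to 2 and,
   on a big-endian target, the fragment is shifted left by
   (4 - 2) * 8 == 16 bits so it lands in the expected part of the
   register.  */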
1698 /* Emit code to move a block SRC to block DST, where SRC and DST are
1699 non-consecutive groups of registers, each represented by a PARALLEL. */
1701 void
1702 emit_group_move (rtx dst, rtx src)
1704 int i;
1706 gcc_assert (GET_CODE (src) == PARALLEL
1707 && GET_CODE (dst) == PARALLEL
1708 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1710 /* Skip first entry if NULL. */
1711 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1712 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1713 XEXP (XVECEXP (src, 0, i), 0));
1716 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1717 where SRC is non-consecutive registers represented by a PARALLEL.
1718 SSIZE represents the total size of block ORIG_DST, or -1 if not
1719 known. */
1721 void
1722 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1724 rtx *tmps, dst;
1725 int start, i;
1727 gcc_assert (GET_CODE (src) == PARALLEL);
1729 /* Check for a NULL entry, used to indicate that the parameter goes
1730 both on the stack and in registers. */
1731 if (XEXP (XVECEXP (src, 0, 0), 0))
1732 start = 0;
1733 else
1734 start = 1;
1736 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1738 /* Copy the (probable) hard regs into pseudos. */
1739 for (i = start; i < XVECLEN (src, 0); i++)
1741 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1742 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1743 emit_move_insn (tmps[i], reg);
1746 /* If we won't be storing directly into memory, protect the real destination
1747 from strange tricks we might play. */
1748 dst = orig_dst;
1749 if (GET_CODE (dst) == PARALLEL)
1751 rtx temp;
1753 /* We can get a PARALLEL dst if there is a conditional expression in
1754 a return statement. In that case, the dst and src are the same,
1755 so no action is necessary. */
1756 if (rtx_equal_p (dst, src))
1757 return;
1759 /* It is unclear if we can ever reach here, but we may as well handle
1760 it. Allocate a temporary, and split this into a store/load to/from
1761 the temporary. */
1763 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1764 emit_group_store (temp, src, type, ssize);
1765 emit_group_load (dst, temp, type, ssize);
1766 return;
1768 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1770 dst = gen_reg_rtx (GET_MODE (orig_dst));
1771 /* Make life a bit easier for combine. */
1772 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1775 /* Process the pieces. */
1776 for (i = start; i < XVECLEN (src, 0); i++)
1778 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1779 enum machine_mode mode = GET_MODE (tmps[i]);
1780 unsigned int bytelen = GET_MODE_SIZE (mode);
1781 rtx dest = dst;
1783 /* Handle trailing fragments that run over the size of the struct. */
1784 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1786 /* store_bit_field always takes its value from the lsb.
1787 Move the fragment to the lsb if it's not already there. */
1788 if (
1789 #ifdef BLOCK_REG_PADDING
1790 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1791 == (BYTES_BIG_ENDIAN ? upward : downward)
1792 #else
1793 BYTES_BIG_ENDIAN
1794 #endif
1797 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1798 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1799 build_int_cst (NULL_TREE, shift),
1800 tmps[i], 0);
1802 bytelen = ssize - bytepos;
1805 if (GET_CODE (dst) == CONCAT)
1807 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1808 dest = XEXP (dst, 0);
1809 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1811 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1812 dest = XEXP (dst, 1);
1814 else
1816 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1817 dest = assign_stack_temp (GET_MODE (dest),
1818 GET_MODE_SIZE (GET_MODE (dest)), 0);
1819 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1820 tmps[i]);
1821 dst = dest;
1822 break;
1826 /* Optimize the access just a bit. */
1827 if (MEM_P (dest)
1828 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1829 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1830 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1831 && bytelen == GET_MODE_SIZE (mode))
1832 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1833 else
1834 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1835 mode, tmps[i]);
1838 /* Copy from the pseudo into the (probable) hard reg. */
1839 if (orig_dst != dst)
1840 emit_move_insn (orig_dst, dst);
1843 /* Generate code to copy a BLKmode object of TYPE out of a
1844 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1845 is null, a stack temporary is created. TGTBLK is returned.
1847 The purpose of this routine is to handle functions that return
1848 BLKmode structures in registers. Some machines (the PA for example)
1849 want to return all small structures in registers regardless of the
1850 structure's alignment. */
1853 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1855 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1856 rtx src = NULL, dst = NULL;
1857 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1858 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1860 if (tgtblk == 0)
1862 tgtblk = assign_temp (build_qualified_type (type,
1863 (TYPE_QUALS (type)
1864 | TYPE_QUAL_CONST)),
1865 0, 1, 1);
1866 preserve_temp_slots (tgtblk);
1869 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1870 into a new pseudo which is a full word. */
1872 if (GET_MODE (srcreg) != BLKmode
1873 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1874 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1876 /* If the structure doesn't take up a whole number of words, see whether
1877 SRCREG is padded on the left or on the right. If it's on the left,
1878 set PADDING_CORRECTION to the number of bits to skip.
1880 In most ABIs, the structure will be returned at the least significant
1881 end of the register, which translates to right padding on little-endian
1882 targets and left padding on big-endian targets. The opposite
1883 holds if the structure is returned at the most significant
1884 end of the register. */
1885 if (bytes % UNITS_PER_WORD != 0
1886 && (targetm.calls.return_in_msb (type)
1887 ? !BYTES_BIG_ENDIAN
1888 : BYTES_BIG_ENDIAN))
1889 padding_correction
1890 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
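/* A worked example (illustrative only): on a target with
   UNITS_PER_WORD == 4, BITS_PER_WORD == 32 and BITS_PER_UNIT == 8,
   a 6-byte structure padded on the left gives
     padding_correction = 32 - (6 % 4) * 8 = 16,
   i.e. the first 16 bits of the first source word are skipped.  */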
1892 Copy the structure BITSIZE bits at a time.
1894 We could probably emit more efficient code for machines which do not use
1895 strict alignment, but it doesn't seem worth the effort at the current
1896 time. */
1897 for (bitpos = 0, xbitpos = padding_correction;
1898 bitpos < bytes * BITS_PER_UNIT;
1899 bitpos += bitsize, xbitpos += bitsize)
1901 /* We need a new source operand each time xbitpos is on a
1902 word boundary and when xbitpos == padding_correction
1903 (the first time through). */
1904 if (xbitpos % BITS_PER_WORD == 0
1905 || xbitpos == padding_correction)
1906 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1907 GET_MODE (srcreg));
1909 /* We need a new destination operand each time bitpos is on
1910 a word boundary. */
1911 if (bitpos % BITS_PER_WORD == 0)
1912 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
1914 /* Use xbitpos for the source extraction (right justified) and
1915 bitpos for the destination store (left justified). */
1916 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1917 extract_bit_field (src, bitsize,
1918 xbitpos % BITS_PER_WORD, 1,
1919 NULL_RTX, word_mode, word_mode));
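/* Continuing the 6-byte example above with bitsize == 16 and
   padding_correction == 16 (a sketch, not an exhaustive trace):
   source word 0, bits 16-31 go to target word 0, bits 0-15;
   source word 1, bits 0-15 go to target word 0, bits 16-31;
   source word 1, bits 16-31 go to target word 1, bits 0-15.  */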
1922 return tgtblk;
1925 /* Add a USE expression for REG to the (possibly empty) list pointed
1926 to by CALL_FUSAGE. REG must denote a hard register. */
1928 void
1929 use_reg (rtx *call_fusage, rtx reg)
1931 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
1933 *call_fusage
1934 = gen_rtx_EXPR_LIST (VOIDmode,
1935 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1938 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1939 starting at REGNO. All of these registers must be hard registers. */
1941 void
1942 use_regs (rtx *call_fusage, int regno, int nregs)
1944 int i;
1946 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
1948 for (i = 0; i < nregs; i++)
1949 use_reg (call_fusage, regno_reg_rtx[regno + i]);
1952 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1953 PARALLEL REGS. This is for calls that pass values in multiple
1954 non-contiguous locations. The Irix 6 ABI has examples of this. */
1956 void
1957 use_group_regs (rtx *call_fusage, rtx regs)
1959 int i;
1961 for (i = 0; i < XVECLEN (regs, 0); i++)
1963 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1965 /* A NULL entry means the parameter goes both on the stack and in
1966 registers. This can also be a MEM for targets that pass values
1967 partially on the stack and partially in registers. */
1968 if (reg != 0 && REG_P (reg))
1969 use_reg (call_fusage, reg);
1974 /* Determine whether the LEN bytes generated by CONSTFUN can be
1975 stored to memory using several move instructions. CONSTFUNDATA is
1976 a pointer which will be passed as argument in every CONSTFUN call.
1977 ALIGN is maximum alignment we can assume. Return nonzero if a
1978 call to store_by_pieces should succeed. */
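/* A minimal sketch of a CONSTFUN (the name zero_cfn is made up for
   illustration; clear_by_pieces_1 further down in this file has
   exactly this shape):

     static rtx
     zero_cfn (void *data ATTRIBUTE_UNUSED,
               HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
               enum machine_mode mode ATTRIBUTE_UNUSED)
     {
       return const0_rtx;
     }

   A caller could then check `can_store_by_pieces (len, zero_cfn, NULL,
   align)' before committing to an actual store_by_pieces call.  */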
1981 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1982 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
1983 void *constfundata, unsigned int align)
1985 unsigned HOST_WIDE_INT l;
1986 unsigned int max_size;
1987 HOST_WIDE_INT offset = 0;
1988 enum machine_mode mode, tmode;
1989 enum insn_code icode;
1990 int reverse;
1991 rtx cst;
1993 if (len == 0)
1994 return 1;
1996 if (! STORE_BY_PIECES_P (len, align))
1997 return 0;
1999 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2000 if (align >= GET_MODE_ALIGNMENT (tmode))
2001 align = GET_MODE_ALIGNMENT (tmode);
2002 else
2004 enum machine_mode xmode;
2006 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2007 tmode != VOIDmode;
2008 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2009 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2010 || SLOW_UNALIGNED_ACCESS (tmode, align))
2011 break;
2013 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2016 /* We would first store what we can in the largest integer mode, then go to
2017 successively smaller modes. */
2019 for (reverse = 0;
2020 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2021 reverse++)
2023 l = len;
2024 mode = VOIDmode;
2025 max_size = STORE_MAX_PIECES + 1;
2026 while (max_size > 1)
2028 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2029 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2030 if (GET_MODE_SIZE (tmode) < max_size)
2031 mode = tmode;
2033 if (mode == VOIDmode)
2034 break;
2036 icode = mov_optab->handlers[(int) mode].insn_code;
2037 if (icode != CODE_FOR_nothing
2038 && align >= GET_MODE_ALIGNMENT (mode))
2040 unsigned int size = GET_MODE_SIZE (mode);
2042 while (l >= size)
2044 if (reverse)
2045 offset -= size;
2047 cst = (*constfun) (constfundata, offset, mode);
2048 if (!LEGITIMATE_CONSTANT_P (cst))
2049 return 0;
2051 if (!reverse)
2052 offset += size;
2054 l -= size;
2058 max_size = GET_MODE_SIZE (mode);
2061 /* The code above should have handled everything. */
2062 gcc_assert (!l);
2065 return 1;
2068 /* Generate several move instructions to store LEN bytes generated by
2069 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2070 pointer which will be passed as argument in every CONSTFUN call.
2071 ALIGN is maximum alignment we can assume.
2072 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2073 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2074 stpcpy. */
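/* For instance (illustrative only): storing 5 bytes at TO with
   ENDP == 0 returns TO itself; with ENDP == 1 the returned MEM
   addresses TO + 5, following the mempcpy convention; with ENDP == 2
   it addresses TO + 4, one byte before the end, like stpcpy.  */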
2077 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2078 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2079 void *constfundata, unsigned int align, int endp)
2081 struct store_by_pieces data;
2083 if (len == 0)
2085 gcc_assert (endp != 2);
2086 return to;
2089 gcc_assert (STORE_BY_PIECES_P (len, align));
2090 data.constfun = constfun;
2091 data.constfundata = constfundata;
2092 data.len = len;
2093 data.to = to;
2094 store_by_pieces_1 (&data, align);
2095 if (endp)
2097 rtx to1;
2099 gcc_assert (!data.reverse);
2100 if (data.autinc_to)
2102 if (endp == 2)
2104 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2105 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2106 else
2107 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2108 -1));
2110 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2111 data.offset);
2113 else
2115 if (endp == 2)
2116 --data.offset;
2117 to1 = adjust_address (data.to, QImode, data.offset);
2119 return to1;
2121 else
2122 return data.to;
2125 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2126 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2128 static void
2129 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2131 struct store_by_pieces data;
2133 if (len == 0)
2134 return;
2136 data.constfun = clear_by_pieces_1;
2137 data.constfundata = NULL;
2138 data.len = len;
2139 data.to = to;
2140 store_by_pieces_1 (&data, align);
2143 /* Callback routine for clear_by_pieces.
2144 Return const0_rtx unconditionally. */
2146 static rtx
2147 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2148 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2149 enum machine_mode mode ATTRIBUTE_UNUSED)
2151 return const0_rtx;
2154 /* Subroutine of clear_by_pieces and store_by_pieces.
2155 Generate several move instructions to store LEN bytes of block TO. (A MEM
2156 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2158 static void
2159 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2160 unsigned int align ATTRIBUTE_UNUSED)
2162 rtx to_addr = XEXP (data->to, 0);
2163 unsigned int max_size = STORE_MAX_PIECES + 1;
2164 enum machine_mode mode = VOIDmode, tmode;
2165 enum insn_code icode;
2167 data->offset = 0;
2168 data->to_addr = to_addr;
2169 data->autinc_to
2170 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2171 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2173 data->explicit_inc_to = 0;
2174 data->reverse
2175 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2176 if (data->reverse)
2177 data->offset = data->len;
2179 /* If storing requires more than two move insns,
2180 copy addresses to registers (to make displacements shorter)
2181 and use post-increment if available. */
2182 if (!data->autinc_to
2183 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2185 /* Determine the main mode we'll be using. */
2186 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2187 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2188 if (GET_MODE_SIZE (tmode) < max_size)
2189 mode = tmode;
2191 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2193 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2194 data->autinc_to = 1;
2195 data->explicit_inc_to = -1;
2198 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2199 && ! data->autinc_to)
2201 data->to_addr = copy_addr_to_reg (to_addr);
2202 data->autinc_to = 1;
2203 data->explicit_inc_to = 1;
2206 if ( !data->autinc_to && CONSTANT_P (to_addr))
2207 data->to_addr = copy_addr_to_reg (to_addr);
2210 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2211 if (align >= GET_MODE_ALIGNMENT (tmode))
2212 align = GET_MODE_ALIGNMENT (tmode);
2213 else
2215 enum machine_mode xmode;
2217 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2218 tmode != VOIDmode;
2219 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2220 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2221 || SLOW_UNALIGNED_ACCESS (tmode, align))
2222 break;
2224 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2227 /* First store what we can in the largest integer mode, then go to
2228 successively smaller modes. */
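/* As an illustration (assuming STORE_MAX_PIECES >= 4 and that SImode,
   HImode and QImode moves are all available and sufficiently aligned):
   for data->len == 7 the loop below emits one SImode store (4 bytes),
   then one HImode store (2 bytes), then one QImode store (1 byte),
   leaving data->len == 0.  */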
2230 while (max_size > 1)
2232 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2233 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2234 if (GET_MODE_SIZE (tmode) < max_size)
2235 mode = tmode;
2237 if (mode == VOIDmode)
2238 break;
2240 icode = mov_optab->handlers[(int) mode].insn_code;
2241 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2242 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2244 max_size = GET_MODE_SIZE (mode);
2247 /* The code above should have handled everything. */
2248 gcc_assert (!data->len);
2251 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2252 with move instructions for mode MODE. GENFUN is the gen_... function
2253 to make a move insn for that mode. DATA has all the other info. */
2255 static void
2256 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2257 struct store_by_pieces *data)
2259 unsigned int size = GET_MODE_SIZE (mode);
2260 rtx to1, cst;
2262 while (data->len >= size)
2264 if (data->reverse)
2265 data->offset -= size;
2267 if (data->autinc_to)
2268 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2269 data->offset);
2270 else
2271 to1 = adjust_address (data->to, mode, data->offset);
2273 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2274 emit_insn (gen_add2_insn (data->to_addr,
2275 GEN_INT (-(HOST_WIDE_INT) size)));
2277 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2278 emit_insn ((*genfun) (to1, cst));
2280 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2281 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2283 if (! data->reverse)
2284 data->offset += size;
2286 data->len -= size;
2290 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2291 its length in bytes. */
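/* A hedged usage sketch (the slot below is hypothetical): to zero a
   16-byte BLKmode stack temporary one might write

     rtx slot = assign_stack_temp (BLKmode, 16, 0);
     clear_storage (slot, GEN_INT (16));

   and the code below then picks clear_by_pieces, a clrmem pattern or
   the memset libcall, whichever applies.  */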
2294 clear_storage (rtx object, rtx size)
2296 rtx retval = 0;
2297 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2298 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2300 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2301 just move a zero. Otherwise, do this a piece at a time. */
2302 if (GET_MODE (object) != BLKmode
2303 && GET_CODE (size) == CONST_INT
2304 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2305 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2306 else
2308 if (size == const0_rtx)
2310 else if (GET_CODE (size) == CONST_INT
2311 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2312 clear_by_pieces (object, INTVAL (size), align);
2313 else if (clear_storage_via_clrmem (object, size, align))
2315 else
2316 retval = clear_storage_via_libcall (object, size);
2319 return retval;
2322 /* A subroutine of clear_storage. Expand a clrmem pattern;
2323 return true if successful. */
2325 static bool
2326 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2328 /* Try the most limited insn first, because there's no point
2329 including more than one in the machine description unless
2330 the more limited one has some advantage. */
2332 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2333 enum machine_mode mode;
2335 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2336 mode = GET_MODE_WIDER_MODE (mode))
2338 enum insn_code code = clrmem_optab[(int) mode];
2339 insn_operand_predicate_fn pred;
2341 if (code != CODE_FOR_nothing
2342 /* We don't need MODE to be narrower than
2343 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2344 the mode mask, as it is returned by the macro, it will
2345 definitely be less than the actual mode mask. */
2346 && ((GET_CODE (size) == CONST_INT
2347 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2348 <= (GET_MODE_MASK (mode) >> 1)))
2349 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2350 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2351 || (*pred) (object, BLKmode))
2352 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2353 || (*pred) (opalign, VOIDmode)))
2355 rtx op1;
2356 rtx last = get_last_insn ();
2357 rtx pat;
2359 op1 = convert_to_mode (mode, size, 1);
2360 pred = insn_data[(int) code].operand[1].predicate;
2361 if (pred != 0 && ! (*pred) (op1, mode))
2362 op1 = copy_to_mode_reg (mode, op1);
2364 pat = GEN_FCN ((int) code) (object, op1, opalign);
2365 if (pat)
2367 emit_insn (pat);
2368 return true;
2370 else
2371 delete_insns_since (last);
2375 return false;
2378 /* A subroutine of clear_storage. Expand a call to memset.
2379 Return the return value of memset, 0 otherwise. */
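/* In effect this emits what a user-level `memset (object, 0, size)'
   call would produce, going through the normal call machinery rather
   than emit_library_call; see the comment about calling conventions
   inside the function.  */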
2381 static rtx
2382 clear_storage_via_libcall (rtx object, rtx size)
2384 tree call_expr, arg_list, fn, object_tree, size_tree;
2385 enum machine_mode size_mode;
2386 rtx retval;
2388 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2389 place those new pseudos into a VAR_DECL and use them later. */
2391 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2393 size_mode = TYPE_MODE (sizetype);
2394 size = convert_to_mode (size_mode, size, 1);
2395 size = copy_to_mode_reg (size_mode, size);
2397 /* It is incorrect to use the libcall calling conventions to call
2398 memset in this context. This could be a user call to memset and
2399 the user may wish to examine the return value from memset. For
2400 targets where libcalls and normal calls have different conventions
2401 for returning pointers, we could end up generating incorrect code. */
2403 object_tree = make_tree (ptr_type_node, object);
2404 size_tree = make_tree (sizetype, size);
2406 fn = clear_storage_libcall_fn (true);
2407 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2408 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2409 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2411 /* Now we have to build up the CALL_EXPR itself. */
2412 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2413 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2414 call_expr, arg_list, NULL_TREE);
2416 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2418 return retval;
2421 /* A subroutine of clear_storage_via_libcall. Create the tree node
2422 for the function we use for block clears. The first time FOR_CALL
2423 is true, we call assemble_external. */
2425 static GTY(()) tree block_clear_fn;
2427 void
2428 init_block_clear_fn (const char *asmspec)
2430 if (!block_clear_fn)
2432 tree fn, args;
2434 fn = get_identifier ("memset");
2435 args = build_function_type_list (ptr_type_node, ptr_type_node,
2436 integer_type_node, sizetype,
2437 NULL_TREE);
2439 fn = build_decl (FUNCTION_DECL, fn, args);
2440 DECL_EXTERNAL (fn) = 1;
2441 TREE_PUBLIC (fn) = 1;
2442 DECL_ARTIFICIAL (fn) = 1;
2443 TREE_NOTHROW (fn) = 1;
2445 block_clear_fn = fn;
2448 if (asmspec)
2449 set_user_assembler_name (block_clear_fn, asmspec);
2452 static tree
2453 clear_storage_libcall_fn (int for_call)
2455 static bool emitted_extern;
2457 if (!block_clear_fn)
2458 init_block_clear_fn (NULL);
2460 if (for_call && !emitted_extern)
2462 emitted_extern = true;
2463 make_decl_rtl (block_clear_fn);
2464 assemble_external (block_clear_fn);
2467 return block_clear_fn;
2470 /* Generate code to copy Y into X.
2471 Both Y and X must have the same mode, except that
2472 Y can be a constant with VOIDmode.
2473 This mode cannot be BLKmode; use emit_block_move for that.
2475 Return the last instruction emitted. */
2478 emit_move_insn (rtx x, rtx y)
2480 enum machine_mode mode = GET_MODE (x);
2481 rtx y_cst = NULL_RTX;
2482 rtx last_insn, set;
2484 gcc_assert (mode != BLKmode
2485 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
2487 if (CONSTANT_P (y))
2489 if (optimize
2490 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2491 && (last_insn = compress_float_constant (x, y)))
2492 return last_insn;
2494 y_cst = y;
2496 if (!LEGITIMATE_CONSTANT_P (y))
2498 y = force_const_mem (mode, y);
2500 /* If the target's cannot_force_const_mem prevented the spill,
2501 assume that the target's move expanders will also take care
2502 of the non-legitimate constant. */
2503 if (!y)
2504 y = y_cst;
2508 /* If X or Y are memory references, verify that their addresses are valid
2509 for the machine. */
2510 if (MEM_P (x)
2511 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2512 && ! push_operand (x, GET_MODE (x)))
2513 || (flag_force_addr
2514 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2515 x = validize_mem (x);
2517 if (MEM_P (y)
2518 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2519 || (flag_force_addr
2520 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2521 y = validize_mem (y);
2523 gcc_assert (mode != BLKmode);
2525 last_insn = emit_move_insn_1 (x, y);
2527 if (y_cst && REG_P (x)
2528 && (set = single_set (last_insn)) != NULL_RTX
2529 && SET_DEST (set) == x
2530 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2531 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2533 return last_insn;
2536 /* Low level part of emit_move_insn.
2537 Called just like emit_move_insn, but assumes X and Y
2538 are basically valid. */
2541 emit_move_insn_1 (rtx x, rtx y)
2543 enum machine_mode mode = GET_MODE (x);
2544 enum machine_mode submode;
2545 enum mode_class class = GET_MODE_CLASS (mode);
2547 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2549 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2550 return
2551 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2553 /* Expand complex moves by moving real part and imag part, if possible. */
2554 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2555 && BLKmode != (submode = GET_MODE_INNER (mode))
2556 && (mov_optab->handlers[(int) submode].insn_code
2557 != CODE_FOR_nothing))
2559 /* Don't split destination if it is a stack push. */
2560 int stack = push_operand (x, GET_MODE (x));
2562 #ifdef PUSH_ROUNDING
2563 /* In case we output to the stack, but the size is smaller than the
2564 machine can push exactly, we need to use move instructions. */
2565 if (stack
2566 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2567 != GET_MODE_SIZE (submode)))
2569 rtx temp;
2570 HOST_WIDE_INT offset1, offset2;
2572 /* Do not use anti_adjust_stack, since we don't want to update
2573 stack_pointer_delta. */
2574 temp = expand_binop (Pmode,
2575 #ifdef STACK_GROWS_DOWNWARD
2576 sub_optab,
2577 #else
2578 add_optab,
2579 #endif
2580 stack_pointer_rtx,
2581 GEN_INT
2582 (PUSH_ROUNDING
2583 (GET_MODE_SIZE (GET_MODE (x)))),
2584 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2586 if (temp != stack_pointer_rtx)
2587 emit_move_insn (stack_pointer_rtx, temp);
2589 #ifdef STACK_GROWS_DOWNWARD
2590 offset1 = 0;
2591 offset2 = GET_MODE_SIZE (submode);
2592 #else
2593 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2594 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2595 + GET_MODE_SIZE (submode));
2596 #endif
2598 emit_move_insn (change_address (x, submode,
2599 gen_rtx_PLUS (Pmode,
2600 stack_pointer_rtx,
2601 GEN_INT (offset1))),
2602 gen_realpart (submode, y));
2603 emit_move_insn (change_address (x, submode,
2604 gen_rtx_PLUS (Pmode,
2605 stack_pointer_rtx,
2606 GEN_INT (offset2))),
2607 gen_imagpart (submode, y));
2609 else
2610 #endif
2611 /* If this is a stack push, push the highpart first, so it
2612 will be in the argument order.
2614 In that case, change_address is used only to convert
2615 the mode, not to change the address. */
2616 if (stack)
2618 /* Note that the real part always precedes the imag part in memory
2619 regardless of machine's endianness. */
2620 #ifdef STACK_GROWS_DOWNWARD
2621 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2622 gen_imagpart (submode, y));
2623 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2624 gen_realpart (submode, y));
2625 #else
2626 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2627 gen_realpart (submode, y));
2628 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2629 gen_imagpart (submode, y));
2630 #endif
2632 else
2634 rtx realpart_x, realpart_y;
2635 rtx imagpart_x, imagpart_y;
2637 /* If this is a complex value with each part being smaller than a
2638 word, the usual calling sequence will likely pack the pieces into
2639 a single register. Unfortunately, SUBREG of hard registers only
2640 deals in terms of words, so we have a problem converting input
2641 arguments to the CONCAT of two registers that is used elsewhere
2642 for complex values. If this is before reload, we can copy it into
2643 memory and reload. FIXME, we should see about using extract and
2644 insert on integer registers, but complex short and complex char
2645 variables should be rarely used. */
2646 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2647 && (reload_in_progress | reload_completed) == 0)
2649 int packed_dest_p
2650 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2651 int packed_src_p
2652 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2654 if (packed_dest_p || packed_src_p)
2656 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2657 ? MODE_FLOAT : MODE_INT);
2659 enum machine_mode reg_mode
2660 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2662 if (reg_mode != BLKmode)
2664 rtx mem = assign_stack_temp (reg_mode,
2665 GET_MODE_SIZE (mode), 0);
2666 rtx cmem = adjust_address (mem, mode, 0);
2668 if (packed_dest_p)
2670 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2672 emit_move_insn_1 (cmem, y);
2673 return emit_move_insn_1 (sreg, mem);
2675 else
2677 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2679 emit_move_insn_1 (mem, sreg);
2680 return emit_move_insn_1 (x, cmem);
2686 realpart_x = gen_realpart (submode, x);
2687 realpart_y = gen_realpart (submode, y);
2688 imagpart_x = gen_imagpart (submode, x);
2689 imagpart_y = gen_imagpart (submode, y);
2691 /* Show the output dies here. This is necessary for SUBREGs
2692 of pseudos since we cannot track their lifetimes correctly;
2693 hard regs shouldn't appear here except as return values.
2694 We never want to emit such a clobber after reload. */
2695 if (x != y
2696 && ! (reload_in_progress || reload_completed)
2697 && (GET_CODE (realpart_x) == SUBREG
2698 || GET_CODE (imagpart_x) == SUBREG))
2699 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2701 emit_move_insn (realpart_x, realpart_y);
2702 emit_move_insn (imagpart_x, imagpart_y);
2705 return get_last_insn ();
2708 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2709 find a mode to do it in. If we have a movcc, use it. Otherwise,
2710 find the MODE_INT mode of the same width. */
2711 else if (GET_MODE_CLASS (mode) == MODE_CC
2712 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2714 enum insn_code insn_code;
2715 enum machine_mode tmode = VOIDmode;
2716 rtx x1 = x, y1 = y;
2718 if (mode != CCmode
2719 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2720 tmode = CCmode;
2721 else
2722 for (tmode = QImode; tmode != VOIDmode;
2723 tmode = GET_MODE_WIDER_MODE (tmode))
2724 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2725 break;
2727 gcc_assert (tmode != VOIDmode);
2729 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2730 may call change_address which is not appropriate if we were
2731 called when a reload was in progress. We don't have to worry
2732 about changing the address since the size in bytes is supposed to
2733 be the same. Copy the MEM to change the mode and move any
2734 substitutions from the old MEM to the new one. */
2736 if (reload_in_progress)
2738 x = gen_lowpart_common (tmode, x1);
2739 if (x == 0 && MEM_P (x1))
2741 x = adjust_address_nv (x1, tmode, 0);
2742 copy_replacements (x1, x);
2745 y = gen_lowpart_common (tmode, y1);
2746 if (y == 0 && MEM_P (y1))
2748 y = adjust_address_nv (y1, tmode, 0);
2749 copy_replacements (y1, y);
2752 else
2754 x = gen_lowpart (tmode, x);
2755 y = gen_lowpart (tmode, y);
2758 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2759 return emit_insn (GEN_FCN (insn_code) (x, y));
2762 /* Try using a move pattern for the corresponding integer mode. This is
2763 only safe when simplify_subreg can convert MODE constants into integer
2764 constants. At present, it can only do this reliably if the value
2765 fits within a HOST_WIDE_INT. */
2766 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2767 && (submode = int_mode_for_mode (mode)) != BLKmode
2768 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2769 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2770 (simplify_gen_subreg (submode, x, mode, 0),
2771 simplify_gen_subreg (submode, y, mode, 0)));
2773 /* This will handle any multi-word or full-word mode that lacks a move_insn
2774 pattern. However, you will get better code if you define such patterns,
2775 even if they must turn into multiple assembler instructions. */
2776 else
2778 rtx last_insn = 0;
2779 rtx seq, inner;
2780 int need_clobber;
2781 int i;
2783 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2785 #ifdef PUSH_ROUNDING
2787 /* If X is a push on the stack, do the push now and replace
2788 X with a reference to the stack pointer. */
2789 if (push_operand (x, GET_MODE (x)))
2791 rtx temp;
2792 enum rtx_code code;
2794 /* Do not use anti_adjust_stack, since we don't want to update
2795 stack_pointer_delta. */
2796 temp = expand_binop (Pmode,
2797 #ifdef STACK_GROWS_DOWNWARD
2798 sub_optab,
2799 #else
2800 add_optab,
2801 #endif
2802 stack_pointer_rtx,
2803 GEN_INT
2804 (PUSH_ROUNDING
2805 (GET_MODE_SIZE (GET_MODE (x)))),
2806 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2808 if (temp != stack_pointer_rtx)
2809 emit_move_insn (stack_pointer_rtx, temp);
2811 code = GET_CODE (XEXP (x, 0));
2813 /* Just hope that small offsets off SP are OK. */
2814 if (code == POST_INC)
2815 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2816 GEN_INT (-((HOST_WIDE_INT)
2817 GET_MODE_SIZE (GET_MODE (x)))));
2818 else if (code == POST_DEC)
2819 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2820 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2821 else
2822 temp = stack_pointer_rtx;
2824 x = change_address (x, VOIDmode, temp);
2826 #endif
2828 /* If we are in reload, see if either operand is a MEM whose address
2829 is scheduled for replacement. */
2830 if (reload_in_progress && MEM_P (x)
2831 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2832 x = replace_equiv_address_nv (x, inner);
2833 if (reload_in_progress && MEM_P (y)
2834 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2835 y = replace_equiv_address_nv (y, inner);
2837 start_sequence ();
2839 need_clobber = 0;
2840 for (i = 0;
2841 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2842 i++)
2844 rtx xpart = operand_subword (x, i, 1, mode);
2845 rtx ypart = operand_subword (y, i, 1, mode);
2847 /* If we can't get a part of Y, put Y into memory if it is a
2848 constant. Otherwise, force it into a register. If we still
2849 can't get a part of Y, abort. */
2850 if (ypart == 0 && CONSTANT_P (y))
2852 y = force_const_mem (mode, y);
2853 ypart = operand_subword (y, i, 1, mode);
2855 else if (ypart == 0)
2856 ypart = operand_subword_force (y, i, mode);
2858 gcc_assert (xpart && ypart);
2860 need_clobber |= (GET_CODE (xpart) == SUBREG);
2862 last_insn = emit_move_insn (xpart, ypart);
2865 seq = get_insns ();
2866 end_sequence ();
2868 /* Show the output dies here. This is necessary for SUBREGs
2869 of pseudos since we cannot track their lifetimes correctly;
2870 hard regs shouldn't appear here except as return values.
2871 We never want to emit such a clobber after reload. */
2872 if (x != y
2873 && ! (reload_in_progress || reload_completed)
2874 && need_clobber != 0)
2875 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2877 emit_insn (seq);
2879 return last_insn;
2883 /* If Y is representable exactly in a narrower mode, and the target can
2884 perform the extension directly from constant or memory, then emit the
2885 move as an extension. */
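/* Worked example (illustrative, assuming the target can extend SFmode
   to DFmode directly from memory or a constant): for `double d = 1.0'
   the DFmode constant 1.0 truncates exactly to SFmode, so the move can
   be emitted as a float_extend of the smaller SFmode constant instead
   of materializing the full DFmode constant.  */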
2887 static rtx
2888 compress_float_constant (rtx x, rtx y)
2890 enum machine_mode dstmode = GET_MODE (x);
2891 enum machine_mode orig_srcmode = GET_MODE (y);
2892 enum machine_mode srcmode;
2893 REAL_VALUE_TYPE r;
2895 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2897 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2898 srcmode != orig_srcmode;
2899 srcmode = GET_MODE_WIDER_MODE (srcmode))
2901 enum insn_code ic;
2902 rtx trunc_y, last_insn;
2904 /* Skip if the target can't extend this way. */
2905 ic = can_extend_p (dstmode, srcmode, 0);
2906 if (ic == CODE_FOR_nothing)
2907 continue;
2909 /* Skip if the narrowed value isn't exact. */
2910 if (! exact_real_truncate (srcmode, &r))
2911 continue;
2913 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2915 if (LEGITIMATE_CONSTANT_P (trunc_y))
2917 /* Skip if the target needs extra instructions to perform
2918 the extension. */
2919 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2920 continue;
2922 else if (float_extend_from_mem[dstmode][srcmode])
2923 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2924 else
2925 continue;
2927 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2928 last_insn = get_last_insn ();
2930 if (REG_P (x))
2931 set_unique_reg_note (last_insn, REG_EQUAL, y);
2933 return last_insn;
2936 return NULL_RTX;
2939 /* Pushing data onto the stack. */
2941 /* Push a block of length SIZE (perhaps variable)
2942 and return an rtx to address the beginning of the block.
2943 The value may be virtual_outgoing_args_rtx.
2945 EXTRA is the number of bytes of padding to push in addition to SIZE.
2946 BELOW nonzero means this padding comes at low addresses;
2947 otherwise, the padding comes at high addresses. */
2950 push_block (rtx size, int extra, int below)
2952 rtx temp;
2954 size = convert_modes (Pmode, ptr_mode, size, 1);
2955 if (CONSTANT_P (size))
2956 anti_adjust_stack (plus_constant (size, extra));
2957 else if (REG_P (size) && extra == 0)
2958 anti_adjust_stack (size);
2959 else
2961 temp = copy_to_mode_reg (Pmode, size);
2962 if (extra != 0)
2963 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2964 temp, 0, OPTAB_LIB_WIDEN);
2965 anti_adjust_stack (temp);
2968 #ifndef STACK_GROWS_DOWNWARD
2969 if (0)
2970 #else
2971 if (1)
2972 #endif
2974 temp = virtual_outgoing_args_rtx;
2975 if (extra != 0 && below)
2976 temp = plus_constant (temp, extra);
2978 else
2980 if (GET_CODE (size) == CONST_INT)
2981 temp = plus_constant (virtual_outgoing_args_rtx,
2982 -INTVAL (size) - (below ? 0 : extra));
2983 else if (extra != 0 && !below)
2984 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2985 negate_rtx (Pmode, plus_constant (size, extra)));
2986 else
2987 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2988 negate_rtx (Pmode, size));
2991 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2994 #ifdef PUSH_ROUNDING
2996 /* Emit single push insn. */
2998 static void
2999 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3001 rtx dest_addr;
3002 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3003 rtx dest;
3004 enum insn_code icode;
3005 insn_operand_predicate_fn pred;
3007 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3008 /* If there is a push pattern, use it. Otherwise try the old way of handing
3009 a MEM that represents the push operation to the move expander. */
3010 icode = push_optab->handlers[(int) mode].insn_code;
3011 if (icode != CODE_FOR_nothing)
3013 if (((pred = insn_data[(int) icode].operand[0].predicate)
3014 && !((*pred) (x, mode))))
3015 x = force_reg (mode, x);
3016 emit_insn (GEN_FCN (icode) (x));
3017 return;
3019 if (GET_MODE_SIZE (mode) == rounded_size)
3020 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3021 /* If we are to pad downward, adjust the stack pointer first and
3022 then store X into the stack location using an offset. This is
3023 because emit_move_insn does not know how to pad; it does not have
3024 access to type. */
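/* Illustrative numbers for the branch below (assuming the stack grows
   downward and STACK_PUSH_CODE is not POST_DEC): pushing a 1-byte
   QImode value that PUSH_ROUNDING rounds up to 4 bytes gives
   padding_size == 3; the stack pointer is first moved down by 4 and
   the byte is then stored at sp + 3, with the padding below it.  */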
3025 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3027 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3028 HOST_WIDE_INT offset;
3030 emit_move_insn (stack_pointer_rtx,
3031 expand_binop (Pmode,
3032 #ifdef STACK_GROWS_DOWNWARD
3033 sub_optab,
3034 #else
3035 add_optab,
3036 #endif
3037 stack_pointer_rtx,
3038 GEN_INT (rounded_size),
3039 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3041 offset = (HOST_WIDE_INT) padding_size;
3042 #ifdef STACK_GROWS_DOWNWARD
3043 if (STACK_PUSH_CODE == POST_DEC)
3044 /* We have already decremented the stack pointer, so get the
3045 previous value. */
3046 offset += (HOST_WIDE_INT) rounded_size;
3047 #else
3048 if (STACK_PUSH_CODE == POST_INC)
3049 /* We have already incremented the stack pointer, so get the
3050 previous value. */
3051 offset -= (HOST_WIDE_INT) rounded_size;
3052 #endif
3053 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3055 else
3057 #ifdef STACK_GROWS_DOWNWARD
3058 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3059 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3060 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3061 #else
3062 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3063 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3064 GEN_INT (rounded_size));
3065 #endif
3066 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3069 dest = gen_rtx_MEM (mode, dest_addr);
3071 if (type != 0)
3073 set_mem_attributes (dest, type, 1);
3075 if (flag_optimize_sibling_calls)
3076 /* Function incoming arguments may overlap with sibling call
3077 outgoing arguments and we cannot allow reordering of reads
3078 from function arguments with stores to outgoing arguments
3079 of sibling calls. */
3080 set_mem_alias_set (dest, 0);
3082 emit_move_insn (dest, x);
3084 #endif
3086 /* Generate code to push X onto the stack, assuming it has mode MODE and
3087 type TYPE.
3088 MODE is redundant except when X is a CONST_INT (since they don't
3089 carry mode info).
3090 SIZE is an rtx for the size of data to be copied (in bytes),
3091 needed only if X is BLKmode.
3093 ALIGN (in bits) is maximum alignment we can assume.
3095 If PARTIAL and REG are both nonzero, then copy that many of the first
3096 words of X into registers starting with REG, and push the rest of X.
3097 The amount of space pushed is decreased by PARTIAL words,
3098 rounded *down* to a multiple of PARM_BOUNDARY.
3099 REG must be a hard register in this case.
3100 If REG is zero but PARTIAL is not, take all other actions for an
3101 argument partially in registers, but do not actually load any
3102 registers.
3104 EXTRA is the amount in bytes of extra space to leave next to this arg.
3105 This is ignored if an argument block has already been allocated.
3107 On a machine that lacks real push insns, ARGS_ADDR is the address of
3108 the bottom of the argument block for this call. We use indexing off there
3109 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3110 argument block has not been preallocated.
3112 ARGS_SO_FAR is the size of args previously pushed for this call.
3114 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3115 for arguments passed in registers. If nonzero, it will be the number
3116 of bytes required. */
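/* An illustrative case (not from the original sources): a 12-byte
   BLKmode argument with PARTIAL == 2 on a 32-bit target puts the first
   8 bytes in REG and the following hard register, and only the
   remaining 4 bytes are pushed, subject to the REG_PARM_STACK_SPACE
   rule described above.  */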
3118 void
3119 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3120 unsigned int align, int partial, rtx reg, int extra,
3121 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3122 rtx alignment_pad)
3124 rtx xinner;
3125 enum direction stack_direction
3126 #ifdef STACK_GROWS_DOWNWARD
3127 = downward;
3128 #else
3129 = upward;
3130 #endif
3132 /* Decide where to pad the argument: `downward' for below,
3133 `upward' for above, or `none' for don't pad it.
3134 Default is below for small data on big-endian machines; else above. */
3135 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3137 /* Invert direction if stack is post-decrement.
3138 FIXME: why? */
3139 if (STACK_PUSH_CODE == POST_DEC)
3140 if (where_pad != none)
3141 where_pad = (where_pad == downward ? upward : downward);
3143 xinner = x;
3145 if (mode == BLKmode)
3147 /* Copy a block into the stack, entirely or partially. */
3149 rtx temp;
3150 int used = partial * UNITS_PER_WORD;
3151 int offset;
3152 int skip;
3154 if (reg && GET_CODE (reg) == PARALLEL)
3156 /* Use the size of the elt to compute offset. */
3157 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3158 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3159 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3161 else
3162 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3164 gcc_assert (size);
3166 used -= offset;
3168 /* USED is now the # of bytes we need not copy to the stack
3169 because registers will take care of them. */
3171 if (partial != 0)
3172 xinner = adjust_address (xinner, BLKmode, used);
3174 /* If the partial register-part of the arg counts in its stack size,
3175 skip the part of stack space corresponding to the registers.
3176 Otherwise, start copying to the beginning of the stack space,
3177 by setting SKIP to 0. */
3178 skip = (reg_parm_stack_space == 0) ? 0 : used;
3180 #ifdef PUSH_ROUNDING
3181 /* Do it with several push insns if that doesn't take lots of insns
3182 and if there is no difficulty with push insns that skip bytes
3183 on the stack for alignment purposes. */
3184 if (args_addr == 0
3185 && PUSH_ARGS
3186 && GET_CODE (size) == CONST_INT
3187 && skip == 0
3188 && MEM_ALIGN (xinner) >= align
3189 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3190 /* Here we avoid the case of a structure whose weak alignment
3191 forces many pushes of a small amount of data,
3192 and such small pushes do rounding that causes trouble. */
3193 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3194 || align >= BIGGEST_ALIGNMENT
3195 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3196 == (align / BITS_PER_UNIT)))
3197 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3199 /* Push padding now if padding above and stack grows down,
3200 or if padding below and stack grows up.
3201 But if space already allocated, this has already been done. */
3202 if (extra && args_addr == 0
3203 && where_pad != none && where_pad != stack_direction)
3204 anti_adjust_stack (GEN_INT (extra));
3206 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3208 else
3209 #endif /* PUSH_ROUNDING */
3211 rtx target;
3213 /* Otherwise make space on the stack and copy the data
3214 to the address of that space. */
3216 /* Deduct words put into registers from the size we must copy. */
3217 if (partial != 0)
3219 if (GET_CODE (size) == CONST_INT)
3220 size = GEN_INT (INTVAL (size) - used);
3221 else
3222 size = expand_binop (GET_MODE (size), sub_optab, size,
3223 GEN_INT (used), NULL_RTX, 0,
3224 OPTAB_LIB_WIDEN);
3227 /* Get the address of the stack space.
3228 In this case, we do not deal with EXTRA separately.
3229 A single stack adjust will do. */
3230 if (! args_addr)
3232 temp = push_block (size, extra, where_pad == downward);
3233 extra = 0;
3235 else if (GET_CODE (args_so_far) == CONST_INT)
3236 temp = memory_address (BLKmode,
3237 plus_constant (args_addr,
3238 skip + INTVAL (args_so_far)));
3239 else
3240 temp = memory_address (BLKmode,
3241 plus_constant (gen_rtx_PLUS (Pmode,
3242 args_addr,
3243 args_so_far),
3244 skip));
3246 if (!ACCUMULATE_OUTGOING_ARGS)
3248 /* If the source is referenced relative to the stack pointer,
3249 copy it to another register to stabilize it. We do not need
3250 to do this if we know that we won't be changing sp. */
3252 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3253 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3254 temp = copy_to_reg (temp);
3257 target = gen_rtx_MEM (BLKmode, temp);
3259 /* We do *not* set_mem_attributes here, because incoming arguments
3260 may overlap with sibling call outgoing arguments and we cannot
3261 allow reordering of reads from function arguments with stores
3262 to outgoing arguments of sibling calls. We do, however, want
3263 to record the alignment of the stack slot. */
3264 /* ALIGN may well be better aligned than TYPE, e.g. due to
3265 PARM_BOUNDARY. Assume the caller isn't lying. */
3266 set_mem_align (target, align);
3268 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3271 else if (partial > 0)
3273 /* Scalar partly in registers. */
3275 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3276 int i;
3277 int not_stack;
3278 /* # words of start of argument
3279 that we must make space for but need not store. */
3280 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3281 int args_offset = INTVAL (args_so_far);
3282 int skip;
3284 /* Push padding now if padding above and stack grows down,
3285 or if padding below and stack grows up.
3286 But if space already allocated, this has already been done. */
3287 if (extra && args_addr == 0
3288 && where_pad != none && where_pad != stack_direction)
3289 anti_adjust_stack (GEN_INT (extra));
3291 /* If we make space by pushing it, we might as well push
3292 the real data. Otherwise, we can leave OFFSET nonzero
3293 and leave the space uninitialized. */
3294 if (args_addr == 0)
3295 offset = 0;
3297 /* Now NOT_STACK gets the number of words that we don't need to
3298 allocate on the stack. */
3299 not_stack = partial - offset;
3301 /* If the partial register-part of the arg counts in its stack size,
3302 skip the part of stack space corresponding to the registers.
3303 Otherwise, start copying to the beginning of the stack space,
3304 by setting SKIP to 0. */
3305 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3307 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3308 x = validize_mem (force_const_mem (mode, x));
3310 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3311 SUBREGs of such registers are not allowed. */
3312 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3313 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3314 x = copy_to_reg (x);
3316 /* Loop over all the words allocated on the stack for this arg. */
3317 /* We can do it by words, because any scalar bigger than a word
3318 has a size that is a multiple of a word. */
3319 #ifndef PUSH_ARGS_REVERSED
3320 for (i = not_stack; i < size; i++)
3321 #else
3322 for (i = size - 1; i >= not_stack; i--)
3323 #endif
3324 if (i >= not_stack + offset)
3325 emit_push_insn (operand_subword_force (x, i, mode),
3326 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3327 0, args_addr,
3328 GEN_INT (args_offset + ((i - not_stack + skip)
3329 * UNITS_PER_WORD)),
3330 reg_parm_stack_space, alignment_pad);
3332 else
3334 rtx addr;
3335 rtx dest;
3337 /* Push padding now if padding above and stack grows down,
3338 or if padding below and stack grows up.
3339 But if space already allocated, this has already been done. */
3340 if (extra && args_addr == 0
3341 && where_pad != none && where_pad != stack_direction)
3342 anti_adjust_stack (GEN_INT (extra));
3344 #ifdef PUSH_ROUNDING
3345 if (args_addr == 0 && PUSH_ARGS)
3346 emit_single_push_insn (mode, x, type);
3347 else
3348 #endif
3350 if (GET_CODE (args_so_far) == CONST_INT)
3351 addr
3352 = memory_address (mode,
3353 plus_constant (args_addr,
3354 INTVAL (args_so_far)));
3355 else
3356 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3357 args_so_far));
3358 dest = gen_rtx_MEM (mode, addr);
3360 /* We do *not* set_mem_attributes here, because incoming arguments
3361 may overlap with sibling call outgoing arguments and we cannot
3362 allow reordering of reads from function arguments with stores
3363 to outgoing arguments of sibling calls. We do, however, want
3364 to record the alignment of the stack slot. */
3365 /* ALIGN may well be better aligned than TYPE, e.g. due to
3366 PARM_BOUNDARY. Assume the caller isn't lying. */
3367 set_mem_align (dest, align);
3369 emit_move_insn (dest, x);
3373 /* If part should go in registers, copy that part
3374 into the appropriate registers. Do this now, at the end,
3375 since mem-to-mem copies above may do function calls. */
3376 if (partial > 0 && reg != 0)
3378 /* Handle calls that pass values in multiple non-contiguous locations.
3379 The Irix 6 ABI has examples of this. */
3380 if (GET_CODE (reg) == PARALLEL)
3381 emit_group_load (reg, x, type, -1);
3382 else
3383 move_block_to_reg (REGNO (reg), x, partial, mode);
3386 if (extra && args_addr == 0 && where_pad == stack_direction)
3387 anti_adjust_stack (GEN_INT (extra));
3389 if (alignment_pad && args_addr == 0)
3390 anti_adjust_stack (alignment_pad);
3393 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3394 operations. */
3396 static rtx
3397 get_subtarget (rtx x)
3399 return (optimize
3400 || x == 0
3401 /* Only registers can be subtargets. */
3402 || !REG_P (x)
3403 /* Don't use hard regs to avoid extending their life. */
3404 || REGNO (x) < FIRST_PSEUDO_REGISTER
3405 ? 0 : x);
3408 /* Expand an assignment that stores the value of FROM into TO. */
3410 void
3411 expand_assignment (tree to, tree from)
3413 rtx to_rtx = 0;
3414 rtx result;
3416 /* Don't crash if the lhs of the assignment was erroneous. */
3418 if (TREE_CODE (to) == ERROR_MARK)
3420 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3421 return;
3424 /* Assignment of a structure component needs special treatment
3425 if the structure component's rtx is not simply a MEM.
3426 Assignment of an array element at a constant index, and assignment of
3427 an array element in an unaligned packed structure field, have the same
3428 problem. */
3430 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3431 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3432 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3434 enum machine_mode mode1;
3435 HOST_WIDE_INT bitsize, bitpos;
3436 rtx orig_to_rtx;
3437 tree offset;
3438 int unsignedp;
3439 int volatilep = 0;
3440 tree tem;
3442 push_temp_slots ();
3443 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3444 &unsignedp, &volatilep);
3446 /* If we are going to use store_bit_field and extract_bit_field,
3447 make sure to_rtx will be safe for multiple use. */
3449 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3451 if (offset != 0)
3453 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3455 gcc_assert (MEM_P (to_rtx));
3457 #ifdef POINTERS_EXTEND_UNSIGNED
3458 if (GET_MODE (offset_rtx) != Pmode)
3459 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3460 #else
3461 if (GET_MODE (offset_rtx) != ptr_mode)
3462 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3463 #endif
3465 /* A constant address in TO_RTX can have VOIDmode; we must not try
3466 to call force_reg in that case. */
3467 if (MEM_P (to_rtx)
3468 && GET_MODE (to_rtx) == BLKmode
3469 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3470 && bitsize > 0
3471 && (bitpos % bitsize) == 0
3472 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3473 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3475 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3476 bitpos = 0;
3479 to_rtx = offset_address (to_rtx, offset_rtx,
3480 highest_pow2_factor_for_target (to,
3481 offset));
3484 if (MEM_P (to_rtx))
3486 /* If the field is at offset zero, we could have been given the
3487 DECL_RTX of the parent struct. Don't munge it. */
3488 to_rtx = shallow_copy_rtx (to_rtx);
3490 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3493 /* Deal with volatile and readonly fields. The former is only done
3494 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3495 if (volatilep && MEM_P (to_rtx))
3497 if (to_rtx == orig_to_rtx)
3498 to_rtx = copy_rtx (to_rtx);
3499 MEM_VOLATILE_P (to_rtx) = 1;
3502 if (MEM_P (to_rtx) && ! can_address_p (to))
3504 if (to_rtx == orig_to_rtx)
3505 to_rtx = copy_rtx (to_rtx);
3506 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3509 /* Optimize bitfld op= val in certain cases. */
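/* A source-level shape of what this loop tries to catch (illustrative
   only; the struct below is made up):

     struct S { unsigned f : 1; unsigned rest : 31; } s;
     void g (void) { s.f += 1; }

   i.e. a read-modify-write of a bitfield whose right-hand side is the
   same bitfield combined with another operand by PLUS or MINUS.  */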
3510 while (mode1 == VOIDmode
3511 && bitsize > 0 && bitsize < BITS_PER_WORD
3512 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3513 && !TREE_SIDE_EFFECTS (to)
3514 && !TREE_THIS_VOLATILE (to))
3516 tree src, op0, op1;
3517 rtx value, str_rtx = to_rtx;
3518 HOST_WIDE_INT bitpos1 = bitpos;
3519 optab binop;
3521 src = from;
3522 STRIP_NOPS (src);
3523 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3524 || !BINARY_CLASS_P (src))
3525 break;
3527 op0 = TREE_OPERAND (src, 0);
3528 op1 = TREE_OPERAND (src, 1);
3529 STRIP_NOPS (op0);
3531 if (! operand_equal_p (to, op0, 0))
3532 break;
3534 if (MEM_P (str_rtx))
3536 enum machine_mode mode = GET_MODE (str_rtx);
3537 HOST_WIDE_INT offset1;
3539 if (GET_MODE_BITSIZE (mode) == 0
3540 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3541 mode = word_mode;
3542 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3543 mode, 0);
3544 if (mode == VOIDmode)
3545 break;
3547 offset1 = bitpos1;
3548 bitpos1 %= GET_MODE_BITSIZE (mode);
3549 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3550 str_rtx = adjust_address (str_rtx, mode, offset1);
3552 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3553 break;
3555 /* If the bit field covers the whole REG/MEM, store_field
3556 will likely generate better code. */
3557 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3558 break;
3560 /* We can't handle fields split across multiple entities. */
3561 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3562 break;
3564 if (BYTES_BIG_ENDIAN)
3565 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3566 - bitsize;
3568 /* Special case some bitfield op= exp. */
3569 switch (TREE_CODE (src))
3571 case PLUS_EXPR:
3572 case MINUS_EXPR:
3573 /* For now, just optimize the case of the topmost bitfield,
3574 where we don't need to do any masking, and also
3575 1-bit bitfields where xor can be used.
3576 We might win by one instruction for the other bitfields
3577 too if insv/extv instructions aren't used, so that
3578 can be added later. */
3579 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3580 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3581 break;
3582 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3583 value = convert_modes (GET_MODE (str_rtx),
3584 TYPE_MODE (TREE_TYPE (op1)), value,
3585 TYPE_UNSIGNED (TREE_TYPE (op1)));
3587 /* We may be accessing data outside the field, which means
3588 we can alias adjacent data. */
3589 if (MEM_P (str_rtx))
3591 str_rtx = shallow_copy_rtx (str_rtx);
3592 set_mem_alias_set (str_rtx, 0);
3593 set_mem_expr (str_rtx, 0);
3596 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3597 if (bitsize == 1
3598 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3600 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3601 NULL_RTX);
3602 binop = xor_optab;
3604 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3605 build_int_cst (NULL_TREE, bitpos1),
3606 NULL_RTX, 1);
3607 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3608 value, str_rtx, 1, OPTAB_WIDEN);
3609 if (result != str_rtx)
3610 emit_move_insn (str_rtx, result);
3611 free_temp_slots ();
3612 pop_temp_slots ();
3613 return;
3615 default:
3616 break;
3619 break;
3622 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3623 TREE_TYPE (tem), get_alias_set (to));
3625 preserve_temp_slots (result);
3626 free_temp_slots ();
3627 pop_temp_slots ();
3629 /* If the value is meaningful, convert RESULT to the proper mode.
3630 Otherwise, return nothing. */
3631 return;
3634 /* If the rhs is a function call and its value is not an aggregate,
3635 call the function before we start to compute the lhs.
3636 This is needed for correct code for cases such as
3637 val = setjmp (buf) on machines where reference to val
3638 requires loading up part of an address in a separate insn.
3640 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3641 since it might be a promoted variable where the zero- or sign- extension
3642 needs to be done. Handling this in the normal way is safe because no
3643 computation is done before the call. */
3644 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3645 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3646 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3647 && REG_P (DECL_RTL (to))))
3649 rtx value;
3651 push_temp_slots ();
3652 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3653 if (to_rtx == 0)
3654 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3656 /* Handle calls that return values in multiple non-contiguous locations.
3657 The Irix 6 ABI has examples of this. */
3658 if (GET_CODE (to_rtx) == PARALLEL)
3659 emit_group_load (to_rtx, value, TREE_TYPE (from),
3660 int_size_in_bytes (TREE_TYPE (from)));
3661 else if (GET_MODE (to_rtx) == BLKmode)
3662 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3663 else
3665 if (POINTER_TYPE_P (TREE_TYPE (to)))
3666 value = convert_memory_address (GET_MODE (to_rtx), value);
3667 emit_move_insn (to_rtx, value);
3669 preserve_temp_slots (to_rtx);
3670 free_temp_slots ();
3671 pop_temp_slots ();
3672 return;
3675 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3676 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3678 if (to_rtx == 0)
3679 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3681 /* Don't move directly into a return register. */
3682 if (TREE_CODE (to) == RESULT_DECL
3683 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3685 rtx temp;
3687 push_temp_slots ();
3688 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3690 if (GET_CODE (to_rtx) == PARALLEL)
3691 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3692 int_size_in_bytes (TREE_TYPE (from)));
3693 else
3694 emit_move_insn (to_rtx, temp);
3696 preserve_temp_slots (to_rtx);
3697 free_temp_slots ();
3698 pop_temp_slots ();
3699 return;
3702 /* In case we are returning the contents of an object which overlaps
3703 the place the value is being stored, use a safe function when copying
3704 a value through a pointer into a structure value return block. */
3705 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3706 && current_function_returns_struct
3707 && !current_function_returns_pcc_struct)
3709 rtx from_rtx, size;
3711 push_temp_slots ();
3712 size = expr_size (from);
3713 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3715 emit_library_call (memmove_libfunc, LCT_NORMAL,
3716 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3717 XEXP (from_rtx, 0), Pmode,
3718 convert_to_mode (TYPE_MODE (sizetype),
3719 size, TYPE_UNSIGNED (sizetype)),
3720 TYPE_MODE (sizetype));
3722 preserve_temp_slots (to_rtx);
3723 free_temp_slots ();
3724 pop_temp_slots ();
3725 return;
3728 /* Compute FROM and store the value in the rtx we got. */
3730 push_temp_slots ();
3731 result = store_expr (from, to_rtx, 0);
3732 preserve_temp_slots (result);
3733 free_temp_slots ();
3734 pop_temp_slots ();
3735 return;
3738 /* Generate code for computing expression EXP,
3739 and storing the value into TARGET.
3741 If the mode is BLKmode then we may return TARGET itself.
3742 It turns out that in BLKmode it doesn't cause a problem,
3743 because C has no operators that could combine two different
3744 assignments into the same BLKmode object with different values
3745 with no sequence point. Will other languages need this to
3746 be more thorough?
3748 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3749 stack, and block moves may need to be treated specially. */
3752 store_expr (tree exp, rtx target, int call_param_p)
3754 rtx temp;
3755 rtx alt_rtl = NULL_RTX;
3756 int dont_return_target = 0;
3758 if (VOID_TYPE_P (TREE_TYPE (exp)))
3760 /* C++ can generate ?: expressions with a throw expression in one
3761 branch and an rvalue in the other. Here, we resolve attempts to
3762 store the throw expression's nonexistent result. */
3763 gcc_assert (!call_param_p);
3764 expand_expr (exp, const0_rtx, VOIDmode, 0);
3765 return NULL_RTX;
3767 if (TREE_CODE (exp) == COMPOUND_EXPR)
3769 /* Perform first part of compound expression, then assign from second
3770 part. */
3771 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3772 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3773 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3775 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3777 /* For conditional expression, get safe form of the target. Then
3778 test the condition, doing the appropriate assignment on either
3779 side. This avoids the creation of unnecessary temporaries.
3780 For non-BLKmode, it is more efficient not to do this. */
3782 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3784 do_pending_stack_adjust ();
3785 NO_DEFER_POP;
3786 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3787 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3788 emit_jump_insn (gen_jump (lab2));
3789 emit_barrier ();
3790 emit_label (lab1);
3791 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
3792 emit_label (lab2);
3793 OK_DEFER_POP;
3795 return NULL_RTX;
3797 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3798 /* If this is a scalar in a register that is stored in a wider mode
3799 than the declared mode, compute the result into its declared mode
3800 and then convert to the wider mode. Our value is the computed
3801 expression. */
3803 rtx inner_target = 0;
3805 /* We can do the conversion inside EXP, which will often result
3806 in some optimizations. Do the conversion in two steps: first
3807 change the signedness, if needed, then the extend. But don't
3808 do this if the type of EXP is a subtype of something else
3809 since then the conversion might involve more than just
3810 converting modes. */
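/* For illustration (an assumed scenario, not from the original source):
   storing a "short" value into a variable whose HImode value is kept
   promoted in an SImode register.  If the signedness of EXP disagrees
   with the promoted register, EXP is first converted to the matching
   16-bit type, and only then to the SImode type of SUBREG_REG (TARGET),
   so the widening happens while expanding EXP itself.  */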
3811 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
3812 && TREE_TYPE (TREE_TYPE (exp)) == 0
3813 && (!lang_hooks.reduce_bit_field_operations
3814 || (GET_MODE_PRECISION (GET_MODE (target))
3815 == TYPE_PRECISION (TREE_TYPE (exp)))))
3817 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3818 != SUBREG_PROMOTED_UNSIGNED_P (target))
3819 exp = convert
3820 (lang_hooks.types.signed_or_unsigned_type
3821 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3823 exp = convert (lang_hooks.types.type_for_mode
3824 (GET_MODE (SUBREG_REG (target)),
3825 SUBREG_PROMOTED_UNSIGNED_P (target)),
3826 exp);
3828 inner_target = SUBREG_REG (target);
3831 temp = expand_expr (exp, inner_target, VOIDmode,
3832 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3834 /* If TEMP is a VOIDmode constant, use convert_modes to make
3835 sure that we properly convert it. */
3836 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3838 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3839 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3840 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3841 GET_MODE (target), temp,
3842 SUBREG_PROMOTED_UNSIGNED_P (target));
3845 convert_move (SUBREG_REG (target), temp,
3846 SUBREG_PROMOTED_UNSIGNED_P (target));
3848 return NULL_RTX;
3850 else
3852 temp = expand_expr_real (exp, target, GET_MODE (target),
3853 (call_param_p
3854 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3855 &alt_rtl);
3856 /* Return TARGET if it's a specified hardware register.
3857 If TARGET is a volatile mem ref, either return TARGET
3858 or return a reg copied *from* TARGET; ANSI requires this.
3860 Otherwise, if TEMP is not TARGET, return TEMP
3861 if it is constant (for efficiency),
3862 or if we really want the correct value. */
3863 if (!(target && REG_P (target)
3864 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3865 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3866 && ! rtx_equal_p (temp, target)
3867 && CONSTANT_P (temp))
3868 dont_return_target = 1;
3871 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3872 the same as that of TARGET, adjust the constant. This is needed, for
3873 example, in case it is a CONST_DOUBLE and we want only a word-sized
3874 value. */
3875 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3876 && TREE_CODE (exp) != ERROR_MARK
3877 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3878 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3879 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3881 /* If value was not generated in the target, store it there.
3882 Convert the value to TARGET's type first if necessary and emit the
3883 pending incrementations that have been queued when expanding EXP.
3884 Note that we cannot emit the whole queue blindly because this will
3885 effectively disable the POST_INC optimization later.
3887 If TEMP and TARGET compare equal according to rtx_equal_p, but
3888 one or both of them are volatile memory refs, we have to distinguish
3889 two cases:
3890 - expand_expr has used TARGET. In this case, we must not generate
3891 another copy. This can be detected by TARGET being equal according
3892 to == .
3893 - expand_expr has not used TARGET - that means that the source just
3894 happens to have the same RTX form. Since temp will have been created
3895 by expand_expr, it will compare unequal according to == .
3896 We must generate a copy in this case, to reach the correct number
3897 of volatile memory references. */
3899 if ((! rtx_equal_p (temp, target)
3900 || (temp != target && (side_effects_p (temp)
3901 || side_effects_p (target))))
3902 && TREE_CODE (exp) != ERROR_MARK
3903 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3904 but TARGET is not valid memory reference, TEMP will differ
3905 from TARGET although it is really the same location. */
3906 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
3907 /* If there's nothing to copy, don't bother. Don't call expr_size
3908 unless necessary, because some front ends' (C++) expr_size hook
3909 aborts on objects that are not supposed to be bit-copied or
3910 bit-initialized. */
3911 && expr_size (exp) != const0_rtx)
3913 if (GET_MODE (temp) != GET_MODE (target)
3914 && GET_MODE (temp) != VOIDmode)
3916 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3917 if (dont_return_target)
3919 /* In this case, we will return TEMP,
3920 so make sure it has the proper mode.
3921 But don't forget to store the value into TARGET. */
3922 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3923 emit_move_insn (target, temp);
3925 else
3926 convert_move (target, temp, unsignedp);
3929 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3931 /* Handle copying a string constant into an array. The string
3932 constant may be shorter than the array. So copy just the string's
3933 actual length, and clear the rest. First get the size of the data
3934 type of the string, which is actually the size of the target. */
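/* A hypothetical illustration: for a definition like
       char buf[8] = "hi";
   the STRING_CST supplies 3 bytes (including the terminating NUL), so
   only those 3 bytes are block-copied and the remaining 5 bytes of BUF
   are cleared by the code below.  */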
3935 rtx size = expr_size (exp);
3937 if (GET_CODE (size) == CONST_INT
3938 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3939 emit_block_move (target, temp, size,
3940 (call_param_p
3941 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3942 else
3944 /* Compute the size of the data to copy from the string. */
3945 tree copy_size
3946 = size_binop (MIN_EXPR,
3947 make_tree (sizetype, size),
3948 size_int (TREE_STRING_LENGTH (exp)));
3949 rtx copy_size_rtx
3950 = expand_expr (copy_size, NULL_RTX, VOIDmode,
3951 (call_param_p
3952 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
3953 rtx label = 0;
3955 /* Copy that much. */
3956 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
3957 TYPE_UNSIGNED (sizetype));
3958 emit_block_move (target, temp, copy_size_rtx,
3959 (call_param_p
3960 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3962 /* Figure out how much is left in TARGET that we have to clear.
3963 Do all calculations in ptr_mode. */
3964 if (GET_CODE (copy_size_rtx) == CONST_INT)
3966 size = plus_constant (size, -INTVAL (copy_size_rtx));
3967 target = adjust_address (target, BLKmode,
3968 INTVAL (copy_size_rtx));
3970 else
3972 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
3973 copy_size_rtx, NULL_RTX, 0,
3974 OPTAB_LIB_WIDEN);
3976 #ifdef POINTERS_EXTEND_UNSIGNED
3977 if (GET_MODE (copy_size_rtx) != Pmode)
3978 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
3979 TYPE_UNSIGNED (sizetype));
3980 #endif
3982 target = offset_address (target, copy_size_rtx,
3983 highest_pow2_factor (copy_size));
3984 label = gen_label_rtx ();
3985 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3986 GET_MODE (size), 0, label);
3989 if (size != const0_rtx)
3990 clear_storage (target, size);
3992 if (label)
3993 emit_label (label);
3996 /* Handle calls that return values in multiple non-contiguous locations.
3997 The Irix 6 ABI has examples of this. */
3998 else if (GET_CODE (target) == PARALLEL)
3999 emit_group_load (target, temp, TREE_TYPE (exp),
4000 int_size_in_bytes (TREE_TYPE (exp)));
4001 else if (GET_MODE (temp) == BLKmode)
4002 emit_block_move (target, temp, expr_size (exp),
4003 (call_param_p
4004 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4005 else
4007 temp = force_operand (temp, target);
4008 if (temp != target)
4009 emit_move_insn (target, temp);
4013 return NULL_RTX;
4016 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4017 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4018 are set to non-constant values and place it in *P_NC_ELTS. */
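/* As an assumed example: for the initializer
       struct s x = { 1, 0, 3.0, some_var };
   this would add 3 to *P_NZ_ELTS (the scalars 1, 3.0 and SOME_VAR are
   not known to be zero) and, since SOME_VAR is not a valid constant
   initializer, 1 to *P_NC_ELTS.  */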
4020 static void
4021 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4022 HOST_WIDE_INT *p_nc_elts)
4024 HOST_WIDE_INT nz_elts, nc_elts;
4025 tree list;
4027 nz_elts = 0;
4028 nc_elts = 0;
4030 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4032 tree value = TREE_VALUE (list);
4033 tree purpose = TREE_PURPOSE (list);
4034 HOST_WIDE_INT mult;
4036 mult = 1;
4037 if (TREE_CODE (purpose) == RANGE_EXPR)
4039 tree lo_index = TREE_OPERAND (purpose, 0);
4040 tree hi_index = TREE_OPERAND (purpose, 1);
4042 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4043 mult = (tree_low_cst (hi_index, 1)
4044 - tree_low_cst (lo_index, 1) + 1);
4047 switch (TREE_CODE (value))
4049 case CONSTRUCTOR:
4051 HOST_WIDE_INT nz = 0, nc = 0;
4052 categorize_ctor_elements_1 (value, &nz, &nc);
4053 nz_elts += mult * nz;
4054 nc_elts += mult * nc;
4056 break;
4058 case INTEGER_CST:
4059 case REAL_CST:
4060 if (!initializer_zerop (value))
4061 nz_elts += mult;
4062 break;
4063 case COMPLEX_CST:
4064 if (!initializer_zerop (TREE_REALPART (value)))
4065 nz_elts += mult;
4066 if (!initializer_zerop (TREE_IMAGPART (value)))
4067 nz_elts += mult;
4068 break;
4069 case VECTOR_CST:
4071 tree v;
4072 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4073 if (!initializer_zerop (TREE_VALUE (v)))
4074 nz_elts += mult;
4076 break;
4078 default:
4079 nz_elts += mult;
4080 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4081 nc_elts += mult;
4082 break;
4086 *p_nz_elts += nz_elts;
4087 *p_nc_elts += nc_elts;
4090 void
4091 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4092 HOST_WIDE_INT *p_nc_elts)
4094 *p_nz_elts = 0;
4095 *p_nc_elts = 0;
4096 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4099 /* Count the number of scalars in TYPE. Return -1 on overflow or
4100 if TYPE is variable-sized. */
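/* Rough examples (assumed types): "int a[4]" counts as 4 scalars,
   "_Complex double" as 2, and "struct { int i; int a[3]; }" as 4.
   A variable-length array makes the result -1.  */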
4102 HOST_WIDE_INT
4103 count_type_elements (tree type)
4105 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4106 switch (TREE_CODE (type))
4108 case ARRAY_TYPE:
4110 tree telts = array_type_nelts (type);
4111 if (telts && host_integerp (telts, 1))
4113 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4114 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4115 if (n == 0)
4116 return 0;
4117 else if (max / n > m)
4118 return n * m;
4120 return -1;
4123 case RECORD_TYPE:
4125 HOST_WIDE_INT n = 0, t;
4126 tree f;
4128 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4129 if (TREE_CODE (f) == FIELD_DECL)
4131 t = count_type_elements (TREE_TYPE (f));
4132 if (t < 0)
4133 return -1;
4134 n += t;
4137 return n;
4140 case UNION_TYPE:
4141 case QUAL_UNION_TYPE:
4143 /* Ho hum. How in the world do we guess here? Clearly it isn't
4144 right to count the fields. Guess based on the number of words. */
4145 HOST_WIDE_INT n = int_size_in_bytes (type);
4146 if (n < 0)
4147 return -1;
4148 return n / UNITS_PER_WORD;
4151 case COMPLEX_TYPE:
4152 return 2;
4154 case VECTOR_TYPE:
4155 return TYPE_VECTOR_SUBPARTS (type);
4157 case INTEGER_TYPE:
4158 case REAL_TYPE:
4159 case ENUMERAL_TYPE:
4160 case BOOLEAN_TYPE:
4161 case CHAR_TYPE:
4162 case POINTER_TYPE:
4163 case OFFSET_TYPE:
4164 case REFERENCE_TYPE:
4165 return 1;
4167 case VOID_TYPE:
4168 case METHOD_TYPE:
4169 case FILE_TYPE:
4170 case SET_TYPE:
4171 case FUNCTION_TYPE:
4172 case LANG_TYPE:
4173 default:
4174 gcc_unreachable ();
4178 /* Return 1 if EXP contains mostly (3/4) zeros. */
4181 mostly_zeros_p (tree exp)
4183 if (TREE_CODE (exp) == CONSTRUCTOR)
4186 HOST_WIDE_INT nz_elts, nc_elts, elts;
4188 /* If there are no ranges of true bits, it is all zero. */
4189 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4190 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4192 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4193 elts = count_type_elements (TREE_TYPE (exp));
4195 return nz_elts < elts / 4;
4198 return initializer_zerop (exp);
4201 /* Helper function for store_constructor.
4202 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4203 TYPE is the type of the CONSTRUCTOR, not the element type.
4204 CLEARED is as for store_constructor.
4205 ALIAS_SET is the alias set to use for any stores.
4207 This provides a recursive shortcut back to store_constructor when it isn't
4208 necessary to go through store_field. This is so that we can pass through
4209 the cleared field to let store_constructor know that we may not have to
4210 clear a substructure if the outer structure has already been cleared. */
4212 static void
4213 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4214 HOST_WIDE_INT bitpos, enum machine_mode mode,
4215 tree exp, tree type, int cleared, int alias_set)
4217 if (TREE_CODE (exp) == CONSTRUCTOR
4218 /* We can only call store_constructor recursively if the size and
4219 bit position are on a byte boundary. */
4220 && bitpos % BITS_PER_UNIT == 0
4221 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4222 /* If we have a nonzero bitpos for a register target, then we just
4223 let store_field do the bitfield handling. This is unlikely to
4224 generate unnecessary clear instructions anyways. */
4225 && (bitpos == 0 || MEM_P (target)))
4227 if (MEM_P (target))
4228 target
4229 = adjust_address (target,
4230 GET_MODE (target) == BLKmode
4231 || 0 != (bitpos
4232 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4233 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4236 /* Update the alias set, if required. */
4237 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4238 && MEM_ALIAS_SET (target) != 0)
4240 target = copy_rtx (target);
4241 set_mem_alias_set (target, alias_set);
4244 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4246 else
4247 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4250 /* Store the value of constructor EXP into the rtx TARGET.
4251 TARGET is either a REG or a MEM; we know it cannot conflict, since
4252 safe_from_p has been called.
4253 CLEARED is true if TARGET is known to have been zero'd.
4254 SIZE is the number of bytes of TARGET we are allowed to modify: this
4255 may not be the same as the size of EXP if we are assigning to a field
4256 which has been packed to exclude padding bits. */
4258 static void
4259 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4261 tree type = TREE_TYPE (exp);
4262 #ifdef WORD_REGISTER_OPERATIONS
4263 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4264 #endif
4266 switch (TREE_CODE (type))
4268 case RECORD_TYPE:
4269 case UNION_TYPE:
4270 case QUAL_UNION_TYPE:
4272 tree elt;
4274 /* If size is zero or the target is already cleared, do nothing. */
4275 if (size == 0 || cleared)
4276 cleared = 1;
4277 /* We either clear the aggregate or indicate the value is dead. */
4278 else if ((TREE_CODE (type) == UNION_TYPE
4279 || TREE_CODE (type) == QUAL_UNION_TYPE)
4280 && ! CONSTRUCTOR_ELTS (exp))
4281 /* If the constructor is empty, clear the union. */
4283 clear_storage (target, expr_size (exp));
4284 cleared = 1;
4287 /* If we are building a static constructor into a register,
4288 set the initial value as zero so we can fold the value into
4289 a constant. But if more than one register is involved,
4290 this probably loses. */
4291 else if (REG_P (target) && TREE_STATIC (exp)
4292 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4294 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4295 cleared = 1;
4298 /* If the constructor has fewer fields than the structure or
4299 if we are initializing the structure to mostly zeros, clear
4300 the whole structure first. Don't do this if TARGET is a
4301 register whose mode size isn't equal to SIZE since
4302 clear_storage can't handle this case. */
4303 else if (size > 0
4304 && ((list_length (CONSTRUCTOR_ELTS (exp))
4305 != fields_length (type))
4306 || mostly_zeros_p (exp))
4307 && (!REG_P (target)
4308 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4309 == size)))
4311 clear_storage (target, GEN_INT (size));
4312 cleared = 1;
4315 if (! cleared)
4316 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4318 /* Store each element of the constructor into the
4319 corresponding field of TARGET. */
4321 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4323 tree field = TREE_PURPOSE (elt);
4324 tree value = TREE_VALUE (elt);
4325 enum machine_mode mode;
4326 HOST_WIDE_INT bitsize;
4327 HOST_WIDE_INT bitpos = 0;
4328 tree offset;
4329 rtx to_rtx = target;
4331 /* Just ignore missing fields. We cleared the whole
4332 structure, above, if any fields are missing. */
4333 if (field == 0)
4334 continue;
4336 if (cleared && initializer_zerop (value))
4337 continue;
4339 if (host_integerp (DECL_SIZE (field), 1))
4340 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4341 else
4342 bitsize = -1;
4344 mode = DECL_MODE (field);
4345 if (DECL_BIT_FIELD (field))
4346 mode = VOIDmode;
4348 offset = DECL_FIELD_OFFSET (field);
4349 if (host_integerp (offset, 0)
4350 && host_integerp (bit_position (field), 0))
4352 bitpos = int_bit_position (field);
4353 offset = 0;
4355 else
4356 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4358 if (offset)
4360 rtx offset_rtx;
4362 offset
4363 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4364 make_tree (TREE_TYPE (exp),
4365 target));
4367 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4368 gcc_assert (MEM_P (to_rtx));
4370 #ifdef POINTERS_EXTEND_UNSIGNED
4371 if (GET_MODE (offset_rtx) != Pmode)
4372 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4373 #else
4374 if (GET_MODE (offset_rtx) != ptr_mode)
4375 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4376 #endif
4378 to_rtx = offset_address (to_rtx, offset_rtx,
4379 highest_pow2_factor (offset));
4382 #ifdef WORD_REGISTER_OPERATIONS
4383 /* If this initializes a field that is smaller than a
4384 word, at the start of a word, try to widen it to a full
4385 word. This special case allows us to output C++ member
4386 function initializations in a form that the optimizers
4387 can understand. */
4388 if (REG_P (target)
4389 && bitsize < BITS_PER_WORD
4390 && bitpos % BITS_PER_WORD == 0
4391 && GET_MODE_CLASS (mode) == MODE_INT
4392 && TREE_CODE (value) == INTEGER_CST
4393 && exp_size >= 0
4394 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4396 tree type = TREE_TYPE (value);
4398 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4400 type = lang_hooks.types.type_for_size
4401 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4402 value = convert (type, value);
4405 if (BYTES_BIG_ENDIAN)
4406 value
4407 = fold (build2 (LSHIFT_EXPR, type, value,
4408 build_int_cst (NULL_TREE,
4409 BITS_PER_WORD - bitsize)));
4410 bitsize = BITS_PER_WORD;
4411 mode = word_mode;
4413 #endif
4415 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4416 && DECL_NONADDRESSABLE_P (field))
4418 to_rtx = copy_rtx (to_rtx);
4419 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4422 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4423 value, type, cleared,
4424 get_alias_set (TREE_TYPE (field)));
4426 break;
4428 case ARRAY_TYPE:
4430 tree elt;
4431 int i;
4432 int need_to_clear;
4433 tree domain;
4434 tree elttype = TREE_TYPE (type);
4435 int const_bounds_p;
4436 HOST_WIDE_INT minelt = 0;
4437 HOST_WIDE_INT maxelt = 0;
4439 domain = TYPE_DOMAIN (type);
4440 const_bounds_p = (TYPE_MIN_VALUE (domain)
4441 && TYPE_MAX_VALUE (domain)
4442 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4443 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4445 /* If we have constant bounds for the range of the type, get them. */
4446 if (const_bounds_p)
4448 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4449 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4452 /* If the constructor has fewer elements than the array, clear
4453 the whole array first. Similarly if this is a static
4454 constructor of a non-BLKmode object. */
4455 if (cleared)
4456 need_to_clear = 0;
4457 else if (REG_P (target) && TREE_STATIC (exp))
4458 need_to_clear = 1;
4459 else
4461 HOST_WIDE_INT count = 0, zero_count = 0;
4462 need_to_clear = ! const_bounds_p;
4464 /* This loop is a more accurate version of the loop in
4465 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4466 is also needed to check for missing elements. */
4467 for (elt = CONSTRUCTOR_ELTS (exp);
4468 elt != NULL_TREE && ! need_to_clear;
4469 elt = TREE_CHAIN (elt))
4471 tree index = TREE_PURPOSE (elt);
4472 HOST_WIDE_INT this_node_count;
4474 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4476 tree lo_index = TREE_OPERAND (index, 0);
4477 tree hi_index = TREE_OPERAND (index, 1);
4479 if (! host_integerp (lo_index, 1)
4480 || ! host_integerp (hi_index, 1))
4482 need_to_clear = 1;
4483 break;
4486 this_node_count = (tree_low_cst (hi_index, 1)
4487 - tree_low_cst (lo_index, 1) + 1);
4489 else
4490 this_node_count = 1;
4492 count += this_node_count;
4493 if (mostly_zeros_p (TREE_VALUE (elt)))
4494 zero_count += this_node_count;
4497 /* Clear the entire array first if there are any missing
4498 elements, or if the incidence of zero elements is >=
4499 75%. */
4500 if (! need_to_clear
4501 && (count < maxelt - minelt + 1
4502 || 4 * zero_count >= 3 * count))
4503 need_to_clear = 1;
4506 if (need_to_clear && size > 0)
4508 if (REG_P (target))
4509 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4510 else
4511 clear_storage (target, GEN_INT (size));
4512 cleared = 1;
4515 if (!cleared && REG_P (target))
4516 /* Inform later passes that the old value is dead. */
4517 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4519 /* Store each element of the constructor into the
4520 corresponding element of TARGET, determined by counting the
4521 elements. */
4522 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4523 elt;
4524 elt = TREE_CHAIN (elt), i++)
4526 enum machine_mode mode;
4527 HOST_WIDE_INT bitsize;
4528 HOST_WIDE_INT bitpos;
4529 int unsignedp;
4530 tree value = TREE_VALUE (elt);
4531 tree index = TREE_PURPOSE (elt);
4532 rtx xtarget = target;
4534 if (cleared && initializer_zerop (value))
4535 continue;
4537 unsignedp = TYPE_UNSIGNED (elttype);
4538 mode = TYPE_MODE (elttype);
4539 if (mode == BLKmode)
4540 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4541 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4542 : -1);
4543 else
4544 bitsize = GET_MODE_BITSIZE (mode);
4546 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4548 tree lo_index = TREE_OPERAND (index, 0);
4549 tree hi_index = TREE_OPERAND (index, 1);
4550 rtx index_r, pos_rtx;
4551 HOST_WIDE_INT lo, hi, count;
4552 tree position;
4554 /* If the range is constant and "small", unroll the loop. */
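/* E.g. (a GNU C designated-range example, assumed here): the
   constructor for
       int a[100] = { [3 ... 5] = 7 };
   carries a RANGE_EXPR index with LO_INDEX 3 and HI_INDEX 5; with
   only three elements the three stores are emitted directly instead
   of building the runtime loop in the else branch below.  */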
4555 if (const_bounds_p
4556 && host_integerp (lo_index, 0)
4557 && host_integerp (hi_index, 0)
4558 && (lo = tree_low_cst (lo_index, 0),
4559 hi = tree_low_cst (hi_index, 0),
4560 count = hi - lo + 1,
4561 (!MEM_P (target)
4562 || count <= 2
4563 || (host_integerp (TYPE_SIZE (elttype), 1)
4564 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4565 <= 40 * 8)))))
4567 lo -= minelt; hi -= minelt;
4568 for (; lo <= hi; lo++)
4570 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4572 if (MEM_P (target)
4573 && !MEM_KEEP_ALIAS_SET_P (target)
4574 && TREE_CODE (type) == ARRAY_TYPE
4575 && TYPE_NONALIASED_COMPONENT (type))
4577 target = copy_rtx (target);
4578 MEM_KEEP_ALIAS_SET_P (target) = 1;
4581 store_constructor_field
4582 (target, bitsize, bitpos, mode, value, type, cleared,
4583 get_alias_set (elttype));
4586 else
4588 rtx loop_start = gen_label_rtx ();
4589 rtx loop_end = gen_label_rtx ();
4590 tree exit_cond;
4592 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4593 unsignedp = TYPE_UNSIGNED (domain);
4595 index = build_decl (VAR_DECL, NULL_TREE, domain);
4597 index_r
4598 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4599 &unsignedp, 0));
4600 SET_DECL_RTL (index, index_r);
4601 store_expr (lo_index, index_r, 0);
4603 /* Build the head of the loop. */
4604 do_pending_stack_adjust ();
4605 emit_label (loop_start);
4607 /* Assign value to element index. */
4608 position
4609 = convert (ssizetype,
4610 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4611 index, TYPE_MIN_VALUE (domain))));
4612 position = size_binop (MULT_EXPR, position,
4613 convert (ssizetype,
4614 TYPE_SIZE_UNIT (elttype)));
4616 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4617 xtarget = offset_address (target, pos_rtx,
4618 highest_pow2_factor (position));
4619 xtarget = adjust_address (xtarget, mode, 0);
4620 if (TREE_CODE (value) == CONSTRUCTOR)
4621 store_constructor (value, xtarget, cleared,
4622 bitsize / BITS_PER_UNIT);
4623 else
4624 store_expr (value, xtarget, 0);
4626 /* Generate a conditional jump to exit the loop. */
4627 exit_cond = build2 (LT_EXPR, integer_type_node,
4628 index, hi_index);
4629 jumpif (exit_cond, loop_end);
4631 /* Update the loop counter, and jump to the head of
4632 the loop. */
4633 expand_assignment (index,
4634 build2 (PLUS_EXPR, TREE_TYPE (index),
4635 index, integer_one_node));
4637 emit_jump (loop_start);
4639 /* Build the end of the loop. */
4640 emit_label (loop_end);
4643 else if ((index != 0 && ! host_integerp (index, 0))
4644 || ! host_integerp (TYPE_SIZE (elttype), 1))
4646 tree position;
4648 if (index == 0)
4649 index = ssize_int (1);
4651 if (minelt)
4652 index = fold_convert (ssizetype,
4653 fold (build2 (MINUS_EXPR,
4654 TREE_TYPE (index),
4655 index,
4656 TYPE_MIN_VALUE (domain))));
4658 position = size_binop (MULT_EXPR, index,
4659 convert (ssizetype,
4660 TYPE_SIZE_UNIT (elttype)));
4661 xtarget = offset_address (target,
4662 expand_expr (position, 0, VOIDmode, 0),
4663 highest_pow2_factor (position));
4664 xtarget = adjust_address (xtarget, mode, 0);
4665 store_expr (value, xtarget, 0);
4667 else
4669 if (index != 0)
4670 bitpos = ((tree_low_cst (index, 0) - minelt)
4671 * tree_low_cst (TYPE_SIZE (elttype), 1));
4672 else
4673 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4675 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4676 && TREE_CODE (type) == ARRAY_TYPE
4677 && TYPE_NONALIASED_COMPONENT (type))
4679 target = copy_rtx (target);
4680 MEM_KEEP_ALIAS_SET_P (target) = 1;
4682 store_constructor_field (target, bitsize, bitpos, mode, value,
4683 type, cleared, get_alias_set (elttype));
4686 break;
4689 case VECTOR_TYPE:
4691 tree elt;
4692 int i;
4693 int need_to_clear;
4694 int icode = 0;
4695 tree elttype = TREE_TYPE (type);
4696 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4697 enum machine_mode eltmode = TYPE_MODE (elttype);
4698 HOST_WIDE_INT bitsize;
4699 HOST_WIDE_INT bitpos;
4700 rtx *vector = NULL;
4701 unsigned n_elts;
4703 gcc_assert (eltmode != BLKmode);
4705 n_elts = TYPE_VECTOR_SUBPARTS (type);
4706 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4708 enum machine_mode mode = GET_MODE (target);
4710 icode = (int) vec_init_optab->handlers[mode].insn_code;
4711 if (icode != CODE_FOR_nothing)
4713 unsigned int i;
4715 vector = alloca (n_elts * sizeof (rtx));
4716 for (i = 0; i < n_elts; i++)
4717 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4721 /* If the constructor has fewer elements than the vector,
4722 clear the whole vector first. Similarly if this is a static
4723 constructor of a non-BLKmode object. */
4724 if (cleared)
4725 need_to_clear = 0;
4726 else if (REG_P (target) && TREE_STATIC (exp))
4727 need_to_clear = 1;
4728 else
4730 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4732 for (elt = CONSTRUCTOR_ELTS (exp);
4733 elt != NULL_TREE;
4734 elt = TREE_CHAIN (elt))
4736 int n_elts_here = tree_low_cst
4737 (int_const_binop (TRUNC_DIV_EXPR,
4738 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4739 TYPE_SIZE (elttype), 0), 1);
4741 count += n_elts_here;
4742 if (mostly_zeros_p (TREE_VALUE (elt)))
4743 zero_count += n_elts_here;
4746 /* Clear the entire vector first if there are any missing elements,
4747 or if the incidence of zero elements is >= 75%. */
4748 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4751 if (need_to_clear && size > 0 && !vector)
4753 if (REG_P (target))
4754 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4755 else
4756 clear_storage (target, GEN_INT (size));
4757 cleared = 1;
4760 if (!cleared && REG_P (target))
4761 /* Inform later passes that the old value is dead. */
4762 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4764 /* Store each element of the constructor into the corresponding
4765 element of TARGET, determined by counting the elements. */
4766 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4767 elt;
4768 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4770 tree value = TREE_VALUE (elt);
4771 tree index = TREE_PURPOSE (elt);
4772 HOST_WIDE_INT eltpos;
4774 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4775 if (cleared && initializer_zerop (value))
4776 continue;
4778 if (index != 0)
4779 eltpos = tree_low_cst (index, 1);
4780 else
4781 eltpos = i;
4783 if (vector)
4785 /* Vector CONSTRUCTORs should only be built from smaller
4786 vectors in the case of BLKmode vectors. */
4787 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4788 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4790 else
4792 enum machine_mode value_mode =
4793 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4794 ? TYPE_MODE (TREE_TYPE (value))
4795 : eltmode;
4796 bitpos = eltpos * elt_size;
4797 store_constructor_field (target, bitsize, bitpos,
4798 value_mode, value, type,
4799 cleared, get_alias_set (elttype));
4803 if (vector)
4804 emit_insn (GEN_FCN (icode)
4805 (target,
4806 gen_rtx_PARALLEL (GET_MODE (target),
4807 gen_rtvec_v (n_elts, vector))));
4808 break;
4811 /* Set constructor assignments. */
4812 case SET_TYPE:
4814 tree elt = CONSTRUCTOR_ELTS (exp);
4815 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4816 tree domain = TYPE_DOMAIN (type);
4817 tree domain_min, domain_max, bitlength;
4819 /* The default implementation strategy is to extract the
4820 constant parts of the constructor, use that to initialize
4821 the target, and then "or" in whatever non-constant ranges
4822 we need in addition.
4824 If a large set is all zero or all ones, it is probably
4825 better to set it using memset. Also, if a large set has
4826 just a single range, it may also be better to first clear
4827 the whole set (using memset), and then set only the
4828 bits we want. */
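/* For illustration (a Pascal-style set, assumed): a constructor for
   the set [1, 3..5] over a domain of 0..31 has its constant bits
   (1 and 3-5) assembled word by word below, while a range with
   non-constant bounds such as [i..j] would instead be handled by the
   setbits_libfunc call further down.  */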
4830 /* Check for all zeros. */
4831 if (elt == NULL_TREE && size > 0)
4833 if (!cleared)
4834 clear_storage (target, GEN_INT (size));
4835 return;
4838 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4839 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4840 bitlength = size_binop (PLUS_EXPR,
4841 size_diffop (domain_max, domain_min),
4842 ssize_int (1));
4844 nbits = tree_low_cst (bitlength, 1);
4846 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
4847 that are "complicated" (more than one range), initialize
4848 (the constant parts) by copying from a constant. */
4849 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4850 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4852 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4853 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4854 char *bit_buffer = alloca (nbits);
4855 HOST_WIDE_INT word = 0;
4856 unsigned int bit_pos = 0;
4857 unsigned int ibit = 0;
4858 unsigned int offset = 0; /* In bytes from beginning of set. */
4860 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4861 for (;;)
4863 if (bit_buffer[ibit])
4865 if (BYTES_BIG_ENDIAN)
4866 word |= (1 << (set_word_size - 1 - bit_pos));
4867 else
4868 word |= 1 << bit_pos;
4871 bit_pos++; ibit++;
4872 if (bit_pos >= set_word_size || ibit == nbits)
4874 if (word != 0 || ! cleared)
4876 rtx datum = gen_int_mode (word, mode);
4877 rtx to_rtx;
4879 /* The assumption here is that it is safe to
4880 use XEXP if the set is multi-word, but not
4881 if it's single-word. */
4882 if (MEM_P (target))
4883 to_rtx = adjust_address (target, mode, offset);
4884 else
4886 gcc_assert (!offset);
4887 to_rtx = target;
4889 emit_move_insn (to_rtx, datum);
4892 if (ibit == nbits)
4893 break;
4894 word = 0;
4895 bit_pos = 0;
4896 offset += set_word_size / BITS_PER_UNIT;
4900 else if (!cleared)
4901 /* Don't bother clearing storage if the set is all ones. */
4902 if (TREE_CHAIN (elt) != NULL_TREE
4903 || (TREE_PURPOSE (elt) == NULL_TREE
4904 ? nbits != 1
4905 : ( ! host_integerp (TREE_VALUE (elt), 0)
4906 || ! host_integerp (TREE_PURPOSE (elt), 0)
4907 || (tree_low_cst (TREE_VALUE (elt), 0)
4908 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4909 != (HOST_WIDE_INT) nbits))))
4910 clear_storage (target, expr_size (exp));
4912 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4914 /* Start of range of element or NULL. */
4915 tree startbit = TREE_PURPOSE (elt);
4916 /* End of range of element, or element value. */
4917 tree endbit = TREE_VALUE (elt);
4918 HOST_WIDE_INT startb, endb;
4919 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4921 bitlength_rtx = expand_expr (bitlength,
4922 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4924 /* Handle non-range tuple element like [ expr ]. */
4925 if (startbit == NULL_TREE)
4927 startbit = save_expr (endbit);
4928 endbit = startbit;
4931 startbit = convert (sizetype, startbit);
4932 endbit = convert (sizetype, endbit);
4933 if (! integer_zerop (domain_min))
4935 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4936 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4938 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4939 EXPAND_CONST_ADDRESS);
4940 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4941 EXPAND_CONST_ADDRESS);
4943 if (REG_P (target))
4945 targetx
4946 = assign_temp
4947 ((build_qualified_type (lang_hooks.types.type_for_mode
4948 (GET_MODE (target), 0),
4949 TYPE_QUAL_CONST)),
4950 0, 1, 1);
4951 emit_move_insn (targetx, target);
4954 else
4956 gcc_assert (MEM_P (target));
4957 targetx = target;
4960 /* Optimization: If startbit and endbit are constants divisible
4961 by BITS_PER_UNIT, call memset instead. */
4962 if (TREE_CODE (startbit) == INTEGER_CST
4963 && TREE_CODE (endbit) == INTEGER_CST
4964 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4965 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4967 emit_library_call (memset_libfunc, LCT_NORMAL,
4968 VOIDmode, 3,
4969 plus_constant (XEXP (targetx, 0),
4970 startb / BITS_PER_UNIT),
4971 Pmode,
4972 constm1_rtx, TYPE_MODE (integer_type_node),
4973 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4974 TYPE_MODE (sizetype));
4976 else
4977 emit_library_call (setbits_libfunc, LCT_NORMAL,
4978 VOIDmode, 4, XEXP (targetx, 0),
4979 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4980 startbit_rtx, TYPE_MODE (sizetype),
4981 endbit_rtx, TYPE_MODE (sizetype));
4983 if (REG_P (target))
4984 emit_move_insn (target, targetx);
4986 break;
4988 default:
4989 gcc_unreachable ();
4993 /* Store the value of EXP (an expression tree)
4994 into a subfield of TARGET which has mode MODE and occupies
4995 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4996 If MODE is VOIDmode, it means that we are storing into a bit-field.
4998 Always return const0_rtx unless we have something particular to
4999 return.
5001 TYPE is the type of the underlying object,
5003 ALIAS_SET is the alias set for the destination. This value will
5004 (in general) be different from that for TARGET, since TARGET is a
5005 reference to the containing structure. */
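/* A hypothetical case: storing 5 into the member B of
       struct { int a : 3; int b : 5; } x;
   reaches this point with BITSIZE 5 and MODE VOIDmode (a bit-field),
   so the value is expanded to a temporary and written with
   store_bit_field rather than through an ordinary memory reference.  */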
5007 static rtx
5008 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5009 enum machine_mode mode, tree exp, tree type, int alias_set)
5011 HOST_WIDE_INT width_mask = 0;
5013 if (TREE_CODE (exp) == ERROR_MARK)
5014 return const0_rtx;
5016 /* If we have nothing to store, do nothing unless the expression has
5017 side-effects. */
5018 if (bitsize == 0)
5019 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5020 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5021 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5023 /* If we are storing into an unaligned field of an aligned union that is
5024 in a register, we may have the mode of TARGET being an integer mode but
5025 MODE == BLKmode. In that case, get an aligned object whose size and
5026 alignment are the same as TARGET and store TARGET into it (we can avoid
5027 the store if the field being stored is the entire width of TARGET). Then
5028 call ourselves recursively to store the field into a BLKmode version of
5029 that object. Finally, load from the object into TARGET. This is not
5030 very efficient in general, but should only be slightly more expensive
5031 than the otherwise-required unaligned accesses. Perhaps this can be
5032 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5033 twice, once with emit_move_insn and once via store_field. */
5035 if (mode == BLKmode
5036 && (REG_P (target) || GET_CODE (target) == SUBREG))
5038 rtx object = assign_temp (type, 0, 1, 1);
5039 rtx blk_object = adjust_address (object, BLKmode, 0);
5041 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5042 emit_move_insn (object, target);
5044 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5046 emit_move_insn (target, object);
5048 /* We want to return the BLKmode version of the data. */
5049 return blk_object;
5052 if (GET_CODE (target) == CONCAT)
5054 /* We're storing into a struct containing a single __complex. */
5056 gcc_assert (!bitpos);
5057 return store_expr (exp, target, 0);
5060 /* If the structure is in a register or if the component
5061 is a bit field, we cannot use addressing to access it.
5062 Use bit-field techniques or SUBREG to store in it. */
5064 if (mode == VOIDmode
5065 || (mode != BLKmode && ! direct_store[(int) mode]
5066 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5067 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5068 || REG_P (target)
5069 || GET_CODE (target) == SUBREG
5070 /* If the field isn't aligned enough to store as an ordinary memref,
5071 store it as a bit field. */
5072 || (mode != BLKmode
5073 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5074 || bitpos % GET_MODE_ALIGNMENT (mode))
5075 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5076 || (bitpos % BITS_PER_UNIT != 0)))
5077 /* If the RHS and field are a constant size and the size of the
5078 RHS isn't the same size as the bitfield, we must use bitfield
5079 operations. */
5080 || (bitsize >= 0
5081 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5082 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5084 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5086 /* If BITSIZE is narrower than the size of the type of EXP
5087 we will be narrowing TEMP. Normally, what's wanted are the
5088 low-order bits. However, if EXP's type is a record and this is a
5089 big-endian machine, we want the upper BITSIZE bits. */
5090 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5091 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5092 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5093 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5094 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5095 - bitsize),
5096 NULL_RTX, 1);
5098 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5099 MODE. */
5100 if (mode != VOIDmode && mode != BLKmode
5101 && mode != TYPE_MODE (TREE_TYPE (exp)))
5102 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5104 /* If the modes of TARGET and TEMP are both BLKmode, both
5105 must be in memory and BITPOS must be aligned on a byte
5106 boundary. If so, we simply do a block copy. */
5107 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5109 gcc_assert (MEM_P (target) && MEM_P (temp)
5110 && !(bitpos % BITS_PER_UNIT));
5112 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5113 emit_block_move (target, temp,
5114 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5115 / BITS_PER_UNIT),
5116 BLOCK_OP_NORMAL);
5118 return const0_rtx;
5121 /* Store the value in the bitfield. */
5122 store_bit_field (target, bitsize, bitpos, mode, temp);
5124 return const0_rtx;
5126 else
5128 /* Now build a reference to just the desired component. */
5129 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5131 if (to_rtx == target)
5132 to_rtx = copy_rtx (to_rtx);
5134 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5135 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5136 set_mem_alias_set (to_rtx, alias_set);
5138 return store_expr (exp, to_rtx, 0);
5142 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5143 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5144 codes and find the ultimate containing object, which we return.
5146 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5147 bit position, and *PUNSIGNEDP to the signedness of the field.
5148 If the position of the field is variable, we store a tree
5149 giving the variable offset (in units) in *POFFSET.
5150 This offset is in addition to the bit position.
5151 If the position is not variable, we store 0 in *POFFSET.
5153 If any of the extraction expressions is volatile,
5154 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5156 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5157 is a mode that can be used to access the field. In that case, *PBITSIZE
5158 is redundant.
5160 If the field describes a variable-sized object, *PMODE is set to
5161 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5162 this case, but the address of the object can be found. */
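/* A sketch of the interface (assumed declarations): for
       struct S { int x; char c[10]; } s;
   a reference to s.c[3] typically comes back with the containing
   object S, *PBITSIZE = 8, *POFFSET = 0 and, with the usual layout,
   *PBITPOS = (4 + 3) * BITS_PER_UNIT = 56.  */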
5164 tree
5165 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5166 HOST_WIDE_INT *pbitpos, tree *poffset,
5167 enum machine_mode *pmode, int *punsignedp,
5168 int *pvolatilep)
5170 tree size_tree = 0;
5171 enum machine_mode mode = VOIDmode;
5172 tree offset = size_zero_node;
5173 tree bit_offset = bitsize_zero_node;
5174 tree tem;
5176 /* First get the mode, signedness, and size. We do this from just the
5177 outermost expression. */
5178 if (TREE_CODE (exp) == COMPONENT_REF)
5180 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5181 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5182 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5184 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5186 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5188 size_tree = TREE_OPERAND (exp, 1);
5189 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5191 else
5193 mode = TYPE_MODE (TREE_TYPE (exp));
5194 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5196 if (mode == BLKmode)
5197 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5198 else
5199 *pbitsize = GET_MODE_BITSIZE (mode);
5202 if (size_tree != 0)
5204 if (! host_integerp (size_tree, 1))
5205 mode = BLKmode, *pbitsize = -1;
5206 else
5207 *pbitsize = tree_low_cst (size_tree, 1);
5210 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5211 and find the ultimate containing object. */
5212 while (1)
5214 if (TREE_CODE (exp) == BIT_FIELD_REF)
5215 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5216 else if (TREE_CODE (exp) == COMPONENT_REF)
5218 tree field = TREE_OPERAND (exp, 1);
5219 tree this_offset = component_ref_field_offset (exp);
5221 /* If this field hasn't been filled in yet, don't go
5222 past it. This should only happen when folding expressions
5223 made during type construction. */
5224 if (this_offset == 0)
5225 break;
5227 offset = size_binop (PLUS_EXPR, offset, this_offset);
5228 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5229 DECL_FIELD_BIT_OFFSET (field));
5231 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5234 else if (TREE_CODE (exp) == ARRAY_REF
5235 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5237 tree index = TREE_OPERAND (exp, 1);
5238 tree low_bound = array_ref_low_bound (exp);
5239 tree unit_size = array_ref_element_size (exp);
5241 /* We assume all arrays have sizes that are a multiple of a byte.
5242 First subtract the lower bound, if any, in the type of the
5243 index, then convert to sizetype and multiply by the size of the
5244 array element. */
5245 if (! integer_zerop (low_bound))
5246 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5247 index, low_bound));
5249 offset = size_binop (PLUS_EXPR, offset,
5250 size_binop (MULT_EXPR,
5251 convert (sizetype, index),
5252 unit_size));
5255 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5256 conversions that don't change the mode, and all view conversions
5257 except those that need to "step up" the alignment. */
5258 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5259 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5260 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5261 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5262 && STRICT_ALIGNMENT
5263 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5264 < BIGGEST_ALIGNMENT)
5265 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5266 || TYPE_ALIGN_OK (TREE_TYPE
5267 (TREE_OPERAND (exp, 0))))))
5268 && ! ((TREE_CODE (exp) == NOP_EXPR
5269 || TREE_CODE (exp) == CONVERT_EXPR)
5270 && (TYPE_MODE (TREE_TYPE (exp))
5271 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5272 break;
5274 /* If any reference in the chain is volatile, the effect is volatile. */
5275 if (TREE_THIS_VOLATILE (exp))
5276 *pvolatilep = 1;
5278 exp = TREE_OPERAND (exp, 0);
5281 /* If OFFSET is constant, see if we can return the whole thing as a
5282 constant bit position. Otherwise, split it up. */
5283 if (host_integerp (offset, 0)
5284 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5285 bitsize_unit_node))
5286 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5287 && host_integerp (tem, 0))
5288 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5289 else
5290 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5292 *pmode = mode;
5293 return exp;
5296 /* Return a tree of sizetype representing the size, in bytes, of the element
5297 of EXP, an ARRAY_REF. */
5299 tree
5300 array_ref_element_size (tree exp)
5302 tree aligned_size = TREE_OPERAND (exp, 3);
5303 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5305 /* If a size was specified in the ARRAY_REF, it's the size measured
5306 in alignment units of the element type. So multiply by that value. */
5307 if (aligned_size)
5309 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5310 sizetype from another type of the same width and signedness. */
5311 if (TREE_TYPE (aligned_size) != sizetype)
5312 aligned_size = fold_convert (sizetype, aligned_size);
5313 return size_binop (MULT_EXPR, aligned_size,
5314 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5317 /* Otherwise, take the size from that of the element type. Substitute
5318 any PLACEHOLDER_EXPR that we have. */
5319 else
5320 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5323 /* Return a tree representing the lower bound of the array mentioned in
5324 EXP, an ARRAY_REF. */
5326 tree
5327 array_ref_low_bound (tree exp)
5329 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5331 /* If a lower bound is specified in EXP, use it. */
5332 if (TREE_OPERAND (exp, 2))
5333 return TREE_OPERAND (exp, 2);
5335 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5336 substituting for a PLACEHOLDER_EXPR as needed. */
5337 if (domain_type && TYPE_MIN_VALUE (domain_type))
5338 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5340 /* Otherwise, return a zero of the appropriate type. */
5341 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5344 /* Return a tree representing the upper bound of the array mentioned in
5345 EXP, an ARRAY_REF. */
5347 tree
5348 array_ref_up_bound (tree exp)
5350 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5352 /* If there is a domain type and it has an upper bound, use it, substituting
5353 for a PLACEHOLDER_EXPR as needed. */
5354 if (domain_type && TYPE_MAX_VALUE (domain_type))
5355 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5357 /* Otherwise fail. */
5358 return NULL_TREE;
5361 /* Return a tree representing the offset, in bytes, of the field referenced
5362 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5364 tree
5365 component_ref_field_offset (tree exp)
5367 tree aligned_offset = TREE_OPERAND (exp, 2);
5368 tree field = TREE_OPERAND (exp, 1);
5370 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5371 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5372 value. */
5373 if (aligned_offset)
5375 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5376 sizetype from another type of the same width and signedness. */
5377 if (TREE_TYPE (aligned_offset) != sizetype)
5378 aligned_offset = fold_convert (sizetype, aligned_offset);
5379 return size_binop (MULT_EXPR, aligned_offset,
5380 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5383 /* Otherwise, take the offset from that of the field. Substitute
5384 any PLACEHOLDER_EXPR that we have. */
5385 else
5386 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5389 /* Return 1 if T is an expression that get_inner_reference handles. */
5392 handled_component_p (tree t)
5394 switch (TREE_CODE (t))
5396 case BIT_FIELD_REF:
5397 case COMPONENT_REF:
5398 case ARRAY_REF:
5399 case ARRAY_RANGE_REF:
5400 case NON_LVALUE_EXPR:
5401 case VIEW_CONVERT_EXPR:
5402 return 1;
5404 /* ??? Sure they are handled, but get_inner_reference may return
5405 a different PBITSIZE, depending upon whether the expression is
5406 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5407 case NOP_EXPR:
5408 case CONVERT_EXPR:
5409 return (TYPE_MODE (TREE_TYPE (t))
5410 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5412 default:
5413 return 0;
5417 /* Given an rtx VALUE that may contain additions and multiplications, return
5418 an equivalent value that just refers to a register, memory, or constant.
5419 This is done by generating instructions to perform the arithmetic and
5420 returning a pseudo-register containing the value.
5422 The returned value may be a REG, SUBREG, MEM or constant. */
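/* A hypothetical use: handed something like
       (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61))
   this emits the multiplication (possibly as a shift) and the
   addition, and returns the pseudo register holding the sum, so the
   caller ends up with a simple operand instead of a compound
   expression.  */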
5425 force_operand (rtx value, rtx target)
5427 rtx op1, op2;
5428 /* Use subtarget as the target for operand 0 of a binary operation. */
5429 rtx subtarget = get_subtarget (target);
5430 enum rtx_code code = GET_CODE (value);
5432 /* Check for subreg applied to an expression produced by loop optimizer. */
5433 if (code == SUBREG
5434 && !REG_P (SUBREG_REG (value))
5435 && !MEM_P (SUBREG_REG (value)))
5437 value = simplify_gen_subreg (GET_MODE (value),
5438 force_reg (GET_MODE (SUBREG_REG (value)),
5439 force_operand (SUBREG_REG (value),
5440 NULL_RTX)),
5441 GET_MODE (SUBREG_REG (value)),
5442 SUBREG_BYTE (value));
5443 code = GET_CODE (value);
5446 /* Check for a PIC address load. */
5447 if ((code == PLUS || code == MINUS)
5448 && XEXP (value, 0) == pic_offset_table_rtx
5449 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5450 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5451 || GET_CODE (XEXP (value, 1)) == CONST))
5453 if (!subtarget)
5454 subtarget = gen_reg_rtx (GET_MODE (value));
5455 emit_move_insn (subtarget, value);
5456 return subtarget;
5459 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5461 if (!target)
5462 target = gen_reg_rtx (GET_MODE (value));
5463 convert_move (target, force_operand (XEXP (value, 0), NULL),
5464 code == ZERO_EXTEND);
5465 return target;
5468 if (ARITHMETIC_P (value))
5470 op2 = XEXP (value, 1);
5471 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5472 subtarget = 0;
5473 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5475 code = PLUS;
5476 op2 = negate_rtx (GET_MODE (value), op2);
5479 /* Check for an addition with OP2 a constant integer and our first
5480 operand a PLUS of a virtual register and something else. In that
5481 case, we want to emit the sum of the virtual register and the
5482 constant first and then add the other value. This allows virtual
5483 register instantiation to simply modify the constant rather than
5484 creating another one around this addition. */
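/* Illustrative RTL (assumed): for
       (plus (plus (reg virtual-stack-vars) (reg 65)) (const_int 8))
   the constant 8 is first added to the virtual register, which
   instantiation can later fold into a single frame offset, and only
   then is the remaining register added in.  */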
5485 if (code == PLUS && GET_CODE (op2) == CONST_INT
5486 && GET_CODE (XEXP (value, 0)) == PLUS
5487 && REG_P (XEXP (XEXP (value, 0), 0))
5488 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5489 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5491 rtx temp = expand_simple_binop (GET_MODE (value), code,
5492 XEXP (XEXP (value, 0), 0), op2,
5493 subtarget, 0, OPTAB_LIB_WIDEN);
5494 return expand_simple_binop (GET_MODE (value), code, temp,
5495 force_operand (XEXP (XEXP (value,
5496 0), 1), 0),
5497 target, 0, OPTAB_LIB_WIDEN);
5500 op1 = force_operand (XEXP (value, 0), subtarget);
5501 op2 = force_operand (op2, NULL_RTX);
5502 switch (code)
5504 case MULT:
5505 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5506 case DIV:
5507 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5508 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5509 target, 1, OPTAB_LIB_WIDEN);
5510 else
5511 return expand_divmod (0,
5512 FLOAT_MODE_P (GET_MODE (value))
5513 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5514 GET_MODE (value), op1, op2, target, 0);
5515 break;
5516 case MOD:
5517 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5518 target, 0);
5519 break;
5520 case UDIV:
5521 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5522 target, 1);
5523 break;
5524 case UMOD:
5525 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5526 target, 1);
5527 break;
5528 case ASHIFTRT:
5529 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5530 target, 0, OPTAB_LIB_WIDEN);
5531 break;
5532 default:
5533 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5534 target, 1, OPTAB_LIB_WIDEN);
5537 if (UNARY_P (value))
5539 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5540 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5543 #ifdef INSN_SCHEDULING
5544 /* On machines that have insn scheduling, we want all memory references to be
5545 explicit, so we need to deal with such paradoxical SUBREGs. */
5546 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5547 && (GET_MODE_SIZE (GET_MODE (value))
5548 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5549 value
5550 = simplify_gen_subreg (GET_MODE (value),
5551 force_reg (GET_MODE (SUBREG_REG (value)),
5552 force_operand (SUBREG_REG (value),
5553 NULL_RTX)),
5554 GET_MODE (SUBREG_REG (value)),
5555 SUBREG_BYTE (value));
5556 #endif
5558 return value;
5561 /* Subroutine of expand_expr: return nonzero iff there is no way that
5562 EXP can reference X, which is being modified. TOP_P is nonzero if this
5563 call is going to be used to determine whether we need a temporary
5564 for EXP, as opposed to a recursive call to this function.
5566 It is always safe for this routine to return zero since it merely
5567 searches for optimization opportunities. */
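/* For example, expand_operands below asks whether its second operand can
   reference the rtx chosen as TARGET; only if it cannot is TARGET reused as
   a scratch for the first operand.  */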
5569 static int
5570 safe_from_p (rtx x, tree exp, int top_p)
5572 rtx exp_rtl = 0;
5573 int i, nops;
5575 if (x == 0
5576 /* If EXP has varying size, we MUST use a target since we currently
5577 have no way of allocating temporaries of variable size
5578 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5579 So we assume here that something at a higher level has prevented a
5580 clash. This is somewhat bogus, but the best we can do. Only
5581 do this when X is BLKmode and when we are at the top level. */
5582 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5583 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5584 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5585 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5586 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5587 != INTEGER_CST)
5588 && GET_MODE (x) == BLKmode)
5589 /* If X is in the outgoing argument area, it is always safe. */
5590 || (MEM_P (x)
5591 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5592 || (GET_CODE (XEXP (x, 0)) == PLUS
5593 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5594 return 1;
5596 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5597 find the underlying pseudo. */
5598 if (GET_CODE (x) == SUBREG)
5600 x = SUBREG_REG (x);
5601 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5602 return 0;
5605 /* Now look at our tree code and possibly recurse. */
5606 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5608 case tcc_declaration:
5609 exp_rtl = DECL_RTL_IF_SET (exp);
5610 break;
5612 case tcc_constant:
5613 return 1;
5615 case tcc_exceptional:
5616 if (TREE_CODE (exp) == TREE_LIST)
5618 while (1)
5620 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5621 return 0;
5622 exp = TREE_CHAIN (exp);
5623 if (!exp)
5624 return 1;
5625 if (TREE_CODE (exp) != TREE_LIST)
5626 return safe_from_p (x, exp, 0);
5629 else if (TREE_CODE (exp) == ERROR_MARK)
5630 return 1; /* An already-visited SAVE_EXPR? */
5631 else
5632 return 0;
5634 case tcc_statement:
5635 /* The only case we look at here is the DECL_INITIAL inside a
5636 DECL_EXPR. */
5637 return (TREE_CODE (exp) != DECL_EXPR
5638 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5639 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5640 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5642 case tcc_binary:
5643 case tcc_comparison:
5644 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5645 return 0;
5646 /* Fall through. */
5648 case tcc_unary:
5649 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5651 case tcc_expression:
5652 case tcc_reference:
5653 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5654 the expression. If it is set, we conflict iff we are that rtx or
5655 both are in memory. Otherwise, we check all operands of the
5656 expression recursively. */
5658 switch (TREE_CODE (exp))
5660 case ADDR_EXPR:
5661 /* If the operand is static or we are static, we can't conflict.
5662 Likewise if we don't conflict with the operand at all. */
5663 if (staticp (TREE_OPERAND (exp, 0))
5664 || TREE_STATIC (exp)
5665 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5666 return 1;
5668 /* Otherwise, the only way this can conflict is if we are taking
5669 the address of a DECL whose address is part of X, which is
5670 very rare. */
5671 exp = TREE_OPERAND (exp, 0);
5672 if (DECL_P (exp))
5674 if (!DECL_RTL_SET_P (exp)
5675 || !MEM_P (DECL_RTL (exp)))
5676 return 0;
5677 else
5678 exp_rtl = XEXP (DECL_RTL (exp), 0);
5680 break;
5682 case MISALIGNED_INDIRECT_REF:
5683 case ALIGN_INDIRECT_REF:
5684 case INDIRECT_REF:
5685 if (MEM_P (x)
5686 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5687 get_alias_set (exp)))
5688 return 0;
5689 break;
5691 case CALL_EXPR:
5692 /* Assume that the call will clobber all hard registers and
5693 all of memory. */
5694 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5695 || MEM_P (x))
5696 return 0;
5697 break;
5699 case WITH_CLEANUP_EXPR:
5700 case CLEANUP_POINT_EXPR:
5701 /* Lowered by gimplify.c. */
5702 gcc_unreachable ();
5704 case SAVE_EXPR:
5705 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5707 default:
5708 break;
5711 /* If we have an rtx, we do not need to scan our operands. */
5712 if (exp_rtl)
5713 break;
5715 nops = first_rtl_op (TREE_CODE (exp));
5716 for (i = 0; i < nops; i++)
5717 if (TREE_OPERAND (exp, i) != 0
5718 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5719 return 0;
5721 /* If this is a language-specific tree code, it may require
5722 special handling. */
5723 if ((unsigned int) TREE_CODE (exp)
5724 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5725 && !lang_hooks.safe_from_p (x, exp))
5726 return 0;
5727 break;
5729 case tcc_type:
5730 /* Should never get a type here. */
5731 gcc_unreachable ();
5734 /* If we have an rtl, find any enclosed object. Then see if we conflict
5735 with it. */
5736 if (exp_rtl)
5738 if (GET_CODE (exp_rtl) == SUBREG)
5740 exp_rtl = SUBREG_REG (exp_rtl);
5741 if (REG_P (exp_rtl)
5742 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5743 return 0;
5746 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5747 are memory and they conflict. */
5748 return ! (rtx_equal_p (x, exp_rtl)
5749 || (MEM_P (x) && MEM_P (exp_rtl)
5750 && true_dependence (exp_rtl, VOIDmode, x,
5751 rtx_addr_varies_p)));
5754 /* If we reach here, it is safe. */
5755 return 1;
5759 /* Return the highest power of two that EXP is known to be a multiple of.
5760 This is used in updating alignment of MEMs in array references. */
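/* For instance, for the offset expression I * 12 + 8 this returns
   MIN (1 * 4, 8) == 4: the variable I contributes 1, the constant 12
   contributes 4, and the constant 8 contributes 8.  */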
5762 static unsigned HOST_WIDE_INT
5763 highest_pow2_factor (tree exp)
5765 unsigned HOST_WIDE_INT c0, c1;
5767 switch (TREE_CODE (exp))
5769 case INTEGER_CST:
5770 /* We can find the lowest bit that's a one. If the low
5771 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5772 We need to handle this case since we can find it in a COND_EXPR,
5773 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5774 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5775 later ICE. */
5776 if (TREE_CONSTANT_OVERFLOW (exp))
5777 return BIGGEST_ALIGNMENT;
5778 else
5780 /* Note: tree_low_cst is intentionally not used here,
5781 we don't care about the upper bits. */
5782 c0 = TREE_INT_CST_LOW (exp);
5783 c0 &= -c0;
5784 return c0 ? c0 : BIGGEST_ALIGNMENT;
5786 break;
5788 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5789 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5790 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5791 return MIN (c0, c1);
5793 case MULT_EXPR:
5794 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5795 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5796 return c0 * c1;
5798 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5799 case CEIL_DIV_EXPR:
5800 if (integer_pow2p (TREE_OPERAND (exp, 1))
5801 && host_integerp (TREE_OPERAND (exp, 1), 1))
5803 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5804 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5805 return MAX (1, c0 / c1);
5807 break;
5809 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5810 case SAVE_EXPR:
5811 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5813 case COMPOUND_EXPR:
5814 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5816 case COND_EXPR:
5817 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5818 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5819 return MIN (c0, c1);
5821 default:
5822 break;
5825 return 1;
5828 /* Similar, except that the alignment requirements of TARGET are
5829 taken into account. Assume it is at least as aligned as its
5830 type, unless it is a COMPONENT_REF in which case the layout of
5831 the structure gives the alignment. */
5833 static unsigned HOST_WIDE_INT
5834 highest_pow2_factor_for_target (tree target, tree exp)
5836 unsigned HOST_WIDE_INT target_align, factor;
5838 factor = highest_pow2_factor (exp);
5839 if (TREE_CODE (target) == COMPONENT_REF)
5840 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5841 else
5842 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5843 return MAX (factor, target_align);
5846 /* Expands variable VAR. */
5848 void
5849 expand_var (tree var)
5851 if (DECL_EXTERNAL (var))
5852 return;
5854 if (TREE_STATIC (var))
5855 /* If this is an inlined copy of a static local variable,
5856 look up the original decl. */
5857 var = DECL_ORIGIN (var);
5859 if (TREE_STATIC (var)
5860 ? !TREE_ASM_WRITTEN (var)
5861 : !DECL_RTL_SET_P (var))
5863 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5864 /* Should be ignored. */;
5865 else if (lang_hooks.expand_decl (var))
5866 /* OK. */;
5867 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5868 expand_decl (var);
5869 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5870 rest_of_decl_compilation (var, 0, 0);
5871 else
5872 /* No expansion needed. */
5873 gcc_assert (TREE_CODE (var) == TYPE_DECL
5874 || TREE_CODE (var) == CONST_DECL
5875 || TREE_CODE (var) == FUNCTION_DECL
5876 || TREE_CODE (var) == LABEL_DECL);
5880 /* Subroutine of expand_expr. Expand the two operands of a binary
5881 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5882 The value may be stored in TARGET if TARGET is nonzero. The
5883 MODIFIER argument is as documented by expand_expr. */
5885 static void
5886 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5887 enum expand_modifier modifier)
5889 if (! safe_from_p (target, exp1, 1))
5890 target = 0;
5891 if (operand_equal_p (exp0, exp1, 0))
5893 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5894 *op1 = copy_rtx (*op0);
5896 else
5898 /* If we need to preserve evaluation order, copy exp0 into its own
5899 temporary variable so that it can't be clobbered by exp1. */
5900 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5901 exp0 = save_expr (exp0);
5902 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5903 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
5908 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
5909 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
5911 static rtx
5912 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
5913 enum expand_modifier modifier)
5915 rtx result, subtarget;
5916 tree inner, offset;
5917 HOST_WIDE_INT bitsize, bitpos;
5918 int volatilep, unsignedp;
5919 enum machine_mode mode1;
5921 /* If we are taking the address of a constant and are at the top level,
5922 we have to use output_constant_def since we can't call force_const_mem
5923 at top level. */
5924 /* ??? This should be considered a front-end bug. We should not be
5925 generating ADDR_EXPR of something that isn't an LVALUE. The only
5926 exception here is STRING_CST. */
5927 if (TREE_CODE (exp) == CONSTRUCTOR
5928 || CONSTANT_CLASS_P (exp))
5929 return XEXP (output_constant_def (exp, 0), 0);
5931 /* Everything must be something allowed by is_gimple_addressable. */
5932 switch (TREE_CODE (exp))
5934 case INDIRECT_REF:
5935 /* This case will happen via recursion for &a->b. */
5936 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
5938 case CONST_DECL:
5939 /* Recurse and make the output_constant_def clause above handle this. */
5940 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
5941 tmode, modifier);
5943 case REALPART_EXPR:
5944 /* The real part of the complex number is always first, therefore
5945 the address is the same as the address of the parent object. */
5946 offset = 0;
5947 bitpos = 0;
5948 inner = TREE_OPERAND (exp, 0);
5949 break;
5951 case IMAGPART_EXPR:
5952 /* The imaginary part of the complex number is always second.
5953 The expression is therefore always offset by the size of the
5954 scalar type. */
5955 offset = 0;
5956 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
5957 inner = TREE_OPERAND (exp, 0);
5958 break;
5960 default:
5961 /* If the object is a DECL, then expand it for its rtl. Don't bypass
5962 expand_expr, as that can have various side effects; LABEL_DECLs, for
5963 example, may not have their DECL_RTL set yet. Assume language
5964 specific tree nodes can be expanded in some interesting way. */
5965 if (DECL_P (exp)
5966 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
5968 result = expand_expr (exp, target, tmode,
5969 modifier == EXPAND_INITIALIZER
5970 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
5972 /* If the DECL isn't in memory, then the DECL wasn't properly
5973 marked TREE_ADDRESSABLE, which will be either a front-end
5974 or a tree optimizer bug. */
5975 gcc_assert (GET_CODE (result) == MEM);
5976 result = XEXP (result, 0);
5978 /* ??? Is this needed anymore? */
5979 if (DECL_P (exp) && !TREE_USED (exp))
5981 assemble_external (exp);
5982 TREE_USED (exp) = 1;
5985 if (modifier != EXPAND_INITIALIZER
5986 && modifier != EXPAND_CONST_ADDRESS)
5987 result = force_operand (result, target);
5988 return result;
5991 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5992 &mode1, &unsignedp, &volatilep);
5993 break;
5996 /* We must have made progress. */
5997 gcc_assert (inner != exp);
5999 subtarget = offset || bitpos ? NULL_RTX : target;
6000 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6002 if (offset)
6004 rtx tmp;
6006 if (modifier != EXPAND_NORMAL)
6007 result = force_operand (result, NULL);
6008 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6010 result = convert_memory_address (tmode, result);
6011 tmp = convert_memory_address (tmode, tmp);
6013 if (modifier == EXPAND_SUM)
6014 result = gen_rtx_PLUS (tmode, result, tmp);
6015 else
6017 subtarget = bitpos ? NULL_RTX : target;
6018 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6019 1, OPTAB_LIB_WIDEN);
6023 if (bitpos)
6025 /* Someone beforehand should have rejected taking the address
6026 of such an object. */
6027 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6029 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6030 if (modifier < EXPAND_SUM)
6031 result = force_operand (result, target);
6034 return result;
6037 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6038 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6040 static rtx
6041 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6042 enum expand_modifier modifier)
6044 enum machine_mode rmode;
6045 rtx result;
6047 /* Target mode of VOIDmode says "whatever's natural". */
6048 if (tmode == VOIDmode)
6049 tmode = TYPE_MODE (TREE_TYPE (exp));
6051 /* We can get called with some Weird Things if the user does silliness
6052 like "(short) &a". In that case, convert_memory_address won't do
6053 the right thing, so ignore the given target mode. */
6054 if (tmode != Pmode && tmode != ptr_mode)
6055 tmode = Pmode;
6057 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6058 tmode, modifier);
6060 /* Despite expand_expr's claim that TMODE is ignored when not strictly
6061 convenient, things break if we don't honor it. Note that, combined
6062 with the check above, we only do this for pointer modes. */
6063 rmode = GET_MODE (result);
6064 if (rmode == VOIDmode)
6065 rmode = tmode;
6066 if (rmode != tmode)
6067 result = convert_memory_address (tmode, result);
6069 return result;
6073 /* expand_expr: generate code for computing expression EXP.
6074 An rtx for the computed value is returned. The value is never null.
6075 In the case of a void EXP, const0_rtx is returned.
6077 The value may be stored in TARGET if TARGET is nonzero.
6078 TARGET is just a suggestion; callers must assume that
6079 the rtx returned may not be the same as TARGET.
6081 If TARGET is CONST0_RTX, it means that the value will be ignored.
6083 If TMODE is not VOIDmode, it suggests generating the
6084 result in mode TMODE. But this is done only when convenient.
6085 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6086 TMODE is just a suggestion; callers must assume that
6087 the rtx returned may not have mode TMODE.
6089 Note that TARGET may have neither TMODE nor MODE. In that case, it
6090 probably will not be used.
6092 If MODIFIER is EXPAND_SUM then when EXP is an addition
6093 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6094 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6095 products as above, or REG or MEM, or constant.
6096 Ordinarily in such cases we would output mul or add instructions
6097 and then return a pseudo reg containing the sum.
6099 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6100 it also marks a label as absolutely required (it can't be dead).
6101 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6102 This is used for outputting expressions used in initializers.
6104 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6105 with a constant address even if that address is not normally legitimate.
6106 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6108 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6109 a call parameter. Such targets require special care as we haven't yet
6110 marked TARGET so that it's safe from being trashed by libcalls. We
6111 don't want to use TARGET for anything but the final result;
6112 intermediate values must go elsewhere. Additionally, calls to
6113 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6115 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6116 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6117 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6118 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6119 recursively. */
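/* Illustrative only (not a call site in this file): a typical caller that
   just wants the value of a tree RHS in some convenient rtx would do

     rtx val = expand_expr (rhs, NULL_RTX, TYPE_MODE (TREE_TYPE (rhs)),
                            EXPAND_NORMAL);

   and must then use VAL, since TARGET and TMODE are only suggestions.  */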
6121 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6122 enum expand_modifier, rtx *);
6124 rtx
6125 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6126 enum expand_modifier modifier, rtx *alt_rtl)
6128 int rn = -1;
6129 rtx ret, last = NULL;
6131 /* Handle ERROR_MARK before anybody tries to access its type. */
6132 if (TREE_CODE (exp) == ERROR_MARK
6133 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6135 ret = CONST0_RTX (tmode);
6136 return ret ? ret : const0_rtx;
6139 if (flag_non_call_exceptions)
6141 rn = lookup_stmt_eh_region (exp);
6142 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6143 if (rn >= 0)
6144 last = get_last_insn ();
6147 /* If this is an expression of some kind and it has an associated line
6148 number, then emit the line number before expanding the expression.
6150 We need to save and restore the file and line information so that
6151 errors discovered during expansion are emitted with the right
6152 information. It would be better if the diagnostic routines
6153 used the file/line information embedded in the tree nodes rather
6154 than globals. */
6155 if (cfun && EXPR_HAS_LOCATION (exp))
6157 location_t saved_location = input_location;
6158 input_location = EXPR_LOCATION (exp);
6159 emit_line_note (input_location);
6161 /* Record where the insns produced belong. */
6162 record_block_change (TREE_BLOCK (exp));
6164 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6166 input_location = saved_location;
6168 else
6170 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6173 /* If using non-call exceptions, mark all insns that may trap.
6174 expand_call() will mark CALL_INSNs before we get to this code,
6175 but it doesn't handle libcalls, and these may trap. */
6176 if (rn >= 0)
6178 rtx insn;
6179 for (insn = next_real_insn (last); insn;
6180 insn = next_real_insn (insn))
6182 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6183 /* If we want exceptions for non-call insns, any
6184 may_trap_p instruction may throw. */
6185 && GET_CODE (PATTERN (insn)) != CLOBBER
6186 && GET_CODE (PATTERN (insn)) != USE
6187 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6189 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6190 REG_NOTES (insn));
6195 return ret;
6198 static rtx
6199 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6200 enum expand_modifier modifier, rtx *alt_rtl)
6202 rtx op0, op1, temp;
6203 tree type = TREE_TYPE (exp);
6204 int unsignedp;
6205 enum machine_mode mode;
6206 enum tree_code code = TREE_CODE (exp);
6207 optab this_optab;
6208 rtx subtarget, original_target;
6209 int ignore;
6210 tree context;
6211 bool reduce_bit_field = false;
6212 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6213 ? reduce_to_bit_field_precision ((expr), \
6214 target, \
6215 type) \
6216 : (expr))
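/* REDUCE_BIT_FIELD is wrapped around arithmetic results below: when
   reduce_bit_field has been set it truncates the value back to the
   precision of the bit-field type, and it is a no-op when the result
   is ignored.  */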
6218 mode = TYPE_MODE (type);
6219 unsignedp = TYPE_UNSIGNED (type);
6220 if (lang_hooks.reduce_bit_field_operations
6221 && TREE_CODE (type) == INTEGER_TYPE
6222 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6224 /* An operation in what may be a bit-field type needs the
6225 result to be reduced to the precision of the bit-field type,
6226 which is narrower than that of the type's mode. */
6227 reduce_bit_field = true;
6228 if (modifier == EXPAND_STACK_PARM)
6229 target = 0;
6232 /* Use subtarget as the target for operand 0 of a binary operation. */
6233 subtarget = get_subtarget (target);
6234 original_target = target;
6235 ignore = (target == const0_rtx
6236 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6237 || code == CONVERT_EXPR || code == COND_EXPR
6238 || code == VIEW_CONVERT_EXPR)
6239 && TREE_CODE (type) == VOID_TYPE));
6241 /* If we are going to ignore this result, we need only do something
6242 if there is a side-effect somewhere in the expression. If there
6243 is, short-circuit the most common cases here. Note that we must
6244 not call expand_expr with anything but const0_rtx in case this
6245 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6247 if (ignore)
6249 if (! TREE_SIDE_EFFECTS (exp))
6250 return const0_rtx;
6252 /* Ensure we reference a volatile object even if value is ignored, but
6253 don't do this if all we are doing is taking its address. */
6254 if (TREE_THIS_VOLATILE (exp)
6255 && TREE_CODE (exp) != FUNCTION_DECL
6256 && mode != VOIDmode && mode != BLKmode
6257 && modifier != EXPAND_CONST_ADDRESS)
6259 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6260 if (MEM_P (temp))
6261 temp = copy_to_reg (temp);
6262 return const0_rtx;
6265 if (TREE_CODE_CLASS (code) == tcc_unary
6266 || code == COMPONENT_REF || code == INDIRECT_REF)
6267 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6268 modifier);
6270 else if (TREE_CODE_CLASS (code) == tcc_binary
6271 || TREE_CODE_CLASS (code) == tcc_comparison
6272 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6274 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6275 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6276 return const0_rtx;
6278 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6279 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6280 /* If the second operand has no side effects, just evaluate
6281 the first. */
6282 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6283 modifier);
6284 else if (code == BIT_FIELD_REF)
6286 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6287 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6288 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6289 return const0_rtx;
6292 target = 0;
6295 /* If we will do cse, generate all results into pseudo registers
6296 since 1) that allows cse to find more things
6297 and 2) otherwise cse could produce an insn the machine
6298 cannot support. An exception is a CONSTRUCTOR into a multi-word
6299 MEM: that's much more likely to be most efficient into the MEM.
6300 Another is a CALL_EXPR which must return in memory. */
6302 if (! cse_not_expected && mode != BLKmode && target
6303 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6304 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6305 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6306 target = 0;
6308 switch (code)
6310 case LABEL_DECL:
6312 tree function = decl_function_context (exp);
6314 temp = label_rtx (exp);
6315 temp = gen_rtx_LABEL_REF (Pmode, temp);
6317 if (function != current_function_decl
6318 && function != 0)
6319 LABEL_REF_NONLOCAL_P (temp) = 1;
6321 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6322 return temp;
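/* By the time we expand to RTL the function is out of SSA form, so an
   SSA_NAME simply stands for its underlying variable.  */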
6325 case SSA_NAME:
6326 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6327 NULL);
6329 case PARM_DECL:
6330 case VAR_DECL:
6331 /* If a static var's type was incomplete when the decl was written,
6332 but the type is complete now, lay out the decl now. */
6333 if (DECL_SIZE (exp) == 0
6334 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6335 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6336 layout_decl (exp, 0);
6338 /* ... fall through ... */
6340 case FUNCTION_DECL:
6341 case RESULT_DECL:
6342 gcc_assert (DECL_RTL (exp));
6344 /* Ensure the variable is marked as used even if it doesn't go through
6345 a parser. If it hasn't been used yet, write out an external
6346 definition. */
6347 if (! TREE_USED (exp))
6349 assemble_external (exp);
6350 TREE_USED (exp) = 1;
6353 /* Show we haven't gotten RTL for this yet. */
6354 temp = 0;
6356 /* Variables inherited from containing functions should have
6357 been lowered by this point. */
6358 context = decl_function_context (exp);
6359 gcc_assert (!context
6360 || context == current_function_decl
6361 || TREE_STATIC (exp)
6362 /* ??? C++ creates functions that are not TREE_STATIC. */
6363 || TREE_CODE (exp) == FUNCTION_DECL);
6365 /* This is the case of an array whose size is to be determined
6366 from its initializer, while the initializer is still being parsed.
6367 See expand_decl. */
6369 if (MEM_P (DECL_RTL (exp))
6370 && REG_P (XEXP (DECL_RTL (exp), 0)))
6371 temp = validize_mem (DECL_RTL (exp));
6373 /* If DECL_RTL is memory, we are in the normal case; if either
6374 the address is not valid, or it is not a register and -fforce-addr
6375 is specified, get the address into a register. */
6377 else if (MEM_P (DECL_RTL (exp))
6378 && modifier != EXPAND_CONST_ADDRESS
6379 && modifier != EXPAND_SUM
6380 && modifier != EXPAND_INITIALIZER
6381 && (! memory_address_p (DECL_MODE (exp),
6382 XEXP (DECL_RTL (exp), 0))
6383 || (flag_force_addr
6384 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6386 if (alt_rtl)
6387 *alt_rtl = DECL_RTL (exp);
6388 temp = replace_equiv_address (DECL_RTL (exp),
6389 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6392 /* If we got something, return it. But first, set the alignment
6393 if the address is a register. */
6394 if (temp != 0)
6396 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6397 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6399 return temp;
6402 /* If the mode of DECL_RTL does not match that of the decl, it
6403 must be a promoted value. We return a SUBREG of the wanted mode,
6404 but mark it so that we know that it was already extended. */
6406 if (REG_P (DECL_RTL (exp))
6407 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6409 enum machine_mode pmode;
6411 /* Get the signedness used for this variable. Ensure we get the
6412 same mode we got when the variable was declared. */
6413 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6414 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6415 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6417 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6418 SUBREG_PROMOTED_VAR_P (temp) = 1;
6419 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6420 return temp;
6423 return DECL_RTL (exp);
6425 case INTEGER_CST:
6426 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6427 TREE_INT_CST_HIGH (exp), mode);
6429 /* ??? If overflow is set, fold will have done an incomplete job,
6430 which can result in (plus xx (const_int 0)), which can get
6431 simplified by validate_replace_rtx during virtual register
6432 instantiation, which can result in unrecognizable insns.
6433 Avoid this by forcing all overflows into registers. */
6434 if (TREE_CONSTANT_OVERFLOW (exp)
6435 && modifier != EXPAND_INITIALIZER)
6436 temp = force_reg (mode, temp);
6438 return temp;
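/* A VECTOR_CST whose type has a real vector mode becomes a CONST_VECTOR
   directly; otherwise fall back to expanding it as a CONSTRUCTOR.  */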
6440 case VECTOR_CST:
6441 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6442 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6443 return const_vector_from_tree (exp);
6444 else
6445 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6446 TREE_VECTOR_CST_ELTS (exp)),
6447 ignore ? const0_rtx : target, tmode, modifier);
6449 case CONST_DECL:
6450 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6452 case REAL_CST:
6453 /* If optimized, generate immediate CONST_DOUBLE
6454 which will be turned into memory by reload if necessary.
6456 We used to force a register so that loop.c could see it. But
6457 this does not allow gen_* patterns to perform optimizations with
6458 the constants. It also produces two insns in cases like "x = 1.0;".
6459 On most machines, floating-point constants are not permitted in
6460 many insns, so we'd end up copying it to a register in any case.
6462 Now, we do the copying in expand_binop, if appropriate. */
6463 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6464 TYPE_MODE (TREE_TYPE (exp)));
6466 case COMPLEX_CST:
6467 /* Handle evaluating a complex constant in a CONCAT target. */
6468 if (original_target && GET_CODE (original_target) == CONCAT)
6470 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6471 rtx rtarg, itarg;
6473 rtarg = XEXP (original_target, 0);
6474 itarg = XEXP (original_target, 1);
6476 /* Move the real and imaginary parts separately. */
6477 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6478 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6480 if (op0 != rtarg)
6481 emit_move_insn (rtarg, op0);
6482 if (op1 != itarg)
6483 emit_move_insn (itarg, op1);
6485 return original_target;
6488 /* ... fall through ... */
6490 case STRING_CST:
6491 temp = output_constant_def (exp, 1);
6493 /* temp contains a constant address.
6494 On RISC machines where a constant address isn't valid,
6495 make some insns to get that address into a register. */
6496 if (modifier != EXPAND_CONST_ADDRESS
6497 && modifier != EXPAND_INITIALIZER
6498 && modifier != EXPAND_SUM
6499 && (! memory_address_p (mode, XEXP (temp, 0))
6500 || flag_force_addr))
6501 return replace_equiv_address (temp,
6502 copy_rtx (XEXP (temp, 0)));
6503 return temp;
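/* A SAVE_EXPR must be evaluated only once.  The first expansion copies the
   value to a register (unless it is constant) and records it in an
   artificial VAR_DECL; once SAVE_EXPR_RESOLVED_P is set, later expansions
   simply re-expand that decl and reuse its DECL_RTL.  */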
6505 case SAVE_EXPR:
6507 tree val = TREE_OPERAND (exp, 0);
6508 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6510 if (!SAVE_EXPR_RESOLVED_P (exp))
6512 /* We can indeed still hit this case, typically via builtin
6513 expanders calling save_expr immediately before expanding
6514 something. Assume this means that we only have to deal
6515 with non-BLKmode values. */
6516 gcc_assert (GET_MODE (ret) != BLKmode);
6518 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6519 DECL_ARTIFICIAL (val) = 1;
6520 DECL_IGNORED_P (val) = 1;
6521 TREE_OPERAND (exp, 0) = val;
6522 SAVE_EXPR_RESOLVED_P (exp) = 1;
6524 if (!CONSTANT_P (ret))
6525 ret = copy_to_reg (ret);
6526 SET_DECL_RTL (val, ret);
6529 return ret;
6532 case GOTO_EXPR:
6533 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6534 expand_goto (TREE_OPERAND (exp, 0));
6535 else
6536 expand_computed_goto (TREE_OPERAND (exp, 0));
6537 return const0_rtx;
6539 case CONSTRUCTOR:
6540 /* If we don't need the result, just ensure we evaluate any
6541 subexpressions. */
6542 if (ignore)
6544 tree elt;
6546 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6547 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6549 return const0_rtx;
6552 /* All elts simple constants => refer to a constant in memory. But
6553 if this is a non-BLKmode mode, let it store a field at a time
6554 since that should make a CONST_INT or CONST_DOUBLE when we
6555 fold. Likewise, if we have a target we can use, it is best to
6556 store directly into the target unless the type is large enough
6557 that memcpy will be used. If we are making an initializer and
6558 all operands are constant, put it in memory as well.
6560 FIXME: Avoid trying to fill vector constructors piece-meal.
6561 Output them with output_constant_def below unless we're sure
6562 they're zeros. This should go away when vector initializers
6563 are treated like VECTOR_CST instead of arrays. */
6565 else if ((TREE_STATIC (exp)
6566 && ((mode == BLKmode
6567 && ! (target != 0 && safe_from_p (target, exp, 1)))
6568 || TREE_ADDRESSABLE (exp)
6569 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6570 && (! MOVE_BY_PIECES_P
6571 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6572 TYPE_ALIGN (type)))
6573 && ! mostly_zeros_p (exp))))
6574 || ((modifier == EXPAND_INITIALIZER
6575 || modifier == EXPAND_CONST_ADDRESS)
6576 && TREE_CONSTANT (exp)))
6578 rtx constructor = output_constant_def (exp, 1);
6580 if (modifier != EXPAND_CONST_ADDRESS
6581 && modifier != EXPAND_INITIALIZER
6582 && modifier != EXPAND_SUM)
6583 constructor = validize_mem (constructor);
6585 return constructor;
6587 else
6589 /* Handle calls that pass values in multiple non-contiguous
6590 locations. The Irix 6 ABI has examples of this. */
6591 if (target == 0 || ! safe_from_p (target, exp, 1)
6592 || GET_CODE (target) == PARALLEL
6593 || modifier == EXPAND_STACK_PARM)
6594 target
6595 = assign_temp (build_qualified_type (type,
6596 (TYPE_QUALS (type)
6597 | (TREE_READONLY (exp)
6598 * TYPE_QUAL_CONST))),
6599 0, TREE_ADDRESSABLE (exp), 1);
6601 store_constructor (exp, target, 0, int_expr_size (exp));
6602 return target;
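/* For an indirection, expand the address with EXPAND_SUM so constant parts
   stay folded into it, legitimize it with memory_address, and build a MEM
   that carries the attributes (alias set, volatility, ...) of the original
   reference.  */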
6605 case MISALIGNED_INDIRECT_REF:
6606 case ALIGN_INDIRECT_REF:
6607 case INDIRECT_REF:
6609 tree exp1 = TREE_OPERAND (exp, 0);
6610 tree orig;
6612 if (code == MISALIGNED_INDIRECT_REF
6613 && !targetm.vectorize.misaligned_mem_ok (mode))
6614 abort ();
6616 if (modifier != EXPAND_WRITE)
6618 tree t;
6620 t = fold_read_from_constant_string (exp);
6621 if (t)
6622 return expand_expr (t, target, tmode, modifier);
6625 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6626 op0 = memory_address (mode, op0);
6628 if (code == ALIGN_INDIRECT_REF)
6630 int align = TYPE_ALIGN_UNIT (type);
6631 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6632 op0 = memory_address (mode, op0);
6635 temp = gen_rtx_MEM (mode, op0);
6637 orig = REF_ORIGINAL (exp);
6638 if (!orig)
6639 orig = exp;
6640 set_mem_attributes (temp, orig, 0);
6642 return temp;
6645 case ARRAY_REF:
6648 tree array = TREE_OPERAND (exp, 0);
6649 tree low_bound = array_ref_low_bound (exp);
6650 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6651 HOST_WIDE_INT i;
6653 gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
6655 /* Optimize the special-case of a zero lower bound.
6657 We convert the low_bound to sizetype to avoid some problems
6658 with constant folding. (E.g. suppose the lower bound is 1,
6659 and its mode is QI. Without the conversion, (ARRAY
6660 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6661 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6663 if (! integer_zerop (low_bound))
6664 index = size_diffop (index, convert (sizetype, low_bound));
6666 /* Fold an expression like: "foo"[2].
6667 This is not done in fold so it won't happen inside &.
6668 Don't fold if this is for wide characters since it's too
6669 difficult to do correctly and this is a very rare case. */
6671 if (modifier != EXPAND_CONST_ADDRESS
6672 && modifier != EXPAND_INITIALIZER
6673 && modifier != EXPAND_MEMORY)
6675 tree t = fold_read_from_constant_string (exp);
6677 if (t)
6678 return expand_expr (t, target, tmode, modifier);
6681 /* If this is a constant index into a constant array,
6682 just get the value from the array. Handle both the cases when
6683 we have an explicit constructor and when our operand is a variable
6684 that was declared const. */
6686 if (modifier != EXPAND_CONST_ADDRESS
6687 && modifier != EXPAND_INITIALIZER
6688 && modifier != EXPAND_MEMORY
6689 && TREE_CODE (array) == CONSTRUCTOR
6690 && ! TREE_SIDE_EFFECTS (array)
6691 && TREE_CODE (index) == INTEGER_CST
6692 && 0 > compare_tree_int (index,
6693 list_length (CONSTRUCTOR_ELTS
6694 (TREE_OPERAND (exp, 0)))))
6696 tree elem;
6698 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6699 i = TREE_INT_CST_LOW (index);
6700 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6703 if (elem)
6704 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6705 modifier);
6708 else if (optimize >= 1
6709 && modifier != EXPAND_CONST_ADDRESS
6710 && modifier != EXPAND_INITIALIZER
6711 && modifier != EXPAND_MEMORY
6712 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6713 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6714 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6715 && targetm.binds_local_p (array))
6717 if (TREE_CODE (index) == INTEGER_CST)
6719 tree init = DECL_INITIAL (array);
6721 if (TREE_CODE (init) == CONSTRUCTOR)
6723 tree elem;
6725 for (elem = CONSTRUCTOR_ELTS (init);
6726 (elem
6727 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6728 elem = TREE_CHAIN (elem))
6731 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6732 return expand_expr (fold (TREE_VALUE (elem)), target,
6733 tmode, modifier);
6735 else if (TREE_CODE (init) == STRING_CST
6736 && 0 > compare_tree_int (index,
6737 TREE_STRING_LENGTH (init)))
6739 tree type = TREE_TYPE (TREE_TYPE (init));
6740 enum machine_mode mode = TYPE_MODE (type);
6742 if (GET_MODE_CLASS (mode) == MODE_INT
6743 && GET_MODE_SIZE (mode) == 1)
6744 return gen_int_mode (TREE_STRING_POINTER (init)
6745 [TREE_INT_CST_LOW (index)], mode);
6750 goto normal_inner_ref;
6752 case COMPONENT_REF:
6753 /* If the operand is a CONSTRUCTOR, we can just extract the
6754 appropriate field if it is present. */
6755 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6757 tree elt;
6759 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6760 elt = TREE_CHAIN (elt))
6761 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6762 /* We can normally use the value of the field in the
6763 CONSTRUCTOR. However, if this is a bitfield in
6764 an integral mode that we can fit in a HOST_WIDE_INT,
6765 we must mask only the number of bits in the bitfield,
6766 since this is done implicitly by the constructor. If
6767 the bitfield does not meet either of those conditions,
6768 we can't do this optimization. */
6769 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6770 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6771 == MODE_INT)
6772 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6773 <= HOST_BITS_PER_WIDE_INT))))
6775 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6776 && modifier == EXPAND_STACK_PARM)
6777 target = 0;
6778 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6779 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6781 HOST_WIDE_INT bitsize
6782 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6783 enum machine_mode imode
6784 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6786 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6788 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6789 op0 = expand_and (imode, op0, op1, target);
6791 else
6793 tree count
6794 = build_int_cst (NULL_TREE,
6795 GET_MODE_BITSIZE (imode) - bitsize);
6797 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6798 target, 0);
6799 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6800 target, 0);
6804 return op0;
6807 goto normal_inner_ref;
6809 case BIT_FIELD_REF:
6810 case ARRAY_RANGE_REF:
6811 normal_inner_ref:
6813 enum machine_mode mode1;
6814 HOST_WIDE_INT bitsize, bitpos;
6815 tree offset;
6816 int volatilep = 0;
6817 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6818 &mode1, &unsignedp, &volatilep);
6819 rtx orig_op0;
6821 /* If we got back the original object, something is wrong. Perhaps
6822 we are evaluating an expression too early. In any event, don't
6823 infinitely recurse. */
6824 gcc_assert (tem != exp);
6826 /* If TEM's type is a union of variable size, pass TARGET to the inner
6827 computation, since it will need a temporary and TARGET is known
6828 to be usable as one. This occurs in unchecked conversion in Ada. */
6830 orig_op0 = op0
6831 = expand_expr (tem,
6832 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6833 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6834 != INTEGER_CST)
6835 && modifier != EXPAND_STACK_PARM
6836 ? target : NULL_RTX),
6837 VOIDmode,
6838 (modifier == EXPAND_INITIALIZER
6839 || modifier == EXPAND_CONST_ADDRESS
6840 || modifier == EXPAND_STACK_PARM)
6841 ? modifier : EXPAND_NORMAL);
6843 /* If this is a constant, put it into a register if it is a
6844 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6845 if (CONSTANT_P (op0))
6847 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6848 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6849 && offset == 0)
6850 op0 = force_reg (mode, op0);
6851 else
6852 op0 = validize_mem (force_const_mem (mode, op0));
6855 /* Otherwise, if this object is not in memory and we either have an
6856 offset or a BLKmode result, put it there. This case can't occur in
6857 C, but can in Ada if we have unchecked conversion of an expression
6858 from a scalar type to an array or record type or for an
6859 ARRAY_RANGE_REF whose type is BLKmode. */
6860 else if (!MEM_P (op0)
6861 && (offset != 0
6862 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6864 tree nt = build_qualified_type (TREE_TYPE (tem),
6865 (TYPE_QUALS (TREE_TYPE (tem))
6866 | TYPE_QUAL_CONST));
6867 rtx memloc = assign_temp (nt, 1, 1, 1);
6869 emit_move_insn (memloc, op0);
6870 op0 = memloc;
6873 if (offset != 0)
6875 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6876 EXPAND_SUM);
6878 gcc_assert (MEM_P (op0));
6880 #ifdef POINTERS_EXTEND_UNSIGNED
6881 if (GET_MODE (offset_rtx) != Pmode)
6882 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6883 #else
6884 if (GET_MODE (offset_rtx) != ptr_mode)
6885 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6886 #endif
6888 if (GET_MODE (op0) == BLKmode
6889 /* A constant address in OP0 can have VOIDmode; we must
6890 not try to call force_reg in that case. */
6891 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6892 && bitsize != 0
6893 && (bitpos % bitsize) == 0
6894 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6895 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6897 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6898 bitpos = 0;
6901 op0 = offset_address (op0, offset_rtx,
6902 highest_pow2_factor (offset));
6905 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6906 record its alignment as BIGGEST_ALIGNMENT. */
6907 if (MEM_P (op0) && bitpos == 0 && offset != 0
6908 && is_aligning_offset (offset, tem))
6909 set_mem_align (op0, BIGGEST_ALIGNMENT);
6911 /* Don't forget about volatility even if this is a bitfield. */
6912 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6914 if (op0 == orig_op0)
6915 op0 = copy_rtx (op0);
6917 MEM_VOLATILE_P (op0) = 1;
6920 /* The following code doesn't handle CONCAT.
6921 Assume only bitpos == 0 can be used for CONCAT, due to
6922 one-element arrays having the same mode as their element. */
6923 if (GET_CODE (op0) == CONCAT)
6925 gcc_assert (bitpos == 0
6926 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
6927 return op0;
6930 /* In cases where an aligned union has an unaligned object
6931 as a field, we might be extracting a BLKmode value from
6932 an integer-mode (e.g., SImode) object. Handle this case
6933 by doing the extract into an object as wide as the field
6934 (which we know to be the width of a basic mode), then
6935 storing into memory, and changing the mode to BLKmode. */
6936 if (mode1 == VOIDmode
6937 || REG_P (op0) || GET_CODE (op0) == SUBREG
6938 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6939 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6940 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6941 && modifier != EXPAND_CONST_ADDRESS
6942 && modifier != EXPAND_INITIALIZER)
6943 /* If the field isn't aligned enough to fetch as a memref,
6944 fetch it as a bit field. */
6945 || (mode1 != BLKmode
6946 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6947 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6948 || (MEM_P (op0)
6949 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6950 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6951 && ((modifier == EXPAND_CONST_ADDRESS
6952 || modifier == EXPAND_INITIALIZER)
6953 ? STRICT_ALIGNMENT
6954 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6955 || (bitpos % BITS_PER_UNIT != 0)))
6956 /* If the type and the field are a constant size and the
6957 size of the type isn't the same size as the bitfield,
6958 we must use bitfield operations. */
6959 || (bitsize >= 0
6960 && TYPE_SIZE (TREE_TYPE (exp))
6961 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6962 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6963 bitsize)))
6965 enum machine_mode ext_mode = mode;
6967 if (ext_mode == BLKmode
6968 && ! (target != 0 && MEM_P (op0)
6969 && MEM_P (target)
6970 && bitpos % BITS_PER_UNIT == 0))
6971 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6973 if (ext_mode == BLKmode)
6975 if (target == 0)
6976 target = assign_temp (type, 0, 1, 1);
6978 if (bitsize == 0)
6979 return target;
6981 /* In this case, BITPOS must start at a byte boundary and
6982 TARGET, if specified, must be a MEM. */
6983 gcc_assert (MEM_P (op0)
6984 && (!target || MEM_P (target))
6985 && !(bitpos % BITS_PER_UNIT));
6987 emit_block_move (target,
6988 adjust_address (op0, VOIDmode,
6989 bitpos / BITS_PER_UNIT),
6990 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6991 / BITS_PER_UNIT),
6992 (modifier == EXPAND_STACK_PARM
6993 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
6995 return target;
6998 op0 = validize_mem (op0);
7000 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7001 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7003 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7004 (modifier == EXPAND_STACK_PARM
7005 ? NULL_RTX : target),
7006 ext_mode, ext_mode);
7008 /* If the result is a record type and BITSIZE is narrower than
7009 the mode of OP0, an integral mode, and this is a big endian
7010 machine, we must put the field into the high-order bits. */
7011 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7012 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7013 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7014 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7015 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7016 - bitsize),
7017 op0, 1);
7019 /* If the result type is BLKmode, store the data into a temporary
7020 of the appropriate type, but with the mode corresponding to the
7021 mode for the data we have (op0's mode). It's tempting to make
7022 this a constant type, since we know it's only being stored once,
7023 but that can cause problems if we are taking the address of this
7024 COMPONENT_REF because the MEM of any reference via that address
7025 will have flags corresponding to the type, which will not
7026 necessarily be constant. */
7027 if (mode == BLKmode)
7029 rtx new
7030 = assign_stack_temp_for_type
7031 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7033 emit_move_insn (new, op0);
7034 op0 = copy_rtx (new);
7035 PUT_MODE (op0, BLKmode);
7036 set_mem_attributes (op0, exp, 1);
7039 return op0;
7042 /* If the result is BLKmode, use that to access the object
7043 now as well. */
7044 if (mode == BLKmode)
7045 mode1 = BLKmode;
7047 /* Get a reference to just this component. */
7048 if (modifier == EXPAND_CONST_ADDRESS
7049 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7050 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7051 else
7052 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7054 if (op0 == orig_op0)
7055 op0 = copy_rtx (op0);
7057 set_mem_attributes (op0, exp, 0);
7058 if (REG_P (XEXP (op0, 0)))
7059 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7061 MEM_VOLATILE_P (op0) |= volatilep;
7062 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7063 || modifier == EXPAND_CONST_ADDRESS
7064 || modifier == EXPAND_INITIALIZER)
7065 return op0;
7066 else if (target == 0)
7067 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7069 convert_move (target, op0, unsignedp);
7070 return target;
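/* An OBJ_TYPE_REF merely wraps its underlying expression with extra type
   information for the optimizers; its value is that of the wrapped
   expression.  */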
7073 case OBJ_TYPE_REF:
7074 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7076 case CALL_EXPR:
7077 /* Check for a built-in function. */
7078 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7079 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7080 == FUNCTION_DECL)
7081 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7083 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7084 == BUILT_IN_FRONTEND)
7085 return lang_hooks.expand_expr (exp, original_target,
7086 tmode, modifier,
7087 alt_rtl);
7088 else
7089 return expand_builtin (exp, target, subtarget, tmode, ignore);
7092 return expand_call (exp, target, ignore);
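/* NON_LVALUE_EXPR, NOP_EXPR and CONVERT_EXPR are all handled as
   conversions: expand the operand and then, if necessary, change its mode.
   A conversion to a union type instead stores the operand into the
   appropriate field of the union.  */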
7094 case NON_LVALUE_EXPR:
7095 case NOP_EXPR:
7096 case CONVERT_EXPR:
7097 if (TREE_OPERAND (exp, 0) == error_mark_node)
7098 return const0_rtx;
7100 if (TREE_CODE (type) == UNION_TYPE)
7102 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7104 /* If both input and output are BLKmode, this conversion isn't doing
7105 anything except possibly changing memory attributes. */
7106 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7108 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7109 modifier);
7111 result = copy_rtx (result);
7112 set_mem_attributes (result, exp, 0);
7113 return result;
7116 if (target == 0)
7118 if (TYPE_MODE (type) != BLKmode)
7119 target = gen_reg_rtx (TYPE_MODE (type));
7120 else
7121 target = assign_temp (type, 0, 1, 1);
7124 if (MEM_P (target))
7125 /* Store data into beginning of memory target. */
7126 store_expr (TREE_OPERAND (exp, 0),
7127 adjust_address (target, TYPE_MODE (valtype), 0),
7128 modifier == EXPAND_STACK_PARM);
7130 else
7132 gcc_assert (REG_P (target));
7134 /* Store this field into a union of the proper type. */
7135 store_field (target,
7136 MIN ((int_size_in_bytes (TREE_TYPE
7137 (TREE_OPERAND (exp, 0)))
7138 * BITS_PER_UNIT),
7139 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7140 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7141 type, 0);
7144 /* Return the entire union. */
7145 return target;
7148 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7150 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7151 modifier);
7153 /* If the signedness of the conversion differs and OP0 is
7154 a promoted SUBREG, clear that indication since we now
7155 have to do the proper extension. */
7156 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7157 && GET_CODE (op0) == SUBREG)
7158 SUBREG_PROMOTED_VAR_P (op0) = 0;
7160 return REDUCE_BIT_FIELD (op0);
7163 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7164 op0 = REDUCE_BIT_FIELD (op0);
7165 if (GET_MODE (op0) == mode)
7166 return op0;
7168 /* If OP0 is a constant, just convert it into the proper mode. */
7169 if (CONSTANT_P (op0))
7171 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7172 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7174 if (modifier == EXPAND_INITIALIZER)
7175 return simplify_gen_subreg (mode, op0, inner_mode,
7176 subreg_lowpart_offset (mode,
7177 inner_mode));
7178 else
7179 return convert_modes (mode, inner_mode, op0,
7180 TYPE_UNSIGNED (inner_type));
7183 if (modifier == EXPAND_INITIALIZER)
7184 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7186 if (target == 0)
7187 return
7188 convert_to_mode (mode, op0,
7189 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7190 else
7191 convert_move (target, op0,
7192 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7193 return target;
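/* A VIEW_CONVERT_EXPR reinterprets the bytes of its operand as a value of
   another type; no conversion of the underlying bits is performed.  */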
7195 case VIEW_CONVERT_EXPR:
7196 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7198 /* If the input and output modes are both the same, we are done.
7199 Otherwise, if neither mode is BLKmode and both are integral and within
7200 a word, we can use gen_lowpart. If neither is true, make sure the
7201 operand is in memory and convert the MEM to the new mode. */
7202 if (TYPE_MODE (type) == GET_MODE (op0))
7204 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7205 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7206 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7207 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7208 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7209 op0 = gen_lowpart (TYPE_MODE (type), op0);
7210 else if (!MEM_P (op0))
7212 /* If the operand is not a MEM, force it into memory. Since we
7213 are going to be changing the mode of the MEM, don't call
7214 force_const_mem for constants because we don't allow pool
7215 constants to change mode. */
7216 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7218 gcc_assert (!TREE_ADDRESSABLE (exp));
7220 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7221 target
7222 = assign_stack_temp_for_type
7223 (TYPE_MODE (inner_type),
7224 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7226 emit_move_insn (target, op0);
7227 op0 = target;
7230 /* At this point, OP0 is in the correct mode. If the output type is such
7231 that the operand is known to be aligned, indicate that it is.
7232 Otherwise, we need only be concerned about alignment for non-BLKmode
7233 results. */
7234 if (MEM_P (op0))
7236 op0 = copy_rtx (op0);
7238 if (TYPE_ALIGN_OK (type))
7239 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7240 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7241 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7243 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7244 HOST_WIDE_INT temp_size
7245 = MAX (int_size_in_bytes (inner_type),
7246 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7247 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7248 temp_size, 0, type);
7249 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7251 gcc_assert (!TREE_ADDRESSABLE (exp));
7253 if (GET_MODE (op0) == BLKmode)
7254 emit_block_move (new_with_op0_mode, op0,
7255 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7256 (modifier == EXPAND_STACK_PARM
7257 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7258 else
7259 emit_move_insn (new_with_op0_mode, op0);
7261 op0 = new;
7264 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7267 return op0;
7269 case PLUS_EXPR:
7270 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7271 something else, make sure we add the register to the constant and
7272 then to the other thing. This case can occur during strength
7273 reduction and doing it this way will produce better code if the
7274 frame pointer or argument pointer is eliminated.
7276 fold-const.c will ensure that the constant is always in the inner
7277 PLUS_EXPR, so the only case we need to do anything about is if
7278 sp, ap, or fp is our second argument, in which case we must swap
7279 the innermost first argument and our second argument. */
7281 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7282 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7283 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7284 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7285 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7286 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7288 tree t = TREE_OPERAND (exp, 1);
7290 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7291 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7294 /* If the result is to be ptr_mode and we are adding an integer to
7295 something, we might be forming a constant. So try to use
7296 plus_constant. If it produces a sum and we can't accept it,
7297 use force_operand. This allows P = &ARR[const] to generate
7298 efficient code on machines where a SYMBOL_REF is not a valid
7299 address.
7301 If this is an EXPAND_SUM call, always return the sum. */
7302 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7303 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7305 if (modifier == EXPAND_STACK_PARM)
7306 target = 0;
7307 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7308 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7309 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7311 rtx constant_part;
7313 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7314 EXPAND_SUM);
7315 /* Use immed_double_const to ensure that the constant is
7316 truncated according to the mode of OP1, then sign extended
7317 to a HOST_WIDE_INT. Using the constant directly can result
7318 in non-canonical RTL in a 64x32 cross compile. */
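/* For instance, with a 64-bit HOST_WIDE_INT and a 32-bit target, an SImode
all-ones value must be represented as (const_int -1); a raw
(const_int 0xffffffff) would not be canonical for that mode. */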
7319 constant_part
7320 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7321 (HOST_WIDE_INT) 0,
7322 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7323 op1 = plus_constant (op1, INTVAL (constant_part));
7324 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7325 op1 = force_operand (op1, target);
7326 return REDUCE_BIT_FIELD (op1);
7329 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7330 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7331 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7333 rtx constant_part;
7335 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7336 (modifier == EXPAND_INITIALIZER
7337 ? EXPAND_INITIALIZER : EXPAND_SUM));
7338 if (! CONSTANT_P (op0))
7340 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7341 VOIDmode, modifier);
7342 /* Return a PLUS if modifier says it's OK. */
7343 if (modifier == EXPAND_SUM
7344 || modifier == EXPAND_INITIALIZER)
7345 return simplify_gen_binary (PLUS, mode, op0, op1);
7346 goto binop2;
7348 /* Use immed_double_const to ensure that the constant is
7349 truncated according to the mode of OP0, then sign extended
7350 to a HOST_WIDE_INT. Using the constant directly can result
7351 in non-canonical RTL in a 64x32 cross compile. */
7352 constant_part
7353 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7354 (HOST_WIDE_INT) 0,
7355 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7356 op0 = plus_constant (op0, INTVAL (constant_part));
7357 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7358 op0 = force_operand (op0, target);
7359 return REDUCE_BIT_FIELD (op0);
7363 /* No sense saving up arithmetic to be done
7364 if it's all in the wrong mode to form part of an address.
7365 And force_operand won't know whether to sign-extend or
7366 zero-extend. */
7367 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7368 || mode != ptr_mode)
7370 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7371 subtarget, &op0, &op1, 0);
7372 if (op0 == const0_rtx)
7373 return op1;
7374 if (op1 == const0_rtx)
7375 return op0;
7376 goto binop2;
7379 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7380 subtarget, &op0, &op1, modifier);
7381 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7383 case MINUS_EXPR:
7384 /* For initializers, we are allowed to return a MINUS of two
7385 symbolic constants. Here we handle all cases when both operands
7386 are constant. */
7387 /* Handle difference of two symbolic constants,
7388 for the sake of an initializer. */
7389 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7390 && really_constant_p (TREE_OPERAND (exp, 0))
7391 && really_constant_p (TREE_OPERAND (exp, 1)))
7393 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7394 NULL_RTX, &op0, &op1, modifier);
7396 /* If the last operand is a CONST_INT, use plus_constant of
7397 the negated constant. Else make the MINUS. */
7398 if (GET_CODE (op1) == CONST_INT)
7399 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7400 else
7401 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7404 /* No sense saving up arithmetic to be done
7405 if it's all in the wrong mode to form part of an address.
7406 And force_operand won't know whether to sign-extend or
7407 zero-extend. */
7408 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7409 || mode != ptr_mode)
7410 goto binop;
7412 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7413 subtarget, &op0, &op1, modifier);
7415 /* Convert A - const to A + (-const). */
7416 if (GET_CODE (op1) == CONST_INT)
7418 op1 = negate_rtx (mode, op1);
7419 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7422 goto binop2;
7424 case MULT_EXPR:
7425 /* If first operand is constant, swap them.
7426 Thus the following special case checks need only
7427 check the second operand. */
7428 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7430 tree t1 = TREE_OPERAND (exp, 0);
7431 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7432 TREE_OPERAND (exp, 1) = t1;
7435 /* Attempt to return something suitable for generating an
7436 indexed address, for machines that support that. */
7438 if (modifier == EXPAND_SUM && mode == ptr_mode
7439 && host_integerp (TREE_OPERAND (exp, 1), 0))
7441 tree exp1 = TREE_OPERAND (exp, 1);
7443 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7444 EXPAND_SUM);
7446 if (!REG_P (op0))
7447 op0 = force_operand (op0, NULL_RTX);
7448 if (!REG_P (op0))
7449 op0 = copy_to_mode_reg (mode, op0);
7451 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7452 gen_int_mode (tree_low_cst (exp1, 0),
7453 TYPE_MODE (TREE_TYPE (exp1)))));
7456 if (modifier == EXPAND_STACK_PARM)
7457 target = 0;
7459 /* Check for multiplying things that have been extended
7460 from a narrower type. If this machine supports multiplying
7461 in that narrower type with a result in the desired type,
7462 do it that way, and avoid the explicit type-conversion. */
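/* For example, (int) (short) a * (int) (short) b can use a widening
multiply in the narrower mode, where the target provides one, instead
of widening both operands and multiplying in the wider mode. */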
7463 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7464 && TREE_CODE (type) == INTEGER_TYPE
7465 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7466 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7467 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7468 && int_fits_type_p (TREE_OPERAND (exp, 1),
7469 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7470 /* Don't use a widening multiply if a shift will do. */
7471 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7472 > HOST_BITS_PER_WIDE_INT)
7473 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7474 ||
7475 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7476 && (TYPE_PRECISION (TREE_TYPE
7477 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7478 == TYPE_PRECISION (TREE_TYPE
7479 (TREE_OPERAND
7480 (TREE_OPERAND (exp, 0), 0))))
7481 /* If both operands are extended, they must either both
7482 be zero-extended or both be sign-extended. */
7483 && (TYPE_UNSIGNED (TREE_TYPE
7484 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7485 == TYPE_UNSIGNED (TREE_TYPE
7486 (TREE_OPERAND
7487 (TREE_OPERAND (exp, 0), 0)))))))
7489 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7490 enum machine_mode innermode = TYPE_MODE (op0type);
7491 bool zextend_p = TYPE_UNSIGNED (op0type);
7492 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7493 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7495 if (mode == GET_MODE_WIDER_MODE (innermode))
7497 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7499 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7500 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7501 TREE_OPERAND (exp, 1),
7502 NULL_RTX, &op0, &op1, 0);
7503 else
7504 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7505 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7506 NULL_RTX, &op0, &op1, 0);
7507 goto binop3;
7509 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7510 && innermode == word_mode)
7512 rtx htem, hipart;
7513 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7514 NULL_RTX, VOIDmode, 0);
7515 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7516 op1 = convert_modes (innermode, mode,
7517 expand_expr (TREE_OPERAND (exp, 1),
7518 NULL_RTX, VOIDmode, 0),
7519 unsignedp);
7520 else
7521 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7522 NULL_RTX, VOIDmode, 0);
7523 temp = expand_binop (mode, other_optab, op0, op1, target,
7524 unsignedp, OPTAB_LIB_WIDEN);
7525 hipart = gen_highpart (innermode, temp);
7526 htem = expand_mult_highpart_adjust (innermode, hipart,
7527 op0, op1, hipart,
7528 zextend_p);
7529 if (htem != hipart)
7530 emit_move_insn (hipart, htem);
7531 return REDUCE_BIT_FIELD (temp);
7535 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7536 subtarget, &op0, &op1, 0);
7537 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7539 case TRUNC_DIV_EXPR:
7540 case FLOOR_DIV_EXPR:
7541 case CEIL_DIV_EXPR:
7542 case ROUND_DIV_EXPR:
7543 case EXACT_DIV_EXPR:
7544 if (modifier == EXPAND_STACK_PARM)
7545 target = 0;
7546 /* Possible optimization: compute the dividend with EXPAND_SUM
7547 then if the divisor is constant can optimize the case
7548 where some terms of the dividend have coeffs divisible by it. */
7549 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7550 subtarget, &op0, &op1, 0);
7551 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7553 case RDIV_EXPR:
7554 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7555 saving an expensive divide. If not, combine will rebuild the original
7556 computation. */
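/* E.g. several divisions by the same B then share a single 1/B. */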
7557 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7558 && TREE_CODE (type) == REAL_TYPE
7559 && !real_onep (TREE_OPERAND (exp, 0)))
7560 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7561 build2 (RDIV_EXPR, type,
7562 build_real (type, dconst1),
7563 TREE_OPERAND (exp, 1))),
7564 target, tmode, modifier);
7566 goto binop;
7568 case TRUNC_MOD_EXPR:
7569 case FLOOR_MOD_EXPR:
7570 case CEIL_MOD_EXPR:
7571 case ROUND_MOD_EXPR:
7572 if (modifier == EXPAND_STACK_PARM)
7573 target = 0;
7574 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7575 subtarget, &op0, &op1, 0);
7576 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7578 case FIX_ROUND_EXPR:
7579 case FIX_FLOOR_EXPR:
7580 case FIX_CEIL_EXPR:
7581 gcc_unreachable (); /* Not used for C. */
7583 case FIX_TRUNC_EXPR:
7584 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7585 if (target == 0 || modifier == EXPAND_STACK_PARM)
7586 target = gen_reg_rtx (mode);
7587 expand_fix (target, op0, unsignedp);
7588 return target;
7590 case FLOAT_EXPR:
7591 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7592 if (target == 0 || modifier == EXPAND_STACK_PARM)
7593 target = gen_reg_rtx (mode);
7594 /* expand_float can't figure out what to do if FROM has VOIDmode.
7595 So give it the correct mode. With -O, cse will optimize this. */
7596 if (GET_MODE (op0) == VOIDmode)
7597 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7598 op0);
7599 expand_float (target, op0,
7600 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7601 return target;
7603 case NEGATE_EXPR:
7604 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7605 if (modifier == EXPAND_STACK_PARM)
7606 target = 0;
7607 temp = expand_unop (mode,
7608 optab_for_tree_code (NEGATE_EXPR, type),
7609 op0, target, 0);
7610 gcc_assert (temp);
7611 return REDUCE_BIT_FIELD (temp);
7613 case ABS_EXPR:
7614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7615 if (modifier == EXPAND_STACK_PARM)
7616 target = 0;
7618 /* ABS_EXPR is not valid for complex arguments. */
7619 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7620 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7622 /* Unsigned abs is simply the operand. Testing here means we don't
7623 risk generating incorrect code below. */
7624 if (TYPE_UNSIGNED (type))
7625 return op0;
7627 return expand_abs (mode, op0, target, unsignedp,
7628 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7630 case MAX_EXPR:
7631 case MIN_EXPR:
7632 target = original_target;
7633 if (target == 0
7634 || modifier == EXPAND_STACK_PARM
7635 || (MEM_P (target) && MEM_VOLATILE_P (target))
7636 || GET_MODE (target) != mode
7637 || (REG_P (target)
7638 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7639 target = gen_reg_rtx (mode);
7640 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7641 target, &op0, &op1, 0);
7643 /* First try to do it with a special MIN or MAX instruction.
7644 If that does not win, use a conditional jump to select the proper
7645 value. */
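/* The jump fallback for MAX_EXPR is, in effect:
target = op0; if (target >= op1) goto done; target = op1; done:
and likewise with <= for MIN_EXPR. */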
7646 this_optab = optab_for_tree_code (code, type);
7647 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7648 OPTAB_WIDEN);
7649 if (temp != 0)
7650 return temp;
7652 /* At this point, a MEM target is no longer useful; we will get better
7653 code without it. */
7655 if (MEM_P (target))
7656 target = gen_reg_rtx (mode);
7658 /* If op1 was placed in target, swap op0 and op1. */
7659 if (target != op0 && target == op1)
7661 rtx tem = op0;
7662 op0 = op1;
7663 op1 = tem;
7666 if (target != op0)
7667 emit_move_insn (target, op0);
7669 op0 = gen_label_rtx ();
7671 /* If this mode is an integer too wide to compare properly,
7672 compare word by word. Rely on cse to optimize constant cases. */
7673 if (GET_MODE_CLASS (mode) == MODE_INT
7674 && ! can_compare_p (GE, mode, ccp_jump))
7676 if (code == MAX_EXPR)
7677 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7678 NULL_RTX, op0);
7679 else
7680 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7681 NULL_RTX, op0);
7683 else
7685 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7686 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7688 emit_move_insn (target, op1);
7689 emit_label (op0);
7690 return target;
7692 case BIT_NOT_EXPR:
7693 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7694 if (modifier == EXPAND_STACK_PARM)
7695 target = 0;
7696 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7697 gcc_assert (temp);
7698 return temp;
7700 /* ??? Can optimize bitwise operations with one arg constant.
7701 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7702 and (a bitwise1 b) bitwise2 b (etc)
7703 but that is probably not worth while. */
7705 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7706 boolean values when we want in all cases to compute both of them. In
7707 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7708 as actual zero-or-1 values and then bitwise anding. In cases where
7709 there cannot be any side effects, better code would be made by
7710 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7711 how to recognize those cases. */
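/* E.g. for (a != 0) & (b != 0) both flags are computed and then ANDed,
whereas a && b would branch around the evaluation of b. */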
7713 case TRUTH_AND_EXPR:
7714 code = BIT_AND_EXPR;
7715 case BIT_AND_EXPR:
7716 goto binop;
7718 case TRUTH_OR_EXPR:
7719 code = BIT_IOR_EXPR;
7720 case BIT_IOR_EXPR:
7721 goto binop;
7723 case TRUTH_XOR_EXPR:
7724 code = BIT_XOR_EXPR;
7725 case BIT_XOR_EXPR:
7726 goto binop;
7728 case LSHIFT_EXPR:
7729 case RSHIFT_EXPR:
7730 case LROTATE_EXPR:
7731 case RROTATE_EXPR:
7732 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7733 subtarget = 0;
7734 if (modifier == EXPAND_STACK_PARM)
7735 target = 0;
7736 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7737 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7738 unsignedp);
7740 /* Could determine the answer when only additive constants differ. Also,
7741 the addition of one can be handled by changing the condition. */
7742 case LT_EXPR:
7743 case LE_EXPR:
7744 case GT_EXPR:
7745 case GE_EXPR:
7746 case EQ_EXPR:
7747 case NE_EXPR:
7748 case UNORDERED_EXPR:
7749 case ORDERED_EXPR:
7750 case UNLT_EXPR:
7751 case UNLE_EXPR:
7752 case UNGT_EXPR:
7753 case UNGE_EXPR:
7754 case UNEQ_EXPR:
7755 case LTGT_EXPR:
7756 temp = do_store_flag (exp,
7757 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7758 tmode != VOIDmode ? tmode : mode, 0);
7759 if (temp != 0)
7760 return temp;
7762 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7763 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7764 && original_target
7765 && REG_P (original_target)
7766 && (GET_MODE (original_target)
7767 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7769 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7770 VOIDmode, 0);
7772 /* If temp is constant, we can just compute the result. */
7773 if (GET_CODE (temp) == CONST_INT)
7775 if (INTVAL (temp) != 0)
7776 emit_move_insn (target, const1_rtx);
7777 else
7778 emit_move_insn (target, const0_rtx);
7780 return target;
7783 if (temp != original_target)
7785 enum machine_mode mode1 = GET_MODE (temp);
7786 if (mode1 == VOIDmode)
7787 mode1 = tmode != VOIDmode ? tmode : mode;
7789 temp = copy_to_mode_reg (mode1, temp);
7792 op1 = gen_label_rtx ();
7793 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7794 GET_MODE (temp), unsignedp, op1);
7795 emit_move_insn (temp, const1_rtx);
7796 emit_label (op1);
7797 return temp;
7800 /* If no set-flag instruction, must generate a conditional store
7801 into a temporary variable. Drop through and handle this
7802 like && and ||. */
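/* That is, roughly: target = 0; if (!exp) goto over; target = 1; over: */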
7804 if (! ignore
7805 && (target == 0
7806 || modifier == EXPAND_STACK_PARM
7807 || ! safe_from_p (target, exp, 1)
7808 /* Make sure we don't have a hard reg (such as function's return
7809 value) live across basic blocks, if not optimizing. */
7810 || (!optimize && REG_P (target)
7811 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7812 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7814 if (target)
7815 emit_move_insn (target, const0_rtx);
7817 op1 = gen_label_rtx ();
7818 jumpifnot (exp, op1);
7820 if (target)
7821 emit_move_insn (target, const1_rtx);
7823 emit_label (op1);
7824 return ignore ? const0_rtx : target;
7826 case TRUTH_NOT_EXPR:
7827 if (modifier == EXPAND_STACK_PARM)
7828 target = 0;
7829 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7830 /* The parser is careful to generate TRUTH_NOT_EXPR
7831 only with operands that are always zero or one. */
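/* Given that, !X is simply X ^ 1, which is what the XOR below computes. */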
7832 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7833 target, 1, OPTAB_LIB_WIDEN);
7834 gcc_assert (temp);
7835 return temp;
7837 case STATEMENT_LIST:
7839 tree_stmt_iterator iter;
7841 gcc_assert (ignore);
7843 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7844 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7846 return const0_rtx;
7848 case COND_EXPR:
7849 /* If it's void, we don't need to worry about computing a value. */
7850 if (VOID_TYPE_P (TREE_TYPE (exp)))
7852 tree pred = TREE_OPERAND (exp, 0);
7853 tree then_ = TREE_OPERAND (exp, 1);
7854 tree else_ = TREE_OPERAND (exp, 2);
7856 gcc_assert (TREE_CODE (then_) == GOTO_EXPR
7857 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
7858 && TREE_CODE (else_) == GOTO_EXPR
7859 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
7861 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7862 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7865 /* Note that COND_EXPRs whose type is a structure or union
7866 are required to be constructed to contain assignments of
7867 a temporary variable, so that we can evaluate them here
7868 for side effect only. If type is void, we must do likewise. */
7870 gcc_assert (!TREE_ADDRESSABLE (type)
7871 && !ignore
7872 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7873 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7875 /* If we are not to produce a result, we have no target. Otherwise,
7876 if a target was specified use it; it will not be used as an
7877 intermediate target unless it is safe. If no target, use a
7878 temporary. */
7880 if (modifier != EXPAND_STACK_PARM
7881 && original_target
7882 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7883 && GET_MODE (original_target) == mode
7884 #ifdef HAVE_conditional_move
7885 && (! can_conditionally_move_p (mode)
7886 || REG_P (original_target))
7887 #endif
7888 && !MEM_P (original_target))
7889 temp = original_target;
7890 else
7891 temp = assign_temp (type, 0, 0, 1);
7893 do_pending_stack_adjust ();
7894 NO_DEFER_POP;
7895 op0 = gen_label_rtx ();
7896 op1 = gen_label_rtx ();
7897 jumpifnot (TREE_OPERAND (exp, 0), op0);
7898 store_expr (TREE_OPERAND (exp, 1), temp,
7899 modifier == EXPAND_STACK_PARM);
7901 emit_jump_insn (gen_jump (op1));
7902 emit_barrier ();
7903 emit_label (op0);
7904 store_expr (TREE_OPERAND (exp, 2), temp,
7905 modifier == EXPAND_STACK_PARM);
7907 emit_label (op1);
7908 OK_DEFER_POP;
7909 return temp;
7911 case VEC_COND_EXPR:
7912 target = expand_vec_cond_expr (exp, target);
7913 return target;
7915 case MODIFY_EXPR:
7917 tree lhs = TREE_OPERAND (exp, 0);
7918 tree rhs = TREE_OPERAND (exp, 1);
7920 gcc_assert (ignore);
7922 /* Check for |= or &= of a bitfield of size one into another bitfield
7923 of size 1. In this case, (unless we need the result of the
7924 assignment) we can do this more efficiently with a
7925 test followed by an assignment, if necessary.
7927 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7928 things change so we do, this code should be enhanced to
7929 support it. */
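/* E.g. s.a |= s.b (with one-bit fields) becomes "if (s.b) s.a = 1;",
and s.a &= s.b becomes "if (!s.b) s.a = 0;". */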
7930 if (TREE_CODE (lhs) == COMPONENT_REF
7931 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7932 || TREE_CODE (rhs) == BIT_AND_EXPR)
7933 && TREE_OPERAND (rhs, 0) == lhs
7934 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7935 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7936 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7938 rtx label = gen_label_rtx ();
7940 do_jump (TREE_OPERAND (rhs, 1),
7941 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7942 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7943 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7944 (TREE_CODE (rhs) == BIT_IOR_EXPR
7945 ? integer_one_node
7946 : integer_zero_node)));
7947 do_pending_stack_adjust ();
7948 emit_label (label);
7949 return const0_rtx;
7952 expand_assignment (lhs, rhs);
7954 return const0_rtx;
7957 case RETURN_EXPR:
7958 if (!TREE_OPERAND (exp, 0))
7959 expand_null_return ();
7960 else
7961 expand_return (TREE_OPERAND (exp, 0));
7962 return const0_rtx;
7964 case ADDR_EXPR:
7965 return expand_expr_addr_expr (exp, target, tmode, modifier);
7967 /* COMPLEX type for Extended Pascal & Fortran */
7968 case COMPLEX_EXPR:
7970 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7971 rtx insns;
7973 /* Get the rtx code of the operands. */
7974 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7975 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7977 if (! target)
7978 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7980 start_sequence ();
7982 /* Move the real (op0) and imaginary (op1) parts to their location. */
7983 emit_move_insn (gen_realpart (mode, target), op0);
7984 emit_move_insn (gen_imagpart (mode, target), op1);
7986 insns = get_insns ();
7987 end_sequence ();
7989 /* Complex construction should appear as a single unit. */
7990 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7991 each with a separate pseudo as destination.
7992 It's not correct for flow to treat them as a unit. */
7993 if (GET_CODE (target) != CONCAT)
7994 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7995 else
7996 emit_insn (insns);
7998 return target;
8001 case REALPART_EXPR:
8002 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8003 return gen_realpart (mode, op0);
8005 case IMAGPART_EXPR:
8006 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8007 return gen_imagpart (mode, op0);
8009 case RESX_EXPR:
8010 expand_resx_expr (exp);
8011 return const0_rtx;
8013 case TRY_CATCH_EXPR:
8014 case CATCH_EXPR:
8015 case EH_FILTER_EXPR:
8016 case TRY_FINALLY_EXPR:
8017 /* Lowered by tree-eh.c. */
8018 gcc_unreachable ();
8020 case WITH_CLEANUP_EXPR:
8021 case CLEANUP_POINT_EXPR:
8022 case TARGET_EXPR:
8023 case CASE_LABEL_EXPR:
8024 case VA_ARG_EXPR:
8025 case BIND_EXPR:
8026 case INIT_EXPR:
8027 case CONJ_EXPR:
8028 case COMPOUND_EXPR:
8029 case PREINCREMENT_EXPR:
8030 case PREDECREMENT_EXPR:
8031 case POSTINCREMENT_EXPR:
8032 case POSTDECREMENT_EXPR:
8033 case LOOP_EXPR:
8034 case EXIT_EXPR:
8035 case LABELED_BLOCK_EXPR:
8036 case EXIT_BLOCK_EXPR:
8037 case TRUTH_ANDIF_EXPR:
8038 case TRUTH_ORIF_EXPR:
8039 /* Lowered by gimplify.c. */
8040 gcc_unreachable ();
8042 case EXC_PTR_EXPR:
8043 return get_exception_pointer (cfun);
8045 case FILTER_EXPR:
8046 return get_exception_filter (cfun);
8048 case FDESC_EXPR:
8049 /* Function descriptors are not valid except for as
8050 initialization constants, and should not be expanded. */
8051 gcc_unreachable ();
8053 case SWITCH_EXPR:
8054 expand_case (exp);
8055 return const0_rtx;
8057 case LABEL_EXPR:
8058 expand_label (TREE_OPERAND (exp, 0));
8059 return const0_rtx;
8061 case ASM_EXPR:
8062 expand_asm_expr (exp);
8063 return const0_rtx;
8065 case WITH_SIZE_EXPR:
8066 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8067 have pulled out the size to use in whatever context it needed. */
8068 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8069 modifier, alt_rtl);
8071 case REALIGN_LOAD_EXPR:
8073 tree oprnd0 = TREE_OPERAND (exp, 0);
8074 tree oprnd1 = TREE_OPERAND (exp, 1);
8075 tree oprnd2 = TREE_OPERAND (exp, 2);
8076 rtx op2;
8078 this_optab = optab_for_tree_code (code, type);
8079 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8080 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8081 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8082 target, unsignedp);
8083 if (temp == 0)
8084 abort ();
8085 return temp;
8089 default:
8090 return lang_hooks.expand_expr (exp, original_target, tmode,
8091 modifier, alt_rtl);
8094 /* Here to do an ordinary binary operator. */
8095 binop:
8096 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8097 subtarget, &op0, &op1, 0);
8098 binop2:
8099 this_optab = optab_for_tree_code (code, type);
8100 binop3:
8101 if (modifier == EXPAND_STACK_PARM)
8102 target = 0;
8103 temp = expand_binop (mode, this_optab, op0, op1, target,
8104 unsignedp, OPTAB_LIB_WIDEN);
8105 gcc_assert (temp);
8106 return REDUCE_BIT_FIELD (temp);
8108 #undef REDUCE_BIT_FIELD
8110 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8111 signedness of TYPE), possibly returning the result in TARGET. */
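/* For an unsigned TYPE this masks EXP with (1 << precision) - 1; for a
signed TYPE it shifts left and then arithmetic-shifts right by the mode
bitsize minus the precision, so the value is properly sign extended. */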
8112 static rtx
8113 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8115 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8116 if (target && GET_MODE (target) != GET_MODE (exp))
8117 target = 0;
8118 if (TYPE_UNSIGNED (type))
8120 rtx mask;
8121 if (prec < HOST_BITS_PER_WIDE_INT)
8122 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8123 GET_MODE (exp));
8124 else
8125 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8126 ((unsigned HOST_WIDE_INT) 1
8127 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8128 GET_MODE (exp));
8129 return expand_and (GET_MODE (exp), exp, mask, target);
8131 else
8133 tree count = build_int_cst (NULL_TREE,
8134 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8135 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8136 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8140 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8141 when applied to the address of EXP produces an address known to be
8142 aligned more than BIGGEST_ALIGNMENT. */
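/* That is, OFFSET must have the form (- &EXP) & (ALIGN - 1), with ALIGN a
power of 2 larger than BIGGEST_ALIGNMENT, so that &EXP + OFFSET is &EXP
rounded up to a multiple of ALIGN. */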
8144 static int
8145 is_aligning_offset (tree offset, tree exp)
8147 /* Strip off any conversions. */
8148 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8149 || TREE_CODE (offset) == NOP_EXPR
8150 || TREE_CODE (offset) == CONVERT_EXPR)
8151 offset = TREE_OPERAND (offset, 0);
8153 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8154 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8155 if (TREE_CODE (offset) != BIT_AND_EXPR
8156 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8157 || compare_tree_int (TREE_OPERAND (offset, 1),
8158 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8159 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8160 return 0;
8162 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8163 It must be NEGATE_EXPR. Then strip any more conversions. */
8164 offset = TREE_OPERAND (offset, 0);
8165 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8166 || TREE_CODE (offset) == NOP_EXPR
8167 || TREE_CODE (offset) == CONVERT_EXPR)
8168 offset = TREE_OPERAND (offset, 0);
8170 if (TREE_CODE (offset) != NEGATE_EXPR)
8171 return 0;
8173 offset = TREE_OPERAND (offset, 0);
8174 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8175 || TREE_CODE (offset) == NOP_EXPR
8176 || TREE_CODE (offset) == CONVERT_EXPR)
8177 offset = TREE_OPERAND (offset, 0);
8179 /* This must now be the address of EXP. */
8180 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8183 /* Return the tree node if an ARG corresponds to a string constant or zero
8184 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8185 in bytes within the string that ARG is accessing. The type of the
8186 offset will be `sizetype'. */
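/* For example, given ARG == &buf + 2, where buf is a read-only, locally
bound VAR_DECL initialized to "hello", this returns the STRING_CST
"hello" and sets *PTR_OFFSET to 2. */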
8188 tree
8189 string_constant (tree arg, tree *ptr_offset)
8191 tree array, offset;
8192 STRIP_NOPS (arg);
8194 if (TREE_CODE (arg) == ADDR_EXPR)
8196 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8198 *ptr_offset = size_zero_node;
8199 return TREE_OPERAND (arg, 0);
8201 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8203 array = TREE_OPERAND (arg, 0);
8204 offset = size_zero_node;
8206 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8208 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8209 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8210 if (TREE_CODE (array) != STRING_CST
8211 && TREE_CODE (array) != VAR_DECL)
8212 return 0;
8214 else
8215 return 0;
8217 else if (TREE_CODE (arg) == PLUS_EXPR)
8219 tree arg0 = TREE_OPERAND (arg, 0);
8220 tree arg1 = TREE_OPERAND (arg, 1);
8222 STRIP_NOPS (arg0);
8223 STRIP_NOPS (arg1);
8225 if (TREE_CODE (arg0) == ADDR_EXPR
8226 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8227 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8229 array = TREE_OPERAND (arg0, 0);
8230 offset = arg1;
8232 else if (TREE_CODE (arg1) == ADDR_EXPR
8233 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8234 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8236 array = TREE_OPERAND (arg1, 0);
8237 offset = arg0;
8239 else
8240 return 0;
8242 else
8243 return 0;
8245 if (TREE_CODE (array) == STRING_CST)
8247 *ptr_offset = convert (sizetype, offset);
8248 return array;
8250 else if (TREE_CODE (array) == VAR_DECL)
8252 int length;
8254 /* Variables initialized to string literals can be handled too. */
8255 if (DECL_INITIAL (array) == NULL_TREE
8256 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8257 return 0;
8259 /* If they are read-only, non-volatile and bind locally. */
8260 if (! TREE_READONLY (array)
8261 || TREE_SIDE_EFFECTS (array)
8262 || ! targetm.binds_local_p (array))
8263 return 0;
8265 /* Avoid const char foo[4] = "abcde"; */
8266 if (DECL_SIZE_UNIT (array) == NULL_TREE
8267 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8268 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8269 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8270 return 0;
8272 /* If the variable is bigger than the string literal, OFFSET must be
8273 constant and within the bounds of the string literal. */
8274 offset = convert (sizetype, offset);
8275 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8276 && (! host_integerp (offset, 1)
8277 || compare_tree_int (offset, length) >= 0))
8278 return 0;
8280 *ptr_offset = offset;
8281 return DECL_INITIAL (array);
8284 return 0;
8287 /* Generate code to calculate EXP using a store-flag instruction
8288 and return an rtx for the result. EXP is either a comparison
8289 or a TRUTH_NOT_EXPR whose operand is a comparison.
8291 If TARGET is nonzero, store the result there if convenient.
8293 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8294 cheap.
8296 Return zero if there is no suitable set-flag instruction
8297 available on this machine.
8299 Once expand_expr has been called on the arguments of the comparison,
8300 we are committed to doing the store flag, since it is not safe to
8301 re-evaluate the expression. We emit the store-flag insn by calling
8302 emit_store_flag, but only expand the arguments if we have a reason
8303 to believe that emit_store_flag will be successful. If we think that
8304 it will, but it isn't, we have to simulate the store-flag with a
8305 set/jump/set sequence. */
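/* The simulated sequence is, in effect:
target = 1; if (op0 <cond> op1) goto done; target = 0; done:
with the two constants swapped when the result must be inverted. */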
8307 static rtx
8308 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8310 enum rtx_code code;
8311 tree arg0, arg1, type;
8312 tree tem;
8313 enum machine_mode operand_mode;
8314 int invert = 0;
8315 int unsignedp;
8316 rtx op0, op1;
8317 enum insn_code icode;
8318 rtx subtarget = target;
8319 rtx result, label;
8321 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8322 result at the end. We can't simply invert the test since it would
8323 have already been inverted if it were valid. This case occurs for
8324 some floating-point comparisons. */
8326 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8327 invert = 1, exp = TREE_OPERAND (exp, 0);
8329 arg0 = TREE_OPERAND (exp, 0);
8330 arg1 = TREE_OPERAND (exp, 1);
8332 /* Don't crash if the comparison was erroneous. */
8333 if (arg0 == error_mark_node || arg1 == error_mark_node)
8334 return const0_rtx;
8336 type = TREE_TYPE (arg0);
8337 operand_mode = TYPE_MODE (type);
8338 unsignedp = TYPE_UNSIGNED (type);
8340 /* We won't bother with BLKmode store-flag operations because it would mean
8341 passing a lot of information to emit_store_flag. */
8342 if (operand_mode == BLKmode)
8343 return 0;
8345 /* We won't bother with store-flag operations involving function pointers
8346 when function pointers must be canonicalized before comparisons. */
8347 #ifdef HAVE_canonicalize_funcptr_for_compare
8348 if (HAVE_canonicalize_funcptr_for_compare
8349 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8350 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8351 == FUNCTION_TYPE))
8352 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8353 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8354 == FUNCTION_TYPE))))
8355 return 0;
8356 #endif
8358 STRIP_NOPS (arg0);
8359 STRIP_NOPS (arg1);
8361 /* Get the rtx comparison code to use. We know that EXP is a comparison
8362 operation of some type. Some comparisons against 1 and -1 can be
8363 converted to comparisons with zero. Do so here so that the tests
8364 below will be aware that we have a comparison with zero. These
8365 tests will not catch constants in the first operand, but constants
8366 are rarely passed as the first operand. */
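/* E.g. X < 1 becomes X <= 0, and a signed X <= -1 becomes X < 0. */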
8368 switch (TREE_CODE (exp))
8370 case EQ_EXPR:
8371 code = EQ;
8372 break;
8373 case NE_EXPR:
8374 code = NE;
8375 break;
8376 case LT_EXPR:
8377 if (integer_onep (arg1))
8378 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8379 else
8380 code = unsignedp ? LTU : LT;
8381 break;
8382 case LE_EXPR:
8383 if (! unsignedp && integer_all_onesp (arg1))
8384 arg1 = integer_zero_node, code = LT;
8385 else
8386 code = unsignedp ? LEU : LE;
8387 break;
8388 case GT_EXPR:
8389 if (! unsignedp && integer_all_onesp (arg1))
8390 arg1 = integer_zero_node, code = GE;
8391 else
8392 code = unsignedp ? GTU : GT;
8393 break;
8394 case GE_EXPR:
8395 if (integer_onep (arg1))
8396 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8397 else
8398 code = unsignedp ? GEU : GE;
8399 break;
8401 case UNORDERED_EXPR:
8402 code = UNORDERED;
8403 break;
8404 case ORDERED_EXPR:
8405 code = ORDERED;
8406 break;
8407 case UNLT_EXPR:
8408 code = UNLT;
8409 break;
8410 case UNLE_EXPR:
8411 code = UNLE;
8412 break;
8413 case UNGT_EXPR:
8414 code = UNGT;
8415 break;
8416 case UNGE_EXPR:
8417 code = UNGE;
8418 break;
8419 case UNEQ_EXPR:
8420 code = UNEQ;
8421 break;
8422 case LTGT_EXPR:
8423 code = LTGT;
8424 break;
8426 default:
8427 gcc_unreachable ();
8430 /* Put a constant second. */
8431 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8433 tem = arg0; arg0 = arg1; arg1 = tem;
8434 code = swap_condition (code);
8437 /* If this is an equality or inequality test of a single bit, we can
8438 do this by shifting the bit being tested to the low-order bit and
8439 masking the result with the constant 1. If the condition was EQ,
8440 we xor it with 1. This does not require an scc insn and is faster
8441 than an scc insn even if we have it.
8443 The code to make this transformation was moved into fold_single_bit_test,
8444 so we just call into the folder and expand its result. */
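/* E.g. (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0 becomes
((x >> 3) & 1) ^ 1. */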
8446 if ((code == NE || code == EQ)
8447 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8448 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8450 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8451 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8452 arg0, arg1, type),
8453 target, VOIDmode, EXPAND_NORMAL);
8456 /* Now see if we are likely to be able to do this. Return if not. */
8457 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8458 return 0;
8460 icode = setcc_gen_code[(int) code];
8461 if (icode == CODE_FOR_nothing
8462 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8464 /* We can only do this if it is one of the special cases that
8465 can be handled without an scc insn. */
8466 if ((code == LT && integer_zerop (arg1))
8467 || (! only_cheap && code == GE && integer_zerop (arg1)))
8468 ;
8469 else if (BRANCH_COST >= 0
8470 && ! only_cheap && (code == NE || code == EQ)
8471 && TREE_CODE (type) != REAL_TYPE
8472 && ((abs_optab->handlers[(int) operand_mode].insn_code
8473 != CODE_FOR_nothing)
8474 || (ffs_optab->handlers[(int) operand_mode].insn_code
8475 != CODE_FOR_nothing)))
8476 ;
8477 else
8478 return 0;
8481 if (! get_subtarget (target)
8482 || GET_MODE (subtarget) != operand_mode)
8483 subtarget = 0;
8485 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8487 if (target == 0)
8488 target = gen_reg_rtx (mode);
8490 result = emit_store_flag (target, code, op0, op1,
8491 operand_mode, unsignedp, 1);
8493 if (result)
8495 if (invert)
8496 result = expand_binop (mode, xor_optab, result, const1_rtx,
8497 result, 0, OPTAB_LIB_WIDEN);
8498 return result;
8501 /* If this failed, we have to do this with set/compare/jump/set code. */
8502 if (!REG_P (target)
8503 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8504 target = gen_reg_rtx (GET_MODE (target));
8506 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8507 result = compare_from_rtx (op0, op1, code, unsignedp,
8508 operand_mode, NULL_RTX);
8509 if (GET_CODE (result) == CONST_INT)
8510 return (((result == const0_rtx && ! invert)
8511 || (result != const0_rtx && invert))
8512 ? const0_rtx : const1_rtx);
8514 /* The code of RESULT may not match CODE if compare_from_rtx
8515 decided to swap its operands and reverse the original code.
8517 We know that compare_from_rtx returns either a CONST_INT or
8518 a new comparison code, so it is safe to just extract the
8519 code from RESULT. */
8520 code = GET_CODE (result);
8522 label = gen_label_rtx ();
8523 gcc_assert (bcc_gen_fctn[(int) code]);
8525 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8526 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8527 emit_label (label);
8529 return target;
8533 /* Stubs in case we haven't got a casesi insn. */
8534 #ifndef HAVE_casesi
8535 # define HAVE_casesi 0
8536 # define gen_casesi(a, b, c, d, e) (0)
8537 # define CODE_FOR_casesi CODE_FOR_nothing
8538 #endif
8540 /* If the machine does not have a case insn that compares the bounds,
8541 this means extra overhead for dispatch tables, which raises the
8542 threshold for using them. */
8543 #ifndef CASE_VALUES_THRESHOLD
8544 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8545 #endif /* CASE_VALUES_THRESHOLD */
8547 unsigned int
8548 case_values_threshold (void)
8550 return CASE_VALUES_THRESHOLD;
8553 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8554 0 otherwise (i.e. if there is no casesi instruction). */
8555 int
8556 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8557 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8559 enum machine_mode index_mode = SImode;
8560 int index_bits = GET_MODE_BITSIZE (index_mode);
8561 rtx op1, op2, index;
8562 enum machine_mode op_mode;
8564 if (! HAVE_casesi)
8565 return 0;
8567 /* Convert the index to SImode. */
8568 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8570 enum machine_mode omode = TYPE_MODE (index_type);
8571 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8573 /* We must handle the endpoints in the original mode. */
8574 index_expr = build2 (MINUS_EXPR, index_type,
8575 index_expr, minval);
8576 minval = integer_zero_node;
8577 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8578 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8579 omode, 1, default_label);
8580 /* Now we can safely truncate. */
8581 index = convert_to_mode (index_mode, index, 0);
8583 else
8585 if (TYPE_MODE (index_type) != index_mode)
8587 index_expr = convert (lang_hooks.types.type_for_size
8588 (index_bits, 0), index_expr);
8589 index_type = TREE_TYPE (index_expr);
8592 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8595 do_pending_stack_adjust ();
8597 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8598 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8599 (index, op_mode))
8600 index = copy_to_mode_reg (op_mode, index);
8602 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8604 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8605 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8606 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8607 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8608 (op1, op_mode))
8609 op1 = copy_to_mode_reg (op_mode, op1);
8611 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8613 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8614 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8615 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8616 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8617 (op2, op_mode))
8618 op2 = copy_to_mode_reg (op_mode, op2);
8620 emit_jump_insn (gen_casesi (index, op1, op2,
8621 table_label, default_label));
8622 return 1;
8625 /* Attempt to generate a tablejump instruction; same concept. */
8626 #ifndef HAVE_tablejump
8627 #define HAVE_tablejump 0
8628 #define gen_tablejump(x, y) (0)
8629 #endif
8631 /* Subroutine of the next function.
8633 INDEX is the value being switched on, with the lowest value
8634 in the table already subtracted.
8635 MODE is its expected mode (needed if INDEX is constant).
8636 RANGE is the length of the jump table.
8637 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8639 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8640 index value is out of range. */
8642 static void
8643 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8644 rtx default_label)
8646 rtx temp, vector;
8648 if (INTVAL (range) > cfun->max_jumptable_ents)
8649 cfun->max_jumptable_ents = INTVAL (range);
8651 /* Do an unsigned comparison (in the proper mode) between the index
8652 expression and the value which represents the length of the range.
8653 Since we just finished subtracting the lower bound of the range
8654 from the index expression, this comparison allows us to simultaneously
8655 check that the original index expression value is both greater than
8656 or equal to the minimum value of the range and less than or equal to
8657 the maximum value of the range. */
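/* An original value below the minimum wraps around to a very large
unsigned INDEX, so this single unsigned comparison also rejects
values below the range. */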
8659 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8660 default_label);
8662 /* If index is in range, it must fit in Pmode.
8663 Convert to Pmode so we can index with it. */
8664 if (mode != Pmode)
8665 index = convert_to_mode (Pmode, index, 1);
8667 /* Don't let a MEM slip through, because then INDEX that comes
8668 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8669 and break_out_memory_refs will go to work on it and mess it up. */
8670 #ifdef PIC_CASE_VECTOR_ADDRESS
8671 if (flag_pic && !REG_P (index))
8672 index = copy_to_mode_reg (Pmode, index);
8673 #endif
8675 /* If flag_force_addr were to affect this address
8676 it could interfere with the tricky assumptions made
8677 about addresses that contain label-refs,
8678 which may be valid only very near the tablejump itself. */
8679 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8680 GET_MODE_SIZE, because this indicates how large insns are. The other
8681 uses should all be Pmode, because they are addresses. This code
8682 could fail if addresses and insns are not the same size. */
8683 index = gen_rtx_PLUS (Pmode,
8684 gen_rtx_MULT (Pmode, index,
8685 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8686 gen_rtx_LABEL_REF (Pmode, table_label));
8687 #ifdef PIC_CASE_VECTOR_ADDRESS
8688 if (flag_pic)
8689 index = PIC_CASE_VECTOR_ADDRESS (index);
8690 else
8691 #endif
8692 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8693 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8694 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8695 convert_move (temp, vector, 0);
8697 emit_jump_insn (gen_tablejump (temp, table_label));
8699 /* If we are generating PIC code or if the table is PC-relative, the
8700 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8701 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8702 emit_barrier ();
8705 int
8706 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8707 rtx table_label, rtx default_label)
8709 rtx index;
8711 if (! HAVE_tablejump)
8712 return 0;
8714 index_expr = fold (build2 (MINUS_EXPR, index_type,
8715 convert (index_type, index_expr),
8716 convert (index_type, minval)));
8717 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8718 do_pending_stack_adjust ();
8720 do_tablejump (index, TYPE_MODE (index_type),
8721 convert_modes (TYPE_MODE (index_type),
8722 TYPE_MODE (TREE_TYPE (range)),
8723 expand_expr (range, NULL_RTX,
8724 VOIDmode, 0),
8725 TYPE_UNSIGNED (TREE_TYPE (range))),
8726 table_label, default_label);
8727 return 1;
8730 /* Nonzero if the mode is a valid vector mode for this architecture.
8731 This returns nonzero even if there is no hardware support for the
8732 vector mode, but we can emulate with narrower modes. */
8734 int
8735 vector_mode_valid_p (enum machine_mode mode)
8737 enum mode_class class = GET_MODE_CLASS (mode);
8738 enum machine_mode innermode;
8740 /* Doh! What's going on? */
8741 if (class != MODE_VECTOR_INT
8742 && class != MODE_VECTOR_FLOAT)
8743 return 0;
8745 /* Hardware support. Woo hoo! */
8746 if (targetm.vector_mode_supported_p (mode))
8747 return 1;
8749 innermode = GET_MODE_INNER (mode);
8751 /* We should probably return 1 if requesting V4DI and we have no DI,
8752 but do have V2DI, though this is probably very unlikely. */
8754 /* If we have support for the inner mode, we can safely emulate it.
8755 We may not have V2DI, but we can emulate with a pair of DIs. */
8756 return targetm.scalar_mode_supported_p (innermode);
8759 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8760 static rtx
8761 const_vector_from_tree (tree exp)
8763 rtvec v;
8764 int units, i;
8765 tree link, elt;
8766 enum machine_mode inner, mode;
8768 mode = TYPE_MODE (TREE_TYPE (exp));
8770 if (initializer_zerop (exp))
8771 return CONST0_RTX (mode);
8773 units = GET_MODE_NUNITS (mode);
8774 inner = GET_MODE_INNER (mode);
8776 v = rtvec_alloc (units);
8778 link = TREE_VECTOR_CST_ELTS (exp);
8779 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8781 elt = TREE_VALUE (link);
8783 if (TREE_CODE (elt) == REAL_CST)
8784 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8785 inner);
8786 else
8787 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8788 TREE_INT_CST_HIGH (elt),
8789 inner);
8792 /* Initialize remaining elements to 0. */
8793 for (; i < units; ++i)
8794 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8796 return gen_rtx_CONST_VECTOR (mode, v);
8798 #include "gt-expr.h"