/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
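
/* For illustration only (the numbers below are hypothetical, not taken
   from any particular target): with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 15, a word-aligned 32-byte copy needs 32 / 8 == 4 move
   insns, so MOVE_BY_PIECES_P returns nonzero and the copy is expanded
   inline rather than through a block-move pattern or the memcpy
   libcall.  */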
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
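/* Illustrative use (not from this file): to widen an unsigned SImode
   value SRC into a DImode register DST a caller emits
     convert_move (dst, src, 1);
   and with UNSIGNEDP nonzero the extension path below records a
   ZERO_EXTEND equivalent rather than SIGN_EXTEND.  */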
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
		  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
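/* Note for callers (illustrative): an integer CONST_INT always carries
   VOIDmode, so GET_MODE (X) tells us nothing about its width; pass the
   real OLDMODE when converting such a constant, otherwise the
   sign/zero-extension choice below has to assume X already has the
   right high-order bits.  */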
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
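
/* Worked example (hypothetical host/target numbers): with an 8-byte
   HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) is 16, so a target with
   MOVE_MAX_PIECES == 16 gets STORE_MAX_PIECES == MIN (16, 16) == 16,
   while MOVE_MAX_PIECES == 32 would still be capped at 16 bytes, the
   widest immediate constant GCC can represent internally.  */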
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
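/* Illustrative caller sketch (not from this file): a mempcpy-style
   expansion would use
     move_by_pieces (to, from, len, align, 1);
   and use the returned MEM, which addresses the byte just past the
   last one copied; a plain memcpy-style caller passes ENDP == 0 and
   ignores the return value.  */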
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
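/* Illustrative call (not from this file): expanding a structure
   assignment might emit
     emit_block_move (dst_mem, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);
   where dst_mem and src_mem are BLKmode MEMs.  With BLOCK_OP_CALL_PARM
   the copy feeds an outgoing argument, so the memcpy libcall is used
   only when block_move_libcall_safe_for_call_parm says it cannot
   clobber arguments already set up.  */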
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
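/* The generated RTL behaves like the following C loop, one byte per
   iteration (illustrative equivalent only):

     for (i = 0; i < size; i++)
       ((char *) x)[i] = ((char *) y)[i];
*/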
static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
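/* Illustrative example (register numbers made up): given
     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])
   the clone returned below is a PARALLEL of the same shape whose
   EXPR_LIST entries carry fresh DImode pseudos in place of regs 3
   and 4, with the byte offsets 0 and 8 left unchanged.  */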
rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */
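/* For concreteness (illustrative only): DST might look like
     (parallel [(expr_list (reg:DI 10) (const_int 0))
		(expr_list (reg:DI 11) (const_int 8))])
   describing a 16-byte value split across two registers; each
   EXPR_LIST pairs a destination register with its byte offset into
   ORIG_SRC, and the loop below extracts the corresponding piece.  */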
void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src) && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load (dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */
void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
1878 /* Generate code to copy a BLKmode object of TYPE out of a
1879 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1880 is null, a stack temporary is created. TGTBLK is returned.
1882 The purpose of this routine is to handle functions that return
1883 BLKmode structures in registers. Some machines (the PA for example)
1884 want to return all small structures in registers regardless of the
1885 structure's alignment. */
1888 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1890 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1891 rtx src = NULL, dst = NULL;
1892 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1893 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1895 if (tgtblk == 0)
1897 tgtblk = assign_temp (build_qualified_type (type,
1898 (TYPE_QUALS (type)
1899 | TYPE_QUAL_CONST)),
1900 0, 1, 1);
1901 preserve_temp_slots (tgtblk);
1904 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1905 into a new pseudo which is a full word. */
1907 if (GET_MODE (srcreg) != BLKmode
1908 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1909 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1911 /* If the structure doesn't take up a whole number of words, see whether
1912 SRCREG is padded on the left or on the right. If it's on the left,
1913 set PADDING_CORRECTION to the number of bits to skip.
1915 In most ABIs, the structure will be returned at the least significant end of
1916 the register, which translates to right padding on little-endian
1917 targets and left padding on big-endian targets. The opposite
1918 holds if the structure is returned at the most significant
1919 end of the register. */
1920 if (bytes % UNITS_PER_WORD != 0
1921 && (targetm.calls.return_in_msb (type)
1922 ? !BYTES_BIG_ENDIAN
1923 : BYTES_BIG_ENDIAN))
1924 padding_correction
1925 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
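/* For example (illustrative): with 32-bit words, a 6-byte structure
   returned at the least significant end of the registers on a
   big-endian target is preceded by 32 - (6 % 4) * 8 = 16 bits of
   padding, so PADDING_CORRECTION is 16.  */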
1927 /* Copy the structure BITSIZE bits at a time.
1929 We could probably emit more efficient code for machines which do not use
1930 strict alignment, but it doesn't seem worth the effort at the current
1931 time. */
1932 for (bitpos = 0, xbitpos = padding_correction;
1933 bitpos < bytes * BITS_PER_UNIT;
1934 bitpos += bitsize, xbitpos += bitsize)
1936 /* We need a new source operand each time xbitpos is on a
1937 word boundary and when xbitpos == padding_correction
1938 (the first time through). */
1939 if (xbitpos % BITS_PER_WORD == 0
1940 || xbitpos == padding_correction)
1941 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1942 GET_MODE (srcreg));
1944 /* We need a new destination operand each time bitpos is on
1945 a word boundary. */
1946 if (bitpos % BITS_PER_WORD == 0)
1947 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
1949 /* Use xbitpos for the source extraction (right justified) and
1950 bitpos for the destination store (left justified). */
1951 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1952 extract_bit_field (src, bitsize,
1953 xbitpos % BITS_PER_WORD, 1,
1954 NULL_RTX, word_mode, word_mode));
1957 return tgtblk;
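#if 0
/* A minimal usage sketch of the routine above (kept under "#if 0";
   the names RET_REG and TYPE are hypothetical).  Passing a null
   target asks copy_blkmode_from_reg to allocate the stack temporary
   itself, and the temporary is returned.  */
static rtx
example_copy_struct_return (rtx ret_reg, tree type)
{
  return copy_blkmode_from_reg (NULL_RTX, ret_reg, type);
}
#endif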
1960 /* Add a USE expression for REG to the (possibly empty) list pointed
1961 to by CALL_FUSAGE. REG must denote a hard register. */
1963 void
1964 use_reg (rtx *call_fusage, rtx reg)
1966 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
1968 *call_fusage
1969 = gen_rtx_EXPR_LIST (VOIDmode,
1970 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1973 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1974 starting at REGNO. All of these registers must be hard registers. */
1976 void
1977 use_regs (rtx *call_fusage, int regno, int nregs)
1979 int i;
1981 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
1983 for (i = 0; i < nregs; i++)
1984 use_reg (call_fusage, regno_reg_rtx[regno + i]);
1987 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1988 PARALLEL REGS. This is for calls that pass values in multiple
1989 non-contiguous locations. The Irix 6 ABI has examples of this. */
1991 void
1992 use_group_regs (rtx *call_fusage, rtx regs)
1994 int i;
1996 for (i = 0; i < XVECLEN (regs, 0); i++)
1998 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2000 /* A NULL entry means the parameter goes both on the stack and in
2001 registers. This can also be a MEM for targets that pass values
2002 partially on the stack and partially in registers. */
2003 if (reg != 0 && REG_P (reg))
2004 use_reg (call_fusage, reg);
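#if 0
/* A minimal usage sketch (kept under "#if 0"; FIRST_REGNO and PAR are
   hypothetical): record that a call reads two consecutive hard
   argument registers and, if PAR is a PARALLEL, its registers too.  */
static void
example_record_call_usage (rtx *call_fusage, int first_regno, rtx par)
{
  use_regs (call_fusage, first_regno, 2);
  if (par != 0 && GET_CODE (par) == PARALLEL)
    use_group_regs (call_fusage, par);
}
#endif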
2009 /* Determine whether the LEN bytes generated by CONSTFUN can be
2010 stored to memory using several move instructions. CONSTFUNDATA is
2011 a pointer which will be passed as argument in every CONSTFUN call.
2012 ALIGN is maximum alignment we can assume. Return nonzero if a
2013 call to store_by_pieces should succeed. */
2016 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2017 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2018 void *constfundata, unsigned int align)
2020 unsigned HOST_WIDE_INT l;
2021 unsigned int max_size;
2022 HOST_WIDE_INT offset = 0;
2023 enum machine_mode mode, tmode;
2024 enum insn_code icode;
2025 int reverse;
2026 rtx cst;
2028 if (len == 0)
2029 return 1;
2031 if (! STORE_BY_PIECES_P (len, align))
2032 return 0;
2034 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2035 if (align >= GET_MODE_ALIGNMENT (tmode))
2036 align = GET_MODE_ALIGNMENT (tmode);
2037 else
2039 enum machine_mode xmode;
2041 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2042 tmode != VOIDmode;
2043 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2044 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2045 || SLOW_UNALIGNED_ACCESS (tmode, align))
2046 break;
2048 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2051 /* We would first store what we can in the largest integer mode, then go to
2052 successively smaller modes. */
2054 for (reverse = 0;
2055 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2056 reverse++)
2058 l = len;
2059 mode = VOIDmode;
2060 max_size = STORE_MAX_PIECES + 1;
2061 while (max_size > 1)
2063 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2064 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2065 if (GET_MODE_SIZE (tmode) < max_size)
2066 mode = tmode;
2068 if (mode == VOIDmode)
2069 break;
2071 icode = mov_optab->handlers[(int) mode].insn_code;
2072 if (icode != CODE_FOR_nothing
2073 && align >= GET_MODE_ALIGNMENT (mode))
2075 unsigned int size = GET_MODE_SIZE (mode);
2077 while (l >= size)
2079 if (reverse)
2080 offset -= size;
2082 cst = (*constfun) (constfundata, offset, mode);
2083 if (!LEGITIMATE_CONSTANT_P (cst))
2084 return 0;
2086 if (!reverse)
2087 offset += size;
2089 l -= size;
2093 max_size = GET_MODE_SIZE (mode);
2096 /* The code above should have handled everything. */
2097 gcc_assert (!l);
2100 return 1;
2103 /* Generate several move instructions to store LEN bytes generated by
2104 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2105 pointer which will be passed as argument in every CONSTFUN call.
2106 ALIGN is maximum alignment we can assume.
2107 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2108 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2109 stpcpy. */
2112 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2113 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2114 void *constfundata, unsigned int align, int endp)
2116 struct store_by_pieces data;
2118 if (len == 0)
2120 gcc_assert (endp != 2);
2121 return to;
2124 gcc_assert (STORE_BY_PIECES_P (len, align));
2125 data.constfun = constfun;
2126 data.constfundata = constfundata;
2127 data.len = len;
2128 data.to = to;
2129 store_by_pieces_1 (&data, align);
2130 if (endp)
2132 rtx to1;
2134 gcc_assert (!data.reverse);
2135 if (data.autinc_to)
2137 if (endp == 2)
2139 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2140 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2141 else
2142 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2143 -1));
2145 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2146 data.offset);
2148 else
2150 if (endp == 2)
2151 --data.offset;
2152 to1 = adjust_address (data.to, QImode, data.offset);
2154 return to1;
2156 else
2157 return data.to;
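#if 0
/* A minimal sketch of the CONSTFUN protocol described above (kept
   under "#if 0"; both functions are hypothetical examples).  The
   callback is asked which constant covers the piece at OFFSET in mode
   MODE; here it always answers zero, like clear_by_pieces_1 below,
   and the caller pairs can_store_by_pieces with store_by_pieces.  */
static rtx
example_zero_constfun (void *data ATTRIBUTE_UNUSED,
                       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                       enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

static void
example_clear_block (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, example_zero_constfun, NULL, align))
    store_by_pieces (to, len, example_zero_constfun, NULL, align, 0);
}
#endif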
2160 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2161 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2163 static void
2164 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2166 struct store_by_pieces data;
2168 if (len == 0)
2169 return;
2171 data.constfun = clear_by_pieces_1;
2172 data.constfundata = NULL;
2173 data.len = len;
2174 data.to = to;
2175 store_by_pieces_1 (&data, align);
2178 /* Callback routine for clear_by_pieces.
2179 Return const0_rtx unconditionally. */
2181 static rtx
2182 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2183 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2184 enum machine_mode mode ATTRIBUTE_UNUSED)
2186 return const0_rtx;
2189 /* Subroutine of clear_by_pieces and store_by_pieces.
2190 Generate several move instructions to store LEN bytes of block TO. (A MEM
2191 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2193 static void
2194 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2195 unsigned int align ATTRIBUTE_UNUSED)
2197 rtx to_addr = XEXP (data->to, 0);
2198 unsigned int max_size = STORE_MAX_PIECES + 1;
2199 enum machine_mode mode = VOIDmode, tmode;
2200 enum insn_code icode;
2202 data->offset = 0;
2203 data->to_addr = to_addr;
2204 data->autinc_to
2205 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2206 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2208 data->explicit_inc_to = 0;
2209 data->reverse
2210 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2211 if (data->reverse)
2212 data->offset = data->len;
2214 /* If storing requires more than two move insns,
2215 copy addresses to registers (to make displacements shorter)
2216 and use post-increment if available. */
2217 if (!data->autinc_to
2218 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2220 /* Determine the main mode we'll be using. */
2221 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2222 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2223 if (GET_MODE_SIZE (tmode) < max_size)
2224 mode = tmode;
2226 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2228 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2229 data->autinc_to = 1;
2230 data->explicit_inc_to = -1;
2233 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2234 && ! data->autinc_to)
2236 data->to_addr = copy_addr_to_reg (to_addr);
2237 data->autinc_to = 1;
2238 data->explicit_inc_to = 1;
2241 if ( !data->autinc_to && CONSTANT_P (to_addr))
2242 data->to_addr = copy_addr_to_reg (to_addr);
2245 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2246 if (align >= GET_MODE_ALIGNMENT (tmode))
2247 align = GET_MODE_ALIGNMENT (tmode);
2248 else
2250 enum machine_mode xmode;
2252 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2253 tmode != VOIDmode;
2254 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2255 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2256 || SLOW_UNALIGNED_ACCESS (tmode, align))
2257 break;
2259 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2262 /* First store what we can in the largest integer mode, then go to
2263 successively smaller modes. */
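  /* For instance (illustrative): with STORE_MAX_PIECES == 8, LEN == 11
     and alignment permitting, the loop below emits one 8-byte store,
     then one 2-byte store, then one 1-byte store (8 + 2 + 1).  */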
2265 while (max_size > 1)
2267 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2268 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2269 if (GET_MODE_SIZE (tmode) < max_size)
2270 mode = tmode;
2272 if (mode == VOIDmode)
2273 break;
2275 icode = mov_optab->handlers[(int) mode].insn_code;
2276 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2277 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2279 max_size = GET_MODE_SIZE (mode);
2282 /* The code above should have handled everything. */
2283 gcc_assert (!data->len);
2286 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2287 with move instructions for mode MODE. GENFUN is the gen_... function
2288 to make a move insn for that mode. DATA has all the other info. */
2290 static void
2291 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2292 struct store_by_pieces *data)
2294 unsigned int size = GET_MODE_SIZE (mode);
2295 rtx to1, cst;
2297 while (data->len >= size)
2299 if (data->reverse)
2300 data->offset -= size;
2302 if (data->autinc_to)
2303 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2304 data->offset);
2305 else
2306 to1 = adjust_address (data->to, mode, data->offset);
2308 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2309 emit_insn (gen_add2_insn (data->to_addr,
2310 GEN_INT (-(HOST_WIDE_INT) size)));
2312 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2313 emit_insn ((*genfun) (to1, cst));
2315 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2316 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2318 if (! data->reverse)
2319 data->offset += size;
2321 data->len -= size;
2325 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2326 its length in bytes. */
2329 clear_storage (rtx object, rtx size)
2331 rtx retval = 0;
2332 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2333 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2335 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2336 just move a zero. Otherwise, do this a piece at a time. */
2337 if (GET_MODE (object) != BLKmode
2338 && GET_CODE (size) == CONST_INT
2339 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2340 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2341 else
2343 if (size == const0_rtx)
2345 else if (GET_CODE (size) == CONST_INT
2346 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2347 clear_by_pieces (object, INTVAL (size), align);
2348 else if (clear_storage_via_clrmem (object, size, align))
2350 else
2351 retval = clear_storage_via_libcall (object, size);
2354 return retval;
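#if 0
/* A minimal usage sketch (kept under "#if 0"; the 32-byte size is an
   arbitrary example): zero a BLKmode stack temporary.  */
static void
example_clear_temp (void)
{
  rtx temp = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (temp, GEN_INT (32));
}
#endif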
2357 /* A subroutine of clear_storage. Expand a clrmem pattern;
2358 return true if successful. */
2360 static bool
2361 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2363 /* Try the most limited insn first, because there's no point
2364 including more than one in the machine description unless
2365 the more limited one has some advantage. */
2367 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2368 enum machine_mode mode;
2370 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2371 mode = GET_MODE_WIDER_MODE (mode))
2373 enum insn_code code = clrmem_optab[(int) mode];
2374 insn_operand_predicate_fn pred;
2376 if (code != CODE_FOR_nothing
2377 /* We don't need MODE to be narrower than
2378 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2379 the mode mask, as it is returned by the macro, it will
2380 definitely be less than the actual mode mask. */
2381 && ((GET_CODE (size) == CONST_INT
2382 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2383 <= (GET_MODE_MASK (mode) >> 1)))
2384 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2385 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2386 || (*pred) (object, BLKmode))
2387 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2388 || (*pred) (opalign, VOIDmode)))
2390 rtx op1;
2391 rtx last = get_last_insn ();
2392 rtx pat;
2394 op1 = convert_to_mode (mode, size, 1);
2395 pred = insn_data[(int) code].operand[1].predicate;
2396 if (pred != 0 && ! (*pred) (op1, mode))
2397 op1 = copy_to_mode_reg (mode, op1);
2399 pat = GEN_FCN ((int) code) (object, op1, opalign);
2400 if (pat)
2402 emit_insn (pat);
2403 return true;
2405 else
2406 delete_insns_since (last);
2410 return false;
2413 /* A subroutine of clear_storage. Expand a call to memset.
2414 Return the return value of memset, 0 otherwise. */
2416 static rtx
2417 clear_storage_via_libcall (rtx object, rtx size)
2419 tree call_expr, arg_list, fn, object_tree, size_tree;
2420 enum machine_mode size_mode;
2421 rtx retval;
2423 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2424 place those pseudos into a VAR_DECL and use them later. */
2426 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2428 size_mode = TYPE_MODE (sizetype);
2429 size = convert_to_mode (size_mode, size, 1);
2430 size = copy_to_mode_reg (size_mode, size);
2432 /* It is incorrect to use the libcall calling conventions to call
2433 memset in this context. This could be a user call to memset and
2434 the user may wish to examine the return value from memset. For
2435 targets where libcalls and normal calls have different conventions
2436 for returning pointers, we could end up generating incorrect code. */
2438 object_tree = make_tree (ptr_type_node, object);
2439 size_tree = make_tree (sizetype, size);
2441 fn = clear_storage_libcall_fn (true);
2442 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2443 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2444 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2446 /* Now we have to build up the CALL_EXPR itself. */
2447 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2448 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2449 call_expr, arg_list, NULL_TREE);
2451 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2453 return retval;
2456 /* A subroutine of clear_storage_via_libcall. Create the tree node
2457 for the function we use for block clears. The first time FOR_CALL
2458 is true, we call assemble_external. */
2460 static GTY(()) tree block_clear_fn;
2462 void
2463 init_block_clear_fn (const char *asmspec)
2465 if (!block_clear_fn)
2467 tree fn, args;
2469 fn = get_identifier ("memset");
2470 args = build_function_type_list (ptr_type_node, ptr_type_node,
2471 integer_type_node, sizetype,
2472 NULL_TREE);
2474 fn = build_decl (FUNCTION_DECL, fn, args);
2475 DECL_EXTERNAL (fn) = 1;
2476 TREE_PUBLIC (fn) = 1;
2477 DECL_ARTIFICIAL (fn) = 1;
2478 TREE_NOTHROW (fn) = 1;
2480 block_clear_fn = fn;
2483 if (asmspec)
2484 set_user_assembler_name (block_clear_fn, asmspec);
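#if 0
/* A minimal usage sketch (kept under "#if 0"; the assembler name
   "__custom_memset" is hypothetical): give the block-clear routine a
   different assembler name before any block clears are expanded.  */
static void
example_rename_block_clear (void)
{
  init_block_clear_fn ("__custom_memset");
}
#endif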
2487 static tree
2488 clear_storage_libcall_fn (int for_call)
2490 static bool emitted_extern;
2492 if (!block_clear_fn)
2493 init_block_clear_fn (NULL);
2495 if (for_call && !emitted_extern)
2497 emitted_extern = true;
2498 make_decl_rtl (block_clear_fn);
2499 assemble_external (block_clear_fn);
2502 return block_clear_fn;
2505 /* Generate code to copy Y into X.
2506 Both Y and X must have the same mode, except that
2507 Y can be a constant with VOIDmode.
2508 This mode cannot be BLKmode; use emit_block_move for that.
2510 Return the last instruction emitted. */
2513 emit_move_insn (rtx x, rtx y)
2515 enum machine_mode mode = GET_MODE (x);
2516 rtx y_cst = NULL_RTX;
2517 rtx last_insn, set;
2519 gcc_assert (mode != BLKmode
2520 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
2522 if (CONSTANT_P (y))
2524 if (optimize
2525 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2526 && (last_insn = compress_float_constant (x, y)))
2527 return last_insn;
2529 y_cst = y;
2531 if (!LEGITIMATE_CONSTANT_P (y))
2533 y = force_const_mem (mode, y);
2535 /* If the target's cannot_force_const_mem prevented the spill,
2536 assume that the target's move expanders will also take care
2537 of the non-legitimate constant. */
2538 if (!y)
2539 y = y_cst;
2543 /* If X or Y are memory references, verify that their addresses are valid
2544 for the machine. */
2545 if (MEM_P (x)
2546 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2547 && ! push_operand (x, GET_MODE (x)))
2548 || (flag_force_addr
2549 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2550 x = validize_mem (x);
2552 if (MEM_P (y)
2553 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2554 || (flag_force_addr
2555 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2556 y = validize_mem (y);
2558 gcc_assert (mode != BLKmode);
2560 last_insn = emit_move_insn_1 (x, y);
2562 if (y_cst && REG_P (x)
2563 && (set = single_set (last_insn)) != NULL_RTX
2564 && SET_DEST (set) == x
2565 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2566 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2568 return last_insn;
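#if 0
/* A minimal usage sketch (kept under "#if 0"): load the constant 42
   into a fresh SImode pseudo.  emit_move_insn forces the constant to
   memory only if the target cannot use it directly, then emits the
   target's SImode move pattern.  */
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif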
2571 /* Low level part of emit_move_insn.
2572 Called just like emit_move_insn, but assumes X and Y
2573 are basically valid. */
2576 emit_move_insn_1 (rtx x, rtx y)
2578 enum machine_mode mode = GET_MODE (x);
2579 enum machine_mode submode;
2580 enum mode_class class = GET_MODE_CLASS (mode);
2582 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2584 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2585 return
2586 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2588 /* Expand complex moves by moving real part and imag part, if possible. */
2589 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2590 && BLKmode != (submode = GET_MODE_INNER (mode))
2591 && (mov_optab->handlers[(int) submode].insn_code
2592 != CODE_FOR_nothing))
2594 /* Don't split destination if it is a stack push. */
2595 int stack = push_operand (x, GET_MODE (x));
2597 #ifdef PUSH_ROUNDING
2598 /* In case we output to the stack, but the size is smaller than the
2599 machine can push exactly, we need to use move instructions. */
2600 if (stack
2601 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2602 != GET_MODE_SIZE (submode)))
2604 rtx temp;
2605 HOST_WIDE_INT offset1, offset2;
2607 /* Do not use anti_adjust_stack, since we don't want to update
2608 stack_pointer_delta. */
2609 temp = expand_binop (Pmode,
2610 #ifdef STACK_GROWS_DOWNWARD
2611 sub_optab,
2612 #else
2613 add_optab,
2614 #endif
2615 stack_pointer_rtx,
2616 GEN_INT
2617 (PUSH_ROUNDING
2618 (GET_MODE_SIZE (GET_MODE (x)))),
2619 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2621 if (temp != stack_pointer_rtx)
2622 emit_move_insn (stack_pointer_rtx, temp);
2624 #ifdef STACK_GROWS_DOWNWARD
2625 offset1 = 0;
2626 offset2 = GET_MODE_SIZE (submode);
2627 #else
2628 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2629 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2630 + GET_MODE_SIZE (submode));
2631 #endif
2633 emit_move_insn (change_address (x, submode,
2634 gen_rtx_PLUS (Pmode,
2635 stack_pointer_rtx,
2636 GEN_INT (offset1))),
2637 gen_realpart (submode, y));
2638 emit_move_insn (change_address (x, submode,
2639 gen_rtx_PLUS (Pmode,
2640 stack_pointer_rtx,
2641 GEN_INT (offset2))),
2642 gen_imagpart (submode, y));
2644 else
2645 #endif
2646 /* If this is a stack push, push the highpart first, so it
2647 will be in the argument order.
2649 In that case, change_address is used only to convert
2650 the mode, not to change the address. */
2651 if (stack)
2653 /* Note that the real part always precedes the imag part in memory
2654 regardless of the machine's endianness. */
2655 #ifdef STACK_GROWS_DOWNWARD
2656 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2657 gen_imagpart (submode, y));
2658 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2659 gen_realpart (submode, y));
2660 #else
2661 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2662 gen_realpart (submode, y));
2663 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2664 gen_imagpart (submode, y));
2665 #endif
2667 else
2669 rtx realpart_x, realpart_y;
2670 rtx imagpart_x, imagpart_y;
2672 /* If this is a complex value with each part being smaller than a
2673 word, the usual calling sequence will likely pack the pieces into
2674 a single register. Unfortunately, SUBREG of hard registers only
2675 deals in terms of words, so we have a problem converting input
2676 arguments to the CONCAT of two registers that is used elsewhere
2677 for complex values. If this is before reload, we can copy it into
2678 memory and reload. FIXME, we should see about using extract and
2679 insert on integer registers, but complex short and complex char
2680 variables should be rarely used. */
2681 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2682 && (reload_in_progress | reload_completed) == 0)
2684 int packed_dest_p
2685 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2686 int packed_src_p
2687 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2689 if (packed_dest_p || packed_src_p)
2691 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2692 ? MODE_FLOAT : MODE_INT);
2694 enum machine_mode reg_mode
2695 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2697 if (reg_mode != BLKmode)
2699 rtx mem = assign_stack_temp (reg_mode,
2700 GET_MODE_SIZE (mode), 0);
2701 rtx cmem = adjust_address (mem, mode, 0);
2703 if (packed_dest_p)
2705 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2707 emit_move_insn_1 (cmem, y);
2708 return emit_move_insn_1 (sreg, mem);
2710 else
2712 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2714 emit_move_insn_1 (mem, sreg);
2715 return emit_move_insn_1 (x, cmem);
2721 realpart_x = gen_realpart (submode, x);
2722 realpart_y = gen_realpart (submode, y);
2723 imagpart_x = gen_imagpart (submode, x);
2724 imagpart_y = gen_imagpart (submode, y);
2726 /* Show the output dies here. This is necessary for SUBREGs
2727 of pseudos since we cannot track their lifetimes correctly;
2728 hard regs shouldn't appear here except as return values.
2729 We never want to emit such a clobber after reload. */
2730 if (x != y
2731 && ! (reload_in_progress || reload_completed)
2732 && (GET_CODE (realpart_x) == SUBREG
2733 || GET_CODE (imagpart_x) == SUBREG))
2734 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2736 emit_move_insn (realpart_x, realpart_y);
2737 emit_move_insn (imagpart_x, imagpart_y);
2740 return get_last_insn ();
2743 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2744 find a mode to do it in. If we have a movcc, use it. Otherwise,
2745 find the MODE_INT mode of the same width. */
2746 else if (GET_MODE_CLASS (mode) == MODE_CC
2747 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2749 enum insn_code insn_code;
2750 enum machine_mode tmode = VOIDmode;
2751 rtx x1 = x, y1 = y;
2753 if (mode != CCmode
2754 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2755 tmode = CCmode;
2756 else
2757 for (tmode = QImode; tmode != VOIDmode;
2758 tmode = GET_MODE_WIDER_MODE (tmode))
2759 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2760 break;
2762 gcc_assert (tmode != VOIDmode);
2764 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2765 may call change_address which is not appropriate if we were
2766 called when a reload was in progress. We don't have to worry
2767 about changing the address since the size in bytes is supposed to
2768 be the same. Copy the MEM to change the mode and move any
2769 substitutions from the old MEM to the new one. */
2771 if (reload_in_progress)
2773 x = gen_lowpart_common (tmode, x1);
2774 if (x == 0 && MEM_P (x1))
2776 x = adjust_address_nv (x1, tmode, 0);
2777 copy_replacements (x1, x);
2780 y = gen_lowpart_common (tmode, y1);
2781 if (y == 0 && MEM_P (y1))
2783 y = adjust_address_nv (y1, tmode, 0);
2784 copy_replacements (y1, y);
2787 else
2789 x = gen_lowpart (tmode, x);
2790 y = gen_lowpart (tmode, y);
2793 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2794 return emit_insn (GEN_FCN (insn_code) (x, y));
2797 /* Try using a move pattern for the corresponding integer mode. This is
2798 only safe when simplify_subreg can convert MODE constants into integer
2799 constants. At present, it can only do this reliably if the value
2800 fits within a HOST_WIDE_INT. */
2801 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2802 && (submode = int_mode_for_mode (mode)) != BLKmode
2803 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2804 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2805 (simplify_gen_subreg (submode, x, mode, 0),
2806 simplify_gen_subreg (submode, y, mode, 0)));
2808 /* This will handle any multi-word or full-word mode that lacks a move_insn
2809 pattern. However, you will get better code if you define such patterns,
2810 even if they must turn into multiple assembler instructions. */
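  /* For instance (illustrative): a TImode move on a 32-bit target with
     no movti pattern falls through to the loop below and is emitted as
     four word_mode moves, one per operand_subword.  */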
2811 else
2813 rtx last_insn = 0;
2814 rtx seq, inner;
2815 int need_clobber;
2816 int i;
2818 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2820 #ifdef PUSH_ROUNDING
2822 /* If X is a push on the stack, do the push now and replace
2823 X with a reference to the stack pointer. */
2824 if (push_operand (x, GET_MODE (x)))
2826 rtx temp;
2827 enum rtx_code code;
2829 /* Do not use anti_adjust_stack, since we don't want to update
2830 stack_pointer_delta. */
2831 temp = expand_binop (Pmode,
2832 #ifdef STACK_GROWS_DOWNWARD
2833 sub_optab,
2834 #else
2835 add_optab,
2836 #endif
2837 stack_pointer_rtx,
2838 GEN_INT
2839 (PUSH_ROUNDING
2840 (GET_MODE_SIZE (GET_MODE (x)))),
2841 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2843 if (temp != stack_pointer_rtx)
2844 emit_move_insn (stack_pointer_rtx, temp);
2846 code = GET_CODE (XEXP (x, 0));
2848 /* Just hope that small offsets off SP are OK. */
2849 if (code == POST_INC)
2850 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2851 GEN_INT (-((HOST_WIDE_INT)
2852 GET_MODE_SIZE (GET_MODE (x)))));
2853 else if (code == POST_DEC)
2854 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2855 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2856 else
2857 temp = stack_pointer_rtx;
2859 x = change_address (x, VOIDmode, temp);
2861 #endif
2863 /* If we are in reload, see if either operand is a MEM whose address
2864 is scheduled for replacement. */
2865 if (reload_in_progress && MEM_P (x)
2866 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2867 x = replace_equiv_address_nv (x, inner);
2868 if (reload_in_progress && MEM_P (y)
2869 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2870 y = replace_equiv_address_nv (y, inner);
2872 start_sequence ();
2874 need_clobber = 0;
2875 for (i = 0;
2876 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2877 i++)
2879 rtx xpart = operand_subword (x, i, 1, mode);
2880 rtx ypart = operand_subword (y, i, 1, mode);
2882 /* If we can't get a part of Y, put Y into memory if it is a
2883 constant. Otherwise, force it into a register. If we still
2884 can't get a part of Y, abort. */
2885 if (ypart == 0 && CONSTANT_P (y))
2887 y = force_const_mem (mode, y);
2888 ypart = operand_subword (y, i, 1, mode);
2890 else if (ypart == 0)
2891 ypart = operand_subword_force (y, i, mode);
2893 gcc_assert (xpart && ypart);
2895 need_clobber |= (GET_CODE (xpart) == SUBREG);
2897 last_insn = emit_move_insn (xpart, ypart);
2900 seq = get_insns ();
2901 end_sequence ();
2903 /* Show the output dies here. This is necessary for SUBREGs
2904 of pseudos since we cannot track their lifetimes correctly;
2905 hard regs shouldn't appear here except as return values.
2906 We never want to emit such a clobber after reload. */
2907 if (x != y
2908 && ! (reload_in_progress || reload_completed)
2909 && need_clobber != 0)
2910 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2912 emit_insn (seq);
2914 return last_insn;
2918 /* If Y is representable exactly in a narrower mode, and the target can
2919 perform the extension directly from constant or memory, then emit the
2920 move as an extension. */
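/* For example (illustrative): if X has DFmode and Y is the DFmode
   constant 1.0, the value truncates exactly to SFmode, so a target
   that can extend SFmode to DFmode directly from a constant or from
   memory gets an SFmode constant-pool reference extended to DFmode
   instead of a full DFmode constant load.  */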
2922 static rtx
2923 compress_float_constant (rtx x, rtx y)
2925 enum machine_mode dstmode = GET_MODE (x);
2926 enum machine_mode orig_srcmode = GET_MODE (y);
2927 enum machine_mode srcmode;
2928 REAL_VALUE_TYPE r;
2930 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2932 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2933 srcmode != orig_srcmode;
2934 srcmode = GET_MODE_WIDER_MODE (srcmode))
2936 enum insn_code ic;
2937 rtx trunc_y, last_insn;
2939 /* Skip if the target can't extend this way. */
2940 ic = can_extend_p (dstmode, srcmode, 0);
2941 if (ic == CODE_FOR_nothing)
2942 continue;
2944 /* Skip if the narrowed value isn't exact. */
2945 if (! exact_real_truncate (srcmode, &r))
2946 continue;
2948 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2950 if (LEGITIMATE_CONSTANT_P (trunc_y))
2952 /* Skip if the target needs extra instructions to perform
2953 the extension. */
2954 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2955 continue;
2957 else if (float_extend_from_mem[dstmode][srcmode])
2958 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2959 else
2960 continue;
2962 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2963 last_insn = get_last_insn ();
2965 if (REG_P (x))
2966 set_unique_reg_note (last_insn, REG_EQUAL, y);
2968 return last_insn;
2971 return NULL_RTX;
2974 /* Pushing data onto the stack. */
2976 /* Push a block of length SIZE (perhaps variable)
2977 and return an rtx to address the beginning of the block.
2978 The value may be virtual_outgoing_args_rtx.
2980 EXTRA is the number of bytes of padding to push in addition to SIZE.
2981 BELOW nonzero means this padding comes at low addresses;
2982 otherwise, the padding comes at high addresses. */
2985 push_block (rtx size, int extra, int below)
2987 rtx temp;
2989 size = convert_modes (Pmode, ptr_mode, size, 1);
2990 if (CONSTANT_P (size))
2991 anti_adjust_stack (plus_constant (size, extra));
2992 else if (REG_P (size) && extra == 0)
2993 anti_adjust_stack (size);
2994 else
2996 temp = copy_to_mode_reg (Pmode, size);
2997 if (extra != 0)
2998 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2999 temp, 0, OPTAB_LIB_WIDEN);
3000 anti_adjust_stack (temp);
3003 #ifndef STACK_GROWS_DOWNWARD
3004 if (0)
3005 #else
3006 if (1)
3007 #endif
3009 temp = virtual_outgoing_args_rtx;
3010 if (extra != 0 && below)
3011 temp = plus_constant (temp, extra);
3013 else
3015 if (GET_CODE (size) == CONST_INT)
3016 temp = plus_constant (virtual_outgoing_args_rtx,
3017 -INTVAL (size) - (below ? 0 : extra));
3018 else if (extra != 0 && !below)
3019 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3020 negate_rtx (Pmode, plus_constant (size, extra)));
3021 else
3022 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3023 negate_rtx (Pmode, size));
3026 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
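#if 0
/* A minimal usage sketch (kept under "#if 0"; the sizes are arbitrary
   examples): reserve 64 bytes of argument space plus 8 bytes of
   padding at low addresses and get back the address of the block.  */
static rtx
example_push_block (void)
{
  return push_block (GEN_INT (64), 8, 1);
}
#endif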
3029 #ifdef PUSH_ROUNDING
3031 /* Emit single push insn. */
3033 static void
3034 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3036 rtx dest_addr;
3037 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3038 rtx dest;
3039 enum insn_code icode;
3040 insn_operand_predicate_fn pred;
3042 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3043 /* If there is a push pattern, use it. Otherwise try the old way of
3044 throwing a MEM representing the push operation to the move expander. */
3045 icode = push_optab->handlers[(int) mode].insn_code;
3046 if (icode != CODE_FOR_nothing)
3048 if (((pred = insn_data[(int) icode].operand[0].predicate)
3049 && !((*pred) (x, mode))))
3050 x = force_reg (mode, x);
3051 emit_insn (GEN_FCN (icode) (x));
3052 return;
3054 if (GET_MODE_SIZE (mode) == rounded_size)
3055 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3056 /* If we are to pad downward, adjust the stack pointer first and
3057 then store X into the stack location using an offset. This is
3058 because emit_move_insn does not know how to pad; it does not have
3059 access to type. */
3060 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3062 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3063 HOST_WIDE_INT offset;
3065 emit_move_insn (stack_pointer_rtx,
3066 expand_binop (Pmode,
3067 #ifdef STACK_GROWS_DOWNWARD
3068 sub_optab,
3069 #else
3070 add_optab,
3071 #endif
3072 stack_pointer_rtx,
3073 GEN_INT (rounded_size),
3074 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3076 offset = (HOST_WIDE_INT) padding_size;
3077 #ifdef STACK_GROWS_DOWNWARD
3078 if (STACK_PUSH_CODE == POST_DEC)
3079 /* We have already decremented the stack pointer, so get the
3080 previous value. */
3081 offset += (HOST_WIDE_INT) rounded_size;
3082 #else
3083 if (STACK_PUSH_CODE == POST_INC)
3084 /* We have already incremented the stack pointer, so get the
3085 previous value. */
3086 offset -= (HOST_WIDE_INT) rounded_size;
3087 #endif
3088 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3090 else
3092 #ifdef STACK_GROWS_DOWNWARD
3093 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3094 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3095 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3096 #else
3097 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3098 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3099 GEN_INT (rounded_size));
3100 #endif
3101 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3104 dest = gen_rtx_MEM (mode, dest_addr);
3106 if (type != 0)
3108 set_mem_attributes (dest, type, 1);
3110 if (flag_optimize_sibling_calls)
3111 /* Function incoming arguments may overlap with sibling call
3112 outgoing arguments and we cannot allow reordering of reads
3113 from function arguments with stores to outgoing arguments
3114 of sibling calls. */
3115 set_mem_alias_set (dest, 0);
3117 emit_move_insn (dest, x);
3119 #endif
3121 /* Generate code to push X onto the stack, assuming it has mode MODE and
3122 type TYPE.
3123 MODE is redundant except when X is a CONST_INT (since they don't
3124 carry mode info).
3125 SIZE is an rtx for the size of data to be copied (in bytes),
3126 needed only if X is BLKmode.
3128 ALIGN (in bits) is maximum alignment we can assume.
3130 If PARTIAL and REG are both nonzero, then copy that many of the first
3131 words of X into registers starting with REG, and push the rest of X.
3132 The amount of space pushed is decreased by PARTIAL words,
3133 rounded *down* to a multiple of PARM_BOUNDARY.
3134 REG must be a hard register in this case.
3135 If REG is zero but PARTIAL is not, take all other actions for an
3136 argument partially in registers, but do not actually load any
3137 registers.
3139 EXTRA is the amount in bytes of extra space to leave next to this arg.
3140 This is ignored if an argument block has already been allocated.
3142 On a machine that lacks real push insns, ARGS_ADDR is the address of
3143 the bottom of the argument block for this call. We use indexing off there
3144 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3145 argument block has not been preallocated.
3147 ARGS_SO_FAR is the size of args previously pushed for this call.
3149 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3150 for arguments passed in registers. If nonzero, it will be the number
3151 of bytes required. */
3153 void
3154 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3155 unsigned int align, int partial, rtx reg, int extra,
3156 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3157 rtx alignment_pad)
3159 rtx xinner;
3160 enum direction stack_direction
3161 #ifdef STACK_GROWS_DOWNWARD
3162 = downward;
3163 #else
3164 = upward;
3165 #endif
3167 /* Decide where to pad the argument: `downward' for below,
3168 `upward' for above, or `none' for don't pad it.
3169 Default is below for small data on big-endian machines; else above. */
3170 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3172 /* Invert direction if stack is post-decrement.
3173 FIXME: why? */
3174 if (STACK_PUSH_CODE == POST_DEC)
3175 if (where_pad != none)
3176 where_pad = (where_pad == downward ? upward : downward);
3178 xinner = x;
3180 if (mode == BLKmode)
3182 /* Copy a block into the stack, entirely or partially. */
3184 rtx temp;
3185 int used = partial * UNITS_PER_WORD;
3186 int offset;
3187 int skip;
3189 if (reg && GET_CODE (reg) == PARALLEL)
3191 /* Use the size of the elt to compute offset. */
3192 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3193 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3194 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3196 else
3197 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3199 gcc_assert (size);
3201 used -= offset;
3203 /* USED is now the # of bytes we need not copy to the stack
3204 because registers will take care of them. */
3206 if (partial != 0)
3207 xinner = adjust_address (xinner, BLKmode, used);
3209 /* If the partial register-part of the arg counts in its stack size,
3210 skip the part of stack space corresponding to the registers.
3211 Otherwise, start copying to the beginning of the stack space,
3212 by setting SKIP to 0. */
3213 skip = (reg_parm_stack_space == 0) ? 0 : used;
3215 #ifdef PUSH_ROUNDING
3216 /* Do it with several push insns if that doesn't take lots of insns
3217 and if there is no difficulty with push insns that skip bytes
3218 on the stack for alignment purposes. */
3219 if (args_addr == 0
3220 && PUSH_ARGS
3221 && GET_CODE (size) == CONST_INT
3222 && skip == 0
3223 && MEM_ALIGN (xinner) >= align
3224 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3225 /* Here we avoid the case of a structure whose weak alignment
3226 forces many pushes of a small amount of data,
3227 and such small pushes do rounding that causes trouble. */
3228 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3229 || align >= BIGGEST_ALIGNMENT
3230 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3231 == (align / BITS_PER_UNIT)))
3232 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3234 /* Push padding now if padding above and stack grows down,
3235 or if padding below and stack grows up.
3236 But if space already allocated, this has already been done. */
3237 if (extra && args_addr == 0
3238 && where_pad != none && where_pad != stack_direction)
3239 anti_adjust_stack (GEN_INT (extra));
3241 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3243 else
3244 #endif /* PUSH_ROUNDING */
3246 rtx target;
3248 /* Otherwise make space on the stack and copy the data
3249 to the address of that space. */
3251 /* Deduct words put into registers from the size we must copy. */
3252 if (partial != 0)
3254 if (GET_CODE (size) == CONST_INT)
3255 size = GEN_INT (INTVAL (size) - used);
3256 else
3257 size = expand_binop (GET_MODE (size), sub_optab, size,
3258 GEN_INT (used), NULL_RTX, 0,
3259 OPTAB_LIB_WIDEN);
3262 /* Get the address of the stack space.
3263 In this case, we do not deal with EXTRA separately.
3264 A single stack adjust will do. */
3265 if (! args_addr)
3267 temp = push_block (size, extra, where_pad == downward);
3268 extra = 0;
3270 else if (GET_CODE (args_so_far) == CONST_INT)
3271 temp = memory_address (BLKmode,
3272 plus_constant (args_addr,
3273 skip + INTVAL (args_so_far)));
3274 else
3275 temp = memory_address (BLKmode,
3276 plus_constant (gen_rtx_PLUS (Pmode,
3277 args_addr,
3278 args_so_far),
3279 skip));
3281 if (!ACCUMULATE_OUTGOING_ARGS)
3283 /* If the source is referenced relative to the stack pointer,
3284 copy it to another register to stabilize it. We do not need
3285 to do this if we know that we won't be changing sp. */
3287 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3288 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3289 temp = copy_to_reg (temp);
3292 target = gen_rtx_MEM (BLKmode, temp);
3294 /* We do *not* set_mem_attributes here, because incoming arguments
3295 may overlap with sibling call outgoing arguments and we cannot
3296 allow reordering of reads from function arguments with stores
3297 to outgoing arguments of sibling calls. We do, however, want
3298 to record the alignment of the stack slot. */
3299 /* ALIGN may well be better aligned than TYPE, e.g. due to
3300 PARM_BOUNDARY. Assume the caller isn't lying. */
3301 set_mem_align (target, align);
3303 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3306 else if (partial > 0)
3308 /* Scalar partly in registers. */
3310 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3311 int i;
3312 int not_stack;
3313 /* # words of start of argument
3314 that we must make space for but need not store. */
3315 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3316 int args_offset = INTVAL (args_so_far);
3317 int skip;
3319 /* Push padding now if padding above and stack grows down,
3320 or if padding below and stack grows up.
3321 But if space already allocated, this has already been done. */
3322 if (extra && args_addr == 0
3323 && where_pad != none && where_pad != stack_direction)
3324 anti_adjust_stack (GEN_INT (extra));
3326 /* If we make space by pushing it, we might as well push
3327 the real data. Otherwise, we can leave OFFSET nonzero
3328 and leave the space uninitialized. */
3329 if (args_addr == 0)
3330 offset = 0;
3332 /* Now NOT_STACK gets the number of words that we don't need to
3333 allocate on the stack. */
3334 not_stack = partial - offset;
3336 /* If the partial register-part of the arg counts in its stack size,
3337 skip the part of stack space corresponding to the registers.
3338 Otherwise, start copying to the beginning of the stack space,
3339 by setting SKIP to 0. */
3340 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3342 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3343 x = validize_mem (force_const_mem (mode, x));
3345 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3346 SUBREGs of such registers are not allowed. */
3347 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3348 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3349 x = copy_to_reg (x);
3351 /* Loop over all the words allocated on the stack for this arg. */
3352 /* We can do it by words, because any scalar bigger than a word
3353 has a size that is a multiple of a word. */
3354 #ifndef PUSH_ARGS_REVERSED
3355 for (i = not_stack; i < size; i++)
3356 #else
3357 for (i = size - 1; i >= not_stack; i--)
3358 #endif
3359 if (i >= not_stack + offset)
3360 emit_push_insn (operand_subword_force (x, i, mode),
3361 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3362 0, args_addr,
3363 GEN_INT (args_offset + ((i - not_stack + skip)
3364 * UNITS_PER_WORD)),
3365 reg_parm_stack_space, alignment_pad);
3367 else
3369 rtx addr;
3370 rtx dest;
3372 /* Push padding now if padding above and stack grows down,
3373 or if padding below and stack grows up.
3374 But if space already allocated, this has already been done. */
3375 if (extra && args_addr == 0
3376 && where_pad != none && where_pad != stack_direction)
3377 anti_adjust_stack (GEN_INT (extra));
3379 #ifdef PUSH_ROUNDING
3380 if (args_addr == 0 && PUSH_ARGS)
3381 emit_single_push_insn (mode, x, type);
3382 else
3383 #endif
3385 if (GET_CODE (args_so_far) == CONST_INT)
3386 addr
3387 = memory_address (mode,
3388 plus_constant (args_addr,
3389 INTVAL (args_so_far)));
3390 else
3391 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3392 args_so_far));
3393 dest = gen_rtx_MEM (mode, addr);
3395 /* We do *not* set_mem_attributes here, because incoming arguments
3396 may overlap with sibling call outgoing arguments and we cannot
3397 allow reordering of reads from function arguments with stores
3398 to outgoing arguments of sibling calls. We do, however, want
3399 to record the alignment of the stack slot. */
3400 /* ALIGN may well be better aligned than TYPE, e.g. due to
3401 PARM_BOUNDARY. Assume the caller isn't lying. */
3402 set_mem_align (dest, align);
3404 emit_move_insn (dest, x);
3408 /* If part should go in registers, copy that part
3409 into the appropriate registers. Do this now, at the end,
3410 since mem-to-mem copies above may do function calls. */
3411 if (partial > 0 && reg != 0)
3413 /* Handle calls that pass values in multiple non-contiguous locations.
3414 The Irix 6 ABI has examples of this. */
3415 if (GET_CODE (reg) == PARALLEL)
3416 emit_group_load (reg, x, type, -1);
3417 else
3418 move_block_to_reg (REGNO (reg), x, partial, mode);
3421 if (extra && args_addr == 0 && where_pad == stack_direction)
3422 anti_adjust_stack (GEN_INT (extra));
3424 if (alignment_pad && args_addr == 0)
3425 anti_adjust_stack (alignment_pad);
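#if 0
/* A minimal usage sketch (kept under "#if 0"; the constant, type and
   alignment are arbitrary examples, and a target with push insns is
   assumed): push one SImode constant argument with no partial
   register part, no preallocated argument block and no extra padding.
   The arguments follow the description above.  */
static void
example_push_scalar_arg (void)
{
  emit_push_insn (GEN_INT (42), SImode, integer_type_node, NULL_RTX,
                  BITS_PER_WORD, 0, NULL_RTX, 0, NULL_RTX, const0_rtx,
                  0, NULL_RTX);
}
#endif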
3428 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3429 operations. */
3431 static rtx
3432 get_subtarget (rtx x)
3434 return (optimize
3435 || x == 0
3436 /* Only registers can be subtargets. */
3437 || !REG_P (x)
3438 /* Don't use hard regs to avoid extending their life. */
3439 || REGNO (x) < FIRST_PSEUDO_REGISTER
3440 ? 0 : x);
3443 /* Expand an assignment that stores the value of FROM into TO. */
3445 void
3446 expand_assignment (tree to, tree from)
3448 rtx to_rtx = 0;
3449 rtx result;
3451 /* Don't crash if the lhs of the assignment was erroneous. */
3453 if (TREE_CODE (to) == ERROR_MARK)
3455 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3456 return;
3459 /* Assignment of a structure component needs special treatment
3460 if the structure component's rtx is not simply a MEM.
3461 Assignment of an array element at a constant index, and assignment of
3462 an array element in an unaligned packed structure field, have the same
3463 problem. */
3465 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3466 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3467 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3469 enum machine_mode mode1;
3470 HOST_WIDE_INT bitsize, bitpos;
3471 rtx orig_to_rtx;
3472 tree offset;
3473 int unsignedp;
3474 int volatilep = 0;
3475 tree tem;
3477 push_temp_slots ();
3478 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3479 &unsignedp, &volatilep);
3481 /* If we are going to use store_bit_field and extract_bit_field,
3482 make sure to_rtx will be safe for multiple use. */
3484 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3486 if (offset != 0)
3488 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3490 gcc_assert (MEM_P (to_rtx));
3492 #ifdef POINTERS_EXTEND_UNSIGNED
3493 if (GET_MODE (offset_rtx) != Pmode)
3494 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3495 #else
3496 if (GET_MODE (offset_rtx) != ptr_mode)
3497 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3498 #endif
3500 /* A constant address in TO_RTX can have VOIDmode; we must not try
3501 to call force_reg in that case, so avoid it. */
3502 if (MEM_P (to_rtx)
3503 && GET_MODE (to_rtx) == BLKmode
3504 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3505 && bitsize > 0
3506 && (bitpos % bitsize) == 0
3507 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3508 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3510 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3511 bitpos = 0;
3514 to_rtx = offset_address (to_rtx, offset_rtx,
3515 highest_pow2_factor_for_target (to,
3516 offset));
3519 if (MEM_P (to_rtx))
3521 /* If the field is at offset zero, we could have been given the
3522 DECL_RTX of the parent struct. Don't munge it. */
3523 to_rtx = shallow_copy_rtx (to_rtx);
3525 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3528 /* Deal with volatile and readonly fields. The former is only done
3529 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3530 if (volatilep && MEM_P (to_rtx))
3532 if (to_rtx == orig_to_rtx)
3533 to_rtx = copy_rtx (to_rtx);
3534 MEM_VOLATILE_P (to_rtx) = 1;
3537 if (MEM_P (to_rtx) && ! can_address_p (to))
3539 if (to_rtx == orig_to_rtx)
3540 to_rtx = copy_rtx (to_rtx);
3541 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3544 /* Optimize bitfld op= val in certain cases. */
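/* For instance (illustrative): for "x.f += 1", where F is the topmost
   bit-field of a word-sized structure, the loop below rewrites the
   update as a shift and an add on the containing word instead of an
   extract/insert pair; 1-bit fields use xor instead of add.  */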
3545 while (mode1 == VOIDmode
3546 && bitsize > 0 && bitsize < BITS_PER_WORD
3547 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3548 && !TREE_SIDE_EFFECTS (to)
3549 && !TREE_THIS_VOLATILE (to))
3551 tree src, op0, op1;
3552 rtx value, str_rtx = to_rtx;
3553 HOST_WIDE_INT bitpos1 = bitpos;
3554 optab binop;
3556 src = from;
3557 STRIP_NOPS (src);
3558 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3559 || !BINARY_CLASS_P (src))
3560 break;
3562 op0 = TREE_OPERAND (src, 0);
3563 op1 = TREE_OPERAND (src, 1);
3564 STRIP_NOPS (op0);
3566 if (! operand_equal_p (to, op0, 0))
3567 break;
3569 if (MEM_P (str_rtx))
3571 enum machine_mode mode = GET_MODE (str_rtx);
3572 HOST_WIDE_INT offset1;
3574 if (GET_MODE_BITSIZE (mode) == 0
3575 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3576 mode = word_mode;
3577 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3578 mode, 0);
3579 if (mode == VOIDmode)
3580 break;
3582 offset1 = bitpos1;
3583 bitpos1 %= GET_MODE_BITSIZE (mode);
3584 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3585 str_rtx = adjust_address (str_rtx, mode, offset1);
3587 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3588 break;
3590 /* If the bit field covers the whole REG/MEM, store_field
3591 will likely generate better code. */
3592 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3593 break;
3595 /* We can't handle fields split across multiple entities. */
3596 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3597 break;
3599 if (BYTES_BIG_ENDIAN)
3600 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3601 - bitsize;
3603 /* Special case some bitfield op= exp. */
3604 switch (TREE_CODE (src))
3606 case PLUS_EXPR:
3607 case MINUS_EXPR:
3608 /* For now, just optimize the case of the topmost bitfield
3609 where we don't need to do any masking and also
3610 1 bit bitfields where xor can be used.
3611 We might win by one instruction for the other bitfields
3612 too if insv/extv instructions aren't used, so that
3613 can be added later. */
3614 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3615 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3616 break;
3617 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3618 value = convert_modes (GET_MODE (str_rtx),
3619 TYPE_MODE (TREE_TYPE (op1)), value,
3620 TYPE_UNSIGNED (TREE_TYPE (op1)));
3622 /* We may be accessing data outside the field, which means
3623 we can alias adjacent data. */
3624 if (MEM_P (str_rtx))
3626 str_rtx = shallow_copy_rtx (str_rtx);
3627 set_mem_alias_set (str_rtx, 0);
3628 set_mem_expr (str_rtx, 0);
3631 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3632 if (bitsize == 1
3633 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3635 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3636 NULL_RTX);
3637 binop = xor_optab;
3639 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3640 build_int_cst (NULL_TREE, bitpos1),
3641 NULL_RTX, 1);
3642 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3643 value, str_rtx, 1, OPTAB_WIDEN);
3644 if (result != str_rtx)
3645 emit_move_insn (str_rtx, result);
3646 free_temp_slots ();
3647 pop_temp_slots ();
3648 return;
3650 default:
3651 break;
3654 break;
3657 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3658 TREE_TYPE (tem), get_alias_set (to));
3660 preserve_temp_slots (result);
3661 free_temp_slots ();
3662 pop_temp_slots ();
3664 /* If the value is meaningful, convert RESULT to the proper mode.
3665 Otherwise, return nothing. */
3666 return;
3669 /* If the rhs is a function call and its value is not an aggregate,
3670 call the function before we start to compute the lhs.
3671 This is needed for correct code for cases such as
3672 val = setjmp (buf) on machines where reference to val
3673 requires loading up part of an address in a separate insn.
3675 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3676 since it might be a promoted variable where the zero- or sign- extension
3677 needs to be done. Handling this in the normal way is safe because no
3678 computation is done before the call. */
3679 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3680 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3681 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3682 && REG_P (DECL_RTL (to))))
3684 rtx value;
3686 push_temp_slots ();
3687 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3688 if (to_rtx == 0)
3689 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3691 /* Handle calls that return values in multiple non-contiguous locations.
3692 The Irix 6 ABI has examples of this. */
3693 if (GET_CODE (to_rtx) == PARALLEL)
3694 emit_group_load (to_rtx, value, TREE_TYPE (from),
3695 int_size_in_bytes (TREE_TYPE (from)));
3696 else if (GET_MODE (to_rtx) == BLKmode)
3697 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3698 else
3700 if (POINTER_TYPE_P (TREE_TYPE (to)))
3701 value = convert_memory_address (GET_MODE (to_rtx), value);
3702 emit_move_insn (to_rtx, value);
3704 preserve_temp_slots (to_rtx);
3705 free_temp_slots ();
3706 pop_temp_slots ();
3707 return;
3710 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3711 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3713 if (to_rtx == 0)
3714 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3716 /* Don't move directly into a return register. */
3717 if (TREE_CODE (to) == RESULT_DECL
3718 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3720 rtx temp;
3722 push_temp_slots ();
3723 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3725 if (GET_CODE (to_rtx) == PARALLEL)
3726 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3727 int_size_in_bytes (TREE_TYPE (from)));
3728 else
3729 emit_move_insn (to_rtx, temp);
3731 preserve_temp_slots (to_rtx);
3732 free_temp_slots ();
3733 pop_temp_slots ();
3734 return;
3737 /* In case we are returning the contents of an object which overlaps
3738 the place the value is being stored, use a safe function when copying
3739 a value through a pointer into a structure value return block. */
3740 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3741 && current_function_returns_struct
3742 && !current_function_returns_pcc_struct)
3744 rtx from_rtx, size;
3746 push_temp_slots ();
3747 size = expr_size (from);
3748 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3750 emit_library_call (memmove_libfunc, LCT_NORMAL,
3751 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3752 XEXP (from_rtx, 0), Pmode,
3753 convert_to_mode (TYPE_MODE (sizetype),
3754 size, TYPE_UNSIGNED (sizetype)),
3755 TYPE_MODE (sizetype));
3757 preserve_temp_slots (to_rtx);
3758 free_temp_slots ();
3759 pop_temp_slots ();
3760 return;
3763 /* Compute FROM and store the value in the rtx we got. */
3765 push_temp_slots ();
3766 result = store_expr (from, to_rtx, 0);
3767 preserve_temp_slots (result);
3768 free_temp_slots ();
3769 pop_temp_slots ();
3770 return;
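/* Illustrative sketch of the source-level construct the PLUS_EXPR/MINUS_EXPR
   bitfield shortcut above is aimed at; the struct and function names here
   are hypothetical and the snippet is not compiled.  A 1-bit field updated
   by an odd constant can be expanded as an xor of that single bit, and a
   field that ends at the top of its word (which of the fields below that is
   depends on the target's bitfield layout) can be updated with a plain
   add/sub, since any carry out of the field is discarded anyway.  */
#if 0
struct ex_bits
{
  unsigned low : 12;
  unsigned flag : 1;            /* bitsize == 1: "+= 1" becomes an xor.  */
  unsigned top : 19;            /* May end the word: no masking needed.  */
};

void
ex_bump (struct ex_bits *p)
{
  p->flag += 1;
  p->top -= 2;
}
#endif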
3773 /* Generate code for computing expression EXP,
3774 and storing the value into TARGET.
3776 If the mode is BLKmode then we may return TARGET itself.
3777 It turns out that in BLKmode it doesn't cause a problem,
3778 because C has no operators that could combine two different
3779 assignments into the same BLKmode object with different values
3780 with no sequence point. Will other languages need this to
3781 be more thorough?
3783 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3784 stack, and block moves may need to be treated specially. */
3787 store_expr (tree exp, rtx target, int call_param_p)
3789 rtx temp;
3790 rtx alt_rtl = NULL_RTX;
3791 int dont_return_target = 0;
3793 if (VOID_TYPE_P (TREE_TYPE (exp)))
3795 /* C++ can generate ?: expressions with a throw expression in one
3796 branch and an rvalue in the other. Here, we resolve attempts to
3797 store the throw expression's nonexistent result. */
3798 gcc_assert (!call_param_p);
3799 expand_expr (exp, const0_rtx, VOIDmode, 0);
3800 return NULL_RTX;
3802 if (TREE_CODE (exp) == COMPOUND_EXPR)
3804 /* Perform first part of compound expression, then assign from second
3805 part. */
3806 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3807 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3808 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3810 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3812 /* For conditional expression, get safe form of the target. Then
3813 test the condition, doing the appropriate assignment on either
3814 side. This avoids the creation of unnecessary temporaries.
3815 For non-BLKmode, it is more efficient not to do this. */
3817 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3819 do_pending_stack_adjust ();
3820 NO_DEFER_POP;
3821 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3822 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3823 emit_jump_insn (gen_jump (lab2));
3824 emit_barrier ();
3825 emit_label (lab1);
3826 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
3827 emit_label (lab2);
3828 OK_DEFER_POP;
3830 return NULL_RTX;
3832 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3833 /* If this is a scalar in a register that is stored in a wider mode
3834 than the declared mode, compute the result into its declared mode
3835 and then convert to the wider mode. Our value is the computed
3836 expression. */
3838 rtx inner_target = 0;
3840 /* We can do the conversion inside EXP, which will often result
3841 in some optimizations. Do the conversion in two steps: first
3842 change the signedness, if needed, then the extend. But don't
3843 do this if the type of EXP is a subtype of something else
3844 since then the conversion might involve more than just
3845 converting modes. */
3846 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
3847 && TREE_TYPE (TREE_TYPE (exp)) == 0
3848 && (!lang_hooks.reduce_bit_field_operations
3849 || (GET_MODE_PRECISION (GET_MODE (target))
3850 == TYPE_PRECISION (TREE_TYPE (exp)))))
3852 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3853 != SUBREG_PROMOTED_UNSIGNED_P (target))
3854 exp = convert
3855 (lang_hooks.types.signed_or_unsigned_type
3856 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3858 exp = convert (lang_hooks.types.type_for_mode
3859 (GET_MODE (SUBREG_REG (target)),
3860 SUBREG_PROMOTED_UNSIGNED_P (target)),
3861 exp);
3863 inner_target = SUBREG_REG (target);
3866 temp = expand_expr (exp, inner_target, VOIDmode,
3867 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3869 /* If TEMP is a VOIDmode constant, use convert_modes to make
3870 sure that we properly convert it. */
3871 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3873 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3874 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3875 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3876 GET_MODE (target), temp,
3877 SUBREG_PROMOTED_UNSIGNED_P (target));
3880 convert_move (SUBREG_REG (target), temp,
3881 SUBREG_PROMOTED_UNSIGNED_P (target));
3883 return NULL_RTX;
3885 else
3887 temp = expand_expr_real (exp, target, GET_MODE (target),
3888 (call_param_p
3889 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3890 &alt_rtl);
3891 /* Return TARGET if it's a specified hardware register.
3892 If TARGET is a volatile mem ref, either return TARGET
3893 or return a reg copied *from* TARGET; ANSI requires this.
3895 Otherwise, if TEMP is not TARGET, return TEMP
3896 if it is constant (for efficiency),
3897 or if we really want the correct value. */
3898 if (!(target && REG_P (target)
3899 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3900 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3901 && ! rtx_equal_p (temp, target)
3902 && CONSTANT_P (temp))
3903 dont_return_target = 1;
3906 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3907 the same as that of TARGET, adjust the constant. This is needed, for
3908 example, in case it is a CONST_DOUBLE and we want only a word-sized
3909 value. */
3910 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3911 && TREE_CODE (exp) != ERROR_MARK
3912 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3913 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3914 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3916 /* If value was not generated in the target, store it there.
3917 Convert the value to TARGET's type first if necessary and emit the
3918 pending incrementations that have been queued when expanding EXP.
3919 Note that we cannot emit the whole queue blindly because this will
3920 effectively disable the POST_INC optimization later.
3922 If TEMP and TARGET compare equal according to rtx_equal_p, but
3923 one or both of them are volatile memory refs, we have to distinguish
3924 two cases:
3925 - expand_expr has used TARGET. In this case, we must not generate
3926 another copy. This can be detected by TARGET being equal according
3927 to == .
3928 - expand_expr has not used TARGET - that means that the source just
3929 happens to have the same RTX form. Since temp will have been created
3930 by expand_expr, it will compare unequal according to == .
3931 We must generate a copy in this case, to reach the correct number
3932 of volatile memory references. */
3934 if ((! rtx_equal_p (temp, target)
3935 || (temp != target && (side_effects_p (temp)
3936 || side_effects_p (target))))
3937 && TREE_CODE (exp) != ERROR_MARK
3938 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3939 but TARGET is not a valid memory reference, TEMP will differ
3940 from TARGET although it is really the same location. */
3941 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
3942 /* If there's nothing to copy, don't bother. Don't call expr_size
3943 unless necessary, because the expr_size hook of some front ends
3944 (e.g. C++) aborts on objects that are not supposed to be bit-copied or
3945 bit-initialized. */
3946 && expr_size (exp) != const0_rtx)
3948 if (GET_MODE (temp) != GET_MODE (target)
3949 && GET_MODE (temp) != VOIDmode)
3951 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3952 if (dont_return_target)
3954 /* In this case, we will return TEMP,
3955 so make sure it has the proper mode.
3956 But don't forget to store the value into TARGET. */
3957 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3958 emit_move_insn (target, temp);
3960 else
3961 convert_move (target, temp, unsignedp);
3964 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3966 /* Handle copying a string constant into an array. The string
3967 constant may be shorter than the array. So copy just the string's
3968 actual length, and clear the rest. First get the size of the data
3969 type of the string, which is actually the size of the target. */
3970 rtx size = expr_size (exp);
3972 if (GET_CODE (size) == CONST_INT
3973 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3974 emit_block_move (target, temp, size,
3975 (call_param_p
3976 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3977 else
3979 /* Compute the size of the data to copy from the string. */
3980 tree copy_size
3981 = size_binop (MIN_EXPR,
3982 make_tree (sizetype, size),
3983 size_int (TREE_STRING_LENGTH (exp)));
3984 rtx copy_size_rtx
3985 = expand_expr (copy_size, NULL_RTX, VOIDmode,
3986 (call_param_p
3987 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
3988 rtx label = 0;
3990 /* Copy that much. */
3991 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
3992 TYPE_UNSIGNED (sizetype));
3993 emit_block_move (target, temp, copy_size_rtx,
3994 (call_param_p
3995 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3997 /* Figure out how much is left in TARGET that we have to clear.
3998 Do all calculations in ptr_mode. */
3999 if (GET_CODE (copy_size_rtx) == CONST_INT)
4001 size = plus_constant (size, -INTVAL (copy_size_rtx));
4002 target = adjust_address (target, BLKmode,
4003 INTVAL (copy_size_rtx));
4005 else
4007 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4008 copy_size_rtx, NULL_RTX, 0,
4009 OPTAB_LIB_WIDEN);
4011 #ifdef POINTERS_EXTEND_UNSIGNED
4012 if (GET_MODE (copy_size_rtx) != Pmode)
4013 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4014 TYPE_UNSIGNED (sizetype));
4015 #endif
4017 target = offset_address (target, copy_size_rtx,
4018 highest_pow2_factor (copy_size));
4019 label = gen_label_rtx ();
4020 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4021 GET_MODE (size), 0, label);
4024 if (size != const0_rtx)
4025 clear_storage (target, size);
4027 if (label)
4028 emit_label (label);
4031 /* Handle calls that return values in multiple non-contiguous locations.
4032 The Irix 6 ABI has examples of this. */
4033 else if (GET_CODE (target) == PARALLEL)
4034 emit_group_load (target, temp, TREE_TYPE (exp),
4035 int_size_in_bytes (TREE_TYPE (exp)));
4036 else if (GET_MODE (temp) == BLKmode)
4037 emit_block_move (target, temp, expr_size (exp),
4038 (call_param_p
4039 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4040 else
4042 temp = force_operand (temp, target);
4043 if (temp != target)
4044 emit_move_insn (target, temp);
4048 return NULL_RTX;
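/* Illustrative sketch of the usual calling pattern for store_expr, mirroring
   the calls made from store_constructor below; the function name is
   hypothetical and the snippet is not compiled.  The destination has already
   been expanded to an rtx, and a zero CALL_PARAM_P marks an ordinary store
   rather than a store into an outgoing call parameter.  */
#if 0
static void
example_plain_store (tree rhs, rtx dest)
{
  /* Expand RHS and write its value into DEST.  */
  store_expr (rhs, dest, 0);
}
#endif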
4051 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4052 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4053 are set to non-constant values and place it in *P_NC_ELTS. */
4055 static void
4056 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4057 HOST_WIDE_INT *p_nc_elts)
4059 HOST_WIDE_INT nz_elts, nc_elts;
4060 tree list;
4062 nz_elts = 0;
4063 nc_elts = 0;
4065 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4067 tree value = TREE_VALUE (list);
4068 tree purpose = TREE_PURPOSE (list);
4069 HOST_WIDE_INT mult;
4071 mult = 1;
4072 if (TREE_CODE (purpose) == RANGE_EXPR)
4074 tree lo_index = TREE_OPERAND (purpose, 0);
4075 tree hi_index = TREE_OPERAND (purpose, 1);
4077 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4078 mult = (tree_low_cst (hi_index, 1)
4079 - tree_low_cst (lo_index, 1) + 1);
4082 switch (TREE_CODE (value))
4084 case CONSTRUCTOR:
4086 HOST_WIDE_INT nz = 0, nc = 0;
4087 categorize_ctor_elements_1 (value, &nz, &nc);
4088 nz_elts += mult * nz;
4089 nc_elts += mult * nc;
4091 break;
4093 case INTEGER_CST:
4094 case REAL_CST:
4095 if (!initializer_zerop (value))
4096 nz_elts += mult;
4097 break;
4098 case COMPLEX_CST:
4099 if (!initializer_zerop (TREE_REALPART (value)))
4100 nz_elts += mult;
4101 if (!initializer_zerop (TREE_IMAGPART (value)))
4102 nz_elts += mult;
4103 break;
4104 case VECTOR_CST:
4106 tree v;
4107 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4108 if (!initializer_zerop (TREE_VALUE (v)))
4109 nz_elts += mult;
4111 break;
4113 default:
4114 nz_elts += mult;
4115 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4116 nc_elts += mult;
4117 break;
4121 *p_nz_elts += nz_elts;
4122 *p_nc_elts += nc_elts;
4125 void
4126 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4127 HOST_WIDE_INT *p_nc_elts)
4129 *p_nz_elts = 0;
4130 *p_nc_elts = 0;
4131 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
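/* Illustrative sketch of how a caller can query a CONSTRUCTOR with the
   routine above, much as mostly_zeros_p does below; the function name is
   hypothetical and the snippet is not compiled.  NZ counts scalars with
   nonzero initializers, NC counts scalars whose initializers are not valid
   constant initializers.  */
#if 0
static int
example_ctor_is_constant (tree ctor)
{
  HOST_WIDE_INT nz, nc;

  categorize_ctor_elements (ctor, &nz, &nc);
  return nc == 0;
}
#endif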
4134 /* Count the number of scalars in TYPE.  Return -1 on overflow or
4135 if TYPE is variable-sized. */
4137 HOST_WIDE_INT
4138 count_type_elements (tree type)
4140 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4141 switch (TREE_CODE (type))
4143 case ARRAY_TYPE:
4145 tree telts = array_type_nelts (type);
4146 if (telts && host_integerp (telts, 1))
4148 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4149 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4150 if (n == 0)
4151 return 0;
4152 else if (max / n > m)
4153 return n * m;
4155 return -1;
4158 case RECORD_TYPE:
4160 HOST_WIDE_INT n = 0, t;
4161 tree f;
4163 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4164 if (TREE_CODE (f) == FIELD_DECL)
4166 t = count_type_elements (TREE_TYPE (f));
4167 if (t < 0)
4168 return -1;
4169 n += t;
4172 return n;
4175 case UNION_TYPE:
4176 case QUAL_UNION_TYPE:
4178 /* Ho hum. How in the world do we guess here? Clearly it isn't
4179 right to count the fields. Guess based on the number of words. */
4180 HOST_WIDE_INT n = int_size_in_bytes (type);
4181 if (n < 0)
4182 return -1;
4183 return n / UNITS_PER_WORD;
4186 case COMPLEX_TYPE:
4187 return 2;
4189 case VECTOR_TYPE:
4190 return TYPE_VECTOR_SUBPARTS (type);
4192 case INTEGER_TYPE:
4193 case REAL_TYPE:
4194 case ENUMERAL_TYPE:
4195 case BOOLEAN_TYPE:
4196 case CHAR_TYPE:
4197 case POINTER_TYPE:
4198 case OFFSET_TYPE:
4199 case REFERENCE_TYPE:
4200 return 1;
4202 case VOID_TYPE:
4203 case METHOD_TYPE:
4204 case FILE_TYPE:
4205 case SET_TYPE:
4206 case FUNCTION_TYPE:
4207 case LANG_TYPE:
4208 default:
4209 gcc_unreachable ();
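/* Worked example for count_type_elements on some hypothetical types (the
   snippet is not compiled).  Unions fall back to the words-per-union guess
   above, and variable-sized arrays yield -1.  */
#if 0
struct ex_rec { int a; double b[3]; };  /* 1 + (3 * 1) scalars  ->  4  */
typedef int ex_grid[10][2];             /* 10 * (2 * 1) scalars -> 20  */
typedef _Complex double ex_cplx;        /* COMPLEX_TYPE         ->  2  */
#endif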
4213 /* Return 1 if EXP contains mostly (more than 3/4) zeros. */
4216 mostly_zeros_p (tree exp)
4218 if (TREE_CODE (exp) == CONSTRUCTOR)
4221 HOST_WIDE_INT nz_elts, nc_elts, elts;
4223 /* If there are no ranges of true bits, it is all zero. */
4224 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4225 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4227 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4228 elts = count_type_elements (TREE_TYPE (exp));
4230 return nz_elts < elts / 4;
4233 return initializer_zerop (exp);
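/* Illustrative sketch of how the 3/4 heuristic is used by store_constructor
   below; the function name is hypothetical and the snippet is not compiled.
   A mostly-zero initializer is cheaper to expand as one block clear followed
   by stores of only the nonzero elements.  */
#if 0
static void
example_clear_first (tree exp, rtx target, HOST_WIDE_INT size)
{
  if (size > 0 && mostly_zeros_p (exp))
    clear_storage (target, GEN_INT (size));
}
#endif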
4236 /* Helper function for store_constructor.
4237 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4238 TYPE is the type of the CONSTRUCTOR, not the element type.
4239 CLEARED is as for store_constructor.
4240 ALIAS_SET is the alias set to use for any stores.
4242 This provides a recursive shortcut back to store_constructor when it isn't
4243 necessary to go through store_field. This is so that we can pass through
4244 the cleared field to let store_constructor know that we may not have to
4245 clear a substructure if the outer structure has already been cleared. */
4247 static void
4248 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4249 HOST_WIDE_INT bitpos, enum machine_mode mode,
4250 tree exp, tree type, int cleared, int alias_set)
4252 if (TREE_CODE (exp) == CONSTRUCTOR
4253 /* We can only call store_constructor recursively if the size and
4254 bit position are on a byte boundary. */
4255 && bitpos % BITS_PER_UNIT == 0
4256 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4257 /* If we have a nonzero bitpos for a register target, then we just
4258 let store_field do the bitfield handling. This is unlikely to
4259 generate unnecessary clear instructions anyway. */
4260 && (bitpos == 0 || MEM_P (target)))
4262 if (MEM_P (target))
4263 target
4264 = adjust_address (target,
4265 GET_MODE (target) == BLKmode
4266 || 0 != (bitpos
4267 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4268 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4271 /* Update the alias set, if required. */
4272 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4273 && MEM_ALIAS_SET (target) != 0)
4275 target = copy_rtx (target);
4276 set_mem_alias_set (target, alias_set);
4279 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4281 else
4282 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4285 /* Store the value of constructor EXP into the rtx TARGET.
4286 TARGET is either a REG or a MEM; we know it cannot conflict, since
4287 safe_from_p has been called.
4288 CLEARED is true if TARGET is known to have been zero'd.
4289 SIZE is the number of bytes of TARGET we are allowed to modify: this
4290 may not be the same as the size of EXP if we are assigning to a field
4291 which has been packed to exclude padding bits. */
4293 static void
4294 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4296 tree type = TREE_TYPE (exp);
4297 #ifdef WORD_REGISTER_OPERATIONS
4298 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4299 #endif
4301 switch (TREE_CODE (type))
4303 case RECORD_TYPE:
4304 case UNION_TYPE:
4305 case QUAL_UNION_TYPE:
4307 tree elt;
4309 /* If size is zero or the target is already cleared, do nothing. */
4310 if (size == 0 || cleared)
4311 cleared = 1;
4312 /* We either clear the aggregate or indicate the value is dead. */
4313 else if ((TREE_CODE (type) == UNION_TYPE
4314 || TREE_CODE (type) == QUAL_UNION_TYPE)
4315 && ! CONSTRUCTOR_ELTS (exp))
4316 /* If the constructor is empty, clear the union. */
4318 clear_storage (target, expr_size (exp));
4319 cleared = 1;
4322 /* If we are building a static constructor into a register,
4323 set the initial value as zero so we can fold the value into
4324 a constant. But if more than one register is involved,
4325 this probably loses. */
4326 else if (REG_P (target) && TREE_STATIC (exp)
4327 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4329 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4330 cleared = 1;
4333 /* If the constructor has fewer fields than the structure or
4334 if we are initializing the structure to mostly zeros, clear
4335 the whole structure first. Don't do this if TARGET is a
4336 register whose mode size isn't equal to SIZE since
4337 clear_storage can't handle this case. */
4338 else if (size > 0
4339 && ((list_length (CONSTRUCTOR_ELTS (exp))
4340 != fields_length (type))
4341 || mostly_zeros_p (exp))
4342 && (!REG_P (target)
4343 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4344 == size)))
4346 clear_storage (target, GEN_INT (size));
4347 cleared = 1;
4350 if (! cleared)
4351 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4353 /* Store each element of the constructor into the
4354 corresponding field of TARGET. */
4356 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4358 tree field = TREE_PURPOSE (elt);
4359 tree value = TREE_VALUE (elt);
4360 enum machine_mode mode;
4361 HOST_WIDE_INT bitsize;
4362 HOST_WIDE_INT bitpos = 0;
4363 tree offset;
4364 rtx to_rtx = target;
4366 /* Just ignore missing fields. We cleared the whole
4367 structure, above, if any fields are missing. */
4368 if (field == 0)
4369 continue;
4371 if (cleared && initializer_zerop (value))
4372 continue;
4374 if (host_integerp (DECL_SIZE (field), 1))
4375 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4376 else
4377 bitsize = -1;
4379 mode = DECL_MODE (field);
4380 if (DECL_BIT_FIELD (field))
4381 mode = VOIDmode;
4383 offset = DECL_FIELD_OFFSET (field);
4384 if (host_integerp (offset, 0)
4385 && host_integerp (bit_position (field), 0))
4387 bitpos = int_bit_position (field);
4388 offset = 0;
4390 else
4391 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4393 if (offset)
4395 rtx offset_rtx;
4397 offset
4398 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4399 make_tree (TREE_TYPE (exp),
4400 target));
4402 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4403 gcc_assert (MEM_P (to_rtx));
4405 #ifdef POINTERS_EXTEND_UNSIGNED
4406 if (GET_MODE (offset_rtx) != Pmode)
4407 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4408 #else
4409 if (GET_MODE (offset_rtx) != ptr_mode)
4410 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4411 #endif
4413 to_rtx = offset_address (to_rtx, offset_rtx,
4414 highest_pow2_factor (offset));
4417 #ifdef WORD_REGISTER_OPERATIONS
4418 /* If this initializes a field that is smaller than a
4419 word, at the start of a word, try to widen it to a full
4420 word. This special case allows us to output C++ member
4421 function initializations in a form that the optimizers
4422 can understand. */
4423 if (REG_P (target)
4424 && bitsize < BITS_PER_WORD
4425 && bitpos % BITS_PER_WORD == 0
4426 && GET_MODE_CLASS (mode) == MODE_INT
4427 && TREE_CODE (value) == INTEGER_CST
4428 && exp_size >= 0
4429 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4431 tree type = TREE_TYPE (value);
4433 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4435 type = lang_hooks.types.type_for_size
4436 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4437 value = convert (type, value);
4440 if (BYTES_BIG_ENDIAN)
4441 value
4442 = fold (build2 (LSHIFT_EXPR, type, value,
4443 build_int_cst (NULL_TREE,
4444 BITS_PER_WORD - bitsize)));
4445 bitsize = BITS_PER_WORD;
4446 mode = word_mode;
4448 #endif
4450 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4451 && DECL_NONADDRESSABLE_P (field))
4453 to_rtx = copy_rtx (to_rtx);
4454 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4457 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4458 value, type, cleared,
4459 get_alias_set (TREE_TYPE (field)));
4461 break;
4463 case ARRAY_TYPE:
4465 tree elt;
4466 int i;
4467 int need_to_clear;
4468 tree domain;
4469 tree elttype = TREE_TYPE (type);
4470 int const_bounds_p;
4471 HOST_WIDE_INT minelt = 0;
4472 HOST_WIDE_INT maxelt = 0;
4474 domain = TYPE_DOMAIN (type);
4475 const_bounds_p = (TYPE_MIN_VALUE (domain)
4476 && TYPE_MAX_VALUE (domain)
4477 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4478 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4480 /* If we have constant bounds for the range of the type, get them. */
4481 if (const_bounds_p)
4483 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4484 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4487 /* If the constructor has fewer elements than the array, clear
4488 the whole array first.  Similarly if this is a static
4489 constructor of a non-BLKmode object. */
4490 if (cleared)
4491 need_to_clear = 0;
4492 else if (REG_P (target) && TREE_STATIC (exp))
4493 need_to_clear = 1;
4494 else
4496 HOST_WIDE_INT count = 0, zero_count = 0;
4497 need_to_clear = ! const_bounds_p;
4499 /* This loop is a more accurate version of the loop in
4500 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4501 is also needed to check for missing elements. */
4502 for (elt = CONSTRUCTOR_ELTS (exp);
4503 elt != NULL_TREE && ! need_to_clear;
4504 elt = TREE_CHAIN (elt))
4506 tree index = TREE_PURPOSE (elt);
4507 HOST_WIDE_INT this_node_count;
4509 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4511 tree lo_index = TREE_OPERAND (index, 0);
4512 tree hi_index = TREE_OPERAND (index, 1);
4514 if (! host_integerp (lo_index, 1)
4515 || ! host_integerp (hi_index, 1))
4517 need_to_clear = 1;
4518 break;
4521 this_node_count = (tree_low_cst (hi_index, 1)
4522 - tree_low_cst (lo_index, 1) + 1);
4524 else
4525 this_node_count = 1;
4527 count += this_node_count;
4528 if (mostly_zeros_p (TREE_VALUE (elt)))
4529 zero_count += this_node_count;
4532 /* Clear the entire array first if there are any missing
4533 elements, or if the incidence of zero elements is >=
4534 75%. */
4535 if (! need_to_clear
4536 && (count < maxelt - minelt + 1
4537 || 4 * zero_count >= 3 * count))
4538 need_to_clear = 1;
4541 if (need_to_clear && size > 0)
4543 if (REG_P (target))
4544 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4545 else
4546 clear_storage (target, GEN_INT (size));
4547 cleared = 1;
4550 if (!cleared && REG_P (target))
4551 /* Inform later passes that the old value is dead. */
4552 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4554 /* Store each element of the constructor into the
4555 corresponding element of TARGET, determined by counting the
4556 elements. */
4557 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4558 elt;
4559 elt = TREE_CHAIN (elt), i++)
4561 enum machine_mode mode;
4562 HOST_WIDE_INT bitsize;
4563 HOST_WIDE_INT bitpos;
4564 int unsignedp;
4565 tree value = TREE_VALUE (elt);
4566 tree index = TREE_PURPOSE (elt);
4567 rtx xtarget = target;
4569 if (cleared && initializer_zerop (value))
4570 continue;
4572 unsignedp = TYPE_UNSIGNED (elttype);
4573 mode = TYPE_MODE (elttype);
4574 if (mode == BLKmode)
4575 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4576 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4577 : -1);
4578 else
4579 bitsize = GET_MODE_BITSIZE (mode);
4581 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4583 tree lo_index = TREE_OPERAND (index, 0);
4584 tree hi_index = TREE_OPERAND (index, 1);
4585 rtx index_r, pos_rtx;
4586 HOST_WIDE_INT lo, hi, count;
4587 tree position;
4589 /* If the range is constant and "small", unroll the loop. */
4590 if (const_bounds_p
4591 && host_integerp (lo_index, 0)
4592 && host_integerp (hi_index, 0)
4593 && (lo = tree_low_cst (lo_index, 0),
4594 hi = tree_low_cst (hi_index, 0),
4595 count = hi - lo + 1,
4596 (!MEM_P (target)
4597 || count <= 2
4598 || (host_integerp (TYPE_SIZE (elttype), 1)
4599 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4600 <= 40 * 8)))))
4602 lo -= minelt; hi -= minelt;
4603 for (; lo <= hi; lo++)
4605 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4607 if (MEM_P (target)
4608 && !MEM_KEEP_ALIAS_SET_P (target)
4609 && TREE_CODE (type) == ARRAY_TYPE
4610 && TYPE_NONALIASED_COMPONENT (type))
4612 target = copy_rtx (target);
4613 MEM_KEEP_ALIAS_SET_P (target) = 1;
4616 store_constructor_field
4617 (target, bitsize, bitpos, mode, value, type, cleared,
4618 get_alias_set (elttype));
4621 else
4623 rtx loop_start = gen_label_rtx ();
4624 rtx loop_end = gen_label_rtx ();
4625 tree exit_cond;
4627 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4628 unsignedp = TYPE_UNSIGNED (domain);
4630 index = build_decl (VAR_DECL, NULL_TREE, domain);
4632 index_r
4633 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4634 &unsignedp, 0));
4635 SET_DECL_RTL (index, index_r);
4636 store_expr (lo_index, index_r, 0);
4638 /* Build the head of the loop. */
4639 do_pending_stack_adjust ();
4640 emit_label (loop_start);
4642 /* Assign value to element index. */
4643 position
4644 = convert (ssizetype,
4645 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4646 index, TYPE_MIN_VALUE (domain))));
4647 position = size_binop (MULT_EXPR, position,
4648 convert (ssizetype,
4649 TYPE_SIZE_UNIT (elttype)));
4651 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4652 xtarget = offset_address (target, pos_rtx,
4653 highest_pow2_factor (position));
4654 xtarget = adjust_address (xtarget, mode, 0);
4655 if (TREE_CODE (value) == CONSTRUCTOR)
4656 store_constructor (value, xtarget, cleared,
4657 bitsize / BITS_PER_UNIT);
4658 else
4659 store_expr (value, xtarget, 0);
4661 /* Generate a conditional jump to exit the loop. */
4662 exit_cond = build2 (LT_EXPR, integer_type_node,
4663 index, hi_index);
4664 jumpif (exit_cond, loop_end);
4666 /* Update the loop counter, and jump to the head of
4667 the loop. */
4668 expand_assignment (index,
4669 build2 (PLUS_EXPR, TREE_TYPE (index),
4670 index, integer_one_node));
4672 emit_jump (loop_start);
4674 /* Build the end of the loop. */
4675 emit_label (loop_end);
4678 else if ((index != 0 && ! host_integerp (index, 0))
4679 || ! host_integerp (TYPE_SIZE (elttype), 1))
4681 tree position;
4683 if (index == 0)
4684 index = ssize_int (1);
4686 if (minelt)
4687 index = fold_convert (ssizetype,
4688 fold (build2 (MINUS_EXPR,
4689 TREE_TYPE (index),
4690 index,
4691 TYPE_MIN_VALUE (domain))));
4693 position = size_binop (MULT_EXPR, index,
4694 convert (ssizetype,
4695 TYPE_SIZE_UNIT (elttype)));
4696 xtarget = offset_address (target,
4697 expand_expr (position, 0, VOIDmode, 0),
4698 highest_pow2_factor (position));
4699 xtarget = adjust_address (xtarget, mode, 0);
4700 store_expr (value, xtarget, 0);
4702 else
4704 if (index != 0)
4705 bitpos = ((tree_low_cst (index, 0) - minelt)
4706 * tree_low_cst (TYPE_SIZE (elttype), 1));
4707 else
4708 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4710 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4711 && TREE_CODE (type) == ARRAY_TYPE
4712 && TYPE_NONALIASED_COMPONENT (type))
4714 target = copy_rtx (target);
4715 MEM_KEEP_ALIAS_SET_P (target) = 1;
4717 store_constructor_field (target, bitsize, bitpos, mode, value,
4718 type, cleared, get_alias_set (elttype));
4721 break;
4724 case VECTOR_TYPE:
4726 tree elt;
4727 int i;
4728 int need_to_clear;
4729 int icode = 0;
4730 tree elttype = TREE_TYPE (type);
4731 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4732 enum machine_mode eltmode = TYPE_MODE (elttype);
4733 HOST_WIDE_INT bitsize;
4734 HOST_WIDE_INT bitpos;
4735 rtx *vector = NULL;
4736 unsigned n_elts;
4738 gcc_assert (eltmode != BLKmode);
4740 n_elts = TYPE_VECTOR_SUBPARTS (type);
4741 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4743 enum machine_mode mode = GET_MODE (target);
4745 icode = (int) vec_init_optab->handlers[mode].insn_code;
4746 if (icode != CODE_FOR_nothing)
4748 unsigned int i;
4750 vector = alloca (n_elts);
4751 for (i = 0; i < n_elts; i++)
4752 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4756 /* If the constructor has fewer elements than the vector,
4757 clear the whole vector first.  Similarly if this is a static
4758 constructor of a non-BLKmode object. */
4759 if (cleared)
4760 need_to_clear = 0;
4761 else if (REG_P (target) && TREE_STATIC (exp))
4762 need_to_clear = 1;
4763 else
4765 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4767 for (elt = CONSTRUCTOR_ELTS (exp);
4768 elt != NULL_TREE;
4769 elt = TREE_CHAIN (elt))
4771 int n_elts_here = tree_low_cst
4772 (int_const_binop (TRUNC_DIV_EXPR,
4773 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4774 TYPE_SIZE (elttype), 0), 1);
4776 count += n_elts_here;
4777 if (mostly_zeros_p (TREE_VALUE (elt)))
4778 zero_count += n_elts_here;
4781 /* Clear the entire vector first if there are any missing elements,
4782 or if the incidence of zero elements is >= 75%. */
4783 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4786 if (need_to_clear && size > 0 && !vector)
4788 if (REG_P (target))
4789 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4790 else
4791 clear_storage (target, GEN_INT (size));
4792 cleared = 1;
4795 if (!cleared && REG_P (target))
4796 /* Inform later passes that the old value is dead. */
4797 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4799 /* Store each element of the constructor into the corresponding
4800 element of TARGET, determined by counting the elements. */
4801 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4802 elt;
4803 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4805 tree value = TREE_VALUE (elt);
4806 tree index = TREE_PURPOSE (elt);
4807 HOST_WIDE_INT eltpos;
4809 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4810 if (cleared && initializer_zerop (value))
4811 continue;
4813 if (index != 0)
4814 eltpos = tree_low_cst (index, 1);
4815 else
4816 eltpos = i;
4818 if (vector)
4820 /* Vector CONSTRUCTORs should only be built from smaller
4821 vectors in the case of BLKmode vectors. */
4822 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4823 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4825 else
4827 enum machine_mode value_mode =
4828 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4829 ? TYPE_MODE (TREE_TYPE (value))
4830 : eltmode;
4831 bitpos = eltpos * elt_size;
4832 store_constructor_field (target, bitsize, bitpos,
4833 value_mode, value, type,
4834 cleared, get_alias_set (elttype));
4838 if (vector)
4839 emit_insn (GEN_FCN (icode)
4840 (target,
4841 gen_rtx_PARALLEL (GET_MODE (target),
4842 gen_rtvec_v (n_elts, vector))));
4843 break;
4846 /* Set constructor assignments. */
4847 case SET_TYPE:
4849 tree elt = CONSTRUCTOR_ELTS (exp);
4850 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4851 tree domain = TYPE_DOMAIN (type);
4852 tree domain_min, domain_max, bitlength;
4854 /* The default implementation strategy is to extract the
4855 constant parts of the constructor, use that to initialize
4856 the target, and then "or" in whatever non-constant ranges
4857 we need in addition.
4859 If a large set is all zero or all ones, it is probably
4860 better to set it using memset. Also, if a large set has
4861 just a single range, it may also be better to first clear the
4862 whole set (using memset), and then set the
4863 bits we want. */
4865 /* Check for all zeros. */
4866 if (elt == NULL_TREE && size > 0)
4868 if (!cleared)
4869 clear_storage (target, GEN_INT (size));
4870 return;
4873 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4874 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4875 bitlength = size_binop (PLUS_EXPR,
4876 size_diffop (domain_max, domain_min),
4877 ssize_int (1));
4879 nbits = tree_low_cst (bitlength, 1);
4881 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
4882 that are "complicated" (more than one range), initialize
4883 (the constant parts) by copying from a constant. */
4884 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4885 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4887 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4888 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4889 char *bit_buffer = alloca (nbits);
4890 HOST_WIDE_INT word = 0;
4891 unsigned int bit_pos = 0;
4892 unsigned int ibit = 0;
4893 unsigned int offset = 0; /* In bytes from beginning of set. */
4895 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4896 for (;;)
4898 if (bit_buffer[ibit])
4900 if (BYTES_BIG_ENDIAN)
4901 word |= (1 << (set_word_size - 1 - bit_pos));
4902 else
4903 word |= 1 << bit_pos;
4906 bit_pos++; ibit++;
4907 if (bit_pos >= set_word_size || ibit == nbits)
4909 if (word != 0 || ! cleared)
4911 rtx datum = gen_int_mode (word, mode);
4912 rtx to_rtx;
4914 /* The assumption here is that it is safe to
4915 use XEXP if the set is multi-word, but not
4916 if it's single-word. */
4917 if (MEM_P (target))
4918 to_rtx = adjust_address (target, mode, offset);
4919 else
4921 gcc_assert (!offset);
4922 to_rtx = target;
4924 emit_move_insn (to_rtx, datum);
4927 if (ibit == nbits)
4928 break;
4929 word = 0;
4930 bit_pos = 0;
4931 offset += set_word_size / BITS_PER_UNIT;
4935 else if (!cleared)
4936 /* Don't bother clearing storage if the set is all ones. */
4937 if (TREE_CHAIN (elt) != NULL_TREE
4938 || (TREE_PURPOSE (elt) == NULL_TREE
4939 ? nbits != 1
4940 : ( ! host_integerp (TREE_VALUE (elt), 0)
4941 || ! host_integerp (TREE_PURPOSE (elt), 0)
4942 || (tree_low_cst (TREE_VALUE (elt), 0)
4943 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4944 != (HOST_WIDE_INT) nbits))))
4945 clear_storage (target, expr_size (exp));
4947 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4949 /* Start of range of element or NULL. */
4950 tree startbit = TREE_PURPOSE (elt);
4951 /* End of range of element, or element value. */
4952 tree endbit = TREE_VALUE (elt);
4953 HOST_WIDE_INT startb, endb;
4954 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4956 bitlength_rtx = expand_expr (bitlength,
4957 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4959 /* Handle non-range tuple element like [ expr ]. */
4960 if (startbit == NULL_TREE)
4962 startbit = save_expr (endbit);
4963 endbit = startbit;
4966 startbit = convert (sizetype, startbit);
4967 endbit = convert (sizetype, endbit);
4968 if (! integer_zerop (domain_min))
4970 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4971 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4973 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4974 EXPAND_CONST_ADDRESS);
4975 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4976 EXPAND_CONST_ADDRESS);
4978 if (REG_P (target))
4980 targetx
4981 = assign_temp
4982 ((build_qualified_type (lang_hooks.types.type_for_mode
4983 (GET_MODE (target), 0),
4984 TYPE_QUAL_CONST)),
4985 0, 1, 1);
4986 emit_move_insn (targetx, target);
4989 else
4991 gcc_assert (MEM_P (target));
4992 targetx = target;
4995 /* Optimization: If startbit and endbit are constants divisible
4996 by BITS_PER_UNIT, call memset instead. */
4997 if (TREE_CODE (startbit) == INTEGER_CST
4998 && TREE_CODE (endbit) == INTEGER_CST
4999 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5000 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5002 emit_library_call (memset_libfunc, LCT_NORMAL,
5003 VOIDmode, 3,
5004 plus_constant (XEXP (targetx, 0),
5005 startb / BITS_PER_UNIT),
5006 Pmode,
5007 constm1_rtx, TYPE_MODE (integer_type_node),
5008 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5009 TYPE_MODE (sizetype));
5011 else
5012 emit_library_call (setbits_libfunc, LCT_NORMAL,
5013 VOIDmode, 4, XEXP (targetx, 0),
5014 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5015 startbit_rtx, TYPE_MODE (sizetype),
5016 endbit_rtx, TYPE_MODE (sizetype));
5018 if (REG_P (target))
5019 emit_move_insn (target, targetx);
5021 break;
5023 default:
5024 gcc_unreachable ();
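/* Illustrative source-level aggregates of the kinds handled above; the names
   are hypothetical and the snippet is not compiled.  The mostly-zero array
   is expected to expand as a block clear followed by stores of only the
   nonzero elements, and the GNU "[lo ... hi]" designator is what arrives
   here as a RANGE_EXPR index.  */
#if 0
void
ex_init (void)
{
  struct ex_point { int x, y, z; };
  struct ex_point p = { 1, 2 };            /* RECORD_TYPE; z is cleared.  */
  int sparse[64] = { [3] = 7, [60] = 9 };  /* ARRAY_TYPE, mostly zero.    */
  int ranged[16] = { [0 ... 9] = 1 };      /* GNU range -> RANGE_EXPR.    */

  (void) p; (void) sparse; (void) ranged;
}
#endif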
5028 /* Store the value of EXP (an expression tree)
5029 into a subfield of TARGET which has mode MODE and occupies
5030 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5031 If MODE is VOIDmode, it means that we are storing into a bit-field.
5033 Always return const0_rtx unless we have something particular to
5034 return.
5036 TYPE is the type of the underlying object,
5038 ALIAS_SET is the alias set for the destination. This value will
5039 (in general) be different from that for TARGET, since TARGET is a
5040 reference to the containing structure. */
5042 static rtx
5043 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5044 enum machine_mode mode, tree exp, tree type, int alias_set)
5046 HOST_WIDE_INT width_mask = 0;
5048 if (TREE_CODE (exp) == ERROR_MARK)
5049 return const0_rtx;
5051 /* If we have nothing to store, do nothing unless the expression has
5052 side-effects. */
5053 if (bitsize == 0)
5054 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5055 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5056 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5058 /* If we are storing into an unaligned field of an aligned union that is
5059 in a register, we may have the mode of TARGET being an integer mode but
5060 MODE == BLKmode. In that case, get an aligned object whose size and
5061 alignment are the same as TARGET and store TARGET into it (we can avoid
5062 the store if the field being stored is the entire width of TARGET). Then
5063 call ourselves recursively to store the field into a BLKmode version of
5064 that object. Finally, load from the object into TARGET. This is not
5065 very efficient in general, but should only be slightly more expensive
5066 than the otherwise-required unaligned accesses. Perhaps this can be
5067 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5068 twice, once with emit_move_insn and once via store_field. */
5070 if (mode == BLKmode
5071 && (REG_P (target) || GET_CODE (target) == SUBREG))
5073 rtx object = assign_temp (type, 0, 1, 1);
5074 rtx blk_object = adjust_address (object, BLKmode, 0);
5076 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5077 emit_move_insn (object, target);
5079 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5081 emit_move_insn (target, object);
5083 /* We want to return the BLKmode version of the data. */
5084 return blk_object;
5087 if (GET_CODE (target) == CONCAT)
5089 /* We're storing into a struct containing a single __complex. */
5091 gcc_assert (!bitpos);
5092 return store_expr (exp, target, 0);
5095 /* If the structure is in a register or if the component
5096 is a bit field, we cannot use addressing to access it.
5097 Use bit-field techniques or SUBREG to store in it. */
5099 if (mode == VOIDmode
5100 || (mode != BLKmode && ! direct_store[(int) mode]
5101 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5102 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5103 || REG_P (target)
5104 || GET_CODE (target) == SUBREG
5105 /* If the field isn't aligned enough to store as an ordinary memref,
5106 store it as a bit field. */
5107 || (mode != BLKmode
5108 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5109 || bitpos % GET_MODE_ALIGNMENT (mode))
5110 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5111 || (bitpos % BITS_PER_UNIT != 0)))
5112 /* If the RHS and field are a constant size and the size of the
5113 RHS isn't the same size as the bitfield, we must use bitfield
5114 operations. */
5115 || (bitsize >= 0
5116 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5117 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5119 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5121 /* If BITSIZE is narrower than the size of the type of EXP
5122 we will be narrowing TEMP. Normally, what's wanted are the
5123 low-order bits. However, if EXP's type is a record and this is
5124 a big-endian machine, we want the upper BITSIZE bits. */
5125 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5126 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5127 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5128 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5129 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5130 - bitsize),
5131 NULL_RTX, 1);
5133 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5134 MODE. */
5135 if (mode != VOIDmode && mode != BLKmode
5136 && mode != TYPE_MODE (TREE_TYPE (exp)))
5137 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5139 /* If the modes of TARGET and TEMP are both BLKmode, both
5140 must be in memory and BITPOS must be aligned on a byte
5141 boundary. If so, we simply do a block copy. */
5142 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5144 gcc_assert (MEM_P (target) && MEM_P (temp)
5145 && !(bitpos % BITS_PER_UNIT));
5147 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5148 emit_block_move (target, temp,
5149 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5150 / BITS_PER_UNIT),
5151 BLOCK_OP_NORMAL);
5153 return const0_rtx;
5156 /* Store the value in the bitfield. */
5157 store_bit_field (target, bitsize, bitpos, mode, temp);
5159 return const0_rtx;
5161 else
5163 /* Now build a reference to just the desired component. */
5164 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5166 if (to_rtx == target)
5167 to_rtx = copy_rtx (to_rtx);
5169 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5170 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5171 set_mem_alias_set (to_rtx, alias_set);
5173 return store_expr (exp, to_rtx, 0);
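/* Illustrative sketch of the parameter convention documented above, as used
   by store_constructor_field; the function name and field geometry are
   hypothetical and the snippet is not compiled.  Passing VOIDmode for MODE
   requests bit-field handling.  */
#if 0
static void
example_store_bit_range (rtx target, tree exp, tree type, int alias_set)
{
  /* Store EXP into a 3-bit field that starts 5 bits into TARGET.  */
  store_field (target, 3, 5, VOIDmode, exp, type, alias_set);
}
#endif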
5177 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5178 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5179 codes and find the ultimate containing object, which we return.
5181 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5182 bit position, and *PUNSIGNEDP to the signedness of the field.
5183 If the position of the field is variable, we store a tree
5184 giving the variable offset (in units) in *POFFSET.
5185 This offset is in addition to the bit position.
5186 If the position is not variable, we store 0 in *POFFSET.
5188 If any of the extraction expressions is volatile,
5189 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5191 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5192 is a mode that can be used to access the field. In that case, *PBITSIZE
5193 is redundant.
5195 If the field describes a variable-sized object, *PMODE is set to
5196 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5197 this case, but the address of the object can be found. */
5199 tree
5200 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5201 HOST_WIDE_INT *pbitpos, tree *poffset,
5202 enum machine_mode *pmode, int *punsignedp,
5203 int *pvolatilep)
5205 tree size_tree = 0;
5206 enum machine_mode mode = VOIDmode;
5207 tree offset = size_zero_node;
5208 tree bit_offset = bitsize_zero_node;
5209 tree tem;
5211 /* First get the mode, signedness, and size. We do this from just the
5212 outermost expression. */
5213 if (TREE_CODE (exp) == COMPONENT_REF)
5215 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5216 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5217 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5219 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5221 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5223 size_tree = TREE_OPERAND (exp, 1);
5224 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5226 else
5228 mode = TYPE_MODE (TREE_TYPE (exp));
5229 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5231 if (mode == BLKmode)
5232 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5233 else
5234 *pbitsize = GET_MODE_BITSIZE (mode);
5237 if (size_tree != 0)
5239 if (! host_integerp (size_tree, 1))
5240 mode = BLKmode, *pbitsize = -1;
5241 else
5242 *pbitsize = tree_low_cst (size_tree, 1);
5245 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5246 and find the ultimate containing object. */
5247 while (1)
5249 if (TREE_CODE (exp) == BIT_FIELD_REF)
5250 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5251 else if (TREE_CODE (exp) == COMPONENT_REF)
5253 tree field = TREE_OPERAND (exp, 1);
5254 tree this_offset = component_ref_field_offset (exp);
5256 /* If this field hasn't been filled in yet, don't go
5257 past it. This should only happen when folding expressions
5258 made during type construction. */
5259 if (this_offset == 0)
5260 break;
5262 offset = size_binop (PLUS_EXPR, offset, this_offset);
5263 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5264 DECL_FIELD_BIT_OFFSET (field));
5266 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5269 else if (TREE_CODE (exp) == ARRAY_REF
5270 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5272 tree index = TREE_OPERAND (exp, 1);
5273 tree low_bound = array_ref_low_bound (exp);
5274 tree unit_size = array_ref_element_size (exp);
5276 /* We assume all arrays have sizes that are a multiple of a byte.
5277 First subtract the lower bound, if any, in the type of the
5278 index, then convert to sizetype and multiply by the size of the
5279 array element. */
5280 if (! integer_zerop (low_bound))
5281 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5282 index, low_bound));
5284 offset = size_binop (PLUS_EXPR, offset,
5285 size_binop (MULT_EXPR,
5286 convert (sizetype, index),
5287 unit_size));
5290 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5291 conversions that don't change the mode, and all view conversions
5292 except those that need to "step up" the alignment. */
5293 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5294 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5295 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5296 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5297 && STRICT_ALIGNMENT
5298 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5299 < BIGGEST_ALIGNMENT)
5300 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5301 || TYPE_ALIGN_OK (TREE_TYPE
5302 (TREE_OPERAND (exp, 0))))))
5303 && ! ((TREE_CODE (exp) == NOP_EXPR
5304 || TREE_CODE (exp) == CONVERT_EXPR)
5305 && (TYPE_MODE (TREE_TYPE (exp))
5306 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5307 break;
5309 /* If any reference in the chain is volatile, the effect is volatile. */
5310 if (TREE_THIS_VOLATILE (exp))
5311 *pvolatilep = 1;
5313 exp = TREE_OPERAND (exp, 0);
5316 /* If OFFSET is constant, see if we can return the whole thing as a
5317 constant bit position. Otherwise, split it up. */
5318 if (host_integerp (offset, 0)
5319 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5320 bitsize_unit_node))
5321 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5322 && host_integerp (tem, 0))
5323 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5324 else
5325 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5327 *pmode = mode;
5328 return exp;
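/* Illustrative sketch of the canonical calling pattern for
   get_inner_reference; the function name is hypothetical and the snippet is
   not compiled.  On return the result is the ultimate containing object, and
   the reference denotes *PBITSIZE bits at *PBITPOS, plus *POFFSET units when
   the position is variable.  */
#if 0
static tree
example_decompose_ref (tree ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  return get_inner_reference (ref, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep);
}
#endif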
5331 /* Return a tree of sizetype representing the size, in bytes, of the element
5332 of EXP, an ARRAY_REF. */
5334 tree
5335 array_ref_element_size (tree exp)
5337 tree aligned_size = TREE_OPERAND (exp, 3);
5338 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5340 /* If a size was specified in the ARRAY_REF, it's the size measured
5341 in alignment units of the element type. So multiply by that value. */
5342 if (aligned_size)
5344 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5345 sizetype from another type of the same width and signedness. */
5346 if (TREE_TYPE (aligned_size) != sizetype)
5347 aligned_size = fold_convert (sizetype, aligned_size);
5348 return size_binop (MULT_EXPR, aligned_size,
5349 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5352 /* Otherwise, take the size from that of the element type. Substitute
5353 any PLACEHOLDER_EXPR that we have. */
5354 else
5355 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5358 /* Return a tree representing the lower bound of the array mentioned in
5359 EXP, an ARRAY_REF. */
5361 tree
5362 array_ref_low_bound (tree exp)
5364 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5366 /* If a lower bound is specified in EXP, use it. */
5367 if (TREE_OPERAND (exp, 2))
5368 return TREE_OPERAND (exp, 2);
5370 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5371 substituting for a PLACEHOLDER_EXPR as needed. */
5372 if (domain_type && TYPE_MIN_VALUE (domain_type))
5373 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5375 /* Otherwise, return a zero of the appropriate type. */
5376 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5379 /* Return a tree representing the upper bound of the array mentioned in
5380 EXP, an ARRAY_REF. */
5382 tree
5383 array_ref_up_bound (tree exp)
5385 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5387 /* If there is a domain type and it has an upper bound, use it, substituting
5388 for a PLACEHOLDER_EXPR as needed. */
5389 if (domain_type && TYPE_MAX_VALUE (domain_type))
5390 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5392 /* Otherwise fail. */
5393 return NULL_TREE;
5396 /* Return a tree representing the offset, in bytes, of the field referenced
5397 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5399 tree
5400 component_ref_field_offset (tree exp)
5402 tree aligned_offset = TREE_OPERAND (exp, 2);
5403 tree field = TREE_OPERAND (exp, 1);
5405 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5406 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5407 value. */
5408 if (aligned_offset)
5410 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5411 sizetype from another type of the same width and signedness. */
5412 if (TREE_TYPE (aligned_offset) != sizetype)
5413 aligned_offset = fold_convert (sizetype, aligned_offset);
5414 return size_binop (MULT_EXPR, aligned_offset,
5415 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5418 /* Otherwise, take the offset from that of the field. Substitute
5419 any PLACEHOLDER_EXPR that we have. */
5420 else
5421 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
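/* Illustrative sketch combining the ARRAY_REF accessors above the same way
   get_inner_reference does: the byte offset of an element is
   (index - low_bound) * element_size.  The function name is hypothetical and
   the snippet is not compiled.  */
#if 0
static tree
example_array_ref_byte_offset (tree aref)
{
  tree index = TREE_OPERAND (aref, 1);
  tree low = array_ref_low_bound (aref);

  if (! integer_zerop (low))
    index = fold (build2 (MINUS_EXPR, TREE_TYPE (index), index, low));
  return size_binop (MULT_EXPR, convert (sizetype, index),
                     array_ref_element_size (aref));
}
#endif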
5424 /* Return 1 if T is an expression that get_inner_reference handles. */
5427 handled_component_p (tree t)
5429 switch (TREE_CODE (t))
5431 case BIT_FIELD_REF:
5432 case COMPONENT_REF:
5433 case ARRAY_REF:
5434 case ARRAY_RANGE_REF:
5435 case NON_LVALUE_EXPR:
5436 case VIEW_CONVERT_EXPR:
5437 return 1;
5439 /* ??? Sure they are handled, but get_inner_reference may return
5440 a different PBITSIZE, depending upon whether the expression is
5441 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5442 case NOP_EXPR:
5443 case CONVERT_EXPR:
5444 return (TYPE_MODE (TREE_TYPE (t))
5445 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5447 default:
5448 return 0;
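/* Illustrative sketch of the usual use of handled_component_p: peel
   reference nodes until the base object is reached, the same walk
   get_inner_reference performs with more bookkeeping.  The function name is
   hypothetical and the snippet is not compiled.  */
#if 0
static tree
example_get_base (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  return t;
}
#endif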
5452 /* Given an rtx VALUE that may contain additions and multiplications, return
5453 an equivalent value that just refers to a register, memory, or constant.
5454 This is done by generating instructions to perform the arithmetic and
5455 returning a pseudo-register containing the value.
5457 The returned value may be a REG, SUBREG, MEM or constant. */
5460 force_operand (rtx value, rtx target)
5462 rtx op1, op2;
5463 /* Use subtarget as the target for operand 0 of a binary operation. */
5464 rtx subtarget = get_subtarget (target);
5465 enum rtx_code code = GET_CODE (value);
5467 /* Check for subreg applied to an expression produced by loop optimizer. */
5468 if (code == SUBREG
5469 && !REG_P (SUBREG_REG (value))
5470 && !MEM_P (SUBREG_REG (value)))
5472 value = simplify_gen_subreg (GET_MODE (value),
5473 force_reg (GET_MODE (SUBREG_REG (value)),
5474 force_operand (SUBREG_REG (value),
5475 NULL_RTX)),
5476 GET_MODE (SUBREG_REG (value)),
5477 SUBREG_BYTE (value));
5478 code = GET_CODE (value);
5481 /* Check for a PIC address load. */
5482 if ((code == PLUS || code == MINUS)
5483 && XEXP (value, 0) == pic_offset_table_rtx
5484 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5485 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5486 || GET_CODE (XEXP (value, 1)) == CONST))
5488 if (!subtarget)
5489 subtarget = gen_reg_rtx (GET_MODE (value));
5490 emit_move_insn (subtarget, value);
5491 return subtarget;
5494 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5496 if (!target)
5497 target = gen_reg_rtx (GET_MODE (value));
5498 convert_move (target, force_operand (XEXP (value, 0), NULL),
5499 code == ZERO_EXTEND);
5500 return target;
5503 if (ARITHMETIC_P (value))
5505 op2 = XEXP (value, 1);
5506 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5507 subtarget = 0;
5508 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5510 code = PLUS;
5511 op2 = negate_rtx (GET_MODE (value), op2);
5514 /* Check for an addition with OP2 a constant integer and our first
5515 operand a PLUS of a virtual register and something else. In that
5516 case, we want to emit the sum of the virtual register and the
5517 constant first and then add the other value. This allows virtual
5518 register instantiation to simply modify the constant rather than
5519 creating another one around this addition. */
5520 if (code == PLUS && GET_CODE (op2) == CONST_INT
5521 && GET_CODE (XEXP (value, 0)) == PLUS
5522 && REG_P (XEXP (XEXP (value, 0), 0))
5523 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5524 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5526 rtx temp = expand_simple_binop (GET_MODE (value), code,
5527 XEXP (XEXP (value, 0), 0), op2,
5528 subtarget, 0, OPTAB_LIB_WIDEN);
5529 return expand_simple_binop (GET_MODE (value), code, temp,
5530 force_operand (XEXP (XEXP (value,
5531 0), 1), 0),
5532 target, 0, OPTAB_LIB_WIDEN);
5535 op1 = force_operand (XEXP (value, 0), subtarget);
5536 op2 = force_operand (op2, NULL_RTX);
5537 switch (code)
5539 case MULT:
5540 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5541 case DIV:
5542 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5543 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5544 target, 1, OPTAB_LIB_WIDEN);
5545 else
5546 return expand_divmod (0,
5547 FLOAT_MODE_P (GET_MODE (value))
5548 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5549 GET_MODE (value), op1, op2, target, 0);
5550 break;
5551 case MOD:
5552 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5553 target, 0);
5554 break;
5555 case UDIV:
5556 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5557 target, 1);
5558 break;
5559 case UMOD:
5560 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5561 target, 1);
5562 break;
5563 case ASHIFTRT:
5564 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5565 target, 0, OPTAB_LIB_WIDEN);
5566 break;
5567 default:
5568 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5569 target, 1, OPTAB_LIB_WIDEN);
5572 if (UNARY_P (value))
5574 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5575 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5578 #ifdef INSN_SCHEDULING
5579 /* On machines that have insn scheduling, we want all memory references to be
5580 explicit, so we need to deal with such paradoxical SUBREGs. */
5581 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5582 && (GET_MODE_SIZE (GET_MODE (value))
5583 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5584 value
5585 = simplify_gen_subreg (GET_MODE (value),
5586 force_reg (GET_MODE (SUBREG_REG (value)),
5587 force_operand (SUBREG_REG (value),
5588 NULL_RTX)),
5589 GET_MODE (SUBREG_REG (value)),
5590 SUBREG_BYTE (value));
5591 #endif
5593 return value;
5596 /* Subroutine of expand_expr: return nonzero iff there is no way that
5597 EXP can reference X, which is being modified. TOP_P is nonzero if this
5598 call is going to be used to determine whether we need a temporary
5599 for EXP, as opposed to a recursive call to this function.
5601 It is always safe for this routine to return zero since it merely
5602 searches for optimization opportunities. */
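/* For instance, expand_operands below uses safe_from_p (target, exp1, 1)
   to decide whether TARGET may hold the first operand's value while the
   second operand is still unexpanded; a zero return simply makes it fall
   back to a fresh temporary.  */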
5605 safe_from_p (rtx x, tree exp, int top_p)
5607 rtx exp_rtl = 0;
5608 int i, nops;
5610 if (x == 0
5611 /* If EXP has varying size, we MUST use a target since we currently
5612 have no way of allocating temporaries of variable size
5613 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5614 So we assume here that something at a higher level has prevented a
5615 clash. This is somewhat bogus, but the best we can do. Only
5616 do this when X is BLKmode and when we are at the top level. */
5617 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5618 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5619 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5620 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5621 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5622 != INTEGER_CST)
5623 && GET_MODE (x) == BLKmode)
5624 /* If X is in the outgoing argument area, it is always safe. */
5625 || (MEM_P (x)
5626 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5627 || (GET_CODE (XEXP (x, 0)) == PLUS
5628 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5629 return 1;
5631 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5632 find the underlying pseudo. */
5633 if (GET_CODE (x) == SUBREG)
5635 x = SUBREG_REG (x);
5636 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5637 return 0;
5640 /* Now look at our tree code and possibly recurse. */
5641 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5643 case tcc_declaration:
5644 exp_rtl = DECL_RTL_IF_SET (exp);
5645 break;
5647 case tcc_constant:
5648 return 1;
5650 case tcc_exceptional:
5651 if (TREE_CODE (exp) == TREE_LIST)
5653 while (1)
5655 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5656 return 0;
5657 exp = TREE_CHAIN (exp);
5658 if (!exp)
5659 return 1;
5660 if (TREE_CODE (exp) != TREE_LIST)
5661 return safe_from_p (x, exp, 0);
5664 else if (TREE_CODE (exp) == ERROR_MARK)
5665 return 1; /* An already-visited SAVE_EXPR? */
5666 else
5667 return 0;
5669 case tcc_statement:
5670 /* The only case we look at here is the DECL_INITIAL inside a
5671 DECL_EXPR. */
5672 return (TREE_CODE (exp) != DECL_EXPR
5673 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5674 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5675 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5677 case tcc_binary:
5678 case tcc_comparison:
5679 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5680 return 0;
5681 /* Fall through. */
5683 case tcc_unary:
5684 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5686 case tcc_expression:
5687 case tcc_reference:
5688 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5689 the expression. If it is set, we conflict iff we are that rtx or
5690 both are in memory. Otherwise, we check all operands of the
5691 expression recursively. */
5693 switch (TREE_CODE (exp))
5695 case ADDR_EXPR:
5696 /* If the operand is static or we are static, we can't conflict.
5697 Likewise if we don't conflict with the operand at all. */
5698 if (staticp (TREE_OPERAND (exp, 0))
5699 || TREE_STATIC (exp)
5700 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5701 return 1;
5703 /* Otherwise, the only way this can conflict is if we are taking
5704 the address of a DECL whose address is part of X, which is
5705 very rare. */
5706 exp = TREE_OPERAND (exp, 0);
5707 if (DECL_P (exp))
5709 if (!DECL_RTL_SET_P (exp)
5710 || !MEM_P (DECL_RTL (exp)))
5711 return 0;
5712 else
5713 exp_rtl = XEXP (DECL_RTL (exp), 0);
5715 break;
5717 case MISALIGNED_INDIRECT_REF:
5718 case ALIGN_INDIRECT_REF:
5719 case INDIRECT_REF:
5720 if (MEM_P (x)
5721 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5722 get_alias_set (exp)))
5723 return 0;
5724 break;
5726 case CALL_EXPR:
5727 /* Assume that the call will clobber all hard registers and
5728 all of memory. */
5729 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5730 || MEM_P (x))
5731 return 0;
5732 break;
5734 case WITH_CLEANUP_EXPR:
5735 case CLEANUP_POINT_EXPR:
5736 /* Lowered by gimplify.c. */
5737 gcc_unreachable ();
5739 case SAVE_EXPR:
5740 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5742 default:
5743 break;
5746 /* If we have an rtx, we do not need to scan our operands. */
5747 if (exp_rtl)
5748 break;
5750 nops = first_rtl_op (TREE_CODE (exp));
5751 for (i = 0; i < nops; i++)
5752 if (TREE_OPERAND (exp, i) != 0
5753 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5754 return 0;
5756 /* If this is a language-specific tree code, it may require
5757 special handling. */
5758 if ((unsigned int) TREE_CODE (exp)
5759 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5760 && !lang_hooks.safe_from_p (x, exp))
5761 return 0;
5762 break;
5764 case tcc_type:
5765 /* Should never get a type here. */
5766 gcc_unreachable ();
5769 /* If we have an rtl, find any enclosed object. Then see if we conflict
5770 with it. */
5771 if (exp_rtl)
5773 if (GET_CODE (exp_rtl) == SUBREG)
5775 exp_rtl = SUBREG_REG (exp_rtl);
5776 if (REG_P (exp_rtl)
5777 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5778 return 0;
5781 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5782 are memory and they conflict. */
5783 return ! (rtx_equal_p (x, exp_rtl)
5784 || (MEM_P (x) && MEM_P (exp_rtl)
5785 && true_dependence (exp_rtl, VOIDmode, x,
5786 rtx_addr_varies_p)));
5789 /* If we reach here, it is safe. */
5790 return 1;
5794 /* Return the highest power of two that EXP is known to be a multiple of.
5795 This is used in updating alignment of MEMs in array references. */
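/* For example, for an offset expression of the form I * 4 + 8 the MULT_EXPR
   case yields 1 * 4 = 4, the INTEGER_CST case yields 8, and the PLUS_EXPR
   case returns MIN (4, 8) = 4, so such an offset is known to be a multiple
   of 4.  */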
5797 static unsigned HOST_WIDE_INT
5798 highest_pow2_factor (tree exp)
5800 unsigned HOST_WIDE_INT c0, c1;
5802 switch (TREE_CODE (exp))
5804 case INTEGER_CST:
5805 /* We can find the lowest bit that's a one. If the low
5806 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5807 We need to handle this case since we can find it in a COND_EXPR,
5808 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5809 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5810 later ICE. */
5811 if (TREE_CONSTANT_OVERFLOW (exp))
5812 return BIGGEST_ALIGNMENT;
5813 else
5815 /* Note: tree_low_cst is intentionally not used here,
5816 we don't care about the upper bits. */
5817 c0 = TREE_INT_CST_LOW (exp);
5818 c0 &= -c0;
5819 return c0 ? c0 : BIGGEST_ALIGNMENT;
5821 break;
5823 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5824 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5825 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5826 return MIN (c0, c1);
5828 case MULT_EXPR:
5829 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5830 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5831 return c0 * c1;
5833 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5834 case CEIL_DIV_EXPR:
5835 if (integer_pow2p (TREE_OPERAND (exp, 1))
5836 && host_integerp (TREE_OPERAND (exp, 1), 1))
5838 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5839 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5840 return MAX (1, c0 / c1);
5842 break;
5844 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5845 case SAVE_EXPR:
5846 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5848 case COMPOUND_EXPR:
5849 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5851 case COND_EXPR:
5852 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5853 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5854 return MIN (c0, c1);
5856 default:
5857 break;
5860 return 1;
5863 /* Similar, except that the alignment requirements of TARGET are
5864 taken into account. Assume it is at least as aligned as its
5865 type, unless it is a COMPONENT_REF in which case the layout of
5866 the structure gives the alignment. */
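/* For example, if EXP only guarantees a factor of 4 but TARGET is a
   COMPONENT_REF of a field whose DECL_ALIGN_UNIT is 16, the result is
   MAX (4, 16) = 16.  */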
5868 static unsigned HOST_WIDE_INT
5869 highest_pow2_factor_for_target (tree target, tree exp)
5871 unsigned HOST_WIDE_INT target_align, factor;
5873 factor = highest_pow2_factor (exp);
5874 if (TREE_CODE (target) == COMPONENT_REF)
5875 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5876 else
5877 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5878 return MAX (factor, target_align);
5881 /* Expands variable VAR. */
5883 void
5884 expand_var (tree var)
5886 if (DECL_EXTERNAL (var))
5887 return;
5889 if (TREE_STATIC (var))
5890 /* If this is an inlined copy of a static local variable,
5891 look up the original decl. */
5892 var = DECL_ORIGIN (var);
5894 if (TREE_STATIC (var)
5895 ? !TREE_ASM_WRITTEN (var)
5896 : !DECL_RTL_SET_P (var))
5898 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5899 /* Should be ignored. */;
5900 else if (lang_hooks.expand_decl (var))
5901 /* OK. */;
5902 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5903 expand_decl (var);
5904 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5905 rest_of_decl_compilation (var, 0, 0);
5906 else
5907 /* No expansion needed. */
5908 gcc_assert (TREE_CODE (var) == TYPE_DECL
5909 || TREE_CODE (var) == CONST_DECL
5910 || TREE_CODE (var) == FUNCTION_DECL
5911 || TREE_CODE (var) == LABEL_DECL);
5915 /* Subroutine of expand_expr. Expand the two operands of a binary
5916 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5917 The value may be stored in TARGET if TARGET is nonzero. The
5918 MODIFIER argument is as documented by expand_expr. */
5920 static void
5921 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5922 enum expand_modifier modifier)
5924 if (! safe_from_p (target, exp1, 1))
5925 target = 0;
5926 if (operand_equal_p (exp0, exp1, 0))
5928 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5929 *op1 = copy_rtx (*op0);
5931 else
5933 /* If we need to preserve evaluation order, copy exp0 into its own
5934 temporary variable so that it can't be clobbered by exp1. */
5935 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5936 exp0 = save_expr (exp0);
5937 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5938 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
5943 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
5944 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
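/* For example, the address of S.F, with F at byte offset 4, is computed by
   recursing on S and then applying plus_constant (result, 4) in the bitpos
   handling below.  (S and F are placeholders.)  */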
5946 static rtx
5947 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
5948 enum expand_modifier modifier)
5950 rtx result, subtarget;
5951 tree inner, offset;
5952 HOST_WIDE_INT bitsize, bitpos;
5953 int volatilep, unsignedp;
5954 enum machine_mode mode1;
5956 /* If we are taking the address of a constant and are at the top level,
5957 we have to use output_constant_def since we can't call force_const_mem
5958 at top level. */
5959 /* ??? This should be considered a front-end bug. We should not be
5960 generating ADDR_EXPR of something that isn't an LVALUE. The only
5961 exception here is STRING_CST. */
5962 if (TREE_CODE (exp) == CONSTRUCTOR
5963 || CONSTANT_CLASS_P (exp))
5964 return XEXP (output_constant_def (exp, 0), 0);
5966 /* Everything must be something allowed by is_gimple_addressable. */
5967 switch (TREE_CODE (exp))
5969 case INDIRECT_REF:
5970 /* This case will happen via recursion for &a->b. */
5971 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
5973 case CONST_DECL:
5974 /* Recurse and make the output_constant_def clause above handle this. */
5975 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
5976 tmode, modifier);
5978 case REALPART_EXPR:
5979 /* The real part of the complex number is always first, therefore
5980 the address is the same as the address of the parent object. */
5981 offset = 0;
5982 bitpos = 0;
5983 inner = TREE_OPERAND (exp, 0);
5984 break;
5986 case IMAGPART_EXPR:
5987 /* The imaginary part of the complex number is always second.
5988 The expression is therefore always offset by the size of the
5989 scalar type. */
5990 offset = 0;
5991 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
5992 inner = TREE_OPERAND (exp, 0);
5993 break;
5995 default:
5996 /* If the object is a DECL, then expand it for its rtl. Don't bypass
5997 expand_expr, as that can have various side effects; LABEL_DECLs, for
5998 example, may not have their DECL_RTL set yet. Assume language
5999 specific tree nodes can be expanded in some interesting way. */
6000 if (DECL_P (exp)
6001 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6003 result = expand_expr (exp, target, tmode,
6004 modifier == EXPAND_INITIALIZER
6005 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6007 /* If the DECL isn't in memory, then the DECL wasn't properly
6008 marked TREE_ADDRESSABLE, which will be either a front-end
6009 or a tree optimizer bug. */
6010 gcc_assert (GET_CODE (result) == MEM);
6011 result = XEXP (result, 0);
6013 /* ??? Is this needed anymore? */
6014 if (DECL_P (exp) && !TREE_USED (exp))
6016 assemble_external (exp);
6017 TREE_USED (exp) = 1;
6020 if (modifier != EXPAND_INITIALIZER
6021 && modifier != EXPAND_CONST_ADDRESS)
6022 result = force_operand (result, target);
6023 return result;
6026 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6027 &mode1, &unsignedp, &volatilep);
6028 break;
6031 /* We must have made progress. */
6032 gcc_assert (inner != exp);
6034 subtarget = offset || bitpos ? NULL_RTX : target;
6035 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6037 if (offset)
6039 rtx tmp;
6041 if (modifier != EXPAND_NORMAL)
6042 result = force_operand (result, NULL);
6043 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6045 result = convert_memory_address (tmode, result);
6046 tmp = convert_memory_address (tmode, tmp);
6048 if (modifier == EXPAND_SUM)
6049 result = gen_rtx_PLUS (tmode, result, tmp);
6050 else
6052 subtarget = bitpos ? NULL_RTX : target;
6053 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6054 1, OPTAB_LIB_WIDEN);
6058 if (bitpos)
6060 /* Someone beforehand should have rejected taking the address
6061 of such an object. */
6062 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6064 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6065 if (modifier < EXPAND_SUM)
6066 result = force_operand (result, target);
6069 return result;
6072 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6073 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6075 static rtx
6076 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6077 enum expand_modifier modifier)
6079 enum machine_mode rmode;
6080 rtx result;
6082 /* Target mode of VOIDmode says "whatever's natural". */
6083 if (tmode == VOIDmode)
6084 tmode = TYPE_MODE (TREE_TYPE (exp));
6086 /* We can get called with some Weird Things if the user does silliness
6087 like "(short) &a". In that case, convert_memory_address won't do
6088 the right thing, so ignore the given target mode. */
6089 if (tmode != Pmode && tmode != ptr_mode)
6090 tmode = Pmode;
6092 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6093 tmode, modifier);
6095 /* Despite expand_expr's claims concerning ignoring TMODE when not
6096 strictly convenient, stuff breaks if we don't honor it. Note
6097 that combined with the above, we only do this for pointer modes. */
6098 rmode = GET_MODE (result);
6099 if (rmode == VOIDmode)
6100 rmode = tmode;
6101 if (rmode != tmode)
6102 result = convert_memory_address (tmode, result);
6104 return result;
6108 /* expand_expr: generate code for computing expression EXP.
6109 An rtx for the computed value is returned. The value is never null.
6110 In the case of a void EXP, const0_rtx is returned.
6112 The value may be stored in TARGET if TARGET is nonzero.
6113 TARGET is just a suggestion; callers must assume that
6114 the rtx returned may not be the same as TARGET.
6116 If TARGET is CONST0_RTX, it means that the value will be ignored.
6118 If TMODE is not VOIDmode, it suggests generating the
6119 result in mode TMODE. But this is done only when convenient.
6120 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6121 TMODE is just a suggestion; callers must assume that
6122 the rtx returned may not have mode TMODE.
6124 Note that TARGET may have neither TMODE nor MODE. In that case, it
6125 probably will not be used.
6127 If MODIFIER is EXPAND_SUM then when EXP is an addition
6128 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6129 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6130 products as above, or REG or MEM, or constant.
6131 Ordinarily in such cases we would output mul or add instructions
6132 and then return a pseudo reg containing the sum.
6134 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6135 it also marks a label as absolutely required (it can't be dead).
6136 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6137 This is used for outputting expressions used in initializers.
6139 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6140 with a constant address even if that address is not normally legitimate.
6141 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6143 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6144 a call parameter. Such targets require special care as we haven't yet
6145 marked TARGET so that it's safe from being trashed by libcalls. We
6146 don't want to use TARGET for anything but the final result;
6147 intermediate values must go elsewhere. Additionally, calls to
6148 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6150 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6151 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6152 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6153 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6154 recursively. */
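/* As an illustration of EXPAND_SUM, expanding an address such as &arr[i]
   with 4-byte elements may yield something like
   (plus (symbol_ref arr) (mult (reg i) (const_int 4)))
   instead of forcing the sum into a pseudo; a caller such as memory_address
   can then decide how much of that expression the machine's addressing
   modes can absorb.  (arr and i are placeholders.)  */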
6156 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6157 enum expand_modifier, rtx *);
6160 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6161 enum expand_modifier modifier, rtx *alt_rtl)
6163 int rn = -1;
6164 rtx ret, last = NULL;
6166 /* Handle ERROR_MARK before anybody tries to access its type. */
6167 if (TREE_CODE (exp) == ERROR_MARK
6168 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6170 ret = CONST0_RTX (tmode);
6171 return ret ? ret : const0_rtx;
6174 if (flag_non_call_exceptions)
6176 rn = lookup_stmt_eh_region (exp);
6177 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6178 if (rn >= 0)
6179 last = get_last_insn ();
6182 /* If this is an expression of some kind and it has an associated line
6183 number, then emit the line number before expanding the expression.
6185 We need to save and restore the file and line information so that
6186 errors discovered during expansion are emitted with the right
6187 information. It would be better if the diagnostic routines
6188 used the file/line information embedded in the tree nodes rather
6189 than globals. */
6190 if (cfun && EXPR_HAS_LOCATION (exp))
6192 location_t saved_location = input_location;
6193 input_location = EXPR_LOCATION (exp);
6194 emit_line_note (input_location);
6196 /* Record where the insns produced belong. */
6197 record_block_change (TREE_BLOCK (exp));
6199 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6201 input_location = saved_location;
6203 else
6205 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6208 /* If using non-call exceptions, mark all insns that may trap.
6209 expand_call() will mark CALL_INSNs before we get to this code,
6210 but it doesn't handle libcalls, and these may trap. */
6211 if (rn >= 0)
6213 rtx insn;
6214 for (insn = next_real_insn (last); insn;
6215 insn = next_real_insn (insn))
6217 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6218 /* If we want exceptions for non-call insns, any
6219 may_trap_p instruction may throw. */
6220 && GET_CODE (PATTERN (insn)) != CLOBBER
6221 && GET_CODE (PATTERN (insn)) != USE
6222 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6224 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6225 REG_NOTES (insn));
6230 return ret;
6233 static rtx
6234 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6235 enum expand_modifier modifier, rtx *alt_rtl)
6237 rtx op0, op1, temp;
6238 tree type = TREE_TYPE (exp);
6239 int unsignedp;
6240 enum machine_mode mode;
6241 enum tree_code code = TREE_CODE (exp);
6242 optab this_optab;
6243 rtx subtarget, original_target;
6244 int ignore;
6245 tree context;
6246 bool reduce_bit_field = false;
6247 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6248 ? reduce_to_bit_field_precision ((expr), \
6249 target, \
6250 type) \
6251 : (expr))
6253 mode = TYPE_MODE (type);
6254 unsignedp = TYPE_UNSIGNED (type);
6255 if (lang_hooks.reduce_bit_field_operations
6256 && TREE_CODE (type) == INTEGER_TYPE
6257 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6259 /* An operation in what may be a bit-field type needs the
6260 result to be reduced to the precision of the bit-field type,
6261 which is narrower than that of the type's mode. */
6262 reduce_bit_field = true;
6263 if (modifier == EXPAND_STACK_PARM)
6264 target = 0;
6267 /* Use subtarget as the target for operand 0 of a binary operation. */
6268 subtarget = get_subtarget (target);
6269 original_target = target;
6270 ignore = (target == const0_rtx
6271 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6272 || code == CONVERT_EXPR || code == COND_EXPR
6273 || code == VIEW_CONVERT_EXPR)
6274 && TREE_CODE (type) == VOID_TYPE));
6276 /* If we are going to ignore this result, we need only do something
6277 if there is a side-effect somewhere in the expression. If there
6278 is, short-circuit the most common cases here. Note that we must
6279 not call expand_expr with anything but const0_rtx in case this
6280 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6282 if (ignore)
6284 if (! TREE_SIDE_EFFECTS (exp))
6285 return const0_rtx;
6287 /* Ensure we reference a volatile object even if value is ignored, but
6288 don't do this if all we are doing is taking its address. */
6289 if (TREE_THIS_VOLATILE (exp)
6290 && TREE_CODE (exp) != FUNCTION_DECL
6291 && mode != VOIDmode && mode != BLKmode
6292 && modifier != EXPAND_CONST_ADDRESS)
6294 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6295 if (MEM_P (temp))
6296 temp = copy_to_reg (temp);
6297 return const0_rtx;
6300 if (TREE_CODE_CLASS (code) == tcc_unary
6301 || code == COMPONENT_REF || code == INDIRECT_REF)
6302 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6303 modifier);
6305 else if (TREE_CODE_CLASS (code) == tcc_binary
6306 || TREE_CODE_CLASS (code) == tcc_comparison
6307 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6309 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6310 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6311 return const0_rtx;
6313 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6314 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6315 /* If the second operand has no side effects, just evaluate
6316 the first. */
6317 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6318 modifier);
6319 else if (code == BIT_FIELD_REF)
6321 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6322 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6323 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6324 return const0_rtx;
6327 target = 0;
6330 /* If we will do cse, generate all results into pseudo registers
6331 since 1) that allows cse to find more things
6332 and 2) otherwise cse could produce an insn the machine
6333 cannot support. An exception is a CONSTRUCTOR into a multi-word
6334 MEM: that's much more likely to be most efficient into the MEM.
6335 Another is a CALL_EXPR which must return in memory. */
6337 if (! cse_not_expected && mode != BLKmode && target
6338 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6339 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6340 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6341 target = 0;
6343 switch (code)
6345 case LABEL_DECL:
6347 tree function = decl_function_context (exp);
6349 temp = label_rtx (exp);
6350 temp = gen_rtx_LABEL_REF (Pmode, temp);
6352 if (function != current_function_decl
6353 && function != 0)
6354 LABEL_REF_NONLOCAL_P (temp) = 1;
6356 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6357 return temp;
6360 case SSA_NAME:
6361 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6362 NULL);
6364 case PARM_DECL:
6365 case VAR_DECL:
6366 /* If a static var's type was incomplete when the decl was written,
6367 but the type is complete now, lay out the decl now. */
6368 if (DECL_SIZE (exp) == 0
6369 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6370 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6371 layout_decl (exp, 0);
6373 /* ... fall through ... */
6375 case FUNCTION_DECL:
6376 case RESULT_DECL:
6377 gcc_assert (DECL_RTL (exp));
6379 /* Ensure variable marked as used even if it doesn't go through
6380 a parser. If it hasn't been used yet, write out an external
6381 definition. */
6382 if (! TREE_USED (exp))
6384 assemble_external (exp);
6385 TREE_USED (exp) = 1;
6388 /* Show we haven't gotten RTL for this yet. */
6389 temp = 0;
6391 /* Variables inherited from containing functions should have
6392 been lowered by this point. */
6393 context = decl_function_context (exp);
6394 gcc_assert (!context
6395 || context == current_function_decl
6396 || TREE_STATIC (exp)
6397 /* ??? C++ creates functions that are not TREE_STATIC. */
6398 || TREE_CODE (exp) == FUNCTION_DECL);
6400 /* This is the case of an array whose size is to be determined
6401 from its initializer, while the initializer is still being parsed.
6402 See expand_decl. */
6404 if (MEM_P (DECL_RTL (exp))
6405 && REG_P (XEXP (DECL_RTL (exp), 0)))
6406 temp = validize_mem (DECL_RTL (exp));
6408 /* If DECL_RTL is memory, we are in the normal case and either
6409 the address is not valid or it is not a register and -fforce-addr
6410 is specified, get the address into a register. */
6412 else if (MEM_P (DECL_RTL (exp))
6413 && modifier != EXPAND_CONST_ADDRESS
6414 && modifier != EXPAND_SUM
6415 && modifier != EXPAND_INITIALIZER
6416 && (! memory_address_p (DECL_MODE (exp),
6417 XEXP (DECL_RTL (exp), 0))
6418 || (flag_force_addr
6419 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6421 if (alt_rtl)
6422 *alt_rtl = DECL_RTL (exp);
6423 temp = replace_equiv_address (DECL_RTL (exp),
6424 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6427 /* If we got something, return it. But first, set the alignment
6428 if the address is a register. */
6429 if (temp != 0)
6431 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6432 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6434 return temp;
6437 /* If the mode of DECL_RTL does not match that of the decl, it
6438 must be a promoted value. We return a SUBREG of the wanted mode,
6439 but mark it so that we know that it was already extended. */
6441 if (REG_P (DECL_RTL (exp))
6442 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6444 enum machine_mode pmode;
6446 /* Get the signedness used for this variable. Ensure we get the
6447 same mode we got when the variable was declared. */
6448 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6449 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6450 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6452 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6453 SUBREG_PROMOTED_VAR_P (temp) = 1;
6454 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6455 return temp;
6458 return DECL_RTL (exp);
6460 case INTEGER_CST:
6461 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6462 TREE_INT_CST_HIGH (exp), mode);
6464 /* ??? If overflow is set, fold will have done an incomplete job,
6465 which can result in (plus xx (const_int 0)), which can get
6466 simplified by validate_replace_rtx during virtual register
6467 instantiation, which can result in unrecognizable insns.
6468 Avoid this by forcing all overflows into registers. */
6469 if (TREE_CONSTANT_OVERFLOW (exp)
6470 && modifier != EXPAND_INITIALIZER)
6471 temp = force_reg (mode, temp);
6473 return temp;
6475 case VECTOR_CST:
6476 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6477 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6478 return const_vector_from_tree (exp);
6479 else
6480 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6481 TREE_VECTOR_CST_ELTS (exp)),
6482 ignore ? const0_rtx : target, tmode, modifier);
6484 case CONST_DECL:
6485 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6487 case REAL_CST:
6488 /* If optimized, generate immediate CONST_DOUBLE
6489 which will be turned into memory by reload if necessary.
6491 We used to force a register so that loop.c could see it. But
6492 this does not allow gen_* patterns to perform optimizations with
6493 the constants. It also produces two insns in cases like "x = 1.0;".
6494 On most machines, floating-point constants are not permitted in
6495 many insns, so we'd end up copying it to a register in any case.
6497 Now, we do the copying in expand_binop, if appropriate. */
6498 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6499 TYPE_MODE (TREE_TYPE (exp)));
6501 case COMPLEX_CST:
6502 /* Handle evaluating a complex constant in a CONCAT target. */
6503 if (original_target && GET_CODE (original_target) == CONCAT)
6505 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6506 rtx rtarg, itarg;
6508 rtarg = XEXP (original_target, 0);
6509 itarg = XEXP (original_target, 1);
6511 /* Move the real and imaginary parts separately. */
6512 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6513 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6515 if (op0 != rtarg)
6516 emit_move_insn (rtarg, op0);
6517 if (op1 != itarg)
6518 emit_move_insn (itarg, op1);
6520 return original_target;
6523 /* ... fall through ... */
6525 case STRING_CST:
6526 temp = output_constant_def (exp, 1);
6528 /* temp contains a constant address.
6529 On RISC machines where a constant address isn't valid,
6530 make some insns to get that address into a register. */
6531 if (modifier != EXPAND_CONST_ADDRESS
6532 && modifier != EXPAND_INITIALIZER
6533 && modifier != EXPAND_SUM
6534 && (! memory_address_p (mode, XEXP (temp, 0))
6535 || flag_force_addr))
6536 return replace_equiv_address (temp,
6537 copy_rtx (XEXP (temp, 0)));
6538 return temp;
6540 case SAVE_EXPR:
6542 tree val = TREE_OPERAND (exp, 0);
6543 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6545 if (!SAVE_EXPR_RESOLVED_P (exp))
6547 /* We can indeed still hit this case, typically via builtin
6548 expanders calling save_expr immediately before expanding
6549 something. Assume this means that we only have to deal
6550 with non-BLKmode values. */
6551 gcc_assert (GET_MODE (ret) != BLKmode);
6553 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6554 DECL_ARTIFICIAL (val) = 1;
6555 DECL_IGNORED_P (val) = 1;
6556 TREE_OPERAND (exp, 0) = val;
6557 SAVE_EXPR_RESOLVED_P (exp) = 1;
6559 if (!CONSTANT_P (ret))
6560 ret = copy_to_reg (ret);
6561 SET_DECL_RTL (val, ret);
6564 return ret;
6567 case GOTO_EXPR:
6568 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6569 expand_goto (TREE_OPERAND (exp, 0));
6570 else
6571 expand_computed_goto (TREE_OPERAND (exp, 0));
6572 return const0_rtx;
6574 case CONSTRUCTOR:
6575 /* If we don't need the result, just ensure we evaluate any
6576 subexpressions. */
6577 if (ignore)
6579 tree elt;
6581 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6582 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6584 return const0_rtx;
6587 /* All elts simple constants => refer to a constant in memory. But
6588 if this is a non-BLKmode mode, let it store a field at a time
6589 since that should make a CONST_INT or CONST_DOUBLE when we
6590 fold. Likewise, if we have a target we can use, it is best to
6591 store directly into the target unless the type is large enough
6592 that memcpy will be used. If we are making an initializer and
6593 all operands are constant, put it in memory as well.
6595 FIXME: Avoid trying to fill vector constructors piece-meal.
6596 Output them with output_constant_def below unless we're sure
6597 they're zeros. This should go away when vector initializers
6598 are treated like VECTOR_CST instead of arrays. */
6600 else if ((TREE_STATIC (exp)
6601 && ((mode == BLKmode
6602 && ! (target != 0 && safe_from_p (target, exp, 1)))
6603 || TREE_ADDRESSABLE (exp)
6604 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6605 && (! MOVE_BY_PIECES_P
6606 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6607 TYPE_ALIGN (type)))
6608 && ! mostly_zeros_p (exp))))
6609 || ((modifier == EXPAND_INITIALIZER
6610 || modifier == EXPAND_CONST_ADDRESS)
6611 && TREE_CONSTANT (exp)))
6613 rtx constructor = output_constant_def (exp, 1);
6615 if (modifier != EXPAND_CONST_ADDRESS
6616 && modifier != EXPAND_INITIALIZER
6617 && modifier != EXPAND_SUM)
6618 constructor = validize_mem (constructor);
6620 return constructor;
6622 else
6624 /* Handle calls that pass values in multiple non-contiguous
6625 locations. The Irix 6 ABI has examples of this. */
6626 if (target == 0 || ! safe_from_p (target, exp, 1)
6627 || GET_CODE (target) == PARALLEL
6628 || modifier == EXPAND_STACK_PARM)
6629 target
6630 = assign_temp (build_qualified_type (type,
6631 (TYPE_QUALS (type)
6632 | (TREE_READONLY (exp)
6633 * TYPE_QUAL_CONST))),
6634 0, TREE_ADDRESSABLE (exp), 1);
6636 store_constructor (exp, target, 0, int_expr_size (exp));
6637 return target;
6640 case MISALIGNED_INDIRECT_REF:
6641 case ALIGN_INDIRECT_REF:
6642 case INDIRECT_REF:
6644 tree exp1 = TREE_OPERAND (exp, 0);
6645 tree orig;
6647 if (code == MISALIGNED_INDIRECT_REF
6648 && !targetm.vectorize.misaligned_mem_ok (mode))
6649 abort ();
6651 if (modifier != EXPAND_WRITE)
6653 tree t;
6655 t = fold_read_from_constant_string (exp);
6656 if (t)
6657 return expand_expr (t, target, tmode, modifier);
6660 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6661 op0 = memory_address (mode, op0);
6663 if (code == ALIGN_INDIRECT_REF)
6665 int align = TYPE_ALIGN_UNIT (type);
6666 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6667 op0 = memory_address (mode, op0);
6670 temp = gen_rtx_MEM (mode, op0);
6672 orig = REF_ORIGINAL (exp);
6673 if (!orig)
6674 orig = exp;
6675 set_mem_attributes (temp, orig, 0);
6677 return temp;
6680 case ARRAY_REF:
6683 tree array = TREE_OPERAND (exp, 0);
6684 tree low_bound = array_ref_low_bound (exp);
6685 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6686 HOST_WIDE_INT i;
6688 gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
6690 /* Optimize the special-case of a zero lower bound.
6692 We convert the low_bound to sizetype to avoid some problems
6693 with constant folding. (E.g. suppose the lower bound is 1,
6694 and its mode is QI. Without the conversion, (ARRAY
6695 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6696 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6698 if (! integer_zerop (low_bound))
6699 index = size_diffop (index, convert (sizetype, low_bound));
6701 /* Fold an expression like: "foo"[2].
6702 This is not done in fold so it won't happen inside &.
6703 Don't fold if this is for wide characters since it's too
6704 difficult to do correctly and this is a very rare case. */
6706 if (modifier != EXPAND_CONST_ADDRESS
6707 && modifier != EXPAND_INITIALIZER
6708 && modifier != EXPAND_MEMORY)
6710 tree t = fold_read_from_constant_string (exp);
6712 if (t)
6713 return expand_expr (t, target, tmode, modifier);
6716 /* If this is a constant index into a constant array,
6717 just get the value from the array. Handle both the cases when
6718 we have an explicit constructor and when our operand is a variable
6719 that was declared const. */
6721 if (modifier != EXPAND_CONST_ADDRESS
6722 && modifier != EXPAND_INITIALIZER
6723 && modifier != EXPAND_MEMORY
6724 && TREE_CODE (array) == CONSTRUCTOR
6725 && ! TREE_SIDE_EFFECTS (array)
6726 && TREE_CODE (index) == INTEGER_CST
6727 && 0 > compare_tree_int (index,
6728 list_length (CONSTRUCTOR_ELTS
6729 (TREE_OPERAND (exp, 0)))))
6731 tree elem;
6733 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6734 i = TREE_INT_CST_LOW (index);
6735 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6738 if (elem)
6739 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6740 modifier);
6743 else if (optimize >= 1
6744 && modifier != EXPAND_CONST_ADDRESS
6745 && modifier != EXPAND_INITIALIZER
6746 && modifier != EXPAND_MEMORY
6747 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6748 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6749 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6750 && targetm.binds_local_p (array))
6752 if (TREE_CODE (index) == INTEGER_CST)
6754 tree init = DECL_INITIAL (array);
6756 if (TREE_CODE (init) == CONSTRUCTOR)
6758 tree elem;
6760 for (elem = CONSTRUCTOR_ELTS (init);
6761 (elem
6762 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6763 elem = TREE_CHAIN (elem))
6766 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6767 return expand_expr (fold (TREE_VALUE (elem)), target,
6768 tmode, modifier);
6770 else if (TREE_CODE (init) == STRING_CST
6771 && 0 > compare_tree_int (index,
6772 TREE_STRING_LENGTH (init)))
6774 tree type = TREE_TYPE (TREE_TYPE (init));
6775 enum machine_mode mode = TYPE_MODE (type);
6777 if (GET_MODE_CLASS (mode) == MODE_INT
6778 && GET_MODE_SIZE (mode) == 1)
6779 return gen_int_mode (TREE_STRING_POINTER (init)
6780 [TREE_INT_CST_LOW (index)], mode);
6785 goto normal_inner_ref;
6787 case COMPONENT_REF:
6788 /* If the operand is a CONSTRUCTOR, we can just extract the
6789 appropriate field if it is present. */
6790 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6792 tree elt;
6794 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6795 elt = TREE_CHAIN (elt))
6796 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6797 /* We can normally use the value of the field in the
6798 CONSTRUCTOR. However, if this is a bitfield in
6799 an integral mode that we can fit in a HOST_WIDE_INT,
6800 we must mask only the number of bits in the bitfield,
6801 since this is done implicitly by the constructor. If
6802 the bitfield does not meet either of those conditions,
6803 we can't do this optimization. */
6804 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6805 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6806 == MODE_INT)
6807 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6808 <= HOST_BITS_PER_WIDE_INT))))
6810 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6811 && modifier == EXPAND_STACK_PARM)
6812 target = 0;
6813 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6814 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6816 HOST_WIDE_INT bitsize
6817 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6818 enum machine_mode imode
6819 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6821 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6823 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6824 op0 = expand_and (imode, op0, op1, target);
6826 else
6828 tree count
6829 = build_int_cst (NULL_TREE,
6830 GET_MODE_BITSIZE (imode) - bitsize);
6832 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6833 target, 0);
6834 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6835 target, 0);
6839 return op0;
6842 goto normal_inner_ref;
6844 case BIT_FIELD_REF:
6845 case ARRAY_RANGE_REF:
6846 normal_inner_ref:
6848 enum machine_mode mode1;
6849 HOST_WIDE_INT bitsize, bitpos;
6850 tree offset;
6851 int volatilep = 0;
6852 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6853 &mode1, &unsignedp, &volatilep);
6854 rtx orig_op0;
6856 /* If we got back the original object, something is wrong. Perhaps
6857 we are evaluating an expression too early. In any event, don't
6858 infinitely recurse. */
6859 gcc_assert (tem != exp);
6861 /* If TEM's type is a union of variable size, pass TARGET to the inner
6862 computation, since it will need a temporary and TARGET is known
6863 to have to do. This occurs in unchecked conversion in Ada. */
6865 orig_op0 = op0
6866 = expand_expr (tem,
6867 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6868 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6869 != INTEGER_CST)
6870 && modifier != EXPAND_STACK_PARM
6871 ? target : NULL_RTX),
6872 VOIDmode,
6873 (modifier == EXPAND_INITIALIZER
6874 || modifier == EXPAND_CONST_ADDRESS
6875 || modifier == EXPAND_STACK_PARM)
6876 ? modifier : EXPAND_NORMAL);
6878 /* If this is a constant, put it into a register if it is a
6879 legitimate constant and OFFSET is 0 and memory if it isn't. */
6880 if (CONSTANT_P (op0))
6882 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6883 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6884 && offset == 0)
6885 op0 = force_reg (mode, op0);
6886 else
6887 op0 = validize_mem (force_const_mem (mode, op0));
6890 /* Otherwise, if this object is not in memory and we either have an
6891 offset or a BLKmode result, put it there. This case can't occur in
6892 C, but can in Ada if we have unchecked conversion of an expression
6893 from a scalar type to an array or record type or for an
6894 ARRAY_RANGE_REF whose type is BLKmode. */
6895 else if (!MEM_P (op0)
6896 && (offset != 0
6897 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6899 tree nt = build_qualified_type (TREE_TYPE (tem),
6900 (TYPE_QUALS (TREE_TYPE (tem))
6901 | TYPE_QUAL_CONST));
6902 rtx memloc = assign_temp (nt, 1, 1, 1);
6904 emit_move_insn (memloc, op0);
6905 op0 = memloc;
6908 if (offset != 0)
6910 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6911 EXPAND_SUM);
6913 gcc_assert (MEM_P (op0));
6915 #ifdef POINTERS_EXTEND_UNSIGNED
6916 if (GET_MODE (offset_rtx) != Pmode)
6917 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6918 #else
6919 if (GET_MODE (offset_rtx) != ptr_mode)
6920 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6921 #endif
6923 if (GET_MODE (op0) == BLKmode
6924 /* A constant address in OP0 can have VOIDmode; we must
6925 not try to call force_reg in that case. */
6926 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6927 && bitsize != 0
6928 && (bitpos % bitsize) == 0
6929 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6930 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6932 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6933 bitpos = 0;
6936 op0 = offset_address (op0, offset_rtx,
6937 highest_pow2_factor (offset));
6940 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6941 record its alignment as BIGGEST_ALIGNMENT. */
6942 if (MEM_P (op0) && bitpos == 0 && offset != 0
6943 && is_aligning_offset (offset, tem))
6944 set_mem_align (op0, BIGGEST_ALIGNMENT);
6946 /* Don't forget about volatility even if this is a bitfield. */
6947 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6949 if (op0 == orig_op0)
6950 op0 = copy_rtx (op0);
6952 MEM_VOLATILE_P (op0) = 1;
6955 /* The following code doesn't handle CONCAT.
6956 Assume only bitpos == 0 can be used for CONCAT, due to
6957 one-element arrays having the same mode as their element. */
6958 if (GET_CODE (op0) == CONCAT)
6960 gcc_assert (bitpos == 0
6961 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
6962 return op0;
6965 /* In cases where an aligned union has an unaligned object
6966 as a field, we might be extracting a BLKmode value from
6967 an integer-mode (e.g., SImode) object. Handle this case
6968 by doing the extract into an object as wide as the field
6969 (which we know to be the width of a basic mode), then
6970 storing into memory, and changing the mode to BLKmode. */
6971 if (mode1 == VOIDmode
6972 || REG_P (op0) || GET_CODE (op0) == SUBREG
6973 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6974 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6975 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6976 && modifier != EXPAND_CONST_ADDRESS
6977 && modifier != EXPAND_INITIALIZER)
6978 /* If the field isn't aligned enough to fetch as a memref,
6979 fetch it as a bit field. */
6980 || (mode1 != BLKmode
6981 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6982 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6983 || (MEM_P (op0)
6984 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6985 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6986 && ((modifier == EXPAND_CONST_ADDRESS
6987 || modifier == EXPAND_INITIALIZER)
6988 ? STRICT_ALIGNMENT
6989 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6990 || (bitpos % BITS_PER_UNIT != 0)))
6991 /* If the type and the field are a constant size and the
6992 size of the type isn't the same size as the bitfield,
6993 we must use bitfield operations. */
6994 || (bitsize >= 0
6995 && TYPE_SIZE (TREE_TYPE (exp))
6996 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6997 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6998 bitsize)))
7000 enum machine_mode ext_mode = mode;
7002 if (ext_mode == BLKmode
7003 && ! (target != 0 && MEM_P (op0)
7004 && MEM_P (target)
7005 && bitpos % BITS_PER_UNIT == 0))
7006 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7008 if (ext_mode == BLKmode)
7010 if (target == 0)
7011 target = assign_temp (type, 0, 1, 1);
7013 if (bitsize == 0)
7014 return target;
7016 /* In this case, BITPOS must start at a byte boundary and
7017 TARGET, if specified, must be a MEM. */
7018 gcc_assert (MEM_P (op0)
7019 && (!target || MEM_P (target))
7020 && !(bitpos % BITS_PER_UNIT));
7022 emit_block_move (target,
7023 adjust_address (op0, VOIDmode,
7024 bitpos / BITS_PER_UNIT),
7025 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7026 / BITS_PER_UNIT),
7027 (modifier == EXPAND_STACK_PARM
7028 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7030 return target;
7033 op0 = validize_mem (op0);
7035 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7036 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7038 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7039 (modifier == EXPAND_STACK_PARM
7040 ? NULL_RTX : target),
7041 ext_mode, ext_mode);
7043 /* If the result is a record type and BITSIZE is narrower than
7044 the mode of OP0, an integral mode, and this is a big endian
7045 machine, we must put the field into the high-order bits. */
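/* For instance, a 24-bit record fetched as a bit field into an SImode OP0
   on a big-endian target is shifted left by 32 - 24 = 8 bits here.  */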
7046 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7047 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7048 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7049 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7050 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7051 - bitsize),
7052 op0, 1);
7054 /* If the result type is BLKmode, store the data into a temporary
7055 of the appropriate type, but with the mode corresponding to the
7056 mode for the data we have (op0's mode). It's tempting to make
7057 this a constant type, since we know it's only being stored once,
7058 but that can cause problems if we are taking the address of this
7059 COMPONENT_REF because the MEM of any reference via that address
7060 will have flags corresponding to the type, which will not
7061 necessarily be constant. */
7062 if (mode == BLKmode)
7064 rtx new
7065 = assign_stack_temp_for_type
7066 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7068 emit_move_insn (new, op0);
7069 op0 = copy_rtx (new);
7070 PUT_MODE (op0, BLKmode);
7071 set_mem_attributes (op0, exp, 1);
7074 return op0;
7077 /* If the result is BLKmode, use that to access the object
7078 now as well. */
7079 if (mode == BLKmode)
7080 mode1 = BLKmode;
7082 /* Get a reference to just this component. */
7083 if (modifier == EXPAND_CONST_ADDRESS
7084 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7085 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7086 else
7087 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7089 if (op0 == orig_op0)
7090 op0 = copy_rtx (op0);
7092 set_mem_attributes (op0, exp, 0);
7093 if (REG_P (XEXP (op0, 0)))
7094 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7096 MEM_VOLATILE_P (op0) |= volatilep;
7097 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7098 || modifier == EXPAND_CONST_ADDRESS
7099 || modifier == EXPAND_INITIALIZER)
7100 return op0;
7101 else if (target == 0)
7102 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7104 convert_move (target, op0, unsignedp);
7105 return target;
7108 case OBJ_TYPE_REF:
7109 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7111 case CALL_EXPR:
7112 /* Check for a built-in function. */
7113 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7114 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7115 == FUNCTION_DECL)
7116 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7118 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7119 == BUILT_IN_FRONTEND)
7120 return lang_hooks.expand_expr (exp, original_target,
7121 tmode, modifier,
7122 alt_rtl);
7123 else
7124 return expand_builtin (exp, target, subtarget, tmode, ignore);
7127 return expand_call (exp, target, ignore);
7129 case NON_LVALUE_EXPR:
7130 case NOP_EXPR:
7131 case CONVERT_EXPR:
7132 if (TREE_OPERAND (exp, 0) == error_mark_node)
7133 return const0_rtx;
7135 if (TREE_CODE (type) == UNION_TYPE)
7137 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7139 /* If both input and output are BLKmode, this conversion isn't doing
7140 anything except possibly changing memory attributes. */
7141 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7143 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7144 modifier);
7146 result = copy_rtx (result);
7147 set_mem_attributes (result, exp, 0);
7148 return result;
7151 if (target == 0)
7153 if (TYPE_MODE (type) != BLKmode)
7154 target = gen_reg_rtx (TYPE_MODE (type));
7155 else
7156 target = assign_temp (type, 0, 1, 1);
7159 if (MEM_P (target))
7160 /* Store data into beginning of memory target. */
7161 store_expr (TREE_OPERAND (exp, 0),
7162 adjust_address (target, TYPE_MODE (valtype), 0),
7163 modifier == EXPAND_STACK_PARM);
7165 else
7167 gcc_assert (REG_P (target));
7169 /* Store this field into a union of the proper type. */
7170 store_field (target,
7171 MIN ((int_size_in_bytes (TREE_TYPE
7172 (TREE_OPERAND (exp, 0)))
7173 * BITS_PER_UNIT),
7174 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7175 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7176 type, 0);
7179 /* Return the entire union. */
7180 return target;
7183 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7185 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7186 modifier);
7188 /* If the signedness of the conversion differs and OP0 is
7189 a promoted SUBREG, clear that indication since we now
7190 have to do the proper extension. */
7191 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7192 && GET_CODE (op0) == SUBREG)
7193 SUBREG_PROMOTED_VAR_P (op0) = 0;
7195 return REDUCE_BIT_FIELD (op0);
7198 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7199 op0 = REDUCE_BIT_FIELD (op0);
7200 if (GET_MODE (op0) == mode)
7201 return op0;
7203 /* If OP0 is a constant, just convert it into the proper mode. */
7204 if (CONSTANT_P (op0))
7206 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7207 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7209 if (modifier == EXPAND_INITIALIZER)
7210 return simplify_gen_subreg (mode, op0, inner_mode,
7211 subreg_lowpart_offset (mode,
7212 inner_mode));
7213 else
7214 return convert_modes (mode, inner_mode, op0,
7215 TYPE_UNSIGNED (inner_type));
7218 if (modifier == EXPAND_INITIALIZER)
7219 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7221 if (target == 0)
7222 return
7223 convert_to_mode (mode, op0,
7224 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7225 else
7226 convert_move (target, op0,
7227 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7228 return target;
7230 case VIEW_CONVERT_EXPR:
7231 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7233 /* If the input and output modes are both the same, we are done.
7234 Otherwise, if neither mode is BLKmode and both are integral and within
7235 a word, we can use gen_lowpart. If neither is true, make sure the
7236 operand is in memory and convert the MEM to the new mode. */
7237 if (TYPE_MODE (type) == GET_MODE (op0))
7239 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7240 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7241 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7242 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7243 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7244 op0 = gen_lowpart (TYPE_MODE (type), op0);
7245 else if (!MEM_P (op0))
7247 /* If the operand is not a MEM, force it into memory. Since we
7248 are going to be changing the mode of the MEM, don't call
7249 force_const_mem for constants because we don't allow pool
7250 constants to change mode. */
7251 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7253 gcc_assert (!TREE_ADDRESSABLE (exp));
7255 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7256 target
7257 = assign_stack_temp_for_type
7258 (TYPE_MODE (inner_type),
7259 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7261 emit_move_insn (target, op0);
7262 op0 = target;
7265 /* At this point, OP0 is in the correct mode. If the output type is such
7266 that the operand is known to be aligned, indicate that it is.
7267 Otherwise, we need only be concerned about alignment for non-BLKmode
7268 results. */
7269 if (MEM_P (op0))
7271 op0 = copy_rtx (op0);
7273 if (TYPE_ALIGN_OK (type))
7274 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7275 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7276 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7278 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7279 HOST_WIDE_INT temp_size
7280 = MAX (int_size_in_bytes (inner_type),
7281 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7282 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7283 temp_size, 0, type);
7284 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7286 gcc_assert (!TREE_ADDRESSABLE (exp));
7288 if (GET_MODE (op0) == BLKmode)
7289 emit_block_move (new_with_op0_mode, op0,
7290 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7291 (modifier == EXPAND_STACK_PARM
7292 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7293 else
7294 emit_move_insn (new_with_op0_mode, op0);
7296 op0 = new;
7299 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7302 return op0;
7304 case PLUS_EXPR:
7305 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7306 something else, make sure we add the register to the constant and
7307 then to the other thing. This case can occur during strength
7308 reduction and doing it this way will produce better code if the
7309 frame pointer or argument pointer is eliminated.
7311 fold-const.c will ensure that the constant is always in the inner
7312 PLUS_EXPR, so the only case we need to do anything about is if
7313 sp, ap, or fp is our second argument, in which case we must swap
7314 the innermost first argument and our second argument. */
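/* For example (purely illustrative), (x + 4) + fp is rearranged here
   into (fp + 4) + x, so the register and the constant are added first
   and can be folded together once the frame pointer is eliminated.  */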
7316 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7317 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7318 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7319 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7320 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7321 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7323 tree t = TREE_OPERAND (exp, 1);
7325 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7326 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7329 /* If the result is to be ptr_mode and we are adding an integer to
7330 something, we might be forming a constant. So try to use
7331 plus_constant. If it produces a sum and we can't accept it,
7332 use force_operand. This allows P = &ARR[const] to generate
7333 efficient code on machines where a SYMBOL_REF is not a valid
7334 address.
7336 If this is an EXPAND_SUM call, always return the sum. */
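/* Illustrative sketch: for P = &ARR[3] with 4-byte elements, the
   plus_constant calls below can fold the address into
   (const (plus (symbol_ref "ARR") (const_int 12)))
   rather than emitting a run-time add.  */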
7337 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7338 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7340 if (modifier == EXPAND_STACK_PARM)
7341 target = 0;
7342 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7343 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7344 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7346 rtx constant_part;
7348 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7349 EXPAND_SUM);
7350 /* Use immed_double_const to ensure that the constant is
7351 truncated according to the mode of OP1, then sign extended
7352 to a HOST_WIDE_INT. Using the constant directly can result
7353 in non-canonical RTL in a 64x32 cross compile. */
7354 constant_part
7355 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7356 (HOST_WIDE_INT) 0,
7357 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7358 op1 = plus_constant (op1, INTVAL (constant_part));
7359 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7360 op1 = force_operand (op1, target);
7361 return REDUCE_BIT_FIELD (op1);
7364 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7365 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7366 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7368 rtx constant_part;
7370 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7371 (modifier == EXPAND_INITIALIZER
7372 ? EXPAND_INITIALIZER : EXPAND_SUM));
7373 if (! CONSTANT_P (op0))
7375 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7376 VOIDmode, modifier);
7377 /* Return a PLUS if modifier says it's OK. */
7378 if (modifier == EXPAND_SUM
7379 || modifier == EXPAND_INITIALIZER)
7380 return simplify_gen_binary (PLUS, mode, op0, op1);
7381 goto binop2;
7383 /* Use immed_double_const to ensure that the constant is
7384 truncated according to the mode of OP1, then sign extended
7385 to a HOST_WIDE_INT. Using the constant directly can result
7386 in non-canonical RTL in a 64x32 cross compile. */
7387 constant_part
7388 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7389 (HOST_WIDE_INT) 0,
7390 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7391 op0 = plus_constant (op0, INTVAL (constant_part));
7392 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7393 op0 = force_operand (op0, target);
7394 return REDUCE_BIT_FIELD (op0);
7398 /* No sense saving up arithmetic to be done
7399 if it's all in the wrong mode to form part of an address.
7400 And force_operand won't know whether to sign-extend or
7401 zero-extend. */
7402 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7403 || mode != ptr_mode)
7405 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7406 subtarget, &op0, &op1, 0);
7407 if (op0 == const0_rtx)
7408 return op1;
7409 if (op1 == const0_rtx)
7410 return op0;
7411 goto binop2;
7414 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7415 subtarget, &op0, &op1, modifier);
7416 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7418 case MINUS_EXPR:
7419 /* For initializers, we are allowed to return a MINUS of two
7420 symbolic constants. Here we handle all cases when both operands
7421 are constant. */
7422 /* Handle difference of two symbolic constants,
7423 for the sake of an initializer. */
7424 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7425 && really_constant_p (TREE_OPERAND (exp, 0))
7426 && really_constant_p (TREE_OPERAND (exp, 1)))
7428 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7429 NULL_RTX, &op0, &op1, modifier);
7431 /* If the last operand is a CONST_INT, use plus_constant of
7432 the negated constant. Else make the MINUS. */
7433 if (GET_CODE (op1) == CONST_INT)
7434 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7435 else
7436 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7439 /* No sense saving up arithmetic to be done
7440 if it's all in the wrong mode to form part of an address.
7441 And force_operand won't know whether to sign-extend or
7442 zero-extend. */
7443 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7444 || mode != ptr_mode)
7445 goto binop;
7447 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7448 subtarget, &op0, &op1, modifier);
7450 /* Convert A - const to A + (-const). */
7451 if (GET_CODE (op1) == CONST_INT)
7453 op1 = negate_rtx (mode, op1);
7454 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7457 goto binop2;
7459 case MULT_EXPR:
7460 /* If first operand is constant, swap them.
7461 Thus the following special case checks need only
7462 check the second operand. */
7463 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7465 tree t1 = TREE_OPERAND (exp, 0);
7466 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7467 TREE_OPERAND (exp, 1) = t1;
7470 /* Attempt to return something suitable for generating an
7471 indexed address, for machines that support that. */
7473 if (modifier == EXPAND_SUM && mode == ptr_mode
7474 && host_integerp (TREE_OPERAND (exp, 1), 0))
7476 tree exp1 = TREE_OPERAND (exp, 1);
7478 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7479 EXPAND_SUM);
7481 if (!REG_P (op0))
7482 op0 = force_operand (op0, NULL_RTX);
7483 if (!REG_P (op0))
7484 op0 = copy_to_mode_reg (mode, op0);
7486 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7487 gen_int_mode (tree_low_cst (exp1, 0),
7488 TYPE_MODE (TREE_TYPE (exp1)))));
7491 if (modifier == EXPAND_STACK_PARM)
7492 target = 0;
7494 /* Check for multiplying things that have been extended
7495 from a narrower type. If this machine supports multiplying
7496 in that narrower type with a result in the desired type,
7497 do it that way, and avoid the explicit type-conversion. */
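/* E.g. (illustrative) (int) (short) a * (int) (short) b may become a
   single HImode x HImode -> SImode widening multiply (a mulhisi3-style
   pattern, if the target provides one) instead of two extensions
   followed by a full SImode multiply.  */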
7498 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7499 && TREE_CODE (type) == INTEGER_TYPE
7500 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7501 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7502 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7503 && int_fits_type_p (TREE_OPERAND (exp, 1),
7504 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7505 /* Don't use a widening multiply if a shift will do. */
7506 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7507 > HOST_BITS_PER_WIDE_INT)
7508 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7510 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7511 && (TYPE_PRECISION (TREE_TYPE
7512 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7513 == TYPE_PRECISION (TREE_TYPE
7514 (TREE_OPERAND
7515 (TREE_OPERAND (exp, 0), 0))))
7516 /* If both operands are extended, they must either both
7517 be zero-extended or both be sign-extended. */
7518 && (TYPE_UNSIGNED (TREE_TYPE
7519 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7520 == TYPE_UNSIGNED (TREE_TYPE
7521 (TREE_OPERAND
7522 (TREE_OPERAND (exp, 0), 0)))))))
7524 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7525 enum machine_mode innermode = TYPE_MODE (op0type);
7526 bool zextend_p = TYPE_UNSIGNED (op0type);
7527 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7528 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7530 if (mode == GET_MODE_WIDER_MODE (innermode))
7532 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7534 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7535 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7536 TREE_OPERAND (exp, 1),
7537 NULL_RTX, &op0, &op1, 0);
7538 else
7539 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7540 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7541 NULL_RTX, &op0, &op1, 0);
7542 goto binop3;
7544 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7545 && innermode == word_mode)
7547 rtx htem, hipart;
7548 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7549 NULL_RTX, VOIDmode, 0);
7550 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7551 op1 = convert_modes (innermode, mode,
7552 expand_expr (TREE_OPERAND (exp, 1),
7553 NULL_RTX, VOIDmode, 0),
7554 unsignedp);
7555 else
7556 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7557 NULL_RTX, VOIDmode, 0);
7558 temp = expand_binop (mode, other_optab, op0, op1, target,
7559 unsignedp, OPTAB_LIB_WIDEN);
7560 hipart = gen_highpart (innermode, temp);
7561 htem = expand_mult_highpart_adjust (innermode, hipart,
7562 op0, op1, hipart,
7563 zextend_p);
7564 if (htem != hipart)
7565 emit_move_insn (hipart, htem);
7566 return REDUCE_BIT_FIELD (temp);
7570 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7571 subtarget, &op0, &op1, 0);
7572 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7574 case TRUNC_DIV_EXPR:
7575 case FLOOR_DIV_EXPR:
7576 case CEIL_DIV_EXPR:
7577 case ROUND_DIV_EXPR:
7578 case EXACT_DIV_EXPR:
7579 if (modifier == EXPAND_STACK_PARM)
7580 target = 0;
7581 /* Possible optimization: compute the dividend with EXPAND_SUM
7582 then if the divisor is constant can optimize the case
7583 where some terms of the dividend have coeffs divisible by it. */
7584 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7585 subtarget, &op0, &op1, 0);
7586 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7588 case RDIV_EXPR:
7589 /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
7590 saving an expensive divide.  If not, combine will rebuild the original
7591 computation. */
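/* Illustration: if x/y and z/y both appear, each becomes a multiplication
   by the same expression 1/y, so CSE can leave a single division.
   Rounding may differ, which is why this needs
   -funsafe-math-optimizations.  */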
7592 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7593 && TREE_CODE (type) == REAL_TYPE
7594 && !real_onep (TREE_OPERAND (exp, 0)))
7595 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7596 build2 (RDIV_EXPR, type,
7597 build_real (type, dconst1),
7598 TREE_OPERAND (exp, 1))),
7599 target, tmode, modifier);
7601 goto binop;
7603 case TRUNC_MOD_EXPR:
7604 case FLOOR_MOD_EXPR:
7605 case CEIL_MOD_EXPR:
7606 case ROUND_MOD_EXPR:
7607 if (modifier == EXPAND_STACK_PARM)
7608 target = 0;
7609 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7610 subtarget, &op0, &op1, 0);
7611 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7613 case FIX_ROUND_EXPR:
7614 case FIX_FLOOR_EXPR:
7615 case FIX_CEIL_EXPR:
7616 gcc_unreachable (); /* Not used for C. */
7618 case FIX_TRUNC_EXPR:
7619 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7620 if (target == 0 || modifier == EXPAND_STACK_PARM)
7621 target = gen_reg_rtx (mode);
7622 expand_fix (target, op0, unsignedp);
7623 return target;
7625 case FLOAT_EXPR:
7626 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7627 if (target == 0 || modifier == EXPAND_STACK_PARM)
7628 target = gen_reg_rtx (mode);
7629 /* expand_float can't figure out what to do if FROM has VOIDmode.
7630 So give it the correct mode. With -O, cse will optimize this. */
7631 if (GET_MODE (op0) == VOIDmode)
7632 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7633 op0);
7634 expand_float (target, op0,
7635 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7636 return target;
7638 case NEGATE_EXPR:
7639 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7640 if (modifier == EXPAND_STACK_PARM)
7641 target = 0;
7642 temp = expand_unop (mode,
7643 optab_for_tree_code (NEGATE_EXPR, type),
7644 op0, target, 0);
7645 gcc_assert (temp);
7646 return REDUCE_BIT_FIELD (temp);
7648 case ABS_EXPR:
7649 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7650 if (modifier == EXPAND_STACK_PARM)
7651 target = 0;
7653 /* ABS_EXPR is not valid for complex arguments. */
7654 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7655 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7657 /* Unsigned abs is simply the operand. Testing here means we don't
7658 risk generating incorrect code below. */
7659 if (TYPE_UNSIGNED (type))
7660 return op0;
7662 return expand_abs (mode, op0, target, unsignedp,
7663 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7665 case MAX_EXPR:
7666 case MIN_EXPR:
7667 target = original_target;
7668 if (target == 0
7669 || modifier == EXPAND_STACK_PARM
7670 || (MEM_P (target) && MEM_VOLATILE_P (target))
7671 || GET_MODE (target) != mode
7672 || (REG_P (target)
7673 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7674 target = gen_reg_rtx (mode);
7675 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7676 target, &op0, &op1, 0);
7678 /* First try to do it with a special MIN or MAX instruction.
7679 If that does not win, use a conditional jump to select the proper
7680 value. */
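/* The fallback below is essentially:
     target = op0;  if (target >= op1) goto lab;  target = op1;  lab:
   using >= for MAX_EXPR and <= for MIN_EXPR.  */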
7681 this_optab = optab_for_tree_code (code, type);
7682 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7683 OPTAB_WIDEN);
7684 if (temp != 0)
7685 return temp;
7687 /* At this point, a MEM target is no longer useful; we will get better
7688 code without it. */
7690 if (MEM_P (target))
7691 target = gen_reg_rtx (mode);
7693 /* If op1 was placed in target, swap op0 and op1. */
7694 if (target != op0 && target == op1)
7696 rtx tem = op0;
7697 op0 = op1;
7698 op1 = tem;
7701 if (target != op0)
7702 emit_move_insn (target, op0);
7704 op0 = gen_label_rtx ();
7706 /* If this mode is an integer too wide to compare properly,
7707 compare word by word. Rely on cse to optimize constant cases. */
7708 if (GET_MODE_CLASS (mode) == MODE_INT
7709 && ! can_compare_p (GE, mode, ccp_jump))
7711 if (code == MAX_EXPR)
7712 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7713 NULL_RTX, op0);
7714 else
7715 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7716 NULL_RTX, op0);
7718 else
7720 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7721 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7723 emit_move_insn (target, op1);
7724 emit_label (op0);
7725 return target;
7727 case BIT_NOT_EXPR:
7728 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7729 if (modifier == EXPAND_STACK_PARM)
7730 target = 0;
7731 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7732 gcc_assert (temp);
7733 return temp;
7735 /* ??? Can optimize bitwise operations with one arg constant.
7736 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7737 and (a bitwise1 b) bitwise2 b (etc)
7738 but that is probably not worth while. */
7740 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7741 boolean values when we want in all cases to compute both of them. In
7742 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7743 as actual zero-or-1 values and then bitwise anding. In cases where
7744 there cannot be any side effects, better code would be made by
7745 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7746 how to recognize those cases. */
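/* E.g. (illustrative) "a && f (x)" (TRUTH_ANDIF_EXPR) must skip the call
   when A is false, whereas TRUTH_AND_EXPR evaluates both operands to 0 or 1
   and simply ANDs them, trading a branch for the extra evaluation.  */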
7748 case TRUTH_AND_EXPR:
7749 code = BIT_AND_EXPR;
7750 case BIT_AND_EXPR:
7751 goto binop;
7753 case TRUTH_OR_EXPR:
7754 code = BIT_IOR_EXPR;
7755 case BIT_IOR_EXPR:
7756 goto binop;
7758 case TRUTH_XOR_EXPR:
7759 code = BIT_XOR_EXPR;
7760 case BIT_XOR_EXPR:
7761 goto binop;
7763 case LSHIFT_EXPR:
7764 case RSHIFT_EXPR:
7765 case LROTATE_EXPR:
7766 case RROTATE_EXPR:
7767 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7768 subtarget = 0;
7769 if (modifier == EXPAND_STACK_PARM)
7770 target = 0;
7771 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7772 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7773 unsignedp);
7775 /* Could determine the answer when only additive constants differ. Also,
7776 the addition of one can be handled by changing the condition. */
7777 case LT_EXPR:
7778 case LE_EXPR:
7779 case GT_EXPR:
7780 case GE_EXPR:
7781 case EQ_EXPR:
7782 case NE_EXPR:
7783 case UNORDERED_EXPR:
7784 case ORDERED_EXPR:
7785 case UNLT_EXPR:
7786 case UNLE_EXPR:
7787 case UNGT_EXPR:
7788 case UNGE_EXPR:
7789 case UNEQ_EXPR:
7790 case LTGT_EXPR:
7791 temp = do_store_flag (exp,
7792 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7793 tmode != VOIDmode ? tmode : mode, 0);
7794 if (temp != 0)
7795 return temp;
7797 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7798 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7799 && original_target
7800 && REG_P (original_target)
7801 && (GET_MODE (original_target)
7802 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7804 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7805 VOIDmode, 0);
7807 /* If temp is constant, we can just compute the result. */
7808 if (GET_CODE (temp) == CONST_INT)
7810 if (INTVAL (temp) != 0)
7811 emit_move_insn (target, const1_rtx);
7812 else
7813 emit_move_insn (target, const0_rtx);
7815 return target;
7818 if (temp != original_target)
7820 enum machine_mode mode1 = GET_MODE (temp);
7821 if (mode1 == VOIDmode)
7822 mode1 = tmode != VOIDmode ? tmode : mode;
7824 temp = copy_to_mode_reg (mode1, temp);
7827 op1 = gen_label_rtx ();
7828 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7829 GET_MODE (temp), unsignedp, op1);
7830 emit_move_insn (temp, const1_rtx);
7831 emit_label (op1);
7832 return temp;
7835 /* If no set-flag instruction, must generate a conditional store
7836 into a temporary variable. Drop through and handle this
7837 like && and ||. */
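/* The code below then materializes the flag as:
     target = 0;  if (!EXP) goto lab;  target = 1;  lab:
   or merely evaluates EXP for side effects when IGNORE is set.  */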
7839 if (! ignore
7840 && (target == 0
7841 || modifier == EXPAND_STACK_PARM
7842 || ! safe_from_p (target, exp, 1)
7843 /* Make sure we don't have a hard reg (such as function's return
7844 value) live across basic blocks, if not optimizing. */
7845 || (!optimize && REG_P (target)
7846 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7847 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7849 if (target)
7850 emit_move_insn (target, const0_rtx);
7852 op1 = gen_label_rtx ();
7853 jumpifnot (exp, op1);
7855 if (target)
7856 emit_move_insn (target, const1_rtx);
7858 emit_label (op1);
7859 return ignore ? const0_rtx : target;
7861 case TRUTH_NOT_EXPR:
7862 if (modifier == EXPAND_STACK_PARM)
7863 target = 0;
7864 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7865 /* The parser is careful to generate TRUTH_NOT_EXPR
7866 only with operands that are always zero or one. */
7867 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7868 target, 1, OPTAB_LIB_WIDEN);
7869 gcc_assert (temp);
7870 return temp;
7872 case STATEMENT_LIST:
7874 tree_stmt_iterator iter;
7876 gcc_assert (ignore);
7878 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7879 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7881 return const0_rtx;
7883 case COND_EXPR:
7884 /* If it's void, we don't need to worry about computing a value. */
7885 if (VOID_TYPE_P (TREE_TYPE (exp)))
7887 tree pred = TREE_OPERAND (exp, 0);
7888 tree then_ = TREE_OPERAND (exp, 1);
7889 tree else_ = TREE_OPERAND (exp, 2);
7891 gcc_assert (TREE_CODE (then_) == GOTO_EXPR
7892 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
7893 && TREE_CODE (else_) == GOTO_EXPR
7894 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
7896 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7897 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7900 /* Note that COND_EXPRs whose type is a structure or union
7901 are required to be constructed to contain assignments of
7902 a temporary variable, so that we can evaluate them here
7903 for side effect only. If type is void, we must do likewise. */
7905 gcc_assert (!TREE_ADDRESSABLE (type)
7906 && !ignore
7907 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7908 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7910 /* If we are not to produce a result, we have no target. Otherwise,
7911 if a target was specified use it; it will not be used as an
7912 intermediate target unless it is safe. If no target, use a
7913 temporary. */
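/* The value form is then laid out as:
     if (!COND) goto lab1;  temp = <then>;  goto lab2;
     lab1:  temp = <else>;  lab2:
   with TEMP holding the result.  */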
7915 if (modifier != EXPAND_STACK_PARM
7916 && original_target
7917 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7918 && GET_MODE (original_target) == mode
7919 #ifdef HAVE_conditional_move
7920 && (! can_conditionally_move_p (mode)
7921 || REG_P (original_target))
7922 #endif
7923 && !MEM_P (original_target))
7924 temp = original_target;
7925 else
7926 temp = assign_temp (type, 0, 0, 1);
7928 do_pending_stack_adjust ();
7929 NO_DEFER_POP;
7930 op0 = gen_label_rtx ();
7931 op1 = gen_label_rtx ();
7932 jumpifnot (TREE_OPERAND (exp, 0), op0);
7933 store_expr (TREE_OPERAND (exp, 1), temp,
7934 modifier == EXPAND_STACK_PARM);
7936 emit_jump_insn (gen_jump (op1));
7937 emit_barrier ();
7938 emit_label (op0);
7939 store_expr (TREE_OPERAND (exp, 2), temp,
7940 modifier == EXPAND_STACK_PARM);
7942 emit_label (op1);
7943 OK_DEFER_POP;
7944 return temp;
7946 case VEC_COND_EXPR:
7947 target = expand_vec_cond_expr (exp, target);
7948 return target;
7950 case MODIFY_EXPR:
7952 tree lhs = TREE_OPERAND (exp, 0);
7953 tree rhs = TREE_OPERAND (exp, 1);
7955 gcc_assert (ignore);
7957 /* Check for |= or &= of a bitfield of size one into another bitfield
7958 of size 1. In this case, (unless we need the result of the
7959 assignment) we can do this more efficiently with a
7960 test followed by an assignment, if necessary.
7962 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7963 things change so we do, this code should be enhanced to
7964 support it. */
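/* I.e. for "lhs.b |= rhs.c" with two 1-bit fields, we test RHS.C and,
   only when it is set (for |=) or clear (for &=), store the constant 1
   or 0 into LHS.B, avoiding a read-modify-write of the destination
   bitfield.  */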
7965 if (TREE_CODE (lhs) == COMPONENT_REF
7966 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7967 || TREE_CODE (rhs) == BIT_AND_EXPR)
7968 && TREE_OPERAND (rhs, 0) == lhs
7969 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7970 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7971 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7973 rtx label = gen_label_rtx ();
7975 do_jump (TREE_OPERAND (rhs, 1),
7976 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7977 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7978 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7979 (TREE_CODE (rhs) == BIT_IOR_EXPR
7980 ? integer_one_node
7981 : integer_zero_node)));
7982 do_pending_stack_adjust ();
7983 emit_label (label);
7984 return const0_rtx;
7987 expand_assignment (lhs, rhs);
7989 return const0_rtx;
7992 case RETURN_EXPR:
7993 if (!TREE_OPERAND (exp, 0))
7994 expand_null_return ();
7995 else
7996 expand_return (TREE_OPERAND (exp, 0));
7997 return const0_rtx;
7999 case ADDR_EXPR:
8000 return expand_expr_addr_expr (exp, target, tmode, modifier);
8002 /* COMPLEX type for Extended Pascal & Fortran */
8003 case COMPLEX_EXPR:
8005 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8006 rtx insns;
8008 /* Get the rtx code of the operands. */
8009 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8010 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8012 if (! target)
8013 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8015 start_sequence ();
8017 /* Move the real (op0) and imaginary (op1) parts to their location. */
8018 emit_move_insn (gen_realpart (mode, target), op0);
8019 emit_move_insn (gen_imagpart (mode, target), op1);
8021 insns = get_insns ();
8022 end_sequence ();
8024 /* Complex construction should appear as a single unit. */
8025 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8026 each with a separate pseudo as destination.
8027 It's not correct for flow to treat them as a unit. */
8028 if (GET_CODE (target) != CONCAT)
8029 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8030 else
8031 emit_insn (insns);
8033 return target;
8036 case REALPART_EXPR:
8037 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8038 return gen_realpart (mode, op0);
8040 case IMAGPART_EXPR:
8041 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8042 return gen_imagpart (mode, op0);
8044 case RESX_EXPR:
8045 expand_resx_expr (exp);
8046 return const0_rtx;
8048 case TRY_CATCH_EXPR:
8049 case CATCH_EXPR:
8050 case EH_FILTER_EXPR:
8051 case TRY_FINALLY_EXPR:
8052 /* Lowered by tree-eh.c. */
8053 gcc_unreachable ();
8055 case WITH_CLEANUP_EXPR:
8056 case CLEANUP_POINT_EXPR:
8057 case TARGET_EXPR:
8058 case CASE_LABEL_EXPR:
8059 case VA_ARG_EXPR:
8060 case BIND_EXPR:
8061 case INIT_EXPR:
8062 case CONJ_EXPR:
8063 case COMPOUND_EXPR:
8064 case PREINCREMENT_EXPR:
8065 case PREDECREMENT_EXPR:
8066 case POSTINCREMENT_EXPR:
8067 case POSTDECREMENT_EXPR:
8068 case LOOP_EXPR:
8069 case EXIT_EXPR:
8070 case LABELED_BLOCK_EXPR:
8071 case EXIT_BLOCK_EXPR:
8072 case TRUTH_ANDIF_EXPR:
8073 case TRUTH_ORIF_EXPR:
8074 /* Lowered by gimplify.c. */
8075 gcc_unreachable ();
8077 case EXC_PTR_EXPR:
8078 return get_exception_pointer (cfun);
8080 case FILTER_EXPR:
8081 return get_exception_filter (cfun);
8083 case FDESC_EXPR:
8084 /* Function descriptors are not valid except for as
8085 initialization constants, and should not be expanded. */
8086 gcc_unreachable ();
8088 case SWITCH_EXPR:
8089 expand_case (exp);
8090 return const0_rtx;
8092 case LABEL_EXPR:
8093 expand_label (TREE_OPERAND (exp, 0));
8094 return const0_rtx;
8096 case ASM_EXPR:
8097 expand_asm_expr (exp);
8098 return const0_rtx;
8100 case WITH_SIZE_EXPR:
8101 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8102 have pulled out the size to use in whatever context it needed. */
8103 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8104 modifier, alt_rtl);
8106 case REALIGN_LOAD_EXPR:
8108 tree oprnd0 = TREE_OPERAND (exp, 0);
8109 tree oprnd1 = TREE_OPERAND (exp, 1);
8110 tree oprnd2 = TREE_OPERAND (exp, 2);
8111 rtx op2;
8113 this_optab = optab_for_tree_code (code, type);
8114 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8115 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8116 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8117 target, unsignedp);
8118 gcc_assert (temp);
8120 return temp;
8124 default:
8125 return lang_hooks.expand_expr (exp, original_target, tmode,
8126 modifier, alt_rtl);
8129 /* Here to do an ordinary binary operator. */
8130 binop:
8131 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8132 subtarget, &op0, &op1, 0);
8133 binop2:
8134 this_optab = optab_for_tree_code (code, type);
8135 binop3:
8136 if (modifier == EXPAND_STACK_PARM)
8137 target = 0;
8138 temp = expand_binop (mode, this_optab, op0, op1, target,
8139 unsignedp, OPTAB_LIB_WIDEN);
8140 gcc_assert (temp);
8141 return REDUCE_BIT_FIELD (temp);
8143 #undef REDUCE_BIT_FIELD
8145 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8146 signedness of TYPE), possibly returning the result in TARGET. */
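/* For instance (illustrative), reducing to a 3-bit unsigned type masks EXP
   with 7, while a 3-bit signed type is handled by shifting left and then
   arithmetically right by GET_MODE_BITSIZE (GET_MODE (exp)) - 3,
   sign-extending the value from bit 2.  */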
8147 static rtx
8148 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8150 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8151 if (target && GET_MODE (target) != GET_MODE (exp))
8152 target = 0;
8153 if (TYPE_UNSIGNED (type))
8155 rtx mask;
8156 if (prec < HOST_BITS_PER_WIDE_INT)
8157 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8158 GET_MODE (exp));
8159 else
8160 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8161 ((unsigned HOST_WIDE_INT) 1
8162 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8163 GET_MODE (exp));
8164 return expand_and (GET_MODE (exp), exp, mask, target);
8166 else
8168 tree count = build_int_cst (NULL_TREE,
8169 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8170 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8171 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8175 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8176 when applied to the address of EXP produces an address known to be
8177 aligned more than BIGGEST_ALIGNMENT. */
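/* In other words, it recognizes offsets of the form
   (-&EXP) & (ALIGN - 1) (modulo conversions), the usual idiom for the
   distance needed to round the address of EXP up to an ALIGN-byte boundary,
   where ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */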
8179 static int
8180 is_aligning_offset (tree offset, tree exp)
8182 /* Strip off any conversions. */
8183 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8184 || TREE_CODE (offset) == NOP_EXPR
8185 || TREE_CODE (offset) == CONVERT_EXPR)
8186 offset = TREE_OPERAND (offset, 0);
8188 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8189 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
8190 if (TREE_CODE (offset) != BIT_AND_EXPR
8191 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8192 || compare_tree_int (TREE_OPERAND (offset, 1),
8193 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8194 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8195 return 0;
8197 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8198 It must be NEGATE_EXPR. Then strip any more conversions. */
8199 offset = TREE_OPERAND (offset, 0);
8200 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8201 || TREE_CODE (offset) == NOP_EXPR
8202 || TREE_CODE (offset) == CONVERT_EXPR)
8203 offset = TREE_OPERAND (offset, 0);
8205 if (TREE_CODE (offset) != NEGATE_EXPR)
8206 return 0;
8208 offset = TREE_OPERAND (offset, 0);
8209 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8210 || TREE_CODE (offset) == NOP_EXPR
8211 || TREE_CODE (offset) == CONVERT_EXPR)
8212 offset = TREE_OPERAND (offset, 0);
8214 /* This must now be the address of EXP. */
8215 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8218 /* Return the tree node if an ARG corresponds to a string constant or zero
8219 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8220 in bytes within the string that ARG is accessing. The type of the
8221 offset will be `sizetype'. */
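/* For example, given &"hello"[2] this returns the STRING_CST "hello" with
   *PTR_OFFSET set to 2; expressions such as "hello" + N and read-only
   VAR_DECLs initialized from string literals are handled as well.  */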
8223 tree
8224 string_constant (tree arg, tree *ptr_offset)
8226 tree array, offset;
8227 STRIP_NOPS (arg);
8229 if (TREE_CODE (arg) == ADDR_EXPR)
8231 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8233 *ptr_offset = size_zero_node;
8234 return TREE_OPERAND (arg, 0);
8236 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8238 array = TREE_OPERAND (arg, 0);
8239 offset = size_zero_node;
8241 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8243 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8244 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8245 if (TREE_CODE (array) != STRING_CST
8246 && TREE_CODE (array) != VAR_DECL)
8247 return 0;
8249 else
8250 return 0;
8252 else if (TREE_CODE (arg) == PLUS_EXPR)
8254 tree arg0 = TREE_OPERAND (arg, 0);
8255 tree arg1 = TREE_OPERAND (arg, 1);
8257 STRIP_NOPS (arg0);
8258 STRIP_NOPS (arg1);
8260 if (TREE_CODE (arg0) == ADDR_EXPR
8261 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8262 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8264 array = TREE_OPERAND (arg0, 0);
8265 offset = arg1;
8267 else if (TREE_CODE (arg1) == ADDR_EXPR
8268 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8269 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8271 array = TREE_OPERAND (arg1, 0);
8272 offset = arg0;
8274 else
8275 return 0;
8277 else
8278 return 0;
8280 if (TREE_CODE (array) == STRING_CST)
8282 *ptr_offset = convert (sizetype, offset);
8283 return array;
8285 else if (TREE_CODE (array) == VAR_DECL)
8287 int length;
8289 /* Variables initialized to string literals can be handled too. */
8290 if (DECL_INITIAL (array) == NULL_TREE
8291 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8292 return 0;
8294 /* The variable must be read-only, non-volatile, and must bind locally. */
8295 if (! TREE_READONLY (array)
8296 || TREE_SIDE_EFFECTS (array)
8297 || ! targetm.binds_local_p (array))
8298 return 0;
8300 /* Avoid const char foo[4] = "abcde"; */
8301 if (DECL_SIZE_UNIT (array) == NULL_TREE
8302 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8303 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8304 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8305 return 0;
8307 /* If variable is bigger than the string literal, OFFSET must be constant
8308 and inside of the bounds of the string literal. */
8309 offset = convert (sizetype, offset);
8310 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8311 && (! host_integerp (offset, 1)
8312 || compare_tree_int (offset, length) >= 0))
8313 return 0;
8315 *ptr_offset = offset;
8316 return DECL_INITIAL (array);
8319 return 0;
8322 /* Generate code to calculate EXP using a store-flag instruction
8323 and return an rtx for the result. EXP is either a comparison
8324 or a TRUTH_NOT_EXPR whose operand is a comparison.
8326 If TARGET is nonzero, store the result there if convenient.
8328 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8329 cheap.
8331 Return zero if there is no suitable set-flag instruction
8332 available on this machine.
8334 Once expand_expr has been called on the arguments of the comparison,
8335 we are committed to doing the store flag, since it is not safe to
8336 re-evaluate the expression. We emit the store-flag insn by calling
8337 emit_store_flag, but only expand the arguments if we have a reason
8338 to believe that emit_store_flag will be successful. If we think that
8339 it will, but it isn't, we have to simulate the store-flag with a
8340 set/jump/set sequence. */
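/* The simulated sequence at the end of this function is:
     target = 1;  if (OP0 <cond> OP1) goto lab;  target = 0;  lab:
   with the two constants swapped when the result must be inverted.  */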
8342 static rtx
8343 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8345 enum rtx_code code;
8346 tree arg0, arg1, type;
8347 tree tem;
8348 enum machine_mode operand_mode;
8349 int invert = 0;
8350 int unsignedp;
8351 rtx op0, op1;
8352 enum insn_code icode;
8353 rtx subtarget = target;
8354 rtx result, label;
8356 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8357 result at the end. We can't simply invert the test since it would
8358 have already been inverted if it were valid. This case occurs for
8359 some floating-point comparisons. */
8361 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8362 invert = 1, exp = TREE_OPERAND (exp, 0);
8364 arg0 = TREE_OPERAND (exp, 0);
8365 arg1 = TREE_OPERAND (exp, 1);
8367 /* Don't crash if the comparison was erroneous. */
8368 if (arg0 == error_mark_node || arg1 == error_mark_node)
8369 return const0_rtx;
8371 type = TREE_TYPE (arg0);
8372 operand_mode = TYPE_MODE (type);
8373 unsignedp = TYPE_UNSIGNED (type);
8375 /* We won't bother with BLKmode store-flag operations because it would mean
8376 passing a lot of information to emit_store_flag. */
8377 if (operand_mode == BLKmode)
8378 return 0;
8380 /* We won't bother with store-flag operations involving function pointers
8381 when function pointers must be canonicalized before comparisons. */
8382 #ifdef HAVE_canonicalize_funcptr_for_compare
8383 if (HAVE_canonicalize_funcptr_for_compare
8384 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8385 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8386 == FUNCTION_TYPE))
8387 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8388 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8389 == FUNCTION_TYPE))))
8390 return 0;
8391 #endif
8393 STRIP_NOPS (arg0);
8394 STRIP_NOPS (arg1);
8396 /* Get the rtx comparison code to use. We know that EXP is a comparison
8397 operation of some type. Some comparisons against 1 and -1 can be
8398 converted to comparisons with zero. Do so here so that the tests
8399 below will be aware that we have a comparison with zero. These
8400 tests will not catch constants in the first operand, but constants
8401 are rarely passed as the first operand. */
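/* For instance, a signed X <= -1 becomes X < 0 below, and X >= 1 becomes
   X > 0 (GTU when unsigned), so the later tests need only recognize
   comparisons against zero.  */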
8403 switch (TREE_CODE (exp))
8405 case EQ_EXPR:
8406 code = EQ;
8407 break;
8408 case NE_EXPR:
8409 code = NE;
8410 break;
8411 case LT_EXPR:
8412 if (integer_onep (arg1))
8413 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8414 else
8415 code = unsignedp ? LTU : LT;
8416 break;
8417 case LE_EXPR:
8418 if (! unsignedp && integer_all_onesp (arg1))
8419 arg1 = integer_zero_node, code = LT;
8420 else
8421 code = unsignedp ? LEU : LE;
8422 break;
8423 case GT_EXPR:
8424 if (! unsignedp && integer_all_onesp (arg1))
8425 arg1 = integer_zero_node, code = GE;
8426 else
8427 code = unsignedp ? GTU : GT;
8428 break;
8429 case GE_EXPR:
8430 if (integer_onep (arg1))
8431 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8432 else
8433 code = unsignedp ? GEU : GE;
8434 break;
8436 case UNORDERED_EXPR:
8437 code = UNORDERED;
8438 break;
8439 case ORDERED_EXPR:
8440 code = ORDERED;
8441 break;
8442 case UNLT_EXPR:
8443 code = UNLT;
8444 break;
8445 case UNLE_EXPR:
8446 code = UNLE;
8447 break;
8448 case UNGT_EXPR:
8449 code = UNGT;
8450 break;
8451 case UNGE_EXPR:
8452 code = UNGE;
8453 break;
8454 case UNEQ_EXPR:
8455 code = UNEQ;
8456 break;
8457 case LTGT_EXPR:
8458 code = LTGT;
8459 break;
8461 default:
8462 gcc_unreachable ();
8465 /* Put a constant second. */
8466 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8468 tem = arg0; arg0 = arg1; arg1 = tem;
8469 code = swap_condition (code);
8472 /* If this is an equality or inequality test of a single bit, we can
8473 do this by shifting the bit being tested to the low-order bit and
8474 masking the result with the constant 1. If the condition was EQ,
8475 we xor it with 1. This does not require an scc insn and is faster
8476 than an scc insn even if we have it.
8478 The code to make this transformation was moved into fold_single_bit_test,
8479 so we just call into the folder and expand its result. */
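/* E.g. (X & 8) != 0 is expanded as (X >> 3) & 1, and (X & 8) == 0 as
   ((X >> 3) & 1) ^ 1, with no scc insn or branch needed.  */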
8481 if ((code == NE || code == EQ)
8482 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8483 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8485 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8486 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8487 arg0, arg1, type),
8488 target, VOIDmode, EXPAND_NORMAL);
8491 /* Now see if we are likely to be able to do this. Return if not. */
8492 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8493 return 0;
8495 icode = setcc_gen_code[(int) code];
8496 if (icode == CODE_FOR_nothing
8497 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8499 /* We can only do this if it is one of the special cases that
8500 can be handled without an scc insn. */
8501 if ((code == LT && integer_zerop (arg1))
8502 || (! only_cheap && code == GE && integer_zerop (arg1)))
8504 else if (BRANCH_COST >= 0
8505 && ! only_cheap && (code == NE || code == EQ)
8506 && TREE_CODE (type) != REAL_TYPE
8507 && ((abs_optab->handlers[(int) operand_mode].insn_code
8508 != CODE_FOR_nothing)
8509 || (ffs_optab->handlers[(int) operand_mode].insn_code
8510 != CODE_FOR_nothing)))
8512 else
8513 return 0;
8516 if (! get_subtarget (target)
8517 || GET_MODE (subtarget) != operand_mode)
8518 subtarget = 0;
8520 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8522 if (target == 0)
8523 target = gen_reg_rtx (mode);
8525 result = emit_store_flag (target, code, op0, op1,
8526 operand_mode, unsignedp, 1);
8528 if (result)
8530 if (invert)
8531 result = expand_binop (mode, xor_optab, result, const1_rtx,
8532 result, 0, OPTAB_LIB_WIDEN);
8533 return result;
8536 /* If this failed, we have to do this with set/compare/jump/set code. */
8537 if (!REG_P (target)
8538 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8539 target = gen_reg_rtx (GET_MODE (target));
8541 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8542 result = compare_from_rtx (op0, op1, code, unsignedp,
8543 operand_mode, NULL_RTX);
8544 if (GET_CODE (result) == CONST_INT)
8545 return (((result == const0_rtx && ! invert)
8546 || (result != const0_rtx && invert))
8547 ? const0_rtx : const1_rtx);
8549 /* The code of RESULT may not match CODE if compare_from_rtx
8550 decided to swap its operands and reverse the original code.
8552 We know that compare_from_rtx returns either a CONST_INT or
8553 a new comparison code, so it is safe to just extract the
8554 code from RESULT. */
8555 code = GET_CODE (result);
8557 label = gen_label_rtx ();
8558 gcc_assert (bcc_gen_fctn[(int) code]);
8560 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8561 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8562 emit_label (label);
8564 return target;
8568 /* Stubs in case we haven't got a casesi insn. */
8569 #ifndef HAVE_casesi
8570 # define HAVE_casesi 0
8571 # define gen_casesi(a, b, c, d, e) (0)
8572 # define CODE_FOR_casesi CODE_FOR_nothing
8573 #endif
8575 /* If the machine does not have a case insn that compares the bounds,
8576 this means extra overhead for dispatch tables, which raises the
8577 threshold for using them. */
8578 #ifndef CASE_VALUES_THRESHOLD
8579 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8580 #endif /* CASE_VALUES_THRESHOLD */
8582 unsigned int
8583 case_values_threshold (void)
8585 return CASE_VALUES_THRESHOLD;
8588 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8589 0 otherwise (i.e. if there is no casesi instruction). */
8590 int
8591 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8592 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8594 enum machine_mode index_mode = SImode;
8595 int index_bits = GET_MODE_BITSIZE (index_mode);
8596 rtx op1, op2, index;
8597 enum machine_mode op_mode;
8599 if (! HAVE_casesi)
8600 return 0;
8602 /* Convert the index to SImode. */
8603 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8605 enum machine_mode omode = TYPE_MODE (index_type);
8606 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8608 /* We must handle the endpoints in the original mode. */
8609 index_expr = build2 (MINUS_EXPR, index_type,
8610 index_expr, minval);
8611 minval = integer_zero_node;
8612 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8613 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8614 omode, 1, default_label);
8615 /* Now we can safely truncate. */
8616 index = convert_to_mode (index_mode, index, 0);
8618 else
8620 if (TYPE_MODE (index_type) != index_mode)
8622 index_expr = convert (lang_hooks.types.type_for_size
8623 (index_bits, 0), index_expr);
8624 index_type = TREE_TYPE (index_expr);
8627 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8630 do_pending_stack_adjust ();
8632 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8633 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8634 (index, op_mode))
8635 index = copy_to_mode_reg (op_mode, index);
8637 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8639 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8640 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8641 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8642 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8643 (op1, op_mode))
8644 op1 = copy_to_mode_reg (op_mode, op1);
8646 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8648 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8649 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8650 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8651 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8652 (op2, op_mode))
8653 op2 = copy_to_mode_reg (op_mode, op2);
8655 emit_jump_insn (gen_casesi (index, op1, op2,
8656 table_label, default_label));
8657 return 1;
8660 /* Attempt to generate a tablejump instruction; same concept. */
8661 #ifndef HAVE_tablejump
8662 #define HAVE_tablejump 0
8663 #define gen_tablejump(x, y) (0)
8664 #endif
8666 /* Subroutine of the next function.
8668 INDEX is the value being switched on, with the lowest value
8669 in the table already subtracted.
8670 MODE is its expected mode (needed if INDEX is constant).
8671 RANGE is the length of the jump table.
8672 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8674 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8675 index value is out of range. */
8677 static void
8678 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8679 rtx default_label)
8681 rtx temp, vector;
8683 if (INTVAL (range) > cfun->max_jumptable_ents)
8684 cfun->max_jumptable_ents = INTVAL (range);
8686 /* Do an unsigned comparison (in the proper mode) between the index
8687 expression and the value which represents the length of the range.
8688 Since we just finished subtracting the lower bound of the range
8689 from the index expression, this comparison allows us to simultaneously
8690 check that the original index expression value is both greater than
8691 or equal to the minimum value of the range and less than or equal to
8692 the maximum value of the range. */
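/* I.e. once the lower bound has been subtracted, index values below it wrap
   around to very large unsigned numbers, so the single unsigned
   INDEX > RANGE test below rejects both out-of-range directions at once.  */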
8694 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8695 default_label);
8697 /* If index is in range, it must fit in Pmode.
8698 Convert to Pmode so we can index with it. */
8699 if (mode != Pmode)
8700 index = convert_to_mode (Pmode, index, 1);
8702 /* Don't let a MEM slip through, because then INDEX that comes
8703 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8704 and break_out_memory_refs will go to work on it and mess it up. */
8705 #ifdef PIC_CASE_VECTOR_ADDRESS
8706 if (flag_pic && !REG_P (index))
8707 index = copy_to_mode_reg (Pmode, index);
8708 #endif
8710 /* If flag_force_addr were to affect this address
8711 it could interfere with the tricky assumptions made
8712 about addresses that contain label-refs,
8713 which may be valid only very near the tablejump itself. */
8714 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8715 GET_MODE_SIZE, because this indicates how large insns are. The other
8716 uses should all be Pmode, because they are addresses. This code
8717 could fail if addresses and insns are not the same size. */
8718 index = gen_rtx_PLUS (Pmode,
8719 gen_rtx_MULT (Pmode, index,
8720 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8721 gen_rtx_LABEL_REF (Pmode, table_label));
8722 #ifdef PIC_CASE_VECTOR_ADDRESS
8723 if (flag_pic)
8724 index = PIC_CASE_VECTOR_ADDRESS (index);
8725 else
8726 #endif
8727 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8728 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8729 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8730 convert_move (temp, vector, 0);
8732 emit_jump_insn (gen_tablejump (temp, table_label));
8734 /* If we are generating PIC code or if the table is PC-relative, the
8735 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8736 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8737 emit_barrier ();
8740 int
8741 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8742 rtx table_label, rtx default_label)
8744 rtx index;
8746 if (! HAVE_tablejump)
8747 return 0;
8749 index_expr = fold (build2 (MINUS_EXPR, index_type,
8750 convert (index_type, index_expr),
8751 convert (index_type, minval)));
8752 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8753 do_pending_stack_adjust ();
8755 do_tablejump (index, TYPE_MODE (index_type),
8756 convert_modes (TYPE_MODE (index_type),
8757 TYPE_MODE (TREE_TYPE (range)),
8758 expand_expr (range, NULL_RTX,
8759 VOIDmode, 0),
8760 TYPE_UNSIGNED (TREE_TYPE (range))),
8761 table_label, default_label);
8762 return 1;
8765 /* Nonzero if the mode is a valid vector mode for this architecture.
8766 This returns nonzero even if there is no hardware support for the
8767 vector mode, but we can emulate with narrower modes. */
8769 int
8770 vector_mode_valid_p (enum machine_mode mode)
8772 enum mode_class class = GET_MODE_CLASS (mode);
8773 enum machine_mode innermode;
8775 /* Doh! What's going on? */
8776 if (class != MODE_VECTOR_INT
8777 && class != MODE_VECTOR_FLOAT)
8778 return 0;
8780 /* Hardware support. Woo hoo! */
8781 if (targetm.vector_mode_supported_p (mode))
8782 return 1;
8784 innermode = GET_MODE_INNER (mode);
8786 /* We should probably return 1 if requesting V4DI when we have no DI
8787 but do have V2DI; however, that case is probably very unlikely. */
8789 /* If we have support for the inner mode, we can safely emulate it.
8790 We may not have V2DI, but we can emulate with a pair of DIs. */
8791 return targetm.scalar_mode_supported_p (innermode);
8794 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8795 static rtx
8796 const_vector_from_tree (tree exp)
8798 rtvec v;
8799 int units, i;
8800 tree link, elt;
8801 enum machine_mode inner, mode;
8803 mode = TYPE_MODE (TREE_TYPE (exp));
8805 if (initializer_zerop (exp))
8806 return CONST0_RTX (mode);
8808 units = GET_MODE_NUNITS (mode);
8809 inner = GET_MODE_INNER (mode);
8811 v = rtvec_alloc (units);
8813 link = TREE_VECTOR_CST_ELTS (exp);
8814 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8816 elt = TREE_VALUE (link);
8818 if (TREE_CODE (elt) == REAL_CST)
8819 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8820 inner);
8821 else
8822 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8823 TREE_INT_CST_HIGH (elt),
8824 inner);
8827 /* Initialize remaining elements to 0. */
8828 for (; i < units; ++i)
8829 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8831 return gen_rtx_CONST_VECTOR (mode, v);
8833 #include "gt-expr.h"