gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
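/* For instance, on a machine whose stack grows downward a push of a
   SImode value is expressed in RTL roughly as

     (set (mem:SI (pre_dec (reg sp))) (reg:SI src))

   while PRE_INC would be used instead if the stack grew upward.
   (The operand names above are purely illustrative.)  */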
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
104 };
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
110 {
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
120 };
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, enum machine_mode, int, tree, int);
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
180 #endif
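/* In other words, a block copy is expanded inline, piece by piece, only
   when the estimated number of move insns is below the target's
   MOVE_RATIO.  A target that wants a different heuristic can define
   MOVE_BY_PIECES_P itself; a made-up example of such an override is

     #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
       ((SIZE) <= 4 * UNITS_PER_WORD)

   which would inline any copy of at most four words regardless of the
   alignment.  */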
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
188 #endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
197 #endif
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
202 /* This array records the insn_code of insns to perform block clears. */
203 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
205 /* These arrays record the insn_code of two different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
212 #ifndef SLOW_UNALIGNED_ACCESS
213 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
214 #endif
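/* A target on which misaligned accesses are merely slower, not invalid,
   might instead provide something like the following (hypothetical)
   definition:

     #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) \
       ((ALIGN) < GET_MODE_ALIGNMENT (MODE))

   so that the by-pieces code avoids wide unaligned accesses even though
   they would be legal.  */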
216 /* This is run once per compilation to set up which modes can be used
217 directly in memory and to initialize the block move optab. */
219 void
220 init_expr_once (void)
222 rtx insn, pat;
223 enum machine_mode mode;
224 int num_clobbers;
225 rtx mem, mem1;
226 rtx reg;
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 /* A scratch register we can modify in-place below to avoid
235 useless RTL allocations. */
236 reg = gen_rtx_REG (VOIDmode, -1);
238 insn = rtx_alloc (INSN);
239 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
240 PATTERN (insn) = pat;
242 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
243 mode = (enum machine_mode) ((int) mode + 1))
245 int regno;
247 direct_load[(int) mode] = direct_store[(int) mode] = 0;
248 PUT_MODE (mem, mode);
249 PUT_MODE (mem1, mode);
250 PUT_MODE (reg, mode);
252 /* See if there is some register that can be used in this mode and
253 directly loaded or stored from memory. */
255 if (mode != VOIDmode && mode != BLKmode)
256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
257 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
258 regno++)
260 if (! HARD_REGNO_MODE_OK (regno, mode))
261 continue;
263 REGNO (reg) = regno;
265 SET_SRC (pat) = mem;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = mem1;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem1;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
287 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
290 mode = GET_MODE_WIDER_MODE (mode))
292 enum machine_mode srcmode;
293 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
294 srcmode = GET_MODE_WIDER_MODE (srcmode))
296 enum insn_code ic;
298 ic = can_extend_p (mode, srcmode, 0);
299 if (ic == CODE_FOR_nothing)
300 continue;
302 PUT_MODE (mem, srcmode);
304 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
305 float_extend_from_mem[mode][srcmode] = true;
310 /* This is run at the start of compiling a function. */
312 void
313 init_expr (void)
315 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
318 /* Copy data from FROM to TO, where the machine modes are not the same.
319 Both modes may be integer, or both may be floating.
320 UNSIGNEDP should be nonzero if FROM is an unsigned type.
321 This causes zero-extension instead of sign-extension. */
323 void
324 convert_move (rtx to, rtx from, int unsignedp)
326 enum machine_mode to_mode = GET_MODE (to);
327 enum machine_mode from_mode = GET_MODE (from);
328 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
329 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
330 enum insn_code code;
331 rtx libcall;
333 /* rtx code for making an equivalent value. */
334 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
335 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
338 if (to_real != from_real)
339 abort ();
341 /* If the source and destination are already the same, then there's
342 nothing to do. */
343 if (to == from)
344 return;
346 /* If FROM is a SUBREG that indicates that we have already done at least
347 the required extension, strip it. We don't handle such SUBREGs as
348 TO here. */
350 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
351 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
352 >= GET_MODE_SIZE (to_mode))
353 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
354 from = gen_lowpart (to_mode, from), from_mode = to_mode;
356 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
357 abort ();
359 if (to_mode == from_mode
360 || (from_mode == VOIDmode && CONSTANT_P (from)))
362 emit_move_insn (to, from);
363 return;
366 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
368 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
369 abort ();
371 if (VECTOR_MODE_P (to_mode))
372 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
373 else
374 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
376 emit_move_insn (to, from);
377 return;
380 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
382 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
383 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
384 return;
387 if (to_real)
389 rtx value, insns;
390 convert_optab tab;
392 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 tab = sext_optab;
394 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
395 tab = trunc_optab;
396 else
397 abort ();
399 /* Try converting directly if the insn is supported. */
401 code = tab->handlers[to_mode][from_mode].insn_code;
402 if (code != CODE_FOR_nothing)
404 emit_unop_insn (code, to, from,
405 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
406 return;
409 /* Otherwise use a libcall. */
410 libcall = tab->handlers[to_mode][from_mode].libfunc;
412 if (!libcall)
413 /* This conversion is not implemented yet. */
414 abort ();
416 start_sequence ();
417 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
418 1, from, from_mode);
419 insns = get_insns ();
420 end_sequence ();
421 emit_libcall_block (insns, to, value,
422 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
423 from)
424 : gen_rtx_FLOAT_EXTEND (to_mode, from));
425 return;
428 /* Handle pointer conversion. */ /* SPEE 900220. */
429 /* Targets are expected to provide conversion insns between PxImode and
430 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
431 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
433 enum machine_mode full_mode
434 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
436 if (trunc_optab->handlers[to_mode][full_mode].insn_code
437 == CODE_FOR_nothing)
438 abort ();
440 if (full_mode != from_mode)
441 from = convert_to_mode (full_mode, from, unsignedp);
442 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
443 to, from, UNKNOWN);
444 return;
446 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
448 enum machine_mode full_mode
449 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
451 if (sext_optab->handlers[full_mode][from_mode].insn_code
452 == CODE_FOR_nothing)
453 abort ();
455 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
456 to, from, UNKNOWN);
457 if (to_mode == full_mode)
458 return;
460 /* else proceed to integer conversions below. */
461 from_mode = full_mode;
464 /* Now both modes are integers. */
466 /* Handle expanding beyond a word. */
467 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
468 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
470 rtx insns;
471 rtx lowpart;
472 rtx fill_value;
473 rtx lowfrom;
474 int i;
475 enum machine_mode lowpart_mode;
476 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
478 /* Try converting directly if the insn is supported. */
479 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
480 != CODE_FOR_nothing)
482 /* If FROM is a SUBREG, put it into a register. Do this
483 so that we always generate the same set of insns for
484 better cse'ing; if an intermediate assignment occurred,
485 we won't be doing the operation directly on the SUBREG. */
486 if (optimize > 0 && GET_CODE (from) == SUBREG)
487 from = force_reg (from_mode, from);
488 emit_unop_insn (code, to, from, equiv_code);
489 return;
491 /* Next, try converting via full word. */
492 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
493 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
494 != CODE_FOR_nothing))
496 if (REG_P (to))
498 if (reg_overlap_mentioned_p (to, from))
499 from = force_reg (from_mode, from);
500 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
502 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
503 emit_unop_insn (code, to,
504 gen_lowpart (word_mode, to), equiv_code);
505 return;
508 /* No special multiword conversion insn; do it by hand. */
509 start_sequence ();
511 /* Since we will turn this into a no conflict block, we must ensure
512 that the source does not overlap the target. */
514 if (reg_overlap_mentioned_p (to, from))
515 from = force_reg (from_mode, from);
517 /* Get a copy of FROM widened to a word, if necessary. */
518 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
519 lowpart_mode = word_mode;
520 else
521 lowpart_mode = from_mode;
523 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
525 lowpart = gen_lowpart (lowpart_mode, to);
526 emit_move_insn (lowpart, lowfrom);
528 /* Compute the value to put in each remaining word. */
529 if (unsignedp)
530 fill_value = const0_rtx;
531 else
533 #ifdef HAVE_slt
534 if (HAVE_slt
535 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
536 && STORE_FLAG_VALUE == -1)
538 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
539 lowpart_mode, 0);
540 fill_value = gen_reg_rtx (word_mode);
541 emit_insn (gen_slt (fill_value));
543 else
544 #endif
546 fill_value
547 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
548 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
549 NULL_RTX, 0);
550 fill_value = convert_to_mode (word_mode, fill_value, 1);
554 /* Fill the remaining words. */
555 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
557 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
558 rtx subword = operand_subword (to, index, 1, to_mode);
560 if (subword == 0)
561 abort ();
563 if (fill_value != subword)
564 emit_move_insn (subword, fill_value);
567 insns = get_insns ();
568 end_sequence ();
570 emit_no_conflict_block (insns, to, from, NULL_RTX,
571 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
572 return;
575 /* Truncating multi-word to a word or less. */
576 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
577 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
579 if (!((MEM_P (from)
580 && ! MEM_VOLATILE_P (from)
581 && direct_load[(int) to_mode]
582 && ! mode_dependent_address_p (XEXP (from, 0)))
583 || REG_P (from)
584 || GET_CODE (from) == SUBREG))
585 from = force_reg (from_mode, from);
586 convert_move (to, gen_lowpart (word_mode, from), 0);
587 return;
590 /* Now follow all the conversions between integers
591 no more than a word long. */
593 /* For truncation, usually we can just refer to FROM in a narrower mode. */
594 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
595 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
596 GET_MODE_BITSIZE (from_mode)))
598 if (!((MEM_P (from)
599 && ! MEM_VOLATILE_P (from)
600 && direct_load[(int) to_mode]
601 && ! mode_dependent_address_p (XEXP (from, 0)))
602 || REG_P (from)
603 || GET_CODE (from) == SUBREG))
604 from = force_reg (from_mode, from);
605 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
606 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
607 from = copy_to_reg (from);
608 emit_move_insn (to, gen_lowpart (to_mode, from));
609 return;
612 /* Handle extension. */
613 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
615 /* Convert directly if that works. */
616 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
617 != CODE_FOR_nothing)
619 if (flag_force_mem)
620 from = force_not_mem (from);
622 emit_unop_insn (code, to, from, equiv_code);
623 return;
625 else
627 enum machine_mode intermediate;
628 rtx tmp;
629 tree shift_amount;
631 /* Search for a mode to convert via. */
632 for (intermediate = from_mode; intermediate != VOIDmode;
633 intermediate = GET_MODE_WIDER_MODE (intermediate))
634 if (((can_extend_p (to_mode, intermediate, unsignedp)
635 != CODE_FOR_nothing)
636 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
637 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
638 GET_MODE_BITSIZE (intermediate))))
639 && (can_extend_p (intermediate, from_mode, unsignedp)
640 != CODE_FOR_nothing))
642 convert_move (to, convert_to_mode (intermediate, from,
643 unsignedp), unsignedp);
644 return;
647 /* No suitable intermediate mode.
648 Generate what we need with shifts. */
649 shift_amount = build_int_cst (NULL_TREE,
650 GET_MODE_BITSIZE (to_mode)
651 - GET_MODE_BITSIZE (from_mode), 0);
652 from = gen_lowpart (to_mode, force_reg (from_mode, from));
653 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
654 to, unsignedp);
655 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
656 to, unsignedp);
657 if (tmp != to)
658 emit_move_insn (to, tmp);
659 return;
663 /* Support special truncate insns for certain modes. */
664 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
666 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
667 to, from, UNKNOWN);
668 return;
671 /* Handle truncation of volatile memrefs, and so on;
672 the things that couldn't be truncated directly,
673 and for which there was no special instruction.
675 ??? Code above formerly short-circuited this, for most integer
676 mode pairs, with a force_reg in from_mode followed by a recursive
677 call to this routine. Appears always to have been wrong. */
678 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
680 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
681 emit_move_insn (to, temp);
682 return;
685 /* Mode combination is not recognized. */
686 abort ();
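/* A typical use of convert_move, for illustration only (the names are
   hypothetical): widening a QImode pseudo FROM into an SImode pseudo
   with zero extension:

     rtx to = gen_reg_rtx (SImode);
     convert_move (to, from, 1);    -- unsignedp != 0, so ZERO_EXTEND

   With unsignedp == 0 the same call would sign-extend instead.  */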
689 /* Return an rtx for a value that would result
690 from converting X to mode MODE.
691 Both X and MODE may be floating, or both integer.
692 UNSIGNEDP is nonzero if X is an unsigned value.
693 This can be done by referring to a part of X in place
694 or by copying to a new temporary with conversion. */
697 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
699 return convert_modes (mode, VOIDmode, x, unsignedp);
702 /* Return an rtx for a value that would result
703 from converting X from mode OLDMODE to mode MODE.
704 Both modes may be floating, or both integer.
705 UNSIGNEDP is nonzero if X is an unsigned value.
707 This can be done by referring to a part of X in place
708 or by copying to a new temporary with conversion.
710 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
713 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
715 rtx temp;
717 /* If FROM is a SUBREG that indicates that we have already done at least
718 the required extension, strip it. */
720 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
721 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
722 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
723 x = gen_lowpart (mode, x);
725 if (GET_MODE (x) != VOIDmode)
726 oldmode = GET_MODE (x);
728 if (mode == oldmode)
729 return x;
731 /* There is one case that we must handle specially: If we are converting
732 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
733 we are to interpret the constant as unsigned, gen_lowpart will do
734 the wrong thing if the constant appears negative. What we want to do is
735 make the high-order word of the constant zero, not all ones. */
737 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
738 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
739 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
741 HOST_WIDE_INT val = INTVAL (x);
743 if (oldmode != VOIDmode
744 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
746 int width = GET_MODE_BITSIZE (oldmode);
748 /* We need to zero extend VAL. */
749 val &= ((HOST_WIDE_INT) 1 << width) - 1;
752 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
755 /* We can do this with a gen_lowpart if both desired and current modes
756 are integer, and this is either a constant integer, a register, or a
757 non-volatile MEM. Except for the constant case where MODE is no
758 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
760 if ((GET_CODE (x) == CONST_INT
761 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
762 || (GET_MODE_CLASS (mode) == MODE_INT
763 && GET_MODE_CLASS (oldmode) == MODE_INT
764 && (GET_CODE (x) == CONST_DOUBLE
765 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
766 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
767 && direct_load[(int) mode])
768 || (REG_P (x)
769 && (! HARD_REGISTER_P (x)
770 || HARD_REGNO_MODE_OK (REGNO (x), mode))
771 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
772 GET_MODE_BITSIZE (GET_MODE (x)))))))))
774 /* ?? If we don't know OLDMODE, we have to assume here that
775 X does not need sign- or zero-extension. This may not be
776 the case, but it's the best we can do. */
777 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
778 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
780 HOST_WIDE_INT val = INTVAL (x);
781 int width = GET_MODE_BITSIZE (oldmode);
783 /* We must sign or zero-extend in this case. Start by
784 zero-extending, then sign extend if we need to. */
785 val &= ((HOST_WIDE_INT) 1 << width) - 1;
786 if (! unsignedp
787 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
788 val |= (HOST_WIDE_INT) (-1) << width;
790 return gen_int_mode (val, mode);
793 return gen_lowpart (mode, x);
796 /* Converting an integer constant into a vector mode is always equivalent
797 to a subreg operation. */
798 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
800 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
801 abort ();
802 return simplify_gen_subreg (mode, x, oldmode, 0);
805 temp = gen_reg_rtx (mode);
806 convert_move (temp, x, unsignedp);
807 return temp;
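/* A worked example of the constant handling above: the call

     convert_modes (SImode, QImode, GEN_INT (-1), 1)

   treats the QImode constant as unsigned, so VAL is masked down to 255
   and the result is (const_int 255) rather than the (const_int -1) that
   gen_lowpart alone would have produced.  */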
810 /* STORE_MAX_PIECES is the number of bytes at a time that we can
811 store efficiently. Due to internal GCC limitations, this is
812 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
813 for an immediate constant. */
815 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
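/* For example, on a host with a 64-bit HOST_WIDE_INT and a target whose
   MOVE_MAX_PIECES is 16, STORE_MAX_PIECES works out to MIN (16, 2 * 8) = 16;
   with a 32-bit HOST_WIDE_INT it would instead be limited to 8 bytes.
   (The numbers are illustrative only.)  */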
817 /* Determine whether the LEN bytes can be moved by using several move
818 instructions. Return nonzero if a call to move_by_pieces should
819 succeed. */
822 can_move_by_pieces (unsigned HOST_WIDE_INT len,
823 unsigned int align ATTRIBUTE_UNUSED)
825 return MOVE_BY_PIECES_P (len, align);
828 /* Generate several move instructions to copy LEN bytes from block FROM to
829 block TO. (These are MEM rtx's with BLKmode).
831 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
832 used to push FROM to the stack.
834 ALIGN is maximum stack alignment we can assume.
836 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
837 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
838 stpcpy. */
841 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
842 unsigned int align, int endp)
844 struct move_by_pieces data;
845 rtx to_addr, from_addr = XEXP (from, 0);
846 unsigned int max_size = MOVE_MAX_PIECES + 1;
847 enum machine_mode mode = VOIDmode, tmode;
848 enum insn_code icode;
850 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
852 data.offset = 0;
853 data.from_addr = from_addr;
854 if (to)
856 to_addr = XEXP (to, 0);
857 data.to = to;
858 data.autinc_to
859 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
860 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
861 data.reverse
862 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
864 else
866 to_addr = NULL_RTX;
867 data.to = NULL_RTX;
868 data.autinc_to = 1;
869 #ifdef STACK_GROWS_DOWNWARD
870 data.reverse = 1;
871 #else
872 data.reverse = 0;
873 #endif
875 data.to_addr = to_addr;
876 data.from = from;
877 data.autinc_from
878 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
879 || GET_CODE (from_addr) == POST_INC
880 || GET_CODE (from_addr) == POST_DEC);
882 data.explicit_inc_from = 0;
883 data.explicit_inc_to = 0;
884 if (data.reverse) data.offset = len;
885 data.len = len;
887 /* If copying requires more than two move insns,
888 copy addresses to registers (to make displacements shorter)
889 and use post-increment if available. */
890 if (!(data.autinc_from && data.autinc_to)
891 && move_by_pieces_ninsns (len, align, max_size) > 2)
893 /* Find the mode of the largest move... */
894 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
895 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
896 if (GET_MODE_SIZE (tmode) < max_size)
897 mode = tmode;
899 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
901 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
902 data.autinc_from = 1;
903 data.explicit_inc_from = -1;
905 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
907 data.from_addr = copy_addr_to_reg (from_addr);
908 data.autinc_from = 1;
909 data.explicit_inc_from = 1;
911 if (!data.autinc_from && CONSTANT_P (from_addr))
912 data.from_addr = copy_addr_to_reg (from_addr);
913 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
915 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
916 data.autinc_to = 1;
917 data.explicit_inc_to = -1;
919 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
921 data.to_addr = copy_addr_to_reg (to_addr);
922 data.autinc_to = 1;
923 data.explicit_inc_to = 1;
925 if (!data.autinc_to && CONSTANT_P (to_addr))
926 data.to_addr = copy_addr_to_reg (to_addr);
929 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
930 if (align >= GET_MODE_ALIGNMENT (tmode))
931 align = GET_MODE_ALIGNMENT (tmode);
932 else
934 enum machine_mode xmode;
936 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
937 tmode != VOIDmode;
938 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
939 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
940 || SLOW_UNALIGNED_ACCESS (tmode, align))
941 break;
943 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
946 /* First move what we can in the largest integer mode, then go to
947 successively smaller modes. */
949 while (max_size > 1)
951 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
952 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
953 if (GET_MODE_SIZE (tmode) < max_size)
954 mode = tmode;
956 if (mode == VOIDmode)
957 break;
959 icode = mov_optab->handlers[(int) mode].insn_code;
960 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
961 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
963 max_size = GET_MODE_SIZE (mode);
966 /* The code above should have handled everything. */
967 if (data.len > 0)
968 abort ();
970 if (endp)
972 rtx to1;
974 if (data.reverse)
975 abort ();
976 if (data.autinc_to)
978 if (endp == 2)
980 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
981 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
982 else
983 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
984 -1));
986 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
987 data.offset);
989 else
991 if (endp == 2)
992 --data.offset;
993 to1 = adjust_address (data.to, QImode, data.offset);
995 return to1;
997 else
998 return data.to;
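/* As a sketch of typical use (the caller and variable names here are
   illustrative, not part of this file): a memcpy-style expander that
   knows the length at compile time might do

     if (can_move_by_pieces (INTVAL (len_rtx), align))
       dest_mem = move_by_pieces (dest_mem, src_mem,
                                  INTVAL (len_rtx), align,
                                  want_end_address ? 1 : 0);

   passing ENDP == 1 when it needs the mempcpy-style end address.  */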
1001 /* Return number of insns required to move L bytes by pieces.
1002 ALIGN (in bits) is maximum alignment we can assume. */
1004 static unsigned HOST_WIDE_INT
1005 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1006 unsigned int max_size)
1008 unsigned HOST_WIDE_INT n_insns = 0;
1009 enum machine_mode tmode;
1011 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1012 if (align >= GET_MODE_ALIGNMENT (tmode))
1013 align = GET_MODE_ALIGNMENT (tmode);
1014 else
1016 enum machine_mode tmode, xmode;
1018 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1019 tmode != VOIDmode;
1020 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1021 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1022 || SLOW_UNALIGNED_ACCESS (tmode, align))
1023 break;
1025 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1028 while (max_size > 1)
1030 enum machine_mode mode = VOIDmode;
1031 enum insn_code icode;
1033 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1034 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1035 if (GET_MODE_SIZE (tmode) < max_size)
1036 mode = tmode;
1038 if (mode == VOIDmode)
1039 break;
1041 icode = mov_optab->handlers[(int) mode].insn_code;
1042 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1043 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1045 max_size = GET_MODE_SIZE (mode);
1048 if (l)
1049 abort ();
1050 return n_insns;
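/* For instance, assuming a target where MOVE_MAX_PIECES is 4 and the
   block is sufficiently aligned, a length of 11 bytes costs
   11/4 = 2 SImode moves, then 3/2 = 1 HImode move, then 1 QImode move,
   so move_by_pieces_ninsns returns 4.  */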
1053 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1054 with move instructions for mode MODE. GENFUN is the gen_... function
1055 to make a move insn for that mode. DATA has all the other info. */
1057 static void
1058 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1059 struct move_by_pieces *data)
1061 unsigned int size = GET_MODE_SIZE (mode);
1062 rtx to1 = NULL_RTX, from1;
1064 while (data->len >= size)
1066 if (data->reverse)
1067 data->offset -= size;
1069 if (data->to)
1071 if (data->autinc_to)
1072 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1073 data->offset);
1074 else
1075 to1 = adjust_address (data->to, mode, data->offset);
1078 if (data->autinc_from)
1079 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1080 data->offset);
1081 else
1082 from1 = adjust_address (data->from, mode, data->offset);
1084 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1085 emit_insn (gen_add2_insn (data->to_addr,
1086 GEN_INT (-(HOST_WIDE_INT)size)));
1087 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1088 emit_insn (gen_add2_insn (data->from_addr,
1089 GEN_INT (-(HOST_WIDE_INT)size)));
1091 if (data->to)
1092 emit_insn ((*genfun) (to1, from1));
1093 else
1095 #ifdef PUSH_ROUNDING
1096 emit_single_push_insn (mode, from1, NULL);
1097 #else
1098 abort ();
1099 #endif
1102 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1103 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1104 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1105 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1107 if (! data->reverse)
1108 data->offset += size;
1110 data->len -= size;
1114 /* Emit code to move a block Y to a block X. This may be done with
1115 string-move instructions, with multiple scalar move instructions,
1116 or with a library call.
1118 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1119 SIZE is an rtx that says how long they are.
1120 ALIGN is the maximum alignment we can assume they have.
1121 METHOD describes what kind of copy this is, and what mechanisms may be used.
1123 Return the address of the new block, if memcpy is called and returns it,
1124 0 otherwise. */
1127 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1129 bool may_use_call;
1130 rtx retval = 0;
1131 unsigned int align;
1133 switch (method)
1135 case BLOCK_OP_NORMAL:
1136 may_use_call = true;
1137 break;
1139 case BLOCK_OP_CALL_PARM:
1140 may_use_call = block_move_libcall_safe_for_call_parm ();
1142 /* Make inhibit_defer_pop nonzero around the library call
1143 to force it to pop the arguments right away. */
1144 NO_DEFER_POP;
1145 break;
1147 case BLOCK_OP_NO_LIBCALL:
1148 may_use_call = false;
1149 break;
1151 default:
1152 abort ();
1155 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1157 if (!MEM_P (x))
1158 abort ();
1159 if (!MEM_P (y))
1160 abort ();
1161 if (size == 0)
1162 abort ();
1164 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1165 block copy is more efficient for other large modes, e.g. DCmode. */
1166 x = adjust_address (x, BLKmode, 0);
1167 y = adjust_address (y, BLKmode, 0);
1169 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1170 can be incorrect is coming from __builtin_memcpy. */
1171 if (GET_CODE (size) == CONST_INT)
1173 if (INTVAL (size) == 0)
1174 return 0;
1176 x = shallow_copy_rtx (x);
1177 y = shallow_copy_rtx (y);
1178 set_mem_size (x, size);
1179 set_mem_size (y, size);
1182 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1183 move_by_pieces (x, y, INTVAL (size), align, 0);
1184 else if (emit_block_move_via_movmem (x, y, size, align))
1186 else if (may_use_call)
1187 retval = emit_block_move_via_libcall (x, y, size);
1188 else
1189 emit_block_move_via_loop (x, y, size, align);
1191 if (method == BLOCK_OP_CALL_PARM)
1192 OK_DEFER_POP;
1194 return retval;
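/* Illustrative call (the operands are assumed to be BLKmode MEMs built
   by the caller):

     retval = emit_block_move (dst_mem, src_mem, GEN_INT (40),
                               BLOCK_OP_NORMAL);

   RETVAL is nonzero only when the memcpy libcall is actually emitted
   and returns a value.  */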
1197 /* A subroutine of emit_block_move. Returns true if calling the
1198 block move libcall will not clobber any parameters which may have
1199 already been placed on the stack. */
1201 static bool
1202 block_move_libcall_safe_for_call_parm (void)
1204 /* If arguments are pushed on the stack, then they're safe. */
1205 if (PUSH_ARGS)
1206 return true;
1208 /* If registers go on the stack anyway, any argument is sure to clobber
1209 an outgoing argument. */
1210 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1212 tree fn = emit_block_move_libcall_fn (false);
1213 (void) fn;
1214 if (REG_PARM_STACK_SPACE (fn) != 0)
1215 return false;
1217 #endif
1219 /* If any argument goes in memory, then it might clobber an outgoing
1220 argument. */
1222 CUMULATIVE_ARGS args_so_far;
1223 tree fn, arg;
1225 fn = emit_block_move_libcall_fn (false);
1226 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1228 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1229 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1231 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1232 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1233 if (!tmp || !REG_P (tmp))
1234 return false;
1235 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1236 NULL_TREE, 1))
1237 return false;
1238 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1241 return true;
1244 /* A subroutine of emit_block_move. Expand a movmem pattern;
1245 return true if successful. */
1247 static bool
1248 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1250 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1251 int save_volatile_ok = volatile_ok;
1252 enum machine_mode mode;
1254 /* Since this is a move insn, we don't care about volatility. */
1255 volatile_ok = 1;
1257 /* Try the most limited insn first, because there's no point
1258 including more than one in the machine description unless
1259 the more limited one has some advantage. */
1261 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1262 mode = GET_MODE_WIDER_MODE (mode))
1264 enum insn_code code = movmem_optab[(int) mode];
1265 insn_operand_predicate_fn pred;
1267 if (code != CODE_FOR_nothing
1268 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1269 here because if SIZE is less than the mode mask, as it is
1270 returned by the macro, it will definitely be less than the
1271 actual mode mask. */
1272 && ((GET_CODE (size) == CONST_INT
1273 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1274 <= (GET_MODE_MASK (mode) >> 1)))
1275 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1276 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1277 || (*pred) (x, BLKmode))
1278 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1279 || (*pred) (y, BLKmode))
1280 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1281 || (*pred) (opalign, VOIDmode)))
1283 rtx op2;
1284 rtx last = get_last_insn ();
1285 rtx pat;
1287 op2 = convert_to_mode (mode, size, 1);
1288 pred = insn_data[(int) code].operand[2].predicate;
1289 if (pred != 0 && ! (*pred) (op2, mode))
1290 op2 = copy_to_mode_reg (mode, op2);
1292 /* ??? When called via emit_block_move_for_call, it'd be
1293 nice if there were some way to inform the backend, so
1294 that it doesn't fail the expansion because it thinks
1295 emitting the libcall would be more efficient. */
1297 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1298 if (pat)
1300 emit_insn (pat);
1301 volatile_ok = save_volatile_ok;
1302 return true;
1304 else
1305 delete_insns_since (last);
1309 volatile_ok = save_volatile_ok;
1310 return false;
1313 /* A subroutine of emit_block_move. Expand a call to memcpy.
1314 Return the return value from memcpy, 0 otherwise. */
1316 static rtx
1317 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1319 rtx dst_addr, src_addr;
1320 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1321 enum machine_mode size_mode;
1322 rtx retval;
1324 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1325 pseudos. We can then place those new pseudos into a VAR_DECL and
1326 use them later. */
1328 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1329 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1331 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1332 src_addr = convert_memory_address (ptr_mode, src_addr);
1334 dst_tree = make_tree (ptr_type_node, dst_addr);
1335 src_tree = make_tree (ptr_type_node, src_addr);
1337 size_mode = TYPE_MODE (sizetype);
1339 size = convert_to_mode (size_mode, size, 1);
1340 size = copy_to_mode_reg (size_mode, size);
1342 /* It is incorrect to use the libcall calling conventions to call
1343 memcpy in this context. This could be a user call to memcpy and
1344 the user may wish to examine the return value from memcpy. For
1345 targets where libcalls and normal calls have different conventions
1346 for returning pointers, we could end up generating incorrect code. */
1348 size_tree = make_tree (sizetype, size);
1350 fn = emit_block_move_libcall_fn (true);
1351 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1352 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1353 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1355 /* Now we have to build up the CALL_EXPR itself. */
1356 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1357 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1358 call_expr, arg_list, NULL_TREE);
1360 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1362 return retval;
1365 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1366 for the function we use for block copies. The first time FOR_CALL
1367 is true, we call assemble_external. */
1369 static GTY(()) tree block_move_fn;
1371 void
1372 init_block_move_fn (const char *asmspec)
1374 if (!block_move_fn)
1376 tree args, fn;
1378 fn = get_identifier ("memcpy");
1379 args = build_function_type_list (ptr_type_node, ptr_type_node,
1380 const_ptr_type_node, sizetype,
1381 NULL_TREE);
1383 fn = build_decl (FUNCTION_DECL, fn, args);
1384 DECL_EXTERNAL (fn) = 1;
1385 TREE_PUBLIC (fn) = 1;
1386 DECL_ARTIFICIAL (fn) = 1;
1387 TREE_NOTHROW (fn) = 1;
1389 block_move_fn = fn;
1392 if (asmspec)
1393 set_user_assembler_name (block_move_fn, asmspec);
1396 static tree
1397 emit_block_move_libcall_fn (int for_call)
1399 static bool emitted_extern;
1401 if (!block_move_fn)
1402 init_block_move_fn (NULL);
1404 if (for_call && !emitted_extern)
1406 emitted_extern = true;
1407 make_decl_rtl (block_move_fn);
1408 assemble_external (block_move_fn);
1411 return block_move_fn;
1414 /* A subroutine of emit_block_move. Copy the data via an explicit
1415 loop. This is used only when libcalls are forbidden. */
1416 /* ??? It'd be nice to copy in hunks larger than QImode. */
1418 static void
1419 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1420 unsigned int align ATTRIBUTE_UNUSED)
1422 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1423 enum machine_mode iter_mode;
1425 iter_mode = GET_MODE (size);
1426 if (iter_mode == VOIDmode)
1427 iter_mode = word_mode;
1429 top_label = gen_label_rtx ();
1430 cmp_label = gen_label_rtx ();
1431 iter = gen_reg_rtx (iter_mode);
1433 emit_move_insn (iter, const0_rtx);
1435 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1436 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1437 do_pending_stack_adjust ();
1439 emit_jump (cmp_label);
1440 emit_label (top_label);
1442 tmp = convert_modes (Pmode, iter_mode, iter, true);
1443 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1444 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1445 x = change_address (x, QImode, x_addr);
1446 y = change_address (y, QImode, y_addr);
1448 emit_move_insn (x, y);
1450 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1451 true, OPTAB_LIB_WIDEN);
1452 if (tmp != iter)
1453 emit_move_insn (iter, tmp);
1455 emit_label (cmp_label);
1457 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1458 true, top_label);
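/* The loop emitted above is equivalent to the following C, one byte per
   iteration:

     for (iter = 0; iter < size; iter++)
       x[iter] = y[iter];

   which is why this path is used only when a libcall is forbidden.  */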
1461 /* Copy all or part of a value X into registers starting at REGNO.
1462 The number of registers to be filled is NREGS. */
1464 void
1465 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1467 int i;
1468 #ifdef HAVE_load_multiple
1469 rtx pat;
1470 rtx last;
1471 #endif
1473 if (nregs == 0)
1474 return;
1476 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1477 x = validize_mem (force_const_mem (mode, x));
1479 /* See if the machine can do this with a load multiple insn. */
1480 #ifdef HAVE_load_multiple
1481 if (HAVE_load_multiple)
1483 last = get_last_insn ();
1484 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1485 GEN_INT (nregs));
1486 if (pat)
1488 emit_insn (pat);
1489 return;
1491 else
1492 delete_insns_since (last);
1494 #endif
1496 for (i = 0; i < nregs; i++)
1497 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1498 operand_subword_force (x, i, mode));
1501 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1502 The number of registers to be filled is NREGS. */
1504 void
1505 move_block_from_reg (int regno, rtx x, int nregs)
1507 int i;
1509 if (nregs == 0)
1510 return;
1512 /* See if the machine can do this with a store multiple insn. */
1513 #ifdef HAVE_store_multiple
1514 if (HAVE_store_multiple)
1516 rtx last = get_last_insn ();
1517 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1518 GEN_INT (nregs));
1519 if (pat)
1521 emit_insn (pat);
1522 return;
1524 else
1525 delete_insns_since (last);
1527 #endif
1529 for (i = 0; i < nregs; i++)
1531 rtx tem = operand_subword (x, i, 1, BLKmode);
1533 if (tem == 0)
1534 abort ();
1536 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1540 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1541 ORIG, where ORIG is a non-consecutive group of registers represented by
1542 a PARALLEL. The clone is identical to the original except in that the
1543 original set of registers is replaced by a new set of pseudo registers.
1544 The new set has the same modes as the original set. */
1547 gen_group_rtx (rtx orig)
1549 int i, length;
1550 rtx *tmps;
1552 if (GET_CODE (orig) != PARALLEL)
1553 abort ();
1555 length = XVECLEN (orig, 0);
1556 tmps = alloca (sizeof (rtx) * length);
1558 /* Skip a NULL entry in first slot. */
1559 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1561 if (i)
1562 tmps[0] = 0;
1564 for (; i < length; i++)
1566 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1567 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1569 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1572 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
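/* For example, a group describing a value split across two registers
   might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   and gen_group_rtx returns the same PARALLEL shape with each hard
   register replaced by a fresh pseudo of the same mode.  (The register
   numbers and modes are illustrative.)  */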
1575 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1576 where DST is non-consecutive registers represented by a PARALLEL.
1577 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1578 if not known. */
1580 void
1581 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1583 rtx *tmps, src;
1584 int start, i;
1586 if (GET_CODE (dst) != PARALLEL)
1587 abort ();
1589 /* Check for a NULL entry, used to indicate that the parameter goes
1590 both on the stack and in registers. */
1591 if (XEXP (XVECEXP (dst, 0, 0), 0))
1592 start = 0;
1593 else
1594 start = 1;
1596 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1598 /* Process the pieces. */
1599 for (i = start; i < XVECLEN (dst, 0); i++)
1601 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1602 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1603 unsigned int bytelen = GET_MODE_SIZE (mode);
1604 int shift = 0;
1606 /* Handle trailing fragments that run over the size of the struct. */
1607 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1609 /* Arrange to shift the fragment to where it belongs.
1610 extract_bit_field loads to the lsb of the reg. */
1611 if (
1612 #ifdef BLOCK_REG_PADDING
1613 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1614 == (BYTES_BIG_ENDIAN ? upward : downward)
1615 #else
1616 BYTES_BIG_ENDIAN
1617 #endif
1619 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1620 bytelen = ssize - bytepos;
1621 if (bytelen <= 0)
1622 abort ();
1625 /* If we won't be loading directly from memory, protect the real source
1626 from strange tricks we might play; but make sure that the source can
1627 be loaded directly into the destination. */
1628 src = orig_src;
1629 if (!MEM_P (orig_src)
1630 && (!CONSTANT_P (orig_src)
1631 || (GET_MODE (orig_src) != mode
1632 && GET_MODE (orig_src) != VOIDmode)))
1634 if (GET_MODE (orig_src) == VOIDmode)
1635 src = gen_reg_rtx (mode);
1636 else
1637 src = gen_reg_rtx (GET_MODE (orig_src));
1639 emit_move_insn (src, orig_src);
1642 /* Optimize the access just a bit. */
1643 if (MEM_P (src)
1644 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1645 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1646 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1647 && bytelen == GET_MODE_SIZE (mode))
1649 tmps[i] = gen_reg_rtx (mode);
1650 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1652 else if (GET_CODE (src) == CONCAT)
1654 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1655 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1657 if ((bytepos == 0 && bytelen == slen0)
1658 || (bytepos != 0 && bytepos + bytelen <= slen))
1660 /* The following assumes that the concatenated objects all
1661 have the same size. In this case, a simple calculation
1662 can be used to determine the object and the bit field
1663 to be extracted. */
1664 tmps[i] = XEXP (src, bytepos / slen0);
1665 if (! CONSTANT_P (tmps[i])
1666 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1667 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1668 (bytepos % slen0) * BITS_PER_UNIT,
1669 1, NULL_RTX, mode, mode);
1671 else if (bytepos == 0)
1673 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1674 emit_move_insn (mem, src);
1675 tmps[i] = adjust_address (mem, mode, 0);
1677 else
1678 abort ();
1680 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1681 SIMD register, which is currently broken. While we get GCC
1682 to emit proper RTL for these cases, let's dump to memory. */
1683 else if (VECTOR_MODE_P (GET_MODE (dst))
1684 && REG_P (src))
1686 int slen = GET_MODE_SIZE (GET_MODE (src));
1687 rtx mem;
1689 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1690 emit_move_insn (mem, src);
1691 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1693 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1694 && XVECLEN (dst, 0) > 1)
1695 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1696 else if (CONSTANT_P (src)
1697 || (REG_P (src) && GET_MODE (src) == mode))
1698 tmps[i] = src;
1699 else
1700 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1701 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1702 mode, mode);
1704 if (shift)
1705 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1706 build_int_cst (NULL_TREE,
1707 shift, 0), tmps[i], 0);
1710 /* Copy the extracted pieces into the proper (probable) hard regs. */
1711 for (i = start; i < XVECLEN (dst, 0); i++)
1712 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1715 /* Emit code to move a block SRC to block DST, where SRC and DST are
1716 non-consecutive groups of registers, each represented by a PARALLEL. */
1718 void
1719 emit_group_move (rtx dst, rtx src)
1721 int i;
1723 if (GET_CODE (src) != PARALLEL
1724 || GET_CODE (dst) != PARALLEL
1725 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1726 abort ();
1728 /* Skip first entry if NULL. */
1729 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1730 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1731 XEXP (XVECEXP (src, 0, i), 0));
1734 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1735 where SRC is non-consecutive registers represented by a PARALLEL.
1736 SSIZE represents the total size of block ORIG_DST, or -1 if not
1737 known. */
1739 void
1740 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1742 rtx *tmps, dst;
1743 int start, i;
1745 if (GET_CODE (src) != PARALLEL)
1746 abort ();
1748 /* Check for a NULL entry, used to indicate that the parameter goes
1749 both on the stack and in registers. */
1750 if (XEXP (XVECEXP (src, 0, 0), 0))
1751 start = 0;
1752 else
1753 start = 1;
1755 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1757 /* Copy the (probable) hard regs into pseudos. */
1758 for (i = start; i < XVECLEN (src, 0); i++)
1760 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1761 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1762 emit_move_insn (tmps[i], reg);
1765 /* If we won't be storing directly into memory, protect the real destination
1766 from strange tricks we might play. */
1767 dst = orig_dst;
1768 if (GET_CODE (dst) == PARALLEL)
1770 rtx temp;
1772 /* We can get a PARALLEL dst if there is a conditional expression in
1773 a return statement. In that case, the dst and src are the same,
1774 so no action is necessary. */
1775 if (rtx_equal_p (dst, src))
1776 return;
1778 /* It is unclear if we can ever reach here, but we may as well handle
1779 it. Allocate a temporary, and split this into a store/load to/from
1780 the temporary. */
1782 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1783 emit_group_store (temp, src, type, ssize);
1784 emit_group_load (dst, temp, type, ssize);
1785 return;
1787 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1789 dst = gen_reg_rtx (GET_MODE (orig_dst));
1790 /* Make life a bit easier for combine. */
1791 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1794 /* Process the pieces. */
1795 for (i = start; i < XVECLEN (src, 0); i++)
1797 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1798 enum machine_mode mode = GET_MODE (tmps[i]);
1799 unsigned int bytelen = GET_MODE_SIZE (mode);
1800 rtx dest = dst;
1802 /* Handle trailing fragments that run over the size of the struct. */
1803 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1805 /* store_bit_field always takes its value from the lsb.
1806 Move the fragment to the lsb if it's not already there. */
1807 if (
1808 #ifdef BLOCK_REG_PADDING
1809 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1810 == (BYTES_BIG_ENDIAN ? upward : downward)
1811 #else
1812 BYTES_BIG_ENDIAN
1813 #endif
1816 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1817 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1818 build_int_cst (NULL_TREE,
1819 shift, 0), tmps[i], 0);
1821 bytelen = ssize - bytepos;
1824 if (GET_CODE (dst) == CONCAT)
1826 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1827 dest = XEXP (dst, 0);
1828 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1830 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1831 dest = XEXP (dst, 1);
1833 else if (bytepos == 0 && XVECLEN (src, 0))
1835 dest = assign_stack_temp (GET_MODE (dest),
1836 GET_MODE_SIZE (GET_MODE (dest)), 0);
1837 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1838 tmps[i]);
1839 dst = dest;
1840 break;
1842 else
1843 abort ();
1846 /* Optimize the access just a bit. */
1847 if (MEM_P (dest)
1848 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1849 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1850 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1851 && bytelen == GET_MODE_SIZE (mode))
1852 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1853 else
1854 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1855 mode, tmps[i]);
1858 /* Copy from the pseudo into the (probable) hard reg. */
1859 if (orig_dst != dst)
1860 emit_move_insn (orig_dst, dst);
1863 /* Generate code to copy a BLKmode object of TYPE out of a
1864 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1865 is null, a stack temporary is created. TGTBLK is returned.
1867 The purpose of this routine is to handle functions that return
1868 BLKmode structures in registers. Some machines (the PA for example)
1869 want to return all small structures in registers regardless of the
1870 structure's alignment. */
1873 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1875 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1876 rtx src = NULL, dst = NULL;
1877 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1878 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1880 if (tgtblk == 0)
1882 tgtblk = assign_temp (build_qualified_type (type,
1883 (TYPE_QUALS (type)
1884 | TYPE_QUAL_CONST)),
1885 0, 1, 1);
1886 preserve_temp_slots (tgtblk);
1889 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1890 into a new pseudo which is a full word. */
1892 if (GET_MODE (srcreg) != BLKmode
1893 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1894 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1896 /* If the structure doesn't take up a whole number of words, see whether
1897 SRCREG is padded on the left or on the right. If it's on the left,
1898 set PADDING_CORRECTION to the number of bits to skip.
1900 In most ABIs, the structure will be returned at the least significant end of
1901 the register, which translates to right padding on little-endian
1902 targets and left padding on big-endian targets. The opposite
1903 holds if the structure is returned at the most significant
1904 end of the register. */
1905 if (bytes % UNITS_PER_WORD != 0
1906 && (targetm.calls.return_in_msb (type)
1907 ? !BYTES_BIG_ENDIAN
1908 : BYTES_BIG_ENDIAN))
1909 padding_correction
1910 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
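  /* Worked example (hypothetical 32-bit target, BITS_PER_WORD == 32,
     UNITS_PER_WORD == 4): for a 5-byte structure, bytes % UNITS_PER_WORD
     is 1, so PADDING_CORRECTION is 32 - 1 * 8 == 24, i.e. the first 24
     bits read from SRCREG are padding and are skipped.  */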
1912 /* Copy the structure BITSIZE bits at a time.
1914 We could probably emit more efficient code for machines which do not use
1915 strict alignment, but it doesn't seem worth the effort at the current
1916 time. */
1917 for (bitpos = 0, xbitpos = padding_correction;
1918 bitpos < bytes * BITS_PER_UNIT;
1919 bitpos += bitsize, xbitpos += bitsize)
1921 /* We need a new source operand each time xbitpos is on a
1922 word boundary and when xbitpos == padding_correction
1923 (the first time through). */
1924 if (xbitpos % BITS_PER_WORD == 0
1925 || xbitpos == padding_correction)
1926 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1927 GET_MODE (srcreg));
1929 /* We need a new destination operand each time bitpos is on
1930 a word boundary. */
1931 if (bitpos % BITS_PER_WORD == 0)
1932 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
1934 /* Use xbitpos for the source extraction (right justified) and
1935 bitpos for the destination store (left justified). */
1936 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1937 extract_bit_field (src, bitsize,
1938 xbitpos % BITS_PER_WORD, 1,
1939 NULL_RTX, word_mode, word_mode));
1942 return tgtblk;
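/* Illustrative example (hypothetical, not tied to any particular target):
   a function

       struct six { char c[6]; } f (void);

   may return its value in two word-sized registers.  The caller then uses
   copy_blkmode_from_reg to spill those registers, BITSIZE bits at a time,
   into a BLKmode stack temporary that can be addressed like any other
   struct in memory.  */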
1945 /* Add a USE expression for REG to the (possibly empty) list pointed
1946 to by CALL_FUSAGE. REG must denote a hard register. */
1948 void
1949 use_reg (rtx *call_fusage, rtx reg)
1951 if (!REG_P (reg)
1952 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1953 abort ();
1955 *call_fusage
1956 = gen_rtx_EXPR_LIST (VOIDmode,
1957 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1960 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1961 starting at REGNO. All of these registers must be hard registers. */
1963 void
1964 use_regs (rtx *call_fusage, int regno, int nregs)
1966 int i;
1968 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1969 abort ();
1971 for (i = 0; i < nregs; i++)
1972 use_reg (call_fusage, regno_reg_rtx[regno + i]);
1975 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1976 PARALLEL REGS. This is for calls that pass values in multiple
1977 non-contiguous locations. The Irix 6 ABI has examples of this. */
1979 void
1980 use_group_regs (rtx *call_fusage, rtx regs)
1982 int i;
1984 for (i = 0; i < XVECLEN (regs, 0); i++)
1986 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1988 /* A NULL entry means the parameter goes both on the stack and in
1989 registers. This can also be a MEM for targets that pass values
1990 partially on the stack and partially in registers. */
1991 if (reg != 0 && REG_P (reg))
1992 use_reg (call_fusage, reg);
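/* For illustration, a PARALLEL handled by use_group_regs might look like
   (register numbers and modes are made up):

       (parallel [(expr_list (reg:DI 4) (const_int 0))
                  (expr_list (reg:DI 5) (const_int 8))])

   where each element pairs a register with the byte offset of the piece
   of the value it carries; only the register entries are added to
   CALL_FUSAGE.  */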
1997 /* Determine whether the LEN bytes generated by CONSTFUN can be
1998 stored to memory using several move instructions. CONSTFUNDATA is
1999 a pointer which will be passed as argument in every CONSTFUN call.
2000 ALIGN is maximum alignment we can assume. Return nonzero if a
2001 call to store_by_pieces should succeed. */
2004 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2005 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2006 void *constfundata, unsigned int align)
2008 unsigned HOST_WIDE_INT l;
2009 unsigned int max_size;
2010 HOST_WIDE_INT offset = 0;
2011 enum machine_mode mode, tmode;
2012 enum insn_code icode;
2013 int reverse;
2014 rtx cst;
2016 if (len == 0)
2017 return 1;
2019 if (! STORE_BY_PIECES_P (len, align))
2020 return 0;
2022 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2023 if (align >= GET_MODE_ALIGNMENT (tmode))
2024 align = GET_MODE_ALIGNMENT (tmode);
2025 else
2027 enum machine_mode xmode;
2029 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2030 tmode != VOIDmode;
2031 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2032 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2033 || SLOW_UNALIGNED_ACCESS (tmode, align))
2034 break;
2036 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2039 /* We would first store what we can in the largest integer mode, then go to
2040 successively smaller modes. */
2042 for (reverse = 0;
2043 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2044 reverse++)
2046 l = len;
2047 mode = VOIDmode;
2048 max_size = STORE_MAX_PIECES + 1;
2049 while (max_size > 1)
2051 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2052 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2053 if (GET_MODE_SIZE (tmode) < max_size)
2054 mode = tmode;
2056 if (mode == VOIDmode)
2057 break;
2059 icode = mov_optab->handlers[(int) mode].insn_code;
2060 if (icode != CODE_FOR_nothing
2061 && align >= GET_MODE_ALIGNMENT (mode))
2063 unsigned int size = GET_MODE_SIZE (mode);
2065 while (l >= size)
2067 if (reverse)
2068 offset -= size;
2070 cst = (*constfun) (constfundata, offset, mode);
2071 if (!LEGITIMATE_CONSTANT_P (cst))
2072 return 0;
2074 if (!reverse)
2075 offset += size;
2077 l -= size;
2081 max_size = GET_MODE_SIZE (mode);
2084 /* The code above should have handled everything. */
2085 if (l != 0)
2086 abort ();
2089 return 1;
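/* A minimal sketch of a CONSTFUN callback, assuming the data pointer is a
   nul-terminated string (hypothetical helper; the string built-ins in
   builtins.c use the same pattern via c_readstr):

       static rtx
       example_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
       {
         const char *str = (const char *) data;
         return c_readstr (str + offset, mode);
       }

   can_store_by_pieces (strlen (str) + 1, example_read_str, (void *) str,
   align) then reports whether store_by_pieces could expand the copy with
   inline moves.  */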
2092 /* Generate several move instructions to store LEN bytes generated by
2093 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2094 pointer which will be passed as argument in every CONSTFUN call.
2095 ALIGN is maximum alignment we can assume.
2096 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2097 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2098 stpcpy. */
2101 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2102 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2103 void *constfundata, unsigned int align, int endp)
2105 struct store_by_pieces data;
2107 if (len == 0)
2109 if (endp == 2)
2110 abort ();
2111 return to;
2114 if (! STORE_BY_PIECES_P (len, align))
2115 abort ();
2116 data.constfun = constfun;
2117 data.constfundata = constfundata;
2118 data.len = len;
2119 data.to = to;
2120 store_by_pieces_1 (&data, align);
2121 if (endp)
2123 rtx to1;
2125 if (data.reverse)
2126 abort ();
2127 if (data.autinc_to)
2129 if (endp == 2)
2131 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2132 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2133 else
2134 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2135 -1));
2137 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2138 data.offset);
2140 else
2142 if (endp == 2)
2143 --data.offset;
2144 to1 = adjust_address (data.to, QImode, data.offset);
2146 return to1;
2148 else
2149 return data.to;
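/* For illustration (hypothetical values): after store_by_pieces (to, 6,
   constfun, data, align, endp) has written six bytes, ENDP == 0 returns
   TO itself, ENDP == 1 returns the address just past the six bytes (as
   mempcpy would), and ENDP == 2 returns the address of the last byte
   written (as stpcpy returns the address of the nul it stored).  */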
2152 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2153 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2155 static void
2156 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2158 struct store_by_pieces data;
2160 if (len == 0)
2161 return;
2163 data.constfun = clear_by_pieces_1;
2164 data.constfundata = NULL;
2165 data.len = len;
2166 data.to = to;
2167 store_by_pieces_1 (&data, align);
2170 /* Callback routine for clear_by_pieces.
2171 Return const0_rtx unconditionally. */
2173 static rtx
2174 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2175 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2176 enum machine_mode mode ATTRIBUTE_UNUSED)
2178 return const0_rtx;
2181 /* Subroutine of clear_by_pieces and store_by_pieces.
2182 Generate several move instructions to store LEN bytes of block TO. (A MEM
2183 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2185 static void
2186 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2187 unsigned int align ATTRIBUTE_UNUSED)
2189 rtx to_addr = XEXP (data->to, 0);
2190 unsigned int max_size = STORE_MAX_PIECES + 1;
2191 enum machine_mode mode = VOIDmode, tmode;
2192 enum insn_code icode;
2194 data->offset = 0;
2195 data->to_addr = to_addr;
2196 data->autinc_to
2197 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2198 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2200 data->explicit_inc_to = 0;
2201 data->reverse
2202 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2203 if (data->reverse)
2204 data->offset = data->len;
2206 /* If storing requires more than two move insns,
2207 copy addresses to registers (to make displacements shorter)
2208 and use post-increment if available. */
2209 if (!data->autinc_to
2210 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2212 /* Determine the main mode we'll be using. */
2213 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2214 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2215 if (GET_MODE_SIZE (tmode) < max_size)
2216 mode = tmode;
2218 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2220 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2221 data->autinc_to = 1;
2222 data->explicit_inc_to = -1;
2225 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2226 && ! data->autinc_to)
2228 data->to_addr = copy_addr_to_reg (to_addr);
2229 data->autinc_to = 1;
2230 data->explicit_inc_to = 1;
2233 if ( !data->autinc_to && CONSTANT_P (to_addr))
2234 data->to_addr = copy_addr_to_reg (to_addr);
2237 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2238 if (align >= GET_MODE_ALIGNMENT (tmode))
2239 align = GET_MODE_ALIGNMENT (tmode);
2240 else
2242 enum machine_mode xmode;
2244 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2245 tmode != VOIDmode;
2246 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2247 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2248 || SLOW_UNALIGNED_ACCESS (tmode, align))
2249 break;
2251 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2254 /* First store what we can in the largest integer mode, then go to
2255 successively smaller modes. */
2257 while (max_size > 1)
2259 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2260 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2261 if (GET_MODE_SIZE (tmode) < max_size)
2262 mode = tmode;
2264 if (mode == VOIDmode)
2265 break;
2267 icode = mov_optab->handlers[(int) mode].insn_code;
2268 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2269 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2271 max_size = GET_MODE_SIZE (mode);
2274 /* The code above should have handled everything. */
2275 if (data->len != 0)
2276 abort ();
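/* Worked example (hypothetical target with STORE_MAX_PIECES == 8, all the
   needed move patterns, and sufficient alignment): for a length of 7 bytes
   the loop above emits one SImode store (4 bytes), then one HImode store
   (2 bytes), then one QImode store (1 byte), leaving data->len == 0.  */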
2279 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2280 with move instructions for mode MODE. GENFUN is the gen_... function
2281 to make a move insn for that mode. DATA has all the other info. */
2283 static void
2284 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2285 struct store_by_pieces *data)
2287 unsigned int size = GET_MODE_SIZE (mode);
2288 rtx to1, cst;
2290 while (data->len >= size)
2292 if (data->reverse)
2293 data->offset -= size;
2295 if (data->autinc_to)
2296 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2297 data->offset);
2298 else
2299 to1 = adjust_address (data->to, mode, data->offset);
2301 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2302 emit_insn (gen_add2_insn (data->to_addr,
2303 GEN_INT (-(HOST_WIDE_INT) size)));
2305 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2306 emit_insn ((*genfun) (to1, cst));
2308 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2309 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2311 if (! data->reverse)
2312 data->offset += size;
2314 data->len -= size;
2318 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2319 its length in bytes. */
2322 clear_storage (rtx object, rtx size)
2324 rtx retval = 0;
2325 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2326 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2328 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2329 just move a zero. Otherwise, do this a piece at a time. */
2330 if (GET_MODE (object) != BLKmode
2331 && GET_CODE (size) == CONST_INT
2332 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2333 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2334 else
2336 if (size == const0_rtx)
2338 else if (GET_CODE (size) == CONST_INT
2339 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2340 clear_by_pieces (object, INTVAL (size), align);
2341 else if (clear_storage_via_clrmem (object, size, align))
2343 else
2344 retval = clear_storage_via_libcall (object, size);
2347 return retval;
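/* For illustration: a clear of a 16-byte BLKmode object with a constant
   size and adequate alignment will normally be expanded by clear_by_pieces
   above; a variable-sized clear tries a target clrmem pattern first and
   otherwise falls back to the memset libcall path.  */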
2350 /* A subroutine of clear_storage. Expand a clrmem pattern;
2351 return true if successful. */
2353 static bool
2354 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2356 /* Try the most limited insn first, because there's no point
2357 including more than one in the machine description unless
2358 the more limited one has some advantage. */
2360 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2361 enum machine_mode mode;
2363 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2364 mode = GET_MODE_WIDER_MODE (mode))
2366 enum insn_code code = clrmem_optab[(int) mode];
2367 insn_operand_predicate_fn pred;
2369 if (code != CODE_FOR_nothing
2370 /* We don't need MODE to be narrower than
2371 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2372 the mode mask, as it is returned by the macro, it will
2373 definitely be less than the actual mode mask. */
2374 && ((GET_CODE (size) == CONST_INT
2375 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2376 <= (GET_MODE_MASK (mode) >> 1)))
2377 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2378 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2379 || (*pred) (object, BLKmode))
2380 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2381 || (*pred) (opalign, VOIDmode)))
2383 rtx op1;
2384 rtx last = get_last_insn ();
2385 rtx pat;
2387 op1 = convert_to_mode (mode, size, 1);
2388 pred = insn_data[(int) code].operand[1].predicate;
2389 if (pred != 0 && ! (*pred) (op1, mode))
2390 op1 = copy_to_mode_reg (mode, op1);
2392 pat = GEN_FCN ((int) code) (object, op1, opalign);
2393 if (pat)
2395 emit_insn (pat);
2396 return true;
2398 else
2399 delete_insns_since (last);
2403 return false;
2406 /* A subroutine of clear_storage. Expand a call to memset.
2407 Return the return value of memset, 0 otherwise. */
2409 static rtx
2410 clear_storage_via_libcall (rtx object, rtx size)
2412 tree call_expr, arg_list, fn, object_tree, size_tree;
2413 enum machine_mode size_mode;
2414 rtx retval;
2416 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2417 wrap those pseudos in tree nodes and use them in the call built below. */
2419 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2421 size_mode = TYPE_MODE (sizetype);
2422 size = convert_to_mode (size_mode, size, 1);
2423 size = copy_to_mode_reg (size_mode, size);
2425 /* It is incorrect to use the libcall calling conventions to call
2426 memset in this context. This could be a user call to memset and
2427 the user may wish to examine the return value from memset. For
2428 targets where libcalls and normal calls have different conventions
2429 for returning pointers, we could end up generating incorrect code. */
2431 object_tree = make_tree (ptr_type_node, object);
2432 size_tree = make_tree (sizetype, size);
2434 fn = clear_storage_libcall_fn (true);
2435 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2436 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2437 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2439 /* Now we have to build up the CALL_EXPR itself. */
2440 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2441 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2442 call_expr, arg_list, NULL_TREE);
2444 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2446 return retval;
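/* At the source level the call built above is roughly (sketch only):

       void *retval = memset (object, 0, size);

   expressed as a genuine CALL_EXPR so that the target's normal calling
   conventions, not its libcall conventions, decide where the returned
   pointer lives.  */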
2449 /* A subroutine of clear_storage_via_libcall. Create the tree node
2450 for the function we use for block clears. The first time FOR_CALL
2451 is true, we call assemble_external. */
2453 static GTY(()) tree block_clear_fn;
2455 void
2456 init_block_clear_fn (const char *asmspec)
2458 if (!block_clear_fn)
2460 tree fn, args;
2462 fn = get_identifier ("memset");
2463 args = build_function_type_list (ptr_type_node, ptr_type_node,
2464 integer_type_node, sizetype,
2465 NULL_TREE);
2467 fn = build_decl (FUNCTION_DECL, fn, args);
2468 DECL_EXTERNAL (fn) = 1;
2469 TREE_PUBLIC (fn) = 1;
2470 DECL_ARTIFICIAL (fn) = 1;
2471 TREE_NOTHROW (fn) = 1;
2473 block_clear_fn = fn;
2476 if (asmspec)
2477 set_user_assembler_name (block_clear_fn, asmspec);
2480 static tree
2481 clear_storage_libcall_fn (int for_call)
2483 static bool emitted_extern;
2485 if (!block_clear_fn)
2486 init_block_clear_fn (NULL);
2488 if (for_call && !emitted_extern)
2490 emitted_extern = true;
2491 make_decl_rtl (block_clear_fn);
2492 assemble_external (block_clear_fn);
2495 return block_clear_fn;
2498 /* Generate code to copy Y into X.
2499 Both Y and X must have the same mode, except that
2500 Y can be a constant with VOIDmode.
2501 This mode cannot be BLKmode; use emit_block_move for that.
2503 Return the last instruction emitted. */
2506 emit_move_insn (rtx x, rtx y)
2508 enum machine_mode mode = GET_MODE (x);
2509 rtx y_cst = NULL_RTX;
2510 rtx last_insn, set;
2512 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2513 abort ();
2515 if (CONSTANT_P (y))
2517 if (optimize
2518 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2519 && (last_insn = compress_float_constant (x, y)))
2520 return last_insn;
2522 y_cst = y;
2524 if (!LEGITIMATE_CONSTANT_P (y))
2526 y = force_const_mem (mode, y);
2528 /* If the target's cannot_force_const_mem prevented the spill,
2529 assume that the target's move expanders will also take care
2530 of the non-legitimate constant. */
2531 if (!y)
2532 y = y_cst;
2536 /* If X or Y are memory references, verify that their addresses are valid
2537 for the machine. */
2538 if (MEM_P (x)
2539 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2540 && ! push_operand (x, GET_MODE (x)))
2541 || (flag_force_addr
2542 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2543 x = validize_mem (x);
2545 if (MEM_P (y)
2546 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2547 || (flag_force_addr
2548 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2549 y = validize_mem (y);
2551 if (mode == BLKmode)
2552 abort ();
2554 last_insn = emit_move_insn_1 (x, y);
2556 if (y_cst && REG_P (x)
2557 && (set = single_set (last_insn)) != NULL_RTX
2558 && SET_DEST (set) == x
2559 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2560 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2562 return last_insn;
2565 /* Low level part of emit_move_insn.
2566 Called just like emit_move_insn, but assumes X and Y
2567 are basically valid. */
2570 emit_move_insn_1 (rtx x, rtx y)
2572 enum machine_mode mode = GET_MODE (x);
2573 enum machine_mode submode;
2574 enum mode_class class = GET_MODE_CLASS (mode);
2576 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2577 abort ();
2579 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2580 return
2581 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2583 /* Expand complex moves by moving real part and imag part, if possible. */
2584 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2585 && BLKmode != (submode = GET_MODE_INNER (mode))
2586 && (mov_optab->handlers[(int) submode].insn_code
2587 != CODE_FOR_nothing))
2589 /* Don't split destination if it is a stack push. */
2590 int stack = push_operand (x, GET_MODE (x));
2592 #ifdef PUSH_ROUNDING
2593 /* In case we output to the stack, but the size is smaller than what the
2594 machine can push exactly, we need to use move instructions. */
2595 if (stack
2596 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2597 != GET_MODE_SIZE (submode)))
2599 rtx temp;
2600 HOST_WIDE_INT offset1, offset2;
2602 /* Do not use anti_adjust_stack, since we don't want to update
2603 stack_pointer_delta. */
2604 temp = expand_binop (Pmode,
2605 #ifdef STACK_GROWS_DOWNWARD
2606 sub_optab,
2607 #else
2608 add_optab,
2609 #endif
2610 stack_pointer_rtx,
2611 GEN_INT
2612 (PUSH_ROUNDING
2613 (GET_MODE_SIZE (GET_MODE (x)))),
2614 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2616 if (temp != stack_pointer_rtx)
2617 emit_move_insn (stack_pointer_rtx, temp);
2619 #ifdef STACK_GROWS_DOWNWARD
2620 offset1 = 0;
2621 offset2 = GET_MODE_SIZE (submode);
2622 #else
2623 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2624 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2625 + GET_MODE_SIZE (submode));
2626 #endif
2628 emit_move_insn (change_address (x, submode,
2629 gen_rtx_PLUS (Pmode,
2630 stack_pointer_rtx,
2631 GEN_INT (offset1))),
2632 gen_realpart (submode, y));
2633 emit_move_insn (change_address (x, submode,
2634 gen_rtx_PLUS (Pmode,
2635 stack_pointer_rtx,
2636 GEN_INT (offset2))),
2637 gen_imagpart (submode, y));
2639 else
2640 #endif
2641 /* If this is a stack push, push the highpart first, so it
2642 will be in the argument order.
2644 In that case, change_address is used only to convert
2645 the mode, not to change the address. */
2646 if (stack)
2648 /* Note that the real part always precedes the imag part in memory
2649 regardless of machine's endianness. */
2650 #ifdef STACK_GROWS_DOWNWARD
2651 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2652 gen_imagpart (submode, y));
2653 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2654 gen_realpart (submode, y));
2655 #else
2656 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2657 gen_realpart (submode, y));
2658 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2659 gen_imagpart (submode, y));
2660 #endif
2662 else
2664 rtx realpart_x, realpart_y;
2665 rtx imagpart_x, imagpart_y;
2667 /* If this is a complex value with each part being smaller than a
2668 word, the usual calling sequence will likely pack the pieces into
2669 a single register. Unfortunately, SUBREG of hard registers only
2670 deals in terms of words, so we have a problem converting input
2671 arguments to the CONCAT of two registers that is used elsewhere
2672 for complex values. If this is before reload, we can copy it into
2673 memory and reload. FIXME, we should see about using extract and
2674 insert on integer registers, but complex short and complex char
2675 variables should be rarely used. */
2676 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2677 && (reload_in_progress | reload_completed) == 0)
2679 int packed_dest_p
2680 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2681 int packed_src_p
2682 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2684 if (packed_dest_p || packed_src_p)
2686 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2687 ? MODE_FLOAT : MODE_INT);
2689 enum machine_mode reg_mode
2690 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2692 if (reg_mode != BLKmode)
2694 rtx mem = assign_stack_temp (reg_mode,
2695 GET_MODE_SIZE (mode), 0);
2696 rtx cmem = adjust_address (mem, mode, 0);
2698 if (packed_dest_p)
2700 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2702 emit_move_insn_1 (cmem, y);
2703 return emit_move_insn_1 (sreg, mem);
2705 else
2707 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2709 emit_move_insn_1 (mem, sreg);
2710 return emit_move_insn_1 (x, cmem);
2716 realpart_x = gen_realpart (submode, x);
2717 realpart_y = gen_realpart (submode, y);
2718 imagpart_x = gen_imagpart (submode, x);
2719 imagpart_y = gen_imagpart (submode, y);
2721 /* Show the output dies here. This is necessary for SUBREGs
2722 of pseudos since we cannot track their lifetimes correctly;
2723 hard regs shouldn't appear here except as return values.
2724 We never want to emit such a clobber after reload. */
2725 if (x != y
2726 && ! (reload_in_progress || reload_completed)
2727 && (GET_CODE (realpart_x) == SUBREG
2728 || GET_CODE (imagpart_x) == SUBREG))
2729 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2731 emit_move_insn (realpart_x, realpart_y);
2732 emit_move_insn (imagpart_x, imagpart_y);
2735 return get_last_insn ();
2738 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2739 find a mode to do it in. If we have a movcc, use it. Otherwise,
2740 find the MODE_INT mode of the same width. */
2741 else if (GET_MODE_CLASS (mode) == MODE_CC
2742 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2744 enum insn_code insn_code;
2745 enum machine_mode tmode = VOIDmode;
2746 rtx x1 = x, y1 = y;
2748 if (mode != CCmode
2749 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2750 tmode = CCmode;
2751 else
2752 for (tmode = QImode; tmode != VOIDmode;
2753 tmode = GET_MODE_WIDER_MODE (tmode))
2754 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2755 break;
2757 if (tmode == VOIDmode)
2758 abort ();
2760 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2761 may call change_address which is not appropriate if we were
2762 called when a reload was in progress. We don't have to worry
2763 about changing the address since the size in bytes is supposed to
2764 be the same. Copy the MEM to change the mode and move any
2765 substitutions from the old MEM to the new one. */
2767 if (reload_in_progress)
2769 x = gen_lowpart_common (tmode, x1);
2770 if (x == 0 && MEM_P (x1))
2772 x = adjust_address_nv (x1, tmode, 0);
2773 copy_replacements (x1, x);
2776 y = gen_lowpart_common (tmode, y1);
2777 if (y == 0 && MEM_P (y1))
2779 y = adjust_address_nv (y1, tmode, 0);
2780 copy_replacements (y1, y);
2783 else
2785 x = gen_lowpart (tmode, x);
2786 y = gen_lowpart (tmode, y);
2789 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2790 return emit_insn (GEN_FCN (insn_code) (x, y));
2793 /* Try using a move pattern for the corresponding integer mode. This is
2794 only safe when simplify_subreg can convert MODE constants into integer
2795 constants. At present, it can only do this reliably if the value
2796 fits within a HOST_WIDE_INT. */
2797 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2798 && (submode = int_mode_for_mode (mode)) != BLKmode
2799 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2800 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2801 (simplify_gen_subreg (submode, x, mode, 0),
2802 simplify_gen_subreg (submode, y, mode, 0)));
2804 /* This will handle any multi-word or full-word mode that lacks a move_insn
2805 pattern. However, you will get better code if you define such patterns,
2806 even if they must turn into multiple assembler instructions. */
2807 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
2809 rtx last_insn = 0;
2810 rtx seq, inner;
2811 int need_clobber;
2812 int i;
2814 #ifdef PUSH_ROUNDING
2816 /* If X is a push on the stack, do the push now and replace
2817 X with a reference to the stack pointer. */
2818 if (push_operand (x, GET_MODE (x)))
2820 rtx temp;
2821 enum rtx_code code;
2823 /* Do not use anti_adjust_stack, since we don't want to update
2824 stack_pointer_delta. */
2825 temp = expand_binop (Pmode,
2826 #ifdef STACK_GROWS_DOWNWARD
2827 sub_optab,
2828 #else
2829 add_optab,
2830 #endif
2831 stack_pointer_rtx,
2832 GEN_INT
2833 (PUSH_ROUNDING
2834 (GET_MODE_SIZE (GET_MODE (x)))),
2835 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2837 if (temp != stack_pointer_rtx)
2838 emit_move_insn (stack_pointer_rtx, temp);
2840 code = GET_CODE (XEXP (x, 0));
2842 /* Just hope that small offsets off SP are OK. */
2843 if (code == POST_INC)
2844 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2845 GEN_INT (-((HOST_WIDE_INT)
2846 GET_MODE_SIZE (GET_MODE (x)))));
2847 else if (code == POST_DEC)
2848 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2849 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2850 else
2851 temp = stack_pointer_rtx;
2853 x = change_address (x, VOIDmode, temp);
2855 #endif
2857 /* If we are in reload, see if either operand is a MEM whose address
2858 is scheduled for replacement. */
2859 if (reload_in_progress && MEM_P (x)
2860 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2861 x = replace_equiv_address_nv (x, inner);
2862 if (reload_in_progress && MEM_P (y)
2863 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2864 y = replace_equiv_address_nv (y, inner);
2866 start_sequence ();
2868 need_clobber = 0;
2869 for (i = 0;
2870 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2871 i++)
2873 rtx xpart = operand_subword (x, i, 1, mode);
2874 rtx ypart = operand_subword (y, i, 1, mode);
2876 /* If we can't get a part of Y, put Y into memory if it is a
2877 constant. Otherwise, force it into a register. If we still
2878 can't get a part of Y, abort. */
2879 if (ypart == 0 && CONSTANT_P (y))
2881 y = force_const_mem (mode, y);
2882 ypart = operand_subword (y, i, 1, mode);
2884 else if (ypart == 0)
2885 ypart = operand_subword_force (y, i, mode);
2887 if (xpart == 0 || ypart == 0)
2888 abort ();
2890 need_clobber |= (GET_CODE (xpart) == SUBREG);
2892 last_insn = emit_move_insn (xpart, ypart);
2895 seq = get_insns ();
2896 end_sequence ();
2898 /* Show the output dies here. This is necessary for SUBREGs
2899 of pseudos since we cannot track their lifetimes correctly;
2900 hard regs shouldn't appear here except as return values.
2901 We never want to emit such a clobber after reload. */
2902 if (x != y
2903 && ! (reload_in_progress || reload_completed)
2904 && need_clobber != 0)
2905 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2907 emit_insn (seq);
2909 return last_insn;
2911 else
2912 abort ();
2915 /* If Y is representable exactly in a narrower mode, and the target can
2916 perform the extension directly from constant or memory, then emit the
2917 move as an extension. */
2919 static rtx
2920 compress_float_constant (rtx x, rtx y)
2922 enum machine_mode dstmode = GET_MODE (x);
2923 enum machine_mode orig_srcmode = GET_MODE (y);
2924 enum machine_mode srcmode;
2925 REAL_VALUE_TYPE r;
2927 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2929 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2930 srcmode != orig_srcmode;
2931 srcmode = GET_MODE_WIDER_MODE (srcmode))
2933 enum insn_code ic;
2934 rtx trunc_y, last_insn;
2936 /* Skip if the target can't extend this way. */
2937 ic = can_extend_p (dstmode, srcmode, 0);
2938 if (ic == CODE_FOR_nothing)
2939 continue;
2941 /* Skip if the narrowed value isn't exact. */
2942 if (! exact_real_truncate (srcmode, &r))
2943 continue;
2945 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2947 if (LEGITIMATE_CONSTANT_P (trunc_y))
2949 /* Skip if the target needs extra instructions to perform
2950 the extension. */
2951 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2952 continue;
2954 else if (float_extend_from_mem[dstmode][srcmode])
2955 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2956 else
2957 continue;
2959 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2960 last_insn = get_last_insn ();
2962 if (REG_P (x))
2963 set_unique_reg_note (last_insn, REG_EQUAL, y);
2965 return last_insn;
2968 return NULL_RTX;
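/* Worked example (hypothetical target that can extend SFmode directly from
   memory or a constant): the DFmode constant 1.5 is exactly representable
   in SFmode, so the loop above emits an SFmode constant load followed by a
   float extension.  A constant such as 0.1 is not exact in SFmode, so
   exact_real_truncate rejects it and the value is moved the ordinary
   way.  */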
2971 /* Pushing data onto the stack. */
2973 /* Push a block of length SIZE (perhaps variable)
2974 and return an rtx to address the beginning of the block.
2975 The value may be virtual_outgoing_args_rtx.
2977 EXTRA is the number of bytes of padding to push in addition to SIZE.
2978 BELOW nonzero means this padding comes at low addresses;
2979 otherwise, the padding comes at high addresses. */
2982 push_block (rtx size, int extra, int below)
2984 rtx temp;
2986 size = convert_modes (Pmode, ptr_mode, size, 1);
2987 if (CONSTANT_P (size))
2988 anti_adjust_stack (plus_constant (size, extra));
2989 else if (REG_P (size) && extra == 0)
2990 anti_adjust_stack (size);
2991 else
2993 temp = copy_to_mode_reg (Pmode, size);
2994 if (extra != 0)
2995 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2996 temp, 0, OPTAB_LIB_WIDEN);
2997 anti_adjust_stack (temp);
3000 #ifndef STACK_GROWS_DOWNWARD
3001 if (0)
3002 #else
3003 if (1)
3004 #endif
3006 temp = virtual_outgoing_args_rtx;
3007 if (extra != 0 && below)
3008 temp = plus_constant (temp, extra);
3010 else
3012 if (GET_CODE (size) == CONST_INT)
3013 temp = plus_constant (virtual_outgoing_args_rtx,
3014 -INTVAL (size) - (below ? 0 : extra));
3015 else if (extra != 0 && !below)
3016 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3017 negate_rtx (Pmode, plus_constant (size, extra)));
3018 else
3019 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3020 negate_rtx (Pmode, size));
3023 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
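/* Arithmetic sketch (hypothetical downward-growing stack): for SIZE == 24
   and EXTRA == 8 the code above adjusts the stack by 32 bytes in a single
   step.  With BELOW nonzero the returned address is
   virtual_outgoing_args_rtx + 8, leaving the 8 padding bytes below the
   block; otherwise it is virtual_outgoing_args_rtx and the padding ends
   up above the block.  */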
3026 #ifdef PUSH_ROUNDING
3028 /* Emit single push insn. */
3030 static void
3031 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3033 rtx dest_addr;
3034 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3035 rtx dest;
3036 enum insn_code icode;
3037 insn_operand_predicate_fn pred;
3039 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3040 /* If there is a push pattern, use it. Otherwise try the old way of handing
3041 a MEM representing the push operation to the move expander. */
3042 icode = push_optab->handlers[(int) mode].insn_code;
3043 if (icode != CODE_FOR_nothing)
3045 if (((pred = insn_data[(int) icode].operand[0].predicate)
3046 && !((*pred) (x, mode))))
3047 x = force_reg (mode, x);
3048 emit_insn (GEN_FCN (icode) (x));
3049 return;
3051 if (GET_MODE_SIZE (mode) == rounded_size)
3052 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3053 /* If we are to pad downward, adjust the stack pointer first and
3054 then store X into the stack location using an offset. This is
3055 because emit_move_insn does not know how to pad; it does not have
3056 access to type. */
3057 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3059 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3060 HOST_WIDE_INT offset;
3062 emit_move_insn (stack_pointer_rtx,
3063 expand_binop (Pmode,
3064 #ifdef STACK_GROWS_DOWNWARD
3065 sub_optab,
3066 #else
3067 add_optab,
3068 #endif
3069 stack_pointer_rtx,
3070 GEN_INT (rounded_size),
3071 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3073 offset = (HOST_WIDE_INT) padding_size;
3074 #ifdef STACK_GROWS_DOWNWARD
3075 if (STACK_PUSH_CODE == POST_DEC)
3076 /* We have already decremented the stack pointer, so get the
3077 previous value. */
3078 offset += (HOST_WIDE_INT) rounded_size;
3079 #else
3080 if (STACK_PUSH_CODE == POST_INC)
3081 /* We have already incremented the stack pointer, so get the
3082 previous value. */
3083 offset -= (HOST_WIDE_INT) rounded_size;
3084 #endif
3085 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3087 else
3089 #ifdef STACK_GROWS_DOWNWARD
3090 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3091 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3092 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3093 #else
3094 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3095 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3096 GEN_INT (rounded_size));
3097 #endif
3098 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3101 dest = gen_rtx_MEM (mode, dest_addr);
3103 if (type != 0)
3105 set_mem_attributes (dest, type, 1);
3107 if (flag_optimize_sibling_calls)
3108 /* Function incoming arguments may overlap with sibling call
3109 outgoing arguments and we cannot allow reordering of reads
3110 from function arguments with stores to outgoing arguments
3111 of sibling calls. */
3112 set_mem_alias_set (dest, 0);
3114 emit_move_insn (dest, x);
3116 #endif
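/* Worked example (hypothetical target, STACK_GROWS_DOWNWARD with
   STACK_PUSH_CODE == PRE_DEC): pushing an HImode argument that pads
   downward when PUSH_ROUNDING rounds 2 bytes up to 4 gives
   rounded_size == 4 and padding_size == 2; the code above first drops the
   stack pointer by 4 and then stores the value at sp + 2, so the two
   padding bytes occupy the lower addresses.  */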
3118 /* Generate code to push X onto the stack, assuming it has mode MODE and
3119 type TYPE.
3120 MODE is redundant except when X is a CONST_INT (since they don't
3121 carry mode info).
3122 SIZE is an rtx for the size of data to be copied (in bytes),
3123 needed only if X is BLKmode.
3125 ALIGN (in bits) is maximum alignment we can assume.
3127 If PARTIAL and REG are both nonzero, then copy that many of the first
3128 words of X into registers starting with REG, and push the rest of X.
3129 The amount of space pushed is decreased by PARTIAL words,
3130 rounded *down* to a multiple of PARM_BOUNDARY.
3131 REG must be a hard register in this case.
3132 If REG is zero but PARTIAL is not, take all other actions for an
3133 argument partially in registers, but do not actually load any
3134 registers.
3136 EXTRA is the amount in bytes of extra space to leave next to this arg.
3137 This is ignored if an argument block has already been allocated.
3139 On a machine that lacks real push insns, ARGS_ADDR is the address of
3140 the bottom of the argument block for this call. We use indexing off there
3141 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3142 argument block has not been preallocated.
3144 ARGS_SO_FAR is the size of args previously pushed for this call.
3146 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3147 for arguments passed in registers. If nonzero, it will be the number
3148 of bytes required. */
3150 void
3151 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3152 unsigned int align, int partial, rtx reg, int extra,
3153 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3154 rtx alignment_pad)
3156 rtx xinner;
3157 enum direction stack_direction
3158 #ifdef STACK_GROWS_DOWNWARD
3159 = downward;
3160 #else
3161 = upward;
3162 #endif
3164 /* Decide where to pad the argument: `downward' for below,
3165 `upward' for above, or `none' for don't pad it.
3166 Default is below for small data on big-endian machines; else above. */
3167 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3169 /* Invert direction if stack is post-decrement.
3170 FIXME: why? */
3171 if (STACK_PUSH_CODE == POST_DEC)
3172 if (where_pad != none)
3173 where_pad = (where_pad == downward ? upward : downward);
3175 xinner = x;
3177 if (mode == BLKmode)
3179 /* Copy a block into the stack, entirely or partially. */
3181 rtx temp;
3182 int used = partial * UNITS_PER_WORD;
3183 int offset;
3184 int skip;
3186 if (reg && GET_CODE (reg) == PARALLEL)
3188 /* Use the size of the elt to compute offset. */
3189 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3190 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3191 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3193 else
3194 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3196 if (size == 0)
3197 abort ();
3199 used -= offset;
3201 /* USED is now the # of bytes we need not copy to the stack
3202 because registers will take care of them. */
3204 if (partial != 0)
3205 xinner = adjust_address (xinner, BLKmode, used);
3207 /* If the partial register-part of the arg counts in its stack size,
3208 skip the part of stack space corresponding to the registers.
3209 Otherwise, start copying to the beginning of the stack space,
3210 by setting SKIP to 0. */
3211 skip = (reg_parm_stack_space == 0) ? 0 : used;
3213 #ifdef PUSH_ROUNDING
3214 /* Do it with several push insns if that doesn't take lots of insns
3215 and if there is no difficulty with push insns that skip bytes
3216 on the stack for alignment purposes. */
3217 if (args_addr == 0
3218 && PUSH_ARGS
3219 && GET_CODE (size) == CONST_INT
3220 && skip == 0
3221 && MEM_ALIGN (xinner) >= align
3222 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3223 /* Here we avoid the case of a structure whose weak alignment
3224 forces many pushes of a small amount of data,
3225 and such small pushes are rounded in ways that cause trouble. */
3226 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3227 || align >= BIGGEST_ALIGNMENT
3228 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3229 == (align / BITS_PER_UNIT)))
3230 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3232 /* Push padding now if padding above and stack grows down,
3233 or if padding below and stack grows up.
3234 But if space already allocated, this has already been done. */
3235 if (extra && args_addr == 0
3236 && where_pad != none && where_pad != stack_direction)
3237 anti_adjust_stack (GEN_INT (extra));
3239 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3241 else
3242 #endif /* PUSH_ROUNDING */
3244 rtx target;
3246 /* Otherwise make space on the stack and copy the data
3247 to the address of that space. */
3249 /* Deduct words put into registers from the size we must copy. */
3250 if (partial != 0)
3252 if (GET_CODE (size) == CONST_INT)
3253 size = GEN_INT (INTVAL (size) - used);
3254 else
3255 size = expand_binop (GET_MODE (size), sub_optab, size,
3256 GEN_INT (used), NULL_RTX, 0,
3257 OPTAB_LIB_WIDEN);
3260 /* Get the address of the stack space.
3261 In this case, we do not deal with EXTRA separately.
3262 A single stack adjust will do. */
3263 if (! args_addr)
3265 temp = push_block (size, extra, where_pad == downward);
3266 extra = 0;
3268 else if (GET_CODE (args_so_far) == CONST_INT)
3269 temp = memory_address (BLKmode,
3270 plus_constant (args_addr,
3271 skip + INTVAL (args_so_far)));
3272 else
3273 temp = memory_address (BLKmode,
3274 plus_constant (gen_rtx_PLUS (Pmode,
3275 args_addr,
3276 args_so_far),
3277 skip));
3279 if (!ACCUMULATE_OUTGOING_ARGS)
3281 /* If the source is referenced relative to the stack pointer,
3282 copy it to another register to stabilize it. We do not need
3283 to do this if we know that we won't be changing sp. */
3285 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3286 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3287 temp = copy_to_reg (temp);
3290 target = gen_rtx_MEM (BLKmode, temp);
3292 /* We do *not* set_mem_attributes here, because incoming arguments
3293 may overlap with sibling call outgoing arguments and we cannot
3294 allow reordering of reads from function arguments with stores
3295 to outgoing arguments of sibling calls. We do, however, want
3296 to record the alignment of the stack slot. */
3297 /* ALIGN may well be better aligned than TYPE, e.g. due to
3298 PARM_BOUNDARY. Assume the caller isn't lying. */
3299 set_mem_align (target, align);
3301 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3304 else if (partial > 0)
3306 /* Scalar partly in registers. */
3308 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3309 int i;
3310 int not_stack;
3311 /* # words of start of argument
3312 that we must make space for but need not store. */
3313 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3314 int args_offset = INTVAL (args_so_far);
3315 int skip;
3317 /* Push padding now if padding above and stack grows down,
3318 or if padding below and stack grows up.
3319 But if space already allocated, this has already been done. */
3320 if (extra && args_addr == 0
3321 && where_pad != none && where_pad != stack_direction)
3322 anti_adjust_stack (GEN_INT (extra));
3324 /* If we make space by pushing it, we might as well push
3325 the real data. Otherwise, we can leave OFFSET nonzero
3326 and leave the space uninitialized. */
3327 if (args_addr == 0)
3328 offset = 0;
3330 /* Now NOT_STACK gets the number of words that we don't need to
3331 allocate on the stack. */
3332 not_stack = partial - offset;
3334 /* If the partial register-part of the arg counts in its stack size,
3335 skip the part of stack space corresponding to the registers.
3336 Otherwise, start copying to the beginning of the stack space,
3337 by setting SKIP to 0. */
3338 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3340 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3341 x = validize_mem (force_const_mem (mode, x));
3343 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3344 SUBREGs of such registers are not allowed. */
3345 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3346 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3347 x = copy_to_reg (x);
3349 /* Loop over all the words allocated on the stack for this arg. */
3350 /* We can do it by words, because any scalar bigger than a word
3351 has a size that is a multiple of a word. */
3352 #ifndef PUSH_ARGS_REVERSED
3353 for (i = not_stack; i < size; i++)
3354 #else
3355 for (i = size - 1; i >= not_stack; i--)
3356 #endif
3357 if (i >= not_stack + offset)
3358 emit_push_insn (operand_subword_force (x, i, mode),
3359 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3360 0, args_addr,
3361 GEN_INT (args_offset + ((i - not_stack + skip)
3362 * UNITS_PER_WORD)),
3363 reg_parm_stack_space, alignment_pad);
3365 else
3367 rtx addr;
3368 rtx dest;
3370 /* Push padding now if padding above and stack grows down,
3371 or if padding below and stack grows up.
3372 But if space already allocated, this has already been done. */
3373 if (extra && args_addr == 0
3374 && where_pad != none && where_pad != stack_direction)
3375 anti_adjust_stack (GEN_INT (extra));
3377 #ifdef PUSH_ROUNDING
3378 if (args_addr == 0 && PUSH_ARGS)
3379 emit_single_push_insn (mode, x, type);
3380 else
3381 #endif
3383 if (GET_CODE (args_so_far) == CONST_INT)
3384 addr
3385 = memory_address (mode,
3386 plus_constant (args_addr,
3387 INTVAL (args_so_far)));
3388 else
3389 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3390 args_so_far));
3391 dest = gen_rtx_MEM (mode, addr);
3393 /* We do *not* set_mem_attributes here, because incoming arguments
3394 may overlap with sibling call outgoing arguments and we cannot
3395 allow reordering of reads from function arguments with stores
3396 to outgoing arguments of sibling calls. We do, however, want
3397 to record the alignment of the stack slot. */
3398 /* ALIGN may well be better aligned than TYPE, e.g. due to
3399 PARM_BOUNDARY. Assume the caller isn't lying. */
3400 set_mem_align (dest, align);
3402 emit_move_insn (dest, x);
3406 /* If part should go in registers, copy that part
3407 into the appropriate registers. Do this now, at the end,
3408 since mem-to-mem copies above may do function calls. */
3409 if (partial > 0 && reg != 0)
3411 /* Handle calls that pass values in multiple non-contiguous locations.
3412 The Irix 6 ABI has examples of this. */
3413 if (GET_CODE (reg) == PARALLEL)
3414 emit_group_load (reg, x, type, -1);
3415 else
3416 move_block_to_reg (REGNO (reg), x, partial, mode);
3419 if (extra && args_addr == 0 && where_pad == stack_direction)
3420 anti_adjust_stack (GEN_INT (extra));
3422 if (alignment_pad && args_addr == 0)
3423 anti_adjust_stack (alignment_pad);
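/* Illustrative example (hypothetical 32-bit target): for a 12-byte BLKmode
   argument with PARTIAL == 2, USED is 8, so only the last 4 bytes are
   copied to the stack and the first two words are loaded into registers at
   the end of this function.  If REG_PARM_STACK_SPACE is nonzero, the stack
   copy also skips the first 8 bytes of the argument's slot, since space is
   still reserved there for the register part.  */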
3426 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3427 operations. */
3429 static rtx
3430 get_subtarget (rtx x)
3432 return (optimize
3433 || x == 0
3434 /* Only registers can be subtargets. */
3435 || !REG_P (x)
3436 /* Don't use hard regs to avoid extending their life. */
3437 || REGNO (x) < FIRST_PSEUDO_REGISTER
3438 ? 0 : x);
3441 /* Expand an assignment that stores the value of FROM into TO.
3442 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3443 (If the value is constant, this rtx is a constant.)
3444 Otherwise, the returned value is NULL_RTX. */
3447 expand_assignment (tree to, tree from, int want_value)
3449 rtx to_rtx = 0;
3450 rtx result;
3452 /* Don't crash if the lhs of the assignment was erroneous. */
3454 if (TREE_CODE (to) == ERROR_MARK)
3456 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3457 return want_value ? result : NULL_RTX;
3460 /* Assignment of a structure component needs special treatment
3461 if the structure component's rtx is not simply a MEM.
3462 Assignment of an array element at a constant index, and assignment of
3463 an array element in an unaligned packed structure field, have the same
3464 problem. */
3466 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3467 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3468 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3470 enum machine_mode mode1;
3471 HOST_WIDE_INT bitsize, bitpos;
3472 rtx orig_to_rtx;
3473 tree offset;
3474 int unsignedp;
3475 int volatilep = 0;
3476 tree tem;
3478 push_temp_slots ();
3479 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3480 &unsignedp, &volatilep);
3482 /* If we are going to use store_bit_field and extract_bit_field,
3483 make sure to_rtx will be safe for multiple use. */
3485 if (mode1 == VOIDmode && want_value)
3486 tem = stabilize_reference (tem);
3488 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3490 if (offset != 0)
3492 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3494 if (!MEM_P (to_rtx))
3495 abort ();
3497 #ifdef POINTERS_EXTEND_UNSIGNED
3498 if (GET_MODE (offset_rtx) != Pmode)
3499 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3500 #else
3501 if (GET_MODE (offset_rtx) != ptr_mode)
3502 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3503 #endif
3505 /* A constant address in TO_RTX can have VOIDmode; we must not try
3506 to call force_reg in that case, so avoid it. */
3507 if (MEM_P (to_rtx)
3508 && GET_MODE (to_rtx) == BLKmode
3509 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3510 && bitsize > 0
3511 && (bitpos % bitsize) == 0
3512 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3513 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3515 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3516 bitpos = 0;
3519 to_rtx = offset_address (to_rtx, offset_rtx,
3520 highest_pow2_factor_for_target (to,
3521 offset));
3524 if (MEM_P (to_rtx))
3526 /* If the field is at offset zero, we could have been given the
3527 DECL_RTX of the parent struct. Don't munge it. */
3528 to_rtx = shallow_copy_rtx (to_rtx);
3530 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3533 /* Deal with volatile and readonly fields. The former is only done
3534 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3535 if (volatilep && MEM_P (to_rtx))
3537 if (to_rtx == orig_to_rtx)
3538 to_rtx = copy_rtx (to_rtx);
3539 MEM_VOLATILE_P (to_rtx) = 1;
3542 if (MEM_P (to_rtx) && ! can_address_p (to))
3544 if (to_rtx == orig_to_rtx)
3545 to_rtx = copy_rtx (to_rtx);
3546 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3549 /* Optimize bitfld op= val in certain cases. */
3550 while (mode1 == VOIDmode && !want_value
3551 && bitsize > 0 && bitsize < BITS_PER_WORD
3552 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3553 && !TREE_SIDE_EFFECTS (to)
3554 && !TREE_THIS_VOLATILE (to))
3556 tree src, op0, op1;
3557 rtx value, str_rtx = to_rtx;
3558 HOST_WIDE_INT bitpos1 = bitpos;
3559 optab binop;
3561 src = from;
3562 STRIP_NOPS (src);
3563 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3564 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3565 break;
3567 op0 = TREE_OPERAND (src, 0);
3568 op1 = TREE_OPERAND (src, 1);
3569 STRIP_NOPS (op0);
3571 if (! operand_equal_p (to, op0, 0))
3572 break;
3574 if (MEM_P (str_rtx))
3576 enum machine_mode mode = GET_MODE (str_rtx);
3577 HOST_WIDE_INT offset1;
3579 if (GET_MODE_BITSIZE (mode) == 0
3580 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3581 mode = word_mode;
3582 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3583 mode, 0);
3584 if (mode == VOIDmode)
3585 break;
3587 offset1 = bitpos1;
3588 bitpos1 %= GET_MODE_BITSIZE (mode);
3589 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3590 str_rtx = adjust_address (str_rtx, mode, offset1);
3592 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3593 break;
3595 /* If the bit field covers the whole REG/MEM, store_field
3596 will likely generate better code. */
3597 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3598 break;
3600 /* We can't handle fields split across multiple entities. */
3601 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3602 break;
3604 if (BYTES_BIG_ENDIAN)
3605 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3606 - bitsize;
3608 /* Special case some bitfield op= exp. */
3609 switch (TREE_CODE (src))
3611 case PLUS_EXPR:
3612 case MINUS_EXPR:
3613 /* For now, just optimize the case of the topmost bitfield,
3614 where we don't need to do any masking, and also
3615 1-bit bitfields, where xor can be used.
3616 We might win by one instruction for the other bitfields
3617 too if insv/extv instructions aren't used, so that
3618 can be added later. */
3619 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3620 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3621 break;
3622 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3623 value = convert_modes (GET_MODE (str_rtx),
3624 TYPE_MODE (TREE_TYPE (op1)), value,
3625 TYPE_UNSIGNED (TREE_TYPE (op1)));
3627 /* We may be accessing data outside the field, which means
3628 we can alias adjacent data. */
3629 if (MEM_P (str_rtx))
3631 str_rtx = shallow_copy_rtx (str_rtx);
3632 set_mem_alias_set (str_rtx, 0);
3633 set_mem_expr (str_rtx, 0);
3636 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3637 if (bitsize == 1
3638 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3640 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3641 NULL_RTX);
3642 binop = xor_optab;
3644 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3645 build_int_cst (NULL_TREE, bitpos1, 0),
3646 NULL_RTX, 1);
3647 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3648 value, str_rtx, 1, OPTAB_WIDEN);
3649 if (result != str_rtx)
3650 emit_move_insn (str_rtx, result);
3651 free_temp_slots ();
3652 pop_temp_slots ();
3653 return NULL_RTX;
3655 default:
3656 break;
3659 break;
3662 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3663 (want_value
3664 /* Spurious cast for HPUX compiler. */
3665 ? ((enum machine_mode)
3666 TYPE_MODE (TREE_TYPE (to)))
3667 : VOIDmode),
3668 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3670 preserve_temp_slots (result);
3671 free_temp_slots ();
3672 pop_temp_slots ();
3674 /* If the value is meaningful, convert RESULT to the proper mode.
3675 Otherwise, return nothing. */
3676 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3677 TYPE_MODE (TREE_TYPE (from)),
3678 result,
3679 TYPE_UNSIGNED (TREE_TYPE (to)))
3680 : NULL_RTX);
3683 /* If the rhs is a function call and its value is not an aggregate,
3684 call the function before we start to compute the lhs.
3685 This is needed for correct code for cases such as
3686 val = setjmp (buf) on machines where reference to val
3687 requires loading up part of an address in a separate insn.
3689 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3690 since it might be a promoted variable where the zero- or sign- extension
3691 needs to be done. Handling this in the normal way is safe because no
3692 computation is done before the call. */
3693 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3694 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3695 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3696 && REG_P (DECL_RTL (to))))
3698 rtx value;
3700 push_temp_slots ();
3701 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3702 if (to_rtx == 0)
3703 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3705 /* Handle calls that return values in multiple non-contiguous locations.
3706 The Irix 6 ABI has examples of this. */
3707 if (GET_CODE (to_rtx) == PARALLEL)
3708 emit_group_load (to_rtx, value, TREE_TYPE (from),
3709 int_size_in_bytes (TREE_TYPE (from)));
3710 else if (GET_MODE (to_rtx) == BLKmode)
3711 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3712 else
3714 if (POINTER_TYPE_P (TREE_TYPE (to)))
3715 value = convert_memory_address (GET_MODE (to_rtx), value);
3716 emit_move_insn (to_rtx, value);
3718 preserve_temp_slots (to_rtx);
3719 free_temp_slots ();
3720 pop_temp_slots ();
3721 return want_value ? to_rtx : NULL_RTX;
3724 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3725 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3727 if (to_rtx == 0)
3728 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3730 /* Don't move directly into a return register. */
3731 if (TREE_CODE (to) == RESULT_DECL
3732 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3734 rtx temp;
3736 push_temp_slots ();
3737 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3739 if (GET_CODE (to_rtx) == PARALLEL)
3740 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3741 int_size_in_bytes (TREE_TYPE (from)));
3742 else
3743 emit_move_insn (to_rtx, temp);
3745 preserve_temp_slots (to_rtx);
3746 free_temp_slots ();
3747 pop_temp_slots ();
3748 return want_value ? to_rtx : NULL_RTX;
3751 /* In case we are returning the contents of an object which overlaps
3752 the place the value is being stored, use a safe function when copying
3753 a value through a pointer into a structure value return block. */
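/* Editorial example (not in the original source): a function such as

     struct S { int a[32]; };
     struct S f (struct S *p) { return *p; }

   returns S in memory, and *p may overlap the return slot, so the
   copy below goes through memmove rather than memcpy.  */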
3754 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3755 && current_function_returns_struct
3756 && !current_function_returns_pcc_struct)
3758 rtx from_rtx, size;
3760 push_temp_slots ();
3761 size = expr_size (from);
3762 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3764 emit_library_call (memmove_libfunc, LCT_NORMAL,
3765 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3766 XEXP (from_rtx, 0), Pmode,
3767 convert_to_mode (TYPE_MODE (sizetype),
3768 size, TYPE_UNSIGNED (sizetype)),
3769 TYPE_MODE (sizetype));
3771 preserve_temp_slots (to_rtx);
3772 free_temp_slots ();
3773 pop_temp_slots ();
3774 return want_value ? to_rtx : NULL_RTX;
3777 /* Compute FROM and store the value in the rtx we got. */
3779 push_temp_slots ();
3780 result = store_expr (from, to_rtx, want_value);
3781 preserve_temp_slots (result);
3782 free_temp_slots ();
3783 pop_temp_slots ();
3784 return want_value ? result : NULL_RTX;
3787 /* Generate code for computing expression EXP,
3788 and storing the value into TARGET.
3790 If WANT_VALUE & 1 is nonzero, return a copy of the value
3791 not in TARGET, so that we can be sure to use the proper
3792 value in a containing expression even if TARGET has something
3793 else stored in it. If possible, we copy the value through a pseudo
3794 and return that pseudo. Or, if the value is constant, we try to
3795 return the constant. In some cases, we return a pseudo
3796 copied *from* TARGET.
3798 If the mode is BLKmode then we may return TARGET itself.
3799 It turns out that in BLKmode it doesn't cause a problem,
3800 because C has no operators that could combine two different
3801 assignments into the same BLKmode object with different values
3802 with no sequence point. Will other languages need this to
3803 be more thorough?
3805 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3806 to catch quickly any cases where the caller uses the value
3807 and fails to set WANT_VALUE.
3809 If WANT_VALUE & 2 is set, this is a store into a call param on the
3810 stack, and block moves may need to be treated specially. */
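/* Editorial sketch (not in the original source): a caller that needs
   the stored value back, outside a call-argument context, would use

     rtx val = store_expr (rhs, target, 1);

   while expanding a store into a stack call parameter would pass
   WANT_VALUE == 2 so that block moves use BLOCK_OP_CALL_PARM.  */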
3812 rtx
3813 store_expr (tree exp, rtx target, int want_value)
3815 rtx temp;
3816 rtx alt_rtl = NULL_RTX;
3817 int dont_return_target = 0;
3818 int dont_store_target = 0;
3820 if (VOID_TYPE_P (TREE_TYPE (exp)))
3822 /* C++ can generate ?: expressions with a throw expression in one
3823 branch and an rvalue in the other. Here, we resolve attempts to
3824 store the throw expression's nonexistent result. */
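/* Editorial example (not in the original source): in C++,

     x = cond ? throw 0 : 1;

   gives the throw arm void type, so there is no result to store and
   the expression is expanded only for its side effects.  */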
3825 if (want_value)
3826 abort ();
3827 expand_expr (exp, const0_rtx, VOIDmode, 0);
3828 return NULL_RTX;
3830 if (TREE_CODE (exp) == COMPOUND_EXPR)
3832 /* Perform first part of compound expression, then assign from second
3833 part. */
3834 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3835 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3836 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3838 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3840 /* For conditional expression, get safe form of the target. Then
3841 test the condition, doing the appropriate assignment on either
3842 side. This avoids the creation of unnecessary temporaries.
3843 For non-BLKmode, it is more efficient not to do this. */
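/* Editorial sketch (not in the original source) of the code emitted
   below:

       if (!cond) goto lab1;
       <store operand 1 into target>;  goto lab2;
     lab1:
       <store operand 2 into target>;
     lab2:  */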
3845 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3847 do_pending_stack_adjust ();
3848 NO_DEFER_POP;
3849 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3850 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
3851 emit_jump_insn (gen_jump (lab2));
3852 emit_barrier ();
3853 emit_label (lab1);
3854 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
3855 emit_label (lab2);
3856 OK_DEFER_POP;
3858 return want_value & 1 ? target : NULL_RTX;
3860 else if ((want_value & 1) != 0
3861 && MEM_P (target)
3862 && ! MEM_VOLATILE_P (target)
3863 && GET_MODE (target) != BLKmode)
3864 /* If target is in memory and caller wants value in a register instead,
3865 arrange that. Pass TARGET as target for expand_expr so that,
3866 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3867 We know expand_expr will not use the target in that case.
3868 Don't do this if TARGET is volatile because we are supposed
3869 to write it and then read it. */
3871 temp = expand_expr (exp, target, GET_MODE (target),
3872 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3873 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3875 /* If TEMP is already in the desired TARGET, only copy it from
3876 memory and don't store it there again. */
3877 if (temp == target
3878 || (rtx_equal_p (temp, target)
3879 && ! side_effects_p (temp) && ! side_effects_p (target)))
3880 dont_store_target = 1;
3881 temp = copy_to_reg (temp);
3883 dont_return_target = 1;
3885 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3886 /* If this is a scalar in a register that is stored in a wider mode
3887 than the declared mode, compute the result into its declared mode
3888 and then convert to the wider mode. Our value is the computed
3889 expression. */
3891 rtx inner_target = 0;
3893 /* If we don't want a value, we can do the conversion inside EXP,
3894 which will often result in some optimizations. Do the conversion
3895 in two steps: first change the signedness, if needed, then
3896 the extend. But don't do this if the type of EXP is a subtype
3897 of something else since then the conversion might involve
3898 more than just converting modes. */
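/* Editorial example (not in the original source): on a target whose
   PROMOTE_MODE keeps QImode locals in SImode registers,

     signed char c;  ...  c = x + 1;

   computes the value in the declared QImode, and convert_move below
   then sign- or zero-extends it into the wider register copy.  */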
3899 if ((want_value & 1) == 0
3900 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3901 && TREE_TYPE (TREE_TYPE (exp)) == 0
3902 && (!lang_hooks.reduce_bit_field_operations
3903 || (GET_MODE_PRECISION (GET_MODE (target))
3904 == TYPE_PRECISION (TREE_TYPE (exp)))))
3906 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3907 != SUBREG_PROMOTED_UNSIGNED_P (target))
3908 exp = convert
3909 (lang_hooks.types.signed_or_unsigned_type
3910 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3912 exp = convert (lang_hooks.types.type_for_mode
3913 (GET_MODE (SUBREG_REG (target)),
3914 SUBREG_PROMOTED_UNSIGNED_P (target)),
3915 exp);
3917 inner_target = SUBREG_REG (target);
3920 temp = expand_expr (exp, inner_target, VOIDmode,
3921 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3923 /* If TEMP is a MEM and we want a result value, make the access
3924 now so it gets done only once. Strictly speaking, this is
3925 only necessary if the MEM is volatile, or if the address
3926 overlaps TARGET. But not performing the load twice also
3927 reduces the amount of rtl we generate and then have to CSE. */
3928 if (MEM_P (temp) && (want_value & 1) != 0)
3929 temp = copy_to_reg (temp);
3931 /* If TEMP is a VOIDmode constant, use convert_modes to make
3932 sure that we properly convert it. */
3933 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3935 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3936 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3937 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3938 GET_MODE (target), temp,
3939 SUBREG_PROMOTED_UNSIGNED_P (target));
3942 convert_move (SUBREG_REG (target), temp,
3943 SUBREG_PROMOTED_UNSIGNED_P (target));
3945 /* If we promoted a constant, change the mode back down to match
3946 target. Otherwise, the caller might get confused by a result whose
3947 mode is larger than expected. */
3949 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3951 if (GET_MODE (temp) != VOIDmode)
3953 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3954 SUBREG_PROMOTED_VAR_P (temp) = 1;
3955 SUBREG_PROMOTED_UNSIGNED_SET (temp,
3956 SUBREG_PROMOTED_UNSIGNED_P (target));
3958 else
3959 temp = convert_modes (GET_MODE (target),
3960 GET_MODE (SUBREG_REG (target)),
3961 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3964 return want_value & 1 ? temp : NULL_RTX;
3966 else
3968 temp = expand_expr_real (exp, target, GET_MODE (target),
3969 (want_value & 2
3970 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3971 &alt_rtl);
3972 /* Return TARGET if it's a specified hardware register.
3973 If TARGET is a volatile mem ref, either return TARGET
3974 or return a reg copied *from* TARGET; ANSI requires this.
3976 Otherwise, if TEMP is not TARGET, return TEMP
3977 if it is constant (for efficiency),
3978 or if we really want the correct value. */
3979 if (!(target && REG_P (target)
3980 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3981 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3982 && ! rtx_equal_p (temp, target)
3983 && (CONSTANT_P (temp) || (want_value & 1) != 0))
3984 dont_return_target = 1;
3987 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3988 the same as that of TARGET, adjust the constant. This is needed, for
3989 example, in case it is a CONST_DOUBLE and we want only a word-sized
3990 value. */
3991 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3992 && TREE_CODE (exp) != ERROR_MARK
3993 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3994 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3995 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3997 /* If value was not generated in the target, store it there.
3998 Convert the value to TARGET's type first if necessary and emit the
3999 pending incrementations that have been queued when expanding EXP.
4000 Note that we cannot emit the whole queue blindly because this will
4001 effectively disable the POST_INC optimization later.
4003 If TEMP and TARGET compare equal according to rtx_equal_p, but
4004 one or both of them are volatile memory refs, we have to distinguish
4005 two cases:
4006 - expand_expr has used TARGET. In this case, we must not generate
4007 another copy. This can be detected by TARGET being equal according
4008 to == .
4009 - expand_expr has not used TARGET - that means that the source just
4010 happens to have the same RTX form. Since temp will have been created
4011 by expand_expr, it will compare unequal according to == .
4012 We must generate a copy in this case, to reach the correct number
4013 of volatile memory references. */
4015 if ((! rtx_equal_p (temp, target)
4016 || (temp != target && (side_effects_p (temp)
4017 || side_effects_p (target))))
4018 && TREE_CODE (exp) != ERROR_MARK
4019 && ! dont_store_target
4020 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4021 but TARGET is not valid memory reference, TEMP will differ
4022 from TARGET although it is really the same location. */
4023 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4024 /* If there's nothing to copy, don't bother. Don't call expr_size
4025 unless necessary, because the expr_size hook of some front ends
4026 (C++) aborts on objects that are not supposed to be bit-copied or
4027 bit-initialized. */
4028 && expr_size (exp) != const0_rtx)
4030 if (GET_MODE (temp) != GET_MODE (target)
4031 && GET_MODE (temp) != VOIDmode)
4033 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4034 if (dont_return_target)
4036 /* In this case, we will return TEMP,
4037 so make sure it has the proper mode.
4038 But don't forget to store the value into TARGET. */
4039 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4040 emit_move_insn (target, temp);
4042 else
4043 convert_move (target, temp, unsignedp);
4046 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4048 /* Handle copying a string constant into an array. The string
4049 constant may be shorter than the array. So copy just the string's
4050 actual length, and clear the rest. First get the size of the data
4051 type of the string, which is actually the size of the target. */
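/* Editorial example (not in the original source):

     char buf[8] = "hi";

   copies the three bytes of the string (including the terminating
   NUL) and then clears the remaining five bytes of buf.  */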
4052 rtx size = expr_size (exp);
4054 if (GET_CODE (size) == CONST_INT
4055 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4056 emit_block_move (target, temp, size,
4057 (want_value & 2
4058 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4059 else
4061 /* Compute the size of the data to copy from the string. */
4062 tree copy_size
4063 = size_binop (MIN_EXPR,
4064 make_tree (sizetype, size),
4065 size_int (TREE_STRING_LENGTH (exp)));
4066 rtx copy_size_rtx
4067 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4068 (want_value & 2
4069 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4070 rtx label = 0;
4072 /* Copy that much. */
4073 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4074 TYPE_UNSIGNED (sizetype));
4075 emit_block_move (target, temp, copy_size_rtx,
4076 (want_value & 2
4077 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4079 /* Figure out how much is left in TARGET that we have to clear.
4080 Do all calculations in ptr_mode. */
4081 if (GET_CODE (copy_size_rtx) == CONST_INT)
4083 size = plus_constant (size, -INTVAL (copy_size_rtx));
4084 target = adjust_address (target, BLKmode,
4085 INTVAL (copy_size_rtx));
4087 else
4089 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4090 copy_size_rtx, NULL_RTX, 0,
4091 OPTAB_LIB_WIDEN);
4093 #ifdef POINTERS_EXTEND_UNSIGNED
4094 if (GET_MODE (copy_size_rtx) != Pmode)
4095 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4096 TYPE_UNSIGNED (sizetype));
4097 #endif
4099 target = offset_address (target, copy_size_rtx,
4100 highest_pow2_factor (copy_size));
4101 label = gen_label_rtx ();
4102 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4103 GET_MODE (size), 0, label);
4106 if (size != const0_rtx)
4107 clear_storage (target, size);
4109 if (label)
4110 emit_label (label);
4113 /* Handle calls that return values in multiple non-contiguous locations.
4114 The Irix 6 ABI has examples of this. */
4115 else if (GET_CODE (target) == PARALLEL)
4116 emit_group_load (target, temp, TREE_TYPE (exp),
4117 int_size_in_bytes (TREE_TYPE (exp)));
4118 else if (GET_MODE (temp) == BLKmode)
4119 emit_block_move (target, temp, expr_size (exp),
4120 (want_value & 2
4121 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4122 else
4124 temp = force_operand (temp, target);
4125 if (temp != target)
4126 emit_move_insn (target, temp);
4130 /* If we don't want a value, return NULL_RTX. */
4131 if ((want_value & 1) == 0)
4132 return NULL_RTX;
4134 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4135 ??? The latter test doesn't seem to make sense. */
4136 else if (dont_return_target && !MEM_P (temp))
4137 return temp;
4139 /* Return TARGET itself if it is a hard register. */
4140 else if ((want_value & 1) != 0
4141 && GET_MODE (target) != BLKmode
4142 && ! (REG_P (target)
4143 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4144 return copy_to_reg (target);
4146 else
4147 return target;
4150 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4151 values and place that count in *P_NZ_ELTS. Discover how many scalar
4152 fields are set to non-constant values and place that count in *P_NC_ELTS. */
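/* Editorial example (not in the original source): for a constructor
   such as  { 0, 5, f () }  initializing three int fields, *P_NZ_ELTS
   would be 2 (the 5 and the call) and *P_NC_ELTS would be 1 (the
   call, whose value is not constant).  */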
4154 static void
4155 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4156 HOST_WIDE_INT *p_nc_elts)
4158 HOST_WIDE_INT nz_elts, nc_elts;
4159 tree list;
4161 nz_elts = 0;
4162 nc_elts = 0;
4164 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4166 tree value = TREE_VALUE (list);
4167 tree purpose = TREE_PURPOSE (list);
4168 HOST_WIDE_INT mult;
4170 mult = 1;
4171 if (TREE_CODE (purpose) == RANGE_EXPR)
4173 tree lo_index = TREE_OPERAND (purpose, 0);
4174 tree hi_index = TREE_OPERAND (purpose, 1);
4176 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4177 mult = (tree_low_cst (hi_index, 1)
4178 - tree_low_cst (lo_index, 1) + 1);
4181 switch (TREE_CODE (value))
4183 case CONSTRUCTOR:
4185 HOST_WIDE_INT nz = 0, nc = 0;
4186 categorize_ctor_elements_1 (value, &nz, &nc);
4187 nz_elts += mult * nz;
4188 nc_elts += mult * nc;
4190 break;
4192 case INTEGER_CST:
4193 case REAL_CST:
4194 if (!initializer_zerop (value))
4195 nz_elts += mult;
4196 break;
4197 case COMPLEX_CST:
4198 if (!initializer_zerop (TREE_REALPART (value)))
4199 nz_elts += mult;
4200 if (!initializer_zerop (TREE_IMAGPART (value)))
4201 nz_elts += mult;
4202 break;
4203 case VECTOR_CST:
4205 tree v;
4206 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4207 if (!initializer_zerop (TREE_VALUE (v)))
4208 nz_elts += mult;
4210 break;
4212 default:
4213 nz_elts += mult;
4214 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4215 nc_elts += mult;
4216 break;
4220 *p_nz_elts += nz_elts;
4221 *p_nc_elts += nc_elts;
4224 void
4225 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4226 HOST_WIDE_INT *p_nc_elts)
4228 *p_nz_elts = 0;
4229 *p_nc_elts = 0;
4230 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4233 /* Count the number of scalars in TYPE. Return -1 if the count
4234 overflows or TYPE is variable-sized. */
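/* Editorial example (not in the original source):

     struct { int a[4]; _Complex double z; }

   counts 4 + 2 = 6 scalars, while a variable-length array type
   yields -1.  */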
4236 HOST_WIDE_INT
4237 count_type_elements (tree type)
4239 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4240 switch (TREE_CODE (type))
4242 case ARRAY_TYPE:
4244 tree telts = array_type_nelts (type);
4245 if (telts && host_integerp (telts, 1))
4247 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4248 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4249 if (n == 0)
4250 return 0;
4251 else if (max / n > m)
4252 return n * m;
4254 return -1;
4257 case RECORD_TYPE:
4259 HOST_WIDE_INT n = 0, t;
4260 tree f;
4262 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4263 if (TREE_CODE (f) == FIELD_DECL)
4265 t = count_type_elements (TREE_TYPE (f));
4266 if (t < 0)
4267 return -1;
4268 n += t;
4271 return n;
4274 case UNION_TYPE:
4275 case QUAL_UNION_TYPE:
4277 /* Ho hum. How in the world do we guess here? Clearly it isn't
4278 right to count the fields. Guess based on the number of words. */
4279 HOST_WIDE_INT n = int_size_in_bytes (type);
4280 if (n < 0)
4281 return -1;
4282 return n / UNITS_PER_WORD;
4285 case COMPLEX_TYPE:
4286 return 2;
4288 case VECTOR_TYPE:
4289 return TYPE_VECTOR_SUBPARTS (type);
4291 case INTEGER_TYPE:
4292 case REAL_TYPE:
4293 case ENUMERAL_TYPE:
4294 case BOOLEAN_TYPE:
4295 case CHAR_TYPE:
4296 case POINTER_TYPE:
4297 case OFFSET_TYPE:
4298 case REFERENCE_TYPE:
4299 return 1;
4301 case VOID_TYPE:
4302 case METHOD_TYPE:
4303 case FILE_TYPE:
4304 case SET_TYPE:
4305 case FUNCTION_TYPE:
4306 case LANG_TYPE:
4307 default:
4308 abort ();
4312 /* Return 1 if EXP contains mostly (3/4) zeros. */
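/* Editorial example (not in the original source): the constructor for

     int v[8] = { 0, 0, 0, 0, 0, 0, 0, 1 };

   has one nonzero element out of eight, which is below the 1/4
   threshold, so mostly_zeros_p returns 1.  */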
4314 static int
4315 mostly_zeros_p (tree exp)
4317 if (TREE_CODE (exp) == CONSTRUCTOR)
4320 HOST_WIDE_INT nz_elts, nc_elts, elts;
4322 /* If there are no ranges of true bits, it is all zero. */
4323 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4324 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4326 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4327 elts = count_type_elements (TREE_TYPE (exp));
4329 return nz_elts < elts / 4;
4332 return initializer_zerop (exp);
4335 /* Helper function for store_constructor.
4336 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4337 TYPE is the type of the CONSTRUCTOR, not the element type.
4338 CLEARED is as for store_constructor.
4339 ALIAS_SET is the alias set to use for any stores.
4341 This provides a recursive shortcut back to store_constructor when it isn't
4342 necessary to go through store_field. This is so that we can pass through
4343 the cleared field to let store_constructor know that we may not have to
4344 clear a substructure if the outer structure has already been cleared. */
4346 static void
4347 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4348 HOST_WIDE_INT bitpos, enum machine_mode mode,
4349 tree exp, tree type, int cleared, int alias_set)
4351 if (TREE_CODE (exp) == CONSTRUCTOR
4352 /* We can only call store_constructor recursively if the size and
4353 bit position are on a byte boundary. */
4354 && bitpos % BITS_PER_UNIT == 0
4355 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4356 /* If we have a nonzero bitpos for a register target, then we just
4357 let store_field do the bitfield handling. This is unlikely to
4358 generate unnecessary clear instructions anyways. */
4359 && (bitpos == 0 || MEM_P (target)))
4361 if (MEM_P (target))
4362 target
4363 = adjust_address (target,
4364 GET_MODE (target) == BLKmode
4365 || 0 != (bitpos
4366 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4367 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4370 /* Update the alias set, if required. */
4371 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4372 && MEM_ALIAS_SET (target) != 0)
4374 target = copy_rtx (target);
4375 set_mem_alias_set (target, alias_set);
4378 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4380 else
4381 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4382 alias_set);
4385 /* Store the value of constructor EXP into the rtx TARGET.
4386 TARGET is either a REG or a MEM; we know it cannot conflict, since
4387 safe_from_p has been called.
4388 CLEARED is true if TARGET is known to have been zero'd.
4389 SIZE is the number of bytes of TARGET we are allowed to modify: this
4390 may not be the same as the size of EXP if we are assigning to a field
4391 which has been packed to exclude padding bits. */
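/* Editorial example (not in the original source): for

     struct { int a, b, c, d; } s = { .b = 1 };

   the constructor names fewer fields than the structure has, so the
   code below clears the whole object first and then stores only b.  */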
4393 static void
4394 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4396 tree type = TREE_TYPE (exp);
4397 #ifdef WORD_REGISTER_OPERATIONS
4398 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4399 #endif
4401 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4402 || TREE_CODE (type) == QUAL_UNION_TYPE)
4404 tree elt;
4406 /* If size is zero or the target is already cleared, do nothing. */
4407 if (size == 0 || cleared)
4408 cleared = 1;
4409 /* We either clear the aggregate or indicate the value is dead. */
4410 else if ((TREE_CODE (type) == UNION_TYPE
4411 || TREE_CODE (type) == QUAL_UNION_TYPE)
4412 && ! CONSTRUCTOR_ELTS (exp))
4413 /* If the constructor is empty, clear the union. */
4415 clear_storage (target, expr_size (exp));
4416 cleared = 1;
4419 /* If we are building a static constructor into a register,
4420 set the initial value as zero so we can fold the value into
4421 a constant. But if more than one register is involved,
4422 this probably loses. */
4423 else if (REG_P (target) && TREE_STATIC (exp)
4424 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4426 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4427 cleared = 1;
4430 /* If the constructor has fewer fields than the structure
4431 or if we are initializing the structure to mostly zeros,
4432 clear the whole structure first. Don't do this if TARGET is a
4433 register whose mode size isn't equal to SIZE since clear_storage
4434 can't handle this case. */
4435 else if (size > 0
4436 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4437 || mostly_zeros_p (exp))
4438 && (!REG_P (target)
4439 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4440 == size)))
4442 clear_storage (target, GEN_INT (size));
4443 cleared = 1;
4446 if (! cleared)
4447 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4449 /* Store each element of the constructor into
4450 the corresponding field of TARGET. */
4452 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4454 tree field = TREE_PURPOSE (elt);
4455 tree value = TREE_VALUE (elt);
4456 enum machine_mode mode;
4457 HOST_WIDE_INT bitsize;
4458 HOST_WIDE_INT bitpos = 0;
4459 tree offset;
4460 rtx to_rtx = target;
4462 /* Just ignore missing fields.
4463 We cleared the whole structure, above,
4464 if any fields are missing. */
4465 if (field == 0)
4466 continue;
4468 if (cleared && initializer_zerop (value))
4469 continue;
4471 if (host_integerp (DECL_SIZE (field), 1))
4472 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4473 else
4474 bitsize = -1;
4476 mode = DECL_MODE (field);
4477 if (DECL_BIT_FIELD (field))
4478 mode = VOIDmode;
4480 offset = DECL_FIELD_OFFSET (field);
4481 if (host_integerp (offset, 0)
4482 && host_integerp (bit_position (field), 0))
4484 bitpos = int_bit_position (field);
4485 offset = 0;
4487 else
4488 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4490 if (offset)
4492 rtx offset_rtx;
4494 offset
4495 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4496 make_tree (TREE_TYPE (exp),
4497 target));
4499 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4500 if (!MEM_P (to_rtx))
4501 abort ();
4503 #ifdef POINTERS_EXTEND_UNSIGNED
4504 if (GET_MODE (offset_rtx) != Pmode)
4505 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4506 #else
4507 if (GET_MODE (offset_rtx) != ptr_mode)
4508 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4509 #endif
4511 to_rtx = offset_address (to_rtx, offset_rtx,
4512 highest_pow2_factor (offset));
4515 #ifdef WORD_REGISTER_OPERATIONS
4516 /* If this initializes a field that is smaller than a word, at the
4517 start of a word, try to widen it to a full word.
4518 This special case allows us to output C++ member function
4519 initializations in a form that the optimizers can understand. */
4520 if (REG_P (target)
4521 && bitsize < BITS_PER_WORD
4522 && bitpos % BITS_PER_WORD == 0
4523 && GET_MODE_CLASS (mode) == MODE_INT
4524 && TREE_CODE (value) == INTEGER_CST
4525 && exp_size >= 0
4526 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4528 tree type = TREE_TYPE (value);
4530 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4532 type = lang_hooks.types.type_for_size
4533 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4534 value = convert (type, value);
4537 if (BYTES_BIG_ENDIAN)
4538 value
4539 = fold (build2 (LSHIFT_EXPR, type, value,
4540 build_int_cst (NULL_TREE,
4541 BITS_PER_WORD - bitsize, 0)));
4542 bitsize = BITS_PER_WORD;
4543 mode = word_mode;
4545 #endif
4547 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4548 && DECL_NONADDRESSABLE_P (field))
4550 to_rtx = copy_rtx (to_rtx);
4551 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4554 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4555 value, type, cleared,
4556 get_alias_set (TREE_TYPE (field)));
4560 else if (TREE_CODE (type) == ARRAY_TYPE)
4562 tree elt;
4563 int i;
4564 int need_to_clear;
4565 tree domain;
4566 tree elttype = TREE_TYPE (type);
4567 int const_bounds_p;
4568 HOST_WIDE_INT minelt = 0;
4569 HOST_WIDE_INT maxelt = 0;
4571 domain = TYPE_DOMAIN (type);
4572 const_bounds_p = (TYPE_MIN_VALUE (domain)
4573 && TYPE_MAX_VALUE (domain)
4574 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4575 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4577 /* If we have constant bounds for the range of the type, get them. */
4578 if (const_bounds_p)
4580 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4581 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4584 /* If the constructor has fewer elements than the array,
4585 clear the whole array first. Similarly if this is
4586 a static constructor of a non-BLKmode object. */
4587 if (cleared)
4588 need_to_clear = 0;
4589 else if (REG_P (target) && TREE_STATIC (exp))
4590 need_to_clear = 1;
4591 else
4593 HOST_WIDE_INT count = 0, zero_count = 0;
4594 need_to_clear = ! const_bounds_p;
4596 /* This loop is a more accurate version of the loop in
4597 mostly_zeros_p (it handles RANGE_EXPR in an index).
4598 It is also needed to check for missing elements. */
4599 for (elt = CONSTRUCTOR_ELTS (exp);
4600 elt != NULL_TREE && ! need_to_clear;
4601 elt = TREE_CHAIN (elt))
4603 tree index = TREE_PURPOSE (elt);
4604 HOST_WIDE_INT this_node_count;
4606 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4608 tree lo_index = TREE_OPERAND (index, 0);
4609 tree hi_index = TREE_OPERAND (index, 1);
4611 if (! host_integerp (lo_index, 1)
4612 || ! host_integerp (hi_index, 1))
4614 need_to_clear = 1;
4615 break;
4618 this_node_count = (tree_low_cst (hi_index, 1)
4619 - tree_low_cst (lo_index, 1) + 1);
4621 else
4622 this_node_count = 1;
4624 count += this_node_count;
4625 if (mostly_zeros_p (TREE_VALUE (elt)))
4626 zero_count += this_node_count;
4629 /* Clear the entire array first if there are any missing elements,
4630 or if the incidence of zero elements is >= 75%. */
4631 if (! need_to_clear
4632 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4633 need_to_clear = 1;
4636 if (need_to_clear && size > 0)
4638 if (REG_P (target))
4639 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4640 else
4641 clear_storage (target, GEN_INT (size));
4642 cleared = 1;
4645 if (!cleared && REG_P (target))
4646 /* Inform later passes that the old value is dead. */
4647 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4649 /* Store each element of the constructor into
4650 the corresponding element of TARGET, determined
4651 by counting the elements. */
4652 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4653 elt;
4654 elt = TREE_CHAIN (elt), i++)
4656 enum machine_mode mode;
4657 HOST_WIDE_INT bitsize;
4658 HOST_WIDE_INT bitpos;
4659 int unsignedp;
4660 tree value = TREE_VALUE (elt);
4661 tree index = TREE_PURPOSE (elt);
4662 rtx xtarget = target;
4664 if (cleared && initializer_zerop (value))
4665 continue;
4667 unsignedp = TYPE_UNSIGNED (elttype);
4668 mode = TYPE_MODE (elttype);
4669 if (mode == BLKmode)
4670 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4671 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4672 : -1);
4673 else
4674 bitsize = GET_MODE_BITSIZE (mode);
4676 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4678 tree lo_index = TREE_OPERAND (index, 0);
4679 tree hi_index = TREE_OPERAND (index, 1);
4680 rtx index_r, pos_rtx;
4681 HOST_WIDE_INT lo, hi, count;
4682 tree position;
4684 /* If the range is constant and "small", unroll the loop. */
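/* Editorial example (not in the original source): a GNU range
   initializer such as

     int a[16] = { [0 ... 3] = 7 };

   yields a RANGE_EXPR index; a small constant range like this is
   unrolled into individual stores, otherwise the runtime loop in the
   else branch below is emitted.  */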
4685 if (const_bounds_p
4686 && host_integerp (lo_index, 0)
4687 && host_integerp (hi_index, 0)
4688 && (lo = tree_low_cst (lo_index, 0),
4689 hi = tree_low_cst (hi_index, 0),
4690 count = hi - lo + 1,
4691 (!MEM_P (target)
4692 || count <= 2
4693 || (host_integerp (TYPE_SIZE (elttype), 1)
4694 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4695 <= 40 * 8)))))
4697 lo -= minelt; hi -= minelt;
4698 for (; lo <= hi; lo++)
4700 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4702 if (MEM_P (target)
4703 && !MEM_KEEP_ALIAS_SET_P (target)
4704 && TREE_CODE (type) == ARRAY_TYPE
4705 && TYPE_NONALIASED_COMPONENT (type))
4707 target = copy_rtx (target);
4708 MEM_KEEP_ALIAS_SET_P (target) = 1;
4711 store_constructor_field
4712 (target, bitsize, bitpos, mode, value, type, cleared,
4713 get_alias_set (elttype));
4716 else
4718 rtx loop_start = gen_label_rtx ();
4719 rtx loop_end = gen_label_rtx ();
4720 tree exit_cond;
4722 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4723 unsignedp = TYPE_UNSIGNED (domain);
4725 index = build_decl (VAR_DECL, NULL_TREE, domain);
4727 index_r
4728 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4729 &unsignedp, 0));
4730 SET_DECL_RTL (index, index_r);
4731 store_expr (lo_index, index_r, 0);
4733 /* Build the head of the loop. */
4734 do_pending_stack_adjust ();
4735 emit_label (loop_start);
4737 /* Assign value to element index. */
4738 position
4739 = convert (ssizetype,
4740 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4741 index, TYPE_MIN_VALUE (domain))));
4742 position = size_binop (MULT_EXPR, position,
4743 convert (ssizetype,
4744 TYPE_SIZE_UNIT (elttype)));
4746 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4747 xtarget = offset_address (target, pos_rtx,
4748 highest_pow2_factor (position));
4749 xtarget = adjust_address (xtarget, mode, 0);
4750 if (TREE_CODE (value) == CONSTRUCTOR)
4751 store_constructor (value, xtarget, cleared,
4752 bitsize / BITS_PER_UNIT);
4753 else
4754 store_expr (value, xtarget, 0);
4756 /* Generate a conditional jump to exit the loop. */
4757 exit_cond = build2 (LT_EXPR, integer_type_node,
4758 index, hi_index);
4759 jumpif (exit_cond, loop_end);
4761 /* Update the loop counter, and jump to the head of
4762 the loop. */
4763 expand_assignment (index,
4764 build2 (PLUS_EXPR, TREE_TYPE (index),
4765 index, integer_one_node), 0);
4767 emit_jump (loop_start);
4769 /* Build the end of the loop. */
4770 emit_label (loop_end);
4773 else if ((index != 0 && ! host_integerp (index, 0))
4774 || ! host_integerp (TYPE_SIZE (elttype), 1))
4776 tree position;
4778 if (index == 0)
4779 index = ssize_int (1);
4781 if (minelt)
4782 index = fold_convert (ssizetype,
4783 fold (build2 (MINUS_EXPR,
4784 TREE_TYPE (index),
4785 index,
4786 TYPE_MIN_VALUE (domain))));
4788 position = size_binop (MULT_EXPR, index,
4789 convert (ssizetype,
4790 TYPE_SIZE_UNIT (elttype)));
4791 xtarget = offset_address (target,
4792 expand_expr (position, 0, VOIDmode, 0),
4793 highest_pow2_factor (position));
4794 xtarget = adjust_address (xtarget, mode, 0);
4795 store_expr (value, xtarget, 0);
4797 else
4799 if (index != 0)
4800 bitpos = ((tree_low_cst (index, 0) - minelt)
4801 * tree_low_cst (TYPE_SIZE (elttype), 1));
4802 else
4803 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4805 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4806 && TREE_CODE (type) == ARRAY_TYPE
4807 && TYPE_NONALIASED_COMPONENT (type))
4809 target = copy_rtx (target);
4810 MEM_KEEP_ALIAS_SET_P (target) = 1;
4812 store_constructor_field (target, bitsize, bitpos, mode, value,
4813 type, cleared, get_alias_set (elttype));
4818 else if (TREE_CODE (type) == VECTOR_TYPE)
4820 tree elt;
4821 int i;
4822 int need_to_clear;
4823 int icode = 0;
4824 tree elttype = TREE_TYPE (type);
4825 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4826 enum machine_mode eltmode = TYPE_MODE (elttype);
4827 HOST_WIDE_INT bitsize;
4828 HOST_WIDE_INT bitpos;
4829 rtx *vector = NULL;
4830 unsigned n_elts;
4832 if (eltmode == BLKmode)
4833 abort ();
4835 n_elts = TYPE_VECTOR_SUBPARTS (type);
4836 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4838 enum machine_mode mode = GET_MODE (target);
4840 icode = (int) vec_init_optab->handlers[mode].insn_code;
4841 if (icode != CODE_FOR_nothing)
4843 unsigned int i;
4845 vector = alloca (n_elts * sizeof (rtx));
4846 for (i = 0; i < n_elts; i++)
4847 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4851 /* If the constructor has fewer elements than the vector,
4852 clear the whole vector first. Similarly if this is a
4853 static constructor of a non-BLKmode object. */
4854 if (cleared)
4855 need_to_clear = 0;
4856 else if (REG_P (target) && TREE_STATIC (exp))
4857 need_to_clear = 1;
4858 else
4860 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4862 for (elt = CONSTRUCTOR_ELTS (exp);
4863 elt != NULL_TREE;
4864 elt = TREE_CHAIN (elt))
4866 int n_elts_here =
4867 tree_low_cst (
4868 int_const_binop (TRUNC_DIV_EXPR,
4869 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4870 TYPE_SIZE (elttype), 0), 1);
4872 count += n_elts_here;
4873 if (mostly_zeros_p (TREE_VALUE (elt)))
4874 zero_count += n_elts_here;
4877 /* Clear the entire vector first if there are any missing elements,
4878 or if the incidence of zero elements is >= 75%. */
4879 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4882 if (need_to_clear && size > 0 && !vector)
4884 if (REG_P (target))
4885 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4886 else
4887 clear_storage (target, GEN_INT (size));
4888 cleared = 1;
4891 if (!cleared && REG_P (target))
4892 /* Inform later passes that the old value is dead. */
4893 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4895 /* Store each element of the constructor into the corresponding
4896 element of TARGET, determined by counting the elements. */
4897 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4898 elt;
4899 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4901 tree value = TREE_VALUE (elt);
4902 tree index = TREE_PURPOSE (elt);
4903 HOST_WIDE_INT eltpos;
4905 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4906 if (cleared && initializer_zerop (value))
4907 continue;
4909 if (index != 0)
4910 eltpos = tree_low_cst (index, 1);
4911 else
4912 eltpos = i;
4914 if (vector)
4916 /* Vector CONSTRUCTORs should only be built from smaller
4917 vectors in the case of BLKmode vectors. */
4918 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
4919 abort ();
4920 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4922 else
4924 enum machine_mode value_mode =
4925 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4926 ? TYPE_MODE (TREE_TYPE (value))
4927 : eltmode;
4928 bitpos = eltpos * elt_size;
4929 store_constructor_field (target, bitsize, bitpos, value_mode, value,
4930 type, cleared, get_alias_set (elttype));
4934 if (vector)
4935 emit_insn (GEN_FCN (icode) (target,
4936 gen_rtx_PARALLEL (GET_MODE (target),
4937 gen_rtvec_v (n_elts, vector))));
4940 /* Set constructor assignments. */
4941 else if (TREE_CODE (type) == SET_TYPE)
4943 tree elt = CONSTRUCTOR_ELTS (exp);
4944 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4945 tree domain = TYPE_DOMAIN (type);
4946 tree domain_min, domain_max, bitlength;
4948 /* The default implementation strategy is to extract the constant
4949 parts of the constructor, use that to initialize the target,
4950 and then "or" in whatever non-constant ranges we need in addition.
4952 If a large set is all zero or all ones, it is
4953 probably better to set it using memset.
4954 Also, if a large set has just a single range, it may also be
4955 better to first clear the whole set (using
4956 memset), and then set the bits we want. */
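/* Editorial note (not in the original source): SET_TYPE constructors
   come from front ends with set types (Pascal- or Chill-style
   "set of 0..31"); a constructor equivalent to [2, 5..7] is emitted
   as constant words where possible, with the setbits/memset library
   calls below covering non-constant or large ranges.  */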
4958 /* Check for all zeros. */
4959 if (elt == NULL_TREE && size > 0)
4961 if (!cleared)
4962 clear_storage (target, GEN_INT (size));
4963 return;
4966 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4967 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4968 bitlength = size_binop (PLUS_EXPR,
4969 size_diffop (domain_max, domain_min),
4970 ssize_int (1));
4972 nbits = tree_low_cst (bitlength, 1);
4974 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4975 are "complicated" (more than one range), initialize (the
4976 constant parts) by copying from a constant. */
4977 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4978 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4980 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4981 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4982 char *bit_buffer = alloca (nbits);
4983 HOST_WIDE_INT word = 0;
4984 unsigned int bit_pos = 0;
4985 unsigned int ibit = 0;
4986 unsigned int offset = 0; /* In bytes from beginning of set. */
4988 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4989 for (;;)
4991 if (bit_buffer[ibit])
4993 if (BYTES_BIG_ENDIAN)
4994 word |= (1 << (set_word_size - 1 - bit_pos));
4995 else
4996 word |= 1 << bit_pos;
4999 bit_pos++; ibit++;
5000 if (bit_pos >= set_word_size || ibit == nbits)
5002 if (word != 0 || ! cleared)
5004 rtx datum = gen_int_mode (word, mode);
5005 rtx to_rtx;
5007 /* The assumption here is that it is safe to use
5008 XEXP if the set is multi-word, but not if
5009 it's single-word. */
5010 if (MEM_P (target))
5011 to_rtx = adjust_address (target, mode, offset);
5012 else if (offset == 0)
5013 to_rtx = target;
5014 else
5015 abort ();
5016 emit_move_insn (to_rtx, datum);
5019 if (ibit == nbits)
5020 break;
5021 word = 0;
5022 bit_pos = 0;
5023 offset += set_word_size / BITS_PER_UNIT;
5027 else if (!cleared)
5028 /* Don't bother clearing storage if the set is all ones. */
5029 if (TREE_CHAIN (elt) != NULL_TREE
5030 || (TREE_PURPOSE (elt) == NULL_TREE
5031 ? nbits != 1
5032 : ( ! host_integerp (TREE_VALUE (elt), 0)
5033 || ! host_integerp (TREE_PURPOSE (elt), 0)
5034 || (tree_low_cst (TREE_VALUE (elt), 0)
5035 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5036 != (HOST_WIDE_INT) nbits))))
5037 clear_storage (target, expr_size (exp));
5039 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5041 /* Start of range of element or NULL. */
5042 tree startbit = TREE_PURPOSE (elt);
5043 /* End of range of element, or element value. */
5044 tree endbit = TREE_VALUE (elt);
5045 HOST_WIDE_INT startb, endb;
5046 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5048 bitlength_rtx = expand_expr (bitlength,
5049 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5051 /* Handle non-range tuple element like [ expr ]. */
5052 if (startbit == NULL_TREE)
5054 startbit = save_expr (endbit);
5055 endbit = startbit;
5058 startbit = convert (sizetype, startbit);
5059 endbit = convert (sizetype, endbit);
5060 if (! integer_zerop (domain_min))
5062 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5063 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5065 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5066 EXPAND_CONST_ADDRESS);
5067 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5068 EXPAND_CONST_ADDRESS);
5070 if (REG_P (target))
5072 targetx
5073 = assign_temp
5074 ((build_qualified_type (lang_hooks.types.type_for_mode
5075 (GET_MODE (target), 0),
5076 TYPE_QUAL_CONST)),
5077 0, 1, 1);
5078 emit_move_insn (targetx, target);
5081 else if (MEM_P (target))
5082 targetx = target;
5083 else
5084 abort ();
5086 /* Optimization: If startbit and endbit are constants divisible
5087 by BITS_PER_UNIT, call memset instead. */
5088 if (TREE_CODE (startbit) == INTEGER_CST
5089 && TREE_CODE (endbit) == INTEGER_CST
5090 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5091 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5093 emit_library_call (memset_libfunc, LCT_NORMAL,
5094 VOIDmode, 3,
5095 plus_constant (XEXP (targetx, 0),
5096 startb / BITS_PER_UNIT),
5097 Pmode,
5098 constm1_rtx, TYPE_MODE (integer_type_node),
5099 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5100 TYPE_MODE (sizetype));
5102 else
5103 emit_library_call (setbits_libfunc, LCT_NORMAL,
5104 VOIDmode, 4, XEXP (targetx, 0),
5105 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5106 startbit_rtx, TYPE_MODE (sizetype),
5107 endbit_rtx, TYPE_MODE (sizetype));
5109 if (REG_P (target))
5110 emit_move_insn (target, targetx);
5114 else
5115 abort ();
5118 /* Store the value of EXP (an expression tree)
5119 into a subfield of TARGET which has mode MODE and occupies
5120 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5121 If MODE is VOIDmode, it means that we are storing into a bit-field.
5123 If VALUE_MODE is VOIDmode, return nothing in particular.
5124 UNSIGNEDP is not used in this case.
5126 Otherwise, return an rtx for the value stored. This rtx
5127 has mode VALUE_MODE if that is convenient to do.
5128 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5130 TYPE is the type of the underlying object,
5132 ALIAS_SET is the alias set for the destination. This value will
5133 (in general) be different from that for TARGET, since TARGET is a
5134 reference to the containing structure. */
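/* Editorial example (not in the original source): a bit-field store
   such as

     struct { unsigned x : 5, y : 27; } s;  ...  s.x = v;

   reaches store_field with BITSIZE == 5 and MODE == VOIDmode, so the
   value is written with store_bit_field below rather than through an
   ordinary memory reference.  */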
5136 static rtx
5137 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5138 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5139 int unsignedp, tree type, int alias_set)
5141 HOST_WIDE_INT width_mask = 0;
5143 if (TREE_CODE (exp) == ERROR_MARK)
5144 return const0_rtx;
5146 /* If we have nothing to store, do nothing unless the expression has
5147 side-effects. */
5148 if (bitsize == 0)
5149 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5150 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5151 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5153 /* If we are storing into an unaligned field of an aligned union that is
5154 in a register, we may have the mode of TARGET being an integer mode but
5155 MODE == BLKmode. In that case, get an aligned object whose size and
5156 alignment are the same as TARGET and store TARGET into it (we can avoid
5157 the store if the field being stored is the entire width of TARGET). Then
5158 call ourselves recursively to store the field into a BLKmode version of
5159 that object. Finally, load from the object into TARGET. This is not
5160 very efficient in general, but should only be slightly more expensive
5161 than the otherwise-required unaligned accesses. Perhaps this can be
5162 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5163 twice, once with emit_move_insn and once via store_field. */
5165 if (mode == BLKmode
5166 && (REG_P (target) || GET_CODE (target) == SUBREG))
5168 rtx object = assign_temp (type, 0, 1, 1);
5169 rtx blk_object = adjust_address (object, BLKmode, 0);
5171 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5172 emit_move_insn (object, target);
5174 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5175 alias_set);
5177 emit_move_insn (target, object);
5179 /* We want to return the BLKmode version of the data. */
5180 return blk_object;
5183 if (GET_CODE (target) == CONCAT)
5185 /* We're storing into a struct containing a single __complex. */
5187 if (bitpos != 0)
5188 abort ();
5189 return store_expr (exp, target, value_mode != VOIDmode);
5192 /* If the structure is in a register or if the component
5193 is a bit field, we cannot use addressing to access it.
5194 Use bit-field techniques or SUBREG to store in it. */
5196 if (mode == VOIDmode
5197 || (mode != BLKmode && ! direct_store[(int) mode]
5198 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5199 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5200 || REG_P (target)
5201 || GET_CODE (target) == SUBREG
5202 /* If the field isn't aligned enough to store as an ordinary memref,
5203 store it as a bit field. */
5204 || (mode != BLKmode
5205 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5206 || bitpos % GET_MODE_ALIGNMENT (mode))
5207 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5208 || (bitpos % BITS_PER_UNIT != 0)))
5209 /* If the RHS and field are a constant size and the size of the
5210 RHS isn't the same size as the bitfield, we must use bitfield
5211 operations. */
5212 || (bitsize >= 0
5213 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5214 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5216 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5218 /* If BITSIZE is narrower than the size of the type of EXP
5219 we will be narrowing TEMP. Normally, what's wanted are the
5220 low-order bits. However, if EXP's type is a record and this is
5221 a big-endian machine, we want the upper BITSIZE bits. */
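/* Editorial example (not in the original source): narrowing a 32-bit
   record value to a 16-bit field on a big-endian target must keep
   bits 31..16, hence the right shift by the size difference below.  */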
5222 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5223 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5224 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5225 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5226 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5227 - bitsize),
5228 NULL_RTX, 1);
5230 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5231 MODE. */
5232 if (mode != VOIDmode && mode != BLKmode
5233 && mode != TYPE_MODE (TREE_TYPE (exp)))
5234 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5236 /* If the modes of TARGET and TEMP are both BLKmode, both
5237 must be in memory and BITPOS must be aligned on a byte
5238 boundary. If so, we simply do a block copy. */
5239 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5241 if (!MEM_P (target) || !MEM_P (temp)
5242 || bitpos % BITS_PER_UNIT != 0)
5243 abort ();
5245 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5246 emit_block_move (target, temp,
5247 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5248 / BITS_PER_UNIT),
5249 BLOCK_OP_NORMAL);
5251 return value_mode == VOIDmode ? const0_rtx : target;
5254 /* Store the value in the bitfield. */
5255 store_bit_field (target, bitsize, bitpos, mode, temp);
5257 if (value_mode != VOIDmode)
5259 /* The caller wants an rtx for the value.
5260 If possible, avoid refetching from the bitfield itself. */
5261 if (width_mask != 0
5262 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5264 tree count;
5265 enum machine_mode tmode;
5267 tmode = GET_MODE (temp);
5268 if (tmode == VOIDmode)
5269 tmode = value_mode;
5271 if (unsignedp)
5272 return expand_and (tmode, temp,
5273 gen_int_mode (width_mask, tmode),
5274 NULL_RTX);
5276 count = build_int_cst (NULL_TREE,
5277 GET_MODE_BITSIZE (tmode) - bitsize, 0);
5278 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5279 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5282 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5283 NULL_RTX, value_mode, VOIDmode);
5285 return const0_rtx;
5287 else
5289 rtx addr = XEXP (target, 0);
5290 rtx to_rtx = target;
5292 /* If a value is wanted, it must be the lhs;
5293 so make the address stable for multiple use. */
5295 if (value_mode != VOIDmode && !REG_P (addr)
5296 && ! CONSTANT_ADDRESS_P (addr)
5297 /* A frame-pointer reference is already stable. */
5298 && ! (GET_CODE (addr) == PLUS
5299 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5300 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5301 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5302 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5304 /* Now build a reference to just the desired component. */
5306 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5308 if (to_rtx == target)
5309 to_rtx = copy_rtx (to_rtx);
5311 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5312 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5313 set_mem_alias_set (to_rtx, alias_set);
5315 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5319 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5320 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5321 codes and find the ultimate containing object, which we return.
5323 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5324 bit position, and *PUNSIGNEDP to the signedness of the field.
5325 If the position of the field is variable, we store a tree
5326 giving the variable offset (in units) in *POFFSET.
5327 This offset is in addition to the bit position.
5328 If the position is not variable, we store 0 in *POFFSET.
5330 If any of the extraction expressions is volatile,
5331 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5333 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5334 is a mode that can be used to access the field. In that case, *PBITSIZE
5335 is redundant.
5337 If the field describes a variable-sized object, *PMODE is set to
5338 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5339 this case, but the address of the object can be found. */
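/* Editorial example (not in the original source): for a reference
   such as  a.b[i].c  this returns the innermost object `a'; the
   constant part of the position of c ends up in *PBITPOS and the
   i-dependent part, in bytes, in *POFFSET.  */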
5341 tree
5342 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5343 HOST_WIDE_INT *pbitpos, tree *poffset,
5344 enum machine_mode *pmode, int *punsignedp,
5345 int *pvolatilep)
5347 tree size_tree = 0;
5348 enum machine_mode mode = VOIDmode;
5349 tree offset = size_zero_node;
5350 tree bit_offset = bitsize_zero_node;
5351 tree tem;
5353 /* First get the mode, signedness, and size. We do this from just the
5354 outermost expression. */
5355 if (TREE_CODE (exp) == COMPONENT_REF)
5357 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5358 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5359 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5361 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5363 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5365 size_tree = TREE_OPERAND (exp, 1);
5366 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5368 else
5370 mode = TYPE_MODE (TREE_TYPE (exp));
5371 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5373 if (mode == BLKmode)
5374 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5375 else
5376 *pbitsize = GET_MODE_BITSIZE (mode);
5379 if (size_tree != 0)
5381 if (! host_integerp (size_tree, 1))
5382 mode = BLKmode, *pbitsize = -1;
5383 else
5384 *pbitsize = tree_low_cst (size_tree, 1);
5387 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5388 and find the ultimate containing object. */
5389 while (1)
5391 if (TREE_CODE (exp) == BIT_FIELD_REF)
5392 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5393 else if (TREE_CODE (exp) == COMPONENT_REF)
5395 tree field = TREE_OPERAND (exp, 1);
5396 tree this_offset = component_ref_field_offset (exp);
5398 /* If this field hasn't been filled in yet, don't go
5399 past it. This should only happen when folding expressions
5400 made during type construction. */
5401 if (this_offset == 0)
5402 break;
5404 offset = size_binop (PLUS_EXPR, offset, this_offset);
5405 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5406 DECL_FIELD_BIT_OFFSET (field));
5408 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5411 else if (TREE_CODE (exp) == ARRAY_REF
5412 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5414 tree index = TREE_OPERAND (exp, 1);
5415 tree low_bound = array_ref_low_bound (exp);
5416 tree unit_size = array_ref_element_size (exp);
5418 /* We assume all arrays have sizes that are a multiple of a byte.
5419 First subtract the lower bound, if any, in the type of the
5420 index, then convert to sizetype and multiply by the size of the
5421 array element. */
5422 if (! integer_zerop (low_bound))
5423 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5424 index, low_bound));
5426 offset = size_binop (PLUS_EXPR, offset,
5427 size_binop (MULT_EXPR,
5428 convert (sizetype, index),
5429 unit_size));
5432 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5433 conversions that don't change the mode, and all view conversions
5434 except those that need to "step up" the alignment. */
5435 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5436 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5437 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5438 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5439 && STRICT_ALIGNMENT
5440 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5441 < BIGGEST_ALIGNMENT)
5442 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5443 || TYPE_ALIGN_OK (TREE_TYPE
5444 (TREE_OPERAND (exp, 0))))))
5445 && ! ((TREE_CODE (exp) == NOP_EXPR
5446 || TREE_CODE (exp) == CONVERT_EXPR)
5447 && (TYPE_MODE (TREE_TYPE (exp))
5448 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5449 break;
5451 /* If any reference in the chain is volatile, the effect is volatile. */
5452 if (TREE_THIS_VOLATILE (exp))
5453 *pvolatilep = 1;
5455 exp = TREE_OPERAND (exp, 0);
5458 /* If OFFSET is constant, see if we can return the whole thing as a
5459 constant bit position. Otherwise, split it up. */
5460 if (host_integerp (offset, 0)
5461 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5462 bitsize_unit_node))
5463 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5464 && host_integerp (tem, 0))
5465 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5466 else
5467 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5469 *pmode = mode;
5470 return exp;
5473 /* Return a tree of sizetype representing the size, in bytes, of the element
5474 of EXP, an ARRAY_REF. */
5476 tree
5477 array_ref_element_size (tree exp)
5479 tree aligned_size = TREE_OPERAND (exp, 3);
5480 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5482 /* If a size was specified in the ARRAY_REF, it's the size measured
5483 in alignment units of the element type. So multiply by that value. */
5484 if (aligned_size)
5485 return size_binop (MULT_EXPR, aligned_size,
5486 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5488 /* Otherwise, take the size from that of the element type. Substitute
5489 any PLACEHOLDER_EXPR that we have. */
5490 else
5491 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5494 /* Return a tree representing the lower bound of the array mentioned in
5495 EXP, an ARRAY_REF. */
5497 tree
5498 array_ref_low_bound (tree exp)
5500 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5502 /* If a lower bound is specified in EXP, use it. */
5503 if (TREE_OPERAND (exp, 2))
5504 return TREE_OPERAND (exp, 2);
5506 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5507 substituting for a PLACEHOLDER_EXPR as needed. */
5508 if (domain_type && TYPE_MIN_VALUE (domain_type))
5509 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5511 /* Otherwise, return a zero of the appropriate type. */
5512 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5515 /* Return a tree representing the upper bound of the array mentioned in
5516 EXP, an ARRAY_REF. */
5518 tree
5519 array_ref_up_bound (tree exp)
5521 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5523 /* If there is a domain type and it has an upper bound, use it, substituting
5524 for a PLACEHOLDER_EXPR as needed. */
5525 if (domain_type && TYPE_MAX_VALUE (domain_type))
5526 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5528 /* Otherwise fail. */
5529 return NULL_TREE;
5532 /* Return a tree representing the offset, in bytes, of the field referenced
5533 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5535 tree
5536 component_ref_field_offset (tree exp)
5538 tree aligned_offset = TREE_OPERAND (exp, 2);
5539 tree field = TREE_OPERAND (exp, 1);
5541 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5542 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5543 value. */
5544 if (aligned_offset)
5545 return size_binop (MULT_EXPR, aligned_offset,
5546 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5548 /* Otherwise, take the offset from that of the field. Substitute
5549 any PLACEHOLDER_EXPR that we have. */
5550 else
5551 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
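/* Worked example (editor's addition): if the COMPONENT_REF carries an
   operand 2 of 5 and the field has DECL_OFFSET_ALIGN == 64, the byte
   offset returned above is

     5 * (64 / BITS_PER_UNIT) = 5 * 8 = 40 bytes

   again assuming BITS_PER_UNIT == 8.  Any remaining sub-unit offset
   still comes from DECL_FIELD_BIT_OFFSET, which this function
   deliberately excludes.  */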
5554 /* Return 1 if T is an expression that get_inner_reference handles. */
5557 handled_component_p (tree t)
5559 switch (TREE_CODE (t))
5561 case BIT_FIELD_REF:
5562 case COMPONENT_REF:
5563 case ARRAY_REF:
5564 case ARRAY_RANGE_REF:
5565 case NON_LVALUE_EXPR:
5566 case VIEW_CONVERT_EXPR:
5567 return 1;
5569 /* ??? Sure they are handled, but get_inner_reference may return
5570 a different PBITSIZE, depending upon whether the expression is
5571 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5572 case NOP_EXPR:
5573 case CONVERT_EXPR:
5574 return (TYPE_MODE (TREE_TYPE (t))
5575 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5577 default:
5578 return 0;
5582 /* Given an rtx VALUE that may contain additions and multiplications, return
5583 an equivalent value that just refers to a register, memory, or constant.
5584 This is done by generating instructions to perform the arithmetic and
5585 returning a pseudo-register containing the value.
5587 The returned value may be a REG, SUBREG, MEM or constant. */
5590 force_operand (rtx value, rtx target)
5592 rtx op1, op2;
5593 /* Use subtarget as the target for operand 0 of a binary operation. */
5594 rtx subtarget = get_subtarget (target);
5595 enum rtx_code code = GET_CODE (value);
5597 /* Check for subreg applied to an expression produced by loop optimizer. */
5598 if (code == SUBREG
5599 && !REG_P (SUBREG_REG (value))
5600 && !MEM_P (SUBREG_REG (value)))
5602 value = simplify_gen_subreg (GET_MODE (value),
5603 force_reg (GET_MODE (SUBREG_REG (value)),
5604 force_operand (SUBREG_REG (value),
5605 NULL_RTX)),
5606 GET_MODE (SUBREG_REG (value)),
5607 SUBREG_BYTE (value));
5608 code = GET_CODE (value);
5611 /* Check for a PIC address load. */
5612 if ((code == PLUS || code == MINUS)
5613 && XEXP (value, 0) == pic_offset_table_rtx
5614 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5615 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5616 || GET_CODE (XEXP (value, 1)) == CONST))
5618 if (!subtarget)
5619 subtarget = gen_reg_rtx (GET_MODE (value));
5620 emit_move_insn (subtarget, value);
5621 return subtarget;
5624 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5626 if (!target)
5627 target = gen_reg_rtx (GET_MODE (value));
5628 convert_move (target, force_operand (XEXP (value, 0), NULL),
5629 code == ZERO_EXTEND);
5630 return target;
5633 if (ARITHMETIC_P (value))
5635 op2 = XEXP (value, 1);
5636 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5637 subtarget = 0;
5638 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5640 code = PLUS;
5641 op2 = negate_rtx (GET_MODE (value), op2);
5644 /* Check for an addition with OP2 a constant integer and our first
5645 operand a PLUS of a virtual register and something else. In that
5646 case, we want to emit the sum of the virtual register and the
5647 constant first and then add the other value. This allows virtual
5648 register instantiation to simply modify the constant rather than
5649 creating another one around this addition. */
5650 if (code == PLUS && GET_CODE (op2) == CONST_INT
5651 && GET_CODE (XEXP (value, 0)) == PLUS
5652 && REG_P (XEXP (XEXP (value, 0), 0))
5653 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5654 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5656 rtx temp = expand_simple_binop (GET_MODE (value), code,
5657 XEXP (XEXP (value, 0), 0), op2,
5658 subtarget, 0, OPTAB_LIB_WIDEN);
5659 return expand_simple_binop (GET_MODE (value), code, temp,
5660 force_operand (XEXP (XEXP (value,
5661 0), 1), 0),
5662 target, 0, OPTAB_LIB_WIDEN);
5665 op1 = force_operand (XEXP (value, 0), subtarget);
5666 op2 = force_operand (op2, NULL_RTX);
5667 switch (code)
5669 case MULT:
5670 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5671 case DIV:
5672 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5673 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5674 target, 1, OPTAB_LIB_WIDEN);
5675 else
5676 return expand_divmod (0,
5677 FLOAT_MODE_P (GET_MODE (value))
5678 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5679 GET_MODE (value), op1, op2, target, 0);
5680 break;
5681 case MOD:
5682 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5683 target, 0);
5684 break;
5685 case UDIV:
5686 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5687 target, 1);
5688 break;
5689 case UMOD:
5690 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5691 target, 1);
5692 break;
5693 case ASHIFTRT:
5694 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5695 target, 0, OPTAB_LIB_WIDEN);
5696 break;
5697 default:
5698 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5699 target, 1, OPTAB_LIB_WIDEN);
5702 if (UNARY_P (value))
5704 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5705 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5708 #ifdef INSN_SCHEDULING
5709 /* On machines that have insn scheduling, we want all memory references to be
5710 explicit, so we need to deal with such paradoxical SUBREGs. */
5711 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5712 && (GET_MODE_SIZE (GET_MODE (value))
5713 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5714 value
5715 = simplify_gen_subreg (GET_MODE (value),
5716 force_reg (GET_MODE (SUBREG_REG (value)),
5717 force_operand (SUBREG_REG (value),
5718 NULL_RTX)),
5719 GET_MODE (SUBREG_REG (value)),
5720 SUBREG_BYTE (value));
5721 #endif
5723 return value;
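/* Illustrative sketch (editor's addition; register numbers are
   hypothetical): typical effects of force_operand.

     force_operand ((plus (reg 100) (const_int 4)), 0)
       typically emits  (set (reg 101) (plus (reg 100) (const_int 4)))
       and returns      (reg 101)

     force_operand ((mult (reg 100) (reg 102)), 0)
       goes through expand_mult and returns the pseudo holding the
       product.

   A VALUE that is already a REG, SUBREG, MEM or constant falls through
   all the checks above and is returned unchanged.  */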
5726 /* Subroutine of expand_expr: return nonzero iff there is no way that
5727 EXP can reference X, which is being modified. TOP_P is nonzero if this
5728 call is going to be used to determine whether we need a temporary
5729 for EXP, as opposed to a recursive call to this function.
5731 It is always safe for this routine to return zero since it merely
5732 searches for optimization opportunities. */
5735 safe_from_p (rtx x, tree exp, int top_p)
5737 rtx exp_rtl = 0;
5738 int i, nops;
5740 if (x == 0
5741 /* If EXP has varying size, we MUST use a target since we currently
5742 have no way of allocating temporaries of variable size
5743 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5744 So we assume here that something at a higher level has prevented a
5745 clash. This is somewhat bogus, but the best we can do. Only
5746 do this when X is BLKmode and when we are at the top level. */
5747 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5748 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5749 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5750 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5751 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5752 != INTEGER_CST)
5753 && GET_MODE (x) == BLKmode)
5754 /* If X is in the outgoing argument area, it is always safe. */
5755 || (MEM_P (x)
5756 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5757 || (GET_CODE (XEXP (x, 0)) == PLUS
5758 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5759 return 1;
5761 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5762 find the underlying pseudo. */
5763 if (GET_CODE (x) == SUBREG)
5765 x = SUBREG_REG (x);
5766 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5767 return 0;
5770 /* Now look at our tree code and possibly recurse. */
5771 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5773 case 'd':
5774 exp_rtl = DECL_RTL_IF_SET (exp);
5775 break;
5777 case 'c':
5778 return 1;
5780 case 'x':
5781 if (TREE_CODE (exp) == TREE_LIST)
5783 while (1)
5785 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5786 return 0;
5787 exp = TREE_CHAIN (exp);
5788 if (!exp)
5789 return 1;
5790 if (TREE_CODE (exp) != TREE_LIST)
5791 return safe_from_p (x, exp, 0);
5794 else if (TREE_CODE (exp) == ERROR_MARK)
5795 return 1; /* An already-visited SAVE_EXPR? */
5796 else
5797 return 0;
5799 case 's':
5800 /* The only case we look at here is the DECL_INITIAL inside a
5801 DECL_EXPR. */
5802 return (TREE_CODE (exp) != DECL_EXPR
5803 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5804 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5805 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5807 case '2':
5808 case '<':
5809 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5810 return 0;
5811 /* Fall through. */
5813 case '1':
5814 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5816 case 'e':
5817 case 'r':
5818 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5819 the expression. If it is set, we conflict iff we are that rtx or
5820 both are in memory. Otherwise, we check all operands of the
5821 expression recursively. */
5823 switch (TREE_CODE (exp))
5825 case ADDR_EXPR:
5826 /* If the operand is static or we are static, we can't conflict.
5827 Likewise if we don't conflict with the operand at all. */
5828 if (staticp (TREE_OPERAND (exp, 0))
5829 || TREE_STATIC (exp)
5830 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5831 return 1;
5833 /* Otherwise, the only way this can conflict is if we are taking
5834 the address of a DECL whose address is part of X, which is
5835 very rare. */
5836 exp = TREE_OPERAND (exp, 0);
5837 if (DECL_P (exp))
5839 if (!DECL_RTL_SET_P (exp)
5840 || !MEM_P (DECL_RTL (exp)))
5841 return 0;
5842 else
5843 exp_rtl = XEXP (DECL_RTL (exp), 0);
5845 break;
5847 case INDIRECT_REF:
5848 if (MEM_P (x)
5849 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5850 get_alias_set (exp)))
5851 return 0;
5852 break;
5854 case CALL_EXPR:
5855 /* Assume that the call will clobber all hard registers and
5856 all of memory. */
5857 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5858 || MEM_P (x))
5859 return 0;
5860 break;
5862 case WITH_CLEANUP_EXPR:
5863 case CLEANUP_POINT_EXPR:
5864 /* Lowered by gimplify.c. */
5865 abort ();
5867 case SAVE_EXPR:
5868 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5870 default:
5871 break;
5874 /* If we have an rtx, we do not need to scan our operands. */
5875 if (exp_rtl)
5876 break;
5878 nops = first_rtl_op (TREE_CODE (exp));
5879 for (i = 0; i < nops; i++)
5880 if (TREE_OPERAND (exp, i) != 0
5881 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5882 return 0;
5884 /* If this is a language-specific tree code, it may require
5885 special handling. */
5886 if ((unsigned int) TREE_CODE (exp)
5887 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5888 && !lang_hooks.safe_from_p (x, exp))
5889 return 0;
5892 /* If we have an rtl, find any enclosed object. Then see if we conflict
5893 with it. */
5894 if (exp_rtl)
5896 if (GET_CODE (exp_rtl) == SUBREG)
5898 exp_rtl = SUBREG_REG (exp_rtl);
5899 if (REG_P (exp_rtl)
5900 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5901 return 0;
5904 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5905 are memory and they conflict. */
5906 return ! (rtx_equal_p (x, exp_rtl)
5907 || (MEM_P (x) && MEM_P (exp_rtl)
5908 && true_dependence (exp_rtl, VOIDmode, x,
5909 rtx_addr_varies_p)));
5912 /* If we reach here, it is safe. */
5913 return 1;
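/* Illustrative example (editor's addition): suppose the source of the
   assignment

     a = b + f ();

   is about to be expanded directly into the rtx already holding `a'.
   If `a' lives in memory, safe_from_p (DECL_RTL (a), <CALL_EXPR>, 1)
   returns 0, because the CALL_EXPR case above assumes a call clobbers
   all hard registers and all of memory; the caller then expands into a
   fresh temporary and copies the result to `a' afterwards.  Returning
   0 is always safe; it only costs a possible extra temporary.  */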
5917 /* Return the highest power of two that EXP is known to be a multiple of.
5918 This is used in updating alignment of MEMs in array references. */
5920 static unsigned HOST_WIDE_INT
5921 highest_pow2_factor (tree exp)
5923 unsigned HOST_WIDE_INT c0, c1;
5925 switch (TREE_CODE (exp))
5927 case INTEGER_CST:
5928 /* We can find the lowest bit that's a one. If the low
5929 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5930 We need to handle this case since we can find it in a COND_EXPR,
5931 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5932 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5933 later ICE. */
5934 if (TREE_CONSTANT_OVERFLOW (exp))
5935 return BIGGEST_ALIGNMENT;
5936 else
5938 /* Note: tree_low_cst is intentionally not used here;
5939 we don't care about the upper bits. */
5940 c0 = TREE_INT_CST_LOW (exp);
5941 c0 &= -c0;
5942 return c0 ? c0 : BIGGEST_ALIGNMENT;
5944 break;
5946 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5947 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5948 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5949 return MIN (c0, c1);
5951 case MULT_EXPR:
5952 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5953 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5954 return c0 * c1;
5956 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5957 case CEIL_DIV_EXPR:
5958 if (integer_pow2p (TREE_OPERAND (exp, 1))
5959 && host_integerp (TREE_OPERAND (exp, 1), 1))
5961 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5962 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5963 return MAX (1, c0 / c1);
5965 break;
5967 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5968 case SAVE_EXPR:
5969 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5971 case COMPOUND_EXPR:
5972 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5974 case COND_EXPR:
5975 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5976 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5977 return MIN (c0, c1);
5979 default:
5980 break;
5983 return 1;
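/* Standalone sketch (editor's addition, not part of GCC) of the
   lowest-set-bit trick used in the INTEGER_CST case above: `c & -c'
   isolates the lowest set bit of C, which is the largest power of two
   dividing C.

     unsigned long
     lowest_set_bit (unsigned long c)
     {
       return c & -c;
     }

   For example, lowest_set_bit (24) == 8, since 24 is binary 11000.
   The remaining cases combine factors in the obvious way: MIN for
   plus/minus and cond, product for mult, and quotient (never less
   than 1) for exact power-of-two divisions.  */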
5986 /* Similar, except that the alignment requirements of TARGET are
5987 taken into account. Assume it is at least as aligned as its
5988 type, unless it is a COMPONENT_REF in which case the layout of
5989 the structure gives the alignment. */
5991 static unsigned HOST_WIDE_INT
5992 highest_pow2_factor_for_target (tree target, tree exp)
5994 unsigned HOST_WIDE_INT target_align, factor;
5996 factor = highest_pow2_factor (exp);
5997 if (TREE_CODE (target) == COMPONENT_REF)
5998 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
5999 else
6000 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6001 return MAX (factor, target_align);
6004 /* Expands variable VAR. */
6006 void
6007 expand_var (tree var)
6009 if (DECL_EXTERNAL (var))
6010 return;
6012 if (TREE_STATIC (var))
6013 /* If this is an inlined copy of a static local variable,
6014 look up the original decl. */
6015 var = DECL_ORIGIN (var);
6017 if (TREE_STATIC (var)
6018 ? !TREE_ASM_WRITTEN (var)
6019 : !DECL_RTL_SET_P (var))
6021 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
6022 /* Should be ignored. */;
6023 else if (lang_hooks.expand_decl (var))
6024 /* OK. */;
6025 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6026 expand_decl (var);
6027 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6028 rest_of_decl_compilation (var, 0, 0);
6029 else if (TREE_CODE (var) == TYPE_DECL
6030 || TREE_CODE (var) == CONST_DECL
6031 || TREE_CODE (var) == FUNCTION_DECL
6032 || TREE_CODE (var) == LABEL_DECL)
6033 /* No expansion needed. */;
6034 else
6035 abort ();
6039 /* Subroutine of expand_expr. Expand the two operands of a binary
6040 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6041 The value may be stored in TARGET if TARGET is nonzero. The
6042 MODIFIER argument is as documented by expand_expr. */
6044 static void
6045 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6046 enum expand_modifier modifier)
6048 if (! safe_from_p (target, exp1, 1))
6049 target = 0;
6050 if (operand_equal_p (exp0, exp1, 0))
6052 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6053 *op1 = copy_rtx (*op0);
6055 else
6057 /* If we need to preserve evaluation order, copy exp0 into its own
6058 temporary variable so that it can't be clobbered by exp1. */
6059 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6060 exp0 = save_expr (exp0);
6061 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6062 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
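/* Worked example (editor's addition): expanding the operands of
   `x * x' hits the operand_equal_p path above, so `x' is expanded only
   once and *OP1 is merely copy_rtx (*OP0).  For something like
   `i++ * f (i)' with flag_evaluation_order set, EXP0 is first wrapped
   in a SAVE_EXPR so that expanding f (i) cannot clobber the value
   already computed for the first operand.  */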
6067 /* expand_expr: generate code for computing expression EXP.
6068 An rtx for the computed value is returned. The value is never null.
6069 In the case of a void EXP, const0_rtx is returned.
6071 The value may be stored in TARGET if TARGET is nonzero.
6072 TARGET is just a suggestion; callers must assume that
6073 the rtx returned may not be the same as TARGET.
6075 If TARGET is CONST0_RTX, it means that the value will be ignored.
6077 If TMODE is not VOIDmode, it suggests generating the
6078 result in mode TMODE. But this is done only when convenient.
6079 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6080 TMODE is just a suggestion; callers must assume that
6081 the rtx returned may not have mode TMODE.
6083 Note that TARGET may have neither TMODE nor MODE. In that case, it
6084 probably will not be used.
6086 If MODIFIER is EXPAND_SUM then when EXP is an addition
6087 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6088 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6089 products as above, or REG or MEM, or constant.
6090 Ordinarily in such cases we would output mul or add instructions
6091 and then return a pseudo reg containing the sum.
6093 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6094 it also marks a label as absolutely required (it can't be dead).
6095 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6096 This is used for outputting expressions used in initializers.
6098 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6099 with a constant address even if that address is not normally legitimate.
6100 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6102 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6103 a call parameter. Such targets require special care as we haven't yet
6104 marked TARGET so that it's safe from being trashed by libcalls. We
6105 don't want to use TARGET for anything but the final result;
6106 intermediate values must go elsewhere. Additionally, calls to
6107 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6109 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6110 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6111 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6112 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6113 recursively. */
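/* Illustrative example (editor's addition): under EXPAND_SUM an
   address-like computation such as `arr + i' scaled by 4-byte elements
   may come back unreduced, e.g. as

     (plus (mult (reg <i>) (const_int 4))
           (symbol_ref "arr"))

   leaving it to the caller (typically memory_address) to decide how
   much of the sum fits the machine's addressing modes.  Under
   EXPAND_NORMAL the same expression would be computed down to a single
   pseudo register.  `(reg <i>)' is schematic, not literal RTL.  */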
6115 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6116 enum expand_modifier, rtx *);
6119 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6120 enum expand_modifier modifier, rtx *alt_rtl)
6122 int rn = -1;
6123 rtx ret, last = NULL;
6125 /* Handle ERROR_MARK before anybody tries to access its type. */
6126 if (TREE_CODE (exp) == ERROR_MARK
6127 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6129 ret = CONST0_RTX (tmode);
6130 return ret ? ret : const0_rtx;
6133 if (flag_non_call_exceptions)
6135 rn = lookup_stmt_eh_region (exp);
6136 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6137 if (rn >= 0)
6138 last = get_last_insn ();
6141 /* If this is an expression of some kind and it has an associated line
6142 number, then emit the line number before expanding the expression.
6144 We need to save and restore the file and line information so that
6145 errors discovered during expansion are emitted with the right
6146 information. It would be better if the diagnostic routines
6147 used the file/line information embedded in the tree nodes rather
6148 than globals. */
6149 if (cfun && EXPR_HAS_LOCATION (exp))
6151 location_t saved_location = input_location;
6152 input_location = EXPR_LOCATION (exp);
6153 emit_line_note (input_location);
6155 /* Record where the insns produced belong. */
6156 record_block_change (TREE_BLOCK (exp));
6158 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6160 input_location = saved_location;
6162 else
6164 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6167 /* If using non-call exceptions, mark all insns that may trap.
6168 expand_call() will mark CALL_INSNs before we get to this code,
6169 but it doesn't handle libcalls, and these may trap. */
6170 if (rn >= 0)
6172 rtx insn;
6173 for (insn = next_real_insn (last); insn;
6174 insn = next_real_insn (insn))
6176 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6177 /* If we want exceptions for non-call insns, any
6178 may_trap_p instruction may throw. */
6179 && GET_CODE (PATTERN (insn)) != CLOBBER
6180 && GET_CODE (PATTERN (insn)) != USE
6181 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6183 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6184 REG_NOTES (insn));
6189 return ret;
6192 static rtx
6193 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6194 enum expand_modifier modifier, rtx *alt_rtl)
6196 rtx op0, op1, temp;
6197 tree type = TREE_TYPE (exp);
6198 int unsignedp;
6199 enum machine_mode mode;
6200 enum tree_code code = TREE_CODE (exp);
6201 optab this_optab;
6202 rtx subtarget, original_target;
6203 int ignore;
6204 tree context;
6205 bool reduce_bit_field = false;
6206 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6207 ? reduce_to_bit_field_precision ((expr), \
6208 target, \
6209 type) \
6210 : (expr))
6212 mode = TYPE_MODE (type);
6213 unsignedp = TYPE_UNSIGNED (type);
6214 if (lang_hooks.reduce_bit_field_operations
6215 && TREE_CODE (type) == INTEGER_TYPE
6216 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6218 /* An operation in what may be a bit-field type needs the
6219 result to be reduced to the precision of the bit-field type,
6220 which is narrower than that of the type's mode. */
6221 reduce_bit_field = true;
6222 if (modifier == EXPAND_STACK_PARM)
6223 target = 0;
6226 /* Use subtarget as the target for operand 0 of a binary operation. */
6227 subtarget = get_subtarget (target);
6228 original_target = target;
6229 ignore = (target == const0_rtx
6230 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6231 || code == CONVERT_EXPR || code == COND_EXPR
6232 || code == VIEW_CONVERT_EXPR)
6233 && TREE_CODE (type) == VOID_TYPE));
6235 /* If we are going to ignore this result, we need only do something
6236 if there is a side-effect somewhere in the expression. If there
6237 is, short-circuit the most common cases here. Note that we must
6238 not call expand_expr with anything but const0_rtx in case this
6239 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6241 if (ignore)
6243 if (! TREE_SIDE_EFFECTS (exp))
6244 return const0_rtx;
6246 /* Ensure we reference a volatile object even if value is ignored, but
6247 don't do this if all we are doing is taking its address. */
6248 if (TREE_THIS_VOLATILE (exp)
6249 && TREE_CODE (exp) != FUNCTION_DECL
6250 && mode != VOIDmode && mode != BLKmode
6251 && modifier != EXPAND_CONST_ADDRESS)
6253 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6254 if (MEM_P (temp))
6255 temp = copy_to_reg (temp);
6256 return const0_rtx;
6259 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6260 || code == INDIRECT_REF)
6261 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6262 modifier);
6264 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6265 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6267 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6268 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6269 return const0_rtx;
6271 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6272 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6273 /* If the second operand has no side effects, just evaluate
6274 the first. */
6275 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6276 modifier);
6277 else if (code == BIT_FIELD_REF)
6279 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6280 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6281 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6282 return const0_rtx;
6285 target = 0;
6288 /* If we will do cse, generate all results into pseudo registers
6289 since 1) that allows cse to find more things
6290 and 2) otherwise cse could produce an insn the machine
6291 cannot support. An exception is a CONSTRUCTOR into a multi-word
6292 MEM: that's much more likely to be most efficient into the MEM.
6293 Another is a CALL_EXPR which must return in memory. */
6295 if (! cse_not_expected && mode != BLKmode && target
6296 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6297 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6298 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6299 target = 0;
6301 switch (code)
6303 case LABEL_DECL:
6305 tree function = decl_function_context (exp);
6307 temp = label_rtx (exp);
6308 temp = gen_rtx_LABEL_REF (Pmode, temp);
6310 if (function != current_function_decl
6311 && function != 0)
6312 LABEL_REF_NONLOCAL_P (temp) = 1;
6314 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6315 return temp;
6318 case PARM_DECL:
6319 case VAR_DECL:
6320 /* If a static var's type was incomplete when the decl was written,
6321 but the type is complete now, lay out the decl now. */
6322 if (DECL_SIZE (exp) == 0
6323 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6324 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6325 layout_decl (exp, 0);
6327 /* ... fall through ... */
6329 case FUNCTION_DECL:
6330 case RESULT_DECL:
6331 if (DECL_RTL (exp) == 0)
6332 abort ();
6334 /* Ensure the variable is marked as used even if it doesn't go through
6335 a parser. If it hasn't been used yet, write out an external
6336 definition. */
6337 if (! TREE_USED (exp))
6339 assemble_external (exp);
6340 TREE_USED (exp) = 1;
6343 /* Show we haven't gotten RTL for this yet. */
6344 temp = 0;
6346 /* Variables inherited from containing functions should have
6347 been lowered by this point. */
6348 context = decl_function_context (exp);
6349 if (context != 0
6350 && context != current_function_decl
6351 && !TREE_STATIC (exp)
6352 /* ??? C++ creates functions that are not TREE_STATIC. */
6353 && TREE_CODE (exp) != FUNCTION_DECL)
6354 abort ();
6356 /* This is the case of an array whose size is to be determined
6357 from its initializer, while the initializer is still being parsed.
6358 See expand_decl. */
6360 else if (MEM_P (DECL_RTL (exp))
6361 && REG_P (XEXP (DECL_RTL (exp), 0)))
6362 temp = validize_mem (DECL_RTL (exp));
6364 /* If DECL_RTL is memory, we are in the normal case: if either
6365 the address is not valid, or it is not a register and -fforce-addr
6366 is specified, get the address into a register. */
6368 else if (MEM_P (DECL_RTL (exp))
6369 && modifier != EXPAND_CONST_ADDRESS
6370 && modifier != EXPAND_SUM
6371 && modifier != EXPAND_INITIALIZER
6372 && (! memory_address_p (DECL_MODE (exp),
6373 XEXP (DECL_RTL (exp), 0))
6374 || (flag_force_addr
6375 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6377 if (alt_rtl)
6378 *alt_rtl = DECL_RTL (exp);
6379 temp = replace_equiv_address (DECL_RTL (exp),
6380 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6383 /* If we got something, return it. But first, set the alignment
6384 if the address is a register. */
6385 if (temp != 0)
6387 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6388 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6390 return temp;
6393 /* If the mode of DECL_RTL does not match that of the decl, it
6394 must be a promoted value. We return a SUBREG of the wanted mode,
6395 but mark it so that we know that it was already extended. */
6397 if (REG_P (DECL_RTL (exp))
6398 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6400 /* Get the signedness used for this variable. Ensure we get the
6401 same mode we got when the variable was declared. */
6402 if (GET_MODE (DECL_RTL (exp))
6403 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6404 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6405 abort ();
6407 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6408 SUBREG_PROMOTED_VAR_P (temp) = 1;
6409 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6410 return temp;
6413 return DECL_RTL (exp);
6415 case INTEGER_CST:
6416 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6417 TREE_INT_CST_HIGH (exp), mode);
6419 /* ??? If overflow is set, fold will have done an incomplete job,
6420 which can result in (plus xx (const_int 0)), which can get
6421 simplified by validate_replace_rtx during virtual register
6422 instantiation, which can result in unrecognizable insns.
6423 Avoid this by forcing all overflows into registers. */
6424 if (TREE_CONSTANT_OVERFLOW (exp)
6425 && modifier != EXPAND_INITIALIZER)
6426 temp = force_reg (mode, temp);
6428 return temp;
6430 case VECTOR_CST:
6431 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6432 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6433 return const_vector_from_tree (exp);
6434 else
6435 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6436 TREE_VECTOR_CST_ELTS (exp)),
6437 ignore ? const0_rtx : target, tmode, modifier);
6439 case CONST_DECL:
6440 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6442 case REAL_CST:
6443 /* If optimized, generate immediate CONST_DOUBLE
6444 which will be turned into memory by reload if necessary.
6446 We used to force a register so that loop.c could see it. But
6447 this does not allow gen_* patterns to perform optimizations with
6448 the constants. It also produces two insns in cases like "x = 1.0;".
6449 On most machines, floating-point constants are not permitted in
6450 many insns, so we'd end up copying it to a register in any case.
6452 Now, we do the copying in expand_binop, if appropriate. */
6453 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6454 TYPE_MODE (TREE_TYPE (exp)));
6456 case COMPLEX_CST:
6457 /* Handle evaluating a complex constant in a CONCAT target. */
6458 if (original_target && GET_CODE (original_target) == CONCAT)
6460 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6461 rtx rtarg, itarg;
6463 rtarg = XEXP (original_target, 0);
6464 itarg = XEXP (original_target, 1);
6466 /* Move the real and imaginary parts separately. */
6467 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6468 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6470 if (op0 != rtarg)
6471 emit_move_insn (rtarg, op0);
6472 if (op1 != itarg)
6473 emit_move_insn (itarg, op1);
6475 return original_target;
6478 /* ... fall through ... */
6480 case STRING_CST:
6481 temp = output_constant_def (exp, 1);
6483 /* temp contains a constant address.
6484 On RISC machines where a constant address isn't valid,
6485 make some insns to get that address into a register. */
6486 if (modifier != EXPAND_CONST_ADDRESS
6487 && modifier != EXPAND_INITIALIZER
6488 && modifier != EXPAND_SUM
6489 && (! memory_address_p (mode, XEXP (temp, 0))
6490 || flag_force_addr))
6491 return replace_equiv_address (temp,
6492 copy_rtx (XEXP (temp, 0)));
6493 return temp;
6495 case SAVE_EXPR:
6497 tree val = TREE_OPERAND (exp, 0);
6498 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6500 if (!SAVE_EXPR_RESOLVED_P (exp))
6502 /* We can indeed still hit this case, typically via builtin
6503 expanders calling save_expr immediately before expanding
6504 something. Assume this means that we only have to deal
6505 with non-BLKmode values. */
6506 if (GET_MODE (ret) == BLKmode)
6507 abort ();
6509 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6510 DECL_ARTIFICIAL (val) = 1;
6511 DECL_IGNORED_P (val) = 1;
6512 TREE_OPERAND (exp, 0) = val;
6513 SAVE_EXPR_RESOLVED_P (exp) = 1;
6515 if (!CONSTANT_P (ret))
6516 ret = copy_to_reg (ret);
6517 SET_DECL_RTL (val, ret);
6520 return ret;
6523 case GOTO_EXPR:
6524 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6525 expand_goto (TREE_OPERAND (exp, 0));
6526 else
6527 expand_computed_goto (TREE_OPERAND (exp, 0));
6528 return const0_rtx;
6530 case CONSTRUCTOR:
6531 /* If we don't need the result, just ensure we evaluate any
6532 subexpressions. */
6533 if (ignore)
6535 tree elt;
6537 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6538 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6540 return const0_rtx;
6543 /* All elts simple constants => refer to a constant in memory. But
6544 if this is a non-BLKmode mode, let it store a field at a time
6545 since that should make a CONST_INT or CONST_DOUBLE when we
6546 fold. Likewise, if we have a target we can use, it is best to
6547 store directly into the target unless the type is large enough
6548 that memcpy will be used. If we are making an initializer and
6549 all operands are constant, put it in memory as well.
6551 FIXME: Avoid trying to fill vector constructors piece-meal.
6552 Output them with output_constant_def below unless we're sure
6553 they're zeros. This should go away when vector initializers
6554 are treated like VECTOR_CST instead of arrays.
6556 else if ((TREE_STATIC (exp)
6557 && ((mode == BLKmode
6558 && ! (target != 0 && safe_from_p (target, exp, 1)))
6559 || TREE_ADDRESSABLE (exp)
6560 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6561 && (! MOVE_BY_PIECES_P
6562 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6563 TYPE_ALIGN (type)))
6564 && ! mostly_zeros_p (exp))))
6565 || ((modifier == EXPAND_INITIALIZER
6566 || modifier == EXPAND_CONST_ADDRESS)
6567 && TREE_CONSTANT (exp)))
6569 rtx constructor = output_constant_def (exp, 1);
6571 if (modifier != EXPAND_CONST_ADDRESS
6572 && modifier != EXPAND_INITIALIZER
6573 && modifier != EXPAND_SUM)
6574 constructor = validize_mem (constructor);
6576 return constructor;
6578 else
6580 /* Handle calls that pass values in multiple non-contiguous
6581 locations. The Irix 6 ABI has examples of this. */
6582 if (target == 0 || ! safe_from_p (target, exp, 1)
6583 || GET_CODE (target) == PARALLEL
6584 || modifier == EXPAND_STACK_PARM)
6585 target
6586 = assign_temp (build_qualified_type (type,
6587 (TYPE_QUALS (type)
6588 | (TREE_READONLY (exp)
6589 * TYPE_QUAL_CONST))),
6590 0, TREE_ADDRESSABLE (exp), 1);
6592 store_constructor (exp, target, 0, int_expr_size (exp));
6593 return target;
6596 case INDIRECT_REF:
6598 tree exp1 = TREE_OPERAND (exp, 0);
6600 if (modifier != EXPAND_WRITE)
6602 tree t;
6604 t = fold_read_from_constant_string (exp);
6605 if (t)
6606 return expand_expr (t, target, tmode, modifier);
6609 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6610 op0 = memory_address (mode, op0);
6611 temp = gen_rtx_MEM (mode, op0);
6612 set_mem_attributes (temp, exp, 0);
6614 return temp;
6617 case ARRAY_REF:
6619 #ifdef ENABLE_CHECKING
6620 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6621 abort ();
6622 #endif
6625 tree array = TREE_OPERAND (exp, 0);
6626 tree low_bound = array_ref_low_bound (exp);
6627 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6628 HOST_WIDE_INT i;
6630 /* Optimize the special-case of a zero lower bound.
6632 We convert the low_bound to sizetype to avoid some problems
6633 with constant folding. (E.g. suppose the lower bound is 1,
6634 and its mode is QI. Without the conversion, (ARRAY
6635 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6636 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6638 if (! integer_zerop (low_bound))
6639 index = size_diffop (index, convert (sizetype, low_bound));
6641 /* Fold an expression like: "foo"[2].
6642 This is not done in fold so it won't happen inside &.
6643 Don't fold if this is for wide characters since it's too
6644 difficult to do correctly and this is a very rare case. */
6646 if (modifier != EXPAND_CONST_ADDRESS
6647 && modifier != EXPAND_INITIALIZER
6648 && modifier != EXPAND_MEMORY)
6650 tree t = fold_read_from_constant_string (exp);
6652 if (t)
6653 return expand_expr (t, target, tmode, modifier);
6656 /* If this is a constant index into a constant array,
6657 just get the value from the array. Handle both the cases when
6658 we have an explicit constructor and when our operand is a variable
6659 that was declared const. */
6661 if (modifier != EXPAND_CONST_ADDRESS
6662 && modifier != EXPAND_INITIALIZER
6663 && modifier != EXPAND_MEMORY
6664 && TREE_CODE (array) == CONSTRUCTOR
6665 && ! TREE_SIDE_EFFECTS (array)
6666 && TREE_CODE (index) == INTEGER_CST
6667 && 0 > compare_tree_int (index,
6668 list_length (CONSTRUCTOR_ELTS
6669 (TREE_OPERAND (exp, 0)))))
6671 tree elem;
6673 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6674 i = TREE_INT_CST_LOW (index);
6675 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6678 if (elem)
6679 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6680 modifier);
6683 else if (optimize >= 1
6684 && modifier != EXPAND_CONST_ADDRESS
6685 && modifier != EXPAND_INITIALIZER
6686 && modifier != EXPAND_MEMORY
6687 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6688 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6689 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6690 && targetm.binds_local_p (array))
6692 if (TREE_CODE (index) == INTEGER_CST)
6694 tree init = DECL_INITIAL (array);
6696 if (TREE_CODE (init) == CONSTRUCTOR)
6698 tree elem;
6700 for (elem = CONSTRUCTOR_ELTS (init);
6701 (elem
6702 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6703 elem = TREE_CHAIN (elem))
6706 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6707 return expand_expr (fold (TREE_VALUE (elem)), target,
6708 tmode, modifier);
6710 else if (TREE_CODE (init) == STRING_CST
6711 && 0 > compare_tree_int (index,
6712 TREE_STRING_LENGTH (init)))
6714 tree type = TREE_TYPE (TREE_TYPE (init));
6715 enum machine_mode mode = TYPE_MODE (type);
6717 if (GET_MODE_CLASS (mode) == MODE_INT
6718 && GET_MODE_SIZE (mode) == 1)
6719 return gen_int_mode (TREE_STRING_POINTER (init)
6720 [TREE_INT_CST_LOW (index)], mode);
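/* Worked example (editor's addition): with optimization enabled and

     static const char msg[] = "hi";

   a reference to msg[1] satisfies the tests above (read-only, locally
   bound, constant index smaller than TREE_STRING_LENGTH), so it
   expands directly to the QImode constant 105, the code of 'i', and no
   memory load is emitted.  */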
6725 goto normal_inner_ref;
6727 case COMPONENT_REF:
6728 /* If the operand is a CONSTRUCTOR, we can just extract the
6729 appropriate field if it is present. */
6730 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6732 tree elt;
6734 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6735 elt = TREE_CHAIN (elt))
6736 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6737 /* We can normally use the value of the field in the
6738 CONSTRUCTOR. However, if this is a bitfield in
6739 an integral mode that we can fit in a HOST_WIDE_INT,
6740 we must mask only the number of bits in the bitfield,
6741 since this is done implicitly by the constructor. If
6742 the bitfield does not meet either of those conditions,
6743 we can't do this optimization. */
6744 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6745 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6746 == MODE_INT)
6747 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6748 <= HOST_BITS_PER_WIDE_INT))))
6750 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6751 && modifier == EXPAND_STACK_PARM)
6752 target = 0;
6753 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6754 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6756 HOST_WIDE_INT bitsize
6757 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6758 enum machine_mode imode
6759 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6761 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6763 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6764 op0 = expand_and (imode, op0, op1, target);
6766 else
6768 tree count
6769 = build_int_cst (NULL_TREE,
6770 GET_MODE_BITSIZE (imode) - bitsize,
6773 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6774 target, 0);
6775 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6776 target, 0);
6780 return op0;
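/* Worked example (editor's addition): extracting a 3-bit bit-field
   whose CONSTRUCTOR value expanded to 0xfd, in a 32-bit mode:

     unsigned field:  0xfd & ((1 << 3) - 1)  =  5
     signed field:    shift left by 29, then arithmetic shift right
                      by 29, giving -3 (the 3-bit pattern 101 read
                      as a signed value)

   which is exactly what the expand_and and the pair of expand_shift
   calls above compute.  */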
6783 goto normal_inner_ref;
6785 case BIT_FIELD_REF:
6786 case ARRAY_RANGE_REF:
6787 normal_inner_ref:
6789 enum machine_mode mode1;
6790 HOST_WIDE_INT bitsize, bitpos;
6791 tree offset;
6792 int volatilep = 0;
6793 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6794 &mode1, &unsignedp, &volatilep);
6795 rtx orig_op0;
6797 /* If we got back the original object, something is wrong. Perhaps
6798 we are evaluating an expression too early. In any event, don't
6799 infinitely recurse. */
6800 if (tem == exp)
6801 abort ();
6803 /* If TEM's type is a union of variable size, pass TARGET to the inner
6804 computation, since it will need a temporary and TARGET is known
6805 to suffice. This occurs in unchecked conversion in Ada. */
6807 orig_op0 = op0
6808 = expand_expr (tem,
6809 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6810 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6811 != INTEGER_CST)
6812 && modifier != EXPAND_STACK_PARM
6813 ? target : NULL_RTX),
6814 VOIDmode,
6815 (modifier == EXPAND_INITIALIZER
6816 || modifier == EXPAND_CONST_ADDRESS
6817 || modifier == EXPAND_STACK_PARM)
6818 ? modifier : EXPAND_NORMAL);
6820 /* If this is a constant, put it into a register if it is a
6821 legitimate constant and OFFSET is 0 and memory if it isn't. */
6822 if (CONSTANT_P (op0))
6824 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6825 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6826 && offset == 0)
6827 op0 = force_reg (mode, op0);
6828 else
6829 op0 = validize_mem (force_const_mem (mode, op0));
6832 /* Otherwise, if this object is not in memory and we either have an
6833 offset or a BLKmode result, put it there. This case can't occur in
6834 C, but can in Ada if we have unchecked conversion of an expression
6835 from a scalar type to an array or record type or for an
6836 ARRAY_RANGE_REF whose type is BLKmode. */
6837 else if (!MEM_P (op0)
6838 && (offset != 0
6839 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6841 tree nt = build_qualified_type (TREE_TYPE (tem),
6842 (TYPE_QUALS (TREE_TYPE (tem))
6843 | TYPE_QUAL_CONST));
6844 rtx memloc = assign_temp (nt, 1, 1, 1);
6846 emit_move_insn (memloc, op0);
6847 op0 = memloc;
6850 if (offset != 0)
6852 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6853 EXPAND_SUM);
6855 if (!MEM_P (op0))
6856 abort ();
6858 #ifdef POINTERS_EXTEND_UNSIGNED
6859 if (GET_MODE (offset_rtx) != Pmode)
6860 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6861 #else
6862 if (GET_MODE (offset_rtx) != ptr_mode)
6863 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6864 #endif
6866 if (GET_MODE (op0) == BLKmode
6867 /* A constant address in OP0 can have VOIDmode, we must
6868 not try to call force_reg in that case. */
6869 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6870 && bitsize != 0
6871 && (bitpos % bitsize) == 0
6872 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6873 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6875 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6876 bitpos = 0;
6879 op0 = offset_address (op0, offset_rtx,
6880 highest_pow2_factor (offset));
6883 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6884 record its alignment as BIGGEST_ALIGNMENT. */
6885 if (MEM_P (op0) && bitpos == 0 && offset != 0
6886 && is_aligning_offset (offset, tem))
6887 set_mem_align (op0, BIGGEST_ALIGNMENT);
6889 /* Don't forget about volatility even if this is a bitfield. */
6890 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6892 if (op0 == orig_op0)
6893 op0 = copy_rtx (op0);
6895 MEM_VOLATILE_P (op0) = 1;
6898 /* The following code doesn't handle CONCAT.
6899 Assume only bitpos == 0 can be used for CONCAT, due to
6900 one-element arrays having the same mode as their element. */
6901 if (GET_CODE (op0) == CONCAT)
6903 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6904 abort ();
6905 return op0;
6908 /* In cases where an aligned union has an unaligned object
6909 as a field, we might be extracting a BLKmode value from
6910 an integer-mode (e.g., SImode) object. Handle this case
6911 by doing the extract into an object as wide as the field
6912 (which we know to be the width of a basic mode), then
6913 storing into memory, and changing the mode to BLKmode. */
6914 if (mode1 == VOIDmode
6915 || REG_P (op0) || GET_CODE (op0) == SUBREG
6916 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6917 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6918 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6919 && modifier != EXPAND_CONST_ADDRESS
6920 && modifier != EXPAND_INITIALIZER)
6921 /* If the field isn't aligned enough to fetch as a memref,
6922 fetch it as a bit field. */
6923 || (mode1 != BLKmode
6924 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6925 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6926 || (MEM_P (op0)
6927 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6928 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6929 && ((modifier == EXPAND_CONST_ADDRESS
6930 || modifier == EXPAND_INITIALIZER)
6931 ? STRICT_ALIGNMENT
6932 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6933 || (bitpos % BITS_PER_UNIT != 0)))
6934 /* If the type and the field are a constant size and the
6935 size of the type isn't the same size as the bitfield,
6936 we must use bitfield operations. */
6937 || (bitsize >= 0
6938 && TYPE_SIZE (TREE_TYPE (exp))
6939 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6940 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6941 bitsize)))
6943 enum machine_mode ext_mode = mode;
6945 if (ext_mode == BLKmode
6946 && ! (target != 0 && MEM_P (op0)
6947 && MEM_P (target)
6948 && bitpos % BITS_PER_UNIT == 0))
6949 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6951 if (ext_mode == BLKmode)
6953 if (target == 0)
6954 target = assign_temp (type, 0, 1, 1);
6956 if (bitsize == 0)
6957 return target;
6959 /* In this case, BITPOS must start at a byte boundary and
6960 TARGET, if specified, must be a MEM. */
6961 if (!MEM_P (op0)
6962 || (target != 0 && !MEM_P (target))
6963 || bitpos % BITS_PER_UNIT != 0)
6964 abort ();
6966 emit_block_move (target,
6967 adjust_address (op0, VOIDmode,
6968 bitpos / BITS_PER_UNIT),
6969 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6970 / BITS_PER_UNIT),
6971 (modifier == EXPAND_STACK_PARM
6972 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
6974 return target;
6977 op0 = validize_mem (op0);
6979 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
6980 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6982 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
6983 (modifier == EXPAND_STACK_PARM
6984 ? NULL_RTX : target),
6985 ext_mode, ext_mode);
6987 /* If the result is a record type and BITSIZE is narrower than
6988 the mode of OP0, an integral mode, and this is a big endian
6989 machine, we must put the field into the high-order bits. */
6990 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6991 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6992 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6993 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6994 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6995 - bitsize),
6996 op0, 1);
6998 /* If the result type is BLKmode, store the data into a temporary
6999 of the appropriate type, but with the mode corresponding to the
7000 mode for the data we have (op0's mode). It's tempting to make
7001 this a constant type, since we know it's only being stored once,
7002 but that can cause problems if we are taking the address of this
7003 COMPONENT_REF because the MEM of any reference via that address
7004 will have flags corresponding to the type, which will not
7005 necessarily be constant. */
7006 if (mode == BLKmode)
7008 rtx new
7009 = assign_stack_temp_for_type
7010 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7012 emit_move_insn (new, op0);
7013 op0 = copy_rtx (new);
7014 PUT_MODE (op0, BLKmode);
7015 set_mem_attributes (op0, exp, 1);
7018 return op0;
7021 /* If the result is BLKmode, use that to access the object
7022 now as well. */
7023 if (mode == BLKmode)
7024 mode1 = BLKmode;
7026 /* Get a reference to just this component. */
7027 if (modifier == EXPAND_CONST_ADDRESS
7028 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7029 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7030 else
7031 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7033 if (op0 == orig_op0)
7034 op0 = copy_rtx (op0);
7036 set_mem_attributes (op0, exp, 0);
7037 if (REG_P (XEXP (op0, 0)))
7038 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7040 MEM_VOLATILE_P (op0) |= volatilep;
7041 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7042 || modifier == EXPAND_CONST_ADDRESS
7043 || modifier == EXPAND_INITIALIZER)
7044 return op0;
7045 else if (target == 0)
7046 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7048 convert_move (target, op0, unsignedp);
7049 return target;
7052 case OBJ_TYPE_REF:
7053 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7055 case CALL_EXPR:
7056 /* Check for a built-in function. */
7057 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7058 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7059 == FUNCTION_DECL)
7060 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7062 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7063 == BUILT_IN_FRONTEND)
7064 return lang_hooks.expand_expr (exp, original_target,
7065 tmode, modifier,
7066 alt_rtl);
7067 else
7068 return expand_builtin (exp, target, subtarget, tmode, ignore);
7071 return expand_call (exp, target, ignore);
7073 case NON_LVALUE_EXPR:
7074 case NOP_EXPR:
7075 case CONVERT_EXPR:
7076 if (TREE_OPERAND (exp, 0) == error_mark_node)
7077 return const0_rtx;
7079 if (TREE_CODE (type) == UNION_TYPE)
7081 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7083 /* If both input and output are BLKmode, this conversion isn't doing
7084 anything except possibly changing memory attribute. */
7085 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7087 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7088 modifier);
7090 result = copy_rtx (result);
7091 set_mem_attributes (result, exp, 0);
7092 return result;
7095 if (target == 0)
7097 if (TYPE_MODE (type) != BLKmode)
7098 target = gen_reg_rtx (TYPE_MODE (type));
7099 else
7100 target = assign_temp (type, 0, 1, 1);
7103 if (MEM_P (target))
7104 /* Store data into beginning of memory target. */
7105 store_expr (TREE_OPERAND (exp, 0),
7106 adjust_address (target, TYPE_MODE (valtype), 0),
7107 modifier == EXPAND_STACK_PARM ? 2 : 0);
7109 else if (REG_P (target))
7110 /* Store this field into a union of the proper type. */
7111 store_field (target,
7112 MIN ((int_size_in_bytes (TREE_TYPE
7113 (TREE_OPERAND (exp, 0)))
7114 * BITS_PER_UNIT),
7115 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7116 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7117 VOIDmode, 0, type, 0);
7118 else
7119 abort ();
7121 /* Return the entire union. */
7122 return target;
7125 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7127 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7128 modifier);
7130 /* If the signedness of the conversion differs and OP0 is
7131 a promoted SUBREG, clear that indication since we now
7132 have to do the proper extension. */
7133 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7134 && GET_CODE (op0) == SUBREG)
7135 SUBREG_PROMOTED_VAR_P (op0) = 0;
7137 return REDUCE_BIT_FIELD (op0);
7140 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7141 op0 = REDUCE_BIT_FIELD (op0);
7142 if (GET_MODE (op0) == mode)
7143 return op0;
7145 /* If OP0 is a constant, just convert it into the proper mode. */
7146 if (CONSTANT_P (op0))
7148 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7149 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7151 if (modifier == EXPAND_INITIALIZER)
7152 return simplify_gen_subreg (mode, op0, inner_mode,
7153 subreg_lowpart_offset (mode,
7154 inner_mode));
7155 else
7156 return convert_modes (mode, inner_mode, op0,
7157 TYPE_UNSIGNED (inner_type));
7160 if (modifier == EXPAND_INITIALIZER)
7161 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7163 if (target == 0)
7164 return
7165 convert_to_mode (mode, op0,
7166 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7167 else
7168 convert_move (target, op0,
7169 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7170 return target;
7172 case VIEW_CONVERT_EXPR:
7173 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7175 /* If the input and output modes are both the same, we are done.
7176 Otherwise, if neither mode is BLKmode and both are integral and within
7177 a word, we can use gen_lowpart. If neither is true, make sure the
7178 operand is in memory and convert the MEM to the new mode. */
7179 if (TYPE_MODE (type) == GET_MODE (op0))
7181 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7182 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7183 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7184 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7185 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7186 op0 = gen_lowpart (TYPE_MODE (type), op0);
7187 else if (!MEM_P (op0))
7189 /* If the operand is not a MEM, force it into memory. Since we
7190 are going to be changing the mode of the MEM, don't call
7191 force_const_mem for constants because we don't allow pool
7192 constants to change mode. */
7193 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7195 if (TREE_ADDRESSABLE (exp))
7196 abort ();
7198 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7199 target
7200 = assign_stack_temp_for_type
7201 (TYPE_MODE (inner_type),
7202 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7204 emit_move_insn (target, op0);
7205 op0 = target;
7208 /* At this point, OP0 is in the correct mode. If the output type is such
7209 that the operand is known to be aligned, indicate that it is.
7210 Otherwise, we need only be concerned about alignment for non-BLKmode
7211 results. */
7212 if (MEM_P (op0))
7214 op0 = copy_rtx (op0);
7216 if (TYPE_ALIGN_OK (type))
7217 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7218 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7219 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7221 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7222 HOST_WIDE_INT temp_size
7223 = MAX (int_size_in_bytes (inner_type),
7224 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7225 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7226 temp_size, 0, type);
7227 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7229 if (TREE_ADDRESSABLE (exp))
7230 abort ();
7232 if (GET_MODE (op0) == BLKmode)
7233 emit_block_move (new_with_op0_mode, op0,
7234 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7235 (modifier == EXPAND_STACK_PARM
7236 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7237 else
7238 emit_move_insn (new_with_op0_mode, op0);
7240 op0 = new;
7243 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7246 return op0;
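/* Illustrative example (editor's addition): for a VIEW_CONVERT_EXPR
   reinterpreting a `float' as a 32-bit `unsigned int', the operand
   mode (SFmode) is neither equal to the result mode (SImode) nor of
   class MODE_INT, so the code above spills the value to a stack
   temporary if it is not already in memory and simply re-types the MEM
   with adjust_address; the bits are untouched, only the view changes.
   When the two modes are identical the operand is returned essentially
   as-is.  */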
7248 case PLUS_EXPR:
7249 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7250 something else, make sure we add the register to the constant and
7251 then to the other thing. This case can occur during strength
7252 reduction and doing it this way will produce better code if the
7253 frame pointer or argument pointer is eliminated.
7255 fold-const.c will ensure that the constant is always in the inner
7256 PLUS_EXPR, so the only case we need to do anything about is if
7257 sp, ap, or fp is our second argument, in which case we must swap
7258 the innermost first argument and our second argument. */
7260 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7261 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7262 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7263 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7264 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7265 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7267 tree t = TREE_OPERAND (exp, 1);
7269 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7270 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7273 /* If the result is to be ptr_mode and we are adding an integer to
7274 something, we might be forming a constant. So try to use
7275 plus_constant. If it produces a sum and we can't accept it,
7276 use force_operand. This allows P = &ARR[const] to generate
7277 efficient code on machines where a SYMBOL_REF is not a valid
7278 address.
7280 If this is an EXPAND_SUM call, always return the sum. */
7281 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7282 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7284 if (modifier == EXPAND_STACK_PARM)
7285 target = 0;
7286 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7287 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7288 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7290 rtx constant_part;
7292 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7293 EXPAND_SUM);
7294 /* Use immed_double_const to ensure that the constant is
7295 truncated according to the mode of OP1, then sign extended
7296 to a HOST_WIDE_INT. Using the constant directly can result
7297 in non-canonical RTL in a 64x32 cross compile. */
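/* For example, on a 64-bit host targeting a 32-bit machine, an SImode
constant with its high bit set must be represented sign-extended,
e.g. as (const_int -1) rather than (const_int 0xffffffff). */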
7298 constant_part
7299 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7300 (HOST_WIDE_INT) 0,
7301 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7302 op1 = plus_constant (op1, INTVAL (constant_part));
7303 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7304 op1 = force_operand (op1, target);
7305 return REDUCE_BIT_FIELD (op1);
7308 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7309 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7310 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7312 rtx constant_part;
7314 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7315 (modifier == EXPAND_INITIALIZER
7316 ? EXPAND_INITIALIZER : EXPAND_SUM));
7317 if (! CONSTANT_P (op0))
7319 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7320 VOIDmode, modifier);
7321 /* Return a PLUS if modifier says it's OK. */
7322 if (modifier == EXPAND_SUM
7323 || modifier == EXPAND_INITIALIZER)
7324 return simplify_gen_binary (PLUS, mode, op0, op1);
7325 goto binop2;
7327 /* Use immed_double_const to ensure that the constant is
7328 truncated according to the mode of OP1, then sign extended
7329 to a HOST_WIDE_INT. Using the constant directly can result
7330 in non-canonical RTL in a 64x32 cross compile. */
7331 constant_part
7332 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7333 (HOST_WIDE_INT) 0,
7334 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7335 op0 = plus_constant (op0, INTVAL (constant_part));
7336 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7337 op0 = force_operand (op0, target);
7338 return REDUCE_BIT_FIELD (op0);
7342 /* No sense saving up arithmetic to be done
7343 if it's all in the wrong mode to form part of an address.
7344 And force_operand won't know whether to sign-extend or
7345 zero-extend. */
7346 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7347 || mode != ptr_mode)
7349 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7350 subtarget, &op0, &op1, 0);
7351 if (op0 == const0_rtx)
7352 return op1;
7353 if (op1 == const0_rtx)
7354 return op0;
7355 goto binop2;
7358 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7359 subtarget, &op0, &op1, modifier);
7360 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7362 case MINUS_EXPR:
7363 /* For initializers, we are allowed to return a MINUS of two
7364 symbolic constants, so handle here all cases where both
7365 operands are constant. */
7368 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7369 && really_constant_p (TREE_OPERAND (exp, 0))
7370 && really_constant_p (TREE_OPERAND (exp, 1)))
7372 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7373 NULL_RTX, &op0, &op1, modifier);
7375 /* If the last operand is a CONST_INT, use plus_constant of
7376 the negated constant. Else make the MINUS. */
7377 if (GET_CODE (op1) == CONST_INT)
7378 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7379 else
7380 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7383 /* No sense saving up arithmetic to be done
7384 if it's all in the wrong mode to form part of an address.
7385 And force_operand won't know whether to sign-extend or
7386 zero-extend. */
7387 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7388 || mode != ptr_mode)
7389 goto binop;
7391 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7392 subtarget, &op0, &op1, modifier);
7394 /* Convert A - const to A + (-const). */
7395 if (GET_CODE (op1) == CONST_INT)
7397 op1 = negate_rtx (mode, op1);
7398 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7401 goto binop2;
7403 case MULT_EXPR:
7404 /* If first operand is constant, swap them.
7405 Thus the following special case checks need only
7406 check the second operand. */
7407 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7409 tree t1 = TREE_OPERAND (exp, 0);
7410 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7411 TREE_OPERAND (exp, 1) = t1;
7414 /* Attempt to return something suitable for generating an
7415 indexed address, for machines that support that. */
7417 if (modifier == EXPAND_SUM && mode == ptr_mode
7418 && host_integerp (TREE_OPERAND (exp, 1), 0))
7420 tree exp1 = TREE_OPERAND (exp, 1);
7422 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7423 EXPAND_SUM);
7425 if (!REG_P (op0))
7426 op0 = force_operand (op0, NULL_RTX);
7427 if (!REG_P (op0))
7428 op0 = copy_to_mode_reg (mode, op0);
7430 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7431 gen_int_mode (tree_low_cst (exp1, 0),
7432 TYPE_MODE (TREE_TYPE (exp1)))));
7435 if (modifier == EXPAND_STACK_PARM)
7436 target = 0;
7438 /* Check for multiplying things that have been extended
7439 from a narrower type. If this machine supports multiplying
7440 in that narrower type with a result in the desired type,
7441 do it that way, and avoid the explicit type-conversion. */
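/* For example, (int) (short) a * (int) (short) b can be done as a
single HImode x HImode -> SImode widening multiply on a target that
provides one, instead of extending both operands to SImode first. */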
7442 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7443 && TREE_CODE (type) == INTEGER_TYPE
7444 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7445 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7446 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7447 && int_fits_type_p (TREE_OPERAND (exp, 1),
7448 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7449 /* Don't use a widening multiply if a shift will do. */
7450 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7451 > HOST_BITS_PER_WIDE_INT)
7452 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7453 ||
7454 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7455 && (TYPE_PRECISION (TREE_TYPE
7456 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7457 == TYPE_PRECISION (TREE_TYPE
7458 (TREE_OPERAND
7459 (TREE_OPERAND (exp, 0), 0))))
7460 /* If both operands are extended, they must either both
7461 be zero-extended or both be sign-extended. */
7462 && (TYPE_UNSIGNED (TREE_TYPE
7463 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7464 == TYPE_UNSIGNED (TREE_TYPE
7465 (TREE_OPERAND
7466 (TREE_OPERAND (exp, 0), 0)))))))
7468 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7469 enum machine_mode innermode = TYPE_MODE (op0type);
7470 bool zextend_p = TYPE_UNSIGNED (op0type);
7471 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7472 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7474 if (mode == GET_MODE_WIDER_MODE (innermode))
7476 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7478 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7479 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7480 TREE_OPERAND (exp, 1),
7481 NULL_RTX, &op0, &op1, 0);
7482 else
7483 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7484 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7485 NULL_RTX, &op0, &op1, 0);
7486 goto binop3;
7488 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7489 && innermode == word_mode)
7491 rtx htem, hipart;
7492 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7493 NULL_RTX, VOIDmode, 0);
7494 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7495 op1 = convert_modes (innermode, mode,
7496 expand_expr (TREE_OPERAND (exp, 1),
7497 NULL_RTX, VOIDmode, 0),
7498 unsignedp);
7499 else
7500 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7501 NULL_RTX, VOIDmode, 0);
7502 temp = expand_binop (mode, other_optab, op0, op1, target,
7503 unsignedp, OPTAB_LIB_WIDEN);
7504 hipart = gen_highpart (innermode, temp);
7505 htem = expand_mult_highpart_adjust (innermode, hipart,
7506 op0, op1, hipart,
7507 zextend_p);
7508 if (htem != hipart)
7509 emit_move_insn (hipart, htem);
7510 return REDUCE_BIT_FIELD (temp);
7514 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7515 subtarget, &op0, &op1, 0);
7516 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7518 case TRUNC_DIV_EXPR:
7519 case FLOOR_DIV_EXPR:
7520 case CEIL_DIV_EXPR:
7521 case ROUND_DIV_EXPR:
7522 case EXACT_DIV_EXPR:
7523 if (modifier == EXPAND_STACK_PARM)
7524 target = 0;
7525 /* Possible optimization: compute the dividend with EXPAND_SUM;
7526 then, if the divisor is constant, we can optimize the case
7527 where some terms of the dividend have coefficients divisible by it. */
7528 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7529 subtarget, &op0, &op1, 0);
7530 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7532 case RDIV_EXPR:
7533 /* Emit a/b as a*(1/b). Later, CSE may manage to share the reciprocal,
7534 saving an expensive divide. If not, combine will rebuild the original
7535 computation. */
7536 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7537 && TREE_CODE (type) == REAL_TYPE
7538 && !real_onep (TREE_OPERAND (exp, 0)))
7539 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7540 build2 (RDIV_EXPR, type,
7541 build_real (type, dconst1),
7542 TREE_OPERAND (exp, 1))),
7543 target, tmode, modifier);
7545 goto binop;
7547 case TRUNC_MOD_EXPR:
7548 case FLOOR_MOD_EXPR:
7549 case CEIL_MOD_EXPR:
7550 case ROUND_MOD_EXPR:
7551 if (modifier == EXPAND_STACK_PARM)
7552 target = 0;
7553 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7554 subtarget, &op0, &op1, 0);
7555 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7557 case FIX_ROUND_EXPR:
7558 case FIX_FLOOR_EXPR:
7559 case FIX_CEIL_EXPR:
7560 abort (); /* Not used for C. */
7562 case FIX_TRUNC_EXPR:
7563 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7564 if (target == 0 || modifier == EXPAND_STACK_PARM)
7565 target = gen_reg_rtx (mode);
7566 expand_fix (target, op0, unsignedp);
7567 return target;
7569 case FLOAT_EXPR:
7570 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7571 if (target == 0 || modifier == EXPAND_STACK_PARM)
7572 target = gen_reg_rtx (mode);
7573 /* expand_float can't figure out what to do if FROM has VOIDmode.
7574 So give it the correct mode. With -O, cse will optimize this. */
7575 if (GET_MODE (op0) == VOIDmode)
7576 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7577 op0);
7578 expand_float (target, op0,
7579 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7580 return target;
7582 case NEGATE_EXPR:
7583 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7584 if (modifier == EXPAND_STACK_PARM)
7585 target = 0;
7586 temp = expand_unop (mode,
7587 optab_for_tree_code (NEGATE_EXPR, type),
7588 op0, target, 0);
7589 if (temp == 0)
7590 abort ();
7591 return REDUCE_BIT_FIELD (temp);
7593 case ABS_EXPR:
7594 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7595 if (modifier == EXPAND_STACK_PARM)
7596 target = 0;
7598 /* ABS_EXPR is not valid for complex arguments. */
7599 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7600 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7601 abort ();
7603 /* Unsigned abs is simply the operand. Testing here means we don't
7604 risk generating incorrect code below. */
7605 if (TYPE_UNSIGNED (type))
7606 return op0;
7608 return expand_abs (mode, op0, target, unsignedp,
7609 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7611 case MAX_EXPR:
7612 case MIN_EXPR:
7613 target = original_target;
7614 if (target == 0
7615 || modifier == EXPAND_STACK_PARM
7616 || (MEM_P (target) && MEM_VOLATILE_P (target))
7617 || GET_MODE (target) != mode
7618 || (REG_P (target)
7619 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7620 target = gen_reg_rtx (mode);
7621 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7622 target, &op0, &op1, 0);
7624 /* First try to do it with a special MIN or MAX instruction.
7625 If that does not win, use a conditional jump to select the proper
7626 value. */
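/* Roughly, the fallback sequence emitted below is:
     target = op0;
     if (target >= op1) goto done;   (<= for MIN_EXPR)
     target = op1;
   done:  */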
7627 this_optab = optab_for_tree_code (code, type);
7628 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7629 OPTAB_WIDEN);
7630 if (temp != 0)
7631 return temp;
7633 /* At this point, a MEM target is no longer useful; we will get better
7634 code without it. */
7636 if (MEM_P (target))
7637 target = gen_reg_rtx (mode);
7639 /* If op1 was placed in target, swap op0 and op1. */
7640 if (target != op0 && target == op1)
7642 rtx tem = op0;
7643 op0 = op1;
7644 op1 = tem;
7647 if (target != op0)
7648 emit_move_insn (target, op0);
7650 op0 = gen_label_rtx ();
7652 /* If this mode is an integer too wide to compare properly,
7653 compare word by word. Rely on cse to optimize constant cases. */
7654 if (GET_MODE_CLASS (mode) == MODE_INT
7655 && ! can_compare_p (GE, mode, ccp_jump))
7657 if (code == MAX_EXPR)
7658 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7659 NULL_RTX, op0);
7660 else
7661 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7662 NULL_RTX, op0);
7664 else
7666 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7667 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7669 emit_move_insn (target, op1);
7670 emit_label (op0);
7671 return target;
7673 case BIT_NOT_EXPR:
7674 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7675 if (modifier == EXPAND_STACK_PARM)
7676 target = 0;
7677 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7678 if (temp == 0)
7679 abort ();
7680 return temp;
7682 /* ??? Can optimize bitwise operations with one arg constant.
7683 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7684 and (a bitwise1 b) bitwise2 b (etc)
7685 but that is probably not worth while. */
7687 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7688 boolean values when we want in all cases to compute both of them. In
7689 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7690 as actual zero-or-1 values and then bitwise anding. In cases where
7691 there cannot be any side effects, better code would be made by
7692 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7693 how to recognize those cases. */
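/* For example, a front end may use TRUTH_AND_EXPR for a logical "and"
whose operands have no side effects; both operands are then expanded
as 0-or-1 values and combined with a bitwise AND, while the
short-circuit form is TRUTH_ANDIF_EXPR. */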
7695 case TRUTH_AND_EXPR:
7696 code = BIT_AND_EXPR;
7697 case BIT_AND_EXPR:
7698 goto binop;
7700 case TRUTH_OR_EXPR:
7701 code = BIT_IOR_EXPR;
7702 case BIT_IOR_EXPR:
7703 goto binop;
7705 case TRUTH_XOR_EXPR:
7706 code = BIT_XOR_EXPR;
7707 case BIT_XOR_EXPR:
7708 goto binop;
7710 case LSHIFT_EXPR:
7711 case RSHIFT_EXPR:
7712 case LROTATE_EXPR:
7713 case RROTATE_EXPR:
7714 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7715 subtarget = 0;
7716 if (modifier == EXPAND_STACK_PARM)
7717 target = 0;
7718 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7719 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7720 unsignedp);
7722 /* Could determine the answer when only additive constants differ. Also,
7723 the addition of one can be handled by changing the condition. */
7724 case LT_EXPR:
7725 case LE_EXPR:
7726 case GT_EXPR:
7727 case GE_EXPR:
7728 case EQ_EXPR:
7729 case NE_EXPR:
7730 case UNORDERED_EXPR:
7731 case ORDERED_EXPR:
7732 case UNLT_EXPR:
7733 case UNLE_EXPR:
7734 case UNGT_EXPR:
7735 case UNGE_EXPR:
7736 case UNEQ_EXPR:
7737 case LTGT_EXPR:
7738 temp = do_store_flag (exp,
7739 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7740 tmode != VOIDmode ? tmode : mode, 0);
7741 if (temp != 0)
7742 return temp;
7744 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7745 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7746 && original_target
7747 && REG_P (original_target)
7748 && (GET_MODE (original_target)
7749 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7751 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7752 VOIDmode, 0);
7754 /* If temp is constant, we can just compute the result. */
7755 if (GET_CODE (temp) == CONST_INT)
7757 if (INTVAL (temp) != 0)
7758 emit_move_insn (target, const1_rtx);
7759 else
7760 emit_move_insn (target, const0_rtx);
7762 return target;
7765 if (temp != original_target)
7767 enum machine_mode mode1 = GET_MODE (temp);
7768 if (mode1 == VOIDmode)
7769 mode1 = tmode != VOIDmode ? tmode : mode;
7771 temp = copy_to_mode_reg (mode1, temp);
7774 op1 = gen_label_rtx ();
7775 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7776 GET_MODE (temp), unsignedp, op1);
7777 emit_move_insn (temp, const1_rtx);
7778 emit_label (op1);
7779 return temp;
7782 /* If no set-flag instruction, must generate a conditional store
7783 into a temporary variable. Drop through and handle this
7784 like && and ||. */
7786 if (! ignore
7787 && (target == 0
7788 || modifier == EXPAND_STACK_PARM
7789 || ! safe_from_p (target, exp, 1)
7790 /* Make sure we don't have a hard reg (such as function's return
7791 value) live across basic blocks, if not optimizing. */
7792 || (!optimize && REG_P (target)
7793 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7794 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7796 if (target)
7797 emit_move_insn (target, const0_rtx);
7799 op1 = gen_label_rtx ();
7800 jumpifnot (exp, op1);
7802 if (target)
7803 emit_move_insn (target, const1_rtx);
7805 emit_label (op1);
7806 return ignore ? const0_rtx : target;
7808 case TRUTH_NOT_EXPR:
7809 if (modifier == EXPAND_STACK_PARM)
7810 target = 0;
7811 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7812 /* The parser is careful to generate TRUTH_NOT_EXPR
7813 only with operands that are always zero or one. */
7814 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7815 target, 1, OPTAB_LIB_WIDEN);
7816 if (temp == 0)
7817 abort ();
7818 return temp;
7820 case STATEMENT_LIST:
7822 tree_stmt_iterator iter;
7824 if (!ignore)
7825 abort ();
7827 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7828 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7830 return const0_rtx;
7832 case COND_EXPR:
7833 /* If it's void, we don't need to worry about computing a value. */
7834 if (VOID_TYPE_P (TREE_TYPE (exp)))
7836 tree pred = TREE_OPERAND (exp, 0);
7837 tree then_ = TREE_OPERAND (exp, 1);
7838 tree else_ = TREE_OPERAND (exp, 2);
7840 if (TREE_CODE (then_) != GOTO_EXPR
7841 || TREE_CODE (GOTO_DESTINATION (then_)) != LABEL_DECL
7842 || TREE_CODE (else_) != GOTO_EXPR
7843 || TREE_CODE (GOTO_DESTINATION (else_)) != LABEL_DECL)
7844 abort ();
7846 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7847 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7850 /* Note that COND_EXPRs whose type is a structure or union
7851 are required to be constructed to contain assignments of
7852 a temporary variable, so that we can evaluate them here
7853 for side effect only. If type is void, we must do likewise. */
7855 if (TREE_ADDRESSABLE (type)
7856 || ignore
7857 || TREE_TYPE (TREE_OPERAND (exp, 1)) == void_type_node
7858 || TREE_TYPE (TREE_OPERAND (exp, 2)) == void_type_node)
7859 abort ();
7861 /* If we are not to produce a result, we have no target. Otherwise,
7862 if a target was specified use it; it will not be used as an
7863 intermediate target unless it is safe. If no target, use a
7864 temporary. */
7866 if (modifier != EXPAND_STACK_PARM
7867 && original_target
7868 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7869 && GET_MODE (original_target) == mode
7870 #ifdef HAVE_conditional_move
7871 && (! can_conditionally_move_p (mode)
7872 || REG_P (original_target))
7873 #endif
7874 && !MEM_P (original_target))
7875 temp = original_target;
7876 else
7877 temp = assign_temp (type, 0, 0, 1);
7879 do_pending_stack_adjust ();
7880 NO_DEFER_POP;
7881 op0 = gen_label_rtx ();
7882 op1 = gen_label_rtx ();
7883 jumpifnot (TREE_OPERAND (exp, 0), op0);
7884 store_expr (TREE_OPERAND (exp, 1), temp,
7885 modifier == EXPAND_STACK_PARM ? 2 : 0);
7887 emit_jump_insn (gen_jump (op1));
7888 emit_barrier ();
7889 emit_label (op0);
7890 store_expr (TREE_OPERAND (exp, 2), temp,
7891 modifier == EXPAND_STACK_PARM ? 2 : 0);
7893 emit_label (op1);
7894 OK_DEFER_POP;
7895 return temp;
7897 case MODIFY_EXPR:
7899 /* If lhs is complex, expand calls in rhs before computing it.
7900 That's so we don't compute a pointer and save it over a
7901 call. If lhs is simple, compute it first so we can give it
7902 as a target if the rhs is just a call. This avoids an
7903 extra temp and copy and that prevents a partial-subsumption
7904 which makes bad code. Actually we could treat
7905 component_ref's of vars like vars. */
7907 tree lhs = TREE_OPERAND (exp, 0);
7908 tree rhs = TREE_OPERAND (exp, 1);
7910 temp = 0;
7912 /* Check for |= or &= of a bitfield of size one into another bitfield
7913 of size 1. In this case, (unless we need the result of the
7914 assignment) we can do this more efficiently with a
7915 test followed by an assignment, if necessary.
7917 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7918 things change so we do, this code should be enhanced to
7919 support it. */
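/* For example, for "x.a |= x.b" with one-bit fields and the result
unused, we jump around the store when x.b is zero and otherwise store
1 into x.a; for "&=" the store of 0 happens only when x.b is zero. */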
7920 if (ignore
7921 && TREE_CODE (lhs) == COMPONENT_REF
7922 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7923 || TREE_CODE (rhs) == BIT_AND_EXPR)
7924 && TREE_OPERAND (rhs, 0) == lhs
7925 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7926 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7927 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7929 rtx label = gen_label_rtx ();
7931 do_jump (TREE_OPERAND (rhs, 1),
7932 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7933 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7934 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7935 (TREE_CODE (rhs) == BIT_IOR_EXPR
7936 ? integer_one_node
7937 : integer_zero_node)),
7939 do_pending_stack_adjust ();
7940 emit_label (label);
7941 return const0_rtx;
7944 temp = expand_assignment (lhs, rhs, ! ignore);
7946 return temp;
7949 case RETURN_EXPR:
7950 if (!TREE_OPERAND (exp, 0))
7951 expand_null_return ();
7952 else
7953 expand_return (TREE_OPERAND (exp, 0));
7954 return const0_rtx;
7956 case ADDR_EXPR:
7957 if (modifier == EXPAND_STACK_PARM)
7958 target = 0;
7959 /* If we are taking the address of something erroneous, just
7960 return a zero. */
7961 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7962 return const0_rtx;
7963 /* If we are taking the address of a constant and are at the
7964 top level, we have to use output_constant_def since we can't
7965 call force_const_mem at top level. */
7966 else if (cfun == 0
7967 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7968 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
7969 == 'c')))
7970 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
7971 else
7973 /* We make sure to pass const0_rtx down if we came in with
7974 ignore set, to avoid doing the cleanups twice for something. */
7975 op0 = expand_expr (TREE_OPERAND (exp, 0),
7976 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7977 (modifier == EXPAND_INITIALIZER
7978 ? modifier : EXPAND_CONST_ADDRESS));
7980 /* If we are going to ignore the result, OP0 will have been set
7981 to const0_rtx, so just return it. Don't get confused and
7982 think we are taking the address of the constant. */
7983 if (ignore)
7984 return op0;
7986 /* We would like the object in memory. If it is a constant, we can
7987 have it be statically allocated into memory. For a non-constant,
7988 we need to allocate some memory and store the value into it. */
7990 if (CONSTANT_P (op0))
7991 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7992 op0);
7993 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
7994 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
7995 || GET_CODE (op0) == LO_SUM)
7997 /* If this object is in a register, it can't be BLKmode. */
7998 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7999 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8001 if (GET_CODE (op0) == PARALLEL)
8002 /* Handle calls that pass values in multiple
8003 non-contiguous locations. The Irix 6 ABI has examples
8004 of this. */
8005 emit_group_store (memloc, op0, inner_type,
8006 int_size_in_bytes (inner_type));
8007 else
8008 emit_move_insn (memloc, op0);
8010 op0 = memloc;
8013 if (!MEM_P (op0))
8014 abort ();
8016 mark_temp_addr_taken (op0);
8017 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8019 op0 = XEXP (op0, 0);
8020 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8021 op0 = convert_memory_address (ptr_mode, op0);
8022 return op0;
8025 /* If OP0 is not aligned at least as much as the type requires, we
8026 need to make a temporary, copy OP0 to it, and take the address of
8027 the temporary. We want to use the alignment of the type, not of
8028 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8029 the test for BLKmode means that can't happen. The test for
8030 BLKmode is because we never make mis-aligned MEMs with
8031 non-BLKmode.
8033 We don't need to do this at all if the machine doesn't have
8034 strict alignment. */
8035 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8036 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8037 > MEM_ALIGN (op0))
8038 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8040 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8041 rtx new;
8043 if (TYPE_ALIGN_OK (inner_type))
8044 abort ();
8046 if (TREE_ADDRESSABLE (inner_type))
8048 /* We can't make a bitwise copy of this object, so fail. */
8049 error ("cannot take the address of an unaligned member");
8050 return const0_rtx;
8053 new = assign_stack_temp_for_type
8054 (TYPE_MODE (inner_type),
8055 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8056 : int_size_in_bytes (inner_type),
8057 1, build_qualified_type (inner_type,
8058 (TYPE_QUALS (inner_type)
8059 | TYPE_QUAL_CONST)));
8061 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8062 (modifier == EXPAND_STACK_PARM
8063 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8065 op0 = new;
8068 op0 = force_operand (XEXP (op0, 0), target);
8071 if (flag_force_addr
8072 && !REG_P (op0)
8073 && modifier != EXPAND_CONST_ADDRESS
8074 && modifier != EXPAND_INITIALIZER
8075 && modifier != EXPAND_SUM)
8076 op0 = force_reg (Pmode, op0);
8078 if (REG_P (op0)
8079 && ! REG_USERVAR_P (op0))
8080 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8082 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8083 op0 = convert_memory_address (ptr_mode, op0);
8085 return op0;
8087 /* COMPLEX type for Extended Pascal & Fortran */
8088 case COMPLEX_EXPR:
8090 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8091 rtx insns;
8093 /* Get the rtx code of the operands. */
8094 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8095 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8097 if (! target)
8098 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8100 start_sequence ();
8102 /* Move the real (op0) and imaginary (op1) parts to their location. */
8103 emit_move_insn (gen_realpart (mode, target), op0);
8104 emit_move_insn (gen_imagpart (mode, target), op1);
8106 insns = get_insns ();
8107 end_sequence ();
8109 /* Complex construction should appear as a single unit. */
8110 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8111 each with a separate pseudo as destination.
8112 It's not correct for flow to treat them as a unit. */
8113 if (GET_CODE (target) != CONCAT)
8114 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8115 else
8116 emit_insn (insns);
8118 return target;
8121 case REALPART_EXPR:
8122 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8123 return gen_realpart (mode, op0);
8125 case IMAGPART_EXPR:
8126 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8127 return gen_imagpart (mode, op0);
8129 case RESX_EXPR:
8130 expand_resx_expr (exp);
8131 return const0_rtx;
8133 case TRY_CATCH_EXPR:
8134 case CATCH_EXPR:
8135 case EH_FILTER_EXPR:
8136 case TRY_FINALLY_EXPR:
8137 /* Lowered by tree-eh.c. */
8138 abort ();
8140 case WITH_CLEANUP_EXPR:
8141 case CLEANUP_POINT_EXPR:
8142 case TARGET_EXPR:
8143 case CASE_LABEL_EXPR:
8144 case VA_ARG_EXPR:
8145 case BIND_EXPR:
8146 case INIT_EXPR:
8147 case CONJ_EXPR:
8148 case COMPOUND_EXPR:
8149 case PREINCREMENT_EXPR:
8150 case PREDECREMENT_EXPR:
8151 case POSTINCREMENT_EXPR:
8152 case POSTDECREMENT_EXPR:
8153 case LOOP_EXPR:
8154 case EXIT_EXPR:
8155 case LABELED_BLOCK_EXPR:
8156 case EXIT_BLOCK_EXPR:
8157 case TRUTH_ANDIF_EXPR:
8158 case TRUTH_ORIF_EXPR:
8159 /* Lowered by gimplify.c. */
8160 abort ();
8162 case EXC_PTR_EXPR:
8163 return get_exception_pointer (cfun);
8165 case FILTER_EXPR:
8166 return get_exception_filter (cfun);
8168 case FDESC_EXPR:
8169 /* Function descriptors are not valid except for as
8170 initialization constants, and should not be expanded. */
8171 abort ();
8173 case SWITCH_EXPR:
8174 expand_case (exp);
8175 return const0_rtx;
8177 case LABEL_EXPR:
8178 expand_label (TREE_OPERAND (exp, 0));
8179 return const0_rtx;
8181 case ASM_EXPR:
8182 expand_asm_expr (exp);
8183 return const0_rtx;
8185 case WITH_SIZE_EXPR:
8186 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8187 have pulled out the size to use in whatever context it needed. */
8188 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8189 modifier, alt_rtl);
8191 default:
8192 return lang_hooks.expand_expr (exp, original_target, tmode,
8193 modifier, alt_rtl);
8196 /* Here to do an ordinary binary operator. */
8197 binop:
8198 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8199 subtarget, &op0, &op1, 0);
8200 binop2:
8201 this_optab = optab_for_tree_code (code, type);
8202 binop3:
8203 if (modifier == EXPAND_STACK_PARM)
8204 target = 0;
8205 temp = expand_binop (mode, this_optab, op0, op1, target,
8206 unsignedp, OPTAB_LIB_WIDEN);
8207 if (temp == 0)
8208 abort ();
8209 return REDUCE_BIT_FIELD (temp);
8211 #undef REDUCE_BIT_FIELD
8213 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8214 signedness of TYPE), possibly returning the result in TARGET. */
8215 static rtx
8216 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8218 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8219 if (target && GET_MODE (target) != GET_MODE (exp))
8220 target = 0;
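/* As a summary of the two branches below: for an unsigned TYPE we
simply mask EXP with (1 << prec) - 1; for a signed TYPE we shift left
so that bit PREC - 1 becomes the sign bit of the mode and then
arithmetic-shift back, sign-extending the field. */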
8221 if (TYPE_UNSIGNED (type))
8223 rtx mask;
8224 if (prec < HOST_BITS_PER_WIDE_INT)
8225 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8226 GET_MODE (exp));
8227 else
8228 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8229 ((unsigned HOST_WIDE_INT) 1
8230 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8231 GET_MODE (exp));
8232 return expand_and (GET_MODE (exp), exp, mask, target);
8234 else
8236 tree count = build_int_cst (NULL_TREE,
8237 GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8238 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8239 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8243 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8244 when applied to the address of EXP produces an address known to be
8245 aligned more than BIGGEST_ALIGNMENT. */
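/* After stripping conversions, the form we look for is roughly
((- (sizetype) &EXP) & MASK), where MASK + 1 is a power of two and
MASK is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT; that is, the
usual expression for the number of bytes needed to round &EXP up to
such an alignment boundary. */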
8247 static int
8248 is_aligning_offset (tree offset, tree exp)
8250 /* Strip off any conversions. */
8251 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8252 || TREE_CODE (offset) == NOP_EXPR
8253 || TREE_CODE (offset) == CONVERT_EXPR)
8254 offset = TREE_OPERAND (offset, 0);
8256 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8257 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
8258 if (TREE_CODE (offset) != BIT_AND_EXPR
8259 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8260 || compare_tree_int (TREE_OPERAND (offset, 1),
8261 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8262 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8263 return 0;
8265 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8266 It must be NEGATE_EXPR. Then strip any more conversions. */
8267 offset = TREE_OPERAND (offset, 0);
8268 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8269 || TREE_CODE (offset) == NOP_EXPR
8270 || TREE_CODE (offset) == CONVERT_EXPR)
8271 offset = TREE_OPERAND (offset, 0);
8273 if (TREE_CODE (offset) != NEGATE_EXPR)
8274 return 0;
8276 offset = TREE_OPERAND (offset, 0);
8277 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8278 || TREE_CODE (offset) == NOP_EXPR
8279 || TREE_CODE (offset) == CONVERT_EXPR)
8280 offset = TREE_OPERAND (offset, 0);
8282 /* This must now be the address of EXP. */
8283 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8286 /* Return the tree node if an ARG corresponds to a string constant or zero
8287 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8288 in bytes within the string that ARG is accessing. The type of the
8289 offset will be `sizetype'. */
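/* For example, given ARG == &"hello"[2] or ARG == "hello" + 2, we would
return the STRING_CST for "hello" and set *PTR_OFFSET to a sizetype 2. */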
8291 tree
8292 string_constant (tree arg, tree *ptr_offset)
8294 STRIP_NOPS (arg);
8296 if (TREE_CODE (arg) == ADDR_EXPR
8297 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8299 *ptr_offset = size_zero_node;
8300 return TREE_OPERAND (arg, 0);
8302 if (TREE_CODE (arg) == ADDR_EXPR
8303 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
8304 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
8306 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
8307 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8309 else if (TREE_CODE (arg) == PLUS_EXPR)
8311 tree arg0 = TREE_OPERAND (arg, 0);
8312 tree arg1 = TREE_OPERAND (arg, 1);
8314 STRIP_NOPS (arg0);
8315 STRIP_NOPS (arg1);
8317 if (TREE_CODE (arg0) == ADDR_EXPR
8318 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8320 *ptr_offset = convert (sizetype, arg1);
8321 return TREE_OPERAND (arg0, 0);
8323 else if (TREE_CODE (arg1) == ADDR_EXPR
8324 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8326 *ptr_offset = convert (sizetype, arg0);
8327 return TREE_OPERAND (arg1, 0);
8331 return 0;
8334 /* Generate code to calculate EXP using a store-flag instruction
8335 and return an rtx for the result. EXP is either a comparison
8336 or a TRUTH_NOT_EXPR whose operand is a comparison.
8338 If TARGET is nonzero, store the result there if convenient.
8340 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8341 cheap.
8343 Return zero if there is no suitable set-flag instruction
8344 available on this machine.
8346 Once expand_expr has been called on the arguments of the comparison,
8347 we are committed to doing the store flag, since it is not safe to
8348 re-evaluate the expression. We emit the store-flag insn by calling
8349 emit_store_flag, but only expand the arguments if we have a reason
8350 to believe that emit_store_flag will be successful. If we think that
8351 it will, but it isn't, we have to simulate the store-flag with a
8352 set/jump/set sequence. */
8354 static rtx
8355 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8357 enum rtx_code code;
8358 tree arg0, arg1, type;
8359 tree tem;
8360 enum machine_mode operand_mode;
8361 int invert = 0;
8362 int unsignedp;
8363 rtx op0, op1;
8364 enum insn_code icode;
8365 rtx subtarget = target;
8366 rtx result, label;
8368 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8369 result at the end. We can't simply invert the test since it would
8370 have already been inverted if it were valid. This case occurs for
8371 some floating-point comparisons. */
8373 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8374 invert = 1, exp = TREE_OPERAND (exp, 0);
8376 arg0 = TREE_OPERAND (exp, 0);
8377 arg1 = TREE_OPERAND (exp, 1);
8379 /* Don't crash if the comparison was erroneous. */
8380 if (arg0 == error_mark_node || arg1 == error_mark_node)
8381 return const0_rtx;
8383 type = TREE_TYPE (arg0);
8384 operand_mode = TYPE_MODE (type);
8385 unsignedp = TYPE_UNSIGNED (type);
8387 /* We won't bother with BLKmode store-flag operations because it would mean
8388 passing a lot of information to emit_store_flag. */
8389 if (operand_mode == BLKmode)
8390 return 0;
8392 /* We won't bother with store-flag operations involving function pointers
8393 when function pointers must be canonicalized before comparisons. */
8394 #ifdef HAVE_canonicalize_funcptr_for_compare
8395 if (HAVE_canonicalize_funcptr_for_compare
8396 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8397 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8398 == FUNCTION_TYPE))
8399 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8400 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8401 == FUNCTION_TYPE))))
8402 return 0;
8403 #endif
8405 STRIP_NOPS (arg0);
8406 STRIP_NOPS (arg1);
8408 /* Get the rtx comparison code to use. We know that EXP is a comparison
8409 operation of some type. Some comparisons against 1 and -1 can be
8410 converted to comparisons with zero. Do so here so that the tests
8411 below will be aware that we have a comparison with zero. These
8412 tests will not catch constants in the first operand, but constants
8413 are rarely passed as the first operand. */
8415 switch (TREE_CODE (exp))
8417 case EQ_EXPR:
8418 code = EQ;
8419 break;
8420 case NE_EXPR:
8421 code = NE;
8422 break;
8423 case LT_EXPR:
8424 if (integer_onep (arg1))
8425 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8426 else
8427 code = unsignedp ? LTU : LT;
8428 break;
8429 case LE_EXPR:
8430 if (! unsignedp && integer_all_onesp (arg1))
8431 arg1 = integer_zero_node, code = LT;
8432 else
8433 code = unsignedp ? LEU : LE;
8434 break;
8435 case GT_EXPR:
8436 if (! unsignedp && integer_all_onesp (arg1))
8437 arg1 = integer_zero_node, code = GE;
8438 else
8439 code = unsignedp ? GTU : GT;
8440 break;
8441 case GE_EXPR:
8442 if (integer_onep (arg1))
8443 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8444 else
8445 code = unsignedp ? GEU : GE;
8446 break;
8448 case UNORDERED_EXPR:
8449 code = UNORDERED;
8450 break;
8451 case ORDERED_EXPR:
8452 code = ORDERED;
8453 break;
8454 case UNLT_EXPR:
8455 code = UNLT;
8456 break;
8457 case UNLE_EXPR:
8458 code = UNLE;
8459 break;
8460 case UNGT_EXPR:
8461 code = UNGT;
8462 break;
8463 case UNGE_EXPR:
8464 code = UNGE;
8465 break;
8466 case UNEQ_EXPR:
8467 code = UNEQ;
8468 break;
8469 case LTGT_EXPR:
8470 code = LTGT;
8471 break;
8473 default:
8474 abort ();
8477 /* Put a constant second. */
8478 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8480 tem = arg0; arg0 = arg1; arg1 = tem;
8481 code = swap_condition (code);
8484 /* If this is an equality or inequality test of a single bit, we can
8485 do this by shifting the bit being tested to the low-order bit and
8486 masking the result with the constant 1. If the condition was EQ,
8487 we xor it with 1. This does not require an scc insn and is faster
8488 than an scc insn even if we have it.
8490 The code to make this transformation was moved into fold_single_bit_test,
8491 so we just call into the folder and expand its result. */
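/* For example, (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
becomes ((x >> 3) & 1) ^ 1, so neither an scc insn nor a branch is
needed. */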
8493 if ((code == NE || code == EQ)
8494 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8495 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8497 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8498 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8499 arg0, arg1, type),
8500 target, VOIDmode, EXPAND_NORMAL);
8503 /* Now see if we are likely to be able to do this. Return if not. */
8504 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8505 return 0;
8507 icode = setcc_gen_code[(int) code];
8508 if (icode == CODE_FOR_nothing
8509 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8511 /* We can only do this if it is one of the special cases that
8512 can be handled without an scc insn. */
8513 if ((code == LT && integer_zerop (arg1))
8514 || (! only_cheap && code == GE && integer_zerop (arg1)))
8516 else if (BRANCH_COST >= 0
8517 && ! only_cheap && (code == NE || code == EQ)
8518 && TREE_CODE (type) != REAL_TYPE
8519 && ((abs_optab->handlers[(int) operand_mode].insn_code
8520 != CODE_FOR_nothing)
8521 || (ffs_optab->handlers[(int) operand_mode].insn_code
8522 != CODE_FOR_nothing)))
8524 else
8525 return 0;
8528 if (! get_subtarget (target)
8529 || GET_MODE (subtarget) != operand_mode)
8530 subtarget = 0;
8532 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8534 if (target == 0)
8535 target = gen_reg_rtx (mode);
8537 result = emit_store_flag (target, code, op0, op1,
8538 operand_mode, unsignedp, 1);
8540 if (result)
8542 if (invert)
8543 result = expand_binop (mode, xor_optab, result, const1_rtx,
8544 result, 0, OPTAB_LIB_WIDEN);
8545 return result;
8548 /* If this failed, we have to do this with set/compare/jump/set code. */
8549 if (!REG_P (target)
8550 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8551 target = gen_reg_rtx (GET_MODE (target));
8553 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8554 result = compare_from_rtx (op0, op1, code, unsignedp,
8555 operand_mode, NULL_RTX);
8556 if (GET_CODE (result) == CONST_INT)
8557 return (((result == const0_rtx && ! invert)
8558 || (result != const0_rtx && invert))
8559 ? const0_rtx : const1_rtx);
8561 /* The code of RESULT may not match CODE if compare_from_rtx
8562 decided to swap its operands and reverse the original code.
8564 We know that compare_from_rtx returns either a CONST_INT or
8565 a new comparison code, so it is safe to just extract the
8566 code from RESULT. */
8567 code = GET_CODE (result);
8569 label = gen_label_rtx ();
8570 if (bcc_gen_fctn[(int) code] == 0)
8571 abort ();
8573 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8574 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8575 emit_label (label);
8577 return target;
8581 /* Stubs in case we haven't got a casesi insn. */
8582 #ifndef HAVE_casesi
8583 # define HAVE_casesi 0
8584 # define gen_casesi(a, b, c, d, e) (0)
8585 # define CODE_FOR_casesi CODE_FOR_nothing
8586 #endif
8588 /* If the machine does not have a case insn that compares the bounds,
8589 this means extra overhead for dispatch tables, which raises the
8590 threshold for using them. */
8591 #ifndef CASE_VALUES_THRESHOLD
8592 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8593 #endif /* CASE_VALUES_THRESHOLD */
8595 unsigned int
8596 case_values_threshold (void)
8598 return CASE_VALUES_THRESHOLD;
8601 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8602 0 otherwise (i.e. if there is no casesi instruction). */
8603 int
8604 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8605 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8607 enum machine_mode index_mode = SImode;
8608 int index_bits = GET_MODE_BITSIZE (index_mode);
8609 rtx op1, op2, index;
8610 enum machine_mode op_mode;
8612 if (! HAVE_casesi)
8613 return 0;
8615 /* Convert the index to SImode. */
8616 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8618 enum machine_mode omode = TYPE_MODE (index_type);
8619 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8621 /* We must handle the endpoints in the original mode. */
8622 index_expr = build2 (MINUS_EXPR, index_type,
8623 index_expr, minval);
8624 minval = integer_zero_node;
8625 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8626 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8627 omode, 1, default_label);
8628 /* Now we can safely truncate. */
8629 index = convert_to_mode (index_mode, index, 0);
8631 else
8633 if (TYPE_MODE (index_type) != index_mode)
8635 index_expr = convert (lang_hooks.types.type_for_size
8636 (index_bits, 0), index_expr);
8637 index_type = TREE_TYPE (index_expr);
8640 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8643 do_pending_stack_adjust ();
8645 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8646 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8647 (index, op_mode))
8648 index = copy_to_mode_reg (op_mode, index);
8650 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8652 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8653 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8654 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8655 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8656 (op1, op_mode))
8657 op1 = copy_to_mode_reg (op_mode, op1);
8659 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8661 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8662 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8663 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8664 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8665 (op2, op_mode))
8666 op2 = copy_to_mode_reg (op_mode, op2);
8668 emit_jump_insn (gen_casesi (index, op1, op2,
8669 table_label, default_label));
8670 return 1;
8673 /* Attempt to generate a tablejump instruction; same concept. */
8674 #ifndef HAVE_tablejump
8675 #define HAVE_tablejump 0
8676 #define gen_tablejump(x, y) (0)
8677 #endif
8679 /* Subroutine of the next function.
8681 INDEX is the value being switched on, with the lowest value
8682 in the table already subtracted.
8683 MODE is its expected mode (needed if INDEX is constant).
8684 RANGE is the length of the jump table.
8685 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8687 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8688 index value is out of range. */
8690 static void
8691 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8692 rtx default_label)
8694 rtx temp, vector;
8696 if (INTVAL (range) > cfun->max_jumptable_ents)
8697 cfun->max_jumptable_ents = INTVAL (range);
8699 /* Do an unsigned comparison (in the proper mode) between the index
8700 expression and the value which represents the length of the range.
8701 Since we just finished subtracting the lower bound of the range
8702 from the index expression, this comparison allows us to simultaneously
8703 check that the original index expression value is both greater than
8704 or equal to the minimum value of the range and less than or equal to
8705 the maximum value of the range. */
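/* For example, with case values 3 ... 7, the lower bound 3 has already
been subtracted and RANGE is 4; original values below 3 wrap around to
huge unsigned numbers, so a single unsigned INDEX > RANGE test sends
both underflows and values above 7 to DEFAULT_LABEL. */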
8707 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8708 default_label);
8710 /* If index is in range, it must fit in Pmode.
8711 Convert to Pmode so we can index with it. */
8712 if (mode != Pmode)
8713 index = convert_to_mode (Pmode, index, 1);
8715 /* Don't let a MEM slip through, because then INDEX that comes
8716 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8717 and break_out_memory_refs will go to work on it and mess it up. */
8718 #ifdef PIC_CASE_VECTOR_ADDRESS
8719 if (flag_pic && !REG_P (index))
8720 index = copy_to_mode_reg (Pmode, index);
8721 #endif
8723 /* If flag_force_addr were to affect this address
8724 it could interfere with the tricky assumptions made
8725 about addresses that contain label-refs,
8726 which may be valid only very near the tablejump itself. */
8727 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8728 GET_MODE_SIZE, because this indicates how large insns are. The other
8729 uses should all be Pmode, because they are addresses. This code
8730 could fail if addresses and insns are not the same size. */
8731 index = gen_rtx_PLUS (Pmode,
8732 gen_rtx_MULT (Pmode, index,
8733 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8734 gen_rtx_LABEL_REF (Pmode, table_label));
8735 #ifdef PIC_CASE_VECTOR_ADDRESS
8736 if (flag_pic)
8737 index = PIC_CASE_VECTOR_ADDRESS (index);
8738 else
8739 #endif
8740 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8741 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8742 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8743 convert_move (temp, vector, 0);
8745 emit_jump_insn (gen_tablejump (temp, table_label));
8747 /* If we are generating PIC code or if the table is PC-relative, the
8748 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8749 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8750 emit_barrier ();
8753 int
8754 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8755 rtx table_label, rtx default_label)
8757 rtx index;
8759 if (! HAVE_tablejump)
8760 return 0;
8762 index_expr = fold (build2 (MINUS_EXPR, index_type,
8763 convert (index_type, index_expr),
8764 convert (index_type, minval)));
8765 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8766 do_pending_stack_adjust ();
8768 do_tablejump (index, TYPE_MODE (index_type),
8769 convert_modes (TYPE_MODE (index_type),
8770 TYPE_MODE (TREE_TYPE (range)),
8771 expand_expr (range, NULL_RTX,
8772 VOIDmode, 0),
8773 TYPE_UNSIGNED (TREE_TYPE (range))),
8774 table_label, default_label);
8775 return 1;
8778 /* Nonzero if the mode is a valid vector mode for this architecture.
8779 This returns nonzero even if there is no hardware support for the
8780 vector mode, but we can emulate with narrower modes. */
8782 int
8783 vector_mode_valid_p (enum machine_mode mode)
8785 enum mode_class class = GET_MODE_CLASS (mode);
8786 enum machine_mode innermode;
8788 /* Doh! What's going on? */
8789 if (class != MODE_VECTOR_INT
8790 && class != MODE_VECTOR_FLOAT)
8791 return 0;
8793 /* Hardware support. Woo hoo! */
8794 if (targetm.vector_mode_supported_p (mode))
8795 return 1;
8797 innermode = GET_MODE_INNER (mode);
8799 /* We should probably return 1 if requesting V4DI and we have no DI
8800 but do have V2DI, but that case is probably very unlikely. */
8802 /* If we have support for the inner mode, we can safely emulate it.
8803 We may not have V2DI, but we can emulate it with a pair of DIs. */
8804 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
8807 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8808 static rtx
8809 const_vector_from_tree (tree exp)
8811 rtvec v;
8812 int units, i;
8813 tree link, elt;
8814 enum machine_mode inner, mode;
8816 mode = TYPE_MODE (TREE_TYPE (exp));
8818 if (initializer_zerop (exp))
8819 return CONST0_RTX (mode);
8821 units = GET_MODE_NUNITS (mode);
8822 inner = GET_MODE_INNER (mode);
8824 v = rtvec_alloc (units);
8826 link = TREE_VECTOR_CST_ELTS (exp);
8827 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8829 elt = TREE_VALUE (link);
8831 if (TREE_CODE (elt) == REAL_CST)
8832 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8833 inner);
8834 else
8835 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8836 TREE_INT_CST_HIGH (elt),
8837 inner);
8840 /* Initialize remaining elements to 0. */
8841 for (; i < units; ++i)
8842 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8844 return gen_rtx_raw_CONST_VECTOR (mode, v);
8846 #include "gt-expr.h"