1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
 59 They should be processed from last to first if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, enum machine_mode, int, tree, int);
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
180 #endif
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
188 #endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
197 #endif
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
202 /* This array records the insn_code of insns to perform block clears. */
203 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
205 /* These arrays record the insn_code of two different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
212 #ifndef SLOW_UNALIGNED_ACCESS
213 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
214 #endif
216 /* This is run once per compilation to set up which modes can be used
217 directly in memory and to initialize the block move optab. */
219 void
220 init_expr_once (void)
222 rtx insn, pat;
223 enum machine_mode mode;
224 int num_clobbers;
225 rtx mem, mem1;
226 rtx reg;
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 /* A scratch register we can modify in-place below to avoid
235 useless RTL allocations. */
236 reg = gen_rtx_REG (VOIDmode, -1);
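 /* A dummy insn holding a SET pattern; its operands are rewritten
    below for each (mode, regno) pair we probe with recog.  */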
238 insn = rtx_alloc (INSN);
239 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
240 PATTERN (insn) = pat;
242 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
243 mode = (enum machine_mode) ((int) mode + 1))
245 int regno;
247 direct_load[(int) mode] = direct_store[(int) mode] = 0;
248 PUT_MODE (mem, mode);
249 PUT_MODE (mem1, mode);
250 PUT_MODE (reg, mode);
252 /* See if there is some register that can be used in this mode and
253 directly loaded or stored from memory. */
255 if (mode != VOIDmode && mode != BLKmode)
256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
257 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
258 regno++)
260 if (! HARD_REGNO_MODE_OK (regno, mode))
261 continue;
263 REGNO (reg) = regno;
265 SET_SRC (pat) = mem;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = mem1;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem1;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
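 /* Next, probe which float extensions can be done directly from memory:
    reuse a MEM whose address is an arbitrary pseudo register and ask each
    extend insn's operand predicate whether it accepts that MEM in the
    narrower source mode.  */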
287 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
290 mode = GET_MODE_WIDER_MODE (mode))
292 enum machine_mode srcmode;
293 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
294 srcmode = GET_MODE_WIDER_MODE (srcmode))
296 enum insn_code ic;
298 ic = can_extend_p (mode, srcmode, 0);
299 if (ic == CODE_FOR_nothing)
300 continue;
302 PUT_MODE (mem, srcmode);
304 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
305 float_extend_from_mem[mode][srcmode] = true;
310 /* This is run at the start of compiling a function. */
312 void
313 init_expr (void)
315 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
318 /* Copy data from FROM to TO, where the machine modes are not the same.
319 Both modes may be integer, or both may be floating.
320 UNSIGNEDP should be nonzero if FROM is an unsigned type.
321 This causes zero-extension instead of sign-extension. */
323 void
324 convert_move (rtx to, rtx from, int unsignedp)
326 enum machine_mode to_mode = GET_MODE (to);
327 enum machine_mode from_mode = GET_MODE (from);
328 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
329 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
330 enum insn_code code;
331 rtx libcall;
333 /* rtx code for making an equivalent value. */
334 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
335 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
338 if (to_real != from_real)
339 abort ();
341 /* If the source and destination are already the same, then there's
342 nothing to do. */
343 if (to == from)
344 return;
346 /* If FROM is a SUBREG that indicates that we have already done at least
347 the required extension, strip it. We don't handle such SUBREGs as
348 TO here. */
350 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
351 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
352 >= GET_MODE_SIZE (to_mode))
353 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
354 from = gen_lowpart (to_mode, from), from_mode = to_mode;
356 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
357 abort ();
359 if (to_mode == from_mode
360 || (from_mode == VOIDmode && CONSTANT_P (from)))
362 emit_move_insn (to, from);
363 return;
366 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
368 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
369 abort ();
371 if (VECTOR_MODE_P (to_mode))
372 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
373 else
374 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
376 emit_move_insn (to, from);
377 return;
380 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
382 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
383 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
384 return;
387 if (to_real)
389 rtx value, insns;
390 convert_optab tab;
392 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 tab = sext_optab;
394 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
395 tab = trunc_optab;
396 else
397 abort ();
399 /* Try converting directly if the insn is supported. */
401 code = tab->handlers[to_mode][from_mode].insn_code;
402 if (code != CODE_FOR_nothing)
404 emit_unop_insn (code, to, from,
405 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
406 return;
409 /* Otherwise use a libcall. */
410 libcall = tab->handlers[to_mode][from_mode].libfunc;
412 if (!libcall)
413 /* This conversion is not implemented yet. */
414 abort ();
416 start_sequence ();
417 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
418 1, from, from_mode);
419 insns = get_insns ();
420 end_sequence ();
421 emit_libcall_block (insns, to, value,
422 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
423 from)
424 : gen_rtx_FLOAT_EXTEND (to_mode, from));
425 return;
428 /* Handle pointer conversion. */ /* SPEE 900220. */
429 /* Targets are expected to provide conversion insns between PxImode and
430 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
431 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
433 enum machine_mode full_mode
434 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
436 if (trunc_optab->handlers[to_mode][full_mode].insn_code
437 == CODE_FOR_nothing)
438 abort ();
440 if (full_mode != from_mode)
441 from = convert_to_mode (full_mode, from, unsignedp);
442 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
443 to, from, UNKNOWN);
444 return;
446 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
448 enum machine_mode full_mode
449 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
451 if (sext_optab->handlers[full_mode][from_mode].insn_code
452 == CODE_FOR_nothing)
453 abort ();
455 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
456 to, from, UNKNOWN);
457 if (to_mode == full_mode)
458 return;
460 /* else proceed to integer conversions below. */
461 from_mode = full_mode;
464 /* Now both modes are integers. */
466 /* Handle expanding beyond a word. */
467 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
468 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
470 rtx insns;
471 rtx lowpart;
472 rtx fill_value;
473 rtx lowfrom;
474 int i;
475 enum machine_mode lowpart_mode;
476 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
478 /* Try converting directly if the insn is supported. */
479 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
480 != CODE_FOR_nothing)
482 /* If FROM is a SUBREG, put it into a register. Do this
483 so that we always generate the same set of insns for
484 better cse'ing; if an intermediate assignment occurred,
485 we won't be doing the operation directly on the SUBREG. */
486 if (optimize > 0 && GET_CODE (from) == SUBREG)
487 from = force_reg (from_mode, from);
488 emit_unop_insn (code, to, from, equiv_code);
489 return;
491 /* Next, try converting via full word. */
492 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
493 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
494 != CODE_FOR_nothing))
496 if (REG_P (to))
498 if (reg_overlap_mentioned_p (to, from))
499 from = force_reg (from_mode, from);
500 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
502 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
503 emit_unop_insn (code, to,
504 gen_lowpart (word_mode, to), equiv_code);
505 return;
508 /* No special multiword conversion insn; do it by hand. */
509 start_sequence ();
511 /* Since we will turn this into a no conflict block, we must ensure
512 that the source does not overlap the target. */
514 if (reg_overlap_mentioned_p (to, from))
515 from = force_reg (from_mode, from);
517 /* Get a copy of FROM widened to a word, if necessary. */
518 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
519 lowpart_mode = word_mode;
520 else
521 lowpart_mode = from_mode;
523 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
525 lowpart = gen_lowpart (lowpart_mode, to);
526 emit_move_insn (lowpart, lowfrom);
528 /* Compute the value to put in each remaining word. */
529 if (unsignedp)
530 fill_value = const0_rtx;
531 else
533 #ifdef HAVE_slt
534 if (HAVE_slt
535 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
536 && STORE_FLAG_VALUE == -1)
538 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
539 lowpart_mode, 0);
540 fill_value = gen_reg_rtx (word_mode);
541 emit_insn (gen_slt (fill_value));
543 else
544 #endif
546 fill_value
547 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
548 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
549 NULL_RTX, 0);
550 fill_value = convert_to_mode (word_mode, fill_value, 1);
554 /* Fill the remaining words. */
555 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
557 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
558 rtx subword = operand_subword (to, index, 1, to_mode);
560 if (subword == 0)
561 abort ();
563 if (fill_value != subword)
564 emit_move_insn (subword, fill_value);
567 insns = get_insns ();
568 end_sequence ();
570 emit_no_conflict_block (insns, to, from, NULL_RTX,
571 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
572 return;
575 /* Truncating multi-word to a word or less. */
576 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
577 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
579 if (!((MEM_P (from)
580 && ! MEM_VOLATILE_P (from)
581 && direct_load[(int) to_mode]
582 && ! mode_dependent_address_p (XEXP (from, 0)))
583 || REG_P (from)
584 || GET_CODE (from) == SUBREG))
585 from = force_reg (from_mode, from);
586 convert_move (to, gen_lowpart (word_mode, from), 0);
587 return;
590 /* Now follow all the conversions between integers
591 no more than a word long. */
593 /* For truncation, usually we can just refer to FROM in a narrower mode. */
594 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
595 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
596 GET_MODE_BITSIZE (from_mode)))
598 if (!((MEM_P (from)
599 && ! MEM_VOLATILE_P (from)
600 && direct_load[(int) to_mode]
601 && ! mode_dependent_address_p (XEXP (from, 0)))
602 || REG_P (from)
603 || GET_CODE (from) == SUBREG))
604 from = force_reg (from_mode, from);
605 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
606 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
607 from = copy_to_reg (from);
608 emit_move_insn (to, gen_lowpart (to_mode, from));
609 return;
612 /* Handle extension. */
613 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
615 /* Convert directly if that works. */
616 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
617 != CODE_FOR_nothing)
619 if (flag_force_mem)
620 from = force_not_mem (from);
622 emit_unop_insn (code, to, from, equiv_code);
623 return;
625 else
627 enum machine_mode intermediate;
628 rtx tmp;
629 tree shift_amount;
631 /* Search for a mode to convert via. */
632 for (intermediate = from_mode; intermediate != VOIDmode;
633 intermediate = GET_MODE_WIDER_MODE (intermediate))
634 if (((can_extend_p (to_mode, intermediate, unsignedp)
635 != CODE_FOR_nothing)
636 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
637 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
638 GET_MODE_BITSIZE (intermediate))))
639 && (can_extend_p (intermediate, from_mode, unsignedp)
640 != CODE_FOR_nothing))
642 convert_move (to, convert_to_mode (intermediate, from,
643 unsignedp), unsignedp);
644 return;
647 /* No suitable intermediate mode.
648 Generate what we need with shifts. */
649 shift_amount = build_int_cst (NULL_TREE,
650 GET_MODE_BITSIZE (to_mode)
651 - GET_MODE_BITSIZE (from_mode));
652 from = gen_lowpart (to_mode, force_reg (from_mode, from));
653 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
654 to, unsignedp);
655 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
656 to, unsignedp);
657 if (tmp != to)
658 emit_move_insn (to, tmp);
659 return;
663 /* Support special truncate insns for certain modes. */
664 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
666 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
667 to, from, UNKNOWN);
668 return;
671 /* Handle truncation of volatile memrefs, and so on;
672 the things that couldn't be truncated directly,
673 and for which there was no special instruction.
675 ??? Code above formerly short-circuited this, for most integer
676 mode pairs, with a force_reg in from_mode followed by a recursive
677 call to this routine. Appears always to have been wrong. */
678 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
680 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
681 emit_move_insn (to, temp);
682 return;
685 /* Mode combination is not recognized. */
686 abort ();
689 /* Return an rtx for a value that would result
690 from converting X to mode MODE.
691 Both X and MODE may be floating, or both integer.
692 UNSIGNEDP is nonzero if X is an unsigned value.
693 This can be done by referring to a part of X in place
694 or by copying to a new temporary with conversion. */
697 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
699 return convert_modes (mode, VOIDmode, x, unsignedp);
702 /* Return an rtx for a value that would result
703 from converting X from mode OLDMODE to mode MODE.
704 Both modes may be floating, or both integer.
705 UNSIGNEDP is nonzero if X is an unsigned value.
707 This can be done by referring to a part of X in place
708 or by copying to a new temporary with conversion.
710 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
713 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
715 rtx temp;
717 /* If FROM is a SUBREG that indicates that we have already done at least
718 the required extension, strip it. */
720 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
721 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
722 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
723 x = gen_lowpart (mode, x);
725 if (GET_MODE (x) != VOIDmode)
726 oldmode = GET_MODE (x);
728 if (mode == oldmode)
729 return x;
731 /* There is one case that we must handle specially: If we are converting
732 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
733 we are to interpret the constant as unsigned, gen_lowpart will do
 734 the wrong thing if the constant appears negative. What we want to do is
735 make the high-order word of the constant zero, not all ones. */
737 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
738 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
739 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
741 HOST_WIDE_INT val = INTVAL (x);
743 if (oldmode != VOIDmode
744 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
746 int width = GET_MODE_BITSIZE (oldmode);
748 /* We need to zero extend VAL. */
749 val &= ((HOST_WIDE_INT) 1 << width) - 1;
752 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
755 /* We can do this with a gen_lowpart if both desired and current modes
756 are integer, and this is either a constant integer, a register, or a
757 non-volatile MEM. Except for the constant case where MODE is no
758 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
760 if ((GET_CODE (x) == CONST_INT
761 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
762 || (GET_MODE_CLASS (mode) == MODE_INT
763 && GET_MODE_CLASS (oldmode) == MODE_INT
764 && (GET_CODE (x) == CONST_DOUBLE
765 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
766 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
767 && direct_load[(int) mode])
768 || (REG_P (x)
769 && (! HARD_REGISTER_P (x)
770 || HARD_REGNO_MODE_OK (REGNO (x), mode))
771 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
772 GET_MODE_BITSIZE (GET_MODE (x)))))))))
774 /* ?? If we don't know OLDMODE, we have to assume here that
775 X does not need sign- or zero-extension. This may not be
776 the case, but it's the best we can do. */
777 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
778 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
780 HOST_WIDE_INT val = INTVAL (x);
781 int width = GET_MODE_BITSIZE (oldmode);
783 /* We must sign or zero-extend in this case. Start by
784 zero-extending, then sign extend if we need to. */
785 val &= ((HOST_WIDE_INT) 1 << width) - 1;
786 if (! unsignedp
787 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
788 val |= (HOST_WIDE_INT) (-1) << width;
790 return gen_int_mode (val, mode);
793 return gen_lowpart (mode, x);
 796 /* Converting from an integer constant into MODE is always equivalent to a
 797 subreg operation. */
798 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
800 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
801 abort ();
802 return simplify_gen_subreg (mode, x, oldmode, 0);
805 temp = gen_reg_rtx (mode);
806 convert_move (temp, x, unsignedp);
807 return temp;
810 /* STORE_MAX_PIECES is the number of bytes at a time that we can
811 store efficiently. Due to internal GCC limitations, this is
812 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
813 for an immediate constant. */
815 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
817 /* Determine whether the LEN bytes can be moved by using several move
818 instructions. Return nonzero if a call to move_by_pieces should
819 succeed. */
822 can_move_by_pieces (unsigned HOST_WIDE_INT len,
823 unsigned int align ATTRIBUTE_UNUSED)
825 return MOVE_BY_PIECES_P (len, align);
828 /* Generate several move instructions to copy LEN bytes from block FROM to
829 block TO. (These are MEM rtx's with BLKmode).
831 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
832 used to push FROM to the stack.
834 ALIGN is maximum stack alignment we can assume.
836 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
837 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
838 stpcpy. */
841 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
842 unsigned int align, int endp)
844 struct move_by_pieces data;
845 rtx to_addr, from_addr = XEXP (from, 0);
846 unsigned int max_size = MOVE_MAX_PIECES + 1;
847 enum machine_mode mode = VOIDmode, tmode;
848 enum insn_code icode;
850 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
852 data.offset = 0;
853 data.from_addr = from_addr;
854 if (to)
856 to_addr = XEXP (to, 0);
857 data.to = to;
858 data.autinc_to
859 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
860 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
861 data.reverse
862 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
864 else
866 to_addr = NULL_RTX;
867 data.to = NULL_RTX;
868 data.autinc_to = 1;
869 #ifdef STACK_GROWS_DOWNWARD
870 data.reverse = 1;
871 #else
872 data.reverse = 0;
873 #endif
875 data.to_addr = to_addr;
876 data.from = from;
877 data.autinc_from
878 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
879 || GET_CODE (from_addr) == POST_INC
880 || GET_CODE (from_addr) == POST_DEC);
882 data.explicit_inc_from = 0;
883 data.explicit_inc_to = 0;
884 if (data.reverse) data.offset = len;
885 data.len = len;
887 /* If copying requires more than two move insns,
888 copy addresses to registers (to make displacements shorter)
889 and use post-increment if available. */
890 if (!(data.autinc_from && data.autinc_to)
891 && move_by_pieces_ninsns (len, align, max_size) > 2)
893 /* Find the mode of the largest move... */
894 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
895 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
896 if (GET_MODE_SIZE (tmode) < max_size)
897 mode = tmode;
899 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
901 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
902 data.autinc_from = 1;
903 data.explicit_inc_from = -1;
905 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
907 data.from_addr = copy_addr_to_reg (from_addr);
908 data.autinc_from = 1;
909 data.explicit_inc_from = 1;
911 if (!data.autinc_from && CONSTANT_P (from_addr))
912 data.from_addr = copy_addr_to_reg (from_addr);
913 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
915 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
916 data.autinc_to = 1;
917 data.explicit_inc_to = -1;
919 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
921 data.to_addr = copy_addr_to_reg (to_addr);
922 data.autinc_to = 1;
923 data.explicit_inc_to = 1;
925 if (!data.autinc_to && CONSTANT_P (to_addr))
926 data.to_addr = copy_addr_to_reg (to_addr);
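 /* Adjust ALIGN to the alignment of the widest integer mode we can
    actually use for the copy, given MOVE_MAX_PIECES and whether
    unaligned accesses are slow at this alignment.  */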
929 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
930 if (align >= GET_MODE_ALIGNMENT (tmode))
931 align = GET_MODE_ALIGNMENT (tmode);
932 else
934 enum machine_mode xmode;
936 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
937 tmode != VOIDmode;
938 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
939 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
940 || SLOW_UNALIGNED_ACCESS (tmode, align))
941 break;
943 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
946 /* First move what we can in the largest integer mode, then go to
947 successively smaller modes. */
949 while (max_size > 1)
951 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
952 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
953 if (GET_MODE_SIZE (tmode) < max_size)
954 mode = tmode;
956 if (mode == VOIDmode)
957 break;
959 icode = mov_optab->handlers[(int) mode].insn_code;
960 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
961 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
963 max_size = GET_MODE_SIZE (mode);
966 /* The code above should have handled everything. */
967 if (data.len > 0)
968 abort ();
970 if (endp)
972 rtx to1;
974 if (data.reverse)
975 abort ();
976 if (data.autinc_to)
978 if (endp == 2)
980 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
981 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
982 else
983 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
984 -1));
986 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
987 data.offset);
989 else
991 if (endp == 2)
992 --data.offset;
993 to1 = adjust_address (data.to, QImode, data.offset);
995 return to1;
997 else
998 return data.to;
1001 /* Return number of insns required to move L bytes by pieces.
1002 ALIGN (in bits) is maximum alignment we can assume. */
1004 static unsigned HOST_WIDE_INT
1005 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1006 unsigned int max_size)
1008 unsigned HOST_WIDE_INT n_insns = 0;
1009 enum machine_mode tmode;
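 /* Adjust ALIGN exactly as move_by_pieces does, so both routines agree
    on which modes are usable.  */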
1011 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1012 if (align >= GET_MODE_ALIGNMENT (tmode))
1013 align = GET_MODE_ALIGNMENT (tmode);
1014 else
1016 enum machine_mode tmode, xmode;
1018 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1019 tmode != VOIDmode;
1020 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1021 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1022 || SLOW_UNALIGNED_ACCESS (tmode, align))
1023 break;
1025 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1028 while (max_size > 1)
1030 enum machine_mode mode = VOIDmode;
1031 enum insn_code icode;
1033 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1034 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1035 if (GET_MODE_SIZE (tmode) < max_size)
1036 mode = tmode;
1038 if (mode == VOIDmode)
1039 break;
1041 icode = mov_optab->handlers[(int) mode].insn_code;
1042 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1043 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1045 max_size = GET_MODE_SIZE (mode);
1048 if (l)
1049 abort ();
1050 return n_insns;
1053 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1054 with move instructions for mode MODE. GENFUN is the gen_... function
1055 to make a move insn for that mode. DATA has all the other info. */
1057 static void
1058 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1059 struct move_by_pieces *data)
1061 unsigned int size = GET_MODE_SIZE (mode);
1062 rtx to1 = NULL_RTX, from1;
1064 while (data->len >= size)
1066 if (data->reverse)
1067 data->offset -= size;
1069 if (data->to)
1071 if (data->autinc_to)
1072 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1073 data->offset);
1074 else
1075 to1 = adjust_address (data->to, mode, data->offset);
1078 if (data->autinc_from)
1079 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1080 data->offset);
1081 else
1082 from1 = adjust_address (data->from, mode, data->offset);
1084 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1085 emit_insn (gen_add2_insn (data->to_addr,
1086 GEN_INT (-(HOST_WIDE_INT)size)));
1087 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1088 emit_insn (gen_add2_insn (data->from_addr,
1089 GEN_INT (-(HOST_WIDE_INT)size)));
1091 if (data->to)
1092 emit_insn ((*genfun) (to1, from1));
1093 else
1095 #ifdef PUSH_ROUNDING
1096 emit_single_push_insn (mode, from1, NULL);
1097 #else
1098 abort ();
1099 #endif
1102 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1103 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1104 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1105 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1107 if (! data->reverse)
1108 data->offset += size;
1110 data->len -= size;
1114 /* Emit code to move a block Y to a block X. This may be done with
1115 string-move instructions, with multiple scalar move instructions,
1116 or with a library call.
1118 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1119 SIZE is an rtx that says how long they are.
1120 ALIGN is the maximum alignment we can assume they have.
1121 METHOD describes what kind of copy this is, and what mechanisms may be used.
1123 Return the address of the new block, if memcpy is called and returns it,
1124 0 otherwise. */
1127 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1129 bool may_use_call;
1130 rtx retval = 0;
1131 unsigned int align;
1133 switch (method)
1135 case BLOCK_OP_NORMAL:
1136 may_use_call = true;
1137 break;
1139 case BLOCK_OP_CALL_PARM:
1140 may_use_call = block_move_libcall_safe_for_call_parm ();
1142 /* Make inhibit_defer_pop nonzero around the library call
1143 to force it to pop the arguments right away. */
1144 NO_DEFER_POP;
1145 break;
1147 case BLOCK_OP_NO_LIBCALL:
1148 may_use_call = false;
1149 break;
1151 default:
1152 abort ();
1155 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1157 if (!MEM_P (x))
1158 abort ();
1159 if (!MEM_P (y))
1160 abort ();
1161 if (size == 0)
1162 abort ();
1164 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1165 block copy is more efficient for other large modes, e.g. DCmode. */
1166 x = adjust_address (x, BLKmode, 0);
1167 y = adjust_address (y, BLKmode, 0);
1169 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1170 can be incorrect is coming from __builtin_memcpy. */
1171 if (GET_CODE (size) == CONST_INT)
1173 if (INTVAL (size) == 0)
1174 return 0;
1176 x = shallow_copy_rtx (x);
1177 y = shallow_copy_rtx (y);
1178 set_mem_size (x, size);
1179 set_mem_size (y, size);
1182 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1183 move_by_pieces (x, y, INTVAL (size), align, 0);
1184 else if (emit_block_move_via_movmem (x, y, size, align))
1186 else if (may_use_call)
1187 retval = emit_block_move_via_libcall (x, y, size);
1188 else
1189 emit_block_move_via_loop (x, y, size, align);
1191 if (method == BLOCK_OP_CALL_PARM)
1192 OK_DEFER_POP;
1194 return retval;
1197 /* A subroutine of emit_block_move. Returns true if calling the
1198 block move libcall will not clobber any parameters which may have
1199 already been placed on the stack. */
1201 static bool
1202 block_move_libcall_safe_for_call_parm (void)
1204 /* If arguments are pushed on the stack, then they're safe. */
1205 if (PUSH_ARGS)
1206 return true;
1208 /* If registers go on the stack anyway, any argument is sure to clobber
1209 an outgoing argument. */
1210 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1212 tree fn = emit_block_move_libcall_fn (false);
1213 (void) fn;
1214 if (REG_PARM_STACK_SPACE (fn) != 0)
1215 return false;
1217 #endif
1219 /* If any argument goes in memory, then it might clobber an outgoing
1220 argument. */
1222 CUMULATIVE_ARGS args_so_far;
1223 tree fn, arg;
1225 fn = emit_block_move_libcall_fn (false);
1226 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1228 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1229 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1231 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1232 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1233 if (!tmp || !REG_P (tmp))
1234 return false;
1235 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1236 NULL_TREE, 1))
1237 return false;
1238 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1241 return true;
1244 /* A subroutine of emit_block_move. Expand a movmem pattern;
1245 return true if successful. */
1247 static bool
1248 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1250 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1251 int save_volatile_ok = volatile_ok;
1252 enum machine_mode mode;
1254 /* Since this is a move insn, we don't care about volatility. */
1255 volatile_ok = 1;
1257 /* Try the most limited insn first, because there's no point
1258 including more than one in the machine description unless
1259 the more limited one has some advantage. */
1261 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1262 mode = GET_MODE_WIDER_MODE (mode))
1264 enum insn_code code = movmem_optab[(int) mode];
1265 insn_operand_predicate_fn pred;
1267 if (code != CODE_FOR_nothing
1268 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1269 here because if SIZE is less than the mode mask, as it is
1270 returned by the macro, it will definitely be less than the
1271 actual mode mask. */
1272 && ((GET_CODE (size) == CONST_INT
1273 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1274 <= (GET_MODE_MASK (mode) >> 1)))
1275 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1276 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1277 || (*pred) (x, BLKmode))
1278 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1279 || (*pred) (y, BLKmode))
1280 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1281 || (*pred) (opalign, VOIDmode)))
1283 rtx op2;
1284 rtx last = get_last_insn ();
1285 rtx pat;
1287 op2 = convert_to_mode (mode, size, 1);
1288 pred = insn_data[(int) code].operand[2].predicate;
1289 if (pred != 0 && ! (*pred) (op2, mode))
1290 op2 = copy_to_mode_reg (mode, op2);
1292 /* ??? When called via emit_block_move_for_call, it'd be
1293 nice if there were some way to inform the backend, so
1294 that it doesn't fail the expansion because it thinks
1295 emitting the libcall would be more efficient. */
1297 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1298 if (pat)
1300 emit_insn (pat);
1301 volatile_ok = save_volatile_ok;
1302 return true;
1304 else
1305 delete_insns_since (last);
1309 volatile_ok = save_volatile_ok;
1310 return false;
1313 /* A subroutine of emit_block_move. Expand a call to memcpy.
1314 Return the return value from memcpy, 0 otherwise. */
1316 static rtx
1317 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1319 rtx dst_addr, src_addr;
1320 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1321 enum machine_mode size_mode;
1322 rtx retval;
1324 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1325 pseudos. We can then place those new pseudos into a VAR_DECL and
1326 use them later. */
1328 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1329 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1331 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1332 src_addr = convert_memory_address (ptr_mode, src_addr);
1334 dst_tree = make_tree (ptr_type_node, dst_addr);
1335 src_tree = make_tree (ptr_type_node, src_addr);
1337 size_mode = TYPE_MODE (sizetype);
1339 size = convert_to_mode (size_mode, size, 1);
1340 size = copy_to_mode_reg (size_mode, size);
1342 /* It is incorrect to use the libcall calling conventions to call
1343 memcpy in this context. This could be a user call to memcpy and
1344 the user may wish to examine the return value from memcpy. For
1345 targets where libcalls and normal calls have different conventions
1346 for returning pointers, we could end up generating incorrect code. */
1348 size_tree = make_tree (sizetype, size);
1350 fn = emit_block_move_libcall_fn (true);
1351 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1352 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1353 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1355 /* Now we have to build up the CALL_EXPR itself. */
1356 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1357 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1358 call_expr, arg_list, NULL_TREE);
1360 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1362 return retval;
1365 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1366 for the function we use for block copies. The first time FOR_CALL
1367 is true, we call assemble_external. */
1369 static GTY(()) tree block_move_fn;
1371 void
1372 init_block_move_fn (const char *asmspec)
1374 if (!block_move_fn)
1376 tree args, fn;
1378 fn = get_identifier ("memcpy");
1379 args = build_function_type_list (ptr_type_node, ptr_type_node,
1380 const_ptr_type_node, sizetype,
1381 NULL_TREE);
1383 fn = build_decl (FUNCTION_DECL, fn, args);
1384 DECL_EXTERNAL (fn) = 1;
1385 TREE_PUBLIC (fn) = 1;
1386 DECL_ARTIFICIAL (fn) = 1;
1387 TREE_NOTHROW (fn) = 1;
1389 block_move_fn = fn;
1392 if (asmspec)
1393 set_user_assembler_name (block_move_fn, asmspec);
1396 static tree
1397 emit_block_move_libcall_fn (int for_call)
1399 static bool emitted_extern;
1401 if (!block_move_fn)
1402 init_block_move_fn (NULL);
1404 if (for_call && !emitted_extern)
1406 emitted_extern = true;
1407 make_decl_rtl (block_move_fn);
1408 assemble_external (block_move_fn);
1411 return block_move_fn;
1414 /* A subroutine of emit_block_move. Copy the data via an explicit
1415 loop. This is used only when libcalls are forbidden. */
1416 /* ??? It'd be nice to copy in hunks larger than QImode. */
1418 static void
1419 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1420 unsigned int align ATTRIBUTE_UNUSED)
1422 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1423 enum machine_mode iter_mode;
1425 iter_mode = GET_MODE (size);
1426 if (iter_mode == VOIDmode)
1427 iter_mode = word_mode;
1429 top_label = gen_label_rtx ();
1430 cmp_label = gen_label_rtx ();
1431 iter = gen_reg_rtx (iter_mode);
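 /* The generated code is:  iter = 0; goto cmp_label;
    top_label: copy one byte at offset ITER; iter += 1;
    cmp_label: if (iter < size) goto top_label;  */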
1433 emit_move_insn (iter, const0_rtx);
1435 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1436 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1437 do_pending_stack_adjust ();
1439 emit_jump (cmp_label);
1440 emit_label (top_label);
1442 tmp = convert_modes (Pmode, iter_mode, iter, true);
1443 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1444 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1445 x = change_address (x, QImode, x_addr);
1446 y = change_address (y, QImode, y_addr);
1448 emit_move_insn (x, y);
1450 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1451 true, OPTAB_LIB_WIDEN);
1452 if (tmp != iter)
1453 emit_move_insn (iter, tmp);
1455 emit_label (cmp_label);
1457 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1458 true, top_label);
1461 /* Copy all or part of a value X into registers starting at REGNO.
1462 The number of registers to be filled is NREGS. */
1464 void
1465 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1467 int i;
1468 #ifdef HAVE_load_multiple
1469 rtx pat;
1470 rtx last;
1471 #endif
1473 if (nregs == 0)
1474 return;
1476 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1477 x = validize_mem (force_const_mem (mode, x));
1479 /* See if the machine can do this with a load multiple insn. */
1480 #ifdef HAVE_load_multiple
1481 if (HAVE_load_multiple)
1483 last = get_last_insn ();
1484 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1485 GEN_INT (nregs));
1486 if (pat)
1488 emit_insn (pat);
1489 return;
1491 else
1492 delete_insns_since (last);
1494 #endif
1496 for (i = 0; i < nregs; i++)
1497 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1498 operand_subword_force (x, i, mode));
1501 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1502 The number of registers to be filled is NREGS. */
1504 void
1505 move_block_from_reg (int regno, rtx x, int nregs)
1507 int i;
1509 if (nregs == 0)
1510 return;
1512 /* See if the machine can do this with a store multiple insn. */
1513 #ifdef HAVE_store_multiple
1514 if (HAVE_store_multiple)
1516 rtx last = get_last_insn ();
1517 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1518 GEN_INT (nregs));
1519 if (pat)
1521 emit_insn (pat);
1522 return;
1524 else
1525 delete_insns_since (last);
1527 #endif
1529 for (i = 0; i < nregs; i++)
1531 rtx tem = operand_subword (x, i, 1, BLKmode);
1533 if (tem == 0)
1534 abort ();
1536 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1540 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1541 ORIG, where ORIG is a non-consecutive group of registers represented by
1542 a PARALLEL. The clone is identical to the original except in that the
1543 original set of registers is replaced by a new set of pseudo registers.
1544 The new set has the same modes as the original set. */
1547 gen_group_rtx (rtx orig)
1549 int i, length;
1550 rtx *tmps;
1552 if (GET_CODE (orig) != PARALLEL)
1553 abort ();
1555 length = XVECLEN (orig, 0);
1556 tmps = alloca (sizeof (rtx) * length);
1558 /* Skip a NULL entry in first slot. */
1559 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1561 if (i)
1562 tmps[0] = 0;
1564 for (; i < length; i++)
1566 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1567 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1569 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1572 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1575 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1576 where DST is non-consecutive registers represented by a PARALLEL.
1577 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1578 if not known. */
1580 void
1581 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1583 rtx *tmps, src;
1584 int start, i;
1586 if (GET_CODE (dst) != PARALLEL)
1587 abort ();
1589 /* Check for a NULL entry, used to indicate that the parameter goes
1590 both on the stack and in registers. */
1591 if (XEXP (XVECEXP (dst, 0, 0), 0))
1592 start = 0;
1593 else
1594 start = 1;
1596 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1598 /* Process the pieces. */
1599 for (i = start; i < XVECLEN (dst, 0); i++)
1601 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1602 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1603 unsigned int bytelen = GET_MODE_SIZE (mode);
1604 int shift = 0;
1606 /* Handle trailing fragments that run over the size of the struct. */
1607 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1609 /* Arrange to shift the fragment to where it belongs.
1610 extract_bit_field loads to the lsb of the reg. */
1611 if (
1612 #ifdef BLOCK_REG_PADDING
1613 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1614 == (BYTES_BIG_ENDIAN ? upward : downward)
1615 #else
1616 BYTES_BIG_ENDIAN
1617 #endif
1619 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1620 bytelen = ssize - bytepos;
1621 if (bytelen <= 0)
1622 abort ();
1625 /* If we won't be loading directly from memory, protect the real source
1626 from strange tricks we might play; but make sure that the source can
1627 be loaded directly into the destination. */
1628 src = orig_src;
1629 if (!MEM_P (orig_src)
1630 && (!CONSTANT_P (orig_src)
1631 || (GET_MODE (orig_src) != mode
1632 && GET_MODE (orig_src) != VOIDmode)))
1634 if (GET_MODE (orig_src) == VOIDmode)
1635 src = gen_reg_rtx (mode);
1636 else
1637 src = gen_reg_rtx (GET_MODE (orig_src));
1639 emit_move_insn (src, orig_src);
1642 /* Optimize the access just a bit. */
1643 if (MEM_P (src)
1644 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1645 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1646 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1647 && bytelen == GET_MODE_SIZE (mode))
1649 tmps[i] = gen_reg_rtx (mode);
1650 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1652 else if (GET_CODE (src) == CONCAT)
1654 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1655 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1657 if ((bytepos == 0 && bytelen == slen0)
1658 || (bytepos != 0 && bytepos + bytelen <= slen))
1660 /* The following assumes that the concatenated objects all
1661 have the same size. In this case, a simple calculation
1662 can be used to determine the object and the bit field
1663 to be extracted. */
1664 tmps[i] = XEXP (src, bytepos / slen0);
1665 if (! CONSTANT_P (tmps[i])
1666 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1667 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1668 (bytepos % slen0) * BITS_PER_UNIT,
1669 1, NULL_RTX, mode, mode);
1671 else if (bytepos == 0)
1673 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1674 emit_move_insn (mem, src);
1675 tmps[i] = adjust_address (mem, mode, 0);
1677 else
1678 abort ();
1680 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1681 SIMD register, which is currently broken. Until we get GCC
1682 to emit proper RTL for these cases, let's dump to memory. */
1683 else if (VECTOR_MODE_P (GET_MODE (dst))
1684 && REG_P (src))
1686 int slen = GET_MODE_SIZE (GET_MODE (src));
1687 rtx mem;
1689 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1690 emit_move_insn (mem, src);
1691 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1693 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1694 && XVECLEN (dst, 0) > 1)
1695 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1696 else if (CONSTANT_P (src)
1697 || (REG_P (src) && GET_MODE (src) == mode))
1698 tmps[i] = src;
1699 else
1700 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1701 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1702 mode, mode);
1704 if (shift)
1705 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1706 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1709 /* Copy the extracted pieces into the proper (probable) hard regs. */
1710 for (i = start; i < XVECLEN (dst, 0); i++)
1711 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1714 /* Emit code to move a block SRC to block DST, where SRC and DST are
1715 non-consecutive groups of registers, each represented by a PARALLEL. */
1717 void
1718 emit_group_move (rtx dst, rtx src)
1720 int i;
1722 if (GET_CODE (src) != PARALLEL
1723 || GET_CODE (dst) != PARALLEL
1724 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1725 abort ();
1727 /* Skip first entry if NULL. */
1728 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1729 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1730 XEXP (XVECEXP (src, 0, i), 0));
1733 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1734 where SRC is non-consecutive registers represented by a PARALLEL.
1735 SSIZE represents the total size of block ORIG_DST, or -1 if not
1736 known. */
1738 void
1739 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1741 rtx *tmps, dst;
1742 int start, i;
1744 if (GET_CODE (src) != PARALLEL)
1745 abort ();
1747 /* Check for a NULL entry, used to indicate that the parameter goes
1748 both on the stack and in registers. */
1749 if (XEXP (XVECEXP (src, 0, 0), 0))
1750 start = 0;
1751 else
1752 start = 1;
1754 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1756 /* Copy the (probable) hard regs into pseudos. */
1757 for (i = start; i < XVECLEN (src, 0); i++)
1759 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1760 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1761 emit_move_insn (tmps[i], reg);
1764 /* If we won't be storing directly into memory, protect the real destination
1765 from strange tricks we might play. */
1766 dst = orig_dst;
1767 if (GET_CODE (dst) == PARALLEL)
1769 rtx temp;
1771 /* We can get a PARALLEL dst if there is a conditional expression in
1772 a return statement. In that case, the dst and src are the same,
1773 so no action is necessary. */
1774 if (rtx_equal_p (dst, src))
1775 return;
1777 /* It is unclear if we can ever reach here, but we may as well handle
1778 it. Allocate a temporary, and split this into a store/load to/from
1779 the temporary. */
1781 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1782 emit_group_store (temp, src, type, ssize);
1783 emit_group_load (dst, temp, type, ssize);
1784 return;
1786 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1788 dst = gen_reg_rtx (GET_MODE (orig_dst));
1789 /* Make life a bit easier for combine. */
1790 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1793 /* Process the pieces. */
1794 for (i = start; i < XVECLEN (src, 0); i++)
1796 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1797 enum machine_mode mode = GET_MODE (tmps[i]);
1798 unsigned int bytelen = GET_MODE_SIZE (mode);
1799 rtx dest = dst;
1801 /* Handle trailing fragments that run over the size of the struct. */
1802 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1804 /* store_bit_field always takes its value from the lsb.
1805 Move the fragment to the lsb if it's not already there. */
1806 if (
1807 #ifdef BLOCK_REG_PADDING
1808 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1809 == (BYTES_BIG_ENDIAN ? upward : downward)
1810 #else
1811 BYTES_BIG_ENDIAN
1812 #endif
1815 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1816 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1817 build_int_cst (NULL_TREE, shift),
1818 tmps[i], 0);
1820 bytelen = ssize - bytepos;
1823 if (GET_CODE (dst) == CONCAT)
1825 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1826 dest = XEXP (dst, 0);
1827 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1829 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1830 dest = XEXP (dst, 1);
1832 else if (bytepos == 0 && XVECLEN (src, 0))
1834 dest = assign_stack_temp (GET_MODE (dest),
1835 GET_MODE_SIZE (GET_MODE (dest)), 0);
1836 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1837 tmps[i]);
1838 dst = dest;
1839 break;
1841 else
1842 abort ();
1845 /* Optimize the access just a bit. */
1846 if (MEM_P (dest)
1847 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1848 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1849 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1850 && bytelen == GET_MODE_SIZE (mode))
1851 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1852 else
1853 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1854 mode, tmps[i]);
1857 /* Copy from the pseudo into the (probable) hard reg. */
1858 if (orig_dst != dst)
1859 emit_move_insn (orig_dst, dst);
1862 /* Generate code to copy a BLKmode object of TYPE out of a
1863 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1864 is null, a stack temporary is created. TGTBLK is returned.
1866 The purpose of this routine is to handle functions that return
1867 BLKmode structures in registers. Some machines (the PA for example)
1868 want to return all small structures in registers regardless of the
1869 structure's alignment. */
1872 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1874 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1875 rtx src = NULL, dst = NULL;
1876 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1877 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1879 if (tgtblk == 0)
1881 tgtblk = assign_temp (build_qualified_type (type,
1882 (TYPE_QUALS (type)
1883 | TYPE_QUAL_CONST)),
1884 0, 1, 1);
1885 preserve_temp_slots (tgtblk);
1888 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1889 into a new pseudo which is a full word. */
1891 if (GET_MODE (srcreg) != BLKmode
1892 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1893 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1895 /* If the structure doesn't take up a whole number of words, see whether
1896 SRCREG is padded on the left or on the right. If it's on the left,
1897 set PADDING_CORRECTION to the number of bits to skip.
1899 In most ABIs, the structure will be returned at the least significant end of
1900 the register, which translates to right padding on little-endian
1901 targets and left padding on big-endian targets. The opposite
1902 holds if the structure is returned at the most significant
1903 end of the register. */
1904 if (bytes % UNITS_PER_WORD != 0
1905 && (targetm.calls.return_in_msb (type)
1906 ? !BYTES_BIG_ENDIAN
1907 : BYTES_BIG_ENDIAN))
1908 padding_correction
1909 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
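/* Worked example (illustrative): on a 32-bit big-endian target returning a
   3-byte structure at the least significant end of the register,
   bytes % UNITS_PER_WORD == 3, so padding_correction == 32 - 24 == 8 and the
   copy loop below skips the 8 pad bits at the most significant end of
   SRCREG.  */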
1911 /* Copy the structure BITSIZE bits at a time.
1913 We could probably emit more efficient code for machines which do not use
1914 strict alignment, but it doesn't seem worth the effort at the current
1915 time. */
1916 for (bitpos = 0, xbitpos = padding_correction;
1917 bitpos < bytes * BITS_PER_UNIT;
1918 bitpos += bitsize, xbitpos += bitsize)
1920 /* We need a new source operand each time xbitpos is on a
1921 word boundary and when xbitpos == padding_correction
1922 (the first time through). */
1923 if (xbitpos % BITS_PER_WORD == 0
1924 || xbitpos == padding_correction)
1925 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1926 GET_MODE (srcreg));
1928 /* We need a new destination operand each time bitpos is on
1929 a word boundary. */
1930 if (bitpos % BITS_PER_WORD == 0)
1931 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
1933 /* Use xbitpos for the source extraction (right justified) and
1934 bitpos for the destination store (left justified). */
1935 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1936 extract_bit_field (src, bitsize,
1937 xbitpos % BITS_PER_WORD, 1,
1938 NULL_RTX, word_mode, word_mode));
1941 return tgtblk;
1944 /* Add a USE expression for REG to the (possibly empty) list pointed
1945 to by CALL_FUSAGE. REG must denote a hard register. */
1947 void
1948 use_reg (rtx *call_fusage, rtx reg)
1950 if (!REG_P (reg)
1951 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1952 abort ();
1954 *call_fusage
1955 = gen_rtx_EXPR_LIST (VOIDmode,
1956 gen_rtx_USE (VOIDmode, reg), *call_fusage);
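/* A minimal usage sketch (illustrative; the register number is arbitrary):
   a call expander passing an argument in hard register 0 might record

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 0));

   and later attach CALL_FUSAGE to the CALL_INSN so data-flow passes see
   that the register is used by the call.  */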
1959 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1960 starting at REGNO. All of these registers must be hard registers. */
1962 void
1963 use_regs (rtx *call_fusage, int regno, int nregs)
1965 int i;
1967 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1968 abort ();
1970 for (i = 0; i < nregs; i++)
1971 use_reg (call_fusage, regno_reg_rtx[regno + i]);
1974 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1975 PARALLEL REGS. This is for calls that pass values in multiple
1976 non-contiguous locations. The Irix 6 ABI has examples of this. */
1978 void
1979 use_group_regs (rtx *call_fusage, rtx regs)
1981 int i;
1983 for (i = 0; i < XVECLEN (regs, 0); i++)
1985 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1987 /* A NULL entry means the parameter goes both on the stack and in
1988 registers. This can also be a MEM for targets that pass values
1989 partially on the stack and partially in registers. */
1990 if (reg != 0 && REG_P (reg))
1991 use_reg (call_fusage, reg);
1996 /* Determine whether the LEN bytes generated by CONSTFUN can be
1997 stored to memory using several move instructions. CONSTFUNDATA is
1998 a pointer which will be passed as argument in every CONSTFUN call.
1999 ALIGN is maximum alignment we can assume. Return nonzero if a
2000 call to store_by_pieces should succeed. */
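/* A sketch of a CONSTFUN callback (hypothetical name, shown only for
   illustration; it assumes each chunk fits in a HOST_WIDE_INT): a caller
   spraying a single byte value across memory might use

     static rtx
     const_byte_value (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                       enum machine_mode mode)
     {
       unsigned HOST_WIDE_INT c = *(const unsigned char *) data;
       unsigned HOST_WIDE_INT v = 0;
       unsigned int i;

       /* Replicate the byte across the whole chunk.  */
       for (i = 0; i < GET_MODE_SIZE (mode); i++)
         v = (v << BITS_PER_UNIT) | c;
       return gen_int_mode (v, mode);
     }

   and would test can_store_by_pieces (len, const_byte_value, &byte, align)
   before committing to the by-pieces strategy.  */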
2003 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2004 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2005 void *constfundata, unsigned int align)
2007 unsigned HOST_WIDE_INT l;
2008 unsigned int max_size;
2009 HOST_WIDE_INT offset = 0;
2010 enum machine_mode mode, tmode;
2011 enum insn_code icode;
2012 int reverse;
2013 rtx cst;
2015 if (len == 0)
2016 return 1;
2018 if (! STORE_BY_PIECES_P (len, align))
2019 return 0;
2021 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2022 if (align >= GET_MODE_ALIGNMENT (tmode))
2023 align = GET_MODE_ALIGNMENT (tmode);
2024 else
2026 enum machine_mode xmode;
2028 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2029 tmode != VOIDmode;
2030 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2031 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2032 || SLOW_UNALIGNED_ACCESS (tmode, align))
2033 break;
2035 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2038 /* We would first store what we can in the largest integer mode, then go to
2039 successively smaller modes. */
2041 for (reverse = 0;
2042 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2043 reverse++)
2045 l = len;
2046 mode = VOIDmode;
2047 max_size = STORE_MAX_PIECES + 1;
2048 while (max_size > 1)
2050 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2051 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2052 if (GET_MODE_SIZE (tmode) < max_size)
2053 mode = tmode;
2055 if (mode == VOIDmode)
2056 break;
2058 icode = mov_optab->handlers[(int) mode].insn_code;
2059 if (icode != CODE_FOR_nothing
2060 && align >= GET_MODE_ALIGNMENT (mode))
2062 unsigned int size = GET_MODE_SIZE (mode);
2064 while (l >= size)
2066 if (reverse)
2067 offset -= size;
2069 cst = (*constfun) (constfundata, offset, mode);
2070 if (!LEGITIMATE_CONSTANT_P (cst))
2071 return 0;
2073 if (!reverse)
2074 offset += size;
2076 l -= size;
2080 max_size = GET_MODE_SIZE (mode);
2083 /* The code above should have handled everything. */
2084 if (l != 0)
2085 abort ();
2088 return 1;
2091 /* Generate several move instructions to store LEN bytes generated by
2092 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2093 pointer which will be passed as argument in every CONSTFUN call.
2094 ALIGN is maximum alignment we can assume.
2095 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2096 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2097 stpcpy. */
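/* Continuing the sketch above (names hypothetical): a caller wanting
   mempcpy-like semantics might write

     rtx end = store_by_pieces (dst, len, const_byte_value, &byte,
                                align, 1);

   where END then addresses the first byte past the stored block; passing
   2 instead would address the last byte stored, ala stpcpy.  */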
2100 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2101 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2102 void *constfundata, unsigned int align, int endp)
2104 struct store_by_pieces data;
2106 if (len == 0)
2108 if (endp == 2)
2109 abort ();
2110 return to;
2113 if (! STORE_BY_PIECES_P (len, align))
2114 abort ();
2115 data.constfun = constfun;
2116 data.constfundata = constfundata;
2117 data.len = len;
2118 data.to = to;
2119 store_by_pieces_1 (&data, align);
2120 if (endp)
2122 rtx to1;
2124 if (data.reverse)
2125 abort ();
2126 if (data.autinc_to)
2128 if (endp == 2)
2130 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2131 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2132 else
2133 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2134 -1));
2136 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2137 data.offset);
2139 else
2141 if (endp == 2)
2142 --data.offset;
2143 to1 = adjust_address (data.to, QImode, data.offset);
2145 return to1;
2147 else
2148 return data.to;
2151 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2152 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2154 static void
2155 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2157 struct store_by_pieces data;
2159 if (len == 0)
2160 return;
2162 data.constfun = clear_by_pieces_1;
2163 data.constfundata = NULL;
2164 data.len = len;
2165 data.to = to;
2166 store_by_pieces_1 (&data, align);
2169 /* Callback routine for clear_by_pieces.
2170 Return const0_rtx unconditionally. */
2172 static rtx
2173 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2174 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2175 enum machine_mode mode ATTRIBUTE_UNUSED)
2177 return const0_rtx;
2180 /* Subroutine of clear_by_pieces and store_by_pieces.
2181 Generate several move instructions to store LEN bytes of block TO. (A MEM
2182 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2184 static void
2185 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2186 unsigned int align ATTRIBUTE_UNUSED)
2188 rtx to_addr = XEXP (data->to, 0);
2189 unsigned int max_size = STORE_MAX_PIECES + 1;
2190 enum machine_mode mode = VOIDmode, tmode;
2191 enum insn_code icode;
2193 data->offset = 0;
2194 data->to_addr = to_addr;
2195 data->autinc_to
2196 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2197 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2199 data->explicit_inc_to = 0;
2200 data->reverse
2201 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2202 if (data->reverse)
2203 data->offset = data->len;
2205 /* If storing requires more than two move insns,
2206 copy addresses to registers (to make displacements shorter)
2207 and use post-increment if available. */
2208 if (!data->autinc_to
2209 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2211 /* Determine the main mode we'll be using. */
2212 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2213 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2214 if (GET_MODE_SIZE (tmode) < max_size)
2215 mode = tmode;
2217 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2219 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2220 data->autinc_to = 1;
2221 data->explicit_inc_to = -1;
2224 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2225 && ! data->autinc_to)
2227 data->to_addr = copy_addr_to_reg (to_addr);
2228 data->autinc_to = 1;
2229 data->explicit_inc_to = 1;
2232 if ( !data->autinc_to && CONSTANT_P (to_addr))
2233 data->to_addr = copy_addr_to_reg (to_addr);
2236 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2237 if (align >= GET_MODE_ALIGNMENT (tmode))
2238 align = GET_MODE_ALIGNMENT (tmode);
2239 else
2241 enum machine_mode xmode;
2243 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2244 tmode != VOIDmode;
2245 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2246 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2247 || SLOW_UNALIGNED_ACCESS (tmode, align))
2248 break;
2250 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2253 /* First store what we can in the largest integer mode, then go to
2254 successively smaller modes. */
2256 while (max_size > 1)
2258 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2259 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2260 if (GET_MODE_SIZE (tmode) < max_size)
2261 mode = tmode;
2263 if (mode == VOIDmode)
2264 break;
2266 icode = mov_optab->handlers[(int) mode].insn_code;
2267 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2268 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2270 max_size = GET_MODE_SIZE (mode);
2273 /* The code above should have handled everything. */
2274 if (data->len != 0)
2275 abort ();
2278 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2279 with move instructions for mode MODE. GENFUN is the gen_... function
2280 to make a move insn for that mode. DATA has all the other info. */
2282 static void
2283 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2284 struct store_by_pieces *data)
2286 unsigned int size = GET_MODE_SIZE (mode);
2287 rtx to1, cst;
2289 while (data->len >= size)
2291 if (data->reverse)
2292 data->offset -= size;
2294 if (data->autinc_to)
2295 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2296 data->offset);
2297 else
2298 to1 = adjust_address (data->to, mode, data->offset);
2300 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2301 emit_insn (gen_add2_insn (data->to_addr,
2302 GEN_INT (-(HOST_WIDE_INT) size)));
2304 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2305 emit_insn ((*genfun) (to1, cst));
2307 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2308 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2310 if (! data->reverse)
2311 data->offset += size;
2313 data->len -= size;
2317 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2318 its length in bytes. */
2321 clear_storage (rtx object, rtx size)
2323 rtx retval = 0;
2324 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2325 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2327 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2328 just move a zero. Otherwise, do this a piece at a time. */
2329 if (GET_MODE (object) != BLKmode
2330 && GET_CODE (size) == CONST_INT
2331 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2332 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2333 else
2335 if (size == const0_rtx)
2337 else if (GET_CODE (size) == CONST_INT
2338 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2339 clear_by_pieces (object, INTVAL (size), align);
2340 else if (clear_storage_via_clrmem (object, size, align))
2342 else
2343 retval = clear_storage_via_libcall (object, size);
2346 return retval;
2349 /* A subroutine of clear_storage. Expand a clrmem pattern;
2350 return true if successful. */
2352 static bool
2353 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2355 /* Try the most limited insn first, because there's no point
2356 including more than one in the machine description unless
2357 the more limited one has some advantage. */
2359 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2360 enum machine_mode mode;
2362 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2363 mode = GET_MODE_WIDER_MODE (mode))
2365 enum insn_code code = clrmem_optab[(int) mode];
2366 insn_operand_predicate_fn pred;
2368 if (code != CODE_FOR_nothing
2369 /* We don't need MODE to be narrower than
2370 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2371 the mode mask, as it is returned by the macro, it will
2372 definitely be less than the actual mode mask. */
2373 && ((GET_CODE (size) == CONST_INT
2374 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2375 <= (GET_MODE_MASK (mode) >> 1)))
2376 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2377 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2378 || (*pred) (object, BLKmode))
2379 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2380 || (*pred) (opalign, VOIDmode)))
2382 rtx op1;
2383 rtx last = get_last_insn ();
2384 rtx pat;
2386 op1 = convert_to_mode (mode, size, 1);
2387 pred = insn_data[(int) code].operand[1].predicate;
2388 if (pred != 0 && ! (*pred) (op1, mode))
2389 op1 = copy_to_mode_reg (mode, op1);
2391 pat = GEN_FCN ((int) code) (object, op1, opalign);
2392 if (pat)
2394 emit_insn (pat);
2395 return true;
2397 else
2398 delete_insns_since (last);
2402 return false;
2405 /* A subroutine of clear_storage. Expand a call to memset.
2406 Return the return value of memset, 0 otherwise. */
2408 static rtx
2409 clear_storage_via_libcall (rtx object, rtx size)
2411 tree call_expr, arg_list, fn, object_tree, size_tree;
2412 enum machine_mode size_mode;
2413 rtx retval;
2415 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2416 place those new pseudos into a VAR_DECL and use them later. */
2418 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2420 size_mode = TYPE_MODE (sizetype);
2421 size = convert_to_mode (size_mode, size, 1);
2422 size = copy_to_mode_reg (size_mode, size);
2424 /* It is incorrect to use the libcall calling conventions to call
2425 memset in this context. This could be a user call to memset and
2426 the user may wish to examine the return value from memset. For
2427 targets where libcalls and normal calls have different conventions
2428 for returning pointers, we could end up generating incorrect code. */
2430 object_tree = make_tree (ptr_type_node, object);
2431 size_tree = make_tree (sizetype, size);
2433 fn = clear_storage_libcall_fn (true);
2434 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2435 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2436 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2438 /* Now we have to build up the CALL_EXPR itself. */
2439 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2440 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2441 call_expr, arg_list, NULL_TREE);
2443 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2445 return retval;
2448 /* A subroutine of clear_storage_via_libcall. Create the tree node
2449 for the function we use for block clears. The first time FOR_CALL
2450 is true, we call assemble_external. */
2452 static GTY(()) tree block_clear_fn;
2454 void
2455 init_block_clear_fn (const char *asmspec)
2457 if (!block_clear_fn)
2459 tree fn, args;
2461 fn = get_identifier ("memset");
2462 args = build_function_type_list (ptr_type_node, ptr_type_node,
2463 integer_type_node, sizetype,
2464 NULL_TREE);
2466 fn = build_decl (FUNCTION_DECL, fn, args);
2467 DECL_EXTERNAL (fn) = 1;
2468 TREE_PUBLIC (fn) = 1;
2469 DECL_ARTIFICIAL (fn) = 1;
2470 TREE_NOTHROW (fn) = 1;
2472 block_clear_fn = fn;
2475 if (asmspec)
2476 set_user_assembler_name (block_clear_fn, asmspec);
2479 static tree
2480 clear_storage_libcall_fn (int for_call)
2482 static bool emitted_extern;
2484 if (!block_clear_fn)
2485 init_block_clear_fn (NULL);
2487 if (for_call && !emitted_extern)
2489 emitted_extern = true;
2490 make_decl_rtl (block_clear_fn);
2491 assemble_external (block_clear_fn);
2494 return block_clear_fn;
2497 /* Generate code to copy Y into X.
2498 Both Y and X must have the same mode, except that
2499 Y can be a constant with VOIDmode.
2500 This mode cannot be BLKmode; use emit_block_move for that.
2502 Return the last instruction emitted. */
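/* A trivial usage sketch (illustrative): loading a constant into a fresh
   pseudo

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   relies on this routine to legitimize the constant, forcing it into the
   constant pool first if the target cannot move it directly.  */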
2505 emit_move_insn (rtx x, rtx y)
2507 enum machine_mode mode = GET_MODE (x);
2508 rtx y_cst = NULL_RTX;
2509 rtx last_insn, set;
2511 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2512 abort ();
2514 if (CONSTANT_P (y))
2516 if (optimize
2517 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2518 && (last_insn = compress_float_constant (x, y)))
2519 return last_insn;
2521 y_cst = y;
2523 if (!LEGITIMATE_CONSTANT_P (y))
2525 y = force_const_mem (mode, y);
2527 /* If the target's cannot_force_const_mem prevented the spill,
2528 assume that the target's move expanders will also take care
2529 of the non-legitimate constant. */
2530 if (!y)
2531 y = y_cst;
2535 /* If X or Y are memory references, verify that their addresses are valid
2536 for the machine. */
2537 if (MEM_P (x)
2538 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2539 && ! push_operand (x, GET_MODE (x)))
2540 || (flag_force_addr
2541 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2542 x = validize_mem (x);
2544 if (MEM_P (y)
2545 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2546 || (flag_force_addr
2547 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2548 y = validize_mem (y);
2550 if (mode == BLKmode)
2551 abort ();
2553 last_insn = emit_move_insn_1 (x, y);
2555 if (y_cst && REG_P (x)
2556 && (set = single_set (last_insn)) != NULL_RTX
2557 && SET_DEST (set) == x
2558 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2559 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2561 return last_insn;
2564 /* Low level part of emit_move_insn.
2565 Called just like emit_move_insn, but assumes X and Y
2566 are basically valid. */
2569 emit_move_insn_1 (rtx x, rtx y)
2571 enum machine_mode mode = GET_MODE (x);
2572 enum machine_mode submode;
2573 enum mode_class class = GET_MODE_CLASS (mode);
2575 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2576 abort ();
2578 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2579 return
2580 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2582 /* Expand complex moves by moving real part and imag part, if possible. */
2583 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2584 && BLKmode != (submode = GET_MODE_INNER (mode))
2585 && (mov_optab->handlers[(int) submode].insn_code
2586 != CODE_FOR_nothing))
2588 /* Don't split destination if it is a stack push. */
2589 int stack = push_operand (x, GET_MODE (x));
2591 #ifdef PUSH_ROUNDING
2592 /* In case we output to the stack, but the size is smaller than what the
2593 machine can push exactly, we need to use move instructions. */
2594 if (stack
2595 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2596 != GET_MODE_SIZE (submode)))
2598 rtx temp;
2599 HOST_WIDE_INT offset1, offset2;
2601 /* Do not use anti_adjust_stack, since we don't want to update
2602 stack_pointer_delta. */
2603 temp = expand_binop (Pmode,
2604 #ifdef STACK_GROWS_DOWNWARD
2605 sub_optab,
2606 #else
2607 add_optab,
2608 #endif
2609 stack_pointer_rtx,
2610 GEN_INT
2611 (PUSH_ROUNDING
2612 (GET_MODE_SIZE (GET_MODE (x)))),
2613 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2615 if (temp != stack_pointer_rtx)
2616 emit_move_insn (stack_pointer_rtx, temp);
2618 #ifdef STACK_GROWS_DOWNWARD
2619 offset1 = 0;
2620 offset2 = GET_MODE_SIZE (submode);
2621 #else
2622 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2623 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2624 + GET_MODE_SIZE (submode));
2625 #endif
2627 emit_move_insn (change_address (x, submode,
2628 gen_rtx_PLUS (Pmode,
2629 stack_pointer_rtx,
2630 GEN_INT (offset1))),
2631 gen_realpart (submode, y));
2632 emit_move_insn (change_address (x, submode,
2633 gen_rtx_PLUS (Pmode,
2634 stack_pointer_rtx,
2635 GEN_INT (offset2))),
2636 gen_imagpart (submode, y));
2638 else
2639 #endif
2640 /* If this is a stack push, push the highpart first, so it
2641 will be in the argument order.
2643 In that case, change_address is used only to convert
2644 the mode, not to change the address. */
2645 if (stack)
2647 /* Note that the real part always precedes the imag part in memory
2648 regardless of machine's endianness. */
2649 #ifdef STACK_GROWS_DOWNWARD
2650 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2651 gen_imagpart (submode, y));
2652 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2653 gen_realpart (submode, y));
2654 #else
2655 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2656 gen_realpart (submode, y));
2657 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2658 gen_imagpart (submode, y));
2659 #endif
2661 else
2663 rtx realpart_x, realpart_y;
2664 rtx imagpart_x, imagpart_y;
2666 /* If this is a complex value with each part being smaller than a
2667 word, the usual calling sequence will likely pack the pieces into
2668 a single register. Unfortunately, SUBREG of hard registers only
2669 deals in terms of words, so we have a problem converting input
2670 arguments to the CONCAT of two registers that is used elsewhere
2671 for complex values. If this is before reload, we can copy it into
2672 memory and reload. FIXME, we should see about using extract and
2673 insert on integer registers, but complex short and complex char
2674 variables should be rarely used. */
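/* Concrete case (illustrative): on a 32-bit target a complex char value
   (CQImode, 16 bits) sitting in a hard register is smaller than two words,
   so it is spilled through a stack temporary in the matching integer mode
   (HImode) and reloaded, rather than taking a SUBREG of the hard
   register.  */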
2675 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2676 && (reload_in_progress | reload_completed) == 0)
2678 int packed_dest_p
2679 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2680 int packed_src_p
2681 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2683 if (packed_dest_p || packed_src_p)
2685 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2686 ? MODE_FLOAT : MODE_INT);
2688 enum machine_mode reg_mode
2689 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2691 if (reg_mode != BLKmode)
2693 rtx mem = assign_stack_temp (reg_mode,
2694 GET_MODE_SIZE (mode), 0);
2695 rtx cmem = adjust_address (mem, mode, 0);
2697 if (packed_dest_p)
2699 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2701 emit_move_insn_1 (cmem, y);
2702 return emit_move_insn_1 (sreg, mem);
2704 else
2706 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2708 emit_move_insn_1 (mem, sreg);
2709 return emit_move_insn_1 (x, cmem);
2715 realpart_x = gen_realpart (submode, x);
2716 realpart_y = gen_realpart (submode, y);
2717 imagpart_x = gen_imagpart (submode, x);
2718 imagpart_y = gen_imagpart (submode, y);
2720 /* Show the output dies here. This is necessary for SUBREGs
2721 of pseudos since we cannot track their lifetimes correctly;
2722 hard regs shouldn't appear here except as return values.
2723 We never want to emit such a clobber after reload. */
2724 if (x != y
2725 && ! (reload_in_progress || reload_completed)
2726 && (GET_CODE (realpart_x) == SUBREG
2727 || GET_CODE (imagpart_x) == SUBREG))
2728 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2730 emit_move_insn (realpart_x, realpart_y);
2731 emit_move_insn (imagpart_x, imagpart_y);
2734 return get_last_insn ();
2737 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2738 find a mode to do it in. If we have a movcc, use it. Otherwise,
2739 find the MODE_INT mode of the same width. */
2740 else if (GET_MODE_CLASS (mode) == MODE_CC
2741 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2743 enum insn_code insn_code;
2744 enum machine_mode tmode = VOIDmode;
2745 rtx x1 = x, y1 = y;
2747 if (mode != CCmode
2748 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2749 tmode = CCmode;
2750 else
2751 for (tmode = QImode; tmode != VOIDmode;
2752 tmode = GET_MODE_WIDER_MODE (tmode))
2753 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2754 break;
2756 if (tmode == VOIDmode)
2757 abort ();
2759 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2760 may call change_address which is not appropriate if we were
2761 called when a reload was in progress. We don't have to worry
2762 about changing the address since the size in bytes is supposed to
2763 be the same. Copy the MEM to change the mode and move any
2764 substitutions from the old MEM to the new one. */
2766 if (reload_in_progress)
2768 x = gen_lowpart_common (tmode, x1);
2769 if (x == 0 && MEM_P (x1))
2771 x = adjust_address_nv (x1, tmode, 0);
2772 copy_replacements (x1, x);
2775 y = gen_lowpart_common (tmode, y1);
2776 if (y == 0 && MEM_P (y1))
2778 y = adjust_address_nv (y1, tmode, 0);
2779 copy_replacements (y1, y);
2782 else
2784 x = gen_lowpart (tmode, x);
2785 y = gen_lowpart (tmode, y);
2788 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2789 return emit_insn (GEN_FCN (insn_code) (x, y));
2792 /* Try using a move pattern for the corresponding integer mode. This is
2793 only safe when simplify_subreg can convert MODE constants into integer
2794 constants. At present, it can only do this reliably if the value
2795 fits within a HOST_WIDE_INT. */
2796 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2797 && (submode = int_mode_for_mode (mode)) != BLKmode
2798 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2799 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2800 (simplify_gen_subreg (submode, x, mode, 0),
2801 simplify_gen_subreg (submode, y, mode, 0)));
2803 /* This will handle any multi-word or full-word mode that lacks a move_insn
2804 pattern. However, you will get better code if you define such patterns,
2805 even if they must turn into multiple assembler instructions. */
2806 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
2808 rtx last_insn = 0;
2809 rtx seq, inner;
2810 int need_clobber;
2811 int i;
2813 #ifdef PUSH_ROUNDING
2815 /* If X is a push on the stack, do the push now and replace
2816 X with a reference to the stack pointer. */
2817 if (push_operand (x, GET_MODE (x)))
2819 rtx temp;
2820 enum rtx_code code;
2822 /* Do not use anti_adjust_stack, since we don't want to update
2823 stack_pointer_delta. */
2824 temp = expand_binop (Pmode,
2825 #ifdef STACK_GROWS_DOWNWARD
2826 sub_optab,
2827 #else
2828 add_optab,
2829 #endif
2830 stack_pointer_rtx,
2831 GEN_INT
2832 (PUSH_ROUNDING
2833 (GET_MODE_SIZE (GET_MODE (x)))),
2834 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2836 if (temp != stack_pointer_rtx)
2837 emit_move_insn (stack_pointer_rtx, temp);
2839 code = GET_CODE (XEXP (x, 0));
2841 /* Just hope that small offsets off SP are OK. */
2842 if (code == POST_INC)
2843 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2844 GEN_INT (-((HOST_WIDE_INT)
2845 GET_MODE_SIZE (GET_MODE (x)))));
2846 else if (code == POST_DEC)
2847 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2848 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2849 else
2850 temp = stack_pointer_rtx;
2852 x = change_address (x, VOIDmode, temp);
2854 #endif
2856 /* If we are in reload, see if either operand is a MEM whose address
2857 is scheduled for replacement. */
2858 if (reload_in_progress && MEM_P (x)
2859 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2860 x = replace_equiv_address_nv (x, inner);
2861 if (reload_in_progress && MEM_P (y)
2862 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2863 y = replace_equiv_address_nv (y, inner);
2865 start_sequence ();
2867 need_clobber = 0;
2868 for (i = 0;
2869 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2870 i++)
2872 rtx xpart = operand_subword (x, i, 1, mode);
2873 rtx ypart = operand_subword (y, i, 1, mode);
2875 /* If we can't get a part of Y, put Y into memory if it is a
2876 constant. Otherwise, force it into a register. If we still
2877 can't get a part of Y, abort. */
2878 if (ypart == 0 && CONSTANT_P (y))
2880 y = force_const_mem (mode, y);
2881 ypart = operand_subword (y, i, 1, mode);
2883 else if (ypart == 0)
2884 ypart = operand_subword_force (y, i, mode);
2886 if (xpart == 0 || ypart == 0)
2887 abort ();
2889 need_clobber |= (GET_CODE (xpart) == SUBREG);
2891 last_insn = emit_move_insn (xpart, ypart);
2894 seq = get_insns ();
2895 end_sequence ();
2897 /* Show the output dies here. This is necessary for SUBREGs
2898 of pseudos since we cannot track their lifetimes correctly;
2899 hard regs shouldn't appear here except as return values.
2900 We never want to emit such a clobber after reload. */
2901 if (x != y
2902 && ! (reload_in_progress || reload_completed)
2903 && need_clobber != 0)
2904 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2906 emit_insn (seq);
2908 return last_insn;
2910 else
2911 abort ();
2914 /* If Y is representable exactly in a narrower mode, and the target can
2915 perform the extension directly from constant or memory, then emit the
2916 move as an extension. */
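/* Worked example (illustrative): moving the DFmode constant 1.0 into a
   register on a target with a mem:SF -> reg:DF extension pattern can be
   emitted as an SFmode constant-pool load plus extendsfdf2, because 1.0
   truncates to SFmode exactly; a constant such as 0.1 does not truncate
   exactly and is rejected by the exactness check below.  */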
2918 static rtx
2919 compress_float_constant (rtx x, rtx y)
2921 enum machine_mode dstmode = GET_MODE (x);
2922 enum machine_mode orig_srcmode = GET_MODE (y);
2923 enum machine_mode srcmode;
2924 REAL_VALUE_TYPE r;
2926 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2928 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2929 srcmode != orig_srcmode;
2930 srcmode = GET_MODE_WIDER_MODE (srcmode))
2932 enum insn_code ic;
2933 rtx trunc_y, last_insn;
2935 /* Skip if the target can't extend this way. */
2936 ic = can_extend_p (dstmode, srcmode, 0);
2937 if (ic == CODE_FOR_nothing)
2938 continue;
2940 /* Skip if the narrowed value isn't exact. */
2941 if (! exact_real_truncate (srcmode, &r))
2942 continue;
2944 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2946 if (LEGITIMATE_CONSTANT_P (trunc_y))
2948 /* Skip if the target needs extra instructions to perform
2949 the extension. */
2950 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2951 continue;
2953 else if (float_extend_from_mem[dstmode][srcmode])
2954 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2955 else
2956 continue;
2958 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2959 last_insn = get_last_insn ();
2961 if (REG_P (x))
2962 set_unique_reg_note (last_insn, REG_EQUAL, y);
2964 return last_insn;
2967 return NULL_RTX;
2970 /* Pushing data onto the stack. */
2972 /* Push a block of length SIZE (perhaps variable)
2973 and return an rtx to address the beginning of the block.
2974 The value may be virtual_outgoing_args_rtx.
2976 EXTRA is the number of bytes of padding to push in addition to SIZE.
2977 BELOW nonzero means this padding comes at low addresses;
2978 otherwise, the padding comes at high addresses. */
2981 push_block (rtx size, int extra, int below)
2983 rtx temp;
2985 size = convert_modes (Pmode, ptr_mode, size, 1);
2986 if (CONSTANT_P (size))
2987 anti_adjust_stack (plus_constant (size, extra));
2988 else if (REG_P (size) && extra == 0)
2989 anti_adjust_stack (size);
2990 else
2992 temp = copy_to_mode_reg (Pmode, size);
2993 if (extra != 0)
2994 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2995 temp, 0, OPTAB_LIB_WIDEN);
2996 anti_adjust_stack (temp);
2999 #ifndef STACK_GROWS_DOWNWARD
3000 if (0)
3001 #else
3002 if (1)
3003 #endif
3005 temp = virtual_outgoing_args_rtx;
3006 if (extra != 0 && below)
3007 temp = plus_constant (temp, extra);
3009 else
3011 if (GET_CODE (size) == CONST_INT)
3012 temp = plus_constant (virtual_outgoing_args_rtx,
3013 -INTVAL (size) - (below ? 0 : extra));
3014 else if (extra != 0 && !below)
3015 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3016 negate_rtx (Pmode, plus_constant (size, extra)));
3017 else
3018 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3019 negate_rtx (Pmode, size));
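/* Worked example for the branch above (illustrative numbers, stack growing
   upward): with SIZE == 32 and EXTRA == 8 bytes of padding above the block
   (BELOW == 0), the block begins at virtual_outgoing_args_rtx - 40; with
   the padding below the block (BELOW nonzero) it begins at
   virtual_outgoing_args_rtx - 32.  */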
3022 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3025 #ifdef PUSH_ROUNDING
3027 /* Emit single push insn. */
3029 static void
3030 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3032 rtx dest_addr;
3033 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3034 rtx dest;
3035 enum insn_code icode;
3036 insn_operand_predicate_fn pred;
3038 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3039 /* If there is a push pattern, use it. Otherwise try the old way of
3040 throwing a MEM representing the push operation to the move expander. */
3041 icode = push_optab->handlers[(int) mode].insn_code;
3042 if (icode != CODE_FOR_nothing)
3044 if (((pred = insn_data[(int) icode].operand[0].predicate)
3045 && !((*pred) (x, mode))))
3046 x = force_reg (mode, x);
3047 emit_insn (GEN_FCN (icode) (x));
3048 return;
3050 if (GET_MODE_SIZE (mode) == rounded_size)
3051 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3052 /* If we are to pad downward, adjust the stack pointer first and
3053 then store X into the stack location using an offset. This is
3054 because emit_move_insn does not know how to pad; it does not have
3055 access to type. */
3056 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3058 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3059 HOST_WIDE_INT offset;
3061 emit_move_insn (stack_pointer_rtx,
3062 expand_binop (Pmode,
3063 #ifdef STACK_GROWS_DOWNWARD
3064 sub_optab,
3065 #else
3066 add_optab,
3067 #endif
3068 stack_pointer_rtx,
3069 GEN_INT (rounded_size),
3070 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3072 offset = (HOST_WIDE_INT) padding_size;
3073 #ifdef STACK_GROWS_DOWNWARD
3074 if (STACK_PUSH_CODE == POST_DEC)
3075 /* We have already decremented the stack pointer, so get the
3076 previous value. */
3077 offset += (HOST_WIDE_INT) rounded_size;
3078 #else
3079 if (STACK_PUSH_CODE == POST_INC)
3080 /* We have already incremented the stack pointer, so get the
3081 previous value. */
3082 offset -= (HOST_WIDE_INT) rounded_size;
3083 #endif
3084 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3086 else
3088 #ifdef STACK_GROWS_DOWNWARD
3089 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3090 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3091 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3092 #else
3093 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3094 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3095 GEN_INT (rounded_size));
3096 #endif
3097 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3100 dest = gen_rtx_MEM (mode, dest_addr);
3102 if (type != 0)
3104 set_mem_attributes (dest, type, 1);
3106 if (flag_optimize_sibling_calls)
3107 /* Function incoming arguments may overlap with sibling call
3108 outgoing arguments and we cannot allow reordering of reads
3109 from function arguments with stores to outgoing arguments
3110 of sibling calls. */
3111 set_mem_alias_set (dest, 0);
3113 emit_move_insn (dest, x);
3115 #endif
3117 /* Generate code to push X onto the stack, assuming it has mode MODE and
3118 type TYPE.
3119 MODE is redundant except when X is a CONST_INT (since they don't
3120 carry mode info).
3121 SIZE is an rtx for the size of data to be copied (in bytes),
3122 needed only if X is BLKmode.
3124 ALIGN (in bits) is maximum alignment we can assume.
3126 If PARTIAL and REG are both nonzero, then copy that many of the first
3127 words of X into registers starting with REG, and push the rest of X.
3128 The amount of space pushed is decreased by PARTIAL words,
3129 rounded *down* to a multiple of PARM_BOUNDARY.
3130 REG must be a hard register in this case.
3131 If REG is zero but PARTIAL is not, take all other actions for an
3132 argument partially in registers, but do not actually load any
3133 registers.
3135 EXTRA is the amount in bytes of extra space to leave next to this arg.
3136 This is ignored if an argument block has already been allocated.
3138 On a machine that lacks real push insns, ARGS_ADDR is the address of
3139 the bottom of the argument block for this call. We use indexing off there
3140 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3141 argument block has not been preallocated.
3143 ARGS_SO_FAR is the size of args previously pushed for this call.
3145 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3146 for arguments passed in registers. If nonzero, it will be the number
3147 of bytes required. */
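/* A minimal call sketch (illustrative; the operand values are made up):
   pushing a word-mode scalar with no partial-register part and no
   preallocated argument block might look like

     emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX,
                     PARM_BOUNDARY, 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);

   i.e. SIZE is unneeded for a non-BLKmode value, and ARGS_ADDR == 0
   selects the real push-insn path where available.  */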
3149 void
3150 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3151 unsigned int align, int partial, rtx reg, int extra,
3152 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3153 rtx alignment_pad)
3155 rtx xinner;
3156 enum direction stack_direction
3157 #ifdef STACK_GROWS_DOWNWARD
3158 = downward;
3159 #else
3160 = upward;
3161 #endif
3163 /* Decide where to pad the argument: `downward' for below,
3164 `upward' for above, or `none' for don't pad it.
3165 Default is below for small data on big-endian machines; else above. */
3166 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3168 /* Invert direction if stack is post-decrement.
3169 FIXME: why? */
3170 if (STACK_PUSH_CODE == POST_DEC)
3171 if (where_pad != none)
3172 where_pad = (where_pad == downward ? upward : downward);
3174 xinner = x;
3176 if (mode == BLKmode)
3178 /* Copy a block into the stack, entirely or partially. */
3180 rtx temp;
3181 int used = partial * UNITS_PER_WORD;
3182 int offset;
3183 int skip;
3185 if (reg && GET_CODE (reg) == PARALLEL)
3187 /* Use the size of the elt to compute offset. */
3188 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3189 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3190 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3192 else
3193 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3195 if (size == 0)
3196 abort ();
3198 used -= offset;
3200 /* USED is now the # of bytes we need not copy to the stack
3201 because registers will take care of them. */
3203 if (partial != 0)
3204 xinner = adjust_address (xinner, BLKmode, used);
3206 /* If the partial register-part of the arg counts in its stack size,
3207 skip the part of stack space corresponding to the registers.
3208 Otherwise, start copying to the beginning of the stack space,
3209 by setting SKIP to 0. */
3210 skip = (reg_parm_stack_space == 0) ? 0 : used;
3212 #ifdef PUSH_ROUNDING
3213 /* Do it with several push insns if that doesn't take lots of insns
3214 and if there is no difficulty with push insns that skip bytes
3215 on the stack for alignment purposes. */
3216 if (args_addr == 0
3217 && PUSH_ARGS
3218 && GET_CODE (size) == CONST_INT
3219 && skip == 0
3220 && MEM_ALIGN (xinner) >= align
3221 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3222 /* Here we avoid the case of a structure whose weak alignment
3223 forces many pushes of a small amount of data,
3224 and such small pushes do rounding that causes trouble. */
3225 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3226 || align >= BIGGEST_ALIGNMENT
3227 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3228 == (align / BITS_PER_UNIT)))
3229 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3231 /* Push padding now if padding above and stack grows down,
3232 or if padding below and stack grows up.
3233 But if space already allocated, this has already been done. */
3234 if (extra && args_addr == 0
3235 && where_pad != none && where_pad != stack_direction)
3236 anti_adjust_stack (GEN_INT (extra));
3238 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3240 else
3241 #endif /* PUSH_ROUNDING */
3243 rtx target;
3245 /* Otherwise make space on the stack and copy the data
3246 to the address of that space. */
3248 /* Deduct words put into registers from the size we must copy. */
3249 if (partial != 0)
3251 if (GET_CODE (size) == CONST_INT)
3252 size = GEN_INT (INTVAL (size) - used);
3253 else
3254 size = expand_binop (GET_MODE (size), sub_optab, size,
3255 GEN_INT (used), NULL_RTX, 0,
3256 OPTAB_LIB_WIDEN);
3259 /* Get the address of the stack space.
3260 In this case, we do not deal with EXTRA separately.
3261 A single stack adjust will do. */
3262 if (! args_addr)
3264 temp = push_block (size, extra, where_pad == downward);
3265 extra = 0;
3267 else if (GET_CODE (args_so_far) == CONST_INT)
3268 temp = memory_address (BLKmode,
3269 plus_constant (args_addr,
3270 skip + INTVAL (args_so_far)));
3271 else
3272 temp = memory_address (BLKmode,
3273 plus_constant (gen_rtx_PLUS (Pmode,
3274 args_addr,
3275 args_so_far),
3276 skip));
3278 if (!ACCUMULATE_OUTGOING_ARGS)
3280 /* If the source is referenced relative to the stack pointer,
3281 copy it to another register to stabilize it. We do not need
3282 to do this if we know that we won't be changing sp. */
3284 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3285 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3286 temp = copy_to_reg (temp);
3289 target = gen_rtx_MEM (BLKmode, temp);
3291 /* We do *not* set_mem_attributes here, because incoming arguments
3292 may overlap with sibling call outgoing arguments and we cannot
3293 allow reordering of reads from function arguments with stores
3294 to outgoing arguments of sibling calls. We do, however, want
3295 to record the alignment of the stack slot. */
3296 /* ALIGN may well be better aligned than TYPE, e.g. due to
3297 PARM_BOUNDARY. Assume the caller isn't lying. */
3298 set_mem_align (target, align);
3300 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3303 else if (partial > 0)
3305 /* Scalar partly in registers. */
3307 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3308 int i;
3309 int not_stack;
3310 /* # words of start of argument
3311 that we must make space for but need not store. */
3312 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3313 int args_offset = INTVAL (args_so_far);
3314 int skip;
3316 /* Push padding now if padding above and stack grows down,
3317 or if padding below and stack grows up.
3318 But if space already allocated, this has already been done. */
3319 if (extra && args_addr == 0
3320 && where_pad != none && where_pad != stack_direction)
3321 anti_adjust_stack (GEN_INT (extra));
3323 /* If we make space by pushing it, we might as well push
3324 the real data. Otherwise, we can leave OFFSET nonzero
3325 and leave the space uninitialized. */
3326 if (args_addr == 0)
3327 offset = 0;
3329 /* Now NOT_STACK gets the number of words that we don't need to
3330 allocate on the stack. */
3331 not_stack = partial - offset;
3333 /* If the partial register-part of the arg counts in its stack size,
3334 skip the part of stack space corresponding to the registers.
3335 Otherwise, start copying to the beginning of the stack space,
3336 by setting SKIP to 0. */
3337 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3339 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3340 x = validize_mem (force_const_mem (mode, x));
3342 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3343 SUBREGs of such registers are not allowed. */
3344 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3345 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3346 x = copy_to_reg (x);
3348 /* Loop over all the words allocated on the stack for this arg. */
3349 /* We can do it by words, because any scalar bigger than a word
3350 has a size a multiple of a word. */
3351 #ifndef PUSH_ARGS_REVERSED
3352 for (i = not_stack; i < size; i++)
3353 #else
3354 for (i = size - 1; i >= not_stack; i--)
3355 #endif
3356 if (i >= not_stack + offset)
3357 emit_push_insn (operand_subword_force (x, i, mode),
3358 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3359 0, args_addr,
3360 GEN_INT (args_offset + ((i - not_stack + skip)
3361 * UNITS_PER_WORD)),
3362 reg_parm_stack_space, alignment_pad);
3364 else
3366 rtx addr;
3367 rtx dest;
3369 /* Push padding now if padding above and stack grows down,
3370 or if padding below and stack grows up.
3371 But if space already allocated, this has already been done. */
3372 if (extra && args_addr == 0
3373 && where_pad != none && where_pad != stack_direction)
3374 anti_adjust_stack (GEN_INT (extra));
3376 #ifdef PUSH_ROUNDING
3377 if (args_addr == 0 && PUSH_ARGS)
3378 emit_single_push_insn (mode, x, type);
3379 else
3380 #endif
3382 if (GET_CODE (args_so_far) == CONST_INT)
3383 addr
3384 = memory_address (mode,
3385 plus_constant (args_addr,
3386 INTVAL (args_so_far)));
3387 else
3388 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3389 args_so_far));
3390 dest = gen_rtx_MEM (mode, addr);
3392 /* We do *not* set_mem_attributes here, because incoming arguments
3393 may overlap with sibling call outgoing arguments and we cannot
3394 allow reordering of reads from function arguments with stores
3395 to outgoing arguments of sibling calls. We do, however, want
3396 to record the alignment of the stack slot. */
3397 /* ALIGN may well be better aligned than TYPE, e.g. due to
3398 PARM_BOUNDARY. Assume the caller isn't lying. */
3399 set_mem_align (dest, align);
3401 emit_move_insn (dest, x);
3405 /* If part should go in registers, copy that part
3406 into the appropriate registers. Do this now, at the end,
3407 since mem-to-mem copies above may do function calls. */
3408 if (partial > 0 && reg != 0)
3410 /* Handle calls that pass values in multiple non-contiguous locations.
3411 The Irix 6 ABI has examples of this. */
3412 if (GET_CODE (reg) == PARALLEL)
3413 emit_group_load (reg, x, type, -1);
3414 else
3415 move_block_to_reg (REGNO (reg), x, partial, mode);
3418 if (extra && args_addr == 0 && where_pad == stack_direction)
3419 anti_adjust_stack (GEN_INT (extra));
3421 if (alignment_pad && args_addr == 0)
3422 anti_adjust_stack (alignment_pad);
3425 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3426 operations. */
3428 static rtx
3429 get_subtarget (rtx x)
3431 return (optimize
3432 || x == 0
3433 /* Only registers can be subtargets. */
3434 || !REG_P (x)
3435 /* Don't use hard regs to avoid extending their life. */
3436 || REGNO (x) < FIRST_PSEUDO_REGISTER
3437 ? 0 : x);
3440 /* Expand an assignment that stores the value of FROM into TO.
3441 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3442 (If the value is constant, this rtx is a constant.)
3443 Otherwise, the returned value is NULL_RTX. */
3446 expand_assignment (tree to, tree from, int want_value)
3448 rtx to_rtx = 0;
3449 rtx result;
3451 /* Don't crash if the lhs of the assignment was erroneous. */
3453 if (TREE_CODE (to) == ERROR_MARK)
3455 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3456 return want_value ? result : NULL_RTX;
3459 /* Assignment of a structure component needs special treatment
3460 if the structure component's rtx is not simply a MEM.
3461 Assignment of an array element at a constant index, and assignment of
3462 an array element in an unaligned packed structure field, has the same
3463 problem. */
3465 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3466 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3467 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3469 enum machine_mode mode1;
3470 HOST_WIDE_INT bitsize, bitpos;
3471 rtx orig_to_rtx;
3472 tree offset;
3473 int unsignedp;
3474 int volatilep = 0;
3475 tree tem;
3477 push_temp_slots ();
3478 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3479 &unsignedp, &volatilep);
3481 /* If we are going to use store_bit_field and extract_bit_field,
3482 make sure to_rtx will be safe for multiple use. */
3484 if (mode1 == VOIDmode && want_value)
3485 tem = stabilize_reference (tem);
3487 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3489 if (offset != 0)
3491 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3493 if (!MEM_P (to_rtx))
3494 abort ();
3496 #ifdef POINTERS_EXTEND_UNSIGNED
3497 if (GET_MODE (offset_rtx) != Pmode)
3498 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3499 #else
3500 if (GET_MODE (offset_rtx) != ptr_mode)
3501 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3502 #endif
3504 /* A constant address in TO_RTX can have VOIDmode; we must not try
3505 to call force_reg for that case, so avoid it. */
3506 if (MEM_P (to_rtx)
3507 && GET_MODE (to_rtx) == BLKmode
3508 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3509 && bitsize > 0
3510 && (bitpos % bitsize) == 0
3511 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3512 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3514 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3515 bitpos = 0;
3518 to_rtx = offset_address (to_rtx, offset_rtx,
3519 highest_pow2_factor_for_target (to,
3520 offset));
3523 if (MEM_P (to_rtx))
3525 /* If the field is at offset zero, we could have been given the
3526 DECL_RTX of the parent struct. Don't munge it. */
3527 to_rtx = shallow_copy_rtx (to_rtx);
3529 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3532 /* Deal with volatile and readonly fields. The former is only done
3533 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3534 if (volatilep && MEM_P (to_rtx))
3536 if (to_rtx == orig_to_rtx)
3537 to_rtx = copy_rtx (to_rtx);
3538 MEM_VOLATILE_P (to_rtx) = 1;
3541 if (MEM_P (to_rtx) && ! can_address_p (to))
3543 if (to_rtx == orig_to_rtx)
3544 to_rtx = copy_rtx (to_rtx);
3545 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3548 /* Optimize bitfld op= val in certain cases. */
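/* A source-level example of what this catches (illustrative): given

     struct S { unsigned int pad : 24; unsigned int top : 8; } s;
     s.top += 3;

   the field occupies the topmost bits of the containing word, so the
   addition can be done directly on the word with the constant shifted
   into place, with no masking and no extv/insv.  */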
3549 while (mode1 == VOIDmode && !want_value
3550 && bitsize > 0 && bitsize < BITS_PER_WORD
3551 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3552 && !TREE_SIDE_EFFECTS (to)
3553 && !TREE_THIS_VOLATILE (to))
3555 tree src, op0, op1;
3556 rtx value, str_rtx = to_rtx;
3557 HOST_WIDE_INT bitpos1 = bitpos;
3558 optab binop;
3560 src = from;
3561 STRIP_NOPS (src);
3562 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3563 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3564 break;
3566 op0 = TREE_OPERAND (src, 0);
3567 op1 = TREE_OPERAND (src, 1);
3568 STRIP_NOPS (op0);
3570 if (! operand_equal_p (to, op0, 0))
3571 break;
3573 if (MEM_P (str_rtx))
3575 enum machine_mode mode = GET_MODE (str_rtx);
3576 HOST_WIDE_INT offset1;
3578 if (GET_MODE_BITSIZE (mode) == 0
3579 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3580 mode = word_mode;
3581 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3582 mode, 0);
3583 if (mode == VOIDmode)
3584 break;
3586 offset1 = bitpos1;
3587 bitpos1 %= GET_MODE_BITSIZE (mode);
3588 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3589 str_rtx = adjust_address (str_rtx, mode, offset1);
3591 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3592 break;
3594 /* If the bit field covers the whole REG/MEM, store_field
3595 will likely generate better code. */
3596 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3597 break;
3599 /* We can't handle fields split across multiple entities. */
3600 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3601 break;
3603 if (BYTES_BIG_ENDIAN)
3604 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3605 - bitsize;
3607 /* Special case some bitfield op= exp. */
3608 switch (TREE_CODE (src))
3610 case PLUS_EXPR:
3611 case MINUS_EXPR:
3612 /* For now, just optimize the case of the topmost bitfield
3613 where we don't need to do any masking and also
3614 1 bit bitfields where xor can be used.
3615 We might win by one instruction for the other bitfields
3616 too if insv/extv instructions aren't used, so that
3617 can be added later. */
3618 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3619 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3620 break;
3621 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3622 value = convert_modes (GET_MODE (str_rtx),
3623 TYPE_MODE (TREE_TYPE (op1)), value,
3624 TYPE_UNSIGNED (TREE_TYPE (op1)));
3626 /* We may be accessing data outside the field, which means
3627 we can alias adjacent data. */
3628 if (MEM_P (str_rtx))
3630 str_rtx = shallow_copy_rtx (str_rtx);
3631 set_mem_alias_set (str_rtx, 0);
3632 set_mem_expr (str_rtx, 0);
3635 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3636 if (bitsize == 1
3637 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3639 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3640 NULL_RTX);
3641 binop = xor_optab;
3643 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3644 build_int_cst (NULL_TREE, bitpos1),
3645 NULL_RTX, 1);
3646 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3647 value, str_rtx, 1, OPTAB_WIDEN);
3648 if (result != str_rtx)
3649 emit_move_insn (str_rtx, result);
3650 free_temp_slots ();
3651 pop_temp_slots ();
3652 return NULL_RTX;
3654 default:
3655 break;
3658 break;
3661 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3662 (want_value
3663 /* Spurious cast for HPUX compiler. */
3664 ? ((enum machine_mode)
3665 TYPE_MODE (TREE_TYPE (to)))
3666 : VOIDmode),
3667 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3669 preserve_temp_slots (result);
3670 free_temp_slots ();
3671 pop_temp_slots ();
3673 /* If the value is meaningful, convert RESULT to the proper mode.
3674 Otherwise, return nothing. */
3675 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3676 TYPE_MODE (TREE_TYPE (from)),
3677 result,
3678 TYPE_UNSIGNED (TREE_TYPE (to)))
3679 : NULL_RTX);
3682 /* If the rhs is a function call and its value is not an aggregate,
3683 call the function before we start to compute the lhs.
3684 This is needed for correct code for cases such as
3685 val = setjmp (buf) on machines where reference to val
3686 requires loading up part of an address in a separate insn.
3688 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3689 since it might be a promoted variable where the zero- or sign- extension
3690 needs to be done. Handling this in the normal way is safe because no
3691 computation is done before the call. */
3692 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3693 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3694 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3695 && REG_P (DECL_RTL (to))))
3697 rtx value;
3699 push_temp_slots ();
3700 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3701 if (to_rtx == 0)
3702 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3704 /* Handle calls that return values in multiple non-contiguous locations.
3705 The Irix 6 ABI has examples of this. */
3706 if (GET_CODE (to_rtx) == PARALLEL)
3707 emit_group_load (to_rtx, value, TREE_TYPE (from),
3708 int_size_in_bytes (TREE_TYPE (from)));
3709 else if (GET_MODE (to_rtx) == BLKmode)
3710 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3711 else
3713 if (POINTER_TYPE_P (TREE_TYPE (to)))
3714 value = convert_memory_address (GET_MODE (to_rtx), value);
3715 emit_move_insn (to_rtx, value);
3717 preserve_temp_slots (to_rtx);
3718 free_temp_slots ();
3719 pop_temp_slots ();
3720 return want_value ? to_rtx : NULL_RTX;
3723 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3724 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3726 if (to_rtx == 0)
3727 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3729 /* Don't move directly into a return register. */
3730 if (TREE_CODE (to) == RESULT_DECL
3731 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3733 rtx temp;
3735 push_temp_slots ();
3736 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3738 if (GET_CODE (to_rtx) == PARALLEL)
3739 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3740 int_size_in_bytes (TREE_TYPE (from)));
3741 else
3742 emit_move_insn (to_rtx, temp);
3744 preserve_temp_slots (to_rtx);
3745 free_temp_slots ();
3746 pop_temp_slots ();
3747 return want_value ? to_rtx : NULL_RTX;
3750 /* In case we are returning the contents of an object which overlaps
3751 the place the value is being stored, use a safe function when copying
3752 a value through a pointer into a structure value return block. */
3753 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3754 && current_function_returns_struct
3755 && !current_function_returns_pcc_struct)
3757 rtx from_rtx, size;
3759 push_temp_slots ();
3760 size = expr_size (from);
3761 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3763 emit_library_call (memmove_libfunc, LCT_NORMAL,
3764 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3765 XEXP (from_rtx, 0), Pmode,
3766 convert_to_mode (TYPE_MODE (sizetype),
3767 size, TYPE_UNSIGNED (sizetype)),
3768 TYPE_MODE (sizetype));
3770 preserve_temp_slots (to_rtx);
3771 free_temp_slots ();
3772 pop_temp_slots ();
3773 return want_value ? to_rtx : NULL_RTX;
3776 /* Compute FROM and store the value in the rtx we got. */
3778 push_temp_slots ();
3779 result = store_expr (from, to_rtx, want_value);
3780 preserve_temp_slots (result);
3781 free_temp_slots ();
3782 pop_temp_slots ();
3783 return want_value ? result : NULL_RTX;
3786 /* Generate code for computing expression EXP,
3787 and storing the value into TARGET.
3789 If WANT_VALUE & 1 is nonzero, return a copy of the value
3790 not in TARGET, so that we can be sure to use the proper
3791 value in a containing expression even if TARGET has something
3792 else stored in it. If possible, we copy the value through a pseudo
3793 and return that pseudo. Or, if the value is constant, we try to
3794 return the constant. In some cases, we return a pseudo
3795 copied *from* TARGET.
3797 If the mode is BLKmode then we may return TARGET itself.
3798 It turns out that in BLKmode it doesn't cause a problem,
3799 because C has no operators that could combine two different
3800 assignments into the same BLKmode object with different values
3801 with no sequence point. Will other languages need this to
3802 be more thorough?
3804 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3805 to catch quickly any cases where the caller uses the value
3806 and fails to set WANT_VALUE.
3808 If WANT_VALUE & 2 is set, this is a store into a call param on the
3809 stack, and block moves may need to be treated specially. */
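/* Illustrative sketch (an assumption, not part of the original sources) of
   how the WANT_VALUE bits described above are used at a call site:

       store_expr (from, to_rtx, 0);            plain statement, NULL_RTX back
       rtx val = store_expr (from, to_rtx, 1);  caller wants the value back
       store_expr (from, to_rtx, 2);            store into a stack call param

   The call in expand_assignment above simply passes its own WANT_VALUE
   through unchanged.  */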
3812 store_expr (tree exp, rtx target, int want_value)
3814 rtx temp;
3815 rtx alt_rtl = NULL_RTX;
3816 int dont_return_target = 0;
3817 int dont_store_target = 0;
3819 if (VOID_TYPE_P (TREE_TYPE (exp)))
3821 /* C++ can generate ?: expressions with a throw expression in one
3822 branch and an rvalue in the other. Here, we resolve attempts to
3823 store the throw expression's nonexistent result. */
3824 if (want_value)
3825 abort ();
3826 expand_expr (exp, const0_rtx, VOIDmode, 0);
3827 return NULL_RTX;
3829 if (TREE_CODE (exp) == COMPOUND_EXPR)
3831 /* Perform first part of compound expression, then assign from second
3832 part. */
3833 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3834 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3835 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3837 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3839 /* For conditional expression, get safe form of the target. Then
3840 test the condition, doing the appropriate assignment on either
3841 side. This avoids the creation of unnecessary temporaries.
3842 For non-BLKmode, it is more efficient not to do this. */
3844 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3846 do_pending_stack_adjust ();
3847 NO_DEFER_POP;
3848 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3849 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
3850 emit_jump_insn (gen_jump (lab2));
3851 emit_barrier ();
3852 emit_label (lab1);
3853 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
3854 emit_label (lab2);
3855 OK_DEFER_POP;
3857 return want_value & 1 ? target : NULL_RTX;
3859 else if ((want_value & 1) != 0
3860 && MEM_P (target)
3861 && ! MEM_VOLATILE_P (target)
3862 && GET_MODE (target) != BLKmode)
3863 /* If target is in memory and caller wants value in a register instead,
3864 arrange that. Pass TARGET as target for expand_expr so that,
3865 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3866 We know expand_expr will not use the target in that case.
3867 Don't do this if TARGET is volatile because we are supposed
3868 to write it and then read it. */
3870 temp = expand_expr (exp, target, GET_MODE (target),
3871 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3872 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3874 /* If TEMP is already in the desired TARGET, only copy it from
3875 memory and don't store it there again. */
3876 if (temp == target
3877 || (rtx_equal_p (temp, target)
3878 && ! side_effects_p (temp) && ! side_effects_p (target)))
3879 dont_store_target = 1;
3880 temp = copy_to_reg (temp);
3882 dont_return_target = 1;
3884 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3885 /* If this is a scalar in a register that is stored in a wider mode
3886 than the declared mode, compute the result into its declared mode
3887 and then convert to the wider mode. Our value is the computed
3888 expression. */
3890 rtx inner_target = 0;
3892 /* If we don't want a value, we can do the conversion inside EXP,
3893 which will often result in some optimizations. Do the conversion
3894 in two steps: first change the signedness, if needed, then
3895 the extend. But don't do this if the type of EXP is a subtype
3896 of something else since then the conversion might involve
3897 more than just converting modes. */
3898 if ((want_value & 1) == 0
3899 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3900 && TREE_TYPE (TREE_TYPE (exp)) == 0
3901 && (!lang_hooks.reduce_bit_field_operations
3902 || (GET_MODE_PRECISION (GET_MODE (target))
3903 == TYPE_PRECISION (TREE_TYPE (exp)))))
3905 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3906 != SUBREG_PROMOTED_UNSIGNED_P (target))
3907 exp = convert
3908 (lang_hooks.types.signed_or_unsigned_type
3909 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3911 exp = convert (lang_hooks.types.type_for_mode
3912 (GET_MODE (SUBREG_REG (target)),
3913 SUBREG_PROMOTED_UNSIGNED_P (target)),
3914 exp);
3916 inner_target = SUBREG_REG (target);
3919 temp = expand_expr (exp, inner_target, VOIDmode,
3920 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3922 /* If TEMP is a MEM and we want a result value, make the access
3923 now so it gets done only once. Strictly speaking, this is
3924 only necessary if the MEM is volatile, or if the address
3925 overlaps TARGET. But not performing the load twice also
3926 reduces the amount of rtl we generate and then have to CSE. */
3927 if (MEM_P (temp) && (want_value & 1) != 0)
3928 temp = copy_to_reg (temp);
3930 /* If TEMP is a VOIDmode constant, use convert_modes to make
3931 sure that we properly convert it. */
3932 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3934 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3935 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3936 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3937 GET_MODE (target), temp,
3938 SUBREG_PROMOTED_UNSIGNED_P (target));
3941 convert_move (SUBREG_REG (target), temp,
3942 SUBREG_PROMOTED_UNSIGNED_P (target));
3944 /* If we promoted a constant, change the mode back down to match
3945 target. Otherwise, the caller might get confused by a result whose
3946 mode is larger than expected. */
3948 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3950 if (GET_MODE (temp) != VOIDmode)
3952 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3953 SUBREG_PROMOTED_VAR_P (temp) = 1;
3954 SUBREG_PROMOTED_UNSIGNED_SET (temp,
3955 SUBREG_PROMOTED_UNSIGNED_P (target));
3957 else
3958 temp = convert_modes (GET_MODE (target),
3959 GET_MODE (SUBREG_REG (target)),
3960 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3963 return want_value & 1 ? temp : NULL_RTX;
3965 else
3967 temp = expand_expr_real (exp, target, GET_MODE (target),
3968 (want_value & 2
3969 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3970 &alt_rtl);
3971 /* Return TARGET if it's a specified hardware register.
3972 If TARGET is a volatile mem ref, either return TARGET
3973 or return a reg copied *from* TARGET; ANSI requires this.
3975 Otherwise, if TEMP is not TARGET, return TEMP
3976 if it is constant (for efficiency),
3977 or if we really want the correct value. */
3978 if (!(target && REG_P (target)
3979 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3980 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3981 && ! rtx_equal_p (temp, target)
3982 && (CONSTANT_P (temp) || (want_value & 1) != 0))
3983 dont_return_target = 1;
3986 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3987 the same as that of TARGET, adjust the constant. This is needed, for
3988 example, in case it is a CONST_DOUBLE and we want only a word-sized
3989 value. */
3990 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3991 && TREE_CODE (exp) != ERROR_MARK
3992 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3993 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3994 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3996 /* If value was not generated in the target, store it there.
3997 Convert the value to TARGET's type first if necessary and emit the
3998 pending incrementations that have been queued when expanding EXP.
3999 Note that we cannot emit the whole queue blindly because this will
4000 effectively disable the POST_INC optimization later.
4002 If TEMP and TARGET compare equal according to rtx_equal_p, but
4003 one or both of them are volatile memory refs, we have to distinguish
4004 two cases:
4005 - expand_expr has used TARGET. In this case, we must not generate
4006 another copy. This can be detected by TARGET being equal according
4007 to == .
4008 - expand_expr has not used TARGET - that means that the source just
4009 happens to have the same RTX form. Since temp will have been created
4010 by expand_expr, it will compare unequal according to == .
4011 We must generate a copy in this case, to reach the correct number
4012 of volatile memory references. */
4014 if ((! rtx_equal_p (temp, target)
4015 || (temp != target && (side_effects_p (temp)
4016 || side_effects_p (target))))
4017 && TREE_CODE (exp) != ERROR_MARK
4018 && ! dont_store_target
4019 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4020 but TARGET is not a valid memory reference, TEMP will differ
4021 from TARGET although it is really the same location. */
4022 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4023 /* If there's nothing to copy, don't bother. Don't call expr_size
4024 unless necessary, because some front ends' (e.g. C++) expr_size hook
4025 aborts on objects that are not supposed to be bit-copied or
4026 bit-initialized. */
4027 && expr_size (exp) != const0_rtx)
4029 if (GET_MODE (temp) != GET_MODE (target)
4030 && GET_MODE (temp) != VOIDmode)
4032 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4033 if (dont_return_target)
4035 /* In this case, we will return TEMP,
4036 so make sure it has the proper mode.
4037 But don't forget to store the value into TARGET. */
4038 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4039 emit_move_insn (target, temp);
4041 else
4042 convert_move (target, temp, unsignedp);
4045 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4047 /* Handle copying a string constant into an array. The string
4048 constant may be shorter than the array. So copy just the string's
4049 actual length, and clear the rest. First get the size of the data
4050 type of the string, which is actually the size of the target. */
4051 rtx size = expr_size (exp);
4053 if (GET_CODE (size) == CONST_INT
4054 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4055 emit_block_move (target, temp, size,
4056 (want_value & 2
4057 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4058 else
4060 /* Compute the size of the data to copy from the string. */
4061 tree copy_size
4062 = size_binop (MIN_EXPR,
4063 make_tree (sizetype, size),
4064 size_int (TREE_STRING_LENGTH (exp)));
4065 rtx copy_size_rtx
4066 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4067 (want_value & 2
4068 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4069 rtx label = 0;
4071 /* Copy that much. */
4072 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4073 TYPE_UNSIGNED (sizetype));
4074 emit_block_move (target, temp, copy_size_rtx,
4075 (want_value & 2
4076 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4078 /* Figure out how much is left in TARGET that we have to clear.
4079 Do all calculations in ptr_mode. */
4080 if (GET_CODE (copy_size_rtx) == CONST_INT)
4082 size = plus_constant (size, -INTVAL (copy_size_rtx));
4083 target = adjust_address (target, BLKmode,
4084 INTVAL (copy_size_rtx));
4086 else
4088 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4089 copy_size_rtx, NULL_RTX, 0,
4090 OPTAB_LIB_WIDEN);
4092 #ifdef POINTERS_EXTEND_UNSIGNED
4093 if (GET_MODE (copy_size_rtx) != Pmode)
4094 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4095 TYPE_UNSIGNED (sizetype));
4096 #endif
4098 target = offset_address (target, copy_size_rtx,
4099 highest_pow2_factor (copy_size));
4100 label = gen_label_rtx ();
4101 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4102 GET_MODE (size), 0, label);
4105 if (size != const0_rtx)
4106 clear_storage (target, size);
4108 if (label)
4109 emit_label (label);
4112 /* Handle calls that return values in multiple non-contiguous locations.
4113 The Irix 6 ABI has examples of this. */
4114 else if (GET_CODE (target) == PARALLEL)
4115 emit_group_load (target, temp, TREE_TYPE (exp),
4116 int_size_in_bytes (TREE_TYPE (exp)));
4117 else if (GET_MODE (temp) == BLKmode)
4118 emit_block_move (target, temp, expr_size (exp),
4119 (want_value & 2
4120 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4121 else
4123 temp = force_operand (temp, target);
4124 if (temp != target)
4125 emit_move_insn (target, temp);
4129 /* If we don't want a value, return NULL_RTX. */
4130 if ((want_value & 1) == 0)
4131 return NULL_RTX;
4133 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4134 ??? The latter test doesn't seem to make sense. */
4135 else if (dont_return_target && !MEM_P (temp))
4136 return temp;
4138 /* Return TARGET itself if it is a hard register. */
4139 else if ((want_value & 1) != 0
4140 && GET_MODE (target) != BLKmode
4141 && ! (REG_P (target)
4142 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4143 return copy_to_reg (target);
4145 else
4146 return target;
4149 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4150 values and place the count in *P_NZ_ELTS. Discover how many scalar fields
4151 are set to non-constant values and place the count in *P_NC_ELTS. */
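/* Illustrative example (an assumption, not part of the original sources):
   for a constructor such as { 1, 0, n } where N is a variable, both the
   INTEGER_CST 1 and N are counted as nonzero while only N is counted as
   non-constant, so *P_NZ_ELTS ends up as 2 and *P_NC_ELTS as 1.  */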
4153 static void
4154 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4155 HOST_WIDE_INT *p_nc_elts)
4157 HOST_WIDE_INT nz_elts, nc_elts;
4158 tree list;
4160 nz_elts = 0;
4161 nc_elts = 0;
4163 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4165 tree value = TREE_VALUE (list);
4166 tree purpose = TREE_PURPOSE (list);
4167 HOST_WIDE_INT mult;
4169 mult = 1;
4170 if (TREE_CODE (purpose) == RANGE_EXPR)
4172 tree lo_index = TREE_OPERAND (purpose, 0);
4173 tree hi_index = TREE_OPERAND (purpose, 1);
4175 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4176 mult = (tree_low_cst (hi_index, 1)
4177 - tree_low_cst (lo_index, 1) + 1);
4180 switch (TREE_CODE (value))
4182 case CONSTRUCTOR:
4184 HOST_WIDE_INT nz = 0, nc = 0;
4185 categorize_ctor_elements_1 (value, &nz, &nc);
4186 nz_elts += mult * nz;
4187 nc_elts += mult * nc;
4189 break;
4191 case INTEGER_CST:
4192 case REAL_CST:
4193 if (!initializer_zerop (value))
4194 nz_elts += mult;
4195 break;
4196 case COMPLEX_CST:
4197 if (!initializer_zerop (TREE_REALPART (value)))
4198 nz_elts += mult;
4199 if (!initializer_zerop (TREE_IMAGPART (value)))
4200 nz_elts += mult;
4201 break;
4202 case VECTOR_CST:
4204 tree v;
4205 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4206 if (!initializer_zerop (TREE_VALUE (v)))
4207 nz_elts += mult;
4209 break;
4211 default:
4212 nz_elts += mult;
4213 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4214 nc_elts += mult;
4215 break;
4219 *p_nz_elts += nz_elts;
4220 *p_nc_elts += nc_elts;
4223 void
4224 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4225 HOST_WIDE_INT *p_nc_elts)
4227 *p_nz_elts = 0;
4228 *p_nc_elts = 0;
4229 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4232 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4233 TYPE is variable-sized. */
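/* Illustrative example (an assumption, not part of the original sources):
   for  struct { int i; double d[3]; }  the RECORD_TYPE case sums its
   fields, 1 for the int plus 3 * 1 for the array, giving 4; an array
   whose length is not a compile-time constant yields -1.  */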
4235 HOST_WIDE_INT
4236 count_type_elements (tree type)
4238 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4239 switch (TREE_CODE (type))
4241 case ARRAY_TYPE:
4243 tree telts = array_type_nelts (type);
4244 if (telts && host_integerp (telts, 1))
4246 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4247 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4248 if (n == 0)
4249 return 0;
4250 else if (max / n > m)
4251 return n * m;
4253 return -1;
4256 case RECORD_TYPE:
4258 HOST_WIDE_INT n = 0, t;
4259 tree f;
4261 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4262 if (TREE_CODE (f) == FIELD_DECL)
4264 t = count_type_elements (TREE_TYPE (f));
4265 if (t < 0)
4266 return -1;
4267 n += t;
4270 return n;
4273 case UNION_TYPE:
4274 case QUAL_UNION_TYPE:
4276 /* Ho hum. How in the world do we guess here? Clearly it isn't
4277 right to count the fields. Guess based on the number of words. */
4278 HOST_WIDE_INT n = int_size_in_bytes (type);
4279 if (n < 0)
4280 return -1;
4281 return n / UNITS_PER_WORD;
4284 case COMPLEX_TYPE:
4285 return 2;
4287 case VECTOR_TYPE:
4288 return TYPE_VECTOR_SUBPARTS (type);
4290 case INTEGER_TYPE:
4291 case REAL_TYPE:
4292 case ENUMERAL_TYPE:
4293 case BOOLEAN_TYPE:
4294 case CHAR_TYPE:
4295 case POINTER_TYPE:
4296 case OFFSET_TYPE:
4297 case REFERENCE_TYPE:
4298 return 1;
4300 case VOID_TYPE:
4301 case METHOD_TYPE:
4302 case FILE_TYPE:
4303 case SET_TYPE:
4304 case FUNCTION_TYPE:
4305 case LANG_TYPE:
4306 default:
4307 abort ();
4311 /* Return 1 if EXP contains mostly (3/4) zeros. */
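/* Illustrative example (an assumption, not part of the original sources):
   a CONSTRUCTOR for an 8-element integer array with a single nonzero
   element has nz_elts == 1 and elts == 8, so 1 < 8/4 holds and the
   function returns 1; with two or more nonzero elements it returns 0.  */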
4314 mostly_zeros_p (tree exp)
4316 if (TREE_CODE (exp) == CONSTRUCTOR)
4319 HOST_WIDE_INT nz_elts, nc_elts, elts;
4321 /* If there are no ranges of true bits, it is all zero. */
4322 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4323 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4325 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4326 elts = count_type_elements (TREE_TYPE (exp));
4328 return nz_elts < elts / 4;
4331 return initializer_zerop (exp);
4334 /* Helper function for store_constructor.
4335 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4336 TYPE is the type of the CONSTRUCTOR, not the element type.
4337 CLEARED is as for store_constructor.
4338 ALIAS_SET is the alias set to use for any stores.
4340 This provides a recursive shortcut back to store_constructor when it isn't
4341 necessary to go through store_field. This is so that we can pass through
4342 the cleared field to let store_constructor know that we may not have to
4343 clear a substructure if the outer structure has already been cleared. */
4345 static void
4346 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4347 HOST_WIDE_INT bitpos, enum machine_mode mode,
4348 tree exp, tree type, int cleared, int alias_set)
4350 if (TREE_CODE (exp) == CONSTRUCTOR
4351 /* We can only call store_constructor recursively if the size and
4352 bit position are on a byte boundary. */
4353 && bitpos % BITS_PER_UNIT == 0
4354 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4355 /* If we have a nonzero bitpos for a register target, then we just
4356 let store_field do the bitfield handling. This is unlikely to
4357 generate unnecessary clear instructions anyway. */
4358 && (bitpos == 0 || MEM_P (target)))
4360 if (MEM_P (target))
4361 target
4362 = adjust_address (target,
4363 GET_MODE (target) == BLKmode
4364 || 0 != (bitpos
4365 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4366 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4369 /* Update the alias set, if required. */
4370 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4371 && MEM_ALIAS_SET (target) != 0)
4373 target = copy_rtx (target);
4374 set_mem_alias_set (target, alias_set);
4377 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4379 else
4380 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4381 alias_set);
4384 /* Store the value of constructor EXP into the rtx TARGET.
4385 TARGET is either a REG or a MEM; we know it cannot conflict, since
4386 safe_from_p has been called.
4387 CLEARED is true if TARGET is known to have been zero'd.
4388 SIZE is the number of bytes of TARGET we are allowed to modify: this
4389 may not be the same as the size of EXP if we are assigning to a field
4390 which has been packed to exclude padding bits. */
4392 static void
4393 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4395 tree type = TREE_TYPE (exp);
4396 #ifdef WORD_REGISTER_OPERATIONS
4397 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4398 #endif
4400 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4401 || TREE_CODE (type) == QUAL_UNION_TYPE)
4403 tree elt;
4405 /* If size is zero or the target is already cleared, do nothing. */
4406 if (size == 0 || cleared)
4407 cleared = 1;
4408 /* We either clear the aggregate or indicate the value is dead. */
4409 else if ((TREE_CODE (type) == UNION_TYPE
4410 || TREE_CODE (type) == QUAL_UNION_TYPE)
4411 && ! CONSTRUCTOR_ELTS (exp))
4412 /* If the constructor is empty, clear the union. */
4414 clear_storage (target, expr_size (exp));
4415 cleared = 1;
4418 /* If we are building a static constructor into a register,
4419 set the initial value as zero so we can fold the value into
4420 a constant. But if more than one register is involved,
4421 this probably loses. */
4422 else if (REG_P (target) && TREE_STATIC (exp)
4423 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4425 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4426 cleared = 1;
4429 /* If the constructor has fewer fields than the structure
4430 or if we are initializing the structure to mostly zeros,
4431 clear the whole structure first. Don't do this if TARGET is a
4432 register whose mode size isn't equal to SIZE since clear_storage
4433 can't handle this case. */
4434 else if (size > 0
4435 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4436 || mostly_zeros_p (exp))
4437 && (!REG_P (target)
4438 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4439 == size)))
4441 clear_storage (target, GEN_INT (size));
4442 cleared = 1;
4445 if (! cleared)
4446 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4448 /* Store each element of the constructor into
4449 the corresponding field of TARGET. */
4451 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4453 tree field = TREE_PURPOSE (elt);
4454 tree value = TREE_VALUE (elt);
4455 enum machine_mode mode;
4456 HOST_WIDE_INT bitsize;
4457 HOST_WIDE_INT bitpos = 0;
4458 tree offset;
4459 rtx to_rtx = target;
4461 /* Just ignore missing fields.
4462 We cleared the whole structure, above,
4463 if any fields are missing. */
4464 if (field == 0)
4465 continue;
4467 if (cleared && initializer_zerop (value))
4468 continue;
4470 if (host_integerp (DECL_SIZE (field), 1))
4471 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4472 else
4473 bitsize = -1;
4475 mode = DECL_MODE (field);
4476 if (DECL_BIT_FIELD (field))
4477 mode = VOIDmode;
4479 offset = DECL_FIELD_OFFSET (field);
4480 if (host_integerp (offset, 0)
4481 && host_integerp (bit_position (field), 0))
4483 bitpos = int_bit_position (field);
4484 offset = 0;
4486 else
4487 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4489 if (offset)
4491 rtx offset_rtx;
4493 offset
4494 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4495 make_tree (TREE_TYPE (exp),
4496 target));
4498 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4499 if (!MEM_P (to_rtx))
4500 abort ();
4502 #ifdef POINTERS_EXTEND_UNSIGNED
4503 if (GET_MODE (offset_rtx) != Pmode)
4504 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4505 #else
4506 if (GET_MODE (offset_rtx) != ptr_mode)
4507 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4508 #endif
4510 to_rtx = offset_address (to_rtx, offset_rtx,
4511 highest_pow2_factor (offset));
4514 #ifdef WORD_REGISTER_OPERATIONS
4515 /* If this initializes a field that is smaller than a word, at the
4516 start of a word, try to widen it to a full word.
4517 This special case allows us to output C++ member function
4518 initializations in a form that the optimizers can understand. */
4519 if (REG_P (target)
4520 && bitsize < BITS_PER_WORD
4521 && bitpos % BITS_PER_WORD == 0
4522 && GET_MODE_CLASS (mode) == MODE_INT
4523 && TREE_CODE (value) == INTEGER_CST
4524 && exp_size >= 0
4525 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4527 tree type = TREE_TYPE (value);
4529 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4531 type = lang_hooks.types.type_for_size
4532 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4533 value = convert (type, value);
4536 if (BYTES_BIG_ENDIAN)
4537 value
4538 = fold (build2 (LSHIFT_EXPR, type, value,
4539 build_int_cst (NULL_TREE,
4540 BITS_PER_WORD - bitsize)));
4541 bitsize = BITS_PER_WORD;
4542 mode = word_mode;
4544 #endif
4546 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4547 && DECL_NONADDRESSABLE_P (field))
4549 to_rtx = copy_rtx (to_rtx);
4550 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4553 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4554 value, type, cleared,
4555 get_alias_set (TREE_TYPE (field)));
4559 else if (TREE_CODE (type) == ARRAY_TYPE)
4561 tree elt;
4562 int i;
4563 int need_to_clear;
4564 tree domain;
4565 tree elttype = TREE_TYPE (type);
4566 int const_bounds_p;
4567 HOST_WIDE_INT minelt = 0;
4568 HOST_WIDE_INT maxelt = 0;
4570 domain = TYPE_DOMAIN (type);
4571 const_bounds_p = (TYPE_MIN_VALUE (domain)
4572 && TYPE_MAX_VALUE (domain)
4573 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4574 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4576 /* If we have constant bounds for the range of the type, get them. */
4577 if (const_bounds_p)
4579 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4580 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4583 /* If the constructor has fewer elements than the array,
4584 clear the whole array first. Similarly if this is
4585 a static constructor of a non-BLKmode object. */
4586 if (cleared)
4587 need_to_clear = 0;
4588 else if (REG_P (target) && TREE_STATIC (exp))
4589 need_to_clear = 1;
4590 else
4592 HOST_WIDE_INT count = 0, zero_count = 0;
4593 need_to_clear = ! const_bounds_p;
4595 /* This loop is a more accurate version of the loop in
4596 mostly_zeros_p (it handles RANGE_EXPR in an index).
4597 It is also needed to check for missing elements. */
4598 for (elt = CONSTRUCTOR_ELTS (exp);
4599 elt != NULL_TREE && ! need_to_clear;
4600 elt = TREE_CHAIN (elt))
4602 tree index = TREE_PURPOSE (elt);
4603 HOST_WIDE_INT this_node_count;
4605 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4607 tree lo_index = TREE_OPERAND (index, 0);
4608 tree hi_index = TREE_OPERAND (index, 1);
4610 if (! host_integerp (lo_index, 1)
4611 || ! host_integerp (hi_index, 1))
4613 need_to_clear = 1;
4614 break;
4617 this_node_count = (tree_low_cst (hi_index, 1)
4618 - tree_low_cst (lo_index, 1) + 1);
4620 else
4621 this_node_count = 1;
4623 count += this_node_count;
4624 if (mostly_zeros_p (TREE_VALUE (elt)))
4625 zero_count += this_node_count;
4628 /* Clear the entire array first if there are any missing elements,
4629 or if the incidence of zero elements is >= 75%. */
4630 if (! need_to_clear
4631 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4632 need_to_clear = 1;
4635 if (need_to_clear && size > 0)
4637 if (REG_P (target))
4638 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4639 else
4640 clear_storage (target, GEN_INT (size));
4641 cleared = 1;
4644 if (!cleared && REG_P (target))
4645 /* Inform later passes that the old value is dead. */
4646 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4648 /* Store each element of the constructor into
4649 the corresponding element of TARGET, determined
4650 by counting the elements. */
4651 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4652 elt;
4653 elt = TREE_CHAIN (elt), i++)
4655 enum machine_mode mode;
4656 HOST_WIDE_INT bitsize;
4657 HOST_WIDE_INT bitpos;
4658 int unsignedp;
4659 tree value = TREE_VALUE (elt);
4660 tree index = TREE_PURPOSE (elt);
4661 rtx xtarget = target;
4663 if (cleared && initializer_zerop (value))
4664 continue;
4666 unsignedp = TYPE_UNSIGNED (elttype);
4667 mode = TYPE_MODE (elttype);
4668 if (mode == BLKmode)
4669 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4670 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4671 : -1);
4672 else
4673 bitsize = GET_MODE_BITSIZE (mode);
4675 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4677 tree lo_index = TREE_OPERAND (index, 0);
4678 tree hi_index = TREE_OPERAND (index, 1);
4679 rtx index_r, pos_rtx;
4680 HOST_WIDE_INT lo, hi, count;
4681 tree position;
4683 /* If the range is constant and "small", unroll the loop. */
4684 if (const_bounds_p
4685 && host_integerp (lo_index, 0)
4686 && host_integerp (hi_index, 0)
4687 && (lo = tree_low_cst (lo_index, 0),
4688 hi = tree_low_cst (hi_index, 0),
4689 count = hi - lo + 1,
4690 (!MEM_P (target)
4691 || count <= 2
4692 || (host_integerp (TYPE_SIZE (elttype), 1)
4693 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4694 <= 40 * 8)))))
4696 lo -= minelt; hi -= minelt;
4697 for (; lo <= hi; lo++)
4699 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4701 if (MEM_P (target)
4702 && !MEM_KEEP_ALIAS_SET_P (target)
4703 && TREE_CODE (type) == ARRAY_TYPE
4704 && TYPE_NONALIASED_COMPONENT (type))
4706 target = copy_rtx (target);
4707 MEM_KEEP_ALIAS_SET_P (target) = 1;
4710 store_constructor_field
4711 (target, bitsize, bitpos, mode, value, type, cleared,
4712 get_alias_set (elttype));
4715 else
4717 rtx loop_start = gen_label_rtx ();
4718 rtx loop_end = gen_label_rtx ();
4719 tree exit_cond;
4721 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4722 unsignedp = TYPE_UNSIGNED (domain);
4724 index = build_decl (VAR_DECL, NULL_TREE, domain);
4726 index_r
4727 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4728 &unsignedp, 0));
4729 SET_DECL_RTL (index, index_r);
4730 store_expr (lo_index, index_r, 0);
4732 /* Build the head of the loop. */
4733 do_pending_stack_adjust ();
4734 emit_label (loop_start);
4736 /* Assign value to element index. */
4737 position
4738 = convert (ssizetype,
4739 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4740 index, TYPE_MIN_VALUE (domain))));
4741 position = size_binop (MULT_EXPR, position,
4742 convert (ssizetype,
4743 TYPE_SIZE_UNIT (elttype)));
4745 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4746 xtarget = offset_address (target, pos_rtx,
4747 highest_pow2_factor (position));
4748 xtarget = adjust_address (xtarget, mode, 0);
4749 if (TREE_CODE (value) == CONSTRUCTOR)
4750 store_constructor (value, xtarget, cleared,
4751 bitsize / BITS_PER_UNIT);
4752 else
4753 store_expr (value, xtarget, 0);
4755 /* Generate a conditional jump to exit the loop. */
4756 exit_cond = build2 (LT_EXPR, integer_type_node,
4757 index, hi_index);
4758 jumpif (exit_cond, loop_end);
4760 /* Update the loop counter, and jump to the head of
4761 the loop. */
4762 expand_assignment (index,
4763 build2 (PLUS_EXPR, TREE_TYPE (index),
4764 index, integer_one_node), 0);
4766 emit_jump (loop_start);
4768 /* Build the end of the loop. */
4769 emit_label (loop_end);
4772 else if ((index != 0 && ! host_integerp (index, 0))
4773 || ! host_integerp (TYPE_SIZE (elttype), 1))
4775 tree position;
4777 if (index == 0)
4778 index = ssize_int (1);
4780 if (minelt)
4781 index = fold_convert (ssizetype,
4782 fold (build2 (MINUS_EXPR,
4783 TREE_TYPE (index),
4784 index,
4785 TYPE_MIN_VALUE (domain))));
4787 position = size_binop (MULT_EXPR, index,
4788 convert (ssizetype,
4789 TYPE_SIZE_UNIT (elttype)));
4790 xtarget = offset_address (target,
4791 expand_expr (position, 0, VOIDmode, 0),
4792 highest_pow2_factor (position));
4793 xtarget = adjust_address (xtarget, mode, 0);
4794 store_expr (value, xtarget, 0);
4796 else
4798 if (index != 0)
4799 bitpos = ((tree_low_cst (index, 0) - minelt)
4800 * tree_low_cst (TYPE_SIZE (elttype), 1));
4801 else
4802 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4804 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4805 && TREE_CODE (type) == ARRAY_TYPE
4806 && TYPE_NONALIASED_COMPONENT (type))
4808 target = copy_rtx (target);
4809 MEM_KEEP_ALIAS_SET_P (target) = 1;
4811 store_constructor_field (target, bitsize, bitpos, mode, value,
4812 type, cleared, get_alias_set (elttype));
4817 else if (TREE_CODE (type) == VECTOR_TYPE)
4819 tree elt;
4820 int i;
4821 int need_to_clear;
4822 int icode = 0;
4823 tree elttype = TREE_TYPE (type);
4824 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4825 enum machine_mode eltmode = TYPE_MODE (elttype);
4826 HOST_WIDE_INT bitsize;
4827 HOST_WIDE_INT bitpos;
4828 rtx *vector = NULL;
4829 unsigned n_elts;
4831 if (eltmode == BLKmode)
4832 abort ();
4834 n_elts = TYPE_VECTOR_SUBPARTS (type);
4835 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4837 enum machine_mode mode = GET_MODE (target);
4839 icode = (int) vec_init_optab->handlers[mode].insn_code;
4840 if (icode != CODE_FOR_nothing)
4842 unsigned int i;
4844 vector = alloca (n_elts);
4845 for (i = 0; i < n_elts; i++)
4846 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4850 /* If the constructor has fewer elements than the vector,
4851 clear the whole vector first. Similarly if this is
4852 a static constructor of a non-BLKmode object. */
4853 if (cleared)
4854 need_to_clear = 0;
4855 else if (REG_P (target) && TREE_STATIC (exp))
4856 need_to_clear = 1;
4857 else
4859 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4861 for (elt = CONSTRUCTOR_ELTS (exp);
4862 elt != NULL_TREE;
4863 elt = TREE_CHAIN (elt))
4865 int n_elts_here =
4866 tree_low_cst (
4867 int_const_binop (TRUNC_DIV_EXPR,
4868 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4869 TYPE_SIZE (elttype), 0), 1);
4871 count += n_elts_here;
4872 if (mostly_zeros_p (TREE_VALUE (elt)))
4873 zero_count += n_elts_here;
4876 /* Clear the entire vector first if there are any missing elements,
4877 or if the incidence of zero elements is >= 75%. */
4878 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4881 if (need_to_clear && size > 0 && !vector)
4883 if (REG_P (target))
4884 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4885 else
4886 clear_storage (target, GEN_INT (size));
4887 cleared = 1;
4890 if (!cleared && REG_P (target))
4891 /* Inform later passes that the old value is dead. */
4892 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4894 /* Store each element of the constructor into the corresponding
4895 element of TARGET, determined by counting the elements. */
4896 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4897 elt;
4898 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4900 tree value = TREE_VALUE (elt);
4901 tree index = TREE_PURPOSE (elt);
4902 HOST_WIDE_INT eltpos;
4904 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4905 if (cleared && initializer_zerop (value))
4906 continue;
4908 if (index != 0)
4909 eltpos = tree_low_cst (index, 1);
4910 else
4911 eltpos = i;
4913 if (vector)
4915 /* Vector CONSTRUCTORs should only be built from smaller
4916 vectors in the case of BLKmode vectors. */
4917 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
4918 abort ();
4919 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4921 else
4923 enum machine_mode value_mode =
4924 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4925 ? TYPE_MODE (TREE_TYPE (value))
4926 : eltmode;
4927 bitpos = eltpos * elt_size;
4928 store_constructor_field (target, bitsize, bitpos, value_mode, value,
4929 type, cleared, get_alias_set (elttype));
4933 if (vector)
4934 emit_insn (GEN_FCN (icode) (target,
4935 gen_rtx_PARALLEL (GET_MODE (target),
4936 gen_rtvec_v (n_elts, vector))));
4939 /* Set constructor assignments. */
4940 else if (TREE_CODE (type) == SET_TYPE)
4942 tree elt = CONSTRUCTOR_ELTS (exp);
4943 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4944 tree domain = TYPE_DOMAIN (type);
4945 tree domain_min, domain_max, bitlength;
4947 /* The default implementation strategy is to extract the constant
4948 parts of the constructor, use that to initialize the target,
4949 and then "or" in whatever non-constant ranges we need in addition.
4951 If a large set is all zero or all ones, it is
4952 probably better to set it using memset.
4953 Also, if a large set has just a single range, it may also be
4954 better to first clear the whole set (using
4955 memset), and then set the bits we want. */
4957 /* Check for all zeros. */
4958 if (elt == NULL_TREE && size > 0)
4960 if (!cleared)
4961 clear_storage (target, GEN_INT (size));
4962 return;
4965 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4966 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4967 bitlength = size_binop (PLUS_EXPR,
4968 size_diffop (domain_max, domain_min),
4969 ssize_int (1));
4971 nbits = tree_low_cst (bitlength, 1);
4973 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4974 are "complicated" (more than one range), initialize (the
4975 constant parts) by copying from a constant. */
4976 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4977 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4979 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4980 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4981 char *bit_buffer = alloca (nbits);
4982 HOST_WIDE_INT word = 0;
4983 unsigned int bit_pos = 0;
4984 unsigned int ibit = 0;
4985 unsigned int offset = 0; /* In bytes from beginning of set. */
4987 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4988 for (;;)
4990 if (bit_buffer[ibit])
4992 if (BYTES_BIG_ENDIAN)
4993 word |= (1 << (set_word_size - 1 - bit_pos));
4994 else
4995 word |= 1 << bit_pos;
4998 bit_pos++; ibit++;
4999 if (bit_pos >= set_word_size || ibit == nbits)
5001 if (word != 0 || ! cleared)
5003 rtx datum = gen_int_mode (word, mode);
5004 rtx to_rtx;
5006 /* The assumption here is that it is safe to use
5007 XEXP if the set is multi-word, but not if
5008 it's single-word. */
5009 if (MEM_P (target))
5010 to_rtx = adjust_address (target, mode, offset);
5011 else if (offset == 0)
5012 to_rtx = target;
5013 else
5014 abort ();
5015 emit_move_insn (to_rtx, datum);
5018 if (ibit == nbits)
5019 break;
5020 word = 0;
5021 bit_pos = 0;
5022 offset += set_word_size / BITS_PER_UNIT;
5026 else if (!cleared)
5027 /* Don't bother clearing storage if the set is all ones. */
5028 if (TREE_CHAIN (elt) != NULL_TREE
5029 || (TREE_PURPOSE (elt) == NULL_TREE
5030 ? nbits != 1
5031 : ( ! host_integerp (TREE_VALUE (elt), 0)
5032 || ! host_integerp (TREE_PURPOSE (elt), 0)
5033 || (tree_low_cst (TREE_VALUE (elt), 0)
5034 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5035 != (HOST_WIDE_INT) nbits))))
5036 clear_storage (target, expr_size (exp));
5038 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5040 /* Start of range of element or NULL. */
5041 tree startbit = TREE_PURPOSE (elt);
5042 /* End of range of element, or element value. */
5043 tree endbit = TREE_VALUE (elt);
5044 HOST_WIDE_INT startb, endb;
5045 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5047 bitlength_rtx = expand_expr (bitlength,
5048 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5050 /* Handle non-range tuple element like [ expr ]. */
5051 if (startbit == NULL_TREE)
5053 startbit = save_expr (endbit);
5054 endbit = startbit;
5057 startbit = convert (sizetype, startbit);
5058 endbit = convert (sizetype, endbit);
5059 if (! integer_zerop (domain_min))
5061 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5062 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5064 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5065 EXPAND_CONST_ADDRESS);
5066 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5067 EXPAND_CONST_ADDRESS);
5069 if (REG_P (target))
5071 targetx
5072 = assign_temp
5073 ((build_qualified_type (lang_hooks.types.type_for_mode
5074 (GET_MODE (target), 0),
5075 TYPE_QUAL_CONST)),
5076 0, 1, 1);
5077 emit_move_insn (targetx, target);
5080 else if (MEM_P (target))
5081 targetx = target;
5082 else
5083 abort ();
5085 /* Optimization: If startbit and endbit are constants divisible
5086 by BITS_PER_UNIT, call memset instead. */
5087 if (TREE_CODE (startbit) == INTEGER_CST
5088 && TREE_CODE (endbit) == INTEGER_CST
5089 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5090 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5092 emit_library_call (memset_libfunc, LCT_NORMAL,
5093 VOIDmode, 3,
5094 plus_constant (XEXP (targetx, 0),
5095 startb / BITS_PER_UNIT),
5096 Pmode,
5097 constm1_rtx, TYPE_MODE (integer_type_node),
5098 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5099 TYPE_MODE (sizetype));
5101 else
5102 emit_library_call (setbits_libfunc, LCT_NORMAL,
5103 VOIDmode, 4, XEXP (targetx, 0),
5104 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5105 startbit_rtx, TYPE_MODE (sizetype),
5106 endbit_rtx, TYPE_MODE (sizetype));
5108 if (REG_P (target))
5109 emit_move_insn (target, targetx);
5113 else
5114 abort ();
5117 /* Store the value of EXP (an expression tree)
5118 into a subfield of TARGET which has mode MODE and occupies
5119 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5120 If MODE is VOIDmode, it means that we are storing into a bit-field.
5122 If VALUE_MODE is VOIDmode, return nothing in particular.
5123 UNSIGNEDP is not used in this case.
5125 Otherwise, return an rtx for the value stored. This rtx
5126 has mode VALUE_MODE if that is convenient to do.
5127 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5129 TYPE is the type of the underlying object,
5131 ALIAS_SET is the alias set for the destination. This value will
5132 (in general) be different from that for TARGET, since TARGET is a
5133 reference to the containing structure. */
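/* Illustrative sketch (an assumption, not part of the original sources):
   storing an 8-bit component that begins 16 bits into TO_RTX, with no
   value wanted back, would look roughly like

       store_field (to_rtx, 8, 16, QImode, from, VOIDmode, 0,
                    TREE_TYPE (tem), get_alias_set (to));

   mirroring the expand_assignment call earlier in this file; the concrete
   BITSIZE, BITPOS and MODE values here are made up for the example.  */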
5135 static rtx
5136 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5137 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5138 int unsignedp, tree type, int alias_set)
5140 HOST_WIDE_INT width_mask = 0;
5142 if (TREE_CODE (exp) == ERROR_MARK)
5143 return const0_rtx;
5145 /* If we have nothing to store, do nothing unless the expression has
5146 side-effects. */
5147 if (bitsize == 0)
5148 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5149 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5150 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5152 /* If we are storing into an unaligned field of an aligned union that is
5153 in a register, we may have the mode of TARGET being an integer mode but
5154 MODE == BLKmode. In that case, get an aligned object whose size and
5155 alignment are the same as TARGET and store TARGET into it (we can avoid
5156 the store if the field being stored is the entire width of TARGET). Then
5157 call ourselves recursively to store the field into a BLKmode version of
5158 that object. Finally, load from the object into TARGET. This is not
5159 very efficient in general, but should only be slightly more expensive
5160 than the otherwise-required unaligned accesses. Perhaps this can be
5161 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5162 twice, once with emit_move_insn and once via store_field. */
5164 if (mode == BLKmode
5165 && (REG_P (target) || GET_CODE (target) == SUBREG))
5167 rtx object = assign_temp (type, 0, 1, 1);
5168 rtx blk_object = adjust_address (object, BLKmode, 0);
5170 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5171 emit_move_insn (object, target);
5173 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5174 alias_set);
5176 emit_move_insn (target, object);
5178 /* We want to return the BLKmode version of the data. */
5179 return blk_object;
5182 if (GET_CODE (target) == CONCAT)
5184 /* We're storing into a struct containing a single __complex. */
5186 if (bitpos != 0)
5187 abort ();
5188 return store_expr (exp, target, value_mode != VOIDmode);
5191 /* If the structure is in a register or if the component
5192 is a bit field, we cannot use addressing to access it.
5193 Use bit-field techniques or SUBREG to store in it. */
5195 if (mode == VOIDmode
5196 || (mode != BLKmode && ! direct_store[(int) mode]
5197 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5198 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5199 || REG_P (target)
5200 || GET_CODE (target) == SUBREG
5201 /* If the field isn't aligned enough to store as an ordinary memref,
5202 store it as a bit field. */
5203 || (mode != BLKmode
5204 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5205 || bitpos % GET_MODE_ALIGNMENT (mode))
5206 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5207 || (bitpos % BITS_PER_UNIT != 0)))
5208 /* If the RHS and field are a constant size and the size of the
5209 RHS isn't the same size as the bitfield, we must use bitfield
5210 operations. */
5211 || (bitsize >= 0
5212 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5213 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5215 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5217 /* If BITSIZE is narrower than the size of the type of EXP
5218 we will be narrowing TEMP. Normally, what's wanted are the
5219 low-order bits. However, if EXP's type is a record and this is a
5220 big-endian machine, we want the upper BITSIZE bits. */
5221 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5222 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5223 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5224 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5225 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5226 - bitsize),
5227 NULL_RTX, 1);
5229 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5230 MODE. */
5231 if (mode != VOIDmode && mode != BLKmode
5232 && mode != TYPE_MODE (TREE_TYPE (exp)))
5233 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5235 /* If the modes of TARGET and TEMP are both BLKmode, both
5236 must be in memory and BITPOS must be aligned on a byte
5237 boundary. If so, we simply do a block copy. */
5238 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5240 if (!MEM_P (target) || !MEM_P (temp)
5241 || bitpos % BITS_PER_UNIT != 0)
5242 abort ();
5244 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5245 emit_block_move (target, temp,
5246 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5247 / BITS_PER_UNIT),
5248 BLOCK_OP_NORMAL);
5250 return value_mode == VOIDmode ? const0_rtx : target;
5253 /* Store the value in the bitfield. */
5254 store_bit_field (target, bitsize, bitpos, mode, temp);
5256 if (value_mode != VOIDmode)
5258 /* The caller wants an rtx for the value.
5259 If possible, avoid refetching from the bitfield itself. */
5260 if (width_mask != 0
5261 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5263 tree count;
5264 enum machine_mode tmode;
5266 tmode = GET_MODE (temp);
5267 if (tmode == VOIDmode)
5268 tmode = value_mode;
5270 if (unsignedp)
5271 return expand_and (tmode, temp,
5272 gen_int_mode (width_mask, tmode),
5273 NULL_RTX);
5275 count = build_int_cst (NULL_TREE,
5276 GET_MODE_BITSIZE (tmode) - bitsize);
5277 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5278 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5281 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5282 NULL_RTX, value_mode, VOIDmode);
5284 return const0_rtx;
5286 else
5288 rtx addr = XEXP (target, 0);
5289 rtx to_rtx = target;
5291 /* If a value is wanted, it must be the lhs;
5292 so make the address stable for multiple use. */
5294 if (value_mode != VOIDmode && !REG_P (addr)
5295 && ! CONSTANT_ADDRESS_P (addr)
5296 /* A frame-pointer reference is already stable. */
5297 && ! (GET_CODE (addr) == PLUS
5298 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5299 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5300 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5301 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5303 /* Now build a reference to just the desired component. */
5305 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5307 if (to_rtx == target)
5308 to_rtx = copy_rtx (to_rtx);
5310 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5311 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5312 set_mem_alias_set (to_rtx, alias_set);
5314 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5318 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5319 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5320 codes and find the ultimate containing object, which we return.
5322 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5323 bit position, and *PUNSIGNEDP to the signedness of the field.
5324 If the position of the field is variable, we store a tree
5325 giving the variable offset (in units) in *POFFSET.
5326 This offset is in addition to the bit position.
5327 If the position is not variable, we store 0 in *POFFSET.
5329 If any of the extraction expressions is volatile,
5330 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5332 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5333 is a mode that can be used to access the field. In that case, *PBITSIZE
5334 is redundant.
5336 If the field describes a variable-sized object, *PMODE is set to
5337 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5338 this case, but the address of the object can be found. */
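/* Illustrative example (an assumption, not part of the original sources):
   for a COMPONENT_REF such as s.f, where F is a 3-bit bit-field that
   starts 34 bits into S, this returns the reference to S and sets
   *PBITSIZE = 3, *PBITPOS = 34, *POFFSET = 0 and, because F is a
   bit-field, *PMODE = VOIDmode.  */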
5340 tree
5341 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5342 HOST_WIDE_INT *pbitpos, tree *poffset,
5343 enum machine_mode *pmode, int *punsignedp,
5344 int *pvolatilep)
5346 tree size_tree = 0;
5347 enum machine_mode mode = VOIDmode;
5348 tree offset = size_zero_node;
5349 tree bit_offset = bitsize_zero_node;
5350 tree tem;
5352 /* First get the mode, signedness, and size. We do this from just the
5353 outermost expression. */
5354 if (TREE_CODE (exp) == COMPONENT_REF)
5356 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5357 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5358 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5360 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5362 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5364 size_tree = TREE_OPERAND (exp, 1);
5365 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5367 else
5369 mode = TYPE_MODE (TREE_TYPE (exp));
5370 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5372 if (mode == BLKmode)
5373 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5374 else
5375 *pbitsize = GET_MODE_BITSIZE (mode);
5378 if (size_tree != 0)
5380 if (! host_integerp (size_tree, 1))
5381 mode = BLKmode, *pbitsize = -1;
5382 else
5383 *pbitsize = tree_low_cst (size_tree, 1);
5386 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5387 and find the ultimate containing object. */
5388 while (1)
5390 if (TREE_CODE (exp) == BIT_FIELD_REF)
5391 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5392 else if (TREE_CODE (exp) == COMPONENT_REF)
5394 tree field = TREE_OPERAND (exp, 1);
5395 tree this_offset = component_ref_field_offset (exp);
5397 /* If this field hasn't been filled in yet, don't go
5398 past it. This should only happen when folding expressions
5399 made during type construction. */
5400 if (this_offset == 0)
5401 break;
5403 offset = size_binop (PLUS_EXPR, offset, this_offset);
5404 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5405 DECL_FIELD_BIT_OFFSET (field));
5407 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5410 else if (TREE_CODE (exp) == ARRAY_REF
5411 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5413 tree index = TREE_OPERAND (exp, 1);
5414 tree low_bound = array_ref_low_bound (exp);
5415 tree unit_size = array_ref_element_size (exp);
5417 /* We assume all arrays have sizes that are a multiple of a byte.
5418 First subtract the lower bound, if any, in the type of the
5419 index, then convert to sizetype and multiply by the size of the
5420 array element. */
5421 if (! integer_zerop (low_bound))
5422 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5423 index, low_bound));
5425 offset = size_binop (PLUS_EXPR, offset,
5426 size_binop (MULT_EXPR,
5427 convert (sizetype, index),
5428 unit_size));
5431 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5432 conversions that don't change the mode, and all view conversions
5433 except those that need to "step up" the alignment. */
5434 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5435 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5436 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5437 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5438 && STRICT_ALIGNMENT
5439 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5440 < BIGGEST_ALIGNMENT)
5441 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5442 || TYPE_ALIGN_OK (TREE_TYPE
5443 (TREE_OPERAND (exp, 0))))))
5444 && ! ((TREE_CODE (exp) == NOP_EXPR
5445 || TREE_CODE (exp) == CONVERT_EXPR)
5446 && (TYPE_MODE (TREE_TYPE (exp))
5447 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5448 break;
5450 /* If any reference in the chain is volatile, the effect is volatile. */
5451 if (TREE_THIS_VOLATILE (exp))
5452 *pvolatilep = 1;
5454 exp = TREE_OPERAND (exp, 0);
5457 /* If OFFSET is constant, see if we can return the whole thing as a
5458 constant bit position. Otherwise, split it up. */
5459 if (host_integerp (offset, 0)
5460 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5461 bitsize_unit_node))
5462 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5463 && host_integerp (tem, 0))
5464 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5465 else
5466 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5468 *pmode = mode;
5469 return exp;
5472 /* Return a tree of sizetype representing the size, in bytes, of the element
5473 of EXP, an ARRAY_REF. */
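/* Illustrative example (an assumption, not part of the original sources):
   for an ARRAY_REF a[i] into an array of int on a target where int is
   4 bytes, and with no explicit size operand in the ARRAY_REF, this
   simply returns the element type's TYPE_SIZE_UNIT, i.e. a sizetype
   constant of 4.  */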
5475 tree
5476 array_ref_element_size (tree exp)
5478 tree aligned_size = TREE_OPERAND (exp, 3);
5479 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5481 /* If a size was specified in the ARRAY_REF, it's the size measured
5482 in alignment units of the element type. So multiply by that value. */
5483 if (aligned_size)
5484 return size_binop (MULT_EXPR, aligned_size,
5485 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5487 /* Otherwise, take the size from that of the element type. Substitute
5488 any PLACEHOLDER_EXPR that we have. */
5489 else
5490 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5493 /* Return a tree representing the lower bound of the array mentioned in
5494 EXP, an ARRAY_REF. */
5496 tree
5497 array_ref_low_bound (tree exp)
5499 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5501 /* If a lower bound is specified in EXP, use it. */
5502 if (TREE_OPERAND (exp, 2))
5503 return TREE_OPERAND (exp, 2);
5505 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5506 substituting for a PLACEHOLDER_EXPR as needed. */
5507 if (domain_type && TYPE_MIN_VALUE (domain_type))
5508 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5510 /* Otherwise, return a zero of the appropriate type. */
5511 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5514 /* Return a tree representing the upper bound of the array mentioned in
5515 EXP, an ARRAY_REF. */
5517 tree
5518 array_ref_up_bound (tree exp)
5520 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5522 /* If there is a domain type and it has an upper bound, use it, substituting
5523 for a PLACEHOLDER_EXPR as needed. */
5524 if (domain_type && TYPE_MAX_VALUE (domain_type))
5525 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5527 /* Otherwise fail. */
5528 return NULL_TREE;
5531 /* Return a tree representing the offset, in bytes, of the field referenced
5532 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5534 tree
5535 component_ref_field_offset (tree exp)
5537 tree aligned_offset = TREE_OPERAND (exp, 2);
5538 tree field = TREE_OPERAND (exp, 1);
5540 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5541 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5542 value. */
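  /* For instance (hypothetical figures): with a DECL_OFFSET_ALIGN of 64
     bits, an aligned offset of 2 denotes a byte offset of 16.  */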
5543 if (aligned_offset)
5544 return size_binop (MULT_EXPR, aligned_offset,
5545 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5547 /* Otherwise, take the offset from that of the field. Substitute
5548 any PLACEHOLDER_EXPR that we have. */
5549 else
5550 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5553 /* Return 1 if T is an expression that get_inner_reference handles. */
5556 handled_component_p (tree t)
5558 switch (TREE_CODE (t))
5560 case BIT_FIELD_REF:
5561 case COMPONENT_REF:
5562 case ARRAY_REF:
5563 case ARRAY_RANGE_REF:
5564 case NON_LVALUE_EXPR:
5565 case VIEW_CONVERT_EXPR:
5566 return 1;
5568 /* ??? Sure they are handled, but get_inner_reference may return
5569 a different PBITSIZE, depending upon whether the expression is
5570 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5571 case NOP_EXPR:
5572 case CONVERT_EXPR:
5573 return (TYPE_MODE (TREE_TYPE (t))
5574 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5576 default:
5577 return 0;
5581 /* Given an rtx VALUE that may contain additions and multiplications, return
5582 an equivalent value that just refers to a register, memory, or constant.
5583 This is done by generating instructions to perform the arithmetic and
5584 returning a pseudo-register containing the value.
5586 The returned value may be a REG, SUBREG, MEM or constant. */
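/* For example, given (plus (reg 100) (const_int 4)) this typically emits
   an add insn and returns a pseudo register holding the sum, while a bare
   REG, MEM or constant is returned unchanged.  */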
5589 force_operand (rtx value, rtx target)
5591 rtx op1, op2;
5592 /* Use subtarget as the target for operand 0 of a binary operation. */
5593 rtx subtarget = get_subtarget (target);
5594 enum rtx_code code = GET_CODE (value);
5596 /* Check for subreg applied to an expression produced by loop optimizer. */
5597 if (code == SUBREG
5598 && !REG_P (SUBREG_REG (value))
5599 && !MEM_P (SUBREG_REG (value)))
5601 value = simplify_gen_subreg (GET_MODE (value),
5602 force_reg (GET_MODE (SUBREG_REG (value)),
5603 force_operand (SUBREG_REG (value),
5604 NULL_RTX)),
5605 GET_MODE (SUBREG_REG (value)),
5606 SUBREG_BYTE (value));
5607 code = GET_CODE (value);
5610 /* Check for a PIC address load. */
5611 if ((code == PLUS || code == MINUS)
5612 && XEXP (value, 0) == pic_offset_table_rtx
5613 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5614 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5615 || GET_CODE (XEXP (value, 1)) == CONST))
5617 if (!subtarget)
5618 subtarget = gen_reg_rtx (GET_MODE (value));
5619 emit_move_insn (subtarget, value);
5620 return subtarget;
5623 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5625 if (!target)
5626 target = gen_reg_rtx (GET_MODE (value));
5627 convert_move (target, force_operand (XEXP (value, 0), NULL),
5628 code == ZERO_EXTEND);
5629 return target;
5632 if (ARITHMETIC_P (value))
5634 op2 = XEXP (value, 1);
5635 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5636 subtarget = 0;
5637 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5639 code = PLUS;
5640 op2 = negate_rtx (GET_MODE (value), op2);
5643 /* Check for an addition with OP2 a constant integer and our first
5644 operand a PLUS of a virtual register and something else. In that
5645 case, we want to emit the sum of the virtual register and the
5646 constant first and then add the other value. This allows virtual
5647 register instantiation to simply modify the constant rather than
5648 creating another one around this addition. */
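      /* For example, for ((virtual-stack-vars + R) + 8) we emit
         virtual-stack-vars + 8 first, so that instantiation can later
         fold the 8 into the frame offset, and only then add R.  */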
5649 if (code == PLUS && GET_CODE (op2) == CONST_INT
5650 && GET_CODE (XEXP (value, 0)) == PLUS
5651 && REG_P (XEXP (XEXP (value, 0), 0))
5652 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5653 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5655 rtx temp = expand_simple_binop (GET_MODE (value), code,
5656 XEXP (XEXP (value, 0), 0), op2,
5657 subtarget, 0, OPTAB_LIB_WIDEN);
5658 return expand_simple_binop (GET_MODE (value), code, temp,
5659 force_operand (XEXP (XEXP (value,
5660 0), 1), 0),
5661 target, 0, OPTAB_LIB_WIDEN);
5664 op1 = force_operand (XEXP (value, 0), subtarget);
5665 op2 = force_operand (op2, NULL_RTX);
5666 switch (code)
5668 case MULT:
5669 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5670 case DIV:
5671 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5672 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5673 target, 1, OPTAB_LIB_WIDEN);
5674 else
5675 return expand_divmod (0,
5676 FLOAT_MODE_P (GET_MODE (value))
5677 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5678 GET_MODE (value), op1, op2, target, 0);
5679 break;
5680 case MOD:
5681 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5682 target, 0);
5683 break;
5684 case UDIV:
5685 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5686 target, 1);
5687 break;
5688 case UMOD:
5689 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5690 target, 1);
5691 break;
5692 case ASHIFTRT:
5693 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5694 target, 0, OPTAB_LIB_WIDEN);
5695 break;
5696 default:
5697 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5698 target, 1, OPTAB_LIB_WIDEN);
5701 if (UNARY_P (value))
5703 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5704 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5707 #ifdef INSN_SCHEDULING
5708 /* On machines that have insn scheduling, we want all memory references to be
5709 explicit, so we need to deal with such paradoxical SUBREGs. */
5710 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5711 && (GET_MODE_SIZE (GET_MODE (value))
5712 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5713 value
5714 = simplify_gen_subreg (GET_MODE (value),
5715 force_reg (GET_MODE (SUBREG_REG (value)),
5716 force_operand (SUBREG_REG (value),
5717 NULL_RTX)),
5718 GET_MODE (SUBREG_REG (value)),
5719 SUBREG_BYTE (value));
5720 #endif
5722 return value;
5725 /* Subroutine of expand_expr: return nonzero iff there is no way that
5726 EXP can reference X, which is being modified. TOP_P is nonzero if this
5727 call is going to be used to determine whether we need a temporary
5728 for EXP, as opposed to a recursive call to this function.
5730 It is always safe for this routine to return zero since it merely
5731 searches for optimization opportunities. */
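/* For instance, expand_operands below uses this to decide whether a
   proposed TARGET for the first operand could be clobbered by the second
   operand, and drops TARGET when that cannot be ruled out.  */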
5734 safe_from_p (rtx x, tree exp, int top_p)
5736 rtx exp_rtl = 0;
5737 int i, nops;
5739 if (x == 0
5740 /* If EXP has varying size, we MUST use a target since we currently
5741 have no way of allocating temporaries of variable size
5742 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5743 So we assume here that something at a higher level has prevented a
5744 clash. This is somewhat bogus, but the best we can do. Only
5745 do this when X is BLKmode and when we are at the top level. */
5746 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5747 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5748 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5749 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5750 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5751 != INTEGER_CST)
5752 && GET_MODE (x) == BLKmode)
5753 /* If X is in the outgoing argument area, it is always safe. */
5754 || (MEM_P (x)
5755 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5756 || (GET_CODE (XEXP (x, 0)) == PLUS
5757 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5758 return 1;
5760 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5761 find the underlying pseudo. */
5762 if (GET_CODE (x) == SUBREG)
5764 x = SUBREG_REG (x);
5765 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5766 return 0;
5769 /* Now look at our tree code and possibly recurse. */
5770 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5772 case 'd':
5773 exp_rtl = DECL_RTL_IF_SET (exp);
5774 break;
5776 case 'c':
5777 return 1;
5779 case 'x':
5780 if (TREE_CODE (exp) == TREE_LIST)
5782 while (1)
5784 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5785 return 0;
5786 exp = TREE_CHAIN (exp);
5787 if (!exp)
5788 return 1;
5789 if (TREE_CODE (exp) != TREE_LIST)
5790 return safe_from_p (x, exp, 0);
5793 else if (TREE_CODE (exp) == ERROR_MARK)
5794 return 1; /* An already-visited SAVE_EXPR? */
5795 else
5796 return 0;
5798 case 's':
5799 /* The only case we look at here is the DECL_INITIAL inside a
5800 DECL_EXPR. */
5801 return (TREE_CODE (exp) != DECL_EXPR
5802 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5803 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5804 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5806 case '2':
5807 case '<':
5808 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5809 return 0;
5810 /* Fall through. */
5812 case '1':
5813 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5815 case 'e':
5816 case 'r':
5817 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5818 the expression. If it is set, we conflict iff we are that rtx or
5819 both are in memory. Otherwise, we check all operands of the
5820 expression recursively. */
5822 switch (TREE_CODE (exp))
5824 case ADDR_EXPR:
5825 /* If the operand is static or we are static, we can't conflict.
5826 Likewise if we don't conflict with the operand at all. */
5827 if (staticp (TREE_OPERAND (exp, 0))
5828 || TREE_STATIC (exp)
5829 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5830 return 1;
5832 /* Otherwise, the only way this can conflict is if we are taking
5833 the address of a DECL whose address is part of X, which is
5834 very rare. */
5835 exp = TREE_OPERAND (exp, 0);
5836 if (DECL_P (exp))
5838 if (!DECL_RTL_SET_P (exp)
5839 || !MEM_P (DECL_RTL (exp)))
5840 return 0;
5841 else
5842 exp_rtl = XEXP (DECL_RTL (exp), 0);
5844 break;
5846 case INDIRECT_REF:
5847 if (MEM_P (x)
5848 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5849 get_alias_set (exp)))
5850 return 0;
5851 break;
5853 case CALL_EXPR:
5854 /* Assume that the call will clobber all hard registers and
5855 all of memory. */
5856 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5857 || MEM_P (x))
5858 return 0;
5859 break;
5861 case WITH_CLEANUP_EXPR:
5862 case CLEANUP_POINT_EXPR:
5863 /* Lowered by gimplify.c. */
5864 abort ();
5866 case SAVE_EXPR:
5867 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5869 default:
5870 break;
5873 /* If we have an rtx, we do not need to scan our operands. */
5874 if (exp_rtl)
5875 break;
5877 nops = first_rtl_op (TREE_CODE (exp));
5878 for (i = 0; i < nops; i++)
5879 if (TREE_OPERAND (exp, i) != 0
5880 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5881 return 0;
5883 /* If this is a language-specific tree code, it may require
5884 special handling. */
5885 if ((unsigned int) TREE_CODE (exp)
5886 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5887 && !lang_hooks.safe_from_p (x, exp))
5888 return 0;
5891 /* If we have an rtl, find any enclosed object. Then see if we conflict
5892 with it. */
5893 if (exp_rtl)
5895 if (GET_CODE (exp_rtl) == SUBREG)
5897 exp_rtl = SUBREG_REG (exp_rtl);
5898 if (REG_P (exp_rtl)
5899 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5900 return 0;
5903 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5904 are memory and they conflict. */
5905 return ! (rtx_equal_p (x, exp_rtl)
5906 || (MEM_P (x) && MEM_P (exp_rtl)
5907 && true_dependence (exp_rtl, VOIDmode, x,
5908 rtx_addr_varies_p)));
5911 /* If we reach here, it is safe. */
5912 return 1;
5916 /* Return the highest power of two that EXP is known to be a multiple of.
5917 This is used in updating alignment of MEMs in array references. */
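/* For example, for the expression I * 12 + 8 the MULT case yields a
   factor of 4 (1 from I times 4 from 12), the constant 8 yields 8, and
   the PLUS case returns MIN (4, 8) == 4.  */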
5919 static unsigned HOST_WIDE_INT
5920 highest_pow2_factor (tree exp)
5922 unsigned HOST_WIDE_INT c0, c1;
5924 switch (TREE_CODE (exp))
5926 case INTEGER_CST:
5927 /* We can find the lowest bit that's a one. If the low
5928 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5929 We need to handle this case since we can find it in a COND_EXPR,
5930 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5931 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5932 later ICE. */
5933 if (TREE_CONSTANT_OVERFLOW (exp))
5934 return BIGGEST_ALIGNMENT;
5935 else
5937 /* Note: tree_low_cst is intentionally not used here,
5938 since we don't care about the upper bits. */
5939 c0 = TREE_INT_CST_LOW (exp);
5940 c0 &= -c0;
5941 return c0 ? c0 : BIGGEST_ALIGNMENT;
5943 break;
5945 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5946 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5947 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5948 return MIN (c0, c1);
5950 case MULT_EXPR:
5951 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5952 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5953 return c0 * c1;
5955 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5956 case CEIL_DIV_EXPR:
5957 if (integer_pow2p (TREE_OPERAND (exp, 1))
5958 && host_integerp (TREE_OPERAND (exp, 1), 1))
5960 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5961 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5962 return MAX (1, c0 / c1);
5964 break;
5966 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5967 case SAVE_EXPR:
5968 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5970 case COMPOUND_EXPR:
5971 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5973 case COND_EXPR:
5974 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5975 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5976 return MIN (c0, c1);
5978 default:
5979 break;
5982 return 1;
5985 /* Similar, except that the alignment requirements of TARGET are
5986 taken into account. Assume it is at least as aligned as its
5987 type, unless it is a COMPONENT_REF in which case the layout of
5988 the structure gives the alignment. */
5990 static unsigned HOST_WIDE_INT
5991 highest_pow2_factor_for_target (tree target, tree exp)
5993 unsigned HOST_WIDE_INT target_align, factor;
5995 factor = highest_pow2_factor (exp);
5996 if (TREE_CODE (target) == COMPONENT_REF)
5997 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
5998 else
5999 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6000 return MAX (factor, target_align);
6003 /* Expands variable VAR. */
6005 void
6006 expand_var (tree var)
6008 if (DECL_EXTERNAL (var))
6009 return;
6011 if (TREE_STATIC (var))
6012 /* If this is an inlined copy of a static local variable,
6013 look up the original decl. */
6014 var = DECL_ORIGIN (var);
6016 if (TREE_STATIC (var)
6017 ? !TREE_ASM_WRITTEN (var)
6018 : !DECL_RTL_SET_P (var))
6020 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
6021 /* Should be ignored. */;
6022 else if (lang_hooks.expand_decl (var))
6023 /* OK. */;
6024 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6025 expand_decl (var);
6026 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6027 rest_of_decl_compilation (var, 0, 0);
6028 else if (TREE_CODE (var) == TYPE_DECL
6029 || TREE_CODE (var) == CONST_DECL
6030 || TREE_CODE (var) == FUNCTION_DECL
6031 || TREE_CODE (var) == LABEL_DECL)
6032 /* No expansion needed. */;
6033 else
6034 abort ();
6038 /* Subroutine of expand_expr. Expand the two operands of a binary
6039 expression, EXP0 and EXP1, placing the results in OP0 and OP1.
6040 The value may be stored in TARGET if TARGET is nonzero. The
6041 MODIFIER argument is as documented by expand_expr. */
6043 static void
6044 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6045 enum expand_modifier modifier)
6047 if (! safe_from_p (target, exp1, 1))
6048 target = 0;
6049 if (operand_equal_p (exp0, exp1, 0))
6051 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6052 *op1 = copy_rtx (*op0);
6054 else
6056 /* If we need to preserve evaluation order, copy exp0 into its own
6057 temporary variable so that it can't be clobbered by exp1. */
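      /* E.g. (hypothetically) for X + F (&X), where F modifies X, the
         SAVE_EXPR captures X's value before the call when left-to-right
         evaluation has been requested.  */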
6058 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6059 exp0 = save_expr (exp0);
6060 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6061 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6066 /* expand_expr: generate code for computing expression EXP.
6067 An rtx for the computed value is returned. The value is never null.
6068 In the case of a void EXP, const0_rtx is returned.
6070 The value may be stored in TARGET if TARGET is nonzero.
6071 TARGET is just a suggestion; callers must assume that
6072 the rtx returned may not be the same as TARGET.
6074 If TARGET is CONST0_RTX, it means that the value will be ignored.
6076 If TMODE is not VOIDmode, it suggests generating the
6077 result in mode TMODE. But this is done only when convenient.
6078 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6079 TMODE is just a suggestion; callers must assume that
6080 the rtx returned may not have mode TMODE.
6082 Note that TARGET may have neither TMODE nor MODE. In that case, it
6083 probably will not be used.
6085 If MODIFIER is EXPAND_SUM then when EXP is an addition
6086 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6087 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6088 products as above, or REG or MEM, or constant.
6089 Ordinarily in such cases we would output mul or add instructions
6090 and then return a pseudo reg containing the sum.
6092 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6093 it also marks a label as absolutely required (it can't be dead).
6094 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6095 This is used for outputting expressions used in initializers.
6097 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6098 with a constant address even if that address is not normally legitimate.
6099 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6101 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6102 a call parameter. Such targets require special care as we haven't yet
6103 marked TARGET so that it's safe from being trashed by libcalls. We
6104 don't want to use TARGET for anything but the final result;
6105 Intermediate values must go elsewhere. Additionally, calls to
6106 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6108 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6109 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6110 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6111 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6112 recursively. */
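/* For example, under EXPAND_SUM an address computation such as A[I]
   (hypothetically, with 4-byte elements) may be returned as
   (plus (mult (reg) (const_int 4)) (symbol_ref A)) rather than summed
   into a single pseudo, so the caller can fold it into an addressing
   mode.  */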
6114 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6115 enum expand_modifier, rtx *);
6118 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6119 enum expand_modifier modifier, rtx *alt_rtl)
6121 int rn = -1;
6122 rtx ret, last = NULL;
6124 /* Handle ERROR_MARK before anybody tries to access its type. */
6125 if (TREE_CODE (exp) == ERROR_MARK
6126 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6128 ret = CONST0_RTX (tmode);
6129 return ret ? ret : const0_rtx;
6132 if (flag_non_call_exceptions)
6134 rn = lookup_stmt_eh_region (exp);
6135 /* If rn < 0, then either (1) tree-ssa is not used or (2) this statement doesn't throw. */
6136 if (rn >= 0)
6137 last = get_last_insn ();
6140 /* If this is an expression of some kind and it has an associated line
6141 number, then emit the line number before expanding the expression.
6143 We need to save and restore the file and line information so that
6144 errors discovered during expansion are emitted with the right
6145 information. It would be better if the diagnostic routines
6146 used the file/line information embedded in the tree nodes rather
6147 than globals. */
6148 if (cfun && EXPR_HAS_LOCATION (exp))
6150 location_t saved_location = input_location;
6151 input_location = EXPR_LOCATION (exp);
6152 emit_line_note (input_location);
6154 /* Record where the insns produced belong. */
6155 record_block_change (TREE_BLOCK (exp));
6157 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6159 input_location = saved_location;
6161 else
6163 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6166 /* If using non-call exceptions, mark all insns that may trap.
6167 expand_call() will mark CALL_INSNs before we get to this code,
6168 but it doesn't handle libcalls, and these may trap. */
6169 if (rn >= 0)
6171 rtx insn;
6172 for (insn = next_real_insn (last); insn;
6173 insn = next_real_insn (insn))
6175 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6176 /* If we want exceptions for non-call insns, any
6177 may_trap_p instruction may throw. */
6178 && GET_CODE (PATTERN (insn)) != CLOBBER
6179 && GET_CODE (PATTERN (insn)) != USE
6180 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6182 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6183 REG_NOTES (insn));
6188 return ret;
6191 static rtx
6192 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6193 enum expand_modifier modifier, rtx *alt_rtl)
6195 rtx op0, op1, temp;
6196 tree type = TREE_TYPE (exp);
6197 int unsignedp;
6198 enum machine_mode mode;
6199 enum tree_code code = TREE_CODE (exp);
6200 optab this_optab;
6201 rtx subtarget, original_target;
6202 int ignore;
6203 tree context;
6204 bool reduce_bit_field = false;
6205 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6206 ? reduce_to_bit_field_precision ((expr), \
6207 target, \
6208 type) \
6209 : (expr))
6211 mode = TYPE_MODE (type);
6212 unsignedp = TYPE_UNSIGNED (type);
6213 if (lang_hooks.reduce_bit_field_operations
6214 && TREE_CODE (type) == INTEGER_TYPE
6215 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6217 /* An operation in what may be a bit-field type needs the
6218 result to be reduced to the precision of the bit-field type,
6219 which is narrower than that of the type's mode. */
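      /* For instance (hypothetically), a 5-bit unsigned bit-field type
         carried in QImode: adding 31 + 1 gives 32 in QImode, which must
         be reduced back to 0 in the 5-bit type.  */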
6220 reduce_bit_field = true;
6221 if (modifier == EXPAND_STACK_PARM)
6222 target = 0;
6225 /* Use subtarget as the target for operand 0 of a binary operation. */
6226 subtarget = get_subtarget (target);
6227 original_target = target;
6228 ignore = (target == const0_rtx
6229 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6230 || code == CONVERT_EXPR || code == COND_EXPR
6231 || code == VIEW_CONVERT_EXPR)
6232 && TREE_CODE (type) == VOID_TYPE));
6234 /* If we are going to ignore this result, we need only do something
6235 if there is a side-effect somewhere in the expression. If there
6236 is, short-circuit the most common cases here. Note that we must
6237 not call expand_expr with anything but const0_rtx in case this
6238 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6240 if (ignore)
6242 if (! TREE_SIDE_EFFECTS (exp))
6243 return const0_rtx;
6245 /* Ensure we reference a volatile object even if value is ignored, but
6246 don't do this if all we are doing is taking its address. */
6247 if (TREE_THIS_VOLATILE (exp)
6248 && TREE_CODE (exp) != FUNCTION_DECL
6249 && mode != VOIDmode && mode != BLKmode
6250 && modifier != EXPAND_CONST_ADDRESS)
6252 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6253 if (MEM_P (temp))
6254 temp = copy_to_reg (temp);
6255 return const0_rtx;
6258 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6259 || code == INDIRECT_REF)
6260 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6261 modifier);
6263 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6264 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6266 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6267 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6268 return const0_rtx;
6270 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6271 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6272 /* If the second operand has no side effects, just evaluate
6273 the first. */
6274 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6275 modifier);
6276 else if (code == BIT_FIELD_REF)
6278 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6279 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6280 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6281 return const0_rtx;
6284 target = 0;
6287 /* If we will do cse, generate all results into pseudo registers
6288 since 1) that allows cse to find more things
6289 and 2) otherwise cse could produce an insn the machine
6290 cannot support. An exception is a CONSTRUCTOR into a multi-word
6291 MEM: that's much more likely to be most efficient into the MEM.
6292 Another is a CALL_EXPR which must return in memory. */
6294 if (! cse_not_expected && mode != BLKmode && target
6295 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6296 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6297 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6298 target = 0;
6300 switch (code)
6302 case LABEL_DECL:
6304 tree function = decl_function_context (exp);
6306 temp = label_rtx (exp);
6307 temp = gen_rtx_LABEL_REF (Pmode, temp);
6309 if (function != current_function_decl
6310 && function != 0)
6311 LABEL_REF_NONLOCAL_P (temp) = 1;
6313 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6314 return temp;
6317 case PARM_DECL:
6318 case VAR_DECL:
6319 /* If a static var's type was incomplete when the decl was written,
6320 but the type is complete now, lay out the decl now. */
6321 if (DECL_SIZE (exp) == 0
6322 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6323 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6324 layout_decl (exp, 0);
6326 /* ... fall through ... */
6328 case FUNCTION_DECL:
6329 case RESULT_DECL:
6330 if (DECL_RTL (exp) == 0)
6331 abort ();
6333 /* Ensure variable marked as used even if it doesn't go through
6334 a parser. If it hasn't been used yet, write out an external
6335 definition. */
6336 if (! TREE_USED (exp))
6338 assemble_external (exp);
6339 TREE_USED (exp) = 1;
6342 /* Show we haven't gotten RTL for this yet. */
6343 temp = 0;
6345 /* Variables inherited from containing functions should have
6346 been lowered by this point. */
6347 context = decl_function_context (exp);
6348 if (context != 0
6349 && context != current_function_decl
6350 && !TREE_STATIC (exp)
6351 /* ??? C++ creates functions that are not TREE_STATIC. */
6352 && TREE_CODE (exp) != FUNCTION_DECL)
6353 abort ();
6355 /* This is the case of an array whose size is to be determined
6356 from its initializer, while the initializer is still being parsed.
6357 See expand_decl. */
6359 else if (MEM_P (DECL_RTL (exp))
6360 && REG_P (XEXP (DECL_RTL (exp), 0)))
6361 temp = validize_mem (DECL_RTL (exp));
6363 /* If DECL_RTL is memory, we are in the normal case and either
6364 the address is not valid or it is not a register and -fforce-addr
6365 is specified, get the address into a register. */
6367 else if (MEM_P (DECL_RTL (exp))
6368 && modifier != EXPAND_CONST_ADDRESS
6369 && modifier != EXPAND_SUM
6370 && modifier != EXPAND_INITIALIZER
6371 && (! memory_address_p (DECL_MODE (exp),
6372 XEXP (DECL_RTL (exp), 0))
6373 || (flag_force_addr
6374 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6376 if (alt_rtl)
6377 *alt_rtl = DECL_RTL (exp);
6378 temp = replace_equiv_address (DECL_RTL (exp),
6379 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6382 /* If we got something, return it. But first, set the alignment
6383 if the address is a register. */
6384 if (temp != 0)
6386 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6387 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6389 return temp;
6392 /* If the mode of DECL_RTL does not match that of the decl, it
6393 must be a promoted value. We return a SUBREG of the wanted mode,
6394 but mark it so that we know that it was already extended. */
6396 if (REG_P (DECL_RTL (exp))
6397 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6399 /* Get the signedness used for this variable. Ensure we get the
6400 same mode we got when the variable was declared. */
6401 if (GET_MODE (DECL_RTL (exp))
6402 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6403 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6404 abort ();
6406 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6407 SUBREG_PROMOTED_VAR_P (temp) = 1;
6408 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6409 return temp;
6412 return DECL_RTL (exp);
6414 case INTEGER_CST:
6415 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6416 TREE_INT_CST_HIGH (exp), mode);
6418 /* ??? If overflow is set, fold will have done an incomplete job,
6419 which can result in (plus xx (const_int 0)), which can get
6420 simplified by validate_replace_rtx during virtual register
6421 instantiation, which can result in unrecognizable insns.
6422 Avoid this by forcing all overflows into registers. */
6423 if (TREE_CONSTANT_OVERFLOW (exp)
6424 && modifier != EXPAND_INITIALIZER)
6425 temp = force_reg (mode, temp);
6427 return temp;
6429 case VECTOR_CST:
6430 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6431 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6432 return const_vector_from_tree (exp);
6433 else
6434 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6435 TREE_VECTOR_CST_ELTS (exp)),
6436 ignore ? const0_rtx : target, tmode, modifier);
6438 case CONST_DECL:
6439 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6441 case REAL_CST:
6442 /* If optimized, generate immediate CONST_DOUBLE
6443 which will be turned into memory by reload if necessary.
6445 We used to force a register so that loop.c could see it. But
6446 this does not allow gen_* patterns to perform optimizations with
6447 the constants. It also produces two insns in cases like "x = 1.0;".
6448 On most machines, floating-point constants are not permitted in
6449 many insns, so we'd end up copying it to a register in any case.
6451 Now, we do the copying in expand_binop, if appropriate. */
6452 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6453 TYPE_MODE (TREE_TYPE (exp)));
6455 case COMPLEX_CST:
6456 /* Handle evaluating a complex constant in a CONCAT target. */
6457 if (original_target && GET_CODE (original_target) == CONCAT)
6459 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6460 rtx rtarg, itarg;
6462 rtarg = XEXP (original_target, 0);
6463 itarg = XEXP (original_target, 1);
6465 /* Move the real and imaginary parts separately. */
6466 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6467 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6469 if (op0 != rtarg)
6470 emit_move_insn (rtarg, op0);
6471 if (op1 != itarg)
6472 emit_move_insn (itarg, op1);
6474 return original_target;
6477 /* ... fall through ... */
6479 case STRING_CST:
6480 temp = output_constant_def (exp, 1);
6482 /* temp contains a constant address.
6483 On RISC machines where a constant address isn't valid,
6484 make some insns to get that address into a register. */
6485 if (modifier != EXPAND_CONST_ADDRESS
6486 && modifier != EXPAND_INITIALIZER
6487 && modifier != EXPAND_SUM
6488 && (! memory_address_p (mode, XEXP (temp, 0))
6489 || flag_force_addr))
6490 return replace_equiv_address (temp,
6491 copy_rtx (XEXP (temp, 0)));
6492 return temp;
6494 case SAVE_EXPR:
6496 tree val = TREE_OPERAND (exp, 0);
6497 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6499 if (!SAVE_EXPR_RESOLVED_P (exp))
6501 /* We can indeed still hit this case, typically via builtin
6502 expanders calling save_expr immediately before expanding
6503 something. Assume this means that we only have to deal
6504 with non-BLKmode values. */
6505 if (GET_MODE (ret) == BLKmode)
6506 abort ();
6508 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6509 DECL_ARTIFICIAL (val) = 1;
6510 DECL_IGNORED_P (val) = 1;
6511 TREE_OPERAND (exp, 0) = val;
6512 SAVE_EXPR_RESOLVED_P (exp) = 1;
6514 if (!CONSTANT_P (ret))
6515 ret = copy_to_reg (ret);
6516 SET_DECL_RTL (val, ret);
6519 return ret;
6522 case GOTO_EXPR:
6523 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6524 expand_goto (TREE_OPERAND (exp, 0));
6525 else
6526 expand_computed_goto (TREE_OPERAND (exp, 0));
6527 return const0_rtx;
6529 case CONSTRUCTOR:
6530 /* If we don't need the result, just ensure we evaluate any
6531 subexpressions. */
6532 if (ignore)
6534 tree elt;
6536 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6537 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6539 return const0_rtx;
6542 /* All elts simple constants => refer to a constant in memory. But
6543 if this is a non-BLKmode mode, let it store a field at a time
6544 since that should make a CONST_INT or CONST_DOUBLE when we
6545 fold. Likewise, if we have a target we can use, it is best to
6546 store directly into the target unless the type is large enough
6547 that memcpy will be used. If we are making an initializer and
6548 all operands are constant, put it in memory as well.
6550 FIXME: Avoid trying to fill vector constructors piece-meal.
6551 Output them with output_constant_def below unless we're sure
6552 they're zeros. This should go away when vector initializers
6553 are treated like VECTOR_CST instead of arrays. */
6555 else if ((TREE_STATIC (exp)
6556 && ((mode == BLKmode
6557 && ! (target != 0 && safe_from_p (target, exp, 1)))
6558 || TREE_ADDRESSABLE (exp)
6559 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6560 && (! MOVE_BY_PIECES_P
6561 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6562 TYPE_ALIGN (type)))
6563 && ! mostly_zeros_p (exp))))
6564 || ((modifier == EXPAND_INITIALIZER
6565 || modifier == EXPAND_CONST_ADDRESS)
6566 && TREE_CONSTANT (exp)))
6568 rtx constructor = output_constant_def (exp, 1);
6570 if (modifier != EXPAND_CONST_ADDRESS
6571 && modifier != EXPAND_INITIALIZER
6572 && modifier != EXPAND_SUM)
6573 constructor = validize_mem (constructor);
6575 return constructor;
6577 else
6579 /* Handle calls that pass values in multiple non-contiguous
6580 locations. The Irix 6 ABI has examples of this. */
6581 if (target == 0 || ! safe_from_p (target, exp, 1)
6582 || GET_CODE (target) == PARALLEL
6583 || modifier == EXPAND_STACK_PARM)
6584 target
6585 = assign_temp (build_qualified_type (type,
6586 (TYPE_QUALS (type)
6587 | (TREE_READONLY (exp)
6588 * TYPE_QUAL_CONST))),
6589 0, TREE_ADDRESSABLE (exp), 1);
6591 store_constructor (exp, target, 0, int_expr_size (exp));
6592 return target;
6595 case INDIRECT_REF:
6597 tree exp1 = TREE_OPERAND (exp, 0);
6599 if (modifier != EXPAND_WRITE)
6601 tree t;
6603 t = fold_read_from_constant_string (exp);
6604 if (t)
6605 return expand_expr (t, target, tmode, modifier);
6608 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6609 op0 = memory_address (mode, op0);
6610 temp = gen_rtx_MEM (mode, op0);
6611 set_mem_attributes (temp, exp, 0);
6613 return temp;
6616 case ARRAY_REF:
6618 #ifdef ENABLE_CHECKING
6619 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6620 abort ();
6621 #endif
6624 tree array = TREE_OPERAND (exp, 0);
6625 tree low_bound = array_ref_low_bound (exp);
6626 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6627 HOST_WIDE_INT i;
6629 /* Optimize the special-case of a zero lower bound.
6631 We convert the low_bound to sizetype to avoid some problems
6632 with constant folding. (E.g. suppose the lower bound is 1,
6633 and its mode is QI. Without the conversion, (ARRAY
6634 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6635 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6637 if (! integer_zerop (low_bound))
6638 index = size_diffop (index, convert (sizetype, low_bound));
6640 /* Fold an expression like: "foo"[2].
6641 This is not done in fold so it won't happen inside &.
6642 Don't fold if this is for wide characters since it's too
6643 difficult to do correctly and this is a very rare case. */
6645 if (modifier != EXPAND_CONST_ADDRESS
6646 && modifier != EXPAND_INITIALIZER
6647 && modifier != EXPAND_MEMORY)
6649 tree t = fold_read_from_constant_string (exp);
6651 if (t)
6652 return expand_expr (t, target, tmode, modifier);
6655 /* If this is a constant index into a constant array,
6656 just get the value from the array. Handle both the cases when
6657 we have an explicit constructor and when our operand is a variable
6658 that was declared const. */
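        /* E.g. (hypothetically) for static const int TBL[3] = {1, 2, 3},
           a use of TBL[1] can be replaced by the constant 2 here, provided
           TBL binds locally and its initializer is available.  */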
6660 if (modifier != EXPAND_CONST_ADDRESS
6661 && modifier != EXPAND_INITIALIZER
6662 && modifier != EXPAND_MEMORY
6663 && TREE_CODE (array) == CONSTRUCTOR
6664 && ! TREE_SIDE_EFFECTS (array)
6665 && TREE_CODE (index) == INTEGER_CST
6666 && 0 > compare_tree_int (index,
6667 list_length (CONSTRUCTOR_ELTS
6668 (TREE_OPERAND (exp, 0)))))
6670 tree elem;
6672 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6673 i = TREE_INT_CST_LOW (index);
6674 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6677 if (elem)
6678 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6679 modifier);
6682 else if (optimize >= 1
6683 && modifier != EXPAND_CONST_ADDRESS
6684 && modifier != EXPAND_INITIALIZER
6685 && modifier != EXPAND_MEMORY
6686 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6687 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6688 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6689 && targetm.binds_local_p (array))
6691 if (TREE_CODE (index) == INTEGER_CST)
6693 tree init = DECL_INITIAL (array);
6695 if (TREE_CODE (init) == CONSTRUCTOR)
6697 tree elem;
6699 for (elem = CONSTRUCTOR_ELTS (init);
6700 (elem
6701 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6702 elem = TREE_CHAIN (elem))
6705 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6706 return expand_expr (fold (TREE_VALUE (elem)), target,
6707 tmode, modifier);
6709 else if (TREE_CODE (init) == STRING_CST
6710 && 0 > compare_tree_int (index,
6711 TREE_STRING_LENGTH (init)))
6713 tree type = TREE_TYPE (TREE_TYPE (init));
6714 enum machine_mode mode = TYPE_MODE (type);
6716 if (GET_MODE_CLASS (mode) == MODE_INT
6717 && GET_MODE_SIZE (mode) == 1)
6718 return gen_int_mode (TREE_STRING_POINTER (init)
6719 [TREE_INT_CST_LOW (index)], mode);
6724 goto normal_inner_ref;
6726 case COMPONENT_REF:
6727 /* If the operand is a CONSTRUCTOR, we can just extract the
6728 appropriate field if it is present. */
6729 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6731 tree elt;
6733 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6734 elt = TREE_CHAIN (elt))
6735 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6736 /* We can normally use the value of the field in the
6737 CONSTRUCTOR. However, if this is a bitfield in
6738 an integral mode that we can fit in a HOST_WIDE_INT,
6739 we must mask only the number of bits in the bitfield,
6740 since this is done implicitly by the constructor. If
6741 the bitfield does not meet either of those conditions,
6742 we can't do this optimization. */
6743 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6744 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6745 == MODE_INT)
6746 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6747 <= HOST_BITS_PER_WIDE_INT))))
6749 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6750 && modifier == EXPAND_STACK_PARM)
6751 target = 0;
6752 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6753 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6755 HOST_WIDE_INT bitsize
6756 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6757 enum machine_mode imode
6758 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6760 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6762 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6763 op0 = expand_and (imode, op0, op1, target);
6765 else
6767 tree count
6768 = build_int_cst (NULL_TREE,
6769 GET_MODE_BITSIZE (imode) - bitsize);
6771 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6772 target, 0);
6773 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6774 target, 0);
6778 return op0;
6781 goto normal_inner_ref;
6783 case BIT_FIELD_REF:
6784 case ARRAY_RANGE_REF:
6785 normal_inner_ref:
6787 enum machine_mode mode1;
6788 HOST_WIDE_INT bitsize, bitpos;
6789 tree offset;
6790 int volatilep = 0;
6791 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6792 &mode1, &unsignedp, &volatilep);
6793 rtx orig_op0;
6795 /* If we got back the original object, something is wrong. Perhaps
6796 we are evaluating an expression too early. In any event, don't
6797 infinitely recurse. */
6798 if (tem == exp)
6799 abort ();
6801 /* If TEM's type is a union of variable size, pass TARGET to the inner
6802 computation, since it will need a temporary and TARGET is known
6803 to suffice. This occurs in unchecked conversion in Ada. */
6805 orig_op0 = op0
6806 = expand_expr (tem,
6807 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6808 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6809 != INTEGER_CST)
6810 && modifier != EXPAND_STACK_PARM
6811 ? target : NULL_RTX),
6812 VOIDmode,
6813 (modifier == EXPAND_INITIALIZER
6814 || modifier == EXPAND_CONST_ADDRESS
6815 || modifier == EXPAND_STACK_PARM)
6816 ? modifier : EXPAND_NORMAL);
6818 /* If this is a constant, put it into a register if it is a
6819 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6820 if (CONSTANT_P (op0))
6822 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6823 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6824 && offset == 0)
6825 op0 = force_reg (mode, op0);
6826 else
6827 op0 = validize_mem (force_const_mem (mode, op0));
6830 /* Otherwise, if this object is not in memory and we either have an
6831 offset or a BLKmode result, put it there. This case can't occur in
6832 C, but can in Ada if we have unchecked conversion of an expression
6833 from a scalar type to an array or record type or for an
6834 ARRAY_RANGE_REF whose type is BLKmode. */
6835 else if (!MEM_P (op0)
6836 && (offset != 0
6837 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6839 tree nt = build_qualified_type (TREE_TYPE (tem),
6840 (TYPE_QUALS (TREE_TYPE (tem))
6841 | TYPE_QUAL_CONST));
6842 rtx memloc = assign_temp (nt, 1, 1, 1);
6844 emit_move_insn (memloc, op0);
6845 op0 = memloc;
6848 if (offset != 0)
6850 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6851 EXPAND_SUM);
6853 if (!MEM_P (op0))
6854 abort ();
6856 #ifdef POINTERS_EXTEND_UNSIGNED
6857 if (GET_MODE (offset_rtx) != Pmode)
6858 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6859 #else
6860 if (GET_MODE (offset_rtx) != ptr_mode)
6861 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6862 #endif
6864 if (GET_MODE (op0) == BLKmode
6865 /* A constant address in OP0 can have VOIDmode; we must
6866 not try to call force_reg in that case. */
6867 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6868 && bitsize != 0
6869 && (bitpos % bitsize) == 0
6870 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6871 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6873 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6874 bitpos = 0;
6877 op0 = offset_address (op0, offset_rtx,
6878 highest_pow2_factor (offset));
6881 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6882 record its alignment as BIGGEST_ALIGNMENT. */
6883 if (MEM_P (op0) && bitpos == 0 && offset != 0
6884 && is_aligning_offset (offset, tem))
6885 set_mem_align (op0, BIGGEST_ALIGNMENT);
6887 /* Don't forget about volatility even if this is a bitfield. */
6888 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6890 if (op0 == orig_op0)
6891 op0 = copy_rtx (op0);
6893 MEM_VOLATILE_P (op0) = 1;
6896 /* The following code doesn't handle CONCAT.
6897 Assume only bitpos == 0 can be used for CONCAT, due to
6898 one-element arrays having the same mode as their element.
6899 if (GET_CODE (op0) == CONCAT)
6901 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6902 abort ();
6903 return op0;
6906 /* In cases where an aligned union has an unaligned object
6907 as a field, we might be extracting a BLKmode value from
6908 an integer-mode (e.g., SImode) object. Handle this case
6909 by doing the extract into an object as wide as the field
6910 (which we know to be the width of a basic mode), then
6911 storing into memory, and changing the mode to BLKmode. */
6912 if (mode1 == VOIDmode
6913 || REG_P (op0) || GET_CODE (op0) == SUBREG
6914 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6915 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6916 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6917 && modifier != EXPAND_CONST_ADDRESS
6918 && modifier != EXPAND_INITIALIZER)
6919 /* If the field isn't aligned enough to fetch as a memref,
6920 fetch it as a bit field. */
6921 || (mode1 != BLKmode
6922 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6923 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6924 || (MEM_P (op0)
6925 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6926 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6927 && ((modifier == EXPAND_CONST_ADDRESS
6928 || modifier == EXPAND_INITIALIZER)
6929 ? STRICT_ALIGNMENT
6930 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6931 || (bitpos % BITS_PER_UNIT != 0)))
6932 /* If the type and the field are a constant size and the
6933 size of the type isn't the same size as the bitfield,
6934 we must use bitfield operations. */
6935 || (bitsize >= 0
6936 && TYPE_SIZE (TREE_TYPE (exp))
6937 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6938 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6939 bitsize)))
6941 enum machine_mode ext_mode = mode;
6943 if (ext_mode == BLKmode
6944 && ! (target != 0 && MEM_P (op0)
6945 && MEM_P (target)
6946 && bitpos % BITS_PER_UNIT == 0))
6947 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6949 if (ext_mode == BLKmode)
6951 if (target == 0)
6952 target = assign_temp (type, 0, 1, 1);
6954 if (bitsize == 0)
6955 return target;
6957 /* In this case, BITPOS must start at a byte boundary and
6958 TARGET, if specified, must be a MEM. */
6959 if (!MEM_P (op0)
6960 || (target != 0 && !MEM_P (target))
6961 || bitpos % BITS_PER_UNIT != 0)
6962 abort ();
6964 emit_block_move (target,
6965 adjust_address (op0, VOIDmode,
6966 bitpos / BITS_PER_UNIT),
6967 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6968 / BITS_PER_UNIT),
6969 (modifier == EXPAND_STACK_PARM
6970 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
6972 return target;
6975 op0 = validize_mem (op0);
6977 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
6978 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6980 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
6981 (modifier == EXPAND_STACK_PARM
6982 ? NULL_RTX : target),
6983 ext_mode, ext_mode);
6985 /* If the result is a record type and BITSIZE is narrower than
6986 the mode of OP0, an integral mode, and this is a big endian
6987 machine, we must put the field into the high-order bits. */
6988 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6989 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6990 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6991 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6992 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6993 - bitsize),
6994 op0, 1);
6996 /* If the result type is BLKmode, store the data into a temporary
6997 of the appropriate type, but with the mode corresponding to the
6998 mode for the data we have (op0's mode). It's tempting to make
6999 this a constant type, since we know it's only being stored once,
7000 but that can cause problems if we are taking the address of this
7001 COMPONENT_REF because the MEM of any reference via that address
7002 will have flags corresponding to the type, which will not
7003 necessarily be constant. */
7004 if (mode == BLKmode)
7006 rtx new
7007 = assign_stack_temp_for_type
7008 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7010 emit_move_insn (new, op0);
7011 op0 = copy_rtx (new);
7012 PUT_MODE (op0, BLKmode);
7013 set_mem_attributes (op0, exp, 1);
7016 return op0;
7019 /* If the result is BLKmode, use that to access the object
7020 now as well. */
7021 if (mode == BLKmode)
7022 mode1 = BLKmode;
7024 /* Get a reference to just this component. */
7025 if (modifier == EXPAND_CONST_ADDRESS
7026 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7027 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7028 else
7029 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7031 if (op0 == orig_op0)
7032 op0 = copy_rtx (op0);
7034 set_mem_attributes (op0, exp, 0);
7035 if (REG_P (XEXP (op0, 0)))
7036 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7038 MEM_VOLATILE_P (op0) |= volatilep;
7039 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7040 || modifier == EXPAND_CONST_ADDRESS
7041 || modifier == EXPAND_INITIALIZER)
7042 return op0;
7043 else if (target == 0)
7044 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7046 convert_move (target, op0, unsignedp);
7047 return target;
7050 case OBJ_TYPE_REF:
7051 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7053 case CALL_EXPR:
7054 /* Check for a built-in function. */
7055 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7056 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7057 == FUNCTION_DECL)
7058 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7060 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7061 == BUILT_IN_FRONTEND)
7062 return lang_hooks.expand_expr (exp, original_target,
7063 tmode, modifier,
7064 alt_rtl);
7065 else
7066 return expand_builtin (exp, target, subtarget, tmode, ignore);
7069 return expand_call (exp, target, ignore);
7071 case NON_LVALUE_EXPR:
7072 case NOP_EXPR:
7073 case CONVERT_EXPR:
7074 if (TREE_OPERAND (exp, 0) == error_mark_node)
7075 return const0_rtx;
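      /* A conversion to a union type arises, e.g., from the GNU
         cast-to-union extension, (union u) x: the operand is stored into
         the appropriate member and the whole union is returned.  */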
7077 if (TREE_CODE (type) == UNION_TYPE)
7079 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7081 /* If both input and output are BLKmode, this conversion isn't doing
7082 anything except possibly changing memory attributes. */
7083 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7085 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7086 modifier);
7088 result = copy_rtx (result);
7089 set_mem_attributes (result, exp, 0);
7090 return result;
7093 if (target == 0)
7095 if (TYPE_MODE (type) != BLKmode)
7096 target = gen_reg_rtx (TYPE_MODE (type));
7097 else
7098 target = assign_temp (type, 0, 1, 1);
7101 if (MEM_P (target))
7102 /* Store data into beginning of memory target. */
7103 store_expr (TREE_OPERAND (exp, 0),
7104 adjust_address (target, TYPE_MODE (valtype), 0),
7105 modifier == EXPAND_STACK_PARM ? 2 : 0);
7107 else if (REG_P (target))
7108 /* Store this field into a union of the proper type. */
7109 store_field (target,
7110 MIN ((int_size_in_bytes (TREE_TYPE
7111 (TREE_OPERAND (exp, 0)))
7112 * BITS_PER_UNIT),
7113 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7114 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7115 VOIDmode, 0, type, 0);
7116 else
7117 abort ();
7119 /* Return the entire union. */
7120 return target;
7123 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7125 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7126 modifier);
7128 /* If the signedness of the conversion differs and OP0 is
7129 a promoted SUBREG, clear that indication since we now
7130 have to do the proper extension. */
7131 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7132 && GET_CODE (op0) == SUBREG)
7133 SUBREG_PROMOTED_VAR_P (op0) = 0;
7135 return REDUCE_BIT_FIELD (op0);
7138 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7139 op0 = REDUCE_BIT_FIELD (op0);
7140 if (GET_MODE (op0) == mode)
7141 return op0;
7143 /* If OP0 is a constant, just convert it into the proper mode. */
7144 if (CONSTANT_P (op0))
7146 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7147 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7149 if (modifier == EXPAND_INITIALIZER)
7150 return simplify_gen_subreg (mode, op0, inner_mode,
7151 subreg_lowpart_offset (mode,
7152 inner_mode));
7153 else
7154 return convert_modes (mode, inner_mode, op0,
7155 TYPE_UNSIGNED (inner_type));
7158 if (modifier == EXPAND_INITIALIZER)
7159 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7161 if (target == 0)
7162 return
7163 convert_to_mode (mode, op0,
7164 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7165 else
7166 convert_move (target, op0,
7167 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7168 return target;
7170 case VIEW_CONVERT_EXPR:
7171 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7173 /* If the input and output modes are both the same, we are done.
7174 Otherwise, if neither mode is BLKmode and both are integral and within
7175 a word, we can use gen_lowpart. If neither is true, make sure the
7176 operand is in memory and convert the MEM to the new mode. */
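      /* For example, viewing a 32-bit float as a 32-bit integer type
         reinterprets the same bits; since SFmode is not an integer-class
         mode, gen_lowpart does not apply and the operand is accessed
         through memory in the integer mode (spilling it to a stack
         temporary if it is not already a MEM).  */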
7177 if (TYPE_MODE (type) == GET_MODE (op0))
7179 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7180 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7181 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7182 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7183 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7184 op0 = gen_lowpart (TYPE_MODE (type), op0);
7185 else if (!MEM_P (op0))
7187 /* If the operand is not a MEM, force it into memory. Since we
7188 are going to be changing the mode of the MEM, don't call
7189 force_const_mem for constants because we don't allow pool
7190 constants to change mode. */
7191 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7193 if (TREE_ADDRESSABLE (exp))
7194 abort ();
7196 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7197 target
7198 = assign_stack_temp_for_type
7199 (TYPE_MODE (inner_type),
7200 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7202 emit_move_insn (target, op0);
7203 op0 = target;
7206 /* At this point, OP0 is in the correct mode. If the output type is such
7207 that the operand is known to be aligned, indicate that it is.
7208 Otherwise, we need only be concerned about alignment for non-BLKmode
7209 results. */
7210 if (MEM_P (op0))
7212 op0 = copy_rtx (op0);
7214 if (TYPE_ALIGN_OK (type))
7215 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7216 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7217 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7219 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7220 HOST_WIDE_INT temp_size
7221 = MAX (int_size_in_bytes (inner_type),
7222 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7223 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7224 temp_size, 0, type);
7225 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7227 if (TREE_ADDRESSABLE (exp))
7228 abort ();
7230 if (GET_MODE (op0) == BLKmode)
7231 emit_block_move (new_with_op0_mode, op0,
7232 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7233 (modifier == EXPAND_STACK_PARM
7234 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7235 else
7236 emit_move_insn (new_with_op0_mode, op0);
7238 op0 = new;
7241 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7244 return op0;
7246 case PLUS_EXPR:
7247 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7248 something else, make sure we add the register to the constant and
7249 then to the other thing. This case can occur during strength
7250 reduction and doing it this way will produce better code if the
7251 frame pointer or argument pointer is eliminated.
7253 fold-const.c will ensure that the constant is always in the inner
7254 PLUS_EXPR, so the only case we need to do anything about is if
7255 sp, ap, or fp is our second argument, in which case we must swap
7256 the innermost first argument and our second argument. */
7258 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7259 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7260 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7261 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7262 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7263 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7265 tree t = TREE_OPERAND (exp, 1);
7267 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7268 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7271 /* If the result is to be ptr_mode and we are adding an integer to
7272 something, we might be forming a constant. So try to use
7273 plus_constant. If it produces a sum and we can't accept it,
7274 use force_operand. This allows P = &ARR[const] to generate
7275 efficient code on machines where a SYMBOL_REF is not a valid
7276 address.
7278 If this is an EXPAND_SUM call, always return the sum. */
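      /* E.g. (hypothetically) &ARR[10] with 4-byte elements can become
         (plus (symbol_ref ARR) (const_int 40)) via plus_constant, folding
         the offset into the address instead of emitting an add insn.  */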
7279 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7280 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7282 if (modifier == EXPAND_STACK_PARM)
7283 target = 0;
7284 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7285 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7286 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7288 rtx constant_part;
7290 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7291 EXPAND_SUM);
7292 /* Use immed_double_const to ensure that the constant is
7293 truncated according to the mode of OP1, then sign extended
7294 to a HOST_WIDE_INT. Using the constant directly can result
7295 in non-canonical RTL in a 64x32 cross compile. */
7296 constant_part
7297 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7298 (HOST_WIDE_INT) 0,
7299 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7300 op1 = plus_constant (op1, INTVAL (constant_part));
7301 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7302 op1 = force_operand (op1, target);
7303 return REDUCE_BIT_FIELD (op1);
7306 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7307 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7308 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7310 rtx constant_part;
7312 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7313 (modifier == EXPAND_INITIALIZER
7314 ? EXPAND_INITIALIZER : EXPAND_SUM));
7315 if (! CONSTANT_P (op0))
7317 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7318 VOIDmode, modifier);
7319 /* Return a PLUS if modifier says it's OK. */
7320 if (modifier == EXPAND_SUM
7321 || modifier == EXPAND_INITIALIZER)
7322 return simplify_gen_binary (PLUS, mode, op0, op1);
7323 goto binop2;
7325 /* Use immed_double_const to ensure that the constant is
7326 truncated according to the mode of OP1, then sign extended
7327 to a HOST_WIDE_INT. Using the constant directly can result
7328 in non-canonical RTL in a 64x32 cross compile. */
7329 constant_part
7330 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7331 (HOST_WIDE_INT) 0,
7332 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7333 op0 = plus_constant (op0, INTVAL (constant_part));
7334 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7335 op0 = force_operand (op0, target);
7336 return REDUCE_BIT_FIELD (op0);
7340 /* No sense saving up arithmetic to be done
7341 if it's all in the wrong mode to form part of an address.
7342 And force_operand won't know whether to sign-extend or
7343 zero-extend. */
7344 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7345 || mode != ptr_mode)
7347 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7348 subtarget, &op0, &op1, 0);
7349 if (op0 == const0_rtx)
7350 return op1;
7351 if (op1 == const0_rtx)
7352 return op0;
7353 goto binop2;
7356 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7357 subtarget, &op0, &op1, modifier);
7358 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7360 case MINUS_EXPR:
7361 /* For initializers, we are allowed to return a MINUS of two
7362 symbolic constants. Here we handle all cases when both operands
7363 are constant. */
7364 /* Handle difference of two symbolic constants,
7365 for the sake of an initializer. */
7366 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7367 && really_constant_p (TREE_OPERAND (exp, 0))
7368 && really_constant_p (TREE_OPERAND (exp, 1)))
7370 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7371 NULL_RTX, &op0, &op1, modifier);
7373 /* If the last operand is a CONST_INT, use plus_constant of
7374 the negated constant. Else make the MINUS. */
7375 if (GET_CODE (op1) == CONST_INT)
7376 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7377 else
7378 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7381 /* No sense saving up arithmetic to be done
7382 if it's all in the wrong mode to form part of an address.
7383 And force_operand won't know whether to sign-extend or
7384 zero-extend. */
7385 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7386 || mode != ptr_mode)
7387 goto binop;
7389 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7390 subtarget, &op0, &op1, modifier);
7392 /* Convert A - const to A + (-const). */
7393 if (GET_CODE (op1) == CONST_INT)
7395 op1 = negate_rtx (mode, op1);
7396 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7399 goto binop2;
7401 case MULT_EXPR:
7402 /* If first operand is constant, swap them.
7403 Thus the following special case checks need only
7404 check the second operand. */
7405 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7407 tree t1 = TREE_OPERAND (exp, 0);
7408 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7409 TREE_OPERAND (exp, 1) = t1;
7412 /* Attempt to return something suitable for generating an
7413 indexed address, for machines that support that. */
7415 if (modifier == EXPAND_SUM && mode == ptr_mode
7416 && host_integerp (TREE_OPERAND (exp, 1), 0))
7418 tree exp1 = TREE_OPERAND (exp, 1);
7420 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7421 EXPAND_SUM);
7423 if (!REG_P (op0))
7424 op0 = force_operand (op0, NULL_RTX);
7425 if (!REG_P (op0))
7426 op0 = copy_to_mode_reg (mode, op0);
7428 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7429 gen_int_mode (tree_low_cst (exp1, 0),
7430 TYPE_MODE (TREE_TYPE (exp1)))));
7433 if (modifier == EXPAND_STACK_PARM)
7434 target = 0;
7436 /* Check for multiplying things that have been extended
7437 from a narrower type. If this machine supports multiplying
7438 in that narrower type with a result in the desired type,
7439 do it that way, and avoid the explicit type-conversion. */
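     /* For example, on a machine that can multiply two HImode values and
        produce an SImode result directly, (int) sa * (int) sb with SA and
        SB of type short is expanded as a single widening multiply instead
        of two extensions followed by a full SImode multiply.  */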
7440 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7441 && TREE_CODE (type) == INTEGER_TYPE
7442 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7443 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7444 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7445 && int_fits_type_p (TREE_OPERAND (exp, 1),
7446 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7447 /* Don't use a widening multiply if a shift will do. */
7448 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7449 > HOST_BITS_PER_WIDE_INT)
7450 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7452 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7453 && (TYPE_PRECISION (TREE_TYPE
7454 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7455 == TYPE_PRECISION (TREE_TYPE
7456 (TREE_OPERAND
7457 (TREE_OPERAND (exp, 0), 0))))
7458 /* If both operands are extended, they must either both
7459 be zero-extended or both be sign-extended. */
7460 && (TYPE_UNSIGNED (TREE_TYPE
7461 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7462 == TYPE_UNSIGNED (TREE_TYPE
7463 (TREE_OPERAND
7464 (TREE_OPERAND (exp, 0), 0)))))))
7466 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7467 enum machine_mode innermode = TYPE_MODE (op0type);
7468 bool zextend_p = TYPE_UNSIGNED (op0type);
7469 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7470 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7472 if (mode == GET_MODE_WIDER_MODE (innermode))
7474 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7476 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7477 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7478 TREE_OPERAND (exp, 1),
7479 NULL_RTX, &op0, &op1, 0);
7480 else
7481 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7482 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7483 NULL_RTX, &op0, &op1, 0);
7484 goto binop3;
7486 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7487 && innermode == word_mode)
7489 rtx htem, hipart;
7490 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7491 NULL_RTX, VOIDmode, 0);
7492 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7493 op1 = convert_modes (innermode, mode,
7494 expand_expr (TREE_OPERAND (exp, 1),
7495 NULL_RTX, VOIDmode, 0),
7496 unsignedp);
7497 else
7498 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7499 NULL_RTX, VOIDmode, 0);
7500 temp = expand_binop (mode, other_optab, op0, op1, target,
7501 unsignedp, OPTAB_LIB_WIDEN);
7502 hipart = gen_highpart (innermode, temp);
7503 htem = expand_mult_highpart_adjust (innermode, hipart,
7504 op0, op1, hipart,
7505 zextend_p);
7506 if (htem != hipart)
7507 emit_move_insn (hipart, htem);
7508 return REDUCE_BIT_FIELD (temp);
7512 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7513 subtarget, &op0, &op1, 0);
7514 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7516 case TRUNC_DIV_EXPR:
7517 case FLOOR_DIV_EXPR:
7518 case CEIL_DIV_EXPR:
7519 case ROUND_DIV_EXPR:
7520 case EXACT_DIV_EXPR:
7521 if (modifier == EXPAND_STACK_PARM)
7522 target = 0;
7523 /* Possible optimization: compute the dividend with EXPAND_SUM
7524        then, if the divisor is constant, optimize the case where
7525        some terms of the dividend have coefficients divisible by it.  */
7526 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7527 subtarget, &op0, &op1, 0);
7528 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7530 case RDIV_EXPR:
7531      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
7532         saving an expensive divide.  If not, combine will rebuild the original
7533 computation. */
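     /* For example, if X/D and Y/D both appear, the two multiplications
        then share a single computation of 1/D once CSE has run.  */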
7534 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7535 && TREE_CODE (type) == REAL_TYPE
7536 && !real_onep (TREE_OPERAND (exp, 0)))
7537 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7538 build2 (RDIV_EXPR, type,
7539 build_real (type, dconst1),
7540 TREE_OPERAND (exp, 1))),
7541 target, tmode, modifier);
7543 goto binop;
7545 case TRUNC_MOD_EXPR:
7546 case FLOOR_MOD_EXPR:
7547 case CEIL_MOD_EXPR:
7548 case ROUND_MOD_EXPR:
7549 if (modifier == EXPAND_STACK_PARM)
7550 target = 0;
7551 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7552 subtarget, &op0, &op1, 0);
7553 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7555 case FIX_ROUND_EXPR:
7556 case FIX_FLOOR_EXPR:
7557 case FIX_CEIL_EXPR:
7558 abort (); /* Not used for C. */
7560 case FIX_TRUNC_EXPR:
7561 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7562 if (target == 0 || modifier == EXPAND_STACK_PARM)
7563 target = gen_reg_rtx (mode);
7564 expand_fix (target, op0, unsignedp);
7565 return target;
7567 case FLOAT_EXPR:
7568 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7569 if (target == 0 || modifier == EXPAND_STACK_PARM)
7570 target = gen_reg_rtx (mode);
7571 /* expand_float can't figure out what to do if FROM has VOIDmode.
7572 So give it the correct mode. With -O, cse will optimize this. */
7573 if (GET_MODE (op0) == VOIDmode)
7574 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7575 op0);
7576 expand_float (target, op0,
7577 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7578 return target;
7580 case NEGATE_EXPR:
7581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7582 if (modifier == EXPAND_STACK_PARM)
7583 target = 0;
7584 temp = expand_unop (mode,
7585 optab_for_tree_code (NEGATE_EXPR, type),
7586 op0, target, 0);
7587 if (temp == 0)
7588 abort ();
7589 return REDUCE_BIT_FIELD (temp);
7591 case ABS_EXPR:
7592 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7593 if (modifier == EXPAND_STACK_PARM)
7594 target = 0;
7596 /* ABS_EXPR is not valid for complex arguments. */
7597 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7598 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7599 abort ();
7601 /* Unsigned abs is simply the operand. Testing here means we don't
7602 risk generating incorrect code below. */
7603 if (TYPE_UNSIGNED (type))
7604 return op0;
7606 return expand_abs (mode, op0, target, unsignedp,
7607 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7609 case MAX_EXPR:
7610 case MIN_EXPR:
7611 target = original_target;
7612 if (target == 0
7613 || modifier == EXPAND_STACK_PARM
7614 || (MEM_P (target) && MEM_VOLATILE_P (target))
7615 || GET_MODE (target) != mode
7616 || (REG_P (target)
7617 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7618 target = gen_reg_rtx (mode);
7619 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7620 target, &op0, &op1, 0);
7622 /* First try to do it with a special MIN or MAX instruction.
7623 If that does not win, use a conditional jump to select the proper
7624 value. */
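     /* The fallback for MAX_EXPR is roughly
          target = op0; if (target >= op1) goto done; target = op1; done:
        and the analogous sequence using LE for MIN_EXPR.  */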
7625 this_optab = optab_for_tree_code (code, type);
7626 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7627 OPTAB_WIDEN);
7628 if (temp != 0)
7629 return temp;
7631 /* At this point, a MEM target is no longer useful; we will get better
7632 code without it. */
7634 if (MEM_P (target))
7635 target = gen_reg_rtx (mode);
7637 /* If op1 was placed in target, swap op0 and op1. */
7638 if (target != op0 && target == op1)
7640 rtx tem = op0;
7641 op0 = op1;
7642 op1 = tem;
7645 if (target != op0)
7646 emit_move_insn (target, op0);
7648 op0 = gen_label_rtx ();
7650 /* If this mode is an integer too wide to compare properly,
7651 compare word by word. Rely on cse to optimize constant cases. */
7652 if (GET_MODE_CLASS (mode) == MODE_INT
7653 && ! can_compare_p (GE, mode, ccp_jump))
7655 if (code == MAX_EXPR)
7656 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7657 NULL_RTX, op0);
7658 else
7659 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7660 NULL_RTX, op0);
7662 else
7664 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7665 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7667 emit_move_insn (target, op1);
7668 emit_label (op0);
7669 return target;
7671 case BIT_NOT_EXPR:
7672 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7673 if (modifier == EXPAND_STACK_PARM)
7674 target = 0;
7675 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7676 if (temp == 0)
7677 abort ();
7678 return temp;
7680 /* ??? Can optimize bitwise operations with one arg constant.
7681 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7682 and (a bitwise1 b) bitwise2 b (etc)
7683 but that is probably not worth while. */
7685 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7686 boolean values when we want in all cases to compute both of them. In
7687 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7688 as actual zero-or-1 values and then bitwise anding. In cases where
7689 there cannot be any side effects, better code would be made by
7690 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7691 how to recognize those cases. */
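     /* Each TRUTH_*_EXPR case below rewrites CODE and then deliberately
        falls through into the corresponding BIT_*_EXPR case.  */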
7693 case TRUTH_AND_EXPR:
7694 code = BIT_AND_EXPR;
7695 case BIT_AND_EXPR:
7696 goto binop;
7698 case TRUTH_OR_EXPR:
7699 code = BIT_IOR_EXPR;
7700 case BIT_IOR_EXPR:
7701 goto binop;
7703 case TRUTH_XOR_EXPR:
7704 code = BIT_XOR_EXPR;
7705 case BIT_XOR_EXPR:
7706 goto binop;
7708 case LSHIFT_EXPR:
7709 case RSHIFT_EXPR:
7710 case LROTATE_EXPR:
7711 case RROTATE_EXPR:
7712 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7713 subtarget = 0;
7714 if (modifier == EXPAND_STACK_PARM)
7715 target = 0;
7716 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7717 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7718 unsignedp);
7720 /* Could determine the answer when only additive constants differ. Also,
7721 the addition of one can be handled by changing the condition. */
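     /* (For integer operands, A < B + 1 is the same test as A <= B, for
        instance.)  */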
7722 case LT_EXPR:
7723 case LE_EXPR:
7724 case GT_EXPR:
7725 case GE_EXPR:
7726 case EQ_EXPR:
7727 case NE_EXPR:
7728 case UNORDERED_EXPR:
7729 case ORDERED_EXPR:
7730 case UNLT_EXPR:
7731 case UNLE_EXPR:
7732 case UNGT_EXPR:
7733 case UNGE_EXPR:
7734 case UNEQ_EXPR:
7735 case LTGT_EXPR:
7736 temp = do_store_flag (exp,
7737 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7738 tmode != VOIDmode ? tmode : mode, 0);
7739 if (temp != 0)
7740 return temp;
7742 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7743 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7744 && original_target
7745 && REG_P (original_target)
7746 && (GET_MODE (original_target)
7747 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7749 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7750 VOIDmode, 0);
7752 /* If temp is constant, we can just compute the result. */
7753 if (GET_CODE (temp) == CONST_INT)
7755 if (INTVAL (temp) != 0)
7756 emit_move_insn (target, const1_rtx);
7757 else
7758 emit_move_insn (target, const0_rtx);
7760 return target;
7763 if (temp != original_target)
7765 enum machine_mode mode1 = GET_MODE (temp);
7766 if (mode1 == VOIDmode)
7767 mode1 = tmode != VOIDmode ? tmode : mode;
7769 temp = copy_to_mode_reg (mode1, temp);
7772 op1 = gen_label_rtx ();
7773 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7774 GET_MODE (temp), unsignedp, op1);
7775 emit_move_insn (temp, const1_rtx);
7776 emit_label (op1);
7777 return temp;
7780 /* If no set-flag instruction, must generate a conditional store
7781 into a temporary variable. Drop through and handle this
7782 like && and ||. */
7784 if (! ignore
7785 && (target == 0
7786 || modifier == EXPAND_STACK_PARM
7787 || ! safe_from_p (target, exp, 1)
7788 /* Make sure we don't have a hard reg (such as function's return
7789 value) live across basic blocks, if not optimizing. */
7790 || (!optimize && REG_P (target)
7791 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7792 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7794 if (target)
7795 emit_move_insn (target, const0_rtx);
7797 op1 = gen_label_rtx ();
7798 jumpifnot (exp, op1);
7800 if (target)
7801 emit_move_insn (target, const1_rtx);
7803 emit_label (op1);
7804 return ignore ? const0_rtx : target;
7806 case TRUTH_NOT_EXPR:
7807 if (modifier == EXPAND_STACK_PARM)
7808 target = 0;
7809 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7810 /* The parser is careful to generate TRUTH_NOT_EXPR
7811 only with operands that are always zero or one. */
7812 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7813 target, 1, OPTAB_LIB_WIDEN);
7814 if (temp == 0)
7815 abort ();
7816 return temp;
7818 case STATEMENT_LIST:
7820 tree_stmt_iterator iter;
7822 if (!ignore)
7823 abort ();
7825 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7826 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7828 return const0_rtx;
7830 case COND_EXPR:
7831 /* If it's void, we don't need to worry about computing a value. */
7832 if (VOID_TYPE_P (TREE_TYPE (exp)))
7834 tree pred = TREE_OPERAND (exp, 0);
7835 tree then_ = TREE_OPERAND (exp, 1);
7836 tree else_ = TREE_OPERAND (exp, 2);
7838 if (TREE_CODE (then_) != GOTO_EXPR
7839 || TREE_CODE (GOTO_DESTINATION (then_)) != LABEL_DECL
7840 || TREE_CODE (else_) != GOTO_EXPR
7841 || TREE_CODE (GOTO_DESTINATION (else_)) != LABEL_DECL)
7842 abort ();
7844 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7845 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7848 /* Note that COND_EXPRs whose type is a structure or union
7849 are required to be constructed to contain assignments of
7850 a temporary variable, so that we can evaluate them here
7851 for side effect only. If type is void, we must do likewise. */
7853 if (TREE_ADDRESSABLE (type)
7854 || ignore
7855 || TREE_TYPE (TREE_OPERAND (exp, 1)) == void_type_node
7856 || TREE_TYPE (TREE_OPERAND (exp, 2)) == void_type_node)
7857 abort ();
7859 /* If we are not to produce a result, we have no target. Otherwise,
7860 if a target was specified use it; it will not be used as an
7861 intermediate target unless it is safe. If no target, use a
7862 temporary. */
7864 if (modifier != EXPAND_STACK_PARM
7865 && original_target
7866 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7867 && GET_MODE (original_target) == mode
7868 #ifdef HAVE_conditional_move
7869 && (! can_conditionally_move_p (mode)
7870 || REG_P (original_target))
7871 #endif
7872 && !MEM_P (original_target))
7873 temp = original_target;
7874 else
7875 temp = assign_temp (type, 0, 0, 1);
7877 do_pending_stack_adjust ();
7878 NO_DEFER_POP;
7879 op0 = gen_label_rtx ();
7880 op1 = gen_label_rtx ();
7881 jumpifnot (TREE_OPERAND (exp, 0), op0);
7882 store_expr (TREE_OPERAND (exp, 1), temp,
7883 modifier == EXPAND_STACK_PARM ? 2 : 0);
7885 emit_jump_insn (gen_jump (op1));
7886 emit_barrier ();
7887 emit_label (op0);
7888 store_expr (TREE_OPERAND (exp, 2), temp,
7889 modifier == EXPAND_STACK_PARM ? 2 : 0);
7891 emit_label (op1);
7892 OK_DEFER_POP;
7893 return temp;
7895 case MODIFY_EXPR:
7897 /* If lhs is complex, expand calls in rhs before computing it.
7898 That's so we don't compute a pointer and save it over a
7899 call. If lhs is simple, compute it first so we can give it
7900 as a target if the rhs is just a call. This avoids an
7901 extra temp and copy and that prevents a partial-subsumption
7902 which makes bad code. Actually we could treat
7903 component_ref's of vars like vars. */
7905 tree lhs = TREE_OPERAND (exp, 0);
7906 tree rhs = TREE_OPERAND (exp, 1);
7908 temp = 0;
7910 /* Check for |= or &= of a bitfield of size one into another bitfield
7911 of size 1. In this case, (unless we need the result of the
7912 assignment) we can do this more efficiently with a
7913 test followed by an assignment, if necessary.
7915 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7916 things change so we do, this code should be enhanced to
7917 support it. */
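        /* For example, with one-bit bitfields, `s.a |= t.b;' (result
           unused) becomes `if (t.b) s.a = 1;', avoiding a
           read-modify-write of s.a.  */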
7918 if (ignore
7919 && TREE_CODE (lhs) == COMPONENT_REF
7920 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7921 || TREE_CODE (rhs) == BIT_AND_EXPR)
7922 && TREE_OPERAND (rhs, 0) == lhs
7923 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7924 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7925 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7927 rtx label = gen_label_rtx ();
7929 do_jump (TREE_OPERAND (rhs, 1),
7930 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7931 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7932 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7933 (TREE_CODE (rhs) == BIT_IOR_EXPR
7934 ? integer_one_node
7935 : integer_zero_node)),
7937 do_pending_stack_adjust ();
7938 emit_label (label);
7939 return const0_rtx;
7942 temp = expand_assignment (lhs, rhs, ! ignore);
7944 return temp;
7947 case RETURN_EXPR:
7948 if (!TREE_OPERAND (exp, 0))
7949 expand_null_return ();
7950 else
7951 expand_return (TREE_OPERAND (exp, 0));
7952 return const0_rtx;
7954 case ADDR_EXPR:
7955 if (modifier == EXPAND_STACK_PARM)
7956 target = 0;
7957 /* If we are taking the address of something erroneous, just
7958 return a zero. */
7959 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7960 return const0_rtx;
7961 /* If we are taking the address of a constant and are at the
7962 top level, we have to use output_constant_def since we can't
7963 call force_const_mem at top level. */
7964 else if (cfun == 0
7965 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7966 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
7967 == 'c')))
7968 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
7969 else
7971 /* We make sure to pass const0_rtx down if we came in with
7972 ignore set, to avoid doing the cleanups twice for something. */
7973 op0 = expand_expr (TREE_OPERAND (exp, 0),
7974 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7975 (modifier == EXPAND_INITIALIZER
7976 ? modifier : EXPAND_CONST_ADDRESS));
7978 /* If we are going to ignore the result, OP0 will have been set
7979 to const0_rtx, so just return it. Don't get confused and
7980 think we are taking the address of the constant. */
7981 if (ignore)
7982 return op0;
7984 /* We would like the object in memory. If it is a constant, we can
7985 have it be statically allocated into memory. For a non-constant,
7986 we need to allocate some memory and store the value into it. */
7988 if (CONSTANT_P (op0))
7989 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7990 op0);
7991 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
7992 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
7993 || GET_CODE (op0) == LO_SUM)
7995 /* If this object is in a register, it can't be BLKmode. */
7996 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7997 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7999 if (GET_CODE (op0) == PARALLEL)
8000 /* Handle calls that pass values in multiple
8001 non-contiguous locations. The Irix 6 ABI has examples
8002 of this. */
8003 emit_group_store (memloc, op0, inner_type,
8004 int_size_in_bytes (inner_type));
8005 else
8006 emit_move_insn (memloc, op0);
8008 op0 = memloc;
8011 if (!MEM_P (op0))
8012 abort ();
8014 mark_temp_addr_taken (op0);
8015 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8017 op0 = XEXP (op0, 0);
8018 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8019 op0 = convert_memory_address (ptr_mode, op0);
8020 return op0;
8023      /* If OP0 is not aligned at least as much as the type requires, we
8024 need to make a temporary, copy OP0 to it, and take the address of
8025 the temporary. We want to use the alignment of the type, not of
8026 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8027 the test for BLKmode means that can't happen. The test for
8028 BLKmode is because we never make mis-aligned MEMs with
8029 non-BLKmode.
8031 We don't need to do this at all if the machine doesn't have
8032 strict alignment. */
8033 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8034 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8035 > MEM_ALIGN (op0))
8036 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8038 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8039 rtx new;
8041 if (TYPE_ALIGN_OK (inner_type))
8042 abort ();
8044 if (TREE_ADDRESSABLE (inner_type))
8046 /* We can't make a bitwise copy of this object, so fail. */
8047 error ("cannot take the address of an unaligned member");
8048 return const0_rtx;
8051 new = assign_stack_temp_for_type
8052 (TYPE_MODE (inner_type),
8053 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8054 : int_size_in_bytes (inner_type),
8055 1, build_qualified_type (inner_type,
8056 (TYPE_QUALS (inner_type)
8057 | TYPE_QUAL_CONST)));
8059 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8060 (modifier == EXPAND_STACK_PARM
8061 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8063 op0 = new;
8066 op0 = force_operand (XEXP (op0, 0), target);
8069 if (flag_force_addr
8070 && !REG_P (op0)
8071 && modifier != EXPAND_CONST_ADDRESS
8072 && modifier != EXPAND_INITIALIZER
8073 && modifier != EXPAND_SUM)
8074 op0 = force_reg (Pmode, op0);
8076 if (REG_P (op0)
8077 && ! REG_USERVAR_P (op0))
8078 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8080 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8081 op0 = convert_memory_address (ptr_mode, op0);
8083 return op0;
8085 /* COMPLEX type for Extended Pascal & Fortran */
8086 case COMPLEX_EXPR:
8088 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8089 rtx insns;
8091 /* Get the rtx code of the operands. */
8092 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8093 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8095 if (! target)
8096 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8098 start_sequence ();
8100 /* Move the real (op0) and imaginary (op1) parts to their location. */
8101 emit_move_insn (gen_realpart (mode, target), op0);
8102 emit_move_insn (gen_imagpart (mode, target), op1);
8104 insns = get_insns ();
8105 end_sequence ();
8107 /* Complex construction should appear as a single unit. */
8108 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8109 each with a separate pseudo as destination.
8110 It's not correct for flow to treat them as a unit. */
8111 if (GET_CODE (target) != CONCAT)
8112 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8113 else
8114 emit_insn (insns);
8116 return target;
8119 case REALPART_EXPR:
8120 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8121 return gen_realpart (mode, op0);
8123 case IMAGPART_EXPR:
8124 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8125 return gen_imagpart (mode, op0);
8127 case RESX_EXPR:
8128 expand_resx_expr (exp);
8129 return const0_rtx;
8131 case TRY_CATCH_EXPR:
8132 case CATCH_EXPR:
8133 case EH_FILTER_EXPR:
8134 case TRY_FINALLY_EXPR:
8135 /* Lowered by tree-eh.c. */
8136 abort ();
8138 case WITH_CLEANUP_EXPR:
8139 case CLEANUP_POINT_EXPR:
8140 case TARGET_EXPR:
8141 case CASE_LABEL_EXPR:
8142 case VA_ARG_EXPR:
8143 case BIND_EXPR:
8144 case INIT_EXPR:
8145 case CONJ_EXPR:
8146 case COMPOUND_EXPR:
8147 case PREINCREMENT_EXPR:
8148 case PREDECREMENT_EXPR:
8149 case POSTINCREMENT_EXPR:
8150 case POSTDECREMENT_EXPR:
8151 case LOOP_EXPR:
8152 case EXIT_EXPR:
8153 case LABELED_BLOCK_EXPR:
8154 case EXIT_BLOCK_EXPR:
8155 case TRUTH_ANDIF_EXPR:
8156 case TRUTH_ORIF_EXPR:
8157 /* Lowered by gimplify.c. */
8158 abort ();
8160 case EXC_PTR_EXPR:
8161 return get_exception_pointer (cfun);
8163 case FILTER_EXPR:
8164 return get_exception_filter (cfun);
8166 case FDESC_EXPR:
8167    case FDESC_EXPR:
8168      /* Function descriptors are not valid except as initialization
           constants, and should not be expanded.  */
8169 abort ();
8171 case SWITCH_EXPR:
8172 expand_case (exp);
8173 return const0_rtx;
8175 case LABEL_EXPR:
8176 expand_label (TREE_OPERAND (exp, 0));
8177 return const0_rtx;
8179 case ASM_EXPR:
8180 expand_asm_expr (exp);
8181 return const0_rtx;
8183 case WITH_SIZE_EXPR:
8184 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8185 have pulled out the size to use in whatever context it needed. */
8186 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8187 modifier, alt_rtl);
8189 default:
8190 return lang_hooks.expand_expr (exp, original_target, tmode,
8191 modifier, alt_rtl);
8194 /* Here to do an ordinary binary operator. */
8195 binop:
8196 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8197 subtarget, &op0, &op1, 0);
8198 binop2:
8199 this_optab = optab_for_tree_code (code, type);
8200 binop3:
8201 if (modifier == EXPAND_STACK_PARM)
8202 target = 0;
8203 temp = expand_binop (mode, this_optab, op0, op1, target,
8204 unsignedp, OPTAB_LIB_WIDEN);
8205 if (temp == 0)
8206 abort ();
8207 return REDUCE_BIT_FIELD (temp);
8209 #undef REDUCE_BIT_FIELD
8211 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8212 signedness of TYPE), possibly returning the result in TARGET. */
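/* For an unsigned type of precision 3 held in SImode this masks with 7;
   for a signed type it shifts left and then arithmetically right by 29
   bits so the value is sign-extended from bit 2.  */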
8213 static rtx
8214 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8216 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8217 if (target && GET_MODE (target) != GET_MODE (exp))
8218 target = 0;
8219 if (TYPE_UNSIGNED (type))
8221 rtx mask;
8222 if (prec < HOST_BITS_PER_WIDE_INT)
8223 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8224 GET_MODE (exp));
8225 else
8226 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8227 ((unsigned HOST_WIDE_INT) 1
8228 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8229 GET_MODE (exp));
8230 return expand_and (GET_MODE (exp), exp, mask, target);
8232 else
8234 tree count = build_int_cst (NULL_TREE,
8235 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8236 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8237 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8241 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8242 when applied to the address of EXP produces an address known to be
8243 aligned more than BIGGEST_ALIGNMENT. */
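/* The form recognized is essentially
     (BIT_AND_EXPR (NEGATE_EXPR (ADDR_EXPR EXP)) C)
   possibly wrapped in conversions, where C is larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT and C + 1 is a power of 2.  */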
8245 static int
8246 is_aligning_offset (tree offset, tree exp)
8248 /* Strip off any conversions. */
8249 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8250 || TREE_CODE (offset) == NOP_EXPR
8251 || TREE_CODE (offset) == CONVERT_EXPR)
8252 offset = TREE_OPERAND (offset, 0);
8254 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8255     a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */
8256 if (TREE_CODE (offset) != BIT_AND_EXPR
8257 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8258 || compare_tree_int (TREE_OPERAND (offset, 1),
8259 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8260      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8261 return 0;
8263 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8264 It must be NEGATE_EXPR. Then strip any more conversions. */
8265 offset = TREE_OPERAND (offset, 0);
8266 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8267 || TREE_CODE (offset) == NOP_EXPR
8268 || TREE_CODE (offset) == CONVERT_EXPR)
8269 offset = TREE_OPERAND (offset, 0);
8271 if (TREE_CODE (offset) != NEGATE_EXPR)
8272 return 0;
8274 offset = TREE_OPERAND (offset, 0);
8275 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8276 || TREE_CODE (offset) == NOP_EXPR
8277 || TREE_CODE (offset) == CONVERT_EXPR)
8278 offset = TREE_OPERAND (offset, 0);
8280 /* This must now be the address of EXP. */
8281 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8284 /* Return the tree node if an ARG corresponds to a string constant or zero
8285 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8286 in bytes within the string that ARG is accessing. The type of the
8287 offset will be `sizetype'. */
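/* For example, for ARG of the form &"hello"[2] or "hello" + 2 this
   returns the STRING_CST "hello" and sets *PTR_OFFSET to 2.  */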
8289 tree
8290 string_constant (tree arg, tree *ptr_offset)
8292 STRIP_NOPS (arg);
8294 if (TREE_CODE (arg) == ADDR_EXPR
8295 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8297 *ptr_offset = size_zero_node;
8298 return TREE_OPERAND (arg, 0);
8300 if (TREE_CODE (arg) == ADDR_EXPR
8301 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
8302 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
8304 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
8305 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8307 else if (TREE_CODE (arg) == PLUS_EXPR)
8309 tree arg0 = TREE_OPERAND (arg, 0);
8310 tree arg1 = TREE_OPERAND (arg, 1);
8312 STRIP_NOPS (arg0);
8313 STRIP_NOPS (arg1);
8315 if (TREE_CODE (arg0) == ADDR_EXPR
8316 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8318 *ptr_offset = convert (sizetype, arg1);
8319 return TREE_OPERAND (arg0, 0);
8321 else if (TREE_CODE (arg1) == ADDR_EXPR
8322 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8324 *ptr_offset = convert (sizetype, arg0);
8325 return TREE_OPERAND (arg1, 0);
8329 return 0;
8332 /* Generate code to calculate EXP using a store-flag instruction
8333 and return an rtx for the result. EXP is either a comparison
8334 or a TRUTH_NOT_EXPR whose operand is a comparison.
8336 If TARGET is nonzero, store the result there if convenient.
8338 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8339 cheap.
8341 Return zero if there is no suitable set-flag instruction
8342 available on this machine.
8344 Once expand_expr has been called on the arguments of the comparison,
8345 we are committed to doing the store flag, since it is not safe to
8346 re-evaluate the expression. We emit the store-flag insn by calling
8347 emit_store_flag, but only expand the arguments if we have a reason
8348 to believe that emit_store_flag will be successful. If we think that
8349 it will, but it isn't, we have to simulate the store-flag with a
8350 set/jump/set sequence. */
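/* The simulated sequence loads 1 into TARGET and branches past a store
   of 0 when the comparison holds, roughly
     target = 1; if (op0 <cond> op1) goto done; target = 0; done:
   with the two constants swapped when the result must be inverted.  */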
8352 static rtx
8353 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8355 enum rtx_code code;
8356 tree arg0, arg1, type;
8357 tree tem;
8358 enum machine_mode operand_mode;
8359 int invert = 0;
8360 int unsignedp;
8361 rtx op0, op1;
8362 enum insn_code icode;
8363 rtx subtarget = target;
8364 rtx result, label;
8366 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8367 result at the end. We can't simply invert the test since it would
8368 have already been inverted if it were valid. This case occurs for
8369 some floating-point comparisons. */
8371 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8372 invert = 1, exp = TREE_OPERAND (exp, 0);
8374 arg0 = TREE_OPERAND (exp, 0);
8375 arg1 = TREE_OPERAND (exp, 1);
8377 /* Don't crash if the comparison was erroneous. */
8378 if (arg0 == error_mark_node || arg1 == error_mark_node)
8379 return const0_rtx;
8381 type = TREE_TYPE (arg0);
8382 operand_mode = TYPE_MODE (type);
8383 unsignedp = TYPE_UNSIGNED (type);
8385 /* We won't bother with BLKmode store-flag operations because it would mean
8386 passing a lot of information to emit_store_flag. */
8387 if (operand_mode == BLKmode)
8388 return 0;
8390 /* We won't bother with store-flag operations involving function pointers
8391 when function pointers must be canonicalized before comparisons. */
8392 #ifdef HAVE_canonicalize_funcptr_for_compare
8393 if (HAVE_canonicalize_funcptr_for_compare
8394 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8395 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8396 == FUNCTION_TYPE))
8397 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8398 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8399 == FUNCTION_TYPE))))
8400 return 0;
8401 #endif
8403 STRIP_NOPS (arg0);
8404 STRIP_NOPS (arg1);
8406 /* Get the rtx comparison code to use. We know that EXP is a comparison
8407 operation of some type. Some comparisons against 1 and -1 can be
8408 converted to comparisons with zero. Do so here so that the tests
8409 below will be aware that we have a comparison with zero. These
8410 tests will not catch constants in the first operand, but constants
8411 are rarely passed as the first operand. */
8413 switch (TREE_CODE (exp))
8415 case EQ_EXPR:
8416 code = EQ;
8417 break;
8418 case NE_EXPR:
8419 code = NE;
8420 break;
8421 case LT_EXPR:
8422 if (integer_onep (arg1))
8423 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8424 else
8425 code = unsignedp ? LTU : LT;
8426 break;
8427 case LE_EXPR:
8428 if (! unsignedp && integer_all_onesp (arg1))
8429 arg1 = integer_zero_node, code = LT;
8430 else
8431 code = unsignedp ? LEU : LE;
8432 break;
8433 case GT_EXPR:
8434 if (! unsignedp && integer_all_onesp (arg1))
8435 arg1 = integer_zero_node, code = GE;
8436 else
8437 code = unsignedp ? GTU : GT;
8438 break;
8439 case GE_EXPR:
8440 if (integer_onep (arg1))
8441 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8442 else
8443 code = unsignedp ? GEU : GE;
8444 break;
8446 case UNORDERED_EXPR:
8447 code = UNORDERED;
8448 break;
8449 case ORDERED_EXPR:
8450 code = ORDERED;
8451 break;
8452 case UNLT_EXPR:
8453 code = UNLT;
8454 break;
8455 case UNLE_EXPR:
8456 code = UNLE;
8457 break;
8458 case UNGT_EXPR:
8459 code = UNGT;
8460 break;
8461 case UNGE_EXPR:
8462 code = UNGE;
8463 break;
8464 case UNEQ_EXPR:
8465 code = UNEQ;
8466 break;
8467 case LTGT_EXPR:
8468 code = LTGT;
8469 break;
8471 default:
8472 abort ();
8475 /* Put a constant second. */
8476 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8478 tem = arg0; arg0 = arg1; arg1 = tem;
8479 code = swap_condition (code);
8482 /* If this is an equality or inequality test of a single bit, we can
8483 do this by shifting the bit being tested to the low-order bit and
8484 masking the result with the constant 1. If the condition was EQ,
8485 we xor it with 1. This does not require an scc insn and is faster
8486 than an scc insn even if we have it.
8488 The code to make this transformation was moved into fold_single_bit_test,
8489 so we just call into the folder and expand its result. */
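  /* For example, (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
     becomes ((x >> 3) & 1) ^ 1.  */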
8491 if ((code == NE || code == EQ)
8492 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8493 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8495 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8496 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8497 arg0, arg1, type),
8498 target, VOIDmode, EXPAND_NORMAL);
8501 /* Now see if we are likely to be able to do this. Return if not. */
8502 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8503 return 0;
8505 icode = setcc_gen_code[(int) code];
8506 if (icode == CODE_FOR_nothing
8507 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8509 /* We can only do this if it is one of the special cases that
8510 can be handled without an scc insn. */
8511 if ((code == LT && integer_zerop (arg1))
8512 || (! only_cheap && code == GE && integer_zerop (arg1)))
8514 else if (BRANCH_COST >= 0
8515 && ! only_cheap && (code == NE || code == EQ)
8516 && TREE_CODE (type) != REAL_TYPE
8517 && ((abs_optab->handlers[(int) operand_mode].insn_code
8518 != CODE_FOR_nothing)
8519 || (ffs_optab->handlers[(int) operand_mode].insn_code
8520 != CODE_FOR_nothing)))
8522 else
8523 return 0;
8526 if (! get_subtarget (target)
8527 || GET_MODE (subtarget) != operand_mode)
8528 subtarget = 0;
8530 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8532 if (target == 0)
8533 target = gen_reg_rtx (mode);
8535 result = emit_store_flag (target, code, op0, op1,
8536 operand_mode, unsignedp, 1);
8538 if (result)
8540 if (invert)
8541 result = expand_binop (mode, xor_optab, result, const1_rtx,
8542 result, 0, OPTAB_LIB_WIDEN);
8543 return result;
8546 /* If this failed, we have to do this with set/compare/jump/set code. */
8547 if (!REG_P (target)
8548 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8549 target = gen_reg_rtx (GET_MODE (target));
8551 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8552 result = compare_from_rtx (op0, op1, code, unsignedp,
8553 operand_mode, NULL_RTX);
8554 if (GET_CODE (result) == CONST_INT)
8555 return (((result == const0_rtx && ! invert)
8556 || (result != const0_rtx && invert))
8557 ? const0_rtx : const1_rtx);
8559 /* The code of RESULT may not match CODE if compare_from_rtx
8560 decided to swap its operands and reverse the original code.
8562 We know that compare_from_rtx returns either a CONST_INT or
8563 a new comparison code, so it is safe to just extract the
8564 code from RESULT. */
8565 code = GET_CODE (result);
8567 label = gen_label_rtx ();
8568 if (bcc_gen_fctn[(int) code] == 0)
8569 abort ();
8571 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8572 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8573 emit_label (label);
8575 return target;
8579 /* Stubs in case we haven't got a casesi insn. */
8580 #ifndef HAVE_casesi
8581 # define HAVE_casesi 0
8582 # define gen_casesi(a, b, c, d, e) (0)
8583 # define CODE_FOR_casesi CODE_FOR_nothing
8584 #endif
8586 /* If the machine does not have a case insn that compares the bounds,
8587 this means extra overhead for dispatch tables, which raises the
8588 threshold for using them. */
8589 #ifndef CASE_VALUES_THRESHOLD
8590 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8591 #endif /* CASE_VALUES_THRESHOLD */
8593 unsigned int
8594 case_values_threshold (void)
8596 return CASE_VALUES_THRESHOLD;
8599 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8600 0 otherwise (i.e. if there is no casesi instruction). */
8602 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8603 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8605 enum machine_mode index_mode = SImode;
8606 int index_bits = GET_MODE_BITSIZE (index_mode);
8607 rtx op1, op2, index;
8608 enum machine_mode op_mode;
8610 if (! HAVE_casesi)
8611 return 0;
8613 /* Convert the index to SImode. */
8614 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8616 enum machine_mode omode = TYPE_MODE (index_type);
8617 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8619 /* We must handle the endpoints in the original mode. */
8620 index_expr = build2 (MINUS_EXPR, index_type,
8621 index_expr, minval);
8622 minval = integer_zero_node;
8623 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8624 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8625 omode, 1, default_label);
8626 /* Now we can safely truncate. */
8627 index = convert_to_mode (index_mode, index, 0);
8629 else
8631 if (TYPE_MODE (index_type) != index_mode)
8633 index_expr = convert (lang_hooks.types.type_for_size
8634 (index_bits, 0), index_expr);
8635 index_type = TREE_TYPE (index_expr);
8638 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8641 do_pending_stack_adjust ();
8643 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8644 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8645 (index, op_mode))
8646 index = copy_to_mode_reg (op_mode, index);
8648 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8650 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8651 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8652 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8653 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8654 (op1, op_mode))
8655 op1 = copy_to_mode_reg (op_mode, op1);
8657 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8659 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8660 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8661 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8662 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8663 (op2, op_mode))
8664 op2 = copy_to_mode_reg (op_mode, op2);
8666 emit_jump_insn (gen_casesi (index, op1, op2,
8667 table_label, default_label));
8668 return 1;
8671 /* Attempt to generate a tablejump instruction; same concept. */
8672 #ifndef HAVE_tablejump
8673 #define HAVE_tablejump 0
8674 #define gen_tablejump(x, y) (0)
8675 #endif
8677 /* Subroutine of the next function.
8679 INDEX is the value being switched on, with the lowest value
8680 in the table already subtracted.
8681 MODE is its expected mode (needed if INDEX is constant).
8682 RANGE is the length of the jump table.
8683 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8685 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8686 index value is out of range. */
8688 static void
8689 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8690 rtx default_label)
8692 rtx temp, vector;
8694 if (INTVAL (range) > cfun->max_jumptable_ents)
8695 cfun->max_jumptable_ents = INTVAL (range);
8697 /* Do an unsigned comparison (in the proper mode) between the index
8698 expression and the value which represents the length of the range.
8699 Since we just finished subtracting the lower bound of the range
8700 from the index expression, this comparison allows us to simultaneously
8701 check that the original index expression value is both greater than
8702 or equal to the minimum value of the range and less than or equal to
8703 the maximum value of the range. */
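  /* That is, the single unsigned test INDEX > RANGE, which is
     (unsigned) (original_index - low) > (high - low), catches both
     original_index < low and original_index > high.  */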
8705 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8706 default_label);
8708 /* If index is in range, it must fit in Pmode.
8709 Convert to Pmode so we can index with it. */
8710 if (mode != Pmode)
8711 index = convert_to_mode (Pmode, index, 1);
8713 /* Don't let a MEM slip through, because then INDEX that comes
8714 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8715 and break_out_memory_refs will go to work on it and mess it up. */
8716 #ifdef PIC_CASE_VECTOR_ADDRESS
8717 if (flag_pic && !REG_P (index))
8718 index = copy_to_mode_reg (Pmode, index);
8719 #endif
8721 /* If flag_force_addr were to affect this address
8722 it could interfere with the tricky assumptions made
8723 about addresses that contain label-refs,
8724 which may be valid only very near the tablejump itself. */
8725 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8726 GET_MODE_SIZE, because this indicates how large insns are. The other
8727 uses should all be Pmode, because they are addresses. This code
8728 could fail if addresses and insns are not the same size. */
8729 index = gen_rtx_PLUS (Pmode,
8730 gen_rtx_MULT (Pmode, index,
8731 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8732 gen_rtx_LABEL_REF (Pmode, table_label));
8733 #ifdef PIC_CASE_VECTOR_ADDRESS
8734 if (flag_pic)
8735 index = PIC_CASE_VECTOR_ADDRESS (index);
8736 else
8737 #endif
8738 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8739 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8740 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8741 convert_move (temp, vector, 0);
8743 emit_jump_insn (gen_tablejump (temp, table_label));
8745 /* If we are generating PIC code or if the table is PC-relative, the
8746 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8747 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8748 emit_barrier ();
8752 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8753 rtx table_label, rtx default_label)
8755 rtx index;
8757 if (! HAVE_tablejump)
8758 return 0;
8760 index_expr = fold (build2 (MINUS_EXPR, index_type,
8761 convert (index_type, index_expr),
8762 convert (index_type, minval)));
8763 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8764 do_pending_stack_adjust ();
8766 do_tablejump (index, TYPE_MODE (index_type),
8767 convert_modes (TYPE_MODE (index_type),
8768 TYPE_MODE (TREE_TYPE (range)),
8769 expand_expr (range, NULL_RTX,
8770 VOIDmode, 0),
8771 TYPE_UNSIGNED (TREE_TYPE (range))),
8772 table_label, default_label);
8773 return 1;
8776 /* Nonzero if the mode is a valid vector mode for this architecture.
8777 This returns nonzero even if there is no hardware support for the
8778 vector mode, but we can emulate with narrower modes. */
8781 vector_mode_valid_p (enum machine_mode mode)
8783 enum mode_class class = GET_MODE_CLASS (mode);
8784 enum machine_mode innermode;
8786 /* Doh! What's going on? */
8787 if (class != MODE_VECTOR_INT
8788 && class != MODE_VECTOR_FLOAT)
8789 return 0;
8791 /* Hardware support. Woo hoo! */
8792 if (targetm.vector_mode_supported_p (mode))
8793 return 1;
8795 innermode = GET_MODE_INNER (mode);
8797 /* We should probably return 1 if requesting V4DI and we have no DI,
8798     but we do have V2DI; that case is probably very unlikely, though.  */
8800 /* If we have support for the inner mode, we can safely emulate it.
8801     We may not have V2DI, but we can emulate with a pair of DIs.  */
8802 return targetm.scalar_mode_supported_p (innermode);
8805 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8806 static rtx
8807 const_vector_from_tree (tree exp)
8809 rtvec v;
8810 int units, i;
8811 tree link, elt;
8812 enum machine_mode inner, mode;
8814 mode = TYPE_MODE (TREE_TYPE (exp));
8816 if (initializer_zerop (exp))
8817 return CONST0_RTX (mode);
8819 units = GET_MODE_NUNITS (mode);
8820 inner = GET_MODE_INNER (mode);
8822 v = rtvec_alloc (units);
8824 link = TREE_VECTOR_CST_ELTS (exp);
8825 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8827 elt = TREE_VALUE (link);
8829 if (TREE_CODE (elt) == REAL_CST)
8830 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8831 inner);
8832 else
8833 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8834 TREE_INT_CST_HIGH (elt),
8835 inner);
8838 /* Initialize remaining elements to 0. */
8839 for (; i < units; ++i)
8840 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8842 return gen_rtx_raw_CONST_VECTOR (mode, v);
8844 #include "gt-expr.h"