Testcase from PR rtl-optimization/18611
[official-gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, int);
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
180 #endif
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
188 #endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
197 #endif
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
202 /* This array records the insn_code of insns to perform block clears. */
203 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
205 /* These arrays record the insn_code of two different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
212 #ifndef SLOW_UNALIGNED_ACCESS
213 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
214 #endif
216 /* This is run once per compilation to set up which modes can be used
217 directly in memory and to initialize the block move optab. */
219 void
220 init_expr_once (void)
222 rtx insn, pat;
223 enum machine_mode mode;
224 int num_clobbers;
225 rtx mem, mem1;
226 rtx reg;
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 /* A scratch register we can modify in-place below to avoid
235 useless RTL allocations. */
236 reg = gen_rtx_REG (VOIDmode, -1);
238 insn = rtx_alloc (INSN);
239 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
240 PATTERN (insn) = pat;
242 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
243 mode = (enum machine_mode) ((int) mode + 1))
245 int regno;
247 direct_load[(int) mode] = direct_store[(int) mode] = 0;
248 PUT_MODE (mem, mode);
249 PUT_MODE (mem1, mode);
250 PUT_MODE (reg, mode);
252 /* See if there is some register that can be used in this mode and
253 directly loaded or stored from memory. */
255 if (mode != VOIDmode && mode != BLKmode)
256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
257 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
258 regno++)
260 if (! HARD_REGNO_MODE_OK (regno, mode))
261 continue;
263 REGNO (reg) = regno;
265 SET_SRC (pat) = mem;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = mem1;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem1;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
287 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
290 mode = GET_MODE_WIDER_MODE (mode))
292 enum machine_mode srcmode;
293 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
294 srcmode = GET_MODE_WIDER_MODE (srcmode))
296 enum insn_code ic;
298 ic = can_extend_p (mode, srcmode, 0);
299 if (ic == CODE_FOR_nothing)
300 continue;
302 PUT_MODE (mem, srcmode);
304 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
305 float_extend_from_mem[mode][srcmode] = true;
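/* Purely illustrative: the probe-and-cache pattern init_expr_once uses,
   reduced to plain C.  A capability check runs once per "mode" and the answer
   is remembered in a flat table, just as direct_load[] and direct_store[]
   remember whether recog accepts a plain register<->memory move for each
   machine mode.  The probe below is a stand-in (a power-of-two size check),
   not GCC's recog-based test, and EXAMPLE_NUM_MODES is invented.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

#define EXAMPLE_NUM_MODES 16            /* stand-in for NUM_MACHINE_MODES */

static bool example_direct_move[EXAMPLE_NUM_MODES];

static bool
example_probe (int mode)
{
  size_t size = (size_t) mode;          /* pretend a mode's size is its index */
  return size != 0 && size <= sizeof (long) && (size & (size - 1)) == 0;
}

static void
example_init_once (void)
{
  for (int mode = 0; mode < EXAMPLE_NUM_MODES; mode++)
    example_direct_move[mode] = example_probe (mode);
}
#endif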
310 /* This is run at the start of compiling a function. */
312 void
313 init_expr (void)
315 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
318 /* Copy data from FROM to TO, where the machine modes are not the same.
319 Both modes may be integer, or both may be floating.
320 UNSIGNEDP should be nonzero if FROM is an unsigned type.
321 This causes zero-extension instead of sign-extension. */
323 void
324 convert_move (rtx to, rtx from, int unsignedp)
326 enum machine_mode to_mode = GET_MODE (to);
327 enum machine_mode from_mode = GET_MODE (from);
328 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
329 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
330 enum insn_code code;
331 rtx libcall;
333 /* rtx code for making an equivalent value. */
334 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
335 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
338 gcc_assert (to_real == from_real);
340 /* If the source and destination are already the same, then there's
341 nothing to do. */
342 if (to == from)
343 return;
345 /* If FROM is a SUBREG that indicates that we have already done at least
346 the required extension, strip it. We don't handle such SUBREGs as
347 TO here. */
349 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
350 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
351 >= GET_MODE_SIZE (to_mode))
352 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
353 from = gen_lowpart (to_mode, from), from_mode = to_mode;
355 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
357 if (to_mode == from_mode
358 || (from_mode == VOIDmode && CONSTANT_P (from)))
360 emit_move_insn (to, from);
361 return;
364 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
366 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
368 if (VECTOR_MODE_P (to_mode))
369 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
370 else
371 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
373 emit_move_insn (to, from);
374 return;
377 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
379 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
380 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
381 return;
384 if (to_real)
386 rtx value, insns;
387 convert_optab tab;
389 gcc_assert (GET_MODE_PRECISION (from_mode)
390 != GET_MODE_PRECISION (to_mode));
392 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 tab = sext_optab;
394 else
395 tab = trunc_optab;
397 /* Try converting directly if the insn is supported. */
399 code = tab->handlers[to_mode][from_mode].insn_code;
400 if (code != CODE_FOR_nothing)
402 emit_unop_insn (code, to, from,
403 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
404 return;
407 /* Otherwise use a libcall. */
408 libcall = tab->handlers[to_mode][from_mode].libfunc;
410 /* Is this conversion implemented yet? */
411 gcc_assert (libcall);
413 start_sequence ();
414 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
415 1, from, from_mode);
416 insns = get_insns ();
417 end_sequence ();
418 emit_libcall_block (insns, to, value,
419 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
420 from)
421 : gen_rtx_FLOAT_EXTEND (to_mode, from));
422 return;
425 /* Handle pointer conversion. */ /* SPEE 900220. */
426 /* Targets are expected to provide conversion insns between PxImode and
427 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
428 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
433 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
434 != CODE_FOR_nothing);
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
439 to, from, UNKNOWN);
440 return;
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
444 enum machine_mode full_mode
445 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
447 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
448 != CODE_FOR_nothing);
450 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
451 to, from, UNKNOWN);
452 if (to_mode == full_mode)
453 return;
455 /* else proceed to integer conversions below. */
456 from_mode = full_mode;
459 /* Now both modes are integers. */
461 /* Handle expanding beyond a word. */
462 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
463 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
465 rtx insns;
466 rtx lowpart;
467 rtx fill_value;
468 rtx lowfrom;
469 int i;
470 enum machine_mode lowpart_mode;
471 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
473 /* Try converting directly if the insn is supported. */
474 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
475 != CODE_FOR_nothing)
477 /* If FROM is a SUBREG, put it into a register. Do this
478 so that we always generate the same set of insns for
479 better cse'ing; if an intermediate assignment occurred,
480 we won't be doing the operation directly on the SUBREG. */
481 if (optimize > 0 && GET_CODE (from) == SUBREG)
482 from = force_reg (from_mode, from);
483 emit_unop_insn (code, to, from, equiv_code);
484 return;
486 /* Next, try converting via full word. */
487 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
488 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
489 != CODE_FOR_nothing))
491 if (REG_P (to))
493 if (reg_overlap_mentioned_p (to, from))
494 from = force_reg (from_mode, from);
495 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
497 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
498 emit_unop_insn (code, to,
499 gen_lowpart (word_mode, to), equiv_code);
500 return;
503 /* No special multiword conversion insn; do it by hand. */
504 start_sequence ();
506 /* Since we will turn this into a no conflict block, we must ensure
507 that the source does not overlap the target. */
509 if (reg_overlap_mentioned_p (to, from))
510 from = force_reg (from_mode, from);
512 /* Get a copy of FROM widened to a word, if necessary. */
513 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
514 lowpart_mode = word_mode;
515 else
516 lowpart_mode = from_mode;
518 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
520 lowpart = gen_lowpart (lowpart_mode, to);
521 emit_move_insn (lowpart, lowfrom);
523 /* Compute the value to put in each remaining word. */
524 if (unsignedp)
525 fill_value = const0_rtx;
526 else
528 #ifdef HAVE_slt
529 if (HAVE_slt
530 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
531 && STORE_FLAG_VALUE == -1)
533 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
534 lowpart_mode, 0);
535 fill_value = gen_reg_rtx (word_mode);
536 emit_insn (gen_slt (fill_value));
538 else
539 #endif
541 fill_value
542 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
543 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
544 NULL_RTX, 0);
545 fill_value = convert_to_mode (word_mode, fill_value, 1);
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
555 gcc_assert (subword);
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
561 insns = get_insns ();
562 end_sequence ();
564 emit_no_conflict_block (insns, to, from, NULL_RTX,
565 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
566 return;
569 /* Truncating multi-word to a word or less. */
570 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
571 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
573 if (!((MEM_P (from)
574 && ! MEM_VOLATILE_P (from)
575 && direct_load[(int) to_mode]
576 && ! mode_dependent_address_p (XEXP (from, 0)))
577 || REG_P (from)
578 || GET_CODE (from) == SUBREG))
579 from = force_reg (from_mode, from);
580 convert_move (to, gen_lowpart (word_mode, from), 0);
581 return;
584 /* Now follow all the conversions between integers
585 no more than a word long. */
587 /* For truncation, usually we can just refer to FROM in a narrower mode. */
588 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
589 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
590 GET_MODE_BITSIZE (from_mode)))
592 if (!((MEM_P (from)
593 && ! MEM_VOLATILE_P (from)
594 && direct_load[(int) to_mode]
595 && ! mode_dependent_address_p (XEXP (from, 0)))
596 || REG_P (from)
597 || GET_CODE (from) == SUBREG))
598 from = force_reg (from_mode, from);
599 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
600 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
601 from = copy_to_reg (from);
602 emit_move_insn (to, gen_lowpart (to_mode, from));
603 return;
606 /* Handle extension. */
607 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
609 /* Convert directly if that works. */
610 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
611 != CODE_FOR_nothing)
613 if (flag_force_mem)
614 from = force_not_mem (from);
616 emit_unop_insn (code, to, from, equiv_code);
617 return;
619 else
621 enum machine_mode intermediate;
622 rtx tmp;
623 tree shift_amount;
625 /* Search for a mode to convert via. */
626 for (intermediate = from_mode; intermediate != VOIDmode;
627 intermediate = GET_MODE_WIDER_MODE (intermediate))
628 if (((can_extend_p (to_mode, intermediate, unsignedp)
629 != CODE_FOR_nothing)
630 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
631 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
632 GET_MODE_BITSIZE (intermediate))))
633 && (can_extend_p (intermediate, from_mode, unsignedp)
634 != CODE_FOR_nothing))
636 convert_move (to, convert_to_mode (intermediate, from,
637 unsignedp), unsignedp);
638 return;
641 /* No suitable intermediate mode.
642 Generate what we need with shifts. */
643 shift_amount = build_int_cst (NULL_TREE,
644 GET_MODE_BITSIZE (to_mode)
645 - GET_MODE_BITSIZE (from_mode));
646 from = gen_lowpart (to_mode, force_reg (from_mode, from));
647 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
648 to, unsignedp);
649 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
650 to, unsignedp);
651 if (tmp != to)
652 emit_move_insn (to, tmp);
653 return;
657 /* Support special truncate insns for certain modes. */
658 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
660 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
661 to, from, UNKNOWN);
662 return;
665 /* Handle truncation of volatile memrefs, and so on;
666 the things that couldn't be truncated directly,
667 and for which there was no special instruction.
669 ??? Code above formerly short-circuited this, for most integer
670 mode pairs, with a force_reg in from_mode followed by a recursive
671 call to this routine. Appears always to have been wrong. */
672 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
674 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
675 emit_move_insn (to, temp);
676 return;
679 /* Mode combination is not recognized. */
680 gcc_unreachable ();
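/* For illustration only: the integer semantics convert_move arranges, shown
   on plain C integers rather than rtl.  Widening either sign- or zero-extends
   depending on UNSIGNEDP; narrowing keeps only the low-order bits, which is
   what the gen_lowpart paths above rely on.  The fixed 8/16/32-bit types are
   assumptions for the example, not tied to any particular machine mode.  */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  int16_t from = -2;                            /* bit pattern 0xFFFE */

  int32_t widened_signed = (int32_t) from;              /* sign-extends */
  uint32_t widened_unsigned = (uint32_t) (uint16_t) from;  /* zero-extends */
  int8_t narrowed = (int8_t) from;              /* low-order byte only */

  printf ("signed widen:   %d\n", (int) widened_signed);       /* -2 */
  printf ("unsigned widen: %u\n", (unsigned) widened_unsigned);  /* 65534 */
  printf ("narrowed:       %d\n", (int) narrowed);              /* -2 */
  return 0;
}
#endif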
683 /* Return an rtx for a value that would result
684 from converting X to mode MODE.
685 Both X and MODE may be floating, or both integer.
686 UNSIGNEDP is nonzero if X is an unsigned value.
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion. */
691 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
693 return convert_modes (mode, VOIDmode, x, unsignedp);
696 /* Return an rtx for a value that would result
697 from converting X from mode OLDMODE to mode MODE.
698 Both modes may be floating, or both integer.
699 UNSIGNEDP is nonzero if X is an unsigned value.
701 This can be done by referring to a part of X in place
702 or by copying to a new temporary with conversion.
704 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
707 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
709 rtx temp;
711 /* If FROM is a SUBREG that indicates that we have already done at least
712 the required extension, strip it. */
714 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
715 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
716 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
717 x = gen_lowpart (mode, x);
719 if (GET_MODE (x) != VOIDmode)
720 oldmode = GET_MODE (x);
722 if (mode == oldmode)
723 return x;
725 /* There is one case that we must handle specially: If we are converting
726 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
727 we are to interpret the constant as unsigned, gen_lowpart will do
728 the wrong thing if the constant appears negative. What we want to do is
729 make the high-order word of the constant zero, not all ones. */
731 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
732 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
733 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
735 HOST_WIDE_INT val = INTVAL (x);
737 if (oldmode != VOIDmode
738 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
740 int width = GET_MODE_BITSIZE (oldmode);
742 /* We need to zero extend VAL. */
743 val &= ((HOST_WIDE_INT) 1 << width) - 1;
746 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
754 if ((GET_CODE (x) == CONST_INT
755 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
756 || (GET_MODE_CLASS (mode) == MODE_INT
757 && GET_MODE_CLASS (oldmode) == MODE_INT
758 && (GET_CODE (x) == CONST_DOUBLE
759 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
760 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 && direct_load[(int) mode])
762 || (REG_P (x)
763 && (! HARD_REGISTER_P (x)
764 || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
766 GET_MODE_BITSIZE (GET_MODE (x)))))))))
768 /* ??? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
772 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
774 HOST_WIDE_INT val = INTVAL (x);
775 int width = GET_MODE_BITSIZE (oldmode);
777 /* We must sign or zero-extend in this case. Start by
778 zero-extending, then sign extend if we need to. */
779 val &= ((HOST_WIDE_INT) 1 << width) - 1;
780 if (! unsignedp
781 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
782 val |= (HOST_WIDE_INT) (-1) << width;
784 return gen_int_mode (val, mode);
787 return gen_lowpart (mode, x);
790 /* Converting an integer constant into a vector mode is always
791 equivalent to a subreg operation. */
792 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
794 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
795 return simplify_gen_subreg (mode, x, oldmode, 0);
798 temp = gen_reg_rtx (mode);
799 convert_move (temp, x, unsignedp);
800 return temp;
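/* For illustration only: the masking trick convert_modes uses when a
   CONST_INT that prints as negative must be read as an unsigned value of a
   narrower OLDMODE.  Keeping only the low WIDTH bits gives the value the
   wider mode should see.  HOST_WIDE_INT is modelled as long long and WIDTH
   as 16 bits purely for the example.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long long val = -1;                   /* all bits set */
  int width = 16;                       /* pretend OLDMODE is 16 bits wide */

  /* Same expression as "val &= ((HOST_WIDE_INT) 1 << width) - 1;" above.  */
  val &= ((long long) 1 << width) - 1;

  printf ("%lld\n", val);               /* prints 65535, not -1 */
  return 0;
}
#endif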
803 /* STORE_MAX_PIECES is the number of bytes at a time that we can
804 store efficiently. Due to internal GCC limitations, this is
805 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
806 for an immediate constant. */
808 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
810 /* Determine whether the LEN bytes can be moved by using several move
811 instructions. Return nonzero if a call to move_by_pieces should
812 succeed. */
815 can_move_by_pieces (unsigned HOST_WIDE_INT len,
816 unsigned int align ATTRIBUTE_UNUSED)
818 return MOVE_BY_PIECES_P (len, align);
821 /* Generate several move instructions to copy LEN bytes from block FROM to
822 block TO. (These are MEM rtx's with BLKmode).
824 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
825 used to push FROM to the stack.
827 ALIGN is maximum stack alignment we can assume.
829 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
830 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
831 stpcpy. */
834 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
835 unsigned int align, int endp)
837 struct move_by_pieces data;
838 rtx to_addr, from_addr = XEXP (from, 0);
839 unsigned int max_size = MOVE_MAX_PIECES + 1;
840 enum machine_mode mode = VOIDmode, tmode;
841 enum insn_code icode;
843 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
845 data.offset = 0;
846 data.from_addr = from_addr;
847 if (to)
849 to_addr = XEXP (to, 0);
850 data.to = to;
851 data.autinc_to
852 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
853 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
854 data.reverse
855 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
857 else
859 to_addr = NULL_RTX;
860 data.to = NULL_RTX;
861 data.autinc_to = 1;
862 #ifdef STACK_GROWS_DOWNWARD
863 data.reverse = 1;
864 #else
865 data.reverse = 0;
866 #endif
868 data.to_addr = to_addr;
869 data.from = from;
870 data.autinc_from
871 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
872 || GET_CODE (from_addr) == POST_INC
873 || GET_CODE (from_addr) == POST_DEC);
875 data.explicit_inc_from = 0;
876 data.explicit_inc_to = 0;
877 if (data.reverse) data.offset = len;
878 data.len = len;
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data.autinc_from && data.autinc_to)
884 && move_by_pieces_ninsns (len, align, max_size) > 2)
886 /* Find the mode of the largest move... */
887 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
888 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
889 if (GET_MODE_SIZE (tmode) < max_size)
890 mode = tmode;
892 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
894 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
895 data.autinc_from = 1;
896 data.explicit_inc_from = -1;
898 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
900 data.from_addr = copy_addr_to_reg (from_addr);
901 data.autinc_from = 1;
902 data.explicit_inc_from = 1;
904 if (!data.autinc_from && CONSTANT_P (from_addr))
905 data.from_addr = copy_addr_to_reg (from_addr);
906 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
908 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
909 data.autinc_to = 1;
910 data.explicit_inc_to = -1;
912 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
914 data.to_addr = copy_addr_to_reg (to_addr);
915 data.autinc_to = 1;
916 data.explicit_inc_to = 1;
918 if (!data.autinc_to && CONSTANT_P (to_addr))
919 data.to_addr = copy_addr_to_reg (to_addr);
922 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
923 if (align >= GET_MODE_ALIGNMENT (tmode))
924 align = GET_MODE_ALIGNMENT (tmode);
925 else
927 enum machine_mode xmode;
929 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
930 tmode != VOIDmode;
931 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
932 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
933 || SLOW_UNALIGNED_ACCESS (tmode, align))
934 break;
936 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
939 /* First move what we can in the largest integer mode, then go to
940 successively smaller modes. */
942 while (max_size > 1)
944 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
945 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
946 if (GET_MODE_SIZE (tmode) < max_size)
947 mode = tmode;
949 if (mode == VOIDmode)
950 break;
952 icode = mov_optab->handlers[(int) mode].insn_code;
953 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
954 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
956 max_size = GET_MODE_SIZE (mode);
959 /* The code above should have handled everything. */
960 gcc_assert (!data.len);
962 if (endp)
964 rtx to1;
966 gcc_assert (!data.reverse);
967 if (data.autinc_to)
969 if (endp == 2)
971 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
972 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
973 else
974 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
975 -1));
977 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
978 data.offset);
980 else
982 if (endp == 2)
983 --data.offset;
984 to1 = adjust_address (data.to, QImode, data.offset);
986 return to1;
988 else
989 return data.to;
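/* For illustration only: the shape of the copy move_by_pieces emits, written
   as a plain C loop over byte buffers instead of RTL.  The largest piece the
   alignment allows goes first, then progressively smaller ones, mirroring the
   "while (max_size > 1)" structure above.  The 8/4/2/1 piece sizes are
   illustrative; GCC picks the widest integer modes the target can move.  */
#if 0
#include <stddef.h>
#include <stdint.h>
#include <string.h>

static void
copy_by_pieces_example (unsigned char *to, const unsigned char *from,
                        size_t len)
{
  static const size_t pieces[] = { sizeof (uint64_t), sizeof (uint32_t),
                                   sizeof (uint16_t), 1 };

  for (size_t i = 0; i < sizeof pieces / sizeof pieces[0]; i++)
    while (len >= pieces[i])
      {
        /* One "move insn" of this piece size; memcpy stands in for the
           mode-sized load/store pair.  */
        memcpy (to, from, pieces[i]);
        to += pieces[i];
        from += pieces[i];
        len -= pieces[i];
      }
}
#endif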
992 /* Return number of insns required to move L bytes by pieces.
993 ALIGN (in bits) is maximum alignment we can assume. */
995 static unsigned HOST_WIDE_INT
996 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
997 unsigned int max_size)
999 unsigned HOST_WIDE_INT n_insns = 0;
1000 enum machine_mode tmode;
1002 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1003 if (align >= GET_MODE_ALIGNMENT (tmode))
1004 align = GET_MODE_ALIGNMENT (tmode);
1005 else
1007 enum machine_mode tmode, xmode;
1009 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1010 tmode != VOIDmode;
1011 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1012 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1013 || SLOW_UNALIGNED_ACCESS (tmode, align))
1014 break;
1016 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1019 while (max_size > 1)
1021 enum machine_mode mode = VOIDmode;
1022 enum insn_code icode;
1024 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1025 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1026 if (GET_MODE_SIZE (tmode) < max_size)
1027 mode = tmode;
1029 if (mode == VOIDmode)
1030 break;
1032 icode = mov_optab->handlers[(int) mode].insn_code;
1033 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1034 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1036 max_size = GET_MODE_SIZE (mode);
1039 gcc_assert (!l);
1040 return n_insns;
1043 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1044 with move instructions for mode MODE. GENFUN is the gen_... function
1045 to make a move insn for that mode. DATA has all the other info. */
1047 static void
1048 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1049 struct move_by_pieces *data)
1051 unsigned int size = GET_MODE_SIZE (mode);
1052 rtx to1 = NULL_RTX, from1;
1054 while (data->len >= size)
1056 if (data->reverse)
1057 data->offset -= size;
1059 if (data->to)
1061 if (data->autinc_to)
1062 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1063 data->offset);
1064 else
1065 to1 = adjust_address (data->to, mode, data->offset);
1068 if (data->autinc_from)
1069 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1070 data->offset);
1071 else
1072 from1 = adjust_address (data->from, mode, data->offset);
1074 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1075 emit_insn (gen_add2_insn (data->to_addr,
1076 GEN_INT (-(HOST_WIDE_INT)size)));
1077 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1078 emit_insn (gen_add2_insn (data->from_addr,
1079 GEN_INT (-(HOST_WIDE_INT)size)));
1081 if (data->to)
1082 emit_insn ((*genfun) (to1, from1));
1083 else
1085 #ifdef PUSH_ROUNDING
1086 emit_single_push_insn (mode, from1, NULL);
1087 #else
1088 gcc_unreachable ();
1089 #endif
1092 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1093 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1094 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1095 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1097 if (! data->reverse)
1098 data->offset += size;
1100 data->len -= size;
1104 /* Emit code to move a block Y to a block X. This may be done with
1105 string-move instructions, with multiple scalar move instructions,
1106 or with a library call.
1108 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1109 SIZE is an rtx that says how long they are.
1110 ALIGN is the maximum alignment we can assume they have.
1111 METHOD describes what kind of copy this is, and what mechanisms may be used.
1113 Return the address of the new block, if memcpy is called and returns it,
1114 0 otherwise. */
1117 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1119 bool may_use_call;
1120 rtx retval = 0;
1121 unsigned int align;
1123 switch (method)
1125 case BLOCK_OP_NORMAL:
1126 may_use_call = true;
1127 break;
1129 case BLOCK_OP_CALL_PARM:
1130 may_use_call = block_move_libcall_safe_for_call_parm ();
1132 /* Make inhibit_defer_pop nonzero around the library call
1133 to force it to pop the arguments right away. */
1134 NO_DEFER_POP;
1135 break;
1137 case BLOCK_OP_NO_LIBCALL:
1138 may_use_call = false;
1139 break;
1141 default:
1142 gcc_unreachable ();
1145 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1147 gcc_assert (MEM_P (x));
1148 gcc_assert (MEM_P (y));
1149 gcc_assert (size);
1151 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1152 block copy is more efficient for other large modes, e.g. DCmode. */
1153 x = adjust_address (x, BLKmode, 0);
1154 y = adjust_address (y, BLKmode, 0);
1156 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1157 can be incorrect is coming from __builtin_memcpy. */
1158 if (GET_CODE (size) == CONST_INT)
1160 if (INTVAL (size) == 0)
1161 return 0;
1163 x = shallow_copy_rtx (x);
1164 y = shallow_copy_rtx (y);
1165 set_mem_size (x, size);
1166 set_mem_size (y, size);
1169 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1170 move_by_pieces (x, y, INTVAL (size), align, 0);
1171 else if (emit_block_move_via_movmem (x, y, size, align))
1173 else if (may_use_call)
1174 retval = emit_block_move_via_libcall (x, y, size);
1175 else
1176 emit_block_move_via_loop (x, y, size, align);
1178 if (method == BLOCK_OP_CALL_PARM)
1179 OK_DEFER_POP;
1181 return retval;
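/* For illustration only: the decision ladder emit_block_move walks, restated
   as a plain C dispatcher.  A small constant size is copied by pieces, then a
   target block-move instruction is tried, then a library call when the caller
   permits one, and finally the explicit byte loop.  The threshold and helper
   names here are invented for the sketch, not GCC interfaces.  */
#if 0
#include <stdbool.h>
#include <stddef.h>
#include <string.h>

#define EXAMPLE_PIECES_THRESHOLD 32  /* stand-in for the MOVE_BY_PIECES_P test */

static bool
try_target_block_move (void *dst, const void *src, size_t n)
{
  (void) dst; (void) src; (void) n;
  return false;                         /* pretend no movmem pattern matched */
}

static void
block_move_example (void *dst, const void *src, size_t n, bool may_use_call)
{
  if (n <= EXAMPLE_PIECES_THRESHOLD)
    {
      memcpy (dst, src, n);    /* move_by_pieces: open-coded scalar moves */
      return;
    }
  if (try_target_block_move (dst, src, n))
    return;                             /* movmem expander succeeded */
  if (may_use_call)
    {
      memcpy (dst, src, n);             /* the libcall path really calls memcpy */
      return;
    }
  /* Last resort, cf. emit_block_move_via_loop.  */
  for (size_t i = 0; i < n; i++)
    ((unsigned char *) dst)[i] = ((const unsigned char *) src)[i];
}
#endif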
1184 /* A subroutine of emit_block_move. Returns true if calling the
1185 block move libcall will not clobber any parameters which may have
1186 already been placed on the stack. */
1188 static bool
1189 block_move_libcall_safe_for_call_parm (void)
1191 /* If arguments are pushed on the stack, then they're safe. */
1192 if (PUSH_ARGS)
1193 return true;
1195 /* If registers go on the stack anyway, any argument is sure to clobber
1196 an outgoing argument. */
1197 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1199 tree fn = emit_block_move_libcall_fn (false);
1200 (void) fn;
1201 if (REG_PARM_STACK_SPACE (fn) != 0)
1202 return false;
1204 #endif
1206 /* If any argument goes in memory, then it might clobber an outgoing
1207 argument. */
1209 CUMULATIVE_ARGS args_so_far;
1210 tree fn, arg;
1212 fn = emit_block_move_libcall_fn (false);
1213 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1215 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1216 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1218 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1219 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1220 if (!tmp || !REG_P (tmp))
1221 return false;
1222 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1223 NULL_TREE, 1))
1224 return false;
1225 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1228 return true;
1231 /* A subroutine of emit_block_move. Expand a movmem pattern;
1232 return true if successful. */
1234 static bool
1235 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1237 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1238 int save_volatile_ok = volatile_ok;
1239 enum machine_mode mode;
1241 /* Since this is a move insn, we don't care about volatility. */
1242 volatile_ok = 1;
1244 /* Try the most limited insn first, because there's no point
1245 including more than one in the machine description unless
1246 the more limited one has some advantage. */
1248 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1249 mode = GET_MODE_WIDER_MODE (mode))
1251 enum insn_code code = movmem_optab[(int) mode];
1252 insn_operand_predicate_fn pred;
1254 if (code != CODE_FOR_nothing
1255 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1256 here because if SIZE is less than the mode mask, as it is
1257 returned by the macro, it will definitely be less than the
1258 actual mode mask. */
1259 && ((GET_CODE (size) == CONST_INT
1260 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1261 <= (GET_MODE_MASK (mode) >> 1)))
1262 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1263 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1264 || (*pred) (x, BLKmode))
1265 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1266 || (*pred) (y, BLKmode))
1267 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1268 || (*pred) (opalign, VOIDmode)))
1270 rtx op2;
1271 rtx last = get_last_insn ();
1272 rtx pat;
1274 op2 = convert_to_mode (mode, size, 1);
1275 pred = insn_data[(int) code].operand[2].predicate;
1276 if (pred != 0 && ! (*pred) (op2, mode))
1277 op2 = copy_to_mode_reg (mode, op2);
1279 /* ??? When called via emit_block_move_for_call, it'd be
1280 nice if there were some way to inform the backend, so
1281 that it doesn't fail the expansion because it thinks
1282 emitting the libcall would be more efficient. */
1284 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1285 if (pat)
1287 emit_insn (pat);
1288 volatile_ok = save_volatile_ok;
1289 return true;
1291 else
1292 delete_insns_since (last);
1296 volatile_ok = save_volatile_ok;
1297 return false;
1300 /* A subroutine of emit_block_move. Expand a call to memcpy.
1301 Return the return value from memcpy, 0 otherwise. */
1303 static rtx
1304 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1306 rtx dst_addr, src_addr;
1307 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1308 enum machine_mode size_mode;
1309 rtx retval;
1311 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1312 pseudos. We can then place those new pseudos into a VAR_DECL and
1313 use them later. */
1315 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1316 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1318 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1319 src_addr = convert_memory_address (ptr_mode, src_addr);
1321 dst_tree = make_tree (ptr_type_node, dst_addr);
1322 src_tree = make_tree (ptr_type_node, src_addr);
1324 size_mode = TYPE_MODE (sizetype);
1326 size = convert_to_mode (size_mode, size, 1);
1327 size = copy_to_mode_reg (size_mode, size);
1329 /* It is incorrect to use the libcall calling conventions to call
1330 memcpy in this context. This could be a user call to memcpy and
1331 the user may wish to examine the return value from memcpy. For
1332 targets where libcalls and normal calls have different conventions
1333 for returning pointers, we could end up generating incorrect code. */
1335 size_tree = make_tree (sizetype, size);
1337 fn = emit_block_move_libcall_fn (true);
1338 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1339 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1340 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1342 /* Now we have to build up the CALL_EXPR itself. */
1343 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1344 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1345 call_expr, arg_list, NULL_TREE);
1347 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1349 return retval;
1352 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1353 for the function we use for block copies. The first time FOR_CALL
1354 is true, we call assemble_external. */
1356 static GTY(()) tree block_move_fn;
1358 void
1359 init_block_move_fn (const char *asmspec)
1361 if (!block_move_fn)
1363 tree args, fn;
1365 fn = get_identifier ("memcpy");
1366 args = build_function_type_list (ptr_type_node, ptr_type_node,
1367 const_ptr_type_node, sizetype,
1368 NULL_TREE);
1370 fn = build_decl (FUNCTION_DECL, fn, args);
1371 DECL_EXTERNAL (fn) = 1;
1372 TREE_PUBLIC (fn) = 1;
1373 DECL_ARTIFICIAL (fn) = 1;
1374 TREE_NOTHROW (fn) = 1;
1376 block_move_fn = fn;
1379 if (asmspec)
1380 set_user_assembler_name (block_move_fn, asmspec);
1383 static tree
1384 emit_block_move_libcall_fn (int for_call)
1386 static bool emitted_extern;
1388 if (!block_move_fn)
1389 init_block_move_fn (NULL);
1391 if (for_call && !emitted_extern)
1393 emitted_extern = true;
1394 make_decl_rtl (block_move_fn);
1395 assemble_external (block_move_fn);
1398 return block_move_fn;
1401 /* A subroutine of emit_block_move. Copy the data via an explicit
1402 loop. This is used only when libcalls are forbidden. */
1403 /* ??? It'd be nice to copy in hunks larger than QImode. */
1405 static void
1406 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1407 unsigned int align ATTRIBUTE_UNUSED)
1409 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1410 enum machine_mode iter_mode;
1412 iter_mode = GET_MODE (size);
1413 if (iter_mode == VOIDmode)
1414 iter_mode = word_mode;
1416 top_label = gen_label_rtx ();
1417 cmp_label = gen_label_rtx ();
1418 iter = gen_reg_rtx (iter_mode);
1420 emit_move_insn (iter, const0_rtx);
1422 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1423 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1424 do_pending_stack_adjust ();
1426 emit_jump (cmp_label);
1427 emit_label (top_label);
1429 tmp = convert_modes (Pmode, iter_mode, iter, true);
1430 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1431 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1432 x = change_address (x, QImode, x_addr);
1433 y = change_address (y, QImode, y_addr);
1435 emit_move_insn (x, y);
1437 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1438 true, OPTAB_LIB_WIDEN);
1439 if (tmp != iter)
1440 emit_move_insn (iter, tmp);
1442 emit_label (cmp_label);
1444 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1445 true, top_label);
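/* For illustration only: the control flow of the RTL emit_block_move_via_loop
   generates, written as ordinary C.  The iterator starts at zero, control
   jumps straight to the comparison, and the loop body copies one byte per
   iteration -- the same jump-to-test shape as the cmp_label/top_label pair
   above.  */
#if 0
#include <stddef.h>

static void
block_move_loop_example (unsigned char *x, const unsigned char *y, size_t size)
{
  size_t iter = 0;

  goto cmp_label;                       /* emit_jump (cmp_label) */

 top_label:                             /* emit_label (top_label) */
  x[iter] = y[iter];                    /* the QImode move at x_addr/y_addr */
  iter++;

 cmp_label:                             /* emit_label (cmp_label) */
  if (iter < size)                      /* emit_cmp_and_jump_insns (..., LT, ...) */
    goto top_label;
}
#endif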
1448 /* Copy all or part of a value X into registers starting at REGNO.
1449 The number of registers to be filled is NREGS. */
1451 void
1452 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1454 int i;
1455 #ifdef HAVE_load_multiple
1456 rtx pat;
1457 rtx last;
1458 #endif
1460 if (nregs == 0)
1461 return;
1463 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1464 x = validize_mem (force_const_mem (mode, x));
1466 /* See if the machine can do this with a load multiple insn. */
1467 #ifdef HAVE_load_multiple
1468 if (HAVE_load_multiple)
1470 last = get_last_insn ();
1471 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1472 GEN_INT (nregs));
1473 if (pat)
1475 emit_insn (pat);
1476 return;
1478 else
1479 delete_insns_since (last);
1481 #endif
1483 for (i = 0; i < nregs; i++)
1484 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1485 operand_subword_force (x, i, mode));
1488 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1489 The number of registers to be filled is NREGS. */
1491 void
1492 move_block_from_reg (int regno, rtx x, int nregs)
1494 int i;
1496 if (nregs == 0)
1497 return;
1499 /* See if the machine can do this with a store multiple insn. */
1500 #ifdef HAVE_store_multiple
1501 if (HAVE_store_multiple)
1503 rtx last = get_last_insn ();
1504 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1505 GEN_INT (nregs));
1506 if (pat)
1508 emit_insn (pat);
1509 return;
1511 else
1512 delete_insns_since (last);
1514 #endif
1516 for (i = 0; i < nregs; i++)
1518 rtx tem = operand_subword (x, i, 1, BLKmode);
1520 gcc_assert (tem);
1522 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1526 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1527 ORIG, where ORIG is a non-consecutive group of registers represented by
1528 a PARALLEL. The clone is identical to the original except in that the
1529 original set of registers is replaced by a new set of pseudo registers.
1530 The new set has the same modes as the original set. */
1533 gen_group_rtx (rtx orig)
1535 int i, length;
1536 rtx *tmps;
1538 gcc_assert (GET_CODE (orig) == PARALLEL);
1540 length = XVECLEN (orig, 0);
1541 tmps = alloca (sizeof (rtx) * length);
1543 /* Skip a NULL entry in first slot. */
1544 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1546 if (i)
1547 tmps[0] = 0;
1549 for (; i < length; i++)
1551 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1552 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1554 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1557 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1560 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1561 except that values are placed in TMPS[i], and must later be moved
1562 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1564 static void
1565 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1567 rtx src;
1568 int start, i;
1569 enum machine_mode m = GET_MODE (orig_src);
1571 gcc_assert (GET_CODE (dst) == PARALLEL);
1573 if (!SCALAR_INT_MODE_P (m)
1574 && !MEM_P (orig_src) && GET_CODE (orig_src) != CONCAT)
1576 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1577 if (imode == BLKmode)
1578 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1579 else
1580 src = gen_reg_rtx (imode);
1581 if (imode != BLKmode)
1582 src = gen_lowpart (GET_MODE (orig_src), src);
1583 emit_move_insn (src, orig_src);
1584 /* ...and back again. */
1585 if (imode != BLKmode)
1586 src = gen_lowpart (imode, src);
1587 emit_group_load_1 (tmps, dst, src, type, ssize);
1588 return;
1591 /* Check for a NULL entry, used to indicate that the parameter goes
1592 both on the stack and in registers. */
1593 if (XEXP (XVECEXP (dst, 0, 0), 0))
1594 start = 0;
1595 else
1596 start = 1;
1598 /* Process the pieces. */
1599 for (i = start; i < XVECLEN (dst, 0); i++)
1601 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1602 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1603 unsigned int bytelen = GET_MODE_SIZE (mode);
1604 int shift = 0;
1606 /* Handle trailing fragments that run over the size of the struct. */
1607 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1609 /* Arrange to shift the fragment to where it belongs.
1610 extract_bit_field loads to the lsb of the reg. */
1611 if (
1612 #ifdef BLOCK_REG_PADDING
1613 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1614 == (BYTES_BIG_ENDIAN ? upward : downward)
1615 #else
1616 BYTES_BIG_ENDIAN
1617 #endif
1619 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1620 bytelen = ssize - bytepos;
1621 gcc_assert (bytelen > 0);
1624 /* If we won't be loading directly from memory, protect the real source
1625 from strange tricks we might play; but make sure that the source can
1626 be loaded directly into the destination. */
1627 src = orig_src;
1628 if (!MEM_P (orig_src)
1629 && (!CONSTANT_P (orig_src)
1630 || (GET_MODE (orig_src) != mode
1631 && GET_MODE (orig_src) != VOIDmode)))
1633 if (GET_MODE (orig_src) == VOIDmode)
1634 src = gen_reg_rtx (mode);
1635 else
1636 src = gen_reg_rtx (GET_MODE (orig_src));
1638 emit_move_insn (src, orig_src);
1641 /* Optimize the access just a bit. */
1642 if (MEM_P (src)
1643 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1644 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1645 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1646 && bytelen == GET_MODE_SIZE (mode))
1648 tmps[i] = gen_reg_rtx (mode);
1649 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1651 else if (GET_CODE (src) == CONCAT)
1653 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1654 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1656 if ((bytepos == 0 && bytelen == slen0)
1657 || (bytepos != 0 && bytepos + bytelen <= slen))
1659 /* The following assumes that the concatenated objects all
1660 have the same size. In this case, a simple calculation
1661 can be used to determine the object and the bit field
1662 to be extracted. */
1663 tmps[i] = XEXP (src, bytepos / slen0);
1664 if (! CONSTANT_P (tmps[i])
1665 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1666 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1667 (bytepos % slen0) * BITS_PER_UNIT,
1668 1, NULL_RTX, mode, mode);
1670 else
1672 rtx mem;
1674 gcc_assert (!bytepos);
1675 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1676 emit_move_insn (mem, src);
1677 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1678 0, 1, NULL_RTX, mode, mode);
1681 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1682 SIMD register, which is currently broken. While we get GCC
1683 to emit proper RTL for these cases, let's dump to memory. */
1684 else if (VECTOR_MODE_P (GET_MODE (dst))
1685 && REG_P (src))
1687 int slen = GET_MODE_SIZE (GET_MODE (src));
1688 rtx mem;
1690 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1691 emit_move_insn (mem, src);
1692 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1694 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1695 && XVECLEN (dst, 0) > 1)
1696 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1697 else if (CONSTANT_P (src)
1698 || (REG_P (src) && GET_MODE (src) == mode))
1699 tmps[i] = src;
1700 else
1701 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1702 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1703 mode, mode);
1705 if (shift)
1706 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1707 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1711 /* Emit code to move a block SRC of type TYPE to a block DST,
1712 where DST is non-consecutive registers represented by a PARALLEL.
1713 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1714 if not known. */
1716 void
1717 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1719 rtx *tmps;
1720 int i;
1722 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1723 emit_group_load_1 (tmps, dst, src, type, ssize);
1725 /* Copy the extracted pieces into the proper (probable) hard regs. */
1726 for (i = 0; i < XVECLEN (dst, 0); i++)
1728 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1729 if (d == NULL)
1730 continue;
1731 emit_move_insn (d, tmps[i]);
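/* Purely illustrative: what emit_group_load arranges for the common memory
   case, on plain buffers.  Each destination "register" receives BYTELEN bytes
   of the source taken at BYTEPOS; the pieces are first extracted into
   temporaries and only then copied into the (probable) hard registers.  The
   piece table is an assumption standing in for a PARALLEL, each piece is
   assumed to fit in 64 bits, and endianness and trailing-fragment shifts are
   ignored here.  */
#if 0
#include <stddef.h>
#include <stdint.h>
#include <string.h>

struct piece { size_t bytepos; size_t bytelen; };

static void
group_load_example (uint64_t *regs, const unsigned char *src,
                    const struct piece *pieces, size_t n_pieces)
{
  for (size_t i = 0; i < n_pieces; i++)
    {
      uint64_t tmp = 0;              /* tmps[i]: extract into a temporary... */
      memcpy (&tmp, src + pieces[i].bytepos, pieces[i].bytelen);
      regs[i] = tmp;                 /* ...then move the temporary into the reg */
    }
}
#endif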
1735 /* Similar, but load SRC into new pseudos in a format that looks like
1736 PARALLEL. This can later be fed to emit_group_move to get things
1737 in the right place. */
1740 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1742 rtvec vec;
1743 int i;
1745 vec = rtvec_alloc (XVECLEN (parallel, 0));
1746 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1748 /* Convert the vector to look just like the original PARALLEL, except
1749 with the computed values. */
1750 for (i = 0; i < XVECLEN (parallel, 0); i++)
1752 rtx e = XVECEXP (parallel, 0, i);
1753 rtx d = XEXP (e, 0);
1755 if (d)
1757 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1758 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1760 RTVEC_ELT (vec, i) = e;
1763 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1766 /* Emit code to move a block SRC to block DST, where SRC and DST are
1767 non-consecutive groups of registers, each represented by a PARALLEL. */
1769 void
1770 emit_group_move (rtx dst, rtx src)
1772 int i;
1774 gcc_assert (GET_CODE (src) == PARALLEL
1775 && GET_CODE (dst) == PARALLEL
1776 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1778 /* Skip first entry if NULL. */
1779 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1780 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1781 XEXP (XVECEXP (src, 0, i), 0));
1784 /* Move a group of registers represented by a PARALLEL into pseudos. */
1787 emit_group_move_into_temps (rtx src)
1789 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1790 int i;
1792 for (i = 0; i < XVECLEN (src, 0); i++)
1794 rtx e = XVECEXP (src, 0, i);
1795 rtx d = XEXP (e, 0);
1797 if (d)
1798 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1799 RTVEC_ELT (vec, i) = e;
1802 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1805 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1806 where SRC is non-consecutive registers represented by a PARALLEL.
1807 SSIZE represents the total size of block ORIG_DST, or -1 if not
1808 known. */
1810 void
1811 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1813 rtx *tmps, dst;
1814 int start, i;
1815 enum machine_mode m = GET_MODE (orig_dst);
1817 gcc_assert (GET_CODE (src) == PARALLEL);
1819 if (!SCALAR_INT_MODE_P (m)
1820 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1822 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1823 if (imode == BLKmode)
1824 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1825 else
1826 dst = gen_reg_rtx (imode);
1827 emit_group_store (dst, src, type, ssize);
1828 if (imode != BLKmode)
1829 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1830 emit_move_insn (orig_dst, dst);
1831 return;
1834 /* Check for a NULL entry, used to indicate that the parameter goes
1835 both on the stack and in registers. */
1836 if (XEXP (XVECEXP (src, 0, 0), 0))
1837 start = 0;
1838 else
1839 start = 1;
1841 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1843 /* Copy the (probable) hard regs into pseudos. */
1844 for (i = start; i < XVECLEN (src, 0); i++)
1846 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1847 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1848 emit_move_insn (tmps[i], reg);
1851 /* If we won't be storing directly into memory, protect the real destination
1852 from strange tricks we might play. */
1853 dst = orig_dst;
1854 if (GET_CODE (dst) == PARALLEL)
1856 rtx temp;
1858 /* We can get a PARALLEL dst if there is a conditional expression in
1859 a return statement. In that case, the dst and src are the same,
1860 so no action is necessary. */
1861 if (rtx_equal_p (dst, src))
1862 return;
1864 /* It is unclear if we can ever reach here, but we may as well handle
1865 it. Allocate a temporary, and split this into a store/load to/from
1866 the temporary. */
1868 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1869 emit_group_store (temp, src, type, ssize);
1870 emit_group_load (dst, temp, type, ssize);
1871 return;
1873 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1875 dst = gen_reg_rtx (GET_MODE (orig_dst));
1876 /* Make life a bit easier for combine. */
1877 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1880 /* Process the pieces. */
1881 for (i = start; i < XVECLEN (src, 0); i++)
1883 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1884 enum machine_mode mode = GET_MODE (tmps[i]);
1885 unsigned int bytelen = GET_MODE_SIZE (mode);
1886 rtx dest = dst;
1888 /* Handle trailing fragments that run over the size of the struct. */
1889 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1891 /* store_bit_field always takes its value from the lsb.
1892 Move the fragment to the lsb if it's not already there. */
1893 if (
1894 #ifdef BLOCK_REG_PADDING
1895 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1896 == (BYTES_BIG_ENDIAN ? upward : downward)
1897 #else
1898 BYTES_BIG_ENDIAN
1899 #endif
1902 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1903 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1904 build_int_cst (NULL_TREE, shift),
1905 tmps[i], 0);
1907 bytelen = ssize - bytepos;
1910 if (GET_CODE (dst) == CONCAT)
1912 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1913 dest = XEXP (dst, 0);
1914 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1916 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1917 dest = XEXP (dst, 1);
1919 else
1921 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1922 dest = assign_stack_temp (GET_MODE (dest),
1923 GET_MODE_SIZE (GET_MODE (dest)), 0);
1924 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1925 tmps[i]);
1926 dst = dest;
1927 break;
1931 /* Optimize the access just a bit. */
1932 if (MEM_P (dest)
1933 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1934 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1935 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1936 && bytelen == GET_MODE_SIZE (mode))
1937 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1938 else
1939 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1940 mode, tmps[i]);
1943 /* Copy from the pseudo into the (probable) hard reg. */
1944 if (orig_dst != dst)
1945 emit_move_insn (orig_dst, dst);
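/* Editorial sketch, not part of expr.c: building a two-element PARALLEL by
   hand and storing it into an 8-byte stack temporary through
   emit_group_store.  The SImode hard registers 0 and 1, the offsets, and
   the 8-byte size are placeholders, not a real ABI layout.  */
static void
sketch_group_store (void)
{
  rtx e0 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 0), GEN_INT (0));
  rtx e1 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 1), GEN_INT (4));
  rtx src = gen_rtx_PARALLEL (BLKmode, gen_rtvec (2, e0, e1));
  rtx slot = assign_stack_temp (BLKmode, 8, 0);
  emit_group_store (slot, src, NULL_TREE, 8);
}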
1948 /* Generate code to copy a BLKmode object of TYPE out of a
1949 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1950 is null, a stack temporary is created. TGTBLK is returned.
1952 The purpose of this routine is to handle functions that return
1953 BLKmode structures in registers. Some machines (the PA for example)
1954 want to return all small structures in registers regardless of the
1955 structure's alignment. */
1958 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1960 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1961 rtx src = NULL, dst = NULL;
1962 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1963 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1965 if (tgtblk == 0)
1967 tgtblk = assign_temp (build_qualified_type (type,
1968 (TYPE_QUALS (type)
1969 | TYPE_QUAL_CONST)),
1970 0, 1, 1);
1971 preserve_temp_slots (tgtblk);
1974 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1975 into a new pseudo which is a full word. */
1977 if (GET_MODE (srcreg) != BLKmode
1978 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1979 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1981 /* If the structure doesn't take up a whole number of words, see whether
1982 SRCREG is padded on the left or on the right. If it's on the left,
1983 set PADDING_CORRECTION to the number of bits to skip.
1985 In most ABIs, the structure will be returned at the least significant end of
1986 the register, which translates to right padding on little-endian
1987 targets and left padding on big-endian targets. The opposite
1988 holds if the structure is returned at the most significant
1989 end of the register. */
1990 if (bytes % UNITS_PER_WORD != 0
1991 && (targetm.calls.return_in_msb (type)
1992 ? !BYTES_BIG_ENDIAN
1993 : BYTES_BIG_ENDIAN))
1994 padding_correction
1995 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
1997 /* Copy the structure BITSIZE bits at a time.
1999 We could probably emit more efficient code for machines which do not use
2000 strict alignment, but it doesn't seem worth the effort at the current
2001 time. */
2002 for (bitpos = 0, xbitpos = padding_correction;
2003 bitpos < bytes * BITS_PER_UNIT;
2004 bitpos += bitsize, xbitpos += bitsize)
2006 /* We need a new source operand each time xbitpos is on a
2007 word boundary and when xbitpos == padding_correction
2008 (the first time through). */
2009 if (xbitpos % BITS_PER_WORD == 0
2010 || xbitpos == padding_correction)
2011 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2012 GET_MODE (srcreg));
2014 /* We need a new destination operand each time bitpos is on
2015 a word boundary. */
2016 if (bitpos % BITS_PER_WORD == 0)
2017 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2019 /* Use xbitpos for the source extraction (right justified) and
2020 bitpos for the destination store (left justified). */
2021 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2022 extract_bit_field (src, bitsize,
2023 xbitpos % BITS_PER_WORD, 1,
2024 NULL_RTX, word_mode, word_mode));
2027 return tgtblk;
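/* Editorial sketch, not part of expr.c: how a caller could unpack a BLKmode
   value that was returned in registers.  TYPE is assumed to be the tree type
   of the returned structure and SRCREG the (hard) return register; passing
   NULL_RTX lets copy_blkmode_from_reg create the stack temporary itself.  */
static rtx
sketch_unpack_blk_return (rtx srcreg, tree type)
{
  return copy_blkmode_from_reg (NULL_RTX, srcreg, type);
}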
2030 /* Add a USE expression for REG to the (possibly empty) list pointed
2031 to by CALL_FUSAGE. REG must denote a hard register. */
2033 void
2034 use_reg (rtx *call_fusage, rtx reg)
2036 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2038 *call_fusage
2039 = gen_rtx_EXPR_LIST (VOIDmode,
2040 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2043 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2044 starting at REGNO. All of these registers must be hard registers. */
2046 void
2047 use_regs (rtx *call_fusage, int regno, int nregs)
2049 int i;
2051 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2053 for (i = 0; i < nregs; i++)
2054 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2057 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2058 PARALLEL REGS. This is for calls that pass values in multiple
2059 non-contiguous locations. The Irix 6 ABI has examples of this. */
2061 void
2062 use_group_regs (rtx *call_fusage, rtx regs)
2064 int i;
2066 for (i = 0; i < XVECLEN (regs, 0); i++)
2068 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2070 /* A NULL entry means the parameter goes both on the stack and in
2071 registers. This can also be a MEM for targets that pass values
2072 partially on the stack and partially in registers. */
2073 if (reg != 0 && REG_P (reg))
2074 use_reg (call_fusage, reg);
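/* Editorial sketch, not part of expr.c: accumulating a CALL_FUSAGE list for
   a call that passes a value in two consecutive hard registers.  The
   starting register number 0 and the count of 2 are placeholders.  */
static rtx
sketch_build_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;
  use_regs (&call_fusage, 0, 2);
  return call_fusage;
}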
2079 /* Determine whether the LEN bytes generated by CONSTFUN can be
2080 stored to memory using several move instructions. CONSTFUNDATA is
2081 a pointer which will be passed as argument in every CONSTFUN call.
2082 ALIGN is the maximum alignment we can assume. Return nonzero if a
2083 call to store_by_pieces should succeed. */
2086 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2087 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2088 void *constfundata, unsigned int align)
2090 unsigned HOST_WIDE_INT l;
2091 unsigned int max_size;
2092 HOST_WIDE_INT offset = 0;
2093 enum machine_mode mode, tmode;
2094 enum insn_code icode;
2095 int reverse;
2096 rtx cst;
2098 if (len == 0)
2099 return 1;
2101 if (! STORE_BY_PIECES_P (len, align))
2102 return 0;
2104 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2105 if (align >= GET_MODE_ALIGNMENT (tmode))
2106 align = GET_MODE_ALIGNMENT (tmode);
2107 else
2109 enum machine_mode xmode;
2111 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2112 tmode != VOIDmode;
2113 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2114 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2115 || SLOW_UNALIGNED_ACCESS (tmode, align))
2116 break;
2118 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2121 /* We would first store what we can in the largest integer mode, then go to
2122 successively smaller modes. */
2124 for (reverse = 0;
2125 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2126 reverse++)
2128 l = len;
2129 mode = VOIDmode;
2130 max_size = STORE_MAX_PIECES + 1;
2131 while (max_size > 1)
2133 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2134 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2135 if (GET_MODE_SIZE (tmode) < max_size)
2136 mode = tmode;
2138 if (mode == VOIDmode)
2139 break;
2141 icode = mov_optab->handlers[(int) mode].insn_code;
2142 if (icode != CODE_FOR_nothing
2143 && align >= GET_MODE_ALIGNMENT (mode))
2145 unsigned int size = GET_MODE_SIZE (mode);
2147 while (l >= size)
2149 if (reverse)
2150 offset -= size;
2152 cst = (*constfun) (constfundata, offset, mode);
2153 if (!LEGITIMATE_CONSTANT_P (cst))
2154 return 0;
2156 if (!reverse)
2157 offset += size;
2159 l -= size;
2163 max_size = GET_MODE_SIZE (mode);
2166 /* The code above should have handled everything. */
2167 gcc_assert (!l);
2170 return 1;
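/* Editorial sketch, not part of expr.c: a CONSTFUN callback in the shape
   can_store_by_pieces and store_by_pieces expect.  It ignores its data
   pointer and offset and returns a fixed placeholder constant (0xab),
   trimmed to the requested mode by gen_int_mode.  */
static rtx
sketch_constfun (void *data ATTRIBUTE_UNUSED,
		 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		 enum machine_mode mode)
{
  return gen_int_mode (0xab, mode);
}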
2173 /* Generate several move instructions to store LEN bytes generated by
2174 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2175 pointer which will be passed as argument in every CONSTFUN call.
2176 ALIGN is the maximum alignment we can assume.
2177 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2178 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2179 stpcpy. */
2182 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2183 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2184 void *constfundata, unsigned int align, int endp)
2186 struct store_by_pieces data;
2188 if (len == 0)
2190 gcc_assert (endp != 2);
2191 return to;
2194 gcc_assert (STORE_BY_PIECES_P (len, align));
2195 data.constfun = constfun;
2196 data.constfundata = constfundata;
2197 data.len = len;
2198 data.to = to;
2199 store_by_pieces_1 (&data, align);
2200 if (endp)
2202 rtx to1;
2204 gcc_assert (!data.reverse);
2205 if (data.autinc_to)
2207 if (endp == 2)
2209 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2210 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2211 else
2212 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2213 -1));
2215 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2216 data.offset);
2218 else
2220 if (endp == 2)
2221 --data.offset;
2222 to1 = adjust_address (data.to, QImode, data.offset);
2224 return to1;
2226 else
2227 return data.to;
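/* Editorial sketch, not part of expr.c: guarding a store_by_pieces call on a
   hypothetical 32-byte BLKmode MEM with the callback sketched above.  An
   ENDP of 0 asks for the block itself rather than a mempcpy/stpcpy-style
   end pointer.  */
static rtx
sketch_fill_block (rtx blk)
{
  unsigned int align = MEM_ALIGN (blk);

  if (!can_store_by_pieces (32, sketch_constfun, NULL, align))
    return NULL_RTX;
  return store_by_pieces (blk, 32, sketch_constfun, NULL, align, 0);
}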
2230 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2231 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2233 static void
2234 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2236 struct store_by_pieces data;
2238 if (len == 0)
2239 return;
2241 data.constfun = clear_by_pieces_1;
2242 data.constfundata = NULL;
2243 data.len = len;
2244 data.to = to;
2245 store_by_pieces_1 (&data, align);
2248 /* Callback routine for clear_by_pieces.
2249 Return const0_rtx unconditionally. */
2251 static rtx
2252 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2253 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2254 enum machine_mode mode ATTRIBUTE_UNUSED)
2256 return const0_rtx;
2259 /* Subroutine of clear_by_pieces and store_by_pieces.
2260 Generate several move instructions to store LEN bytes of block TO. (A MEM
2261 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2263 static void
2264 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2265 unsigned int align ATTRIBUTE_UNUSED)
2267 rtx to_addr = XEXP (data->to, 0);
2268 unsigned int max_size = STORE_MAX_PIECES + 1;
2269 enum machine_mode mode = VOIDmode, tmode;
2270 enum insn_code icode;
2272 data->offset = 0;
2273 data->to_addr = to_addr;
2274 data->autinc_to
2275 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2276 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2278 data->explicit_inc_to = 0;
2279 data->reverse
2280 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2281 if (data->reverse)
2282 data->offset = data->len;
2284 /* If storing requires more than two move insns,
2285 copy addresses to registers (to make displacements shorter)
2286 and use post-increment if available. */
2287 if (!data->autinc_to
2288 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2290 /* Determine the main mode we'll be using. */
2291 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2292 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2293 if (GET_MODE_SIZE (tmode) < max_size)
2294 mode = tmode;
2296 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2298 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2299 data->autinc_to = 1;
2300 data->explicit_inc_to = -1;
2303 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2304 && ! data->autinc_to)
2306 data->to_addr = copy_addr_to_reg (to_addr);
2307 data->autinc_to = 1;
2308 data->explicit_inc_to = 1;
2311 if ( !data->autinc_to && CONSTANT_P (to_addr))
2312 data->to_addr = copy_addr_to_reg (to_addr);
2315 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2316 if (align >= GET_MODE_ALIGNMENT (tmode))
2317 align = GET_MODE_ALIGNMENT (tmode);
2318 else
2320 enum machine_mode xmode;
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2323 tmode != VOIDmode;
2324 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2325 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2326 || SLOW_UNALIGNED_ACCESS (tmode, align))
2327 break;
2329 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2332 /* First store what we can in the largest integer mode, then go to
2333 successively smaller modes. */
2335 while (max_size > 1)
2337 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2338 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2339 if (GET_MODE_SIZE (tmode) < max_size)
2340 mode = tmode;
2342 if (mode == VOIDmode)
2343 break;
2345 icode = mov_optab->handlers[(int) mode].insn_code;
2346 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2347 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2349 max_size = GET_MODE_SIZE (mode);
2352 /* The code above should have handled everything. */
2353 gcc_assert (!data->len);
2356 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2357 with move instructions for mode MODE. GENFUN is the gen_... function
2358 to make a move insn for that mode. DATA has all the other info. */
2360 static void
2361 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2362 struct store_by_pieces *data)
2364 unsigned int size = GET_MODE_SIZE (mode);
2365 rtx to1, cst;
2367 while (data->len >= size)
2369 if (data->reverse)
2370 data->offset -= size;
2372 if (data->autinc_to)
2373 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2374 data->offset);
2375 else
2376 to1 = adjust_address (data->to, mode, data->offset);
2378 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2379 emit_insn (gen_add2_insn (data->to_addr,
2380 GEN_INT (-(HOST_WIDE_INT) size)));
2382 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2383 emit_insn ((*genfun) (to1, cst));
2385 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2386 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2388 if (! data->reverse)
2389 data->offset += size;
2391 data->len -= size;
2395 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2396 its length in bytes. */
2399 clear_storage (rtx object, rtx size)
2401 rtx retval = 0;
2402 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2403 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2405 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2406 just move a zero. Otherwise, do this a piece at a time. */
2407 if (GET_MODE (object) != BLKmode
2408 && GET_CODE (size) == CONST_INT
2409 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2410 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2411 else
2413 if (size == const0_rtx)
2415 else if (GET_CODE (size) == CONST_INT
2416 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2417 clear_by_pieces (object, INTVAL (size), align);
2418 else if (clear_storage_via_clrmem (object, size, align))
2420 else
2421 retval = clear_storage_via_libcall (object, size);
2424 return retval;
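/* Editorial sketch, not part of expr.c: zeroing a hypothetical 16-byte
   BLKmode stack temporary.  clear_storage chooses between a plain move of
   zero, clear_by_pieces, a clrmem pattern, and a memset libcall.  */
static void
sketch_zero_slot (void)
{
  rtx slot = assign_stack_temp (BLKmode, 16, 0);
  clear_storage (slot, GEN_INT (16));
}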
2427 /* A subroutine of clear_storage. Expand a clrmem pattern;
2428 return true if successful. */
2430 static bool
2431 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2433 /* Try the most limited insn first, because there's no point
2434 including more than one in the machine description unless
2435 the more limited one has some advantage. */
2437 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2438 enum machine_mode mode;
2440 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2441 mode = GET_MODE_WIDER_MODE (mode))
2443 enum insn_code code = clrmem_optab[(int) mode];
2444 insn_operand_predicate_fn pred;
2446 if (code != CODE_FOR_nothing
2447 /* We don't need MODE to be narrower than
2448 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2449 the mode mask, as it is returned by the macro, it will
2450 definitely be less than the actual mode mask. */
2451 && ((GET_CODE (size) == CONST_INT
2452 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2453 <= (GET_MODE_MASK (mode) >> 1)))
2454 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2455 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2456 || (*pred) (object, BLKmode))
2457 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2458 || (*pred) (opalign, VOIDmode)))
2460 rtx op1;
2461 rtx last = get_last_insn ();
2462 rtx pat;
2464 op1 = convert_to_mode (mode, size, 1);
2465 pred = insn_data[(int) code].operand[1].predicate;
2466 if (pred != 0 && ! (*pred) (op1, mode))
2467 op1 = copy_to_mode_reg (mode, op1);
2469 pat = GEN_FCN ((int) code) (object, op1, opalign);
2470 if (pat)
2472 emit_insn (pat);
2473 return true;
2475 else
2476 delete_insns_since (last);
2480 return false;
2483 /* A subroutine of clear_storage. Expand a call to memset.
2484 Return the return value of memset, 0 otherwise. */
2486 static rtx
2487 clear_storage_via_libcall (rtx object, rtx size)
2489 tree call_expr, arg_list, fn, object_tree, size_tree;
2490 enum machine_mode size_mode;
2491 rtx retval;
2493 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2494 place those pseudos into a VAR_DECL and use them later. */
2496 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2498 size_mode = TYPE_MODE (sizetype);
2499 size = convert_to_mode (size_mode, size, 1);
2500 size = copy_to_mode_reg (size_mode, size);
2502 /* It is incorrect to use the libcall calling conventions to call
2503 memset in this context. This could be a user call to memset and
2504 the user may wish to examine the return value from memset. For
2505 targets where libcalls and normal calls have different conventions
2506 for returning pointers, we could end up generating incorrect code. */
2508 object_tree = make_tree (ptr_type_node, object);
2509 size_tree = make_tree (sizetype, size);
2511 fn = clear_storage_libcall_fn (true);
2512 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2513 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2514 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2516 /* Now we have to build up the CALL_EXPR itself. */
2517 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2518 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2519 call_expr, arg_list, NULL_TREE);
2521 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2523 return retval;
2526 /* A subroutine of clear_storage_via_libcall. Create the tree node
2527 for the function we use for block clears. The first time FOR_CALL
2528 is true, we call assemble_external. */
2530 static GTY(()) tree block_clear_fn;
2532 void
2533 init_block_clear_fn (const char *asmspec)
2535 if (!block_clear_fn)
2537 tree fn, args;
2539 fn = get_identifier ("memset");
2540 args = build_function_type_list (ptr_type_node, ptr_type_node,
2541 integer_type_node, sizetype,
2542 NULL_TREE);
2544 fn = build_decl (FUNCTION_DECL, fn, args);
2545 DECL_EXTERNAL (fn) = 1;
2546 TREE_PUBLIC (fn) = 1;
2547 DECL_ARTIFICIAL (fn) = 1;
2548 TREE_NOTHROW (fn) = 1;
2550 block_clear_fn = fn;
2553 if (asmspec)
2554 set_user_assembler_name (block_clear_fn, asmspec);
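/* Editorial sketch, not part of expr.c: how a front end or target could
   redirect block clears at a differently named memset-compatible routine.
   The "__my_bzero" assembler name is purely hypothetical.  */
static void
sketch_rename_block_clear (void)
{
  init_block_clear_fn ("__my_bzero");
}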
2557 static tree
2558 clear_storage_libcall_fn (int for_call)
2560 static bool emitted_extern;
2562 if (!block_clear_fn)
2563 init_block_clear_fn (NULL);
2565 if (for_call && !emitted_extern)
2567 emitted_extern = true;
2568 make_decl_rtl (block_clear_fn);
2569 assemble_external (block_clear_fn);
2572 return block_clear_fn;
2575 /* Generate code to copy Y into X.
2576 Both Y and X must have the same mode, except that
2577 Y can be a constant with VOIDmode.
2578 This mode cannot be BLKmode; use emit_block_move for that.
2580 Return the last instruction emitted. */
2583 emit_move_insn (rtx x, rtx y)
2585 enum machine_mode mode = GET_MODE (x);
2586 rtx y_cst = NULL_RTX;
2587 rtx last_insn, set;
2589 gcc_assert (mode != BLKmode
2590 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
2592 if (CONSTANT_P (y))
2594 if (optimize
2595 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2596 && (last_insn = compress_float_constant (x, y)))
2597 return last_insn;
2599 y_cst = y;
2601 if (!LEGITIMATE_CONSTANT_P (y))
2603 y = force_const_mem (mode, y);
2605 /* If the target's cannot_force_const_mem prevented the spill,
2606 assume that the target's move expanders will also take care
2607 of the non-legitimate constant. */
2608 if (!y)
2609 y = y_cst;
2613 /* If X or Y are memory references, verify that their addresses are valid
2614 for the machine. */
2615 if (MEM_P (x)
2616 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2617 && ! push_operand (x, GET_MODE (x)))
2618 || (flag_force_addr
2619 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2620 x = validize_mem (x);
2622 if (MEM_P (y)
2623 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2624 || (flag_force_addr
2625 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2626 y = validize_mem (y);
2628 gcc_assert (mode != BLKmode);
2630 last_insn = emit_move_insn_1 (x, y);
2632 if (y_cst && REG_P (x)
2633 && (set = single_set (last_insn)) != NULL_RTX
2634 && SET_DEST (set) == x
2635 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2636 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2638 return last_insn;
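/* Editorial sketch, not part of expr.c: loading an integer constant into a
   fresh SImode pseudo.  emit_move_insn legitimizes constants the target
   cannot handle directly and, when the emitted source ends up differing
   from the original constant, records a REG_EQUAL note on the insn.  */
static rtx
sketch_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  return emit_move_insn (reg, GEN_INT (42));
}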
2641 /* Low level part of emit_move_insn.
2642 Called just like emit_move_insn, but assumes X and Y
2643 are basically valid. */
2646 emit_move_insn_1 (rtx x, rtx y)
2648 enum machine_mode mode = GET_MODE (x);
2649 enum machine_mode submode;
2650 enum mode_class class = GET_MODE_CLASS (mode);
2652 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2654 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2655 return
2656 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2658 /* Expand complex moves by moving real part and imag part, if possible. */
2659 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2660 && BLKmode != (submode = GET_MODE_INNER (mode))
2661 && (mov_optab->handlers[(int) submode].insn_code
2662 != CODE_FOR_nothing))
2664 /* Don't split destination if it is a stack push. */
2665 int stack = push_operand (x, GET_MODE (x));
2667 #ifdef PUSH_ROUNDING
2668 /* In case we output to the stack, but the size is smaller than the
2669 machine can push exactly, we need to use move instructions. */
2670 if (stack
2671 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2672 != GET_MODE_SIZE (submode)))
2674 rtx temp;
2675 HOST_WIDE_INT offset1, offset2;
2677 /* Do not use anti_adjust_stack, since we don't want to update
2678 stack_pointer_delta. */
2679 temp = expand_binop (Pmode,
2680 #ifdef STACK_GROWS_DOWNWARD
2681 sub_optab,
2682 #else
2683 add_optab,
2684 #endif
2685 stack_pointer_rtx,
2686 GEN_INT
2687 (PUSH_ROUNDING
2688 (GET_MODE_SIZE (GET_MODE (x)))),
2689 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2691 if (temp != stack_pointer_rtx)
2692 emit_move_insn (stack_pointer_rtx, temp);
2694 #ifdef STACK_GROWS_DOWNWARD
2695 offset1 = 0;
2696 offset2 = GET_MODE_SIZE (submode);
2697 #else
2698 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2699 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2700 + GET_MODE_SIZE (submode));
2701 #endif
2703 emit_move_insn (change_address (x, submode,
2704 gen_rtx_PLUS (Pmode,
2705 stack_pointer_rtx,
2706 GEN_INT (offset1))),
2707 gen_realpart (submode, y));
2708 emit_move_insn (change_address (x, submode,
2709 gen_rtx_PLUS (Pmode,
2710 stack_pointer_rtx,
2711 GEN_INT (offset2))),
2712 gen_imagpart (submode, y));
2714 else
2715 #endif
2716 /* If this is a stack push, push the highpart first, so it
2717 will be in the argument order.
2719 In that case, change_address is used only to convert
2720 the mode, not to change the address. */
2721 if (stack)
2723 /* Note that the real part always precedes the imag part in memory
2724 regardless of the machine's endianness. */
2725 #ifdef STACK_GROWS_DOWNWARD
2726 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2727 gen_imagpart (submode, y));
2728 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2729 gen_realpart (submode, y));
2730 #else
2731 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2732 gen_realpart (submode, y));
2733 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2734 gen_imagpart (submode, y));
2735 #endif
2737 else
2739 rtx realpart_x, realpart_y;
2740 rtx imagpart_x, imagpart_y;
2742 /* If this is a complex value with each part being smaller than a
2743 word, the usual calling sequence will likely pack the pieces into
2744 a single register. Unfortunately, SUBREG of hard registers only
2745 deals in terms of words, so we have a problem converting input
2746 arguments to the CONCAT of two registers that is used elsewhere
2747 for complex values. If this is before reload, we can copy it into
2748 memory and reload. FIXME, we should see about using extract and
2749 insert on integer registers, but complex short and complex char
2750 variables should be rarely used. */
2751 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2752 && (reload_in_progress | reload_completed) == 0)
2754 int packed_dest_p
2755 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2756 int packed_src_p
2757 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2759 if (packed_dest_p || packed_src_p)
2761 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2762 ? MODE_FLOAT : MODE_INT);
2764 enum machine_mode reg_mode
2765 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2767 if (reg_mode != BLKmode)
2769 rtx mem = assign_stack_temp (reg_mode,
2770 GET_MODE_SIZE (mode), 0);
2771 rtx cmem = adjust_address (mem, mode, 0);
2773 if (packed_dest_p)
2775 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2777 emit_move_insn_1 (cmem, y);
2778 return emit_move_insn_1 (sreg, mem);
2780 else
2782 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2784 emit_move_insn_1 (mem, sreg);
2785 return emit_move_insn_1 (x, cmem);
2791 realpart_x = gen_realpart (submode, x);
2792 realpart_y = gen_realpart (submode, y);
2793 imagpart_x = gen_imagpart (submode, x);
2794 imagpart_y = gen_imagpart (submode, y);
2796 /* Show the output dies here. This is necessary for SUBREGs
2797 of pseudos since we cannot track their lifetimes correctly;
2798 hard regs shouldn't appear here except as return values.
2799 We never want to emit such a clobber after reload. */
2800 if (x != y
2801 && ! (reload_in_progress || reload_completed)
2802 && (GET_CODE (realpart_x) == SUBREG
2803 || GET_CODE (imagpart_x) == SUBREG))
2804 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2806 emit_move_insn (realpart_x, realpart_y);
2807 emit_move_insn (imagpart_x, imagpart_y);
2810 return get_last_insn ();
2813 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2814 find a mode to do it in. If we have a movcc, use it. Otherwise,
2815 find the MODE_INT mode of the same width. */
2816 else if (GET_MODE_CLASS (mode) == MODE_CC
2817 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2819 enum insn_code insn_code;
2820 enum machine_mode tmode = VOIDmode;
2821 rtx x1 = x, y1 = y;
2823 if (mode != CCmode
2824 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2825 tmode = CCmode;
2826 else
2827 for (tmode = QImode; tmode != VOIDmode;
2828 tmode = GET_MODE_WIDER_MODE (tmode))
2829 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2830 break;
2832 gcc_assert (tmode != VOIDmode);
2834 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2835 may call change_address which is not appropriate if we were
2836 called when a reload was in progress. We don't have to worry
2837 about changing the address since the size in bytes is supposed to
2838 be the same. Copy the MEM to change the mode and move any
2839 substitutions from the old MEM to the new one. */
2841 if (reload_in_progress)
2843 x = gen_lowpart_common (tmode, x1);
2844 if (x == 0 && MEM_P (x1))
2846 x = adjust_address_nv (x1, tmode, 0);
2847 copy_replacements (x1, x);
2850 y = gen_lowpart_common (tmode, y1);
2851 if (y == 0 && MEM_P (y1))
2853 y = adjust_address_nv (y1, tmode, 0);
2854 copy_replacements (y1, y);
2857 else
2859 x = gen_lowpart (tmode, x);
2860 y = gen_lowpart (tmode, y);
2863 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2864 return emit_insn (GEN_FCN (insn_code) (x, y));
2867 /* Try using a move pattern for the corresponding integer mode. This is
2868 only safe when simplify_subreg can convert MODE constants into integer
2869 constants. At present, it can only do this reliably if the value
2870 fits within a HOST_WIDE_INT. */
2871 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2872 && (submode = int_mode_for_mode (mode)) != BLKmode
2873 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2874 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2875 (simplify_gen_subreg (submode, x, mode, 0),
2876 simplify_gen_subreg (submode, y, mode, 0)));
2878 /* This will handle any multi-word or full-word mode that lacks a move_insn
2879 pattern. However, you will get better code if you define such patterns,
2880 even if they must turn into multiple assembler instructions. */
2881 else
2883 rtx last_insn = 0;
2884 rtx seq, inner;
2885 int need_clobber;
2886 int i;
2888 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2890 #ifdef PUSH_ROUNDING
2892 /* If X is a push on the stack, do the push now and replace
2893 X with a reference to the stack pointer. */
2894 if (push_operand (x, GET_MODE (x)))
2896 rtx temp;
2897 enum rtx_code code;
2899 /* Do not use anti_adjust_stack, since we don't want to update
2900 stack_pointer_delta. */
2901 temp = expand_binop (Pmode,
2902 #ifdef STACK_GROWS_DOWNWARD
2903 sub_optab,
2904 #else
2905 add_optab,
2906 #endif
2907 stack_pointer_rtx,
2908 GEN_INT
2909 (PUSH_ROUNDING
2910 (GET_MODE_SIZE (GET_MODE (x)))),
2911 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2913 if (temp != stack_pointer_rtx)
2914 emit_move_insn (stack_pointer_rtx, temp);
2916 code = GET_CODE (XEXP (x, 0));
2918 /* Just hope that small offsets off SP are OK. */
2919 if (code == POST_INC)
2920 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2921 GEN_INT (-((HOST_WIDE_INT)
2922 GET_MODE_SIZE (GET_MODE (x)))));
2923 else if (code == POST_DEC)
2924 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2925 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2926 else
2927 temp = stack_pointer_rtx;
2929 x = change_address (x, VOIDmode, temp);
2931 #endif
2933 /* If we are in reload, see if either operand is a MEM whose address
2934 is scheduled for replacement. */
2935 if (reload_in_progress && MEM_P (x)
2936 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2937 x = replace_equiv_address_nv (x, inner);
2938 if (reload_in_progress && MEM_P (y)
2939 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2940 y = replace_equiv_address_nv (y, inner);
2942 start_sequence ();
2944 need_clobber = 0;
2945 for (i = 0;
2946 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2947 i++)
2949 rtx xpart = operand_subword (x, i, 1, mode);
2950 rtx ypart = operand_subword (y, i, 1, mode);
2952 /* If we can't get a part of Y, put Y into memory if it is a
2953 constant. Otherwise, force it into a register. If we still
2954 can't get a part of Y, abort. */
2955 if (ypart == 0 && CONSTANT_P (y))
2957 y = force_const_mem (mode, y);
2958 ypart = operand_subword (y, i, 1, mode);
2960 else if (ypart == 0)
2961 ypart = operand_subword_force (y, i, mode);
2963 gcc_assert (xpart && ypart);
2965 need_clobber |= (GET_CODE (xpart) == SUBREG);
2967 last_insn = emit_move_insn (xpart, ypart);
2970 seq = get_insns ();
2971 end_sequence ();
2973 /* Show the output dies here. This is necessary for SUBREGs
2974 of pseudos since we cannot track their lifetimes correctly;
2975 hard regs shouldn't appear here except as return values.
2976 We never want to emit such a clobber after reload. */
2977 if (x != y
2978 && ! (reload_in_progress || reload_completed)
2979 && need_clobber != 0)
2980 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2982 emit_insn (seq);
2984 return last_insn;
2988 /* If Y is representable exactly in a narrower mode, and the target can
2989 perform the extension directly from constant or memory, then emit the
2990 move as an extension. */
2992 static rtx
2993 compress_float_constant (rtx x, rtx y)
2995 enum machine_mode dstmode = GET_MODE (x);
2996 enum machine_mode orig_srcmode = GET_MODE (y);
2997 enum machine_mode srcmode;
2998 REAL_VALUE_TYPE r;
3000 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3002 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3003 srcmode != orig_srcmode;
3004 srcmode = GET_MODE_WIDER_MODE (srcmode))
3006 enum insn_code ic;
3007 rtx trunc_y, last_insn;
3009 /* Skip if the target can't extend this way. */
3010 ic = can_extend_p (dstmode, srcmode, 0);
3011 if (ic == CODE_FOR_nothing)
3012 continue;
3014 /* Skip if the narrowed value isn't exact. */
3015 if (! exact_real_truncate (srcmode, &r))
3016 continue;
3018 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3020 if (LEGITIMATE_CONSTANT_P (trunc_y))
3022 /* Skip if the target needs extra instructions to perform
3023 the extension. */
3024 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3025 continue;
3027 else if (float_extend_from_mem[dstmode][srcmode])
3028 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3029 else
3030 continue;
3032 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3033 last_insn = get_last_insn ();
3035 if (REG_P (x))
3036 set_unique_reg_note (last_insn, REG_EQUAL, y);
3038 return last_insn;
3041 return NULL_RTX;
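/* Editorial sketch, not part of expr.c: the situation compress_float_constant
   is aimed at.  Loading DFmode 1.0 through emit_move_insn may, when
   optimizing, be emitted as the SFmode constant plus a float_extend, if the
   target can extend directly and the truncation is exact.  */
static rtx
sketch_load_double_one (void)
{
  rtx reg = gen_reg_rtx (DFmode);
  return emit_move_insn (reg, CONST1_RTX (DFmode));
}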
3044 /* Pushing data onto the stack. */
3046 /* Push a block of length SIZE (perhaps variable)
3047 and return an rtx to address the beginning of the block.
3048 The value may be virtual_outgoing_args_rtx.
3050 EXTRA is the number of bytes of padding to push in addition to SIZE.
3051 BELOW nonzero means this padding comes at low addresses;
3052 otherwise, the padding comes at high addresses. */
3055 push_block (rtx size, int extra, int below)
3057 rtx temp;
3059 size = convert_modes (Pmode, ptr_mode, size, 1);
3060 if (CONSTANT_P (size))
3061 anti_adjust_stack (plus_constant (size, extra));
3062 else if (REG_P (size) && extra == 0)
3063 anti_adjust_stack (size);
3064 else
3066 temp = copy_to_mode_reg (Pmode, size);
3067 if (extra != 0)
3068 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3069 temp, 0, OPTAB_LIB_WIDEN);
3070 anti_adjust_stack (temp);
3073 #ifndef STACK_GROWS_DOWNWARD
3074 if (0)
3075 #else
3076 if (1)
3077 #endif
3079 temp = virtual_outgoing_args_rtx;
3080 if (extra != 0 && below)
3081 temp = plus_constant (temp, extra);
3083 else
3085 if (GET_CODE (size) == CONST_INT)
3086 temp = plus_constant (virtual_outgoing_args_rtx,
3087 -INTVAL (size) - (below ? 0 : extra));
3088 else if (extra != 0 && !below)
3089 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3090 negate_rtx (Pmode, plus_constant (size, extra)));
3091 else
3092 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3093 negate_rtx (Pmode, size));
3096 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
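/* Editorial sketch, not part of expr.c: reserving a hypothetical 24-byte
   block of outgoing-argument space with no extra padding and getting back
   an address for the start of the block.  */
static rtx
sketch_reserve_arg_block (void)
{
  return push_block (GEN_INT (24), 0, 0);
}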
3099 #ifdef PUSH_ROUNDING
3101 /* Emit single push insn. */
3103 static void
3104 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3106 rtx dest_addr;
3107 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3108 rtx dest;
3109 enum insn_code icode;
3110 insn_operand_predicate_fn pred;
3112 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3113 /* If there is a push pattern, use it. Otherwise try the old way of
3114 handing a MEM representing the push operation to the move expander. */
3115 icode = push_optab->handlers[(int) mode].insn_code;
3116 if (icode != CODE_FOR_nothing)
3118 if (((pred = insn_data[(int) icode].operand[0].predicate)
3119 && !((*pred) (x, mode))))
3120 x = force_reg (mode, x);
3121 emit_insn (GEN_FCN (icode) (x));
3122 return;
3124 if (GET_MODE_SIZE (mode) == rounded_size)
3125 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3126 /* If we are to pad downward, adjust the stack pointer first and
3127 then store X into the stack location using an offset. This is
3128 because emit_move_insn does not know how to pad; it does not have
3129 access to type. */
3130 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3132 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3133 HOST_WIDE_INT offset;
3135 emit_move_insn (stack_pointer_rtx,
3136 expand_binop (Pmode,
3137 #ifdef STACK_GROWS_DOWNWARD
3138 sub_optab,
3139 #else
3140 add_optab,
3141 #endif
3142 stack_pointer_rtx,
3143 GEN_INT (rounded_size),
3144 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3146 offset = (HOST_WIDE_INT) padding_size;
3147 #ifdef STACK_GROWS_DOWNWARD
3148 if (STACK_PUSH_CODE == POST_DEC)
3149 /* We have already decremented the stack pointer, so get the
3150 previous value. */
3151 offset += (HOST_WIDE_INT) rounded_size;
3152 #else
3153 if (STACK_PUSH_CODE == POST_INC)
3154 /* We have already incremented the stack pointer, so get the
3155 previous value. */
3156 offset -= (HOST_WIDE_INT) rounded_size;
3157 #endif
3158 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3160 else
3162 #ifdef STACK_GROWS_DOWNWARD
3163 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3164 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3165 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3166 #else
3167 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3168 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3169 GEN_INT (rounded_size));
3170 #endif
3171 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3174 dest = gen_rtx_MEM (mode, dest_addr);
3176 if (type != 0)
3178 set_mem_attributes (dest, type, 1);
3180 if (flag_optimize_sibling_calls)
3181 /* Function incoming arguments may overlap with sibling call
3182 outgoing arguments and we cannot allow reordering of reads
3183 from function arguments with stores to outgoing arguments
3184 of sibling calls. */
3185 set_mem_alias_set (dest, 0);
3187 emit_move_insn (dest, x);
3189 #endif
3191 /* Generate code to push X onto the stack, assuming it has mode MODE and
3192 type TYPE.
3193 MODE is redundant except when X is a CONST_INT (since they don't
3194 carry mode info).
3195 SIZE is an rtx for the size of data to be copied (in bytes),
3196 needed only if X is BLKmode.
3198 ALIGN (in bits) is the maximum alignment we can assume.
3200 If PARTIAL and REG are both nonzero, then copy that many of the first
3201 words of X into registers starting with REG, and push the rest of X.
3202 The amount of space pushed is decreased by PARTIAL words,
3203 rounded *down* to a multiple of PARM_BOUNDARY.
3204 REG must be a hard register in this case.
3205 If REG is zero but PARTIAL is not, take all other actions for an
3206 argument partially in registers, but do not actually load any
3207 registers.
3209 EXTRA is the amount in bytes of extra space to leave next to this arg.
3210 This is ignored if an argument block has already been allocated.
3212 On a machine that lacks real push insns, ARGS_ADDR is the address of
3213 the bottom of the argument block for this call. We use indexing off there
3214 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3215 argument block has not been preallocated.
3217 ARGS_SO_FAR is the size of args previously pushed for this call.
3219 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3220 for arguments passed in registers. If nonzero, it will be the number
3221 of bytes required. */
3223 void
3224 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3225 unsigned int align, int partial, rtx reg, int extra,
3226 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3227 rtx alignment_pad)
3229 rtx xinner;
3230 enum direction stack_direction
3231 #ifdef STACK_GROWS_DOWNWARD
3232 = downward;
3233 #else
3234 = upward;
3235 #endif
3237 /* Decide where to pad the argument: `downward' for below,
3238 `upward' for above, or `none' for don't pad it.
3239 Default is below for small data on big-endian machines; else above. */
3240 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3242 /* Invert direction if stack is post-decrement.
3243 FIXME: why? */
3244 if (STACK_PUSH_CODE == POST_DEC)
3245 if (where_pad != none)
3246 where_pad = (where_pad == downward ? upward : downward);
3248 xinner = x;
3250 if (mode == BLKmode)
3252 /* Copy a block into the stack, entirely or partially. */
3254 rtx temp;
3255 int used = partial * UNITS_PER_WORD;
3256 int offset;
3257 int skip;
3259 if (reg && GET_CODE (reg) == PARALLEL)
3261 /* Use the size of the elt to compute offset. */
3262 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3263 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3264 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3266 else
3267 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3269 gcc_assert (size);
3271 used -= offset;
3273 /* USED is now the # of bytes we need not copy to the stack
3274 because registers will take care of them. */
3276 if (partial != 0)
3277 xinner = adjust_address (xinner, BLKmode, used);
3279 /* If the partial register-part of the arg counts in its stack size,
3280 skip the part of stack space corresponding to the registers.
3281 Otherwise, start copying to the beginning of the stack space,
3282 by setting SKIP to 0. */
3283 skip = (reg_parm_stack_space == 0) ? 0 : used;
3285 #ifdef PUSH_ROUNDING
3286 /* Do it with several push insns if that doesn't take lots of insns
3287 and if there is no difficulty with push insns that skip bytes
3288 on the stack for alignment purposes. */
3289 if (args_addr == 0
3290 && PUSH_ARGS
3291 && GET_CODE (size) == CONST_INT
3292 && skip == 0
3293 && MEM_ALIGN (xinner) >= align
3294 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3295 /* Here we avoid the case of a structure whose weak alignment
3296 forces many pushes of a small amount of data,
3297 and such small pushes do rounding that causes trouble. */
3298 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3299 || align >= BIGGEST_ALIGNMENT
3300 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3301 == (align / BITS_PER_UNIT)))
3302 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3304 /* Push padding now if padding above and stack grows down,
3305 or if padding below and stack grows up.
3306 But if space already allocated, this has already been done. */
3307 if (extra && args_addr == 0
3308 && where_pad != none && where_pad != stack_direction)
3309 anti_adjust_stack (GEN_INT (extra));
3311 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3313 else
3314 #endif /* PUSH_ROUNDING */
3316 rtx target;
3318 /* Otherwise make space on the stack and copy the data
3319 to the address of that space. */
3321 /* Deduct words put into registers from the size we must copy. */
3322 if (partial != 0)
3324 if (GET_CODE (size) == CONST_INT)
3325 size = GEN_INT (INTVAL (size) - used);
3326 else
3327 size = expand_binop (GET_MODE (size), sub_optab, size,
3328 GEN_INT (used), NULL_RTX, 0,
3329 OPTAB_LIB_WIDEN);
3332 /* Get the address of the stack space.
3333 In this case, we do not deal with EXTRA separately.
3334 A single stack adjust will do. */
3335 if (! args_addr)
3337 temp = push_block (size, extra, where_pad == downward);
3338 extra = 0;
3340 else if (GET_CODE (args_so_far) == CONST_INT)
3341 temp = memory_address (BLKmode,
3342 plus_constant (args_addr,
3343 skip + INTVAL (args_so_far)));
3344 else
3345 temp = memory_address (BLKmode,
3346 plus_constant (gen_rtx_PLUS (Pmode,
3347 args_addr,
3348 args_so_far),
3349 skip));
3351 if (!ACCUMULATE_OUTGOING_ARGS)
3353 /* If the source is referenced relative to the stack pointer,
3354 copy it to another register to stabilize it. We do not need
3355 to do this if we know that we won't be changing sp. */
3357 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3358 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3359 temp = copy_to_reg (temp);
3362 target = gen_rtx_MEM (BLKmode, temp);
3364 /* We do *not* set_mem_attributes here, because incoming arguments
3365 may overlap with sibling call outgoing arguments and we cannot
3366 allow reordering of reads from function arguments with stores
3367 to outgoing arguments of sibling calls. We do, however, want
3368 to record the alignment of the stack slot. */
3369 /* ALIGN may well be better aligned than TYPE, e.g. due to
3370 PARM_BOUNDARY. Assume the caller isn't lying. */
3371 set_mem_align (target, align);
3373 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3376 else if (partial > 0)
3378 /* Scalar partly in registers. */
3380 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3381 int i;
3382 int not_stack;
3383 /* # words of start of argument
3384 that we must make space for but need not store. */
3385 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3386 int args_offset = INTVAL (args_so_far);
3387 int skip;
3389 /* Push padding now if padding above and stack grows down,
3390 or if padding below and stack grows up.
3391 But if space already allocated, this has already been done. */
3392 if (extra && args_addr == 0
3393 && where_pad != none && where_pad != stack_direction)
3394 anti_adjust_stack (GEN_INT (extra));
3396 /* If we make space by pushing it, we might as well push
3397 the real data. Otherwise, we can leave OFFSET nonzero
3398 and leave the space uninitialized. */
3399 if (args_addr == 0)
3400 offset = 0;
3402 /* Now NOT_STACK gets the number of words that we don't need to
3403 allocate on the stack. */
3404 not_stack = partial - offset;
3406 /* If the partial register-part of the arg counts in its stack size,
3407 skip the part of stack space corresponding to the registers.
3408 Otherwise, start copying to the beginning of the stack space,
3409 by setting SKIP to 0. */
3410 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3412 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3413 x = validize_mem (force_const_mem (mode, x));
3415 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3416 SUBREGs of such registers are not allowed. */
3417 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3418 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3419 x = copy_to_reg (x);
3421 /* Loop over all the words allocated on the stack for this arg. */
3422 /* We can do it by words, because any scalar bigger than a word
3423 has a size a multiple of a word. */
3424 #ifndef PUSH_ARGS_REVERSED
3425 for (i = not_stack; i < size; i++)
3426 #else
3427 for (i = size - 1; i >= not_stack; i--)
3428 #endif
3429 if (i >= not_stack + offset)
3430 emit_push_insn (operand_subword_force (x, i, mode),
3431 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3432 0, args_addr,
3433 GEN_INT (args_offset + ((i - not_stack + skip)
3434 * UNITS_PER_WORD)),
3435 reg_parm_stack_space, alignment_pad);
3437 else
3439 rtx addr;
3440 rtx dest;
3442 /* Push padding now if padding above and stack grows down,
3443 or if padding below and stack grows up.
3444 But if space already allocated, this has already been done. */
3445 if (extra && args_addr == 0
3446 && where_pad != none && where_pad != stack_direction)
3447 anti_adjust_stack (GEN_INT (extra));
3449 #ifdef PUSH_ROUNDING
3450 if (args_addr == 0 && PUSH_ARGS)
3451 emit_single_push_insn (mode, x, type);
3452 else
3453 #endif
3455 if (GET_CODE (args_so_far) == CONST_INT)
3456 addr
3457 = memory_address (mode,
3458 plus_constant (args_addr,
3459 INTVAL (args_so_far)));
3460 else
3461 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3462 args_so_far));
3463 dest = gen_rtx_MEM (mode, addr);
3465 /* We do *not* set_mem_attributes here, because incoming arguments
3466 may overlap with sibling call outgoing arguments and we cannot
3467 allow reordering of reads from function arguments with stores
3468 to outgoing arguments of sibling calls. We do, however, want
3469 to record the alignment of the stack slot. */
3470 /* ALIGN may well be better aligned than TYPE, e.g. due to
3471 PARM_BOUNDARY. Assume the caller isn't lying. */
3472 set_mem_align (dest, align);
3474 emit_move_insn (dest, x);
3478 /* If part should go in registers, copy that part
3479 into the appropriate registers. Do this now, at the end,
3480 since mem-to-mem copies above may do function calls. */
3481 if (partial > 0 && reg != 0)
3483 /* Handle calls that pass values in multiple non-contiguous locations.
3484 The Irix 6 ABI has examples of this. */
3485 if (GET_CODE (reg) == PARALLEL)
3486 emit_group_load (reg, x, type, -1);
3487 else
3488 move_block_to_reg (REGNO (reg), x, partial, mode);
3491 if (extra && args_addr == 0 && where_pad == stack_direction)
3492 anti_adjust_stack (GEN_INT (extra));
3494 if (alignment_pad && args_addr == 0)
3495 anti_adjust_stack (alignment_pad);
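/* Editorial sketch, not part of expr.c: pushing a single SImode argument on
   a target with push insns (so ARGS_ADDR is 0), with word alignment, no
   partial-register split, no extra padding, and no alignment pad.  All of
   these parameter choices are placeholders.  */
static void
sketch_push_simple_arg (rtx arg)
{
  emit_push_insn (arg, SImode, NULL_TREE, NULL_RTX, BITS_PER_WORD,
		  0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);
}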
3498 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3499 operations. */
3501 static rtx
3502 get_subtarget (rtx x)
3504 return (optimize
3505 || x == 0
3506 /* Only registers can be subtargets. */
3507 || !REG_P (x)
3508 /* Don't use hard regs to avoid extending their life. */
3509 || REGNO (x) < FIRST_PSEUDO_REGISTER
3510 ? 0 : x);
3513 /* Expand an assignment that stores the value of FROM into TO. */
3515 void
3516 expand_assignment (tree to, tree from)
3518 rtx to_rtx = 0;
3519 rtx result;
3521 /* Don't crash if the lhs of the assignment was erroneous. */
3523 if (TREE_CODE (to) == ERROR_MARK)
3525 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3526 return;
3529 /* Assignment of a structure component needs special treatment
3530 if the structure component's rtx is not simply a MEM.
3531 Assignment of an array element at a constant index, and assignment of
3532 an array element in an unaligned packed structure field, have the same
3533 problem. */
3535 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3536 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3537 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3539 enum machine_mode mode1;
3540 HOST_WIDE_INT bitsize, bitpos;
3541 rtx orig_to_rtx;
3542 tree offset;
3543 int unsignedp;
3544 int volatilep = 0;
3545 tree tem;
3547 push_temp_slots ();
3548 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3549 &unsignedp, &volatilep);
3551 /* If we are going to use store_bit_field and extract_bit_field,
3552 make sure to_rtx will be safe for multiple use. */
3554 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3556 if (offset != 0)
3558 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3560 gcc_assert (MEM_P (to_rtx));
3562 #ifdef POINTERS_EXTEND_UNSIGNED
3563 if (GET_MODE (offset_rtx) != Pmode)
3564 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3565 #else
3566 if (GET_MODE (offset_rtx) != ptr_mode)
3567 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3568 #endif
3570 /* A constant address in TO_RTX can have VOIDmode; we must not try
3571 to call force_reg in that case, so avoid it. */
3572 if (MEM_P (to_rtx)
3573 && GET_MODE (to_rtx) == BLKmode
3574 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3575 && bitsize > 0
3576 && (bitpos % bitsize) == 0
3577 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3578 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3580 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3581 bitpos = 0;
3584 to_rtx = offset_address (to_rtx, offset_rtx,
3585 highest_pow2_factor_for_target (to,
3586 offset));
3589 if (MEM_P (to_rtx))
3591 /* If the field is at offset zero, we could have been given the
3592 DECL_RTX of the parent struct. Don't munge it. */
3593 to_rtx = shallow_copy_rtx (to_rtx);
3595 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3598 /* Deal with volatile and readonly fields. The former is only done
3599 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3600 if (volatilep && MEM_P (to_rtx))
3602 if (to_rtx == orig_to_rtx)
3603 to_rtx = copy_rtx (to_rtx);
3604 MEM_VOLATILE_P (to_rtx) = 1;
3607 if (MEM_P (to_rtx) && ! can_address_p (to))
3609 if (to_rtx == orig_to_rtx)
3610 to_rtx = copy_rtx (to_rtx);
3611 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3614 /* Optimize bitfld op= val in certain cases. */
3615 while (mode1 == VOIDmode
3616 && bitsize > 0 && bitsize < BITS_PER_WORD
3617 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3618 && !TREE_SIDE_EFFECTS (to)
3619 && !TREE_THIS_VOLATILE (to))
3621 tree src, op0, op1;
3622 rtx value, str_rtx = to_rtx;
3623 HOST_WIDE_INT bitpos1 = bitpos;
3624 optab binop;
3626 src = from;
3627 STRIP_NOPS (src);
3628 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3629 || !BINARY_CLASS_P (src))
3630 break;
3632 op0 = TREE_OPERAND (src, 0);
3633 op1 = TREE_OPERAND (src, 1);
3634 STRIP_NOPS (op0);
3636 if (! operand_equal_p (to, op0, 0))
3637 break;
3639 if (MEM_P (str_rtx))
3641 enum machine_mode mode = GET_MODE (str_rtx);
3642 HOST_WIDE_INT offset1;
3644 if (GET_MODE_BITSIZE (mode) == 0
3645 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3646 mode = word_mode;
3647 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3648 mode, 0);
3649 if (mode == VOIDmode)
3650 break;
3652 offset1 = bitpos1;
3653 bitpos1 %= GET_MODE_BITSIZE (mode);
3654 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3655 str_rtx = adjust_address (str_rtx, mode, offset1);
3657 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3658 break;
3660 /* If the bit field covers the whole REG/MEM, store_field
3661 will likely generate better code. */
3662 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3663 break;
3665 /* We can't handle fields split across multiple entities. */
3666 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3667 break;
3669 if (BYTES_BIG_ENDIAN)
3670 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3671 - bitsize;
3673 /* Special case some bitfield op= exp. */
3674 switch (TREE_CODE (src))
3676 case PLUS_EXPR:
3677 case MINUS_EXPR:
3678 /* For now, just optimize the case of the topmost bitfield
3679 where we don't need to do any masking and also
3680 1 bit bitfields where xor can be used.
3681 We might win by one instruction for the other bitfields
3682 too if insv/extv instructions aren't used, so that
3683 can be added later. */
3684 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3685 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3686 break;
3687 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3688 value = convert_modes (GET_MODE (str_rtx),
3689 TYPE_MODE (TREE_TYPE (op1)), value,
3690 TYPE_UNSIGNED (TREE_TYPE (op1)));
3692 /* We may be accessing data outside the field, which means
3693 we can alias adjacent data. */
3694 if (MEM_P (str_rtx))
3696 str_rtx = shallow_copy_rtx (str_rtx);
3697 set_mem_alias_set (str_rtx, 0);
3698 set_mem_expr (str_rtx, 0);
3701 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3702 if (bitsize == 1
3703 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3705 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3706 NULL_RTX);
3707 binop = xor_optab;
3709 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3710 build_int_cst (NULL_TREE, bitpos1),
3711 NULL_RTX, 1);
3712 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3713 value, str_rtx, 1, OPTAB_WIDEN);
3714 if (result != str_rtx)
3715 emit_move_insn (str_rtx, result);
3716 free_temp_slots ();
3717 pop_temp_slots ();
3718 return;
3720 default:
3721 break;
3724 break;
3727 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3728 TREE_TYPE (tem), get_alias_set (to));
3730 preserve_temp_slots (result);
3731 free_temp_slots ();
3732 pop_temp_slots ();
3734 /* If the value is meaningful, convert RESULT to the proper mode.
3735 Otherwise, return nothing. */
3736 return;
3739 /* If the rhs is a function call and its value is not an aggregate,
3740 call the function before we start to compute the lhs.
3741 This is needed for correct code for cases such as
3742 val = setjmp (buf) on machines where reference to val
3743 requires loading up part of an address in a separate insn.
3745 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
3746 since it might be a promoted variable where the zero- or sign-extension
3747 needs to be done. Handling this in the normal way is safe because no
3748 computation is done before the call. */
3749 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3750 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3751 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3752 && REG_P (DECL_RTL (to))))
3754 rtx value;
3756 push_temp_slots ();
3757 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3758 if (to_rtx == 0)
3759 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3761 /* Handle calls that return values in multiple non-contiguous locations.
3762 The Irix 6 ABI has examples of this. */
3763 if (GET_CODE (to_rtx) == PARALLEL)
3764 emit_group_load (to_rtx, value, TREE_TYPE (from),
3765 int_size_in_bytes (TREE_TYPE (from)));
3766 else if (GET_MODE (to_rtx) == BLKmode)
3767 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3768 else
3770 if (POINTER_TYPE_P (TREE_TYPE (to)))
3771 value = convert_memory_address (GET_MODE (to_rtx), value);
3772 emit_move_insn (to_rtx, value);
3774 preserve_temp_slots (to_rtx);
3775 free_temp_slots ();
3776 pop_temp_slots ();
3777 return;
3780 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3781 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3783 if (to_rtx == 0)
3784 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3786 /* Don't move directly into a return register. */
3787 if (TREE_CODE (to) == RESULT_DECL
3788 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3790 rtx temp;
3792 push_temp_slots ();
3793 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3795 if (GET_CODE (to_rtx) == PARALLEL)
3796 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3797 int_size_in_bytes (TREE_TYPE (from)));
3798 else
3799 emit_move_insn (to_rtx, temp);
3801 preserve_temp_slots (to_rtx);
3802 free_temp_slots ();
3803 pop_temp_slots ();
3804 return;
3807 /* In case we are returning the contents of an object which overlaps
3808 the place the value is being stored, use a safe function when copying
3809 a value through a pointer into a structure value return block. */
3810 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3811 && current_function_returns_struct
3812 && !current_function_returns_pcc_struct)
3814 rtx from_rtx, size;
3816 push_temp_slots ();
3817 size = expr_size (from);
3818 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3820 emit_library_call (memmove_libfunc, LCT_NORMAL,
3821 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3822 XEXP (from_rtx, 0), Pmode,
3823 convert_to_mode (TYPE_MODE (sizetype),
3824 size, TYPE_UNSIGNED (sizetype)),
3825 TYPE_MODE (sizetype));
3827 preserve_temp_slots (to_rtx);
3828 free_temp_slots ();
3829 pop_temp_slots ();
3830 return;
3833 /* Compute FROM and store the value in the rtx we got. */
3835 push_temp_slots ();
3836 result = store_expr (from, to_rtx, 0);
3837 preserve_temp_slots (result);
3838 free_temp_slots ();
3839 pop_temp_slots ();
3840 return;
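/* A standalone illustrative sketch (hypothetical, plain C, not GCC
   internals) of the word-level effect of the "bitfld op= val" shortcut
   handled above.  It assumes a 32-bit word, little-endian bit numbering
   (i.e. after the BYTES_BIG_ENDIAN adjustment), and an 8-bit field stored
   in the topmost bits at bit position 24.  */

/* Topmost field: carries out of the field simply fall off the top of the
   word, so the addition needs no masking -- just shift the addend into
   place.  */
static unsigned int
example_topmost_field_plus (unsigned int word, unsigned int val)
{
  return word + (val << 24);
}

/* 1-bit field: addition modulo 2 is exclusive or, which is why the code
   above switches BINOP to xor_optab for single-bit fields.  */
static unsigned int
example_one_bit_field_plus (unsigned int word, unsigned int val, int bitpos)
{
  return word ^ ((val & 1u) << bitpos);
}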
3843 /* Generate code for computing expression EXP,
3844 and storing the value into TARGET.
3846 If the mode is BLKmode then we may return TARGET itself.
3847 It turns out that in BLKmode it doesn't cause a problem,
3848 because C has no operators that could combine two different
3849 assignments into the same BLKmode object with different values
3850 with no sequence point. Will other languages need this to
3851 be more thorough?
3853 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3854 stack, and block moves may need to be treated specially. */
3856 rtx
3857 store_expr (tree exp, rtx target, int call_param_p)
3859 rtx temp;
3860 rtx alt_rtl = NULL_RTX;
3861 int dont_return_target = 0;
3863 if (VOID_TYPE_P (TREE_TYPE (exp)))
3865 /* C++ can generate ?: expressions with a throw expression in one
3866 branch and an rvalue in the other. Here, we resolve attempts to
3867 store the throw expression's nonexistent result. */
3868 gcc_assert (!call_param_p);
3869 expand_expr (exp, const0_rtx, VOIDmode, 0);
3870 return NULL_RTX;
3872 if (TREE_CODE (exp) == COMPOUND_EXPR)
3874 /* Perform first part of compound expression, then assign from second
3875 part. */
3876 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3877 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3878 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3880 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3882 /* For conditional expression, get safe form of the target. Then
3883 test the condition, doing the appropriate assignment on either
3884 side. This avoids the creation of unnecessary temporaries.
3885 For non-BLKmode, it is more efficient not to do this. */
3887 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3889 do_pending_stack_adjust ();
3890 NO_DEFER_POP;
3891 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3892 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3893 emit_jump_insn (gen_jump (lab2));
3894 emit_barrier ();
3895 emit_label (lab1);
3896 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
3897 emit_label (lab2);
3898 OK_DEFER_POP;
3900 return NULL_RTX;
3902 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3903 /* If this is a scalar in a register that is stored in a wider mode
3904 than the declared mode, compute the result into its declared mode
3905 and then convert to the wider mode. Our value is the computed
3906 expression. */
3908 rtx inner_target = 0;
3910 /* We can do the conversion inside EXP, which will often result
3911 in some optimizations. Do the conversion in two steps: first
3912 change the signedness, if needed, then the extend. But don't
3913 do this if the type of EXP is a subtype of something else
3914 since then the conversion might involve more than just
3915 converting modes. */
3916 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
3917 && TREE_TYPE (TREE_TYPE (exp)) == 0
3918 && (!lang_hooks.reduce_bit_field_operations
3919 || (GET_MODE_PRECISION (GET_MODE (target))
3920 == TYPE_PRECISION (TREE_TYPE (exp)))))
3922 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3923 != SUBREG_PROMOTED_UNSIGNED_P (target))
3924 exp = convert
3925 (lang_hooks.types.signed_or_unsigned_type
3926 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3928 exp = convert (lang_hooks.types.type_for_mode
3929 (GET_MODE (SUBREG_REG (target)),
3930 SUBREG_PROMOTED_UNSIGNED_P (target)),
3931 exp);
3933 inner_target = SUBREG_REG (target);
3936 temp = expand_expr (exp, inner_target, VOIDmode,
3937 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3939 /* If TEMP is a VOIDmode constant, use convert_modes to make
3940 sure that we properly convert it. */
3941 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3943 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3944 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3945 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3946 GET_MODE (target), temp,
3947 SUBREG_PROMOTED_UNSIGNED_P (target));
3950 convert_move (SUBREG_REG (target), temp,
3951 SUBREG_PROMOTED_UNSIGNED_P (target));
3953 return NULL_RTX;
3955 else
3957 temp = expand_expr_real (exp, target, GET_MODE (target),
3958 (call_param_p
3959 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3960 &alt_rtl);
3961 /* Return TARGET if it's a specified hardware register.
3962 If TARGET is a volatile mem ref, either return TARGET
3963 or return a reg copied *from* TARGET; ANSI requires this.
3965 Otherwise, if TEMP is not TARGET, return TEMP
3966 if it is constant (for efficiency),
3967 or if we really want the correct value. */
3968 if (!(target && REG_P (target)
3969 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3970 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3971 && ! rtx_equal_p (temp, target)
3972 && CONSTANT_P (temp))
3973 dont_return_target = 1;
3976 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3977 the same as that of TARGET, adjust the constant. This is needed, for
3978 example, in case it is a CONST_DOUBLE and we want only a word-sized
3979 value. */
3980 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3981 && TREE_CODE (exp) != ERROR_MARK
3982 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3983 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3984 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3986 /* If value was not generated in the target, store it there.
3987 Convert the value to TARGET's type first if necessary and emit the
3988 pending incrementations that have been queued when expanding EXP.
3989 Note that we cannot emit the whole queue blindly because this will
3990 effectively disable the POST_INC optimization later.
3992 If TEMP and TARGET compare equal according to rtx_equal_p, but
3993 one or both of them are volatile memory refs, we have to distinguish
3994 two cases:
3995 - expand_expr has used TARGET. In this case, we must not generate
3996 another copy. This can be detected by TARGET being equal according
3997 to == .
3998 - expand_expr has not used TARGET - that means that the source just
3999 happens to have the same RTX form. Since temp will have been created
4000 by expand_expr, it will compare unequal according to == .
4001 We must generate a copy in this case, to reach the correct number
4002 of volatile memory references. */
4004 if ((! rtx_equal_p (temp, target)
4005 || (temp != target && (side_effects_p (temp)
4006 || side_effects_p (target))))
4007 && TREE_CODE (exp) != ERROR_MARK
4008 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4009 but TARGET is not valid memory reference, TEMP will differ
4010 from TARGET although it is really the same location. */
4011 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4012 /* If there's nothing to copy, don't bother. Don't call expr_size
4013 unless necessary, because some front ends' (e.g. C++) expr_size hook
4014 aborts on objects that are not supposed to be bit-copied or
4015 bit-initialized. */
4016 && expr_size (exp) != const0_rtx)
4018 if (GET_MODE (temp) != GET_MODE (target)
4019 && GET_MODE (temp) != VOIDmode)
4021 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4022 if (dont_return_target)
4024 /* In this case, we will return TEMP,
4025 so make sure it has the proper mode.
4026 But don't forget to store the value into TARGET. */
4027 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4028 emit_move_insn (target, temp);
4030 else
4031 convert_move (target, temp, unsignedp);
4034 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4036 /* Handle copying a string constant into an array. The string
4037 constant may be shorter than the array. So copy just the string's
4038 actual length, and clear the rest. First get the size of the data
4039 type of the string, which is actually the size of the target. */
4040 rtx size = expr_size (exp);
4042 if (GET_CODE (size) == CONST_INT
4043 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4044 emit_block_move (target, temp, size,
4045 (call_param_p
4046 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4047 else
4049 /* Compute the size of the data to copy from the string. */
4050 tree copy_size
4051 = size_binop (MIN_EXPR,
4052 make_tree (sizetype, size),
4053 size_int (TREE_STRING_LENGTH (exp)));
4054 rtx copy_size_rtx
4055 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4056 (call_param_p
4057 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4058 rtx label = 0;
4060 /* Copy that much. */
4061 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4062 TYPE_UNSIGNED (sizetype));
4063 emit_block_move (target, temp, copy_size_rtx,
4064 (call_param_p
4065 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4067 /* Figure out how much is left in TARGET that we have to clear.
4068 Do all calculations in ptr_mode. */
4069 if (GET_CODE (copy_size_rtx) == CONST_INT)
4071 size = plus_constant (size, -INTVAL (copy_size_rtx));
4072 target = adjust_address (target, BLKmode,
4073 INTVAL (copy_size_rtx));
4075 else
4077 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4078 copy_size_rtx, NULL_RTX, 0,
4079 OPTAB_LIB_WIDEN);
4081 #ifdef POINTERS_EXTEND_UNSIGNED
4082 if (GET_MODE (copy_size_rtx) != Pmode)
4083 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4084 TYPE_UNSIGNED (sizetype));
4085 #endif
4087 target = offset_address (target, copy_size_rtx,
4088 highest_pow2_factor (copy_size));
4089 label = gen_label_rtx ();
4090 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4091 GET_MODE (size), 0, label);
4094 if (size != const0_rtx)
4095 clear_storage (target, size);
4097 if (label)
4098 emit_label (label);
4101 /* Handle calls that return values in multiple non-contiguous locations.
4102 The Irix 6 ABI has examples of this. */
4103 else if (GET_CODE (target) == PARALLEL)
4104 emit_group_load (target, temp, TREE_TYPE (exp),
4105 int_size_in_bytes (TREE_TYPE (exp)));
4106 else if (GET_MODE (temp) == BLKmode)
4107 emit_block_move (target, temp, expr_size (exp),
4108 (call_param_p
4109 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4110 else
4112 temp = force_operand (temp, target);
4113 if (temp != target)
4114 emit_move_insn (target, temp);
4118 return NULL_RTX;
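/* A standalone illustrative sketch (hypothetical, plain C, not GCC
   internals) of the STRING_CST case above: when a string constant
   initializes a larger target, only the string bytes are copied
   (the emit_block_move part) and the remainder is cleared (the
   clear_storage part).  */
#include <string.h>

static void
example_store_string_cst (char *target, size_t target_size,
                          const char *str, size_t str_size /* incl. NUL */)
{
  size_t copy_size = str_size < target_size ? str_size : target_size;

  memcpy (target, str, copy_size);                         /* copy the string */
  if (copy_size < target_size)
    memset (target + copy_size, 0, target_size - copy_size); /* clear the rest */
}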
4121 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4122 values and place the count in *P_NZ_ELTS. Discover how many scalar
4123 fields are set to non-constant values and place the count in *P_NC_ELTS. */
4125 static void
4126 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4127 HOST_WIDE_INT *p_nc_elts)
4129 HOST_WIDE_INT nz_elts, nc_elts;
4130 tree list;
4132 nz_elts = 0;
4133 nc_elts = 0;
4135 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4137 tree value = TREE_VALUE (list);
4138 tree purpose = TREE_PURPOSE (list);
4139 HOST_WIDE_INT mult;
4141 mult = 1;
4142 if (TREE_CODE (purpose) == RANGE_EXPR)
4144 tree lo_index = TREE_OPERAND (purpose, 0);
4145 tree hi_index = TREE_OPERAND (purpose, 1);
4147 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4148 mult = (tree_low_cst (hi_index, 1)
4149 - tree_low_cst (lo_index, 1) + 1);
4152 switch (TREE_CODE (value))
4154 case CONSTRUCTOR:
4156 HOST_WIDE_INT nz = 0, nc = 0;
4157 categorize_ctor_elements_1 (value, &nz, &nc);
4158 nz_elts += mult * nz;
4159 nc_elts += mult * nc;
4161 break;
4163 case INTEGER_CST:
4164 case REAL_CST:
4165 if (!initializer_zerop (value))
4166 nz_elts += mult;
4167 break;
4168 case COMPLEX_CST:
4169 if (!initializer_zerop (TREE_REALPART (value)))
4170 nz_elts += mult;
4171 if (!initializer_zerop (TREE_IMAGPART (value)))
4172 nz_elts += mult;
4173 break;
4174 case VECTOR_CST:
4176 tree v;
4177 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4178 if (!initializer_zerop (TREE_VALUE (v)))
4179 nz_elts += mult;
4181 break;
4183 default:
4184 nz_elts += mult;
4185 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4186 nc_elts += mult;
4187 break;
4191 *p_nz_elts += nz_elts;
4192 *p_nc_elts += nc_elts;
4195 void
4196 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4197 HOST_WIDE_INT *p_nc_elts)
4199 *p_nz_elts = 0;
4200 *p_nc_elts = 0;
4201 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
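/* An illustrative example (hypothetical types, not GCC internals) of the
   counts the routine above would report for a concrete C initializer.  */
struct example_pt { int x, y; };
static struct example_pt example_pts[3] = { { 1, 0 }, { 0, 0 }, { 2, 3 } };
/* For the CONSTRUCTOR initializing example_pts, nz_elts would be 3 (the
   scalars 1, 2 and 3 are nonzero) and nc_elts would be 0, since every
   element is a compile-time constant.  */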
4204 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4205 TYPE is variable-sized. */
4207 HOST_WIDE_INT
4208 count_type_elements (tree type)
4210 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4211 switch (TREE_CODE (type))
4213 case ARRAY_TYPE:
4215 tree telts = array_type_nelts (type);
4216 if (telts && host_integerp (telts, 1))
4218 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4219 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4220 if (n == 0)
4221 return 0;
4222 else if (max / n > m)
4223 return n * m;
4225 return -1;
4228 case RECORD_TYPE:
4230 HOST_WIDE_INT n = 0, t;
4231 tree f;
4233 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4234 if (TREE_CODE (f) == FIELD_DECL)
4236 t = count_type_elements (TREE_TYPE (f));
4237 if (t < 0)
4238 return -1;
4239 n += t;
4242 return n;
4245 case UNION_TYPE:
4246 case QUAL_UNION_TYPE:
4248 /* Ho hum. How in the world do we guess here? Clearly it isn't
4249 right to count the fields. Guess based on the number of words. */
4250 HOST_WIDE_INT n = int_size_in_bytes (type);
4251 if (n < 0)
4252 return -1;
4253 return n / UNITS_PER_WORD;
4256 case COMPLEX_TYPE:
4257 return 2;
4259 case VECTOR_TYPE:
4260 return TYPE_VECTOR_SUBPARTS (type);
4262 case INTEGER_TYPE:
4263 case REAL_TYPE:
4264 case ENUMERAL_TYPE:
4265 case BOOLEAN_TYPE:
4266 case CHAR_TYPE:
4267 case POINTER_TYPE:
4268 case OFFSET_TYPE:
4269 case REFERENCE_TYPE:
4270 return 1;
4272 case VOID_TYPE:
4273 case METHOD_TYPE:
4274 case FILE_TYPE:
4275 case SET_TYPE:
4276 case FUNCTION_TYPE:
4277 case LANG_TYPE:
4278 default:
4279 gcc_unreachable ();
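/* Illustrative examples (hypothetical types, not GCC internals) of the
   scalar counts the routine above would compute, assuming a 64-bit target
   where UNITS_PER_WORD is 8; the union figure is the by-words guess
   described in the comment above.  */
struct example_rec { int x; double d[3]; };  /* RECORD_TYPE:  1 + 3 * 1 -> 4 */
union example_uni { int x; double d[3]; };   /* UNION_TYPE:   24 / 8    -> 3 */
typedef _Complex double example_cplx;        /* COMPLEX_TYPE:           -> 2 */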
4283 /* Return 1 if EXP contains mostly (3/4) zeros. */
4285 static int
4286 mostly_zeros_p (tree exp)
4288 if (TREE_CODE (exp) == CONSTRUCTOR)
4291 HOST_WIDE_INT nz_elts, nc_elts, elts;
4293 /* If there are no ranges of true bits, it is all zero. */
4294 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4295 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4297 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4298 elts = count_type_elements (TREE_TYPE (exp));
4300 return nz_elts < elts / 4;
4303 return initializer_zerop (exp);
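/* A standalone illustrative sketch (hypothetical, plain C, not GCC
   internals) of the same 3/4 threshold, applied to a plain array of ints
   instead of a CONSTRUCTOR.  */
static int
example_mostly_zeros (const int *a, int n)
{
  int nz = 0, i;

  for (i = 0; i < n; i++)
    if (a[i] != 0)
      nz++;

  /* Mirrors "nz_elts < elts / 4": fewer than a quarter nonzero.  */
  return nz < n / 4;
}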
4306 /* Helper function for store_constructor.
4307 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4308 TYPE is the type of the CONSTRUCTOR, not the element type.
4309 CLEARED is as for store_constructor.
4310 ALIAS_SET is the alias set to use for any stores.
4312 This provides a recursive shortcut back to store_constructor when it isn't
4313 necessary to go through store_field. This is so that we can pass through
4314 the cleared field to let store_constructor know that we may not have to
4315 clear a substructure if the outer structure has already been cleared. */
4317 static void
4318 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4319 HOST_WIDE_INT bitpos, enum machine_mode mode,
4320 tree exp, tree type, int cleared, int alias_set)
4322 if (TREE_CODE (exp) == CONSTRUCTOR
4323 /* We can only call store_constructor recursively if the size and
4324 bit position are on a byte boundary. */
4325 && bitpos % BITS_PER_UNIT == 0
4326 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4327 /* If we have a nonzero bitpos for a register target, then we just
4328 let store_field do the bitfield handling. This is unlikely to
4329 generate unnecessary clear instructions anyway. */
4330 && (bitpos == 0 || MEM_P (target)))
4332 if (MEM_P (target))
4333 target
4334 = adjust_address (target,
4335 GET_MODE (target) == BLKmode
4336 || 0 != (bitpos
4337 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4338 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4341 /* Update the alias set, if required. */
4342 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4343 && MEM_ALIAS_SET (target) != 0)
4345 target = copy_rtx (target);
4346 set_mem_alias_set (target, alias_set);
4349 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4351 else
4352 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4355 /* Store the value of constructor EXP into the rtx TARGET.
4356 TARGET is either a REG or a MEM; we know it cannot conflict, since
4357 safe_from_p has been called.
4358 CLEARED is true if TARGET is known to have been zero'd.
4359 SIZE is the number of bytes of TARGET we are allowed to modify: this
4360 may not be the same as the size of EXP if we are assigning to a field
4361 which has been packed to exclude padding bits. */
4363 static void
4364 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4366 tree type = TREE_TYPE (exp);
4367 #ifdef WORD_REGISTER_OPERATIONS
4368 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4369 #endif
4371 switch (TREE_CODE (type))
4373 case RECORD_TYPE:
4374 case UNION_TYPE:
4375 case QUAL_UNION_TYPE:
4377 tree elt;
4379 /* If size is zero or the target is already cleared, do nothing. */
4380 if (size == 0 || cleared)
4381 cleared = 1;
4382 /* We either clear the aggregate or indicate the value is dead. */
4383 else if ((TREE_CODE (type) == UNION_TYPE
4384 || TREE_CODE (type) == QUAL_UNION_TYPE)
4385 && ! CONSTRUCTOR_ELTS (exp))
4386 /* If the constructor is empty, clear the union. */
4388 clear_storage (target, expr_size (exp));
4389 cleared = 1;
4392 /* If we are building a static constructor into a register,
4393 set the initial value as zero so we can fold the value into
4394 a constant. But if more than one register is involved,
4395 this probably loses. */
4396 else if (REG_P (target) && TREE_STATIC (exp)
4397 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4399 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4400 cleared = 1;
4403 /* If the constructor has fewer fields than the structure or
4404 if we are initializing the structure to mostly zeros, clear
4405 the whole structure first. Don't do this if TARGET is a
4406 register whose mode size isn't equal to SIZE since
4407 clear_storage can't handle this case. */
4408 else if (size > 0
4409 && ((list_length (CONSTRUCTOR_ELTS (exp))
4410 != fields_length (type))
4411 || mostly_zeros_p (exp))
4412 && (!REG_P (target)
4413 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4414 == size)))
4416 clear_storage (target, GEN_INT (size));
4417 cleared = 1;
4420 if (! cleared)
4421 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4423 /* Store each element of the constructor into the
4424 corresponding field of TARGET. */
4426 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4428 tree field = TREE_PURPOSE (elt);
4429 tree value = TREE_VALUE (elt);
4430 enum machine_mode mode;
4431 HOST_WIDE_INT bitsize;
4432 HOST_WIDE_INT bitpos = 0;
4433 tree offset;
4434 rtx to_rtx = target;
4436 /* Just ignore missing fields. We cleared the whole
4437 structure, above, if any fields are missing. */
4438 if (field == 0)
4439 continue;
4441 if (cleared && initializer_zerop (value))
4442 continue;
4444 if (host_integerp (DECL_SIZE (field), 1))
4445 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4446 else
4447 bitsize = -1;
4449 mode = DECL_MODE (field);
4450 if (DECL_BIT_FIELD (field))
4451 mode = VOIDmode;
4453 offset = DECL_FIELD_OFFSET (field);
4454 if (host_integerp (offset, 0)
4455 && host_integerp (bit_position (field), 0))
4457 bitpos = int_bit_position (field);
4458 offset = 0;
4460 else
4461 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4463 if (offset)
4465 rtx offset_rtx;
4467 offset
4468 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4469 make_tree (TREE_TYPE (exp),
4470 target));
4472 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4473 gcc_assert (MEM_P (to_rtx));
4475 #ifdef POINTERS_EXTEND_UNSIGNED
4476 if (GET_MODE (offset_rtx) != Pmode)
4477 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4478 #else
4479 if (GET_MODE (offset_rtx) != ptr_mode)
4480 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4481 #endif
4483 to_rtx = offset_address (to_rtx, offset_rtx,
4484 highest_pow2_factor (offset));
4487 #ifdef WORD_REGISTER_OPERATIONS
4488 /* If this initializes a field that is smaller than a
4489 word, at the start of a word, try to widen it to a full
4490 word. This special case allows us to output C++ member
4491 function initializations in a form that the optimizers
4492 can understand. */
4493 if (REG_P (target)
4494 && bitsize < BITS_PER_WORD
4495 && bitpos % BITS_PER_WORD == 0
4496 && GET_MODE_CLASS (mode) == MODE_INT
4497 && TREE_CODE (value) == INTEGER_CST
4498 && exp_size >= 0
4499 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4501 tree type = TREE_TYPE (value);
4503 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4505 type = lang_hooks.types.type_for_size
4506 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4507 value = convert (type, value);
4510 if (BYTES_BIG_ENDIAN)
4511 value
4512 = fold (build2 (LSHIFT_EXPR, type, value,
4513 build_int_cst (NULL_TREE,
4514 BITS_PER_WORD - bitsize)));
4515 bitsize = BITS_PER_WORD;
4516 mode = word_mode;
4518 #endif
4520 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4521 && DECL_NONADDRESSABLE_P (field))
4523 to_rtx = copy_rtx (to_rtx);
4524 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4527 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4528 value, type, cleared,
4529 get_alias_set (TREE_TYPE (field)));
4531 break;
4533 case ARRAY_TYPE:
4535 tree elt;
4536 int i;
4537 int need_to_clear;
4538 tree domain;
4539 tree elttype = TREE_TYPE (type);
4540 int const_bounds_p;
4541 HOST_WIDE_INT minelt = 0;
4542 HOST_WIDE_INT maxelt = 0;
4544 domain = TYPE_DOMAIN (type);
4545 const_bounds_p = (TYPE_MIN_VALUE (domain)
4546 && TYPE_MAX_VALUE (domain)
4547 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4548 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4550 /* If we have constant bounds for the range of the type, get them. */
4551 if (const_bounds_p)
4553 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4554 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4557 /* If the constructor has fewer elements than the array, clear
4558 the whole array first. Similarly if this is a static
4559 constructor of a non-BLKmode object. */
4560 if (cleared)
4561 need_to_clear = 0;
4562 else if (REG_P (target) && TREE_STATIC (exp))
4563 need_to_clear = 1;
4564 else
4566 HOST_WIDE_INT count = 0, zero_count = 0;
4567 need_to_clear = ! const_bounds_p;
4569 /* This loop is a more accurate version of the loop in
4570 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4571 is also needed to check for missing elements. */
4572 for (elt = CONSTRUCTOR_ELTS (exp);
4573 elt != NULL_TREE && ! need_to_clear;
4574 elt = TREE_CHAIN (elt))
4576 tree index = TREE_PURPOSE (elt);
4577 HOST_WIDE_INT this_node_count;
4579 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4581 tree lo_index = TREE_OPERAND (index, 0);
4582 tree hi_index = TREE_OPERAND (index, 1);
4584 if (! host_integerp (lo_index, 1)
4585 || ! host_integerp (hi_index, 1))
4587 need_to_clear = 1;
4588 break;
4591 this_node_count = (tree_low_cst (hi_index, 1)
4592 - tree_low_cst (lo_index, 1) + 1);
4594 else
4595 this_node_count = 1;
4597 count += this_node_count;
4598 if (mostly_zeros_p (TREE_VALUE (elt)))
4599 zero_count += this_node_count;
4602 /* Clear the entire array first if there are any missing
4603 elements, or if the incidence of zero elements is >=
4604 75%. */
4605 if (! need_to_clear
4606 && (count < maxelt - minelt + 1
4607 || 4 * zero_count >= 3 * count))
4608 need_to_clear = 1;
4611 if (need_to_clear && size > 0)
4613 if (REG_P (target))
4614 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4615 else
4616 clear_storage (target, GEN_INT (size));
4617 cleared = 1;
4620 if (!cleared && REG_P (target))
4621 /* Inform later passes that the old value is dead. */
4622 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4624 /* Store each element of the constructor into the
4625 corresponding element of TARGET, determined by counting the
4626 elements. */
4627 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4628 elt;
4629 elt = TREE_CHAIN (elt), i++)
4631 enum machine_mode mode;
4632 HOST_WIDE_INT bitsize;
4633 HOST_WIDE_INT bitpos;
4634 int unsignedp;
4635 tree value = TREE_VALUE (elt);
4636 tree index = TREE_PURPOSE (elt);
4637 rtx xtarget = target;
4639 if (cleared && initializer_zerop (value))
4640 continue;
4642 unsignedp = TYPE_UNSIGNED (elttype);
4643 mode = TYPE_MODE (elttype);
4644 if (mode == BLKmode)
4645 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4646 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4647 : -1);
4648 else
4649 bitsize = GET_MODE_BITSIZE (mode);
4651 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4653 tree lo_index = TREE_OPERAND (index, 0);
4654 tree hi_index = TREE_OPERAND (index, 1);
4655 rtx index_r, pos_rtx;
4656 HOST_WIDE_INT lo, hi, count;
4657 tree position;
4659 /* If the range is constant and "small", unroll the loop. */
4660 if (const_bounds_p
4661 && host_integerp (lo_index, 0)
4662 && host_integerp (hi_index, 0)
4663 && (lo = tree_low_cst (lo_index, 0),
4664 hi = tree_low_cst (hi_index, 0),
4665 count = hi - lo + 1,
4666 (!MEM_P (target)
4667 || count <= 2
4668 || (host_integerp (TYPE_SIZE (elttype), 1)
4669 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4670 <= 40 * 8)))))
4672 lo -= minelt; hi -= minelt;
4673 for (; lo <= hi; lo++)
4675 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4677 if (MEM_P (target)
4678 && !MEM_KEEP_ALIAS_SET_P (target)
4679 && TREE_CODE (type) == ARRAY_TYPE
4680 && TYPE_NONALIASED_COMPONENT (type))
4682 target = copy_rtx (target);
4683 MEM_KEEP_ALIAS_SET_P (target) = 1;
4686 store_constructor_field
4687 (target, bitsize, bitpos, mode, value, type, cleared,
4688 get_alias_set (elttype));
4691 else
4693 rtx loop_start = gen_label_rtx ();
4694 rtx loop_end = gen_label_rtx ();
4695 tree exit_cond;
4697 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4698 unsignedp = TYPE_UNSIGNED (domain);
4700 index = build_decl (VAR_DECL, NULL_TREE, domain);
4702 index_r
4703 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4704 &unsignedp, 0));
4705 SET_DECL_RTL (index, index_r);
4706 store_expr (lo_index, index_r, 0);
4708 /* Build the head of the loop. */
4709 do_pending_stack_adjust ();
4710 emit_label (loop_start);
4712 /* Assign value to element index. */
4713 position
4714 = convert (ssizetype,
4715 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4716 index, TYPE_MIN_VALUE (domain))));
4717 position = size_binop (MULT_EXPR, position,
4718 convert (ssizetype,
4719 TYPE_SIZE_UNIT (elttype)));
4721 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4722 xtarget = offset_address (target, pos_rtx,
4723 highest_pow2_factor (position));
4724 xtarget = adjust_address (xtarget, mode, 0);
4725 if (TREE_CODE (value) == CONSTRUCTOR)
4726 store_constructor (value, xtarget, cleared,
4727 bitsize / BITS_PER_UNIT);
4728 else
4729 store_expr (value, xtarget, 0);
4731 /* Generate a conditional jump to exit the loop. */
4732 exit_cond = build2 (LT_EXPR, integer_type_node,
4733 index, hi_index);
4734 jumpif (exit_cond, loop_end);
4736 /* Update the loop counter, and jump to the head of
4737 the loop. */
4738 expand_assignment (index,
4739 build2 (PLUS_EXPR, TREE_TYPE (index),
4740 index, integer_one_node));
4742 emit_jump (loop_start);
4744 /* Build the end of the loop. */
4745 emit_label (loop_end);
4748 else if ((index != 0 && ! host_integerp (index, 0))
4749 || ! host_integerp (TYPE_SIZE (elttype), 1))
4751 tree position;
4753 if (index == 0)
4754 index = ssize_int (1);
4756 if (minelt)
4757 index = fold_convert (ssizetype,
4758 fold (build2 (MINUS_EXPR,
4759 TREE_TYPE (index),
4760 index,
4761 TYPE_MIN_VALUE (domain))));
4763 position = size_binop (MULT_EXPR, index,
4764 convert (ssizetype,
4765 TYPE_SIZE_UNIT (elttype)));
4766 xtarget = offset_address (target,
4767 expand_expr (position, 0, VOIDmode, 0),
4768 highest_pow2_factor (position));
4769 xtarget = adjust_address (xtarget, mode, 0);
4770 store_expr (value, xtarget, 0);
4772 else
4774 if (index != 0)
4775 bitpos = ((tree_low_cst (index, 0) - minelt)
4776 * tree_low_cst (TYPE_SIZE (elttype), 1));
4777 else
4778 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4780 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4781 && TREE_CODE (type) == ARRAY_TYPE
4782 && TYPE_NONALIASED_COMPONENT (type))
4784 target = copy_rtx (target);
4785 MEM_KEEP_ALIAS_SET_P (target) = 1;
4787 store_constructor_field (target, bitsize, bitpos, mode, value,
4788 type, cleared, get_alias_set (elttype));
4791 break;
4794 case VECTOR_TYPE:
4796 tree elt;
4797 int i;
4798 int need_to_clear;
4799 int icode = 0;
4800 tree elttype = TREE_TYPE (type);
4801 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4802 enum machine_mode eltmode = TYPE_MODE (elttype);
4803 HOST_WIDE_INT bitsize;
4804 HOST_WIDE_INT bitpos;
4805 rtx *vector = NULL;
4806 unsigned n_elts;
4808 gcc_assert (eltmode != BLKmode);
4810 n_elts = TYPE_VECTOR_SUBPARTS (type);
4811 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4813 enum machine_mode mode = GET_MODE (target);
4815 icode = (int) vec_init_optab->handlers[mode].insn_code;
4816 if (icode != CODE_FOR_nothing)
4818 unsigned int i;
4820 vector = alloca (n_elts);
4821 for (i = 0; i < n_elts; i++)
4822 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4826 /* If the constructor has fewer elements than the vector,
4827 clear the whole array first. Similarly if this is a static
4828 constructor of a non-BLKmode object. */
4829 if (cleared)
4830 need_to_clear = 0;
4831 else if (REG_P (target) && TREE_STATIC (exp))
4832 need_to_clear = 1;
4833 else
4835 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4837 for (elt = CONSTRUCTOR_ELTS (exp);
4838 elt != NULL_TREE;
4839 elt = TREE_CHAIN (elt))
4841 int n_elts_here = tree_low_cst
4842 (int_const_binop (TRUNC_DIV_EXPR,
4843 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4844 TYPE_SIZE (elttype), 0), 1);
4846 count += n_elts_here;
4847 if (mostly_zeros_p (TREE_VALUE (elt)))
4848 zero_count += n_elts_here;
4851 /* Clear the entire vector first if there are any missing elements,
4852 or if the incidence of zero elements is >= 75%. */
4853 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4856 if (need_to_clear && size > 0 && !vector)
4858 if (REG_P (target))
4859 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4860 else
4861 clear_storage (target, GEN_INT (size));
4862 cleared = 1;
4865 if (!cleared && REG_P (target))
4866 /* Inform later passes that the old value is dead. */
4867 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4869 /* Store each element of the constructor into the corresponding
4870 element of TARGET, determined by counting the elements. */
4871 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4872 elt;
4873 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4875 tree value = TREE_VALUE (elt);
4876 tree index = TREE_PURPOSE (elt);
4877 HOST_WIDE_INT eltpos;
4879 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4880 if (cleared && initializer_zerop (value))
4881 continue;
4883 if (index != 0)
4884 eltpos = tree_low_cst (index, 1);
4885 else
4886 eltpos = i;
4888 if (vector)
4890 /* Vector CONSTRUCTORs should only be built from smaller
4891 vectors in the case of BLKmode vectors. */
4892 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4893 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4895 else
4897 enum machine_mode value_mode =
4898 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4899 ? TYPE_MODE (TREE_TYPE (value))
4900 : eltmode;
4901 bitpos = eltpos * elt_size;
4902 store_constructor_field (target, bitsize, bitpos,
4903 value_mode, value, type,
4904 cleared, get_alias_set (elttype));
4908 if (vector)
4909 emit_insn (GEN_FCN (icode)
4910 (target,
4911 gen_rtx_PARALLEL (GET_MODE (target),
4912 gen_rtvec_v (n_elts, vector))));
4913 break;
4916 /* Set constructor assignments. */
4917 case SET_TYPE:
4919 tree elt = CONSTRUCTOR_ELTS (exp);
4920 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4921 tree domain = TYPE_DOMAIN (type);
4922 tree domain_min, domain_max, bitlength;
4924 /* The default implementation strategy is to extract the
4925 constant parts of the constructor, use that to initialize
4926 the target, and then "or" in whatever non-constant ranges
4927 we need in addition.
4929 If a large set is all zero or all ones, it is probably
4930 better to set it using memset. Also, if a large set has
4931 just a single range, it may also be better to first clear
4932 the whole set (using memset), and then set the
4933 bits we want. */
4935 /* Check for all zeros. */
4936 if (elt == NULL_TREE && size > 0)
4938 if (!cleared)
4939 clear_storage (target, GEN_INT (size));
4940 return;
4943 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4944 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4945 bitlength = size_binop (PLUS_EXPR,
4946 size_diffop (domain_max, domain_min),
4947 ssize_int (1));
4949 nbits = tree_low_cst (bitlength, 1);
4951 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
4952 that are "complicated" (more than one range), initialize
4953 (the constant parts) by copying from a constant. */
4954 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4955 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4957 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4958 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4959 char *bit_buffer = alloca (nbits);
4960 HOST_WIDE_INT word = 0;
4961 unsigned int bit_pos = 0;
4962 unsigned int ibit = 0;
4963 unsigned int offset = 0; /* In bytes from beginning of set. */
4965 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4966 for (;;)
4968 if (bit_buffer[ibit])
4970 if (BYTES_BIG_ENDIAN)
4971 word |= (1 << (set_word_size - 1 - bit_pos));
4972 else
4973 word |= 1 << bit_pos;
4976 bit_pos++; ibit++;
4977 if (bit_pos >= set_word_size || ibit == nbits)
4979 if (word != 0 || ! cleared)
4981 rtx datum = gen_int_mode (word, mode);
4982 rtx to_rtx;
4984 /* The assumption here is that it is safe to
4985 use XEXP if the set is multi-word, but not
4986 if it's single-word. */
4987 if (MEM_P (target))
4988 to_rtx = adjust_address (target, mode, offset);
4989 else
4991 gcc_assert (!offset);
4992 to_rtx = target;
4994 emit_move_insn (to_rtx, datum);
4997 if (ibit == nbits)
4998 break;
4999 word = 0;
5000 bit_pos = 0;
5001 offset += set_word_size / BITS_PER_UNIT;
5005 else if (!cleared)
5006 /* Don't bother clearing storage if the set is all ones. */
5007 if (TREE_CHAIN (elt) != NULL_TREE
5008 || (TREE_PURPOSE (elt) == NULL_TREE
5009 ? nbits != 1
5010 : ( ! host_integerp (TREE_VALUE (elt), 0)
5011 || ! host_integerp (TREE_PURPOSE (elt), 0)
5012 || (tree_low_cst (TREE_VALUE (elt), 0)
5013 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5014 != (HOST_WIDE_INT) nbits))))
5015 clear_storage (target, expr_size (exp));
5017 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5019 /* Start of range of element or NULL. */
5020 tree startbit = TREE_PURPOSE (elt);
5021 /* End of range of element, or element value. */
5022 tree endbit = TREE_VALUE (elt);
5023 HOST_WIDE_INT startb, endb;
5024 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5026 bitlength_rtx = expand_expr (bitlength,
5027 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5029 /* Handle non-range tuple element like [ expr ]. */
5030 if (startbit == NULL_TREE)
5032 startbit = save_expr (endbit);
5033 endbit = startbit;
5036 startbit = convert (sizetype, startbit);
5037 endbit = convert (sizetype, endbit);
5038 if (! integer_zerop (domain_min))
5040 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5041 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5043 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5044 EXPAND_CONST_ADDRESS);
5045 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5046 EXPAND_CONST_ADDRESS);
5048 if (REG_P (target))
5050 targetx
5051 = assign_temp
5052 ((build_qualified_type (lang_hooks.types.type_for_mode
5053 (GET_MODE (target), 0),
5054 TYPE_QUAL_CONST)),
5055 0, 1, 1);
5056 emit_move_insn (targetx, target);
5059 else
5061 gcc_assert (MEM_P (target));
5062 targetx = target;
5065 /* Optimization: If startbit and endbit are constants divisible
5066 by BITS_PER_UNIT, call memset instead. */
5067 if (TREE_CODE (startbit) == INTEGER_CST
5068 && TREE_CODE (endbit) == INTEGER_CST
5069 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5070 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5072 emit_library_call (memset_libfunc, LCT_NORMAL,
5073 VOIDmode, 3,
5074 plus_constant (XEXP (targetx, 0),
5075 startb / BITS_PER_UNIT),
5076 Pmode,
5077 constm1_rtx, TYPE_MODE (integer_type_node),
5078 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5079 TYPE_MODE (sizetype));
5081 else
5082 emit_library_call (setbits_libfunc, LCT_NORMAL,
5083 VOIDmode, 4, XEXP (targetx, 0),
5084 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5085 startbit_rtx, TYPE_MODE (sizetype),
5086 endbit_rtx, TYPE_MODE (sizetype));
5088 if (REG_P (target))
5089 emit_move_insn (target, targetx);
5091 break;
5093 default:
5094 gcc_unreachable ();
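/* A standalone illustrative sketch (hypothetical, plain C, not GCC
   internals) of the "clear first, then store the nonzero fields" strategy
   above, for a mostly-zero aggregate initializer such as
       struct example_big s = { { [3] = 1 } };  */
#include <string.h>

struct example_big { int a[100]; };

static void
example_store_constructor (struct example_big *s)
{
  memset (s, 0, sizeof *s);   /* clear_storage: zero the whole object      */
  s->a[3] = 1;                /* store_constructor_field: the lone nonzero */
}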
5098 /* Store the value of EXP (an expression tree)
5099 into a subfield of TARGET which has mode MODE and occupies
5100 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5101 If MODE is VOIDmode, it means that we are storing into a bit-field.
5103 Always return const0_rtx unless we have something particular to
5104 return.
5106 TYPE is the type of the underlying object,
5108 ALIAS_SET is the alias set for the destination. This value will
5109 (in general) be different from that for TARGET, since TARGET is a
5110 reference to the containing structure. */
5112 static rtx
5113 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5114 enum machine_mode mode, tree exp, tree type, int alias_set)
5116 HOST_WIDE_INT width_mask = 0;
5118 if (TREE_CODE (exp) == ERROR_MARK)
5119 return const0_rtx;
5121 /* If we have nothing to store, do nothing unless the expression has
5122 side-effects. */
5123 if (bitsize == 0)
5124 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5125 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5126 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5128 /* If we are storing into an unaligned field of an aligned union that is
5129 in a register, we may have the mode of TARGET being an integer mode but
5130 MODE == BLKmode. In that case, get an aligned object whose size and
5131 alignment are the same as TARGET and store TARGET into it (we can avoid
5132 the store if the field being stored is the entire width of TARGET). Then
5133 call ourselves recursively to store the field into a BLKmode version of
5134 that object. Finally, load from the object into TARGET. This is not
5135 very efficient in general, but should only be slightly more expensive
5136 than the otherwise-required unaligned accesses. Perhaps this can be
5137 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5138 twice, once with emit_move_insn and once via store_field. */
5140 if (mode == BLKmode
5141 && (REG_P (target) || GET_CODE (target) == SUBREG))
5143 rtx object = assign_temp (type, 0, 1, 1);
5144 rtx blk_object = adjust_address (object, BLKmode, 0);
5146 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5147 emit_move_insn (object, target);
5149 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5151 emit_move_insn (target, object);
5153 /* We want to return the BLKmode version of the data. */
5154 return blk_object;
5157 if (GET_CODE (target) == CONCAT)
5159 /* We're storing into a struct containing a single __complex. */
5161 gcc_assert (!bitpos);
5162 return store_expr (exp, target, 0);
5165 /* If the structure is in a register or if the component
5166 is a bit field, we cannot use addressing to access it.
5167 Use bit-field techniques or SUBREG to store in it. */
5169 if (mode == VOIDmode
5170 || (mode != BLKmode && ! direct_store[(int) mode]
5171 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5172 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5173 || REG_P (target)
5174 || GET_CODE (target) == SUBREG
5175 /* If the field isn't aligned enough to store as an ordinary memref,
5176 store it as a bit field. */
5177 || (mode != BLKmode
5178 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5179 || bitpos % GET_MODE_ALIGNMENT (mode))
5180 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5181 || (bitpos % BITS_PER_UNIT != 0)))
5182 /* If the RHS and field are a constant size and the size of the
5183 RHS isn't the same size as the bitfield, we must use bitfield
5184 operations. */
5185 || (bitsize >= 0
5186 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5187 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5189 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5191 /* If BITSIZE is narrower than the size of the type of EXP
5192 we will be narrowing TEMP. Normally, what's wanted are the
5193 low-order bits. However, if EXP's type is a record and this is
5194 big-endian machine, we want the upper BITSIZE bits. */
5195 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5196 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5197 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5198 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5199 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5200 - bitsize),
5201 NULL_RTX, 1);
5203 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5204 MODE. */
5205 if (mode != VOIDmode && mode != BLKmode
5206 && mode != TYPE_MODE (TREE_TYPE (exp)))
5207 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5209 /* If the modes of TARGET and TEMP are both BLKmode, both
5210 must be in memory and BITPOS must be aligned on a byte
5211 boundary. If so, we simply do a block copy. */
5212 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5214 gcc_assert (MEM_P (target) && MEM_P (temp)
5215 && !(bitpos % BITS_PER_UNIT));
5217 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5218 emit_block_move (target, temp,
5219 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5220 / BITS_PER_UNIT),
5221 BLOCK_OP_NORMAL);
5223 return const0_rtx;
5226 /* Store the value in the bitfield. */
5227 store_bit_field (target, bitsize, bitpos, mode, temp);
5229 return const0_rtx;
5231 else
5233 /* Now build a reference to just the desired component. */
5234 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5236 if (to_rtx == target)
5237 to_rtx = copy_rtx (to_rtx);
5239 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5240 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5241 set_mem_alias_set (to_rtx, alias_set);
5243 return store_expr (exp, to_rtx, 0);
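/* A standalone illustrative sketch (hypothetical, plain C, not GCC
   internals) of the word-level operation the bit-field branch above
   ultimately asks store_bit_field to perform: insert the low BITSIZE bits
   of VAL into WORD at BITPOS, assuming a 32-bit word, little-endian bit
   numbering and bitpos + bitsize <= 32.  */
static unsigned int
example_insert_bit_field (unsigned int word, unsigned int val,
                          int bitpos, int bitsize)
{
  unsigned int field_mask
    = bitsize >= 32 ? ~0u : ((1u << bitsize) - 1u);

  return (word & ~(field_mask << bitpos))
         | ((val & field_mask) << bitpos);
}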
5247 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5248 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5249 codes and find the ultimate containing object, which we return.
5251 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5252 bit position, and *PUNSIGNEDP to the signedness of the field.
5253 If the position of the field is variable, we store a tree
5254 giving the variable offset (in units) in *POFFSET.
5255 This offset is in addition to the bit position.
5256 If the position is not variable, we store 0 in *POFFSET.
5258 If any of the extraction expressions is volatile,
5259 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5261 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5262 is a mode that can be used to access the field. In that case, *PBITSIZE
5263 is redundant.
5265 If the field describes a variable-sized object, *PMODE is set to
5266 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5267 this case, but the address of the object can be found. */
5269 tree
5270 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5271 HOST_WIDE_INT *pbitpos, tree *poffset,
5272 enum machine_mode *pmode, int *punsignedp,
5273 int *pvolatilep)
5275 tree size_tree = 0;
5276 enum machine_mode mode = VOIDmode;
5277 tree offset = size_zero_node;
5278 tree bit_offset = bitsize_zero_node;
5279 tree tem;
5281 /* First get the mode, signedness, and size. We do this from just the
5282 outermost expression. */
5283 if (TREE_CODE (exp) == COMPONENT_REF)
5285 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5286 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5287 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5289 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5291 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5293 size_tree = TREE_OPERAND (exp, 1);
5294 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5296 else
5298 mode = TYPE_MODE (TREE_TYPE (exp));
5299 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5301 if (mode == BLKmode)
5302 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5303 else
5304 *pbitsize = GET_MODE_BITSIZE (mode);
5307 if (size_tree != 0)
5309 if (! host_integerp (size_tree, 1))
5310 mode = BLKmode, *pbitsize = -1;
5311 else
5312 *pbitsize = tree_low_cst (size_tree, 1);
5315 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5316 and find the ultimate containing object. */
5317 while (1)
5319 if (TREE_CODE (exp) == BIT_FIELD_REF)
5320 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5321 else if (TREE_CODE (exp) == COMPONENT_REF)
5323 tree field = TREE_OPERAND (exp, 1);
5324 tree this_offset = component_ref_field_offset (exp);
5326 /* If this field hasn't been filled in yet, don't go
5327 past it. This should only happen when folding expressions
5328 made during type construction. */
5329 if (this_offset == 0)
5330 break;
5332 offset = size_binop (PLUS_EXPR, offset, this_offset);
5333 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5334 DECL_FIELD_BIT_OFFSET (field));
5336 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5339 else if (TREE_CODE (exp) == ARRAY_REF
5340 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5342 tree index = TREE_OPERAND (exp, 1);
5343 tree low_bound = array_ref_low_bound (exp);
5344 tree unit_size = array_ref_element_size (exp);
5346 /* We assume all arrays have sizes that are a multiple of a byte.
5347 First subtract the lower bound, if any, in the type of the
5348 index, then convert to sizetype and multiply by the size of the
5349 array element. */
5350 if (! integer_zerop (low_bound))
5351 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5352 index, low_bound));
5354 offset = size_binop (PLUS_EXPR, offset,
5355 size_binop (MULT_EXPR,
5356 convert (sizetype, index),
5357 unit_size));
5360 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5361 conversions that don't change the mode, and all view conversions
5362 except those that need to "step up" the alignment. */
5363 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5364 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5365 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5366 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5367 && STRICT_ALIGNMENT
5368 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5369 < BIGGEST_ALIGNMENT)
5370 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5371 || TYPE_ALIGN_OK (TREE_TYPE
5372 (TREE_OPERAND (exp, 0))))))
5373 && ! ((TREE_CODE (exp) == NOP_EXPR
5374 || TREE_CODE (exp) == CONVERT_EXPR)
5375 && (TYPE_MODE (TREE_TYPE (exp))
5376 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5377 break;
5379 /* If any reference in the chain is volatile, the effect is volatile. */
5380 if (TREE_THIS_VOLATILE (exp))
5381 *pvolatilep = 1;
5383 exp = TREE_OPERAND (exp, 0);
5386 /* If OFFSET is constant, see if we can return the whole thing as a
5387 constant bit position. Otherwise, split it up. */
5388 if (host_integerp (offset, 0)
5389 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5390 bitsize_unit_node))
5391 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5392 && host_integerp (tem, 0))
5393 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5394 else
5395 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5397 *pmode = mode;
5398 return exp;
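/* An illustrative example (hypothetical types, not GCC internals) of what
   get_inner_reference reports for a reference like  o.v[i].lo  with the
   layout below: the ultimate containing object is  o, *poffset carries the
   variable part i * sizeof (struct example_in), *pbitpos the constant part,
   *pbitsize is 12 and *pmode is VOIDmode, since  lo  is a bit-field.  */
#include <stddef.h>

struct example_in { unsigned lo : 12; unsigned hi : 20; };
struct example_out { int pad; struct example_in v[8]; };

static unsigned long
example_bit_position (unsigned long i)
{
  /* The constant bit position plus the variable offset scaled to bits.  */
  return (unsigned long) offsetof (struct example_out, v) * 8
         + i * sizeof (struct example_in) * 8;
}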
5401 /* Return a tree of sizetype representing the size, in bytes, of the element
5402 of EXP, an ARRAY_REF. */
5404 tree
5405 array_ref_element_size (tree exp)
5407 tree aligned_size = TREE_OPERAND (exp, 3);
5408 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5410 /* If a size was specified in the ARRAY_REF, it's the size measured
5411 in alignment units of the element type. So multiply by that value. */
5412 if (aligned_size)
5414 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5415 sizetype from another type of the same width and signedness. */
5416 if (TREE_TYPE (aligned_size) != sizetype)
5417 aligned_size = fold_convert (sizetype, aligned_size);
5418 return size_binop (MULT_EXPR, aligned_size,
5419 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5422 /* Otherwise, take the size from that of the element type. Substitute
5423 any PLACEHOLDER_EXPR that we have. */
5424 else
5425 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5428 /* Return a tree representing the lower bound of the array mentioned in
5429 EXP, an ARRAY_REF. */
5431 tree
5432 array_ref_low_bound (tree exp)
5434 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5436 /* If a lower bound is specified in EXP, use it. */
5437 if (TREE_OPERAND (exp, 2))
5438 return TREE_OPERAND (exp, 2);
5440 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5441 substituting for a PLACEHOLDER_EXPR as needed. */
5442 if (domain_type && TYPE_MIN_VALUE (domain_type))
5443 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5445 /* Otherwise, return a zero of the appropriate type. */
5446 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5449 /* Return a tree representing the upper bound of the array mentioned in
5450 EXP, an ARRAY_REF. */
5452 tree
5453 array_ref_up_bound (tree exp)
5455 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5457 /* If there is a domain type and it has an upper bound, use it, substituting
5458 for a PLACEHOLDER_EXPR as needed. */
5459 if (domain_type && TYPE_MAX_VALUE (domain_type))
5460 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5462 /* Otherwise fail. */
5463 return NULL_TREE;
5466 /* Return a tree representing the offset, in bytes, of the field referenced
5467 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5469 tree
5470 component_ref_field_offset (tree exp)
5472 tree aligned_offset = TREE_OPERAND (exp, 2);
5473 tree field = TREE_OPERAND (exp, 1);
5475 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5476 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5477 value. */
5478 if (aligned_offset)
5480 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5481 sizetype from another type of the same width and signedness. */
5482 if (TREE_TYPE (aligned_offset) != sizetype)
5483 aligned_offset = fold_convert (sizetype, aligned_offset);
5484 return size_binop (MULT_EXPR, aligned_offset,
5485 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5488 /* Otherwise, take the offset from that of the field. Substitute
5489 any PLACEHOLDER_EXPR that we have. */
5490 else
5491 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5494 /* Return 1 if T is an expression that get_inner_reference handles. */
5496 int
5497 handled_component_p (tree t)
5499 switch (TREE_CODE (t))
5501 case BIT_FIELD_REF:
5502 case COMPONENT_REF:
5503 case ARRAY_REF:
5504 case ARRAY_RANGE_REF:
5505 case NON_LVALUE_EXPR:
5506 case VIEW_CONVERT_EXPR:
5507 return 1;
5509 /* ??? Sure they are handled, but get_inner_reference may return
5510 a different PBITSIZE, depending upon whether the expression is
5511 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5512 case NOP_EXPR:
5513 case CONVERT_EXPR:
5514 return (TYPE_MODE (TREE_TYPE (t))
5515 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5517 default:
5518 return 0;
5522 /* Given an rtx VALUE that may contain additions and multiplications, return
5523 an equivalent value that just refers to a register, memory, or constant.
5524 This is done by generating instructions to perform the arithmetic and
5525 returning a pseudo-register containing the value.
5527 The returned value may be a REG, SUBREG, MEM or constant. */
5529 rtx
5530 force_operand (rtx value, rtx target)
5532 rtx op1, op2;
5533 /* Use subtarget as the target for operand 0 of a binary operation. */
5534 rtx subtarget = get_subtarget (target);
5535 enum rtx_code code = GET_CODE (value);
5537 /* Check for subreg applied to an expression produced by loop optimizer. */
5538 if (code == SUBREG
5539 && !REG_P (SUBREG_REG (value))
5540 && !MEM_P (SUBREG_REG (value)))
5542 value = simplify_gen_subreg (GET_MODE (value),
5543 force_reg (GET_MODE (SUBREG_REG (value)),
5544 force_operand (SUBREG_REG (value),
5545 NULL_RTX)),
5546 GET_MODE (SUBREG_REG (value)),
5547 SUBREG_BYTE (value));
5548 code = GET_CODE (value);
5551 /* Check for a PIC address load. */
5552 if ((code == PLUS || code == MINUS)
5553 && XEXP (value, 0) == pic_offset_table_rtx
5554 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5555 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5556 || GET_CODE (XEXP (value, 1)) == CONST))
5558 if (!subtarget)
5559 subtarget = gen_reg_rtx (GET_MODE (value));
5560 emit_move_insn (subtarget, value);
5561 return subtarget;
5564 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5566 if (!target)
5567 target = gen_reg_rtx (GET_MODE (value));
5568 convert_move (target, force_operand (XEXP (value, 0), NULL),
5569 code == ZERO_EXTEND);
5570 return target;
5573 if (ARITHMETIC_P (value))
5575 op2 = XEXP (value, 1);
5576 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5577 subtarget = 0;
5578 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5580 code = PLUS;
5581 op2 = negate_rtx (GET_MODE (value), op2);
5584 /* Check for an addition with OP2 a constant integer and our first
5585 operand a PLUS of a virtual register and something else. In that
5586 case, we want to emit the sum of the virtual register and the
5587 constant first and then add the other value. This allows virtual
5588 register instantiation to simply modify the constant rather than
5589 creating another one around this addition. */
5590 if (code == PLUS && GET_CODE (op2) == CONST_INT
5591 && GET_CODE (XEXP (value, 0)) == PLUS
5592 && REG_P (XEXP (XEXP (value, 0), 0))
5593 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5594 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5596 rtx temp = expand_simple_binop (GET_MODE (value), code,
5597 XEXP (XEXP (value, 0), 0), op2,
5598 subtarget, 0, OPTAB_LIB_WIDEN);
5599 return expand_simple_binop (GET_MODE (value), code, temp,
5600 force_operand (XEXP (XEXP (value,
5601 0), 1), 0),
5602 target, 0, OPTAB_LIB_WIDEN);
5605 op1 = force_operand (XEXP (value, 0), subtarget);
5606 op2 = force_operand (op2, NULL_RTX);
5607 switch (code)
5609 case MULT:
5610 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5611 case DIV:
5612 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5613 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5614 target, 1, OPTAB_LIB_WIDEN);
5615 else
5616 return expand_divmod (0,
5617 FLOAT_MODE_P (GET_MODE (value))
5618 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5619 GET_MODE (value), op1, op2, target, 0);
5620 break;
5621 case MOD:
5622 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5623 target, 0);
5624 break;
5625 case UDIV:
5626 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5627 target, 1);
5628 break;
5629 case UMOD:
5630 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5631 target, 1);
5632 break;
5633 case ASHIFTRT:
5634 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5635 target, 0, OPTAB_LIB_WIDEN);
5636 break;
5637 default:
5638 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5639 target, 1, OPTAB_LIB_WIDEN);
5642 if (UNARY_P (value))
5644 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5645 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5648 #ifdef INSN_SCHEDULING
5649 /* On machines that have insn scheduling, we want all memory references to be
5650 explicit, so we need to deal with such paradoxical SUBREGs. */
5651 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5652 && (GET_MODE_SIZE (GET_MODE (value))
5653 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5654 value
5655 = simplify_gen_subreg (GET_MODE (value),
5656 force_reg (GET_MODE (SUBREG_REG (value)),
5657 force_operand (SUBREG_REG (value),
5658 NULL_RTX)),
5659 GET_MODE (SUBREG_REG (value)),
5660 SUBREG_BYTE (value));
5661 #endif
5663 return value;
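/* A minimal usage sketch (hypothetical, not part of expr.c): legitimize a
   computed address such as (plus (reg) (const_int 8)) before using it in a
   memory reference.  force_operand emits whatever arithmetic is needed and
   hands back a REG, SUBREG, MEM or constant.  */

static rtx
example_load_through_computed_address (rtx base, enum machine_mode mode)
{
  rtx addr = force_operand (gen_rtx_PLUS (Pmode, base, GEN_INT (8)), NULL_RTX);
  rtx dest = gen_reg_rtx (mode);

  emit_move_insn (dest, gen_rtx_MEM (mode, memory_address (mode, addr)));
  return dest;
}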
5666 /* Subroutine of expand_expr: return nonzero iff there is no way that
5667 EXP can reference X, which is being modified. TOP_P is nonzero if this
5668 call is going to be used to determine whether we need a temporary
5669 for EXP, as opposed to a recursive call to this function.
5671 It is always safe for this routine to return zero since it merely
5672 searches for optimization opportunities. */
5675 safe_from_p (rtx x, tree exp, int top_p)
5677 rtx exp_rtl = 0;
5678 int i, nops;
5680 if (x == 0
5681 /* If EXP has varying size, we MUST use a target since we currently
5682 have no way of allocating temporaries of variable size
5683 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5684 So we assume here that something at a higher level has prevented a
5685 clash. This is somewhat bogus, but the best we can do. Only
5686 do this when X is BLKmode and when we are at the top level. */
5687 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5688 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5689 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5690 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5691 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5692 != INTEGER_CST)
5693 && GET_MODE (x) == BLKmode)
5694 /* If X is in the outgoing argument area, it is always safe. */
5695 || (MEM_P (x)
5696 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5697 || (GET_CODE (XEXP (x, 0)) == PLUS
5698 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5699 return 1;
5701 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5702 find the underlying pseudo. */
5703 if (GET_CODE (x) == SUBREG)
5705 x = SUBREG_REG (x);
5706 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5707 return 0;
5710 /* Now look at our tree code and possibly recurse. */
5711 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5713 case tcc_declaration:
5714 exp_rtl = DECL_RTL_IF_SET (exp);
5715 break;
5717 case tcc_constant:
5718 return 1;
5720 case tcc_exceptional:
5721 if (TREE_CODE (exp) == TREE_LIST)
5723 while (1)
5725 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5726 return 0;
5727 exp = TREE_CHAIN (exp);
5728 if (!exp)
5729 return 1;
5730 if (TREE_CODE (exp) != TREE_LIST)
5731 return safe_from_p (x, exp, 0);
5734 else if (TREE_CODE (exp) == ERROR_MARK)
5735 return 1; /* An already-visited SAVE_EXPR? */
5736 else
5737 return 0;
5739 case tcc_statement:
5740 /* The only case we look at here is the DECL_INITIAL inside a
5741 DECL_EXPR. */
5742 return (TREE_CODE (exp) != DECL_EXPR
5743 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5744 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5745 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5747 case tcc_binary:
5748 case tcc_comparison:
5749 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5750 return 0;
5751 /* Fall through. */
5753 case tcc_unary:
5754 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5756 case tcc_expression:
5757 case tcc_reference:
5758 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5759 the expression. If it is set, we conflict iff we are that rtx or
5760 both are in memory. Otherwise, we check all operands of the
5761 expression recursively. */
5763 switch (TREE_CODE (exp))
5765 case ADDR_EXPR:
5766 /* If the operand is static or we are static, we can't conflict.
5767 Likewise if we don't conflict with the operand at all. */
5768 if (staticp (TREE_OPERAND (exp, 0))
5769 || TREE_STATIC (exp)
5770 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5771 return 1;
5773 /* Otherwise, the only way this can conflict is if we are taking
5774 the address of a DECL whose address is part of X, which is
5775 very rare. */
5776 exp = TREE_OPERAND (exp, 0);
5777 if (DECL_P (exp))
5779 if (!DECL_RTL_SET_P (exp)
5780 || !MEM_P (DECL_RTL (exp)))
5781 return 0;
5782 else
5783 exp_rtl = XEXP (DECL_RTL (exp), 0);
5785 break;
5787 case MISALIGNED_INDIRECT_REF:
5788 case ALIGN_INDIRECT_REF:
5789 case INDIRECT_REF:
5790 if (MEM_P (x)
5791 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5792 get_alias_set (exp)))
5793 return 0;
5794 break;
5796 case CALL_EXPR:
5797 /* Assume that the call will clobber all hard registers and
5798 all of memory. */
5799 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5800 || MEM_P (x))
5801 return 0;
5802 break;
5804 case WITH_CLEANUP_EXPR:
5805 case CLEANUP_POINT_EXPR:
5806 /* Lowered by gimplify.c. */
5807 gcc_unreachable ();
5809 case SAVE_EXPR:
5810 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5812 default:
5813 break;
5816 /* If we have an rtx, we do not need to scan our operands. */
5817 if (exp_rtl)
5818 break;
5820 nops = first_rtl_op (TREE_CODE (exp));
5821 for (i = 0; i < nops; i++)
5822 if (TREE_OPERAND (exp, i) != 0
5823 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5824 return 0;
5826 /* If this is a language-specific tree code, it may require
5827 special handling. */
5828 if ((unsigned int) TREE_CODE (exp)
5829 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5830 && !lang_hooks.safe_from_p (x, exp))
5831 return 0;
5832 break;
5834 case tcc_type:
5835 /* Should never get a type here. */
5836 gcc_unreachable ();
5839 /* If we have an rtl, find any enclosed object. Then see if we conflict
5840 with it. */
5841 if (exp_rtl)
5843 if (GET_CODE (exp_rtl) == SUBREG)
5845 exp_rtl = SUBREG_REG (exp_rtl);
5846 if (REG_P (exp_rtl)
5847 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5848 return 0;
5851 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5852 are memory and they conflict. */
5853 return ! (rtx_equal_p (x, exp_rtl)
5854 || (MEM_P (x) && MEM_P (exp_rtl)
5855 && true_dependence (exp_rtl, VOIDmode, x,
5856 rtx_addr_varies_p)));
5859 /* If we reach here, it is safe. */
5860 return 1;
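/* A minimal usage sketch (hypothetical, not part of expr.c): the typical
   caller simply drops a suggested TARGET when the expression might refer to
   it and falls back to a fresh pseudo.  Since returning zero above is always
   safe, the worst that can happen is a missed reuse of TARGET.  */

static rtx
example_pick_result_target (rtx target, tree exp, enum machine_mode mode)
{
  if (target == 0 || ! safe_from_p (target, exp, 1))
    target = gen_reg_rtx (mode);
  return target;
}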
5864 /* Return the highest power of two that EXP is known to be a multiple of.
5865 This is used in updating alignment of MEMs in array references. */
5867 static unsigned HOST_WIDE_INT
5868 highest_pow2_factor (tree exp)
5870 unsigned HOST_WIDE_INT c0, c1;
5872 switch (TREE_CODE (exp))
5874 case INTEGER_CST:
5875 /* We can find the lowest bit that's a one. If the low
5876 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5877 We need to handle this case since we can find it in a COND_EXPR,
5878 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5879 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5880 later ICE. */
5881 if (TREE_CONSTANT_OVERFLOW (exp))
5882 return BIGGEST_ALIGNMENT;
5883 else
5885 /* Note: tree_low_cst is intentionally not used here,
5886 since we don't care about the upper bits. */
5887 c0 = TREE_INT_CST_LOW (exp);
5888 c0 &= -c0;
5889 return c0 ? c0 : BIGGEST_ALIGNMENT;
5891 break;
5893 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5894 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5895 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5896 return MIN (c0, c1);
5898 case MULT_EXPR:
5899 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5900 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5901 return c0 * c1;
5903 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5904 case CEIL_DIV_EXPR:
5905 if (integer_pow2p (TREE_OPERAND (exp, 1))
5906 && host_integerp (TREE_OPERAND (exp, 1), 1))
5908 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5909 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5910 return MAX (1, c0 / c1);
5912 break;
5914 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5915 case SAVE_EXPR:
5916 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5918 case COMPOUND_EXPR:
5919 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5921 case COND_EXPR:
5922 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5923 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5924 return MIN (c0, c1);
5926 default:
5927 break;
5930 return 1;
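/* A worked example (illustration only): for an offset expression equivalent
   to i * 12 + 4, the MULT_EXPR contributes 1 * 4 == 4 (4 is the largest
   power of two dividing 12), the constant contributes 4, and the PLUS_EXPR
   takes MIN (4, 4) == 4, so a MEM at that offset may be treated as 4-byte
   aligned.  A hypothetical helper converting the factor to bits:  */

static unsigned HOST_WIDE_INT
example_offset_alignment_in_bits (tree offset)
{
  /* The factor is a byte count; scale it for use with set_mem_align.  */
  return highest_pow2_factor (offset) * BITS_PER_UNIT;
}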
5933 /* Similar, except that the alignment requirements of TARGET are
5934 taken into account. Assume it is at least as aligned as its
5935 type, unless it is a COMPONENT_REF in which case the layout of
5936 the structure gives the alignment. */
5938 static unsigned HOST_WIDE_INT
5939 highest_pow2_factor_for_target (tree target, tree exp)
5941 unsigned HOST_WIDE_INT target_align, factor;
5943 factor = highest_pow2_factor (exp);
5944 if (TREE_CODE (target) == COMPONENT_REF)
5945 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5946 else
5947 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5948 return MAX (factor, target_align);
5951 /* Expands variable VAR. */
5953 void
5954 expand_var (tree var)
5956 if (DECL_EXTERNAL (var))
5957 return;
5959 if (TREE_STATIC (var))
5960 /* If this is an inlined copy of a static local variable,
5961 look up the original decl. */
5962 var = DECL_ORIGIN (var);
5964 if (TREE_STATIC (var)
5965 ? !TREE_ASM_WRITTEN (var)
5966 : !DECL_RTL_SET_P (var))
5968 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5969 /* Should be ignored. */;
5970 else if (lang_hooks.expand_decl (var))
5971 /* OK. */;
5972 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5973 expand_decl (var);
5974 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5975 rest_of_decl_compilation (var, 0, 0);
5976 else
5977 /* No expansion needed. */
5978 gcc_assert (TREE_CODE (var) == TYPE_DECL
5979 || TREE_CODE (var) == CONST_DECL
5980 || TREE_CODE (var) == FUNCTION_DECL
5981 || TREE_CODE (var) == LABEL_DECL);
5985 /* Subroutine of expand_expr. Expand the two operands of a binary
5986 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5987 The value may be stored in TARGET if TARGET is nonzero. The
5988 MODIFIER argument is as documented by expand_expr. */
5990 static void
5991 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5992 enum expand_modifier modifier)
5994 if (! safe_from_p (target, exp1, 1))
5995 target = 0;
5996 if (operand_equal_p (exp0, exp1, 0))
5998 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5999 *op1 = copy_rtx (*op0);
6001 else
6003 /* If we need to preserve evaluation order, copy exp0 into its own
6004 temporary variable so that it can't be clobbered by exp1. */
6005 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6006 exp0 = save_expr (exp0);
6007 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6008 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
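/* A minimal usage sketch (hypothetical, not part of expr.c): lowering a
   two-operand tree node by expanding both operands here and handing the
   resulting RTL to expand_simple_binop, much as the binary-operator cases
   of expand_expr_real_1 below do.  */

static rtx
example_expand_binary_plus (tree exp, rtx target)
{
  rtx op0, op1;

  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   target, &op0, &op1, EXPAND_NORMAL);
  return expand_simple_binop (TYPE_MODE (TREE_TYPE (exp)), PLUS,
                              op0, op1, target, 0, OPTAB_LIB_WIDEN);
}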
6013 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6014 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6016 static rtx
6017 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6018 enum expand_modifier modifier)
6020 rtx result, subtarget;
6021 tree inner, offset;
6022 HOST_WIDE_INT bitsize, bitpos;
6023 int volatilep, unsignedp;
6024 enum machine_mode mode1;
6026 /* If we are taking the address of a constant and are at the top level,
6027 we have to use output_constant_def since we can't call force_const_mem
6028 at top level. */
6029 /* ??? This should be considered a front-end bug. We should not be
6030 generating ADDR_EXPR of something that isn't an LVALUE. The only
6031 exception here is STRING_CST. */
6032 if (TREE_CODE (exp) == CONSTRUCTOR
6033 || CONSTANT_CLASS_P (exp))
6034 return XEXP (output_constant_def (exp, 0), 0);
6036 /* Everything must be something allowed by is_gimple_addressable. */
6037 switch (TREE_CODE (exp))
6039 case INDIRECT_REF:
6040 /* This case will happen via recursion for &a->b. */
6041 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6043 case CONST_DECL:
6044 /* Recurse and make the output_constant_def clause above handle this. */
6045 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6046 tmode, modifier);
6048 case REALPART_EXPR:
6049 /* The real part of the complex number is always first, therefore
6050 the address is the same as the address of the parent object. */
6051 offset = 0;
6052 bitpos = 0;
6053 inner = TREE_OPERAND (exp, 0);
6054 break;
6056 case IMAGPART_EXPR:
6057 /* The imaginary part of the complex number is always second.
6058 The expression is therefore always offset by the size of the
6059 scalar type. */
6060 offset = 0;
6061 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6062 inner = TREE_OPERAND (exp, 0);
6063 break;
6065 default:
6066 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6067 expand_expr, as that can have various side effects; LABEL_DECLs, for
6068 example, may not have their DECL_RTL set yet. Assume language
6069 specific tree nodes can be expanded in some interesting way. */
6070 if (DECL_P (exp)
6071 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6073 result = expand_expr (exp, target, tmode,
6074 modifier == EXPAND_INITIALIZER
6075 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6077 /* If the DECL isn't in memory, then the DECL wasn't properly
6078 marked TREE_ADDRESSABLE, which will be either a front-end
6079 or a tree optimizer bug. */
6080 gcc_assert (GET_CODE (result) == MEM);
6081 result = XEXP (result, 0);
6083 /* ??? Is this needed anymore? */
6084 if (DECL_P (exp) && ! TREE_USED (exp))
6086 assemble_external (exp);
6087 TREE_USED (exp) = 1;
6090 if (modifier != EXPAND_INITIALIZER
6091 && modifier != EXPAND_CONST_ADDRESS)
6092 result = force_operand (result, target);
6093 return result;
6096 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6097 &mode1, &unsignedp, &volatilep);
6098 break;
6101 /* We must have made progress. */
6102 gcc_assert (inner != exp);
6104 subtarget = offset || bitpos ? NULL_RTX : target;
6105 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6107 if (offset)
6109 rtx tmp;
6111 if (modifier != EXPAND_NORMAL)
6112 result = force_operand (result, NULL);
6113 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6115 result = convert_memory_address (tmode, result);
6116 tmp = convert_memory_address (tmode, tmp);
6118 if (modifier == EXPAND_SUM)
6119 result = gen_rtx_PLUS (tmode, result, tmp);
6120 else
6122 subtarget = bitpos ? NULL_RTX : target;
6123 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6124 1, OPTAB_LIB_WIDEN);
6128 if (bitpos)
6130 /* Someone beforehand should have rejected taking the address
6131 of such an object. */
6132 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6134 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6135 if (modifier < EXPAND_SUM)
6136 result = force_operand (result, target);
6139 return result;
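/* A minimal sketch (hypothetical, not part of expr.c): for &s.f where field
   f lives at byte offset 4, the recursion above yields the address of `s'
   and then applies plus_constant (result, 4); a variable offset instead
   goes through gen_rtx_PLUS or expand_simple_binop as shown.  */

static rtx
example_expand_field_address (tree comp_ref)
{
  return expand_expr_addr_expr_1 (comp_ref, NULL_RTX, Pmode, EXPAND_NORMAL);
}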
6142 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6143 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6145 static rtx
6146 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6147 enum expand_modifier modifier)
6149 enum machine_mode rmode;
6150 rtx result;
6152 /* Target mode of VOIDmode says "whatever's natural". */
6153 if (tmode == VOIDmode)
6154 tmode = TYPE_MODE (TREE_TYPE (exp));
6156 /* We can get called with some Weird Things if the user does silliness
6157 like "(short) &a". In that case, convert_memory_address won't do
6158 the right thing, so ignore the given target mode. */
6159 if (tmode != Pmode && tmode != ptr_mode)
6160 tmode = Pmode;
6162 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6163 tmode, modifier);
6165 /* Despite expand_expr's claims about ignoring TMODE when not
6166 strictly convenient, things break if we don't honor it. Note
6167 that combined with the above, we only do this for pointer modes. */
6168 rmode = GET_MODE (result);
6169 if (rmode == VOIDmode)
6170 rmode = tmode;
6171 if (rmode != tmode)
6172 result = convert_memory_address (tmode, result);
6174 return result;
6178 /* expand_expr: generate code for computing expression EXP.
6179 An rtx for the computed value is returned. The value is never null.
6180 In the case of a void EXP, const0_rtx is returned.
6182 The value may be stored in TARGET if TARGET is nonzero.
6183 TARGET is just a suggestion; callers must assume that
6184 the rtx returned may not be the same as TARGET.
6186 If TARGET is CONST0_RTX, it means that the value will be ignored.
6188 If TMODE is not VOIDmode, it suggests generating the
6189 result in mode TMODE. But this is done only when convenient.
6190 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6191 TMODE is just a suggestion; callers must assume that
6192 the rtx returned may not have mode TMODE.
6194 Note that TARGET may have neither TMODE nor MODE. In that case, it
6195 probably will not be used.
6197 If MODIFIER is EXPAND_SUM then when EXP is an addition
6198 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6199 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6200 products as above, or REG or MEM, or constant.
6201 Ordinarily in such cases we would output mul or add instructions
6202 and then return a pseudo reg containing the sum.
6204 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6205 it also marks a label as absolutely required (it can't be dead).
6206 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6207 This is used for outputting expressions used in initializers.
6209 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6210 with a constant address even if that address is not normally legitimate.
6211 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6213 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6214 a call parameter. Such targets require special care as we haven't yet
6215 marked TARGET so that it's safe from being trashed by libcalls. We
6216 don't want to use TARGET for anything but the final result;
6217 intermediate values must go elsewhere. Additionally, calls to
6218 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6220 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6221 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6222 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6223 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6224 recursively. */
6226 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6227 enum expand_modifier, rtx *);
6230 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6231 enum expand_modifier modifier, rtx *alt_rtl)
6233 int rn = -1;
6234 rtx ret, last = NULL;
6236 /* Handle ERROR_MARK before anybody tries to access its type. */
6237 if (TREE_CODE (exp) == ERROR_MARK
6238 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6240 ret = CONST0_RTX (tmode);
6241 return ret ? ret : const0_rtx;
6244 if (flag_non_call_exceptions)
6246 rn = lookup_stmt_eh_region (exp);
6247 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6248 if (rn >= 0)
6249 last = get_last_insn ();
6252 /* If this is an expression of some kind and it has an associated line
6253 number, then emit the line number before expanding the expression.
6255 We need to save and restore the file and line information so that
6256 errors discovered during expansion are emitted with the right
6257 information. It would be better if the diagnostic routines
6258 used the file/line information embedded in the tree nodes rather
6259 than globals. */
6260 if (cfun && EXPR_HAS_LOCATION (exp))
6262 location_t saved_location = input_location;
6263 input_location = EXPR_LOCATION (exp);
6264 emit_line_note (input_location);
6266 /* Record where the insns produced belong. */
6267 record_block_change (TREE_BLOCK (exp));
6269 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6271 input_location = saved_location;
6273 else
6275 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6278 /* If using non-call exceptions, mark all insns that may trap.
6279 expand_call() will mark CALL_INSNs before we get to this code,
6280 but it doesn't handle libcalls, and these may trap. */
6281 if (rn >= 0)
6283 rtx insn;
6284 for (insn = next_real_insn (last); insn;
6285 insn = next_real_insn (insn))
6287 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6288 /* If we want exceptions for non-call insns, any
6289 may_trap_p instruction may throw. */
6290 && GET_CODE (PATTERN (insn)) != CLOBBER
6291 && GET_CODE (PATTERN (insn)) != USE
6292 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6294 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6295 REG_NOTES (insn));
6300 return ret;
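/* A minimal usage sketch (hypothetical, not part of expr.c): most callers go
   through the expand_expr wrapper (in expr.h), which forwards here with a
   NULL ALT_RTL.  A statement expander typically evaluates an rvalue with
   EXPAND_NORMAL and copies it into a known destination only if the returned
   rtx differs from it.  */

static void
example_expand_rhs_into (tree rhs, rtx dest)
{
  rtx val = expand_expr (rhs, dest, GET_MODE (dest), EXPAND_NORMAL);

  if (val != dest)
    emit_move_insn (dest, val);
}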
6303 static rtx
6304 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6305 enum expand_modifier modifier, rtx *alt_rtl)
6307 rtx op0, op1, temp;
6308 tree type = TREE_TYPE (exp);
6309 int unsignedp;
6310 enum machine_mode mode;
6311 enum tree_code code = TREE_CODE (exp);
6312 optab this_optab;
6313 rtx subtarget, original_target;
6314 int ignore;
6315 tree context;
6316 bool reduce_bit_field = false;
6317 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6318 ? reduce_to_bit_field_precision ((expr), \
6319 target, \
6320 type) \
6321 : (expr))
6323 mode = TYPE_MODE (type);
6324 unsignedp = TYPE_UNSIGNED (type);
6325 if (lang_hooks.reduce_bit_field_operations
6326 && TREE_CODE (type) == INTEGER_TYPE
6327 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6329 /* An operation in what may be a bit-field type needs the
6330 result to be reduced to the precision of the bit-field type,
6331 which is narrower than that of the type's mode. */
6332 reduce_bit_field = true;
6333 if (modifier == EXPAND_STACK_PARM)
6334 target = 0;
6337 /* Use subtarget as the target for operand 0 of a binary operation. */
6338 subtarget = get_subtarget (target);
6339 original_target = target;
6340 ignore = (target == const0_rtx
6341 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6342 || code == CONVERT_EXPR || code == COND_EXPR
6343 || code == VIEW_CONVERT_EXPR)
6344 && TREE_CODE (type) == VOID_TYPE));
6346 /* If we are going to ignore this result, we need only do something
6347 if there is a side-effect somewhere in the expression. If there
6348 is, short-circuit the most common cases here. Note that we must
6349 not call expand_expr with anything but const0_rtx in case this
6350 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6352 if (ignore)
6354 if (! TREE_SIDE_EFFECTS (exp))
6355 return const0_rtx;
6357 /* Ensure we reference a volatile object even if value is ignored, but
6358 don't do this if all we are doing is taking its address. */
6359 if (TREE_THIS_VOLATILE (exp)
6360 && TREE_CODE (exp) != FUNCTION_DECL
6361 && mode != VOIDmode && mode != BLKmode
6362 && modifier != EXPAND_CONST_ADDRESS)
6364 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6365 if (MEM_P (temp))
6366 temp = copy_to_reg (temp);
6367 return const0_rtx;
6370 if (TREE_CODE_CLASS (code) == tcc_unary
6371 || code == COMPONENT_REF || code == INDIRECT_REF)
6372 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6373 modifier);
6375 else if (TREE_CODE_CLASS (code) == tcc_binary
6376 || TREE_CODE_CLASS (code) == tcc_comparison
6377 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6379 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6380 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6381 return const0_rtx;
6383 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6384 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6385 /* If the second operand has no side effects, just evaluate
6386 the first. */
6387 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6388 modifier);
6389 else if (code == BIT_FIELD_REF)
6391 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6392 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6393 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6394 return const0_rtx;
6397 target = 0;
6400 /* If we will do cse, generate all results into pseudo registers
6401 since 1) that allows cse to find more things
6402 and 2) otherwise cse could produce an insn the machine
6403 cannot support. An exception is a CONSTRUCTOR into a multi-word
6404 MEM: that's much more likely to be most efficient into the MEM.
6405 Another is a CALL_EXPR which must return in memory. */
6407 if (! cse_not_expected && mode != BLKmode && target
6408 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6409 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6410 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6411 target = 0;
6413 switch (code)
6415 case LABEL_DECL:
6417 tree function = decl_function_context (exp);
6419 temp = label_rtx (exp);
6420 temp = gen_rtx_LABEL_REF (Pmode, temp);
6422 if (function != current_function_decl
6423 && function != 0)
6424 LABEL_REF_NONLOCAL_P (temp) = 1;
6426 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6427 return temp;
6430 case SSA_NAME:
6431 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6432 NULL);
6434 case PARM_DECL:
6435 case VAR_DECL:
6436 /* If a static var's type was incomplete when the decl was written,
6437 but the type is complete now, lay out the decl now. */
6438 if (DECL_SIZE (exp) == 0
6439 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6440 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6441 layout_decl (exp, 0);
6443 /* ... fall through ... */
6445 case FUNCTION_DECL:
6446 case RESULT_DECL:
6447 gcc_assert (DECL_RTL (exp));
6449 /* Ensure the variable is marked as used even if it doesn't go through
6450 a parser. If it hasn't been used yet, write out an external
6451 definition. */
6452 if (! TREE_USED (exp))
6454 assemble_external (exp);
6455 TREE_USED (exp) = 1;
6458 /* Show we haven't gotten RTL for this yet. */
6459 temp = 0;
6461 /* Variables inherited from containing functions should have
6462 been lowered by this point. */
6463 context = decl_function_context (exp);
6464 gcc_assert (!context
6465 || context == current_function_decl
6466 || TREE_STATIC (exp)
6467 /* ??? C++ creates functions that are not TREE_STATIC. */
6468 || TREE_CODE (exp) == FUNCTION_DECL);
6470 /* This is the case of an array whose size is to be determined
6471 from its initializer, while the initializer is still being parsed.
6472 See expand_decl. */
6474 if (MEM_P (DECL_RTL (exp))
6475 && REG_P (XEXP (DECL_RTL (exp), 0)))
6476 temp = validize_mem (DECL_RTL (exp));
6478 /* If DECL_RTL is memory, we are in the normal case: if either
6479 the address is not valid, or it is not a register and -fforce-addr
6480 is specified, get the address into a register. */
6482 else if (MEM_P (DECL_RTL (exp))
6483 && modifier != EXPAND_CONST_ADDRESS
6484 && modifier != EXPAND_SUM
6485 && modifier != EXPAND_INITIALIZER
6486 && (! memory_address_p (DECL_MODE (exp),
6487 XEXP (DECL_RTL (exp), 0))
6488 || (flag_force_addr
6489 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6491 if (alt_rtl)
6492 *alt_rtl = DECL_RTL (exp);
6493 temp = replace_equiv_address (DECL_RTL (exp),
6494 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6497 /* If we got something, return it. But first, set the alignment
6498 if the address is a register. */
6499 if (temp != 0)
6501 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6502 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6504 return temp;
6507 /* If the mode of DECL_RTL does not match that of the decl, it
6508 must be a promoted value. We return a SUBREG of the wanted mode,
6509 but mark it so that we know that it was already extended. */
6511 if (REG_P (DECL_RTL (exp))
6512 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6514 enum machine_mode pmode;
6516 /* Get the signedness used for this variable. Ensure we get the
6517 same mode we got when the variable was declared. */
6518 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6519 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6520 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6522 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6523 SUBREG_PROMOTED_VAR_P (temp) = 1;
6524 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6525 return temp;
6528 return DECL_RTL (exp);
6530 case INTEGER_CST:
6531 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6532 TREE_INT_CST_HIGH (exp), mode);
6534 /* ??? If overflow is set, fold will have done an incomplete job,
6535 which can result in (plus xx (const_int 0)), which can get
6536 simplified by validate_replace_rtx during virtual register
6537 instantiation, which can result in unrecognizable insns.
6538 Avoid this by forcing all overflows into registers. */
6539 if (TREE_CONSTANT_OVERFLOW (exp)
6540 && modifier != EXPAND_INITIALIZER)
6541 temp = force_reg (mode, temp);
6543 return temp;
6545 case VECTOR_CST:
6546 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6547 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6548 return const_vector_from_tree (exp);
6549 else
6550 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6551 TREE_VECTOR_CST_ELTS (exp)),
6552 ignore ? const0_rtx : target, tmode, modifier);
6554 case CONST_DECL:
6555 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6557 case REAL_CST:
6558 /* If optimized, generate immediate CONST_DOUBLE
6559 which will be turned into memory by reload if necessary.
6561 We used to force a register so that loop.c could see it. But
6562 this does not allow gen_* patterns to perform optimizations with
6563 the constants. It also produces two insns in cases like "x = 1.0;".
6564 On most machines, floating-point constants are not permitted in
6565 many insns, so we'd end up copying it to a register in any case.
6567 Now, we do the copying in expand_binop, if appropriate. */
6568 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6569 TYPE_MODE (TREE_TYPE (exp)));
6571 case COMPLEX_CST:
6572 /* Handle evaluating a complex constant in a CONCAT target. */
6573 if (original_target && GET_CODE (original_target) == CONCAT)
6575 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6576 rtx rtarg, itarg;
6578 rtarg = XEXP (original_target, 0);
6579 itarg = XEXP (original_target, 1);
6581 /* Move the real and imaginary parts separately. */
6582 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6583 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6585 if (op0 != rtarg)
6586 emit_move_insn (rtarg, op0);
6587 if (op1 != itarg)
6588 emit_move_insn (itarg, op1);
6590 return original_target;
6593 /* ... fall through ... */
6595 case STRING_CST:
6596 temp = output_constant_def (exp, 1);
6598 /* temp contains a constant address.
6599 On RISC machines where a constant address isn't valid,
6600 make some insns to get that address into a register. */
6601 if (modifier != EXPAND_CONST_ADDRESS
6602 && modifier != EXPAND_INITIALIZER
6603 && modifier != EXPAND_SUM
6604 && (! memory_address_p (mode, XEXP (temp, 0))
6605 || flag_force_addr))
6606 return replace_equiv_address (temp,
6607 copy_rtx (XEXP (temp, 0)));
6608 return temp;
6610 case SAVE_EXPR:
6612 tree val = TREE_OPERAND (exp, 0);
6613 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6615 if (!SAVE_EXPR_RESOLVED_P (exp))
6617 /* We can indeed still hit this case, typically via builtin
6618 expanders calling save_expr immediately before expanding
6619 something. Assume this means that we only have to deal
6620 with non-BLKmode values. */
6621 gcc_assert (GET_MODE (ret) != BLKmode);
6623 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6624 DECL_ARTIFICIAL (val) = 1;
6625 DECL_IGNORED_P (val) = 1;
6626 TREE_OPERAND (exp, 0) = val;
6627 SAVE_EXPR_RESOLVED_P (exp) = 1;
6629 if (!CONSTANT_P (ret))
6630 ret = copy_to_reg (ret);
6631 SET_DECL_RTL (val, ret);
6634 return ret;
6637 case GOTO_EXPR:
6638 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6639 expand_goto (TREE_OPERAND (exp, 0));
6640 else
6641 expand_computed_goto (TREE_OPERAND (exp, 0));
6642 return const0_rtx;
6644 case CONSTRUCTOR:
6645 /* If we don't need the result, just ensure we evaluate any
6646 subexpressions. */
6647 if (ignore)
6649 tree elt;
6651 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6652 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6654 return const0_rtx;
6657 /* All elts simple constants => refer to a constant in memory. But
6658 if this is a non-BLKmode mode, let it store a field at a time
6659 since that should make a CONST_INT or CONST_DOUBLE when we
6660 fold. Likewise, if we have a target we can use, it is best to
6661 store directly into the target unless the type is large enough
6662 that memcpy will be used. If we are making an initializer and
6663 all operands are constant, put it in memory as well.
6665 FIXME: Avoid trying to fill vector constructors piece-meal.
6666 Output them with output_constant_def below unless we're sure
6667 they're zeros. This should go away when vector initializers
6668 are treated like VECTOR_CST instead of arrays.
6670 else if ((TREE_STATIC (exp)
6671 && ((mode == BLKmode
6672 && ! (target != 0 && safe_from_p (target, exp, 1)))
6673 || TREE_ADDRESSABLE (exp)
6674 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6675 && (! MOVE_BY_PIECES_P
6676 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6677 TYPE_ALIGN (type)))
6678 && ! mostly_zeros_p (exp))))
6679 || ((modifier == EXPAND_INITIALIZER
6680 || modifier == EXPAND_CONST_ADDRESS)
6681 && TREE_CONSTANT (exp)))
6683 rtx constructor = output_constant_def (exp, 1);
6685 if (modifier != EXPAND_CONST_ADDRESS
6686 && modifier != EXPAND_INITIALIZER
6687 && modifier != EXPAND_SUM)
6688 constructor = validize_mem (constructor);
6690 return constructor;
6692 else
6694 /* Handle calls that pass values in multiple non-contiguous
6695 locations. The Irix 6 ABI has examples of this. */
6696 if (target == 0 || ! safe_from_p (target, exp, 1)
6697 || GET_CODE (target) == PARALLEL
6698 || modifier == EXPAND_STACK_PARM)
6699 target
6700 = assign_temp (build_qualified_type (type,
6701 (TYPE_QUALS (type)
6702 | (TREE_READONLY (exp)
6703 * TYPE_QUAL_CONST))),
6704 0, TREE_ADDRESSABLE (exp), 1);
6706 store_constructor (exp, target, 0, int_expr_size (exp));
6707 return target;
6710 case MISALIGNED_INDIRECT_REF:
6711 case ALIGN_INDIRECT_REF:
6712 case INDIRECT_REF:
6714 tree exp1 = TREE_OPERAND (exp, 0);
6715 tree orig;
6717 if (code == MISALIGNED_INDIRECT_REF
6718 && !targetm.vectorize.misaligned_mem_ok (mode))
6719 abort ();
6721 if (modifier != EXPAND_WRITE)
6723 tree t;
6725 t = fold_read_from_constant_string (exp);
6726 if (t)
6727 return expand_expr (t, target, tmode, modifier);
6730 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6731 op0 = memory_address (mode, op0);
6733 if (code == ALIGN_INDIRECT_REF)
6735 int align = TYPE_ALIGN_UNIT (type);
6736 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6737 op0 = memory_address (mode, op0);
6740 temp = gen_rtx_MEM (mode, op0);
6742 orig = REF_ORIGINAL (exp);
6743 if (!orig)
6744 orig = exp;
6745 set_mem_attributes (temp, orig, 0);
6747 return temp;
6750 case ARRAY_REF:
6753 tree array = TREE_OPERAND (exp, 0);
6754 tree low_bound = array_ref_low_bound (exp);
6755 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6756 HOST_WIDE_INT i;
6758 gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
6760 /* Optimize the special case of a zero lower bound.
6762 We convert the low_bound to sizetype to avoid some problems
6763 with constant folding. (E.g. suppose the lower bound is 1,
6764 and its mode is QI. Without the conversion, (ARRAY
6765 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6766 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6768 if (! integer_zerop (low_bound))
6769 index = size_diffop (index, convert (sizetype, low_bound));
6771 /* Fold an expression like: "foo"[2].
6772 This is not done in fold so it won't happen inside &.
6773 Don't fold if this is for wide characters since it's too
6774 difficult to do correctly and this is a very rare case. */
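/* For instance (illustration only), "foo"[2] reads element 2 of the
   STRING_CST, the character 'o', so fold_read_from_constant_string
   returns an INTEGER_CST and the whole ARRAY_REF expands to
   (const_int 111) on an ASCII target.  */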
6776 if (modifier != EXPAND_CONST_ADDRESS
6777 && modifier != EXPAND_INITIALIZER
6778 && modifier != EXPAND_MEMORY)
6780 tree t = fold_read_from_constant_string (exp);
6782 if (t)
6783 return expand_expr (t, target, tmode, modifier);
6786 /* If this is a constant index into a constant array,
6787 just get the value from the array. Handle both the cases when
6788 we have an explicit constructor and when our operand is a variable
6789 that was declared const. */
6791 if (modifier != EXPAND_CONST_ADDRESS
6792 && modifier != EXPAND_INITIALIZER
6793 && modifier != EXPAND_MEMORY
6794 && TREE_CODE (array) == CONSTRUCTOR
6795 && ! TREE_SIDE_EFFECTS (array)
6796 && TREE_CODE (index) == INTEGER_CST
6797 && 0 > compare_tree_int (index,
6798 list_length (CONSTRUCTOR_ELTS
6799 (TREE_OPERAND (exp, 0)))))
6801 tree elem;
6803 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6804 i = TREE_INT_CST_LOW (index);
6805 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6808 if (elem)
6809 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6810 modifier);
6813 else if (optimize >= 1
6814 && modifier != EXPAND_CONST_ADDRESS
6815 && modifier != EXPAND_INITIALIZER
6816 && modifier != EXPAND_MEMORY
6817 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6818 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6819 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6820 && targetm.binds_local_p (array))
6822 if (TREE_CODE (index) == INTEGER_CST)
6824 tree init = DECL_INITIAL (array);
6826 if (TREE_CODE (init) == CONSTRUCTOR)
6828 tree elem;
6830 for (elem = CONSTRUCTOR_ELTS (init);
6831 (elem
6832 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6833 elem = TREE_CHAIN (elem))
6836 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6837 return expand_expr (fold (TREE_VALUE (elem)), target,
6838 tmode, modifier);
6840 else if (TREE_CODE (init) == STRING_CST
6841 && 0 > compare_tree_int (index,
6842 TREE_STRING_LENGTH (init)))
6844 tree type = TREE_TYPE (TREE_TYPE (init));
6845 enum machine_mode mode = TYPE_MODE (type);
6847 if (GET_MODE_CLASS (mode) == MODE_INT
6848 && GET_MODE_SIZE (mode) == 1)
6849 return gen_int_mode (TREE_STRING_POINTER (init)
6850 [TREE_INT_CST_LOW (index)], mode);
6855 goto normal_inner_ref;
6857 case COMPONENT_REF:
6858 /* If the operand is a CONSTRUCTOR, we can just extract the
6859 appropriate field if it is present. */
6860 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6862 tree elt;
6864 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6865 elt = TREE_CHAIN (elt))
6866 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6867 /* We can normally use the value of the field in the
6868 CONSTRUCTOR. However, if this is a bitfield in
6869 an integral mode that we can fit in a HOST_WIDE_INT,
6870 we must mask only the number of bits in the bitfield,
6871 since this is done implicitly by the constructor. If
6872 the bitfield does not meet either of those conditions,
6873 we can't do this optimization. */
6874 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6875 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6876 == MODE_INT)
6877 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6878 <= HOST_BITS_PER_WIDE_INT))))
6880 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6881 && modifier == EXPAND_STACK_PARM)
6882 target = 0;
6883 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6884 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6886 HOST_WIDE_INT bitsize
6887 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6888 enum machine_mode imode
6889 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6891 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6893 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6894 op0 = expand_and (imode, op0, op1, target);
6896 else
6898 tree count
6899 = build_int_cst (NULL_TREE,
6900 GET_MODE_BITSIZE (imode) - bitsize);
6902 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6903 target, 0);
6904 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6905 target, 0);
6909 return op0;
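/* Worked example (illustration only): for an unsigned 3-bit bitfield the
   mask built above is (1 << 3) - 1 == 7, so expand_and keeps just the low
   three bits; for a signed field in SImode the two shifts by 32 - 3 == 29
   first move the field to the top of the word and then arithmetic-shift it
   back down, sign-extending the stored value.  */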
6912 goto normal_inner_ref;
6914 case BIT_FIELD_REF:
6915 case ARRAY_RANGE_REF:
6916 normal_inner_ref:
6918 enum machine_mode mode1;
6919 HOST_WIDE_INT bitsize, bitpos;
6920 tree offset;
6921 int volatilep = 0;
6922 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6923 &mode1, &unsignedp, &volatilep);
6924 rtx orig_op0;
6926 /* If we got back the original object, something is wrong. Perhaps
6927 we are evaluating an expression too early. In any event, don't
6928 infinitely recurse. */
6929 gcc_assert (tem != exp);
6931 /* If TEM's type is a union of variable size, pass TARGET to the inner
6932 computation, since it will need a temporary and TARGET is known
6933 to suffice. This occurs in unchecked conversion in Ada. */
6935 orig_op0 = op0
6936 = expand_expr (tem,
6937 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6938 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6939 != INTEGER_CST)
6940 && modifier != EXPAND_STACK_PARM
6941 ? target : NULL_RTX),
6942 VOIDmode,
6943 (modifier == EXPAND_INITIALIZER
6944 || modifier == EXPAND_CONST_ADDRESS
6945 || modifier == EXPAND_STACK_PARM)
6946 ? modifier : EXPAND_NORMAL);
6948 /* If this is a constant, put it into a register if it is a
6949 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6950 if (CONSTANT_P (op0))
6952 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6953 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6954 && offset == 0)
6955 op0 = force_reg (mode, op0);
6956 else
6957 op0 = validize_mem (force_const_mem (mode, op0));
6960 /* Otherwise, if this object is not in memory and we either have an
6961 offset or a BLKmode result, put it there. This case can't occur in
6962 C, but can in Ada if we have unchecked conversion of an expression
6963 from a scalar type to an array or record type or for an
6964 ARRAY_RANGE_REF whose type is BLKmode. */
6965 else if (!MEM_P (op0)
6966 && (offset != 0
6967 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6969 tree nt = build_qualified_type (TREE_TYPE (tem),
6970 (TYPE_QUALS (TREE_TYPE (tem))
6971 | TYPE_QUAL_CONST));
6972 rtx memloc = assign_temp (nt, 1, 1, 1);
6974 emit_move_insn (memloc, op0);
6975 op0 = memloc;
6978 if (offset != 0)
6980 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6981 EXPAND_SUM);
6983 gcc_assert (MEM_P (op0));
6985 #ifdef POINTERS_EXTEND_UNSIGNED
6986 if (GET_MODE (offset_rtx) != Pmode)
6987 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6988 #else
6989 if (GET_MODE (offset_rtx) != ptr_mode)
6990 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6991 #endif
6993 if (GET_MODE (op0) == BLKmode
6994 /* A constant address in OP0 can have VOIDmode, we must
6995 not try to call force_reg in that case. */
6996 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6997 && bitsize != 0
6998 && (bitpos % bitsize) == 0
6999 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7000 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7002 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7003 bitpos = 0;
7006 op0 = offset_address (op0, offset_rtx,
7007 highest_pow2_factor (offset));
7010 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7011 record its alignment as BIGGEST_ALIGNMENT. */
7012 if (MEM_P (op0) && bitpos == 0 && offset != 0
7013 && is_aligning_offset (offset, tem))
7014 set_mem_align (op0, BIGGEST_ALIGNMENT);
7016 /* Don't forget about volatility even if this is a bitfield. */
7017 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7019 if (op0 == orig_op0)
7020 op0 = copy_rtx (op0);
7022 MEM_VOLATILE_P (op0) = 1;
7025 /* The following code doesn't handle CONCAT.
7026 Assume only bitpos == 0 can be used for CONCAT, due to
7027 one-element arrays having the same mode as their element. */
7028 if (GET_CODE (op0) == CONCAT)
7030 gcc_assert (bitpos == 0
7031 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7032 return op0;
7035 /* In cases where an aligned union has an unaligned object
7036 as a field, we might be extracting a BLKmode value from
7037 an integer-mode (e.g., SImode) object. Handle this case
7038 by doing the extract into an object as wide as the field
7039 (which we know to be the width of a basic mode), then
7040 storing into memory, and changing the mode to BLKmode. */
7041 if (mode1 == VOIDmode
7042 || REG_P (op0) || GET_CODE (op0) == SUBREG
7043 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7044 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7045 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7046 && modifier != EXPAND_CONST_ADDRESS
7047 && modifier != EXPAND_INITIALIZER)
7048 /* If the field isn't aligned enough to fetch as a memref,
7049 fetch it as a bit field. */
7050 || (mode1 != BLKmode
7051 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7052 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7053 || (MEM_P (op0)
7054 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7055 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7056 && ((modifier == EXPAND_CONST_ADDRESS
7057 || modifier == EXPAND_INITIALIZER)
7058 ? STRICT_ALIGNMENT
7059 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7060 || (bitpos % BITS_PER_UNIT != 0)))
7061 /* If the type and the field are a constant size and the
7062 size of the type isn't the same size as the bitfield,
7063 we must use bitfield operations. */
7064 || (bitsize >= 0
7065 && TYPE_SIZE (TREE_TYPE (exp))
7066 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7067 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7068 bitsize)))
7070 enum machine_mode ext_mode = mode;
7072 if (ext_mode == BLKmode
7073 && ! (target != 0 && MEM_P (op0)
7074 && MEM_P (target)
7075 && bitpos % BITS_PER_UNIT == 0))
7076 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7078 if (ext_mode == BLKmode)
7080 if (target == 0)
7081 target = assign_temp (type, 0, 1, 1);
7083 if (bitsize == 0)
7084 return target;
7086 /* In this case, BITPOS must start at a byte boundary and
7087 TARGET, if specified, must be a MEM. */
7088 gcc_assert (MEM_P (op0)
7089 && (!target || MEM_P (target))
7090 && !(bitpos % BITS_PER_UNIT));
7092 emit_block_move (target,
7093 adjust_address (op0, VOIDmode,
7094 bitpos / BITS_PER_UNIT),
7095 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7096 / BITS_PER_UNIT),
7097 (modifier == EXPAND_STACK_PARM
7098 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7100 return target;
7103 op0 = validize_mem (op0);
7105 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7106 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7108 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7109 (modifier == EXPAND_STACK_PARM
7110 ? NULL_RTX : target),
7111 ext_mode, ext_mode);
7113 /* If the result is a record type and BITSIZE is narrower than
7114 the mode of OP0, an integral mode, and this is a big endian
7115 machine, we must put the field into the high-order bits. */
7116 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7117 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7118 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7119 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7120 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7121 - bitsize),
7122 op0, 1);
7124 /* If the result type is BLKmode, store the data into a temporary
7125 of the appropriate type, but with the mode corresponding to the
7126 mode for the data we have (op0's mode). It's tempting to make
7127 this a constant type, since we know it's only being stored once,
7128 but that can cause problems if we are taking the address of this
7129 COMPONENT_REF because the MEM of any reference via that address
7130 will have flags corresponding to the type, which will not
7131 necessarily be constant. */
7132 if (mode == BLKmode)
7134 rtx new
7135 = assign_stack_temp_for_type
7136 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7138 emit_move_insn (new, op0);
7139 op0 = copy_rtx (new);
7140 PUT_MODE (op0, BLKmode);
7141 set_mem_attributes (op0, exp, 1);
7144 return op0;
7147 /* If the result is BLKmode, use that to access the object
7148 now as well. */
7149 if (mode == BLKmode)
7150 mode1 = BLKmode;
7152 /* Get a reference to just this component. */
7153 if (modifier == EXPAND_CONST_ADDRESS
7154 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7155 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7156 else
7157 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7159 if (op0 == orig_op0)
7160 op0 = copy_rtx (op0);
7162 set_mem_attributes (op0, exp, 0);
7163 if (REG_P (XEXP (op0, 0)))
7164 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7166 MEM_VOLATILE_P (op0) |= volatilep;
7167 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7168 || modifier == EXPAND_CONST_ADDRESS
7169 || modifier == EXPAND_INITIALIZER)
7170 return op0;
7171 else if (target == 0)
7172 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7174 convert_move (target, op0, unsignedp);
7175 return target;
7178 case OBJ_TYPE_REF:
7179 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7181 case CALL_EXPR:
7182 /* Check for a built-in function. */
7183 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7184 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7185 == FUNCTION_DECL)
7186 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7188 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7189 == BUILT_IN_FRONTEND)
7190 return lang_hooks.expand_expr (exp, original_target,
7191 tmode, modifier,
7192 alt_rtl);
7193 else
7194 return expand_builtin (exp, target, subtarget, tmode, ignore);
7197 return expand_call (exp, target, ignore);
7199 case NON_LVALUE_EXPR:
7200 case NOP_EXPR:
7201 case CONVERT_EXPR:
7202 if (TREE_OPERAND (exp, 0) == error_mark_node)
7203 return const0_rtx;
7205 if (TREE_CODE (type) == UNION_TYPE)
7207 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7209 /* If both input and output are BLKmode, this conversion isn't doing
7210 anything except possibly changing memory attributes. */
7211 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7213 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7214 modifier);
7216 result = copy_rtx (result);
7217 set_mem_attributes (result, exp, 0);
7218 return result;
7221 if (target == 0)
7223 if (TYPE_MODE (type) != BLKmode)
7224 target = gen_reg_rtx (TYPE_MODE (type));
7225 else
7226 target = assign_temp (type, 0, 1, 1);
7229 if (MEM_P (target))
7230 /* Store data into beginning of memory target. */
7231 store_expr (TREE_OPERAND (exp, 0),
7232 adjust_address (target, TYPE_MODE (valtype), 0),
7233 modifier == EXPAND_STACK_PARM);
7235 else
7237 gcc_assert (REG_P (target));
7239 /* Store this field into a union of the proper type. */
7240 store_field (target,
7241 MIN ((int_size_in_bytes (TREE_TYPE
7242 (TREE_OPERAND (exp, 0)))
7243 * BITS_PER_UNIT),
7244 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7245 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7246 type, 0);
7249 /* Return the entire union. */
7250 return target;
7253 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7255 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7256 modifier);
7258 /* If the signedness of the conversion differs and OP0 is
7259 a promoted SUBREG, clear that indication since we now
7260 have to do the proper extension. */
7261 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7262 && GET_CODE (op0) == SUBREG)
7263 SUBREG_PROMOTED_VAR_P (op0) = 0;
7265 return REDUCE_BIT_FIELD (op0);
7268 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7269 op0 = REDUCE_BIT_FIELD (op0);
7270 if (GET_MODE (op0) == mode)
7271 return op0;
7273 /* If OP0 is a constant, just convert it into the proper mode. */
7274 if (CONSTANT_P (op0))
7276 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7277 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7279 if (modifier == EXPAND_INITIALIZER)
7280 return simplify_gen_subreg (mode, op0, inner_mode,
7281 subreg_lowpart_offset (mode,
7282 inner_mode));
7283 else
7284 return convert_modes (mode, inner_mode, op0,
7285 TYPE_UNSIGNED (inner_type));
7288 if (modifier == EXPAND_INITIALIZER)
7289 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7291 if (target == 0)
7292 return
7293 convert_to_mode (mode, op0,
7294 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7295 else
7296 convert_move (target, op0,
7297 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7298 return target;
7300 case VIEW_CONVERT_EXPR:
7301 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7303 /* If the input and output modes are both the same, we are done.
7304 Otherwise, if neither mode is BLKmode and both are integral and within
7305 a word, we can use gen_lowpart. If neither is true, make sure the
7306 operand is in memory and convert the MEM to the new mode. */
7307 if (TYPE_MODE (type) == GET_MODE (op0))
7309 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7310 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7311 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7312 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7313 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7314 op0 = gen_lowpart (TYPE_MODE (type), op0);
7315 else if (!MEM_P (op0))
7317 /* If the operand is not a MEM, force it into memory. Since we
7318 are going to be changing the mode of the MEM, don't call
7319 force_const_mem for constants because we don't allow pool
7320 constants to change mode. */
7321 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7323 gcc_assert (!TREE_ADDRESSABLE (exp));
7325 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7326 target
7327 = assign_stack_temp_for_type
7328 (TYPE_MODE (inner_type),
7329 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7331 emit_move_insn (target, op0);
7332 op0 = target;
7335 /* At this point, OP0 is in the correct mode. If the output type is such
7336 that the operand is known to be aligned, indicate that it is.
7337 Otherwise, we need only be concerned about alignment for non-BLKmode
7338 results. */
7339 if (MEM_P (op0))
7341 op0 = copy_rtx (op0);
7343 if (TYPE_ALIGN_OK (type))
7344 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7345 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7346 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7348 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7349 HOST_WIDE_INT temp_size
7350 = MAX (int_size_in_bytes (inner_type),
7351 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7352 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7353 temp_size, 0, type);
7354 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7356 gcc_assert (!TREE_ADDRESSABLE (exp));
7358 if (GET_MODE (op0) == BLKmode)
7359 emit_block_move (new_with_op0_mode, op0,
7360 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7361 (modifier == EXPAND_STACK_PARM
7362 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7363 else
7364 emit_move_insn (new_with_op0_mode, op0);
7366 op0 = new;
7369 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7372 return op0;
7374 case PLUS_EXPR:
7375 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7376 something else, make sure we add the register to the constant and
7377 then to the other thing. This case can occur during strength
7378 reduction and doing it this way will produce better code if the
7379 frame pointer or argument pointer is eliminated.
7381 fold-const.c will ensure that the constant is always in the inner
7382 PLUS_EXPR, so the only case we need to do anything about is if
7383 sp, ap, or fp is our second argument, in which case we must swap
7384 the innermost first argument and our second argument. */
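      /* For instance (an illustrative sketch), an expression such as
             (PLUS_EXPR (PLUS_EXPR x 4) fp)
         is rewritten by the swap below into
             (PLUS_EXPR (PLUS_EXPR fp 4) x)
         so that the frame pointer and the constant are combined first.  */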
7386 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7387 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7388 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7389 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7390 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7391 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7393 tree t = TREE_OPERAND (exp, 1);
7395 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7396 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7399 /* If the result is to be ptr_mode and we are adding an integer to
7400 something, we might be forming a constant. So try to use
7401 plus_constant. If it produces a sum and we can't accept it,
7402 use force_operand. This allows P = &ARR[const] to generate
7403 efficient code on machines where a SYMBOL_REF is not a valid
7404 address.
7406 If this is an EXPAND_SUM call, always return the sum. */
7407 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7408 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7410 if (modifier == EXPAND_STACK_PARM)
7411 target = 0;
7412 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7413 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7414 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7416 rtx constant_part;
7418 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7419 EXPAND_SUM);
7420 /* Use immed_double_const to ensure that the constant is
7421 truncated according to the mode of OP1, then sign extended
7422 to a HOST_WIDE_INT. Using the constant directly can result
7423 in non-canonical RTL in a 64x32 cross compile. */
7424 constant_part
7425 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7426 (HOST_WIDE_INT) 0,
7427 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7428 op1 = plus_constant (op1, INTVAL (constant_part));
7429 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7430 op1 = force_operand (op1, target);
7431 return REDUCE_BIT_FIELD (op1);
7434 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7435 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7436 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7438 rtx constant_part;
7440 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7441 (modifier == EXPAND_INITIALIZER
7442 ? EXPAND_INITIALIZER : EXPAND_SUM));
7443 if (! CONSTANT_P (op0))
7445 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7446 VOIDmode, modifier);
7447 /* Return a PLUS if modifier says it's OK. */
7448 if (modifier == EXPAND_SUM
7449 || modifier == EXPAND_INITIALIZER)
7450 return simplify_gen_binary (PLUS, mode, op0, op1);
7451 goto binop2;
7453 /* Use immed_double_const to ensure that the constant is
7454 truncated according to the mode of OP0, then sign extended
7455 to a HOST_WIDE_INT. Using the constant directly can result
7456 in non-canonical RTL in a 64x32 cross compile. */
7457 constant_part
7458 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7459 (HOST_WIDE_INT) 0,
7460 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7461 op0 = plus_constant (op0, INTVAL (constant_part));
7462 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7463 op0 = force_operand (op0, target);
7464 return REDUCE_BIT_FIELD (op0);
7468 /* No sense saving up arithmetic to be done
7469 if it's all in the wrong mode to form part of an address.
7470 And force_operand won't know whether to sign-extend or
7471 zero-extend. */
7472 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7473 || mode != ptr_mode)
7475 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7476 subtarget, &op0, &op1, 0);
7477 if (op0 == const0_rtx)
7478 return op1;
7479 if (op1 == const0_rtx)
7480 return op0;
7481 goto binop2;
7484 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7485 subtarget, &op0, &op1, modifier);
7486 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7488 case MINUS_EXPR:
7489 /* For initializers, we are allowed to return a MINUS of two
7490 symbolic constants. Here we handle all cases when both operands
7491 are constant. */
7492 /* Handle difference of two symbolic constants,
7493 for the sake of an initializer. */
7494 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7495 && really_constant_p (TREE_OPERAND (exp, 0))
7496 && really_constant_p (TREE_OPERAND (exp, 1)))
7498 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7499 NULL_RTX, &op0, &op1, modifier);
7501 /* If the last operand is a CONST_INT, use plus_constant of
7502 the negated constant. Else make the MINUS. */
7503 if (GET_CODE (op1) == CONST_INT)
7504 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7505 else
7506 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7509 /* No sense saving up arithmetic to be done
7510 if it's all in the wrong mode to form part of an address.
7511 And force_operand won't know whether to sign-extend or
7512 zero-extend. */
7513 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7514 || mode != ptr_mode)
7515 goto binop;
7517 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7518 subtarget, &op0, &op1, modifier);
7520 /* Convert A - const to A + (-const). */
7521 if (GET_CODE (op1) == CONST_INT)
7523 op1 = negate_rtx (mode, op1);
7524 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7527 goto binop2;
7529 case MULT_EXPR:
7530 /* If first operand is constant, swap them.
7531 Thus the following special case checks need only
7532 check the second operand. */
7533 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7535 tree t1 = TREE_OPERAND (exp, 0);
7536 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7537 TREE_OPERAND (exp, 1) = t1;
7540 /* Attempt to return something suitable for generating an
7541 indexed address, for machines that support that. */
7543 if (modifier == EXPAND_SUM && mode == ptr_mode
7544 && host_integerp (TREE_OPERAND (exp, 1), 0))
7546 tree exp1 = TREE_OPERAND (exp, 1);
7548 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7549 EXPAND_SUM);
7551 if (!REG_P (op0))
7552 op0 = force_operand (op0, NULL_RTX);
7553 if (!REG_P (op0))
7554 op0 = copy_to_mode_reg (mode, op0);
7556 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7557 gen_int_mode (tree_low_cst (exp1, 0),
7558 TYPE_MODE (TREE_TYPE (exp1)))));
7561 if (modifier == EXPAND_STACK_PARM)
7562 target = 0;
7564 /* Check for multiplying things that have been extended
7565 from a narrower type. If this machine supports multiplying
7566 in that narrower type with a result in the desired type,
7567 do it that way, and avoid the explicit type-conversion. */
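      /* A sketch of the kind of source this catches, assuming a 32-bit
         'int', a 64-bit 'long long', and a widening-multiply pattern on
         the target:

             long long
             wmul (int a, int b)
             {
               return (long long) a * (long long) b;
             }

         Both operands are NOP_EXPRs from the narrower 'int', so the
         multiplication can use the widening optab instead of extending
         both operands to the wider mode first.  */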
7568 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7569 && TREE_CODE (type) == INTEGER_TYPE
7570 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7571 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7572 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7573 && int_fits_type_p (TREE_OPERAND (exp, 1),
7574 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7575 /* Don't use a widening multiply if a shift will do. */
7576 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7577 > HOST_BITS_PER_WIDE_INT)
7578 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7580 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7581 && (TYPE_PRECISION (TREE_TYPE
7582 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7583 == TYPE_PRECISION (TREE_TYPE
7584 (TREE_OPERAND
7585 (TREE_OPERAND (exp, 0), 0))))
7586 /* If both operands are extended, they must either both
7587 be zero-extended or both be sign-extended. */
7588 && (TYPE_UNSIGNED (TREE_TYPE
7589 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7590 == TYPE_UNSIGNED (TREE_TYPE
7591 (TREE_OPERAND
7592 (TREE_OPERAND (exp, 0), 0)))))))
7594 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7595 enum machine_mode innermode = TYPE_MODE (op0type);
7596 bool zextend_p = TYPE_UNSIGNED (op0type);
7597 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7598 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7600 if (mode == GET_MODE_WIDER_MODE (innermode))
7602 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7604 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7605 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7606 TREE_OPERAND (exp, 1),
7607 NULL_RTX, &op0, &op1, 0);
7608 else
7609 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7610 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7611 NULL_RTX, &op0, &op1, 0);
7612 goto binop3;
7614 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7615 && innermode == word_mode)
7617 rtx htem, hipart;
7618 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7619 NULL_RTX, VOIDmode, 0);
7620 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7621 op1 = convert_modes (innermode, mode,
7622 expand_expr (TREE_OPERAND (exp, 1),
7623 NULL_RTX, VOIDmode, 0),
7624 unsignedp);
7625 else
7626 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7627 NULL_RTX, VOIDmode, 0);
7628 temp = expand_binop (mode, other_optab, op0, op1, target,
7629 unsignedp, OPTAB_LIB_WIDEN);
7630 hipart = gen_highpart (innermode, temp);
7631 htem = expand_mult_highpart_adjust (innermode, hipart,
7632 op0, op1, hipart,
7633 zextend_p);
7634 if (htem != hipart)
7635 emit_move_insn (hipart, htem);
7636 return REDUCE_BIT_FIELD (temp);
7640 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7641 subtarget, &op0, &op1, 0);
7642 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7644 case TRUNC_DIV_EXPR:
7645 case FLOOR_DIV_EXPR:
7646 case CEIL_DIV_EXPR:
7647 case ROUND_DIV_EXPR:
7648 case EXACT_DIV_EXPR:
7649 if (modifier == EXPAND_STACK_PARM)
7650 target = 0;
7651 /* Possible optimization: compute the dividend with EXPAND_SUM;
7652 then, if the divisor is constant, we can optimize the case where
7653 some terms of the dividend have coefficients divisible by it. */
7654 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7655 subtarget, &op0, &op1, 0);
7656 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7658 case RDIV_EXPR:
7659 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7660 saving an expensive divide. If not, combine will rebuild the
7661 original computation. */
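      /* Illustrative sketch: with -funsafe-math-optimizations, a function
         computing x/d + y/d expands both divisions as multiplications by
         (1/d), so CSE can share a single 1/d (one division in total)
         between them; if the reciprocal is not reused, combine folds
         a*(1/b) back into a/b.  */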
7662 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7663 && TREE_CODE (type) == REAL_TYPE
7664 && !real_onep (TREE_OPERAND (exp, 0)))
7665 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7666 build2 (RDIV_EXPR, type,
7667 build_real (type, dconst1),
7668 TREE_OPERAND (exp, 1))),
7669 target, tmode, modifier);
7671 goto binop;
7673 case TRUNC_MOD_EXPR:
7674 case FLOOR_MOD_EXPR:
7675 case CEIL_MOD_EXPR:
7676 case ROUND_MOD_EXPR:
7677 if (modifier == EXPAND_STACK_PARM)
7678 target = 0;
7679 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7680 subtarget, &op0, &op1, 0);
7681 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7683 case FIX_ROUND_EXPR:
7684 case FIX_FLOOR_EXPR:
7685 case FIX_CEIL_EXPR:
7686 gcc_unreachable (); /* Not used for C. */
7688 case FIX_TRUNC_EXPR:
7689 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7690 if (target == 0 || modifier == EXPAND_STACK_PARM)
7691 target = gen_reg_rtx (mode);
7692 expand_fix (target, op0, unsignedp);
7693 return target;
7695 case FLOAT_EXPR:
7696 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7697 if (target == 0 || modifier == EXPAND_STACK_PARM)
7698 target = gen_reg_rtx (mode);
7699 /* expand_float can't figure out what to do if FROM has VOIDmode.
7700 So give it the correct mode. With -O, cse will optimize this. */
7701 if (GET_MODE (op0) == VOIDmode)
7702 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7703 op0);
7704 expand_float (target, op0,
7705 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7706 return target;
7708 case NEGATE_EXPR:
7709 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7710 if (modifier == EXPAND_STACK_PARM)
7711 target = 0;
7712 temp = expand_unop (mode,
7713 optab_for_tree_code (NEGATE_EXPR, type),
7714 op0, target, 0);
7715 gcc_assert (temp);
7716 return REDUCE_BIT_FIELD (temp);
7718 case ABS_EXPR:
7719 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7720 if (modifier == EXPAND_STACK_PARM)
7721 target = 0;
7723 /* ABS_EXPR is not valid for complex arguments. */
7724 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7725 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7727 /* Unsigned abs is simply the operand. Testing here means we don't
7728 risk generating incorrect code below. */
7729 if (TYPE_UNSIGNED (type))
7730 return op0;
7732 return expand_abs (mode, op0, target, unsignedp,
7733 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7735 case MAX_EXPR:
7736 case MIN_EXPR:
7737 target = original_target;
7738 if (target == 0
7739 || modifier == EXPAND_STACK_PARM
7740 || (MEM_P (target) && MEM_VOLATILE_P (target))
7741 || GET_MODE (target) != mode
7742 || (REG_P (target)
7743 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7744 target = gen_reg_rtx (mode);
7745 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7746 target, &op0, &op1, 0);
7748 /* First try to do it with a special MIN or MAX instruction.
7749 If that does not win, use a conditional jump to select the proper
7750 value. */
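      /* When no min/max instruction exists, the fallback below amounts to
         (sketching MAX_EXPR):
             target = op0;
             if (target >= op1) goto done;
             target = op1;
           done:
         using a word-by-word jump when the mode is too wide to compare
         directly.  */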
7751 this_optab = optab_for_tree_code (code, type);
7752 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7753 OPTAB_WIDEN);
7754 if (temp != 0)
7755 return temp;
7757 /* At this point, a MEM target is no longer useful; we will get better
7758 code without it. */
7760 if (MEM_P (target))
7761 target = gen_reg_rtx (mode);
7763 /* If op1 was placed in target, swap op0 and op1. */
7764 if (target != op0 && target == op1)
7766 rtx tem = op0;
7767 op0 = op1;
7768 op1 = tem;
7771 if (target != op0)
7772 emit_move_insn (target, op0);
7774 op0 = gen_label_rtx ();
7776 /* If this mode is an integer too wide to compare properly,
7777 compare word by word. Rely on cse to optimize constant cases. */
7778 if (GET_MODE_CLASS (mode) == MODE_INT
7779 && ! can_compare_p (GE, mode, ccp_jump))
7781 if (code == MAX_EXPR)
7782 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7783 NULL_RTX, op0);
7784 else
7785 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7786 NULL_RTX, op0);
7788 else
7790 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7791 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7793 emit_move_insn (target, op1);
7794 emit_label (op0);
7795 return target;
7797 case BIT_NOT_EXPR:
7798 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7799 if (modifier == EXPAND_STACK_PARM)
7800 target = 0;
7801 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7802 gcc_assert (temp);
7803 return temp;
7805 /* ??? Can optimize bitwise operations with one arg constant.
7806 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7807 and (a bitwise1 b) bitwise2 b (etc)
7808 but that is probably not worth while. */
7810 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7811 boolean values when we want in all cases to compute both of them. In
7812 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7813 as actual zero-or-1 values and then bitwise anding. In cases where
7814 there cannot be any side effects, better code would be made by
7815 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7816 how to recognize those cases. */
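    /* Sketch: for 'a && b' represented as TRUTH_AND_EXPR, both operands are
       reduced to 0 or 1 and combined with a bitwise AND, i.e. roughly
       (a != 0) & (b != 0) with no short-circuit branch; TRUTH_ANDIF_EXPR
       would instead skip evaluating 'b' when 'a' is zero.  */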
7818 case TRUTH_AND_EXPR:
7819 code = BIT_AND_EXPR;
7820 case BIT_AND_EXPR:
7821 goto binop;
7823 case TRUTH_OR_EXPR:
7824 code = BIT_IOR_EXPR;
7825 case BIT_IOR_EXPR:
7826 goto binop;
7828 case TRUTH_XOR_EXPR:
7829 code = BIT_XOR_EXPR;
7830 case BIT_XOR_EXPR:
7831 goto binop;
7833 case LSHIFT_EXPR:
7834 case RSHIFT_EXPR:
7835 case LROTATE_EXPR:
7836 case RROTATE_EXPR:
7837 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7838 subtarget = 0;
7839 if (modifier == EXPAND_STACK_PARM)
7840 target = 0;
7841 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7842 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7843 unsignedp);
7845 /* Could determine the answer when only additive constants differ. Also,
7846 the addition of one can be handled by changing the condition. */
7847 case LT_EXPR:
7848 case LE_EXPR:
7849 case GT_EXPR:
7850 case GE_EXPR:
7851 case EQ_EXPR:
7852 case NE_EXPR:
7853 case UNORDERED_EXPR:
7854 case ORDERED_EXPR:
7855 case UNLT_EXPR:
7856 case UNLE_EXPR:
7857 case UNGT_EXPR:
7858 case UNGE_EXPR:
7859 case UNEQ_EXPR:
7860 case LTGT_EXPR:
7861 temp = do_store_flag (exp,
7862 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7863 tmode != VOIDmode ? tmode : mode, 0);
7864 if (temp != 0)
7865 return temp;
7867 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7868 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7869 && original_target
7870 && REG_P (original_target)
7871 && (GET_MODE (original_target)
7872 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7874 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7875 VOIDmode, 0);
7877 /* If temp is constant, we can just compute the result. */
7878 if (GET_CODE (temp) == CONST_INT)
7880 if (INTVAL (temp) != 0)
7881 emit_move_insn (target, const1_rtx);
7882 else
7883 emit_move_insn (target, const0_rtx);
7885 return target;
7888 if (temp != original_target)
7890 enum machine_mode mode1 = GET_MODE (temp);
7891 if (mode1 == VOIDmode)
7892 mode1 = tmode != VOIDmode ? tmode : mode;
7894 temp = copy_to_mode_reg (mode1, temp);
7897 op1 = gen_label_rtx ();
7898 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7899 GET_MODE (temp), unsignedp, op1);
7900 emit_move_insn (temp, const1_rtx);
7901 emit_label (op1);
7902 return temp;
7905 /* If no set-flag instruction, must generate a conditional store
7906 into a temporary variable. Drop through and handle this
7907 like && and ||. */
7909 if (! ignore
7910 && (target == 0
7911 || modifier == EXPAND_STACK_PARM
7912 || ! safe_from_p (target, exp, 1)
7913 /* Make sure we don't have a hard reg (such as function's return
7914 value) live across basic blocks, if not optimizing. */
7915 || (!optimize && REG_P (target)
7916 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7917 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7919 if (target)
7920 emit_move_insn (target, const0_rtx);
7922 op1 = gen_label_rtx ();
7923 jumpifnot (exp, op1);
7925 if (target)
7926 emit_move_insn (target, const1_rtx);
7928 emit_label (op1);
7929 return ignore ? const0_rtx : target;
7931 case TRUTH_NOT_EXPR:
7932 if (modifier == EXPAND_STACK_PARM)
7933 target = 0;
7934 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7935 /* The parser is careful to generate TRUTH_NOT_EXPR
7936 only with operands that are always zero or one. */
7937 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7938 target, 1, OPTAB_LIB_WIDEN);
7939 gcc_assert (temp);
7940 return temp;
7942 case STATEMENT_LIST:
7944 tree_stmt_iterator iter;
7946 gcc_assert (ignore);
7948 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7949 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7951 return const0_rtx;
7953 case COND_EXPR:
7954 /* If it's void, we don't need to worry about computing a value. */
7955 if (VOID_TYPE_P (TREE_TYPE (exp)))
7957 tree pred = TREE_OPERAND (exp, 0);
7958 tree then_ = TREE_OPERAND (exp, 1);
7959 tree else_ = TREE_OPERAND (exp, 2);
7961 gcc_assert (TREE_CODE (then_) == GOTO_EXPR
7962 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
7963 && TREE_CODE (else_) == GOTO_EXPR
7964 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
7966 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7967 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7970 /* Note that COND_EXPRs whose type is a structure or union
7971 are required to be constructed to contain assignments of
7972 a temporary variable, so that we can evaluate them here
7973 for side effect only. If type is void, we must do likewise. */
7975 gcc_assert (!TREE_ADDRESSABLE (type)
7976 && !ignore
7977 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7978 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7980 /* If we are not to produce a result, we have no target. Otherwise,
7981 if a target was specified use it; it will not be used as an
7982 intermediate target unless it is safe. If no target, use a
7983 temporary. */
7985 if (modifier != EXPAND_STACK_PARM
7986 && original_target
7987 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7988 && GET_MODE (original_target) == mode
7989 #ifdef HAVE_conditional_move
7990 && (! can_conditionally_move_p (mode)
7991 || REG_P (original_target))
7992 #endif
7993 && !MEM_P (original_target))
7994 temp = original_target;
7995 else
7996 temp = assign_temp (type, 0, 0, 1);
7998 do_pending_stack_adjust ();
7999 NO_DEFER_POP;
8000 op0 = gen_label_rtx ();
8001 op1 = gen_label_rtx ();
8002 jumpifnot (TREE_OPERAND (exp, 0), op0);
8003 store_expr (TREE_OPERAND (exp, 1), temp,
8004 modifier == EXPAND_STACK_PARM);
8006 emit_jump_insn (gen_jump (op1));
8007 emit_barrier ();
8008 emit_label (op0);
8009 store_expr (TREE_OPERAND (exp, 2), temp,
8010 modifier == EXPAND_STACK_PARM);
8012 emit_label (op1);
8013 OK_DEFER_POP;
8014 return temp;
8016 case VEC_COND_EXPR:
8017 target = expand_vec_cond_expr (exp, target);
8018 return target;
8020 case MODIFY_EXPR:
8022 tree lhs = TREE_OPERAND (exp, 0);
8023 tree rhs = TREE_OPERAND (exp, 1);
8025 gcc_assert (ignore);
8027 /* Check for |= or &= of a bitfield of size one into another bitfield
8028 of size 1. In this case, (unless we need the result of the
8029 assignment) we can do this more efficiently with a
8030 test followed by an assignment, if necessary.
8032 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8033 things change so we do, this code should be enhanced to
8034 support it. */
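        /* Sketch of the transformation, for one-bit bitfields:
               s.a |= s.b;   becomes roughly   if (s.b) s.a = 1;
               s.a &= s.b;   becomes roughly   if (!s.b) s.a = 0;
           avoiding a read-modify-write of the destination bitfield.  */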
8035 if (TREE_CODE (lhs) == COMPONENT_REF
8036 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8037 || TREE_CODE (rhs) == BIT_AND_EXPR)
8038 && TREE_OPERAND (rhs, 0) == lhs
8039 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8040 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8041 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8043 rtx label = gen_label_rtx ();
8045 do_jump (TREE_OPERAND (rhs, 1),
8046 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8047 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8048 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8049 (TREE_CODE (rhs) == BIT_IOR_EXPR
8050 ? integer_one_node
8051 : integer_zero_node)));
8052 do_pending_stack_adjust ();
8053 emit_label (label);
8054 return const0_rtx;
8057 expand_assignment (lhs, rhs);
8059 return const0_rtx;
8062 case RETURN_EXPR:
8063 if (!TREE_OPERAND (exp, 0))
8064 expand_null_return ();
8065 else
8066 expand_return (TREE_OPERAND (exp, 0));
8067 return const0_rtx;
8069 case ADDR_EXPR:
8070 return expand_expr_addr_expr (exp, target, tmode, modifier);
8072 /* COMPLEX type for Extended Pascal & Fortran */
8073 case COMPLEX_EXPR:
8075 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8076 rtx insns;
8078 /* Get the rtx code of the operands. */
8079 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8080 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8082 if (! target)
8083 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8085 start_sequence ();
8087 /* Move the real (op0) and imaginary (op1) parts to their location. */
8088 emit_move_insn (gen_realpart (mode, target), op0);
8089 emit_move_insn (gen_imagpart (mode, target), op1);
8091 insns = get_insns ();
8092 end_sequence ();
8094 /* Complex construction should appear as a single unit. */
8095 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8096 each with a separate pseudo as destination.
8097 It's not correct for flow to treat them as a unit. */
8098 if (GET_CODE (target) != CONCAT)
8099 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8100 else
8101 emit_insn (insns);
8103 return target;
8106 case REALPART_EXPR:
8107 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8108 return gen_realpart (mode, op0);
8110 case IMAGPART_EXPR:
8111 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8112 return gen_imagpart (mode, op0);
8114 case RESX_EXPR:
8115 expand_resx_expr (exp);
8116 return const0_rtx;
8118 case TRY_CATCH_EXPR:
8119 case CATCH_EXPR:
8120 case EH_FILTER_EXPR:
8121 case TRY_FINALLY_EXPR:
8122 /* Lowered by tree-eh.c. */
8123 gcc_unreachable ();
8125 case WITH_CLEANUP_EXPR:
8126 case CLEANUP_POINT_EXPR:
8127 case TARGET_EXPR:
8128 case CASE_LABEL_EXPR:
8129 case VA_ARG_EXPR:
8130 case BIND_EXPR:
8131 case INIT_EXPR:
8132 case CONJ_EXPR:
8133 case COMPOUND_EXPR:
8134 case PREINCREMENT_EXPR:
8135 case PREDECREMENT_EXPR:
8136 case POSTINCREMENT_EXPR:
8137 case POSTDECREMENT_EXPR:
8138 case LOOP_EXPR:
8139 case EXIT_EXPR:
8140 case LABELED_BLOCK_EXPR:
8141 case EXIT_BLOCK_EXPR:
8142 case TRUTH_ANDIF_EXPR:
8143 case TRUTH_ORIF_EXPR:
8144 /* Lowered by gimplify.c. */
8145 gcc_unreachable ();
8147 case EXC_PTR_EXPR:
8148 return get_exception_pointer (cfun);
8150 case FILTER_EXPR:
8151 return get_exception_filter (cfun);
8153 case FDESC_EXPR:
8154 /* Function descriptors are not valid except for as
8155 initialization constants, and should not be expanded. */
8156 gcc_unreachable ();
8158 case SWITCH_EXPR:
8159 expand_case (exp);
8160 return const0_rtx;
8162 case LABEL_EXPR:
8163 expand_label (TREE_OPERAND (exp, 0));
8164 return const0_rtx;
8166 case ASM_EXPR:
8167 expand_asm_expr (exp);
8168 return const0_rtx;
8170 case WITH_SIZE_EXPR:
8171 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8172 have pulled out the size to use in whatever context it needed. */
8173 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8174 modifier, alt_rtl);
8176 case REALIGN_LOAD_EXPR:
8178 tree oprnd0 = TREE_OPERAND (exp, 0);
8179 tree oprnd1 = TREE_OPERAND (exp, 1);
8180 tree oprnd2 = TREE_OPERAND (exp, 2);
8181 rtx op2;
8183 this_optab = optab_for_tree_code (code, type);
8184 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8185 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8186 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8187 target, unsignedp);
8188 if (temp == 0)
8189 abort ();
8190 return temp;
8194 default:
8195 return lang_hooks.expand_expr (exp, original_target, tmode,
8196 modifier, alt_rtl);
8199 /* Here to do an ordinary binary operator. */
8200 binop:
8201 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8202 subtarget, &op0, &op1, 0);
8203 binop2:
8204 this_optab = optab_for_tree_code (code, type);
8205 binop3:
8206 if (modifier == EXPAND_STACK_PARM)
8207 target = 0;
8208 temp = expand_binop (mode, this_optab, op0, op1, target,
8209 unsignedp, OPTAB_LIB_WIDEN);
8210 gcc_assert (temp);
8211 return REDUCE_BIT_FIELD (temp);
8213 #undef REDUCE_BIT_FIELD
8215 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8216 signedness of TYPE), possibly returning the result in TARGET. */
8217 static rtx
8218 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8220 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8221 if (target && GET_MODE (target) != GET_MODE (exp))
8222 target = 0;
8223 if (TYPE_UNSIGNED (type))
8225 rtx mask;
8226 if (prec < HOST_BITS_PER_WIDE_INT)
8227 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8228 GET_MODE (exp));
8229 else
8230 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8231 ((unsigned HOST_WIDE_INT) 1
8232 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8233 GET_MODE (exp));
8234 return expand_and (GET_MODE (exp), exp, mask, target);
8236 else
8238 tree count = build_int_cst (NULL_TREE,
8239 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8240 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8241 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
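/* A worked example of the above (a sketch): reducing an SImode value to a
   3-bit precision keeps only the low three bits, either by ANDing with the
   mask 0x7 for an unsigned type, or, for a signed type, by shifting left
   by 29 and then arithmetically right by 29 so the sign bit of the field
   is replicated.  */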
8245 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8246 when applied to the address of EXP produces an address known to be
8247 aligned more than BIGGEST_ALIGNMENT. */
8249 static int
8250 is_aligning_offset (tree offset, tree exp)
8252 /* Strip off any conversions. */
8253 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8254 || TREE_CODE (offset) == NOP_EXPR
8255 || TREE_CODE (offset) == CONVERT_EXPR)
8256 offset = TREE_OPERAND (offset, 0);
8258 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8259 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8260 if (TREE_CODE (offset) != BIT_AND_EXPR
8261 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8262 || compare_tree_int (TREE_OPERAND (offset, 1),
8263 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8264 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8265 return 0;
8267 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8268 It must be NEGATE_EXPR. Then strip any more conversions. */
8269 offset = TREE_OPERAND (offset, 0);
8270 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8271 || TREE_CODE (offset) == NOP_EXPR
8272 || TREE_CODE (offset) == CONVERT_EXPR)
8273 offset = TREE_OPERAND (offset, 0);
8275 if (TREE_CODE (offset) != NEGATE_EXPR)
8276 return 0;
8278 offset = TREE_OPERAND (offset, 0);
8279 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8280 || TREE_CODE (offset) == NOP_EXPR
8281 || TREE_CODE (offset) == CONVERT_EXPR)
8282 offset = TREE_OPERAND (offset, 0);
8284 /* This must now be the address of EXP. */
8285 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
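/* Sketch of the pattern recognized above: an offset of the form
       (-(intptr) &EXP) & (ALIGN - 1)
   where ALIGN is a power of two larger than BIGGEST_ALIGNMENT.  Adding such
   an offset to the address of EXP rounds it up to the next ALIGN-byte
   boundary, so the resulting address is known to be ALIGN-aligned.  */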
8288 /* Return the tree node if an ARG corresponds to a string constant or zero
8289 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8290 in bytes within the string that ARG is accessing. The type of the
8291 offset will be `sizetype'. */
8293 tree
8294 string_constant (tree arg, tree *ptr_offset)
8296 tree array, offset;
8297 STRIP_NOPS (arg);
8299 if (TREE_CODE (arg) == ADDR_EXPR)
8301 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8303 *ptr_offset = size_zero_node;
8304 return TREE_OPERAND (arg, 0);
8306 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8308 array = TREE_OPERAND (arg, 0);
8309 offset = size_zero_node;
8311 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8313 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8314 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8315 if (TREE_CODE (array) != STRING_CST
8316 && TREE_CODE (array) != VAR_DECL)
8317 return 0;
8319 else
8320 return 0;
8322 else if (TREE_CODE (arg) == PLUS_EXPR)
8324 tree arg0 = TREE_OPERAND (arg, 0);
8325 tree arg1 = TREE_OPERAND (arg, 1);
8327 STRIP_NOPS (arg0);
8328 STRIP_NOPS (arg1);
8330 if (TREE_CODE (arg0) == ADDR_EXPR
8331 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8332 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8334 array = TREE_OPERAND (arg0, 0);
8335 offset = arg1;
8337 else if (TREE_CODE (arg1) == ADDR_EXPR
8338 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8339 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8341 array = TREE_OPERAND (arg1, 0);
8342 offset = arg0;
8344 else
8345 return 0;
8347 else
8348 return 0;
8350 if (TREE_CODE (array) == STRING_CST)
8352 *ptr_offset = convert (sizetype, offset);
8353 return array;
8355 else if (TREE_CODE (array) == VAR_DECL)
8357 int length;
8359 /* Variables initialized to string literals can be handled too. */
8360 if (DECL_INITIAL (array) == NULL_TREE
8361 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8362 return 0;
8364 /* The variable must be read-only, non-volatile, and bind locally. */
8365 if (! TREE_READONLY (array)
8366 || TREE_SIDE_EFFECTS (array)
8367 || ! targetm.binds_local_p (array))
8368 return 0;
8370 /* Avoid const char foo[4] = "abcde"; */
8371 if (DECL_SIZE_UNIT (array) == NULL_TREE
8372 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8373 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8374 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8375 return 0;
8377 /* If the variable is bigger than the string literal, OFFSET must be constant
8378 and within the bounds of the string literal. */
8379 offset = convert (sizetype, offset);
8380 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8381 && (! host_integerp (offset, 1)
8382 || compare_tree_int (offset, length) >= 0))
8383 return 0;
8385 *ptr_offset = offset;
8386 return DECL_INITIAL (array);
8389 return 0;
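/* Illustrative sketch: for an argument such as "hello" + 2, or 'buf + 2'
   where 'static const char buf[] = "hello";' binds locally, the STRING_CST
   "hello" is returned and *PTR_OFFSET is set to 2.  */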
8392 /* Generate code to calculate EXP using a store-flag instruction
8393 and return an rtx for the result. EXP is either a comparison
8394 or a TRUTH_NOT_EXPR whose operand is a comparison.
8396 If TARGET is nonzero, store the result there if convenient.
8398 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8399 cheap.
8401 Return zero if there is no suitable set-flag instruction
8402 available on this machine.
8404 Once expand_expr has been called on the arguments of the comparison,
8405 we are committed to doing the store flag, since it is not safe to
8406 re-evaluate the expression. We emit the store-flag insn by calling
8407 emit_store_flag, but only expand the arguments if we have a reason
8408 to believe that emit_store_flag will be successful. If we think that
8409 it will, but it isn't, we have to simulate the store-flag with a
8410 set/jump/set sequence. */
8412 static rtx
8413 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8415 enum rtx_code code;
8416 tree arg0, arg1, type;
8417 tree tem;
8418 enum machine_mode operand_mode;
8419 int invert = 0;
8420 int unsignedp;
8421 rtx op0, op1;
8422 enum insn_code icode;
8423 rtx subtarget = target;
8424 rtx result, label;
8426 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8427 result at the end. We can't simply invert the test since it would
8428 have already been inverted if it were valid. This case occurs for
8429 some floating-point comparisons. */
8431 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8432 invert = 1, exp = TREE_OPERAND (exp, 0);
8434 arg0 = TREE_OPERAND (exp, 0);
8435 arg1 = TREE_OPERAND (exp, 1);
8437 /* Don't crash if the comparison was erroneous. */
8438 if (arg0 == error_mark_node || arg1 == error_mark_node)
8439 return const0_rtx;
8441 type = TREE_TYPE (arg0);
8442 operand_mode = TYPE_MODE (type);
8443 unsignedp = TYPE_UNSIGNED (type);
8445 /* We won't bother with BLKmode store-flag operations because it would mean
8446 passing a lot of information to emit_store_flag. */
8447 if (operand_mode == BLKmode)
8448 return 0;
8450 /* We won't bother with store-flag operations involving function pointers
8451 when function pointers must be canonicalized before comparisons. */
8452 #ifdef HAVE_canonicalize_funcptr_for_compare
8453 if (HAVE_canonicalize_funcptr_for_compare
8454 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8455 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8456 == FUNCTION_TYPE))
8457 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8458 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8459 == FUNCTION_TYPE))))
8460 return 0;
8461 #endif
8463 STRIP_NOPS (arg0);
8464 STRIP_NOPS (arg1);
8466 /* Get the rtx comparison code to use. We know that EXP is a comparison
8467 operation of some type. Some comparisons against 1 and -1 can be
8468 converted to comparisons with zero. Do so here so that the tests
8469 below will be aware that we have a comparison with zero. These
8470 tests will not catch constants in the first operand, but constants
8471 are rarely passed as the first operand. */
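/* For example (a sketch): a signed test 'x < 1' is handled as 'x <= 0',
   and a signed 'x > -1' as 'x >= 0', so the checks further down can rely
   on seeing a comparison with zero in these cases.  */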
8473 switch (TREE_CODE (exp))
8475 case EQ_EXPR:
8476 code = EQ;
8477 break;
8478 case NE_EXPR:
8479 code = NE;
8480 break;
8481 case LT_EXPR:
8482 if (integer_onep (arg1))
8483 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8484 else
8485 code = unsignedp ? LTU : LT;
8486 break;
8487 case LE_EXPR:
8488 if (! unsignedp && integer_all_onesp (arg1))
8489 arg1 = integer_zero_node, code = LT;
8490 else
8491 code = unsignedp ? LEU : LE;
8492 break;
8493 case GT_EXPR:
8494 if (! unsignedp && integer_all_onesp (arg1))
8495 arg1 = integer_zero_node, code = GE;
8496 else
8497 code = unsignedp ? GTU : GT;
8498 break;
8499 case GE_EXPR:
8500 if (integer_onep (arg1))
8501 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8502 else
8503 code = unsignedp ? GEU : GE;
8504 break;
8506 case UNORDERED_EXPR:
8507 code = UNORDERED;
8508 break;
8509 case ORDERED_EXPR:
8510 code = ORDERED;
8511 break;
8512 case UNLT_EXPR:
8513 code = UNLT;
8514 break;
8515 case UNLE_EXPR:
8516 code = UNLE;
8517 break;
8518 case UNGT_EXPR:
8519 code = UNGT;
8520 break;
8521 case UNGE_EXPR:
8522 code = UNGE;
8523 break;
8524 case UNEQ_EXPR:
8525 code = UNEQ;
8526 break;
8527 case LTGT_EXPR:
8528 code = LTGT;
8529 break;
8531 default:
8532 gcc_unreachable ();
8535 /* Put a constant second. */
8536 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8538 tem = arg0; arg0 = arg1; arg1 = tem;
8539 code = swap_condition (code);
8542 /* If this is an equality or inequality test of a single bit, we can
8543 do this by shifting the bit being tested to the low-order bit and
8544 masking the result with the constant 1. If the condition was EQ,
8545 we xor it with 1. This does not require an scc insn and is faster
8546 than an scc insn even if we have it.
8548 The code to make this transformation was moved into fold_single_bit_test,
8549 so we just call into the folder and expand its result. */
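  /* Sketch: a test such as '(x & 8) != 0' becomes '(x >> 3) & 1', and the
     corresponding '== 0' form is the same with the result XORed with 1.  */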
8551 if ((code == NE || code == EQ)
8552 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8555 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8556 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8557 arg0, arg1, type),
8558 target, VOIDmode, EXPAND_NORMAL);
8561 /* Now see if we are likely to be able to do this. Return if not. */
8562 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8563 return 0;
8565 icode = setcc_gen_code[(int) code];
8566 if (icode == CODE_FOR_nothing
8567 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8569 /* We can only do this if it is one of the special cases that
8570 can be handled without an scc insn. */
8571 if ((code == LT && integer_zerop (arg1))
8572 || (! only_cheap && code == GE && integer_zerop (arg1)))
8574 else if (BRANCH_COST >= 0
8575 && ! only_cheap && (code == NE || code == EQ)
8576 && TREE_CODE (type) != REAL_TYPE
8577 && ((abs_optab->handlers[(int) operand_mode].insn_code
8578 != CODE_FOR_nothing)
8579 || (ffs_optab->handlers[(int) operand_mode].insn_code
8580 != CODE_FOR_nothing)))
8582 else
8583 return 0;
8586 if (! get_subtarget (target)
8587 || GET_MODE (subtarget) != operand_mode)
8588 subtarget = 0;
8590 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8592 if (target == 0)
8593 target = gen_reg_rtx (mode);
8595 result = emit_store_flag (target, code, op0, op1,
8596 operand_mode, unsignedp, 1);
8598 if (result)
8600 if (invert)
8601 result = expand_binop (mode, xor_optab, result, const1_rtx,
8602 result, 0, OPTAB_LIB_WIDEN);
8603 return result;
8606 /* If this failed, we have to do this with set/compare/jump/set code. */
8607 if (!REG_P (target)
8608 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8609 target = gen_reg_rtx (GET_MODE (target));
8611 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8612 result = compare_from_rtx (op0, op1, code, unsignedp,
8613 operand_mode, NULL_RTX);
8614 if (GET_CODE (result) == CONST_INT)
8615 return (((result == const0_rtx && ! invert)
8616 || (result != const0_rtx && invert))
8617 ? const0_rtx : const1_rtx);
8619 /* The code of RESULT may not match CODE if compare_from_rtx
8620 decided to swap its operands and reverse the original code.
8622 We know that compare_from_rtx returns either a CONST_INT or
8623 a new comparison code, so it is safe to just extract the
8624 code from RESULT. */
8625 code = GET_CODE (result);
8627 label = gen_label_rtx ();
8628 gcc_assert (bcc_gen_fctn[(int) code]);
8630 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8631 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8632 emit_label (label);
8634 return target;
8638 /* Stubs in case we haven't got a casesi insn. */
8639 #ifndef HAVE_casesi
8640 # define HAVE_casesi 0
8641 # define gen_casesi(a, b, c, d, e) (0)
8642 # define CODE_FOR_casesi CODE_FOR_nothing
8643 #endif
8645 /* If the machine does not have a case insn that compares the bounds,
8646 this means extra overhead for dispatch tables, which raises the
8647 threshold for using them. */
8648 #ifndef CASE_VALUES_THRESHOLD
8649 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8650 #endif /* CASE_VALUES_THRESHOLD */
8652 unsigned int
8653 case_values_threshold (void)
8655 return CASE_VALUES_THRESHOLD;
8658 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8659 0 otherwise (i.e. if there is no casesi instruction). */
8661 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8662 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8664 enum machine_mode index_mode = SImode;
8665 int index_bits = GET_MODE_BITSIZE (index_mode);
8666 rtx op1, op2, index;
8667 enum machine_mode op_mode;
8669 if (! HAVE_casesi)
8670 return 0;
8672 /* Convert the index to SImode. */
8673 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8675 enum machine_mode omode = TYPE_MODE (index_type);
8676 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8678 /* We must handle the endpoints in the original mode. */
8679 index_expr = build2 (MINUS_EXPR, index_type,
8680 index_expr, minval);
8681 minval = integer_zero_node;
8682 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8683 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8684 omode, 1, default_label);
8685 /* Now we can safely truncate. */
8686 index = convert_to_mode (index_mode, index, 0);
8688 else
8690 if (TYPE_MODE (index_type) != index_mode)
8692 index_expr = convert (lang_hooks.types.type_for_size
8693 (index_bits, 0), index_expr);
8694 index_type = TREE_TYPE (index_expr);
8697 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8700 do_pending_stack_adjust ();
8702 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8703 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8704 (index, op_mode))
8705 index = copy_to_mode_reg (op_mode, index);
8707 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8709 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8710 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8711 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8712 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8713 (op1, op_mode))
8714 op1 = copy_to_mode_reg (op_mode, op1);
8716 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8718 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8719 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8720 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8721 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8722 (op2, op_mode))
8723 op2 = copy_to_mode_reg (op_mode, op2);
8725 emit_jump_insn (gen_casesi (index, op1, op2,
8726 table_label, default_label));
8727 return 1;
8730 /* Attempt to generate a tablejump instruction; same concept. */
8731 #ifndef HAVE_tablejump
8732 #define HAVE_tablejump 0
8733 #define gen_tablejump(x, y) (0)
8734 #endif
8736 /* Subroutine of the next function.
8738 INDEX is the value being switched on, with the lowest value
8739 in the table already subtracted.
8740 MODE is its expected mode (needed if INDEX is constant).
8741 RANGE is the length of the jump table.
8742 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8744 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8745 index value is out of range. */
8747 static void
8748 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8749 rtx default_label)
8751 rtx temp, vector;
8753 if (INTVAL (range) > cfun->max_jumptable_ents)
8754 cfun->max_jumptable_ents = INTVAL (range);
8756 /* Do an unsigned comparison (in the proper mode) between the index
8757 expression and the value which represents the length of the range.
8758 Since we just finished subtracting the lower bound of the range
8759 from the index expression, this comparison allows us to simultaneously
8760 check that the original index expression value is both greater than
8761 or equal to the minimum value of the range and less than or equal to
8762 the maximum value of the range. */
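  /* Sketch: for a switch whose cases span 5..12, the lowest value 5 has
     already been subtracted, so a single unsigned 'index - 5 > 7' test
     rejects both index < 5 (which wraps around to a huge unsigned value)
     and index > 12.  */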
8764 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8765 default_label);
8767 /* If index is in range, it must fit in Pmode.
8768 Convert to Pmode so we can index with it. */
8769 if (mode != Pmode)
8770 index = convert_to_mode (Pmode, index, 1);
8772 /* Don't let a MEM slip through, because then INDEX that comes
8773 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8774 and break_out_memory_refs will go to work on it and mess it up. */
8775 #ifdef PIC_CASE_VECTOR_ADDRESS
8776 if (flag_pic && !REG_P (index))
8777 index = copy_to_mode_reg (Pmode, index);
8778 #endif
8780 /* If flag_force_addr were to affect this address
8781 it could interfere with the tricky assumptions made
8782 about addresses that contain label-refs,
8783 which may be valid only very near the tablejump itself. */
8784 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8785 GET_MODE_SIZE, because this indicates how large insns are. The other
8786 uses should all be Pmode, because they are addresses. This code
8787 could fail if addresses and insns are not the same size. */
8788 index = gen_rtx_PLUS (Pmode,
8789 gen_rtx_MULT (Pmode, index,
8790 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8791 gen_rtx_LABEL_REF (Pmode, table_label));
8792 #ifdef PIC_CASE_VECTOR_ADDRESS
8793 if (flag_pic)
8794 index = PIC_CASE_VECTOR_ADDRESS (index);
8795 else
8796 #endif
8797 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8798 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8799 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8800 convert_move (temp, vector, 0);
8802 emit_jump_insn (gen_tablejump (temp, table_label));
8804 /* If we are generating PIC code or if the table is PC-relative, the
8805 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8806 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8807 emit_barrier ();
8811 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8812 rtx table_label, rtx default_label)
8814 rtx index;
8816 if (! HAVE_tablejump)
8817 return 0;
8819 index_expr = fold (build2 (MINUS_EXPR, index_type,
8820 convert (index_type, index_expr),
8821 convert (index_type, minval)));
8822 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8823 do_pending_stack_adjust ();
8825 do_tablejump (index, TYPE_MODE (index_type),
8826 convert_modes (TYPE_MODE (index_type),
8827 TYPE_MODE (TREE_TYPE (range)),
8828 expand_expr (range, NULL_RTX,
8829 VOIDmode, 0),
8830 TYPE_UNSIGNED (TREE_TYPE (range))),
8831 table_label, default_label);
8832 return 1;
8835 /* Nonzero if the mode is a valid vector mode for this architecture.
8836 This returns nonzero even if there is no hardware support for the
8837 vector mode, but we can emulate with narrower modes. */
8840 vector_mode_valid_p (enum machine_mode mode)
8842 enum mode_class class = GET_MODE_CLASS (mode);
8843 enum machine_mode innermode;
8845 /* Doh! What's going on? */
8846 if (class != MODE_VECTOR_INT
8847 && class != MODE_VECTOR_FLOAT)
8848 return 0;
8850 /* Hardware support. Woo hoo! */
8851 if (targetm.vector_mode_supported_p (mode))
8852 return 1;
8854 innermode = GET_MODE_INNER (mode);
8856 /* We should probably return 1 if requesting V4DI and we have no DI,
8857 but we have V2DI, but this is probably very unlikely. */
8859 /* If we have support for the inner mode, we can safely emulate it.
8860 We may not have V2DI, but we can emulate with a pair of DIs. */
8861 return targetm.scalar_mode_supported_p (innermode);
8864 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8865 static rtx
8866 const_vector_from_tree (tree exp)
8868 rtvec v;
8869 int units, i;
8870 tree link, elt;
8871 enum machine_mode inner, mode;
8873 mode = TYPE_MODE (TREE_TYPE (exp));
8875 if (initializer_zerop (exp))
8876 return CONST0_RTX (mode);
8878 units = GET_MODE_NUNITS (mode);
8879 inner = GET_MODE_INNER (mode);
8881 v = rtvec_alloc (units);
8883 link = TREE_VECTOR_CST_ELTS (exp);
8884 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8886 elt = TREE_VALUE (link);
8888 if (TREE_CODE (elt) == REAL_CST)
8889 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8890 inner);
8891 else
8892 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8893 TREE_INT_CST_HIGH (elt),
8894 inner);
8897 /* Initialize remaining elements to 0. */
8898 for (; i < units; ++i)
8899 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8901 return gen_rtx_CONST_VECTOR (mode, v);
8903 #include "gt-expr.h"