2007-06-05 H.J. Lu <hongjiu.lu@intel.com>
[official-gcc.git] / gcc / expr.c
blob05e23833b4107a64249219e6246f6f9b77d1444a
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
110 struct store_by_pieces
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, int);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
149 static int is_aligning_offset (tree, tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* Record for each mode whether we can float-extend from memory. */
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) MOVE_RATIO)
195 #endif
197 /* This array records the insn_code of insns to perform block moves. */
198 enum insn_code movmem_optab[NUM_MACHINE_MODES];
200 /* This array records the insn_code of insns to perform block sets. */
201 enum insn_code setmem_optab[NUM_MACHINE_MODES];
203 /* These arrays record the insn_code of three different kinds of insns
204 to perform block compares. */
205 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209 /* Synchronization primitives. */
210 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
211 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
230 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
233 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
235 #ifndef SLOW_UNALIGNED_ACCESS
236 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
237 #endif
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab. */
242 void
243 init_expr_once (void)
245 rtx insn, pat;
246 enum machine_mode mode;
247 int num_clobbers;
248 rtx mem, mem1;
249 rtx reg;
251 /* Try indexing by frame ptr and try by stack ptr.
252 It is known that on the Convex the stack ptr isn't a valid index.
253 With luck, one or the other is valid on any machine. */
254 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
255 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257 /* A scratch register we can modify in-place below to avoid
258 useless RTL allocations. */
259 reg = gen_rtx_REG (VOIDmode, -1);
261 insn = rtx_alloc (INSN);
262 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
263 PATTERN (insn) = pat;
265 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
266 mode = (enum machine_mode) ((int) mode + 1))
268 int regno;
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
273 PUT_MODE (reg, mode);
275 /* See if there is some register that can be used in this mode and
276 directly loaded or stored from memory. */
278 if (mode != VOIDmode && mode != BLKmode)
279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
280 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
281 regno++)
283 if (! HARD_REGNO_MODE_OK (regno, mode))
284 continue;
286 REGNO (reg) = regno;
288 SET_SRC (pat) = mem;
289 SET_DEST (pat) = reg;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_load[(int) mode] = 1;
293 SET_SRC (pat) = mem1;
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
298 SET_SRC (pat) = reg;
299 SET_DEST (pat) = mem;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_store[(int) mode] = 1;
303 SET_SRC (pat) = reg;
304 SET_DEST (pat) = mem1;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
310 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
313 mode = GET_MODE_WIDER_MODE (mode))
315 enum machine_mode srcmode;
316 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
317 srcmode = GET_MODE_WIDER_MODE (srcmode))
319 enum insn_code ic;
321 ic = can_extend_p (mode, srcmode, 0);
322 if (ic == CODE_FOR_nothing)
323 continue;
325 PUT_MODE (mem, srcmode);
327 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
328 float_extend_from_mem[mode][srcmode] = true;
333 /* This is run at the start of compiling a function. */
335 void
336 init_expr (void)
338 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 /* Copy data from FROM to TO, where the machine modes are not the same.
342 Both modes may be integer, or both may be floating.
343 UNSIGNEDP should be nonzero if FROM is an unsigned type.
344 This causes zero-extension instead of sign-extension. */
346 void
347 convert_move (rtx to, rtx from, int unsignedp)
349 enum machine_mode to_mode = GET_MODE (to);
350 enum machine_mode from_mode = GET_MODE (from);
351 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
352 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
353 enum insn_code code;
354 rtx libcall;
356 /* rtx code for making an equivalent value. */
357 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
358 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361 gcc_assert (to_real == from_real);
362 gcc_assert (to_mode != BLKmode);
363 gcc_assert (from_mode != BLKmode);
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
385 emit_move_insn (to, from);
386 return;
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
393 if (VECTOR_MODE_P (to_mode))
394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
395 else
396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
398 emit_move_insn (to, from);
399 return;
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
409 if (to_real)
411 rtx value, insns;
412 convert_optab tab;
414 gcc_assert ((GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode))
416 || (DECIMAL_FLOAT_MODE_P (from_mode)
417 != DECIMAL_FLOAT_MODE_P (to_mode)));
419 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 /* Conversion between decimal float and binary float, same size. */
421 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
423 tab = sext_optab;
424 else
425 tab = trunc_optab;
427 /* Try converting directly if the insn is supported. */
429 code = tab->handlers[to_mode][from_mode].insn_code;
430 if (code != CODE_FOR_nothing)
432 emit_unop_insn (code, to, from,
433 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
434 return;
437 /* Otherwise use a libcall. */
438 libcall = tab->handlers[to_mode][from_mode].libfunc;
440 /* Is this conversion implemented yet? */
441 gcc_assert (libcall);
443 start_sequence ();
444 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
445 1, from, from_mode);
446 insns = get_insns ();
447 end_sequence ();
448 emit_libcall_block (insns, to, value,
449 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
450 from)
451 : gen_rtx_FLOAT_EXTEND (to_mode, from));
452 return;
455 /* Handle pointer conversion. */ /* SPEE 900220. */
456 /* Targets are expected to provide conversion insns between PxImode and
457 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
458 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
460 enum machine_mode full_mode
461 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
463 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
464 != CODE_FOR_nothing);
466 if (full_mode != from_mode)
467 from = convert_to_mode (full_mode, from, unsignedp);
468 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
469 to, from, UNKNOWN);
470 return;
472 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
474 rtx new_from;
475 enum machine_mode full_mode
476 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
478 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
479 != CODE_FOR_nothing);
481 if (to_mode == full_mode)
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 to, from, UNKNOWN);
485 return;
488 new_from = gen_reg_rtx (full_mode);
489 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
490 new_from, from, UNKNOWN);
492 /* else proceed to integer conversions below. */
493 from_mode = full_mode;
494 from = new_from;
497 /* Now both modes are integers. */
499 /* Handle expanding beyond a word. */
500 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
503 rtx insns;
504 rtx lowpart;
505 rtx fill_value;
506 rtx lowfrom;
507 int i;
508 enum machine_mode lowpart_mode;
509 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
511 /* Try converting directly if the insn is supported. */
512 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
513 != CODE_FOR_nothing)
515 /* If FROM is a SUBREG, put it into a register. Do this
516 so that we always generate the same set of insns for
517 better cse'ing; if an intermediate assignment occurred,
518 we won't be doing the operation directly on the SUBREG. */
519 if (optimize > 0 && GET_CODE (from) == SUBREG)
520 from = force_reg (from_mode, from);
521 emit_unop_insn (code, to, from, equiv_code);
522 return;
524 /* Next, try converting via full word. */
525 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 != CODE_FOR_nothing))
529 if (REG_P (to))
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
535 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 emit_unop_insn (code, to,
537 gen_lowpart (word_mode, to), equiv_code);
538 return;
541 /* No special multiword conversion insn; do it by hand. */
542 start_sequence ();
544 /* Since we will turn this into a no conflict block, we must ensure
545 that the source does not overlap the target. */
547 if (reg_overlap_mentioned_p (to, from))
548 from = force_reg (from_mode, from);
550 /* Get a copy of FROM widened to a word, if necessary. */
551 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 lowpart_mode = word_mode;
553 else
554 lowpart_mode = from_mode;
556 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
558 lowpart = gen_lowpart (lowpart_mode, to);
559 emit_move_insn (lowpart, lowfrom);
561 /* Compute the value to put in each remaining word. */
562 if (unsignedp)
563 fill_value = const0_rtx;
564 else
566 #ifdef HAVE_slt
567 if (HAVE_slt
568 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
569 && STORE_FLAG_VALUE == -1)
571 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
572 lowpart_mode, 0);
573 fill_value = gen_reg_rtx (word_mode);
574 emit_insn (gen_slt (fill_value));
576 else
577 #endif
579 fill_value
580 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
582 NULL_RTX, 0);
583 fill_value = convert_to_mode (word_mode, fill_value, 1);
587 /* Fill the remaining words. */
588 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
590 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 rtx subword = operand_subword (to, index, 1, to_mode);
593 gcc_assert (subword);
595 if (fill_value != subword)
596 emit_move_insn (subword, fill_value);
599 insns = get_insns ();
600 end_sequence ();
602 emit_no_conflict_block (insns, to, from, NULL_RTX,
603 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
604 return;
607 /* Truncating multi-word to a word or less. */
608 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
611 if (!((MEM_P (from)
612 && ! MEM_VOLATILE_P (from)
613 && direct_load[(int) to_mode]
614 && ! mode_dependent_address_p (XEXP (from, 0)))
615 || REG_P (from)
616 || GET_CODE (from) == SUBREG))
617 from = force_reg (from_mode, from);
618 convert_move (to, gen_lowpart (word_mode, from), 0);
619 return;
622 /* Now follow all the conversions between integers
623 no more than a word long. */
625 /* For truncation, usually we can just refer to FROM in a narrower mode. */
626 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (from_mode)))
630 if (!((MEM_P (from)
631 && ! MEM_VOLATILE_P (from)
632 && direct_load[(int) to_mode]
633 && ! mode_dependent_address_p (XEXP (from, 0)))
634 || REG_P (from)
635 || GET_CODE (from) == SUBREG))
636 from = force_reg (from_mode, from);
637 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
638 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 from = copy_to_reg (from);
640 emit_move_insn (to, gen_lowpart (to_mode, from));
641 return;
644 /* Handle extension. */
645 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
647 /* Convert directly if that works. */
648 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
649 != CODE_FOR_nothing)
651 emit_unop_insn (code, to, from, equiv_code);
652 return;
654 else
656 enum machine_mode intermediate;
657 rtx tmp;
658 tree shift_amount;
660 /* Search for a mode to convert via. */
661 for (intermediate = from_mode; intermediate != VOIDmode;
662 intermediate = GET_MODE_WIDER_MODE (intermediate))
663 if (((can_extend_p (to_mode, intermediate, unsignedp)
664 != CODE_FOR_nothing)
665 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
666 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 GET_MODE_BITSIZE (intermediate))))
668 && (can_extend_p (intermediate, from_mode, unsignedp)
669 != CODE_FOR_nothing))
671 convert_move (to, convert_to_mode (intermediate, from,
672 unsignedp), unsignedp);
673 return;
676 /* No suitable intermediate mode.
677 Generate what we need with shifts. */
678 shift_amount = build_int_cst (NULL_TREE,
679 GET_MODE_BITSIZE (to_mode)
680 - GET_MODE_BITSIZE (from_mode));
681 from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
683 to, unsignedp);
684 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
685 to, unsignedp);
686 if (tmp != to)
687 emit_move_insn (to, tmp);
688 return;
692 /* Support special truncate insns for certain modes. */
693 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
695 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
696 to, from, UNKNOWN);
697 return;
700 /* Handle truncation of volatile memrefs, and so on;
701 the things that couldn't be truncated directly,
702 and for which there was no special instruction.
704 ??? Code above formerly short-circuited this, for most integer
705 mode pairs, with a force_reg in from_mode followed by a recursive
706 call to this routine. Appears always to have been wrong. */
707 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
709 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710 emit_move_insn (to, temp);
711 return;
714 /* Mode combination is not recognized. */
715 gcc_unreachable ();
718 /* Return an rtx for a value that would result
719 from converting X to mode MODE.
720 Both X and MODE may be floating, or both integer.
721 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
723 or by copying to a new temporary with conversion. */
726 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
728 return convert_modes (mode, VOIDmode, x, unsignedp);
731 /* Return an rtx for a value that would result
732 from converting X from mode OLDMODE to mode MODE.
733 Both modes may be floating, or both integer.
734 UNSIGNEDP is nonzero if X is an unsigned value.
736 This can be done by referring to a part of X in place
737 or by copying to a new temporary with conversion.
739 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
742 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
744 rtx temp;
746 /* If FROM is a SUBREG that indicates that we have already done at least
747 the required extension, strip it. */
749 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752 x = gen_lowpart (mode, x);
754 if (GET_MODE (x) != VOIDmode)
755 oldmode = GET_MODE (x);
757 if (mode == oldmode)
758 return x;
760 /* There is one case that we must handle specially: If we are converting
761 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
762 we are to interpret the constant as unsigned, gen_lowpart will do
763 the wrong if the constant appears negative. What we want to do is
764 make the high-order word of the constant zero, not all ones. */
766 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
767 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
768 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
770 HOST_WIDE_INT val = INTVAL (x);
772 if (oldmode != VOIDmode
773 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
775 int width = GET_MODE_BITSIZE (oldmode);
777 /* We need to zero extend VAL. */
778 val &= ((HOST_WIDE_INT) 1 << width) - 1;
781 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
789 if ((GET_CODE (x) == CONST_INT
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
791 || (GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_CLASS (oldmode) == MODE_INT
793 && (GET_CODE (x) == CONST_DOUBLE
794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
796 && direct_load[(int) mode])
797 || (REG_P (x)
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
803 /* ?? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 if (! unsignedp
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
819 return gen_int_mode (val, mode);
822 return gen_lowpart (mode, x);
825 /* Converting from integer constant into mode is always equivalent to an
826 subreg operation. */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
830 return simplify_gen_subreg (mode, x, oldmode, 0);
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
835 return temp;
838 /* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
843 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
845 /* Determine whether the LEN bytes can be moved by using several move
846 instructions. Return nonzero if a call to move_by_pieces should
847 succeed. */
850 can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
853 return MOVE_BY_PIECES_P (len, align);
856 /* Generate several move instructions to copy LEN bytes from block FROM to
857 block TO. (These are MEM rtx's with BLKmode).
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
862 ALIGN is maximum stack alignment we can assume.
864 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
865 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
866 stpcpy. */
869 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
872 struct move_by_pieces data;
873 rtx to_addr, from_addr = XEXP (from, 0);
874 unsigned int max_size = MOVE_MAX_PIECES + 1;
875 enum machine_mode mode = VOIDmode, tmode;
876 enum insn_code icode;
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
880 data.offset = 0;
881 data.from_addr = from_addr;
882 if (to)
884 to_addr = XEXP (to, 0);
885 data.to = to;
886 data.autinc_to
887 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889 data.reverse
890 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
892 else
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897 #ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899 #else
900 data.reverse = 0;
901 #endif
903 data.to_addr = to_addr;
904 data.from = from;
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
912 if (data.reverse) data.offset = len;
913 data.len = len;
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
919 && move_by_pieces_ninsns (len, align, max_size) > 2)
921 /* Find the mode of the largest move... */
922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 if (GET_MODE_SIZE (tmode) < max_size)
925 mode = tmode;
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
929 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 data.autinc_from = 1;
931 data.explicit_inc_from = -1;
933 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
935 data.from_addr = copy_addr_to_reg (from_addr);
936 data.autinc_from = 1;
937 data.explicit_inc_from = 1;
939 if (!data.autinc_from && CONSTANT_P (from_addr))
940 data.from_addr = copy_addr_to_reg (from_addr);
941 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
943 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
944 data.autinc_to = 1;
945 data.explicit_inc_to = -1;
947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
949 data.to_addr = copy_addr_to_reg (to_addr);
950 data.autinc_to = 1;
951 data.explicit_inc_to = 1;
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_addr_to_reg (to_addr);
957 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958 if (align >= GET_MODE_ALIGNMENT (tmode))
959 align = GET_MODE_ALIGNMENT (tmode);
960 else
962 enum machine_mode xmode;
964 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
965 tmode != VOIDmode;
966 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 || SLOW_UNALIGNED_ACCESS (tmode, align))
969 break;
971 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
977 while (max_size > 1)
979 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 if (GET_MODE_SIZE (tmode) < max_size)
982 mode = tmode;
984 if (mode == VOIDmode)
985 break;
987 icode = mov_optab->handlers[(int) mode].insn_code;
988 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
989 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
991 max_size = GET_MODE_SIZE (mode);
994 /* The code above should have handled everything. */
995 gcc_assert (!data.len);
997 if (endp)
999 rtx to1;
1001 gcc_assert (!data.reverse);
1002 if (data.autinc_to)
1004 if (endp == 2)
1006 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1008 else
1009 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1010 -1));
1012 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1013 data.offset);
1015 else
1017 if (endp == 2)
1018 --data.offset;
1019 to1 = adjust_address (data.to, QImode, data.offset);
1021 return to1;
1023 else
1024 return data.to;
1027 /* Return number of insns required to move L bytes by pieces.
1028 ALIGN (in bits) is maximum alignment we can assume. */
1030 static unsigned HOST_WIDE_INT
1031 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 unsigned int max_size)
1034 unsigned HOST_WIDE_INT n_insns = 0;
1035 enum machine_mode tmode;
1037 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038 if (align >= GET_MODE_ALIGNMENT (tmode))
1039 align = GET_MODE_ALIGNMENT (tmode);
1040 else
1042 enum machine_mode tmode, xmode;
1044 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1045 tmode != VOIDmode;
1046 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 || SLOW_UNALIGNED_ACCESS (tmode, align))
1049 break;
1051 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1054 while (max_size > 1)
1056 enum machine_mode mode = VOIDmode;
1057 enum insn_code icode;
1059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) < max_size)
1062 mode = tmode;
1064 if (mode == VOIDmode)
1065 break;
1067 icode = mov_optab->handlers[(int) mode].insn_code;
1068 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1069 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1071 max_size = GET_MODE_SIZE (mode);
1074 gcc_assert (!l);
1075 return n_insns;
1078 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1079 with move instructions for mode MODE. GENFUN is the gen_... function
1080 to make a move insn for that mode. DATA has all the other info. */
1082 static void
1083 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 struct move_by_pieces *data)
1086 unsigned int size = GET_MODE_SIZE (mode);
1087 rtx to1 = NULL_RTX, from1;
1089 while (data->len >= size)
1091 if (data->reverse)
1092 data->offset -= size;
1094 if (data->to)
1096 if (data->autinc_to)
1097 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1098 data->offset);
1099 else
1100 to1 = adjust_address (data->to, mode, data->offset);
1103 if (data->autinc_from)
1104 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1105 data->offset);
1106 else
1107 from1 = adjust_address (data->from, mode, data->offset);
1109 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 GEN_INT (-(HOST_WIDE_INT)size)));
1112 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1113 emit_insn (gen_add2_insn (data->from_addr,
1114 GEN_INT (-(HOST_WIDE_INT)size)));
1116 if (data->to)
1117 emit_insn ((*genfun) (to1, from1));
1118 else
1120 #ifdef PUSH_ROUNDING
1121 emit_single_push_insn (mode, from1, NULL);
1122 #else
1123 gcc_unreachable ();
1124 #endif
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1128 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1129 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1130 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1132 if (! data->reverse)
1133 data->offset += size;
1135 data->len -= size;
1139 /* Emit code to move a block Y to a block X. This may be done with
1140 string-move instructions, with multiple scalar move instructions,
1141 or with a library call.
1143 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1144 SIZE is an rtx that says how long they are.
1145 ALIGN is the maximum alignment we can assume they have.
1146 METHOD describes what kind of copy this is, and what mechanisms may be used.
1148 Return the address of the new block, if memcpy is called and returns it,
1149 0 otherwise. */
1152 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1153 unsigned int expected_align, HOST_WIDE_INT expected_size)
1155 bool may_use_call;
1156 rtx retval = 0;
1157 unsigned int align;
1159 switch (method)
1161 case BLOCK_OP_NORMAL:
1162 case BLOCK_OP_TAILCALL:
1163 may_use_call = true;
1164 break;
1166 case BLOCK_OP_CALL_PARM:
1167 may_use_call = block_move_libcall_safe_for_call_parm ();
1169 /* Make inhibit_defer_pop nonzero around the library call
1170 to force it to pop the arguments right away. */
1171 NO_DEFER_POP;
1172 break;
1174 case BLOCK_OP_NO_LIBCALL:
1175 may_use_call = false;
1176 break;
1178 default:
1179 gcc_unreachable ();
1182 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1184 gcc_assert (MEM_P (x));
1185 gcc_assert (MEM_P (y));
1186 gcc_assert (size);
1188 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1189 block copy is more efficient for other large modes, e.g. DCmode. */
1190 x = adjust_address (x, BLKmode, 0);
1191 y = adjust_address (y, BLKmode, 0);
1193 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1194 can be incorrect is coming from __builtin_memcpy. */
1195 if (GET_CODE (size) == CONST_INT)
1197 if (INTVAL (size) == 0)
1198 return 0;
1200 x = shallow_copy_rtx (x);
1201 y = shallow_copy_rtx (y);
1202 set_mem_size (x, size);
1203 set_mem_size (y, size);
1206 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1207 move_by_pieces (x, y, INTVAL (size), align, 0);
1208 else if (emit_block_move_via_movmem (x, y, size, align,
1209 expected_align, expected_size))
1211 else if (may_use_call)
1212 retval = emit_block_move_via_libcall (x, y, size,
1213 method == BLOCK_OP_TAILCALL);
1214 else
1215 emit_block_move_via_loop (x, y, size, align);
1217 if (method == BLOCK_OP_CALL_PARM)
1218 OK_DEFER_POP;
1220 return retval;
1224 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1226 return emit_block_move_hints (x, y, size, method, 0, -1);
1229 /* A subroutine of emit_block_move. Returns true if calling the
1230 block move libcall will not clobber any parameters which may have
1231 already been placed on the stack. */
1233 static bool
1234 block_move_libcall_safe_for_call_parm (void)
1236 /* If arguments are pushed on the stack, then they're safe. */
1237 if (PUSH_ARGS)
1238 return true;
1240 /* If registers go on the stack anyway, any argument is sure to clobber
1241 an outgoing argument. */
1242 #if defined (REG_PARM_STACK_SPACE)
1243 if (OUTGOING_REG_PARM_STACK_SPACE)
1245 tree fn;
1246 fn = emit_block_move_libcall_fn (false);
1247 if (REG_PARM_STACK_SPACE (fn) != 0)
1248 return false;
1250 #endif
1252 /* If any argument goes in memory, then it might clobber an outgoing
1253 argument. */
1255 CUMULATIVE_ARGS args_so_far;
1256 tree fn, arg;
1258 fn = emit_block_move_libcall_fn (false);
1259 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1261 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1262 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1264 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1265 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1266 if (!tmp || !REG_P (tmp))
1267 return false;
1268 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1269 return false;
1270 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1273 return true;
1276 /* A subroutine of emit_block_move. Expand a movmem pattern;
1277 return true if successful. */
1279 static bool
1280 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1281 unsigned int expected_align, HOST_WIDE_INT expected_size)
1283 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1284 int save_volatile_ok = volatile_ok;
1285 enum machine_mode mode;
1287 if (expected_align < align)
1288 expected_align = align;
1290 /* Since this is a move insn, we don't care about volatility. */
1291 volatile_ok = 1;
1293 /* Try the most limited insn first, because there's no point
1294 including more than one in the machine description unless
1295 the more limited one has some advantage. */
1297 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1298 mode = GET_MODE_WIDER_MODE (mode))
1300 enum insn_code code = movmem_optab[(int) mode];
1301 insn_operand_predicate_fn pred;
1303 if (code != CODE_FOR_nothing
1304 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1305 here because if SIZE is less than the mode mask, as it is
1306 returned by the macro, it will definitely be less than the
1307 actual mode mask. */
1308 && ((GET_CODE (size) == CONST_INT
1309 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1310 <= (GET_MODE_MASK (mode) >> 1)))
1311 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1312 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1313 || (*pred) (x, BLKmode))
1314 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1315 || (*pred) (y, BLKmode))
1316 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1317 || (*pred) (opalign, VOIDmode)))
1319 rtx op2;
1320 rtx last = get_last_insn ();
1321 rtx pat;
1323 op2 = convert_to_mode (mode, size, 1);
1324 pred = insn_data[(int) code].operand[2].predicate;
1325 if (pred != 0 && ! (*pred) (op2, mode))
1326 op2 = copy_to_mode_reg (mode, op2);
1328 /* ??? When called via emit_block_move_for_call, it'd be
1329 nice if there were some way to inform the backend, so
1330 that it doesn't fail the expansion because it thinks
1331 emitting the libcall would be more efficient. */
1333 if (insn_data[(int) code].n_operands == 4)
1334 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1335 else
1336 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1337 GEN_INT (expected_align),
1338 GEN_INT (expected_size));
1339 if (pat)
1341 emit_insn (pat);
1342 volatile_ok = save_volatile_ok;
1343 return true;
1345 else
1346 delete_insns_since (last);
1350 volatile_ok = save_volatile_ok;
1351 return false;
1354 /* A subroutine of emit_block_move. Expand a call to memcpy.
1355 Return the return value from memcpy, 0 otherwise. */
1358 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1360 rtx dst_addr, src_addr;
1361 tree call_expr, fn, src_tree, dst_tree, size_tree;
1362 enum machine_mode size_mode;
1363 rtx retval;
1365 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1366 pseudos. We can then place those new pseudos into a VAR_DECL and
1367 use them later. */
1369 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1370 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1372 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1373 src_addr = convert_memory_address (ptr_mode, src_addr);
1375 dst_tree = make_tree (ptr_type_node, dst_addr);
1376 src_tree = make_tree (ptr_type_node, src_addr);
1378 size_mode = TYPE_MODE (sizetype);
1380 size = convert_to_mode (size_mode, size, 1);
1381 size = copy_to_mode_reg (size_mode, size);
1383 /* It is incorrect to use the libcall calling conventions to call
1384 memcpy in this context. This could be a user call to memcpy and
1385 the user may wish to examine the return value from memcpy. For
1386 targets where libcalls and normal calls have different conventions
1387 for returning pointers, we could end up generating incorrect code. */
1389 size_tree = make_tree (sizetype, size);
1391 fn = emit_block_move_libcall_fn (true);
1392 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1393 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1395 retval = expand_normal (call_expr);
1397 return retval;
1400 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1401 for the function we use for block copies. The first time FOR_CALL
1402 is true, we call assemble_external. */
1404 static GTY(()) tree block_move_fn;
1406 void
1407 init_block_move_fn (const char *asmspec)
1409 if (!block_move_fn)
1411 tree args, fn;
1413 fn = get_identifier ("memcpy");
1414 args = build_function_type_list (ptr_type_node, ptr_type_node,
1415 const_ptr_type_node, sizetype,
1416 NULL_TREE);
1418 fn = build_decl (FUNCTION_DECL, fn, args);
1419 DECL_EXTERNAL (fn) = 1;
1420 TREE_PUBLIC (fn) = 1;
1421 DECL_ARTIFICIAL (fn) = 1;
1422 TREE_NOTHROW (fn) = 1;
1423 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1424 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1426 block_move_fn = fn;
1429 if (asmspec)
1430 set_user_assembler_name (block_move_fn, asmspec);
1433 static tree
1434 emit_block_move_libcall_fn (int for_call)
1436 static bool emitted_extern;
1438 if (!block_move_fn)
1439 init_block_move_fn (NULL);
1441 if (for_call && !emitted_extern)
1443 emitted_extern = true;
1444 make_decl_rtl (block_move_fn);
1445 assemble_external (block_move_fn);
1448 return block_move_fn;
1451 /* A subroutine of emit_block_move. Copy the data via an explicit
1452 loop. This is used only when libcalls are forbidden. */
1453 /* ??? It'd be nice to copy in hunks larger than QImode. */
1455 static void
1456 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1457 unsigned int align ATTRIBUTE_UNUSED)
1459 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1460 enum machine_mode iter_mode;
1462 iter_mode = GET_MODE (size);
1463 if (iter_mode == VOIDmode)
1464 iter_mode = word_mode;
1466 top_label = gen_label_rtx ();
1467 cmp_label = gen_label_rtx ();
1468 iter = gen_reg_rtx (iter_mode);
1470 emit_move_insn (iter, const0_rtx);
1472 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1473 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1474 do_pending_stack_adjust ();
1476 emit_jump (cmp_label);
1477 emit_label (top_label);
1479 tmp = convert_modes (Pmode, iter_mode, iter, true);
1480 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1481 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1482 x = change_address (x, QImode, x_addr);
1483 y = change_address (y, QImode, y_addr);
1485 emit_move_insn (x, y);
1487 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1488 true, OPTAB_LIB_WIDEN);
1489 if (tmp != iter)
1490 emit_move_insn (iter, tmp);
1492 emit_label (cmp_label);
1494 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1495 true, top_label);
1498 /* Copy all or part of a value X into registers starting at REGNO.
1499 The number of registers to be filled is NREGS. */
1501 void
1502 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1504 int i;
1505 #ifdef HAVE_load_multiple
1506 rtx pat;
1507 rtx last;
1508 #endif
1510 if (nregs == 0)
1511 return;
1513 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1514 x = validize_mem (force_const_mem (mode, x));
1516 /* See if the machine can do this with a load multiple insn. */
1517 #ifdef HAVE_load_multiple
1518 if (HAVE_load_multiple)
1520 last = get_last_insn ();
1521 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1522 GEN_INT (nregs));
1523 if (pat)
1525 emit_insn (pat);
1526 return;
1528 else
1529 delete_insns_since (last);
1531 #endif
1533 for (i = 0; i < nregs; i++)
1534 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1535 operand_subword_force (x, i, mode));
1538 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1539 The number of registers to be filled is NREGS. */
1541 void
1542 move_block_from_reg (int regno, rtx x, int nregs)
1544 int i;
1546 if (nregs == 0)
1547 return;
1549 /* See if the machine can do this with a store multiple insn. */
1550 #ifdef HAVE_store_multiple
1551 if (HAVE_store_multiple)
1553 rtx last = get_last_insn ();
1554 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1555 GEN_INT (nregs));
1556 if (pat)
1558 emit_insn (pat);
1559 return;
1561 else
1562 delete_insns_since (last);
1564 #endif
1566 for (i = 0; i < nregs; i++)
1568 rtx tem = operand_subword (x, i, 1, BLKmode);
1570 gcc_assert (tem);
1572 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1576 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1577 ORIG, where ORIG is a non-consecutive group of registers represented by
1578 a PARALLEL. The clone is identical to the original except in that the
1579 original set of registers is replaced by a new set of pseudo registers.
1580 The new set has the same modes as the original set. */
1583 gen_group_rtx (rtx orig)
1585 int i, length;
1586 rtx *tmps;
1588 gcc_assert (GET_CODE (orig) == PARALLEL);
1590 length = XVECLEN (orig, 0);
1591 tmps = alloca (sizeof (rtx) * length);
1593 /* Skip a NULL entry in first slot. */
1594 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1596 if (i)
1597 tmps[0] = 0;
1599 for (; i < length; i++)
1601 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1602 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1604 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1607 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1610 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1611 except that values are placed in TMPS[i], and must later be moved
1612 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1614 static void
1615 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1617 rtx src;
1618 int start, i;
1619 enum machine_mode m = GET_MODE (orig_src);
1621 gcc_assert (GET_CODE (dst) == PARALLEL);
1623 if (m != VOIDmode
1624 && !SCALAR_INT_MODE_P (m)
1625 && !MEM_P (orig_src)
1626 && GET_CODE (orig_src) != CONCAT)
1628 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1629 if (imode == BLKmode)
1630 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1631 else
1632 src = gen_reg_rtx (imode);
1633 if (imode != BLKmode)
1634 src = gen_lowpart (GET_MODE (orig_src), src);
1635 emit_move_insn (src, orig_src);
1636 /* ...and back again. */
1637 if (imode != BLKmode)
1638 src = gen_lowpart (imode, src);
1639 emit_group_load_1 (tmps, dst, src, type, ssize);
1640 return;
1643 /* Check for a NULL entry, used to indicate that the parameter goes
1644 both on the stack and in registers. */
1645 if (XEXP (XVECEXP (dst, 0, 0), 0))
1646 start = 0;
1647 else
1648 start = 1;
1650 /* Process the pieces. */
1651 for (i = start; i < XVECLEN (dst, 0); i++)
1653 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1654 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1655 unsigned int bytelen = GET_MODE_SIZE (mode);
1656 int shift = 0;
1658 /* Handle trailing fragments that run over the size of the struct. */
1659 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1661 /* Arrange to shift the fragment to where it belongs.
1662 extract_bit_field loads to the lsb of the reg. */
1663 if (
1664 #ifdef BLOCK_REG_PADDING
1665 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1666 == (BYTES_BIG_ENDIAN ? upward : downward)
1667 #else
1668 BYTES_BIG_ENDIAN
1669 #endif
1671 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1672 bytelen = ssize - bytepos;
1673 gcc_assert (bytelen > 0);
1676 /* If we won't be loading directly from memory, protect the real source
1677 from strange tricks we might play; but make sure that the source can
1678 be loaded directly into the destination. */
1679 src = orig_src;
1680 if (!MEM_P (orig_src)
1681 && (!CONSTANT_P (orig_src)
1682 || (GET_MODE (orig_src) != mode
1683 && GET_MODE (orig_src) != VOIDmode)))
1685 if (GET_MODE (orig_src) == VOIDmode)
1686 src = gen_reg_rtx (mode);
1687 else
1688 src = gen_reg_rtx (GET_MODE (orig_src));
1690 emit_move_insn (src, orig_src);
1693 /* Optimize the access just a bit. */
1694 if (MEM_P (src)
1695 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1696 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1697 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1698 && bytelen == GET_MODE_SIZE (mode))
1700 tmps[i] = gen_reg_rtx (mode);
1701 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1703 else if (COMPLEX_MODE_P (mode)
1704 && GET_MODE (src) == mode
1705 && bytelen == GET_MODE_SIZE (mode))
1706 /* Let emit_move_complex do the bulk of the work. */
1707 tmps[i] = src;
1708 else if (GET_CODE (src) == CONCAT)
1710 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1711 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1713 if ((bytepos == 0 && bytelen == slen0)
1714 || (bytepos != 0 && bytepos + bytelen <= slen))
1716 /* The following assumes that the concatenated objects all
1717 have the same size. In this case, a simple calculation
1718 can be used to determine the object and the bit field
1719 to be extracted. */
1720 tmps[i] = XEXP (src, bytepos / slen0);
1721 if (! CONSTANT_P (tmps[i])
1722 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1723 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1724 (bytepos % slen0) * BITS_PER_UNIT,
1725 1, NULL_RTX, mode, mode);
1727 else
1729 rtx mem;
1731 gcc_assert (!bytepos);
1732 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1733 emit_move_insn (mem, src);
1734 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1735 0, 1, NULL_RTX, mode, mode);
1738 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1739 SIMD register, which is currently broken. While we get GCC
1740 to emit proper RTL for these cases, let's dump to memory. */
1741 else if (VECTOR_MODE_P (GET_MODE (dst))
1742 && REG_P (src))
1744 int slen = GET_MODE_SIZE (GET_MODE (src));
1745 rtx mem;
1747 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1748 emit_move_insn (mem, src);
1749 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1751 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1752 && XVECLEN (dst, 0) > 1)
1753 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1754 else if (CONSTANT_P (src)
1755 || (REG_P (src) && GET_MODE (src) == mode))
1756 tmps[i] = src;
1757 else
1758 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1759 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1760 mode, mode);
1762 if (shift)
1763 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1764 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1768 /* Emit code to move a block SRC of type TYPE to a block DST,
1769 where DST is non-consecutive registers represented by a PARALLEL.
1770 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1771 if not known. */
1773 void
1774 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1776 rtx *tmps;
1777 int i;
1779 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1780 emit_group_load_1 (tmps, dst, src, type, ssize);
1782 /* Copy the extracted pieces into the proper (probable) hard regs. */
1783 for (i = 0; i < XVECLEN (dst, 0); i++)
1785 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1786 if (d == NULL)
1787 continue;
1788 emit_move_insn (d, tmps[i]);
1792 /* Similar, but load SRC into new pseudos in a format that looks like
1793 PARALLEL. This can later be fed to emit_group_move to get things
1794 in the right place. */
1797 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1799 rtvec vec;
1800 int i;
1802 vec = rtvec_alloc (XVECLEN (parallel, 0));
1803 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1805 /* Convert the vector to look just like the original PARALLEL, except
1806 with the computed values. */
1807 for (i = 0; i < XVECLEN (parallel, 0); i++)
1809 rtx e = XVECEXP (parallel, 0, i);
1810 rtx d = XEXP (e, 0);
1812 if (d)
1814 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1815 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1817 RTVEC_ELT (vec, i) = e;
1820 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1823 /* Emit code to move a block SRC to block DST, where SRC and DST are
1824 non-consecutive groups of registers, each represented by a PARALLEL. */
1826 void
1827 emit_group_move (rtx dst, rtx src)
1829 int i;
1831 gcc_assert (GET_CODE (src) == PARALLEL
1832 && GET_CODE (dst) == PARALLEL
1833 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1835 /* Skip first entry if NULL. */
1836 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1837 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1838 XEXP (XVECEXP (src, 0, i), 0));
1841 /* Move a group of registers represented by a PARALLEL into pseudos. */
1844 emit_group_move_into_temps (rtx src)
1846 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1847 int i;
1849 for (i = 0; i < XVECLEN (src, 0); i++)
1851 rtx e = XVECEXP (src, 0, i);
1852 rtx d = XEXP (e, 0);
1854 if (d)
1855 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1856 RTVEC_ELT (vec, i) = e;
1859 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1862 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1863 where SRC is non-consecutive registers represented by a PARALLEL.
1864 SSIZE represents the total size of block ORIG_DST, or -1 if not
1865 known. */
1867 void
1868 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1870 rtx *tmps, dst;
1871 int start, finish, i;
1872 enum machine_mode m = GET_MODE (orig_dst);
1874 gcc_assert (GET_CODE (src) == PARALLEL);
1876 if (!SCALAR_INT_MODE_P (m)
1877 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1879 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1880 if (imode == BLKmode)
1881 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1882 else
1883 dst = gen_reg_rtx (imode);
1884 emit_group_store (dst, src, type, ssize);
1885 if (imode != BLKmode)
1886 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1887 emit_move_insn (orig_dst, dst);
1888 return;
1891 /* Check for a NULL entry, used to indicate that the parameter goes
1892 both on the stack and in registers. */
1893 if (XEXP (XVECEXP (src, 0, 0), 0))
1894 start = 0;
1895 else
1896 start = 1;
1897 finish = XVECLEN (src, 0);
1899 tmps = alloca (sizeof (rtx) * finish);
1901 /* Copy the (probable) hard regs into pseudos. */
1902 for (i = start; i < finish; i++)
1904 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1905 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1907 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1908 emit_move_insn (tmps[i], reg);
1910 else
1911 tmps[i] = reg;
1914 /* If we won't be storing directly into memory, protect the real destination
1915 from strange tricks we might play. */
1916 dst = orig_dst;
1917 if (GET_CODE (dst) == PARALLEL)
1919 rtx temp;
1921 /* We can get a PARALLEL dst if there is a conditional expression in
1922 a return statement. In that case, the dst and src are the same,
1923 so no action is necessary. */
1924 if (rtx_equal_p (dst, src))
1925 return;
1927 /* It is unclear if we can ever reach here, but we may as well handle
1928 it. Allocate a temporary, and split this into a store/load to/from
1929 the temporary. */
1931 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1932 emit_group_store (temp, src, type, ssize);
1933 emit_group_load (dst, temp, type, ssize);
1934 return;
1936 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1938 enum machine_mode outer = GET_MODE (dst);
1939 enum machine_mode inner;
1940 HOST_WIDE_INT bytepos;
1941 bool done = false;
1942 rtx temp;
1944 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1945 dst = gen_reg_rtx (outer);
1947 /* Make life a bit easier for combine. */
1948 /* If the first element of the vector is the low part
1949 of the destination mode, use a paradoxical subreg to
1950 initialize the destination. */
1951 if (start < finish)
1953 inner = GET_MODE (tmps[start]);
1954 bytepos = subreg_lowpart_offset (inner, outer);
1955 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1957 temp = simplify_gen_subreg (outer, tmps[start],
1958 inner, 0);
1959 if (temp)
1961 emit_move_insn (dst, temp);
1962 done = true;
1963 start++;
1968 /* If the first element wasn't the low part, try the last. */
1969 if (!done
1970 && start < finish - 1)
1972 inner = GET_MODE (tmps[finish - 1]);
1973 bytepos = subreg_lowpart_offset (inner, outer);
1974 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1976 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1977 inner, 0);
1978 if (temp)
1980 emit_move_insn (dst, temp);
1981 done = true;
1982 finish--;
1987 /* Otherwise, simply initialize the result to zero. */
1988 if (!done)
1989 emit_move_insn (dst, CONST0_RTX (outer));
1992 /* Process the pieces. */
1993 for (i = start; i < finish; i++)
1995 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1996 enum machine_mode mode = GET_MODE (tmps[i]);
1997 unsigned int bytelen = GET_MODE_SIZE (mode);
1998 rtx dest = dst;
2000 /* Handle trailing fragments that run over the size of the struct. */
2001 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2003 /* store_bit_field always takes its value from the lsb.
2004 Move the fragment to the lsb if it's not already there. */
2005 if (
2006 #ifdef BLOCK_REG_PADDING
2007 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2008 == (BYTES_BIG_ENDIAN ? upward : downward)
2009 #else
2010 BYTES_BIG_ENDIAN
2011 #endif
2014 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2015 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2016 build_int_cst (NULL_TREE, shift),
2017 tmps[i], 0);
2019 bytelen = ssize - bytepos;
2022 if (GET_CODE (dst) == CONCAT)
2024 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2025 dest = XEXP (dst, 0);
2026 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2028 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2029 dest = XEXP (dst, 1);
2031 else
2033 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2034 dest = assign_stack_temp (GET_MODE (dest),
2035 GET_MODE_SIZE (GET_MODE (dest)), 0);
2036 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2037 tmps[i]);
2038 dst = dest;
2039 break;
2043 /* Optimize the access just a bit. */
2044 if (MEM_P (dest)
2045 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2046 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2047 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2048 && bytelen == GET_MODE_SIZE (mode))
2049 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2050 else
2051 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2052 mode, tmps[i]);
2055 /* Copy from the pseudo into the (probable) hard reg. */
2056 if (orig_dst != dst)
2057 emit_move_insn (orig_dst, dst);
2060 /* Generate code to copy a BLKmode object of TYPE out of a
2061 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2062 is null, a stack temporary is created. TGTBLK is returned.
2064 The purpose of this routine is to handle functions that return
2065 BLKmode structures in registers. Some machines (the PA for example)
2066 want to return all small structures in registers regardless of the
2067 structure's alignment. */
2070 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2072 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2073 rtx src = NULL, dst = NULL;
2074 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2075 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2077 if (tgtblk == 0)
2079 tgtblk = assign_temp (build_qualified_type (type,
2080 (TYPE_QUALS (type)
2081 | TYPE_QUAL_CONST)),
2082 0, 1, 1);
2083 preserve_temp_slots (tgtblk);
2086 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2087 into a new pseudo which is a full word. */
2089 if (GET_MODE (srcreg) != BLKmode
2090 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2091 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2093 /* If the structure doesn't take up a whole number of words, see whether
2094 SRCREG is padded on the left or on the right. If it's on the left,
2095 set PADDING_CORRECTION to the number of bits to skip.
2097 In most ABIs, the structure will be returned at the least end of
2098 the register, which translates to right padding on little-endian
2099 targets and left padding on big-endian targets. The opposite
2100 holds if the structure is returned at the most significant
2101 end of the register. */
2102 if (bytes % UNITS_PER_WORD != 0
2103 && (targetm.calls.return_in_msb (type)
2104 ? !BYTES_BIG_ENDIAN
2105 : BYTES_BIG_ENDIAN))
2106 padding_correction
2107 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2109 /* Copy the structure BITSIZE bites at a time.
2111 We could probably emit more efficient code for machines which do not use
2112 strict alignment, but it doesn't seem worth the effort at the current
2113 time. */
2114 for (bitpos = 0, xbitpos = padding_correction;
2115 bitpos < bytes * BITS_PER_UNIT;
2116 bitpos += bitsize, xbitpos += bitsize)
2118 /* We need a new source operand each time xbitpos is on a
2119 word boundary and when xbitpos == padding_correction
2120 (the first time through). */
2121 if (xbitpos % BITS_PER_WORD == 0
2122 || xbitpos == padding_correction)
2123 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2124 GET_MODE (srcreg));
2126 /* We need a new destination operand each time bitpos is on
2127 a word boundary. */
2128 if (bitpos % BITS_PER_WORD == 0)
2129 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2131 /* Use xbitpos for the source extraction (right justified) and
2132 xbitpos for the destination store (left justified). */
2133 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2134 extract_bit_field (src, bitsize,
2135 xbitpos % BITS_PER_WORD, 1,
2136 NULL_RTX, word_mode, word_mode));
2139 return tgtblk;
2142 /* Add a USE expression for REG to the (possibly empty) list pointed
2143 to by CALL_FUSAGE. REG must denote a hard register. */
2145 void
2146 use_reg (rtx *call_fusage, rtx reg)
2148 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2150 *call_fusage
2151 = gen_rtx_EXPR_LIST (VOIDmode,
2152 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2155 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2156 starting at REGNO. All of these registers must be hard registers. */
2158 void
2159 use_regs (rtx *call_fusage, int regno, int nregs)
2161 int i;
2163 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2165 for (i = 0; i < nregs; i++)
2166 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2169 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2170 PARALLEL REGS. This is for calls that pass values in multiple
2171 non-contiguous locations. The Irix 6 ABI has examples of this. */
2173 void
2174 use_group_regs (rtx *call_fusage, rtx regs)
2176 int i;
2178 for (i = 0; i < XVECLEN (regs, 0); i++)
2180 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2182 /* A NULL entry means the parameter goes both on the stack and in
2183 registers. This can also be a MEM for targets that pass values
2184 partially on the stack and partially in registers. */
2185 if (reg != 0 && REG_P (reg))
2186 use_reg (call_fusage, reg);
2191 /* Determine whether the LEN bytes generated by CONSTFUN can be
2192 stored to memory using several move instructions. CONSTFUNDATA is
2193 a pointer which will be passed as argument in every CONSTFUN call.
2194 ALIGN is maximum alignment we can assume. Return nonzero if a
2195 call to store_by_pieces should succeed. */
2198 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2199 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2200 void *constfundata, unsigned int align)
2202 unsigned HOST_WIDE_INT l;
2203 unsigned int max_size;
2204 HOST_WIDE_INT offset = 0;
2205 enum machine_mode mode, tmode;
2206 enum insn_code icode;
2207 int reverse;
2208 rtx cst;
2210 if (len == 0)
2211 return 1;
2213 if (! STORE_BY_PIECES_P (len, align))
2214 return 0;
2216 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2217 if (align >= GET_MODE_ALIGNMENT (tmode))
2218 align = GET_MODE_ALIGNMENT (tmode);
2219 else
2221 enum machine_mode xmode;
2223 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2224 tmode != VOIDmode;
2225 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2226 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2227 || SLOW_UNALIGNED_ACCESS (tmode, align))
2228 break;
2230 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2233 /* We would first store what we can in the largest integer mode, then go to
2234 successively smaller modes. */
2236 for (reverse = 0;
2237 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2238 reverse++)
2240 l = len;
2241 mode = VOIDmode;
2242 max_size = STORE_MAX_PIECES + 1;
2243 while (max_size > 1)
2245 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2246 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2247 if (GET_MODE_SIZE (tmode) < max_size)
2248 mode = tmode;
2250 if (mode == VOIDmode)
2251 break;
2253 icode = mov_optab->handlers[(int) mode].insn_code;
2254 if (icode != CODE_FOR_nothing
2255 && align >= GET_MODE_ALIGNMENT (mode))
2257 unsigned int size = GET_MODE_SIZE (mode);
2259 while (l >= size)
2261 if (reverse)
2262 offset -= size;
2264 cst = (*constfun) (constfundata, offset, mode);
2265 if (!LEGITIMATE_CONSTANT_P (cst))
2266 return 0;
2268 if (!reverse)
2269 offset += size;
2271 l -= size;
2275 max_size = GET_MODE_SIZE (mode);
2278 /* The code above should have handled everything. */
2279 gcc_assert (!l);
2282 return 1;
2285 /* Generate several move instructions to store LEN bytes generated by
2286 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2287 pointer which will be passed as argument in every CONSTFUN call.
2288 ALIGN is maximum alignment we can assume.
2289 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2290 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2291 stpcpy. */
2294 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2295 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2296 void *constfundata, unsigned int align, int endp)
2298 struct store_by_pieces data;
2300 if (len == 0)
2302 gcc_assert (endp != 2);
2303 return to;
2306 gcc_assert (STORE_BY_PIECES_P (len, align));
2307 data.constfun = constfun;
2308 data.constfundata = constfundata;
2309 data.len = len;
2310 data.to = to;
2311 store_by_pieces_1 (&data, align);
2312 if (endp)
2314 rtx to1;
2316 gcc_assert (!data.reverse);
2317 if (data.autinc_to)
2319 if (endp == 2)
2321 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2322 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2323 else
2324 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2325 -1));
2327 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2328 data.offset);
2330 else
2332 if (endp == 2)
2333 --data.offset;
2334 to1 = adjust_address (data.to, QImode, data.offset);
2336 return to1;
2338 else
2339 return data.to;
2342 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2343 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2345 static void
2346 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2348 struct store_by_pieces data;
2350 if (len == 0)
2351 return;
2353 data.constfun = clear_by_pieces_1;
2354 data.constfundata = NULL;
2355 data.len = len;
2356 data.to = to;
2357 store_by_pieces_1 (&data, align);
2360 /* Callback routine for clear_by_pieces.
2361 Return const0_rtx unconditionally. */
2363 static rtx
2364 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2365 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2366 enum machine_mode mode ATTRIBUTE_UNUSED)
2368 return const0_rtx;
2371 /* Subroutine of clear_by_pieces and store_by_pieces.
2372 Generate several move instructions to store LEN bytes of block TO. (A MEM
2373 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2375 static void
2376 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2377 unsigned int align ATTRIBUTE_UNUSED)
2379 rtx to_addr = XEXP (data->to, 0);
2380 unsigned int max_size = STORE_MAX_PIECES + 1;
2381 enum machine_mode mode = VOIDmode, tmode;
2382 enum insn_code icode;
2384 data->offset = 0;
2385 data->to_addr = to_addr;
2386 data->autinc_to
2387 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2388 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2390 data->explicit_inc_to = 0;
2391 data->reverse
2392 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2393 if (data->reverse)
2394 data->offset = data->len;
2396 /* If storing requires more than two move insns,
2397 copy addresses to registers (to make displacements shorter)
2398 and use post-increment if available. */
2399 if (!data->autinc_to
2400 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2402 /* Determine the main mode we'll be using. */
2403 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2404 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2405 if (GET_MODE_SIZE (tmode) < max_size)
2406 mode = tmode;
2408 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2410 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2411 data->autinc_to = 1;
2412 data->explicit_inc_to = -1;
2415 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2416 && ! data->autinc_to)
2418 data->to_addr = copy_addr_to_reg (to_addr);
2419 data->autinc_to = 1;
2420 data->explicit_inc_to = 1;
2423 if ( !data->autinc_to && CONSTANT_P (to_addr))
2424 data->to_addr = copy_addr_to_reg (to_addr);
2427 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2428 if (align >= GET_MODE_ALIGNMENT (tmode))
2429 align = GET_MODE_ALIGNMENT (tmode);
2430 else
2432 enum machine_mode xmode;
2434 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2435 tmode != VOIDmode;
2436 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2437 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2438 || SLOW_UNALIGNED_ACCESS (tmode, align))
2439 break;
2441 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2444 /* First store what we can in the largest integer mode, then go to
2445 successively smaller modes. */
2447 while (max_size > 1)
2449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2451 if (GET_MODE_SIZE (tmode) < max_size)
2452 mode = tmode;
2454 if (mode == VOIDmode)
2455 break;
2457 icode = mov_optab->handlers[(int) mode].insn_code;
2458 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2459 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2461 max_size = GET_MODE_SIZE (mode);
2464 /* The code above should have handled everything. */
2465 gcc_assert (!data->len);
2468 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2469 with move instructions for mode MODE. GENFUN is the gen_... function
2470 to make a move insn for that mode. DATA has all the other info. */
2472 static void
2473 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2474 struct store_by_pieces *data)
2476 unsigned int size = GET_MODE_SIZE (mode);
2477 rtx to1, cst;
2479 while (data->len >= size)
2481 if (data->reverse)
2482 data->offset -= size;
2484 if (data->autinc_to)
2485 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2486 data->offset);
2487 else
2488 to1 = adjust_address (data->to, mode, data->offset);
2490 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2491 emit_insn (gen_add2_insn (data->to_addr,
2492 GEN_INT (-(HOST_WIDE_INT) size)));
2494 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2495 emit_insn ((*genfun) (to1, cst));
2497 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2498 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2500 if (! data->reverse)
2501 data->offset += size;
2503 data->len -= size;
2507 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2508 its length in bytes. */
2511 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2512 unsigned int expected_align, HOST_WIDE_INT expected_size)
2514 enum machine_mode mode = GET_MODE (object);
2515 unsigned int align;
2517 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2519 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2520 just move a zero. Otherwise, do this a piece at a time. */
2521 if (mode != BLKmode
2522 && GET_CODE (size) == CONST_INT
2523 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2525 rtx zero = CONST0_RTX (mode);
2526 if (zero != NULL)
2528 emit_move_insn (object, zero);
2529 return NULL;
2532 if (COMPLEX_MODE_P (mode))
2534 zero = CONST0_RTX (GET_MODE_INNER (mode));
2535 if (zero != NULL)
2537 write_complex_part (object, zero, 0);
2538 write_complex_part (object, zero, 1);
2539 return NULL;
2544 if (size == const0_rtx)
2545 return NULL;
2547 align = MEM_ALIGN (object);
2549 if (GET_CODE (size) == CONST_INT
2550 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2551 clear_by_pieces (object, INTVAL (size), align);
2552 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2553 expected_align, expected_size))
2555 else
2556 return set_storage_via_libcall (object, size, const0_rtx,
2557 method == BLOCK_OP_TAILCALL);
2559 return NULL;
2563 clear_storage (rtx object, rtx size, enum block_op_methods method)
2565 return clear_storage_hints (object, size, method, 0, -1);
2569 /* A subroutine of clear_storage. Expand a call to memset.
2570 Return the return value of memset, 0 otherwise. */
2573 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2575 tree call_expr, fn, object_tree, size_tree, val_tree;
2576 enum machine_mode size_mode;
2577 rtx retval;
2579 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2580 place those into new pseudos into a VAR_DECL and use them later. */
2582 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2584 size_mode = TYPE_MODE (sizetype);
2585 size = convert_to_mode (size_mode, size, 1);
2586 size = copy_to_mode_reg (size_mode, size);
2588 /* It is incorrect to use the libcall calling conventions to call
2589 memset in this context. This could be a user call to memset and
2590 the user may wish to examine the return value from memset. For
2591 targets where libcalls and normal calls have different conventions
2592 for returning pointers, we could end up generating incorrect code. */
2594 object_tree = make_tree (ptr_type_node, object);
2595 if (GET_CODE (val) != CONST_INT)
2596 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2597 size_tree = make_tree (sizetype, size);
2598 val_tree = make_tree (integer_type_node, val);
2600 fn = clear_storage_libcall_fn (true);
2601 call_expr = build_call_expr (fn, 3,
2602 object_tree, integer_zero_node, size_tree);
2603 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2605 retval = expand_normal (call_expr);
2607 return retval;
2610 /* A subroutine of set_storage_via_libcall. Create the tree node
2611 for the function we use for block clears. The first time FOR_CALL
2612 is true, we call assemble_external. */
2614 static GTY(()) tree block_clear_fn;
2616 void
2617 init_block_clear_fn (const char *asmspec)
2619 if (!block_clear_fn)
2621 tree fn, args;
2623 fn = get_identifier ("memset");
2624 args = build_function_type_list (ptr_type_node, ptr_type_node,
2625 integer_type_node, sizetype,
2626 NULL_TREE);
2628 fn = build_decl (FUNCTION_DECL, fn, args);
2629 DECL_EXTERNAL (fn) = 1;
2630 TREE_PUBLIC (fn) = 1;
2631 DECL_ARTIFICIAL (fn) = 1;
2632 TREE_NOTHROW (fn) = 1;
2633 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2634 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2636 block_clear_fn = fn;
2639 if (asmspec)
2640 set_user_assembler_name (block_clear_fn, asmspec);
2643 static tree
2644 clear_storage_libcall_fn (int for_call)
2646 static bool emitted_extern;
2648 if (!block_clear_fn)
2649 init_block_clear_fn (NULL);
2651 if (for_call && !emitted_extern)
2653 emitted_extern = true;
2654 make_decl_rtl (block_clear_fn);
2655 assemble_external (block_clear_fn);
2658 return block_clear_fn;
2661 /* Expand a setmem pattern; return true if successful. */
2663 bool
2664 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2665 unsigned int expected_align, HOST_WIDE_INT expected_size)
2667 /* Try the most limited insn first, because there's no point
2668 including more than one in the machine description unless
2669 the more limited one has some advantage. */
2671 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2672 enum machine_mode mode;
2674 if (expected_align < align)
2675 expected_align = align;
2677 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2678 mode = GET_MODE_WIDER_MODE (mode))
2680 enum insn_code code = setmem_optab[(int) mode];
2681 insn_operand_predicate_fn pred;
2683 if (code != CODE_FOR_nothing
2684 /* We don't need MODE to be narrower than
2685 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2686 the mode mask, as it is returned by the macro, it will
2687 definitely be less than the actual mode mask. */
2688 && ((GET_CODE (size) == CONST_INT
2689 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2690 <= (GET_MODE_MASK (mode) >> 1)))
2691 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2692 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2693 || (*pred) (object, BLKmode))
2694 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2695 || (*pred) (opalign, VOIDmode)))
2697 rtx opsize, opchar;
2698 enum machine_mode char_mode;
2699 rtx last = get_last_insn ();
2700 rtx pat;
2702 opsize = convert_to_mode (mode, size, 1);
2703 pred = insn_data[(int) code].operand[1].predicate;
2704 if (pred != 0 && ! (*pred) (opsize, mode))
2705 opsize = copy_to_mode_reg (mode, opsize);
2707 opchar = val;
2708 char_mode = insn_data[(int) code].operand[2].mode;
2709 if (char_mode != VOIDmode)
2711 opchar = convert_to_mode (char_mode, opchar, 1);
2712 pred = insn_data[(int) code].operand[2].predicate;
2713 if (pred != 0 && ! (*pred) (opchar, char_mode))
2714 opchar = copy_to_mode_reg (char_mode, opchar);
2717 if (insn_data[(int) code].n_operands == 4)
2718 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2719 else
2720 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2721 GEN_INT (expected_align),
2722 GEN_INT (expected_size));
2723 if (pat)
2725 emit_insn (pat);
2726 return true;
2728 else
2729 delete_insns_since (last);
2733 return false;
2737 /* Write to one of the components of the complex value CPLX. Write VAL to
2738 the real part if IMAG_P is false, and the imaginary part if its true. */
2740 static void
2741 write_complex_part (rtx cplx, rtx val, bool imag_p)
2743 enum machine_mode cmode;
2744 enum machine_mode imode;
2745 unsigned ibitsize;
2747 if (GET_CODE (cplx) == CONCAT)
2749 emit_move_insn (XEXP (cplx, imag_p), val);
2750 return;
2753 cmode = GET_MODE (cplx);
2754 imode = GET_MODE_INNER (cmode);
2755 ibitsize = GET_MODE_BITSIZE (imode);
2757 /* For MEMs simplify_gen_subreg may generate an invalid new address
2758 because, e.g., the original address is considered mode-dependent
2759 by the target, which restricts simplify_subreg from invoking
2760 adjust_address_nv. Instead of preparing fallback support for an
2761 invalid address, we call adjust_address_nv directly. */
2762 if (MEM_P (cplx))
2764 emit_move_insn (adjust_address_nv (cplx, imode,
2765 imag_p ? GET_MODE_SIZE (imode) : 0),
2766 val);
2767 return;
2770 /* If the sub-object is at least word sized, then we know that subregging
2771 will work. This special case is important, since store_bit_field
2772 wants to operate on integer modes, and there's rarely an OImode to
2773 correspond to TCmode. */
2774 if (ibitsize >= BITS_PER_WORD
2775 /* For hard regs we have exact predicates. Assume we can split
2776 the original object if it spans an even number of hard regs.
2777 This special case is important for SCmode on 64-bit platforms
2778 where the natural size of floating-point regs is 32-bit. */
2779 || (REG_P (cplx)
2780 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2781 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2783 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2784 imag_p ? GET_MODE_SIZE (imode) : 0);
2785 if (part)
2787 emit_move_insn (part, val);
2788 return;
2790 else
2791 /* simplify_gen_subreg may fail for sub-word MEMs. */
2792 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2795 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2798 /* Extract one of the components of the complex value CPLX. Extract the
2799 real part if IMAG_P is false, and the imaginary part if it's true. */
2801 static rtx
2802 read_complex_part (rtx cplx, bool imag_p)
2804 enum machine_mode cmode, imode;
2805 unsigned ibitsize;
2807 if (GET_CODE (cplx) == CONCAT)
2808 return XEXP (cplx, imag_p);
2810 cmode = GET_MODE (cplx);
2811 imode = GET_MODE_INNER (cmode);
2812 ibitsize = GET_MODE_BITSIZE (imode);
2814 /* Special case reads from complex constants that got spilled to memory. */
2815 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2817 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2818 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2820 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2821 if (CONSTANT_CLASS_P (part))
2822 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2826 /* For MEMs simplify_gen_subreg may generate an invalid new address
2827 because, e.g., the original address is considered mode-dependent
2828 by the target, which restricts simplify_subreg from invoking
2829 adjust_address_nv. Instead of preparing fallback support for an
2830 invalid address, we call adjust_address_nv directly. */
2831 if (MEM_P (cplx))
2832 return adjust_address_nv (cplx, imode,
2833 imag_p ? GET_MODE_SIZE (imode) : 0);
2835 /* If the sub-object is at least word sized, then we know that subregging
2836 will work. This special case is important, since extract_bit_field
2837 wants to operate on integer modes, and there's rarely an OImode to
2838 correspond to TCmode. */
2839 if (ibitsize >= BITS_PER_WORD
2840 /* For hard regs we have exact predicates. Assume we can split
2841 the original object if it spans an even number of hard regs.
2842 This special case is important for SCmode on 64-bit platforms
2843 where the natural size of floating-point regs is 32-bit. */
2844 || (REG_P (cplx)
2845 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2846 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2848 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2849 imag_p ? GET_MODE_SIZE (imode) : 0);
2850 if (ret)
2851 return ret;
2852 else
2853 /* simplify_gen_subreg may fail for sub-word MEMs. */
2854 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2857 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2858 true, NULL_RTX, imode, imode);
2861 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2862 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2863 represented in NEW_MODE. If FORCE is true, this will never happen, as
2864 we'll force-create a SUBREG if needed. */
2866 static rtx
2867 emit_move_change_mode (enum machine_mode new_mode,
2868 enum machine_mode old_mode, rtx x, bool force)
2870 rtx ret;
2872 if (push_operand (x, GET_MODE (x)))
2874 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2875 MEM_COPY_ATTRIBUTES (ret, x);
2877 else if (MEM_P (x))
2879 /* We don't have to worry about changing the address since the
2880 size in bytes is supposed to be the same. */
2881 if (reload_in_progress)
2883 /* Copy the MEM to change the mode and move any
2884 substitutions from the old MEM to the new one. */
2885 ret = adjust_address_nv (x, new_mode, 0);
2886 copy_replacements (x, ret);
2888 else
2889 ret = adjust_address (x, new_mode, 0);
2891 else
2893 /* Note that we do want simplify_subreg's behavior of validating
2894 that the new mode is ok for a hard register. If we were to use
2895 simplify_gen_subreg, we would create the subreg, but would
2896 probably run into the target not being able to implement it. */
2897 /* Except, of course, when FORCE is true, when this is exactly what
2898 we want. Which is needed for CCmodes on some targets. */
2899 if (force)
2900 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2901 else
2902 ret = simplify_subreg (new_mode, x, old_mode, 0);
2905 return ret;
2908 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2909 an integer mode of the same size as MODE. Returns the instruction
2910 emitted, or NULL if such a move could not be generated. */
2912 static rtx
2913 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2915 enum machine_mode imode;
2916 enum insn_code code;
2918 /* There must exist a mode of the exact size we require. */
2919 imode = int_mode_for_mode (mode);
2920 if (imode == BLKmode)
2921 return NULL_RTX;
2923 /* The target must support moves in this mode. */
2924 code = mov_optab->handlers[imode].insn_code;
2925 if (code == CODE_FOR_nothing)
2926 return NULL_RTX;
2928 x = emit_move_change_mode (imode, mode, x, force);
2929 if (x == NULL_RTX)
2930 return NULL_RTX;
2931 y = emit_move_change_mode (imode, mode, y, force);
2932 if (y == NULL_RTX)
2933 return NULL_RTX;
2934 return emit_insn (GEN_FCN (code) (x, y));
2937 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2938 Return an equivalent MEM that does not use an auto-increment. */
2940 static rtx
2941 emit_move_resolve_push (enum machine_mode mode, rtx x)
2943 enum rtx_code code = GET_CODE (XEXP (x, 0));
2944 HOST_WIDE_INT adjust;
2945 rtx temp;
2947 adjust = GET_MODE_SIZE (mode);
2948 #ifdef PUSH_ROUNDING
2949 adjust = PUSH_ROUNDING (adjust);
2950 #endif
2951 if (code == PRE_DEC || code == POST_DEC)
2952 adjust = -adjust;
2953 else if (code == PRE_MODIFY || code == POST_MODIFY)
2955 rtx expr = XEXP (XEXP (x, 0), 1);
2956 HOST_WIDE_INT val;
2958 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2959 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2960 val = INTVAL (XEXP (expr, 1));
2961 if (GET_CODE (expr) == MINUS)
2962 val = -val;
2963 gcc_assert (adjust == val || adjust == -val);
2964 adjust = val;
2967 /* Do not use anti_adjust_stack, since we don't want to update
2968 stack_pointer_delta. */
2969 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2970 GEN_INT (adjust), stack_pointer_rtx,
2971 0, OPTAB_LIB_WIDEN);
2972 if (temp != stack_pointer_rtx)
2973 emit_move_insn (stack_pointer_rtx, temp);
2975 switch (code)
2977 case PRE_INC:
2978 case PRE_DEC:
2979 case PRE_MODIFY:
2980 temp = stack_pointer_rtx;
2981 break;
2982 case POST_INC:
2983 case POST_DEC:
2984 case POST_MODIFY:
2985 temp = plus_constant (stack_pointer_rtx, -adjust);
2986 break;
2987 default:
2988 gcc_unreachable ();
2991 return replace_equiv_address (x, temp);
2994 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2995 X is known to satisfy push_operand, and MODE is known to be complex.
2996 Returns the last instruction emitted. */
2999 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3001 enum machine_mode submode = GET_MODE_INNER (mode);
3002 bool imag_first;
3004 #ifdef PUSH_ROUNDING
3005 unsigned int submodesize = GET_MODE_SIZE (submode);
3007 /* In case we output to the stack, but the size is smaller than the
3008 machine can push exactly, we need to use move instructions. */
3009 if (PUSH_ROUNDING (submodesize) != submodesize)
3011 x = emit_move_resolve_push (mode, x);
3012 return emit_move_insn (x, y);
3014 #endif
3016 /* Note that the real part always precedes the imag part in memory
3017 regardless of machine's endianness. */
3018 switch (GET_CODE (XEXP (x, 0)))
3020 case PRE_DEC:
3021 case POST_DEC:
3022 imag_first = true;
3023 break;
3024 case PRE_INC:
3025 case POST_INC:
3026 imag_first = false;
3027 break;
3028 default:
3029 gcc_unreachable ();
3032 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3033 read_complex_part (y, imag_first));
3034 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3035 read_complex_part (y, !imag_first));
3038 /* A subroutine of emit_move_complex. Perform the move from Y to X
3039 via two moves of the parts. Returns the last instruction emitted. */
3042 emit_move_complex_parts (rtx x, rtx y)
3044 /* Show the output dies here. This is necessary for SUBREGs
3045 of pseudos since we cannot track their lifetimes correctly;
3046 hard regs shouldn't appear here except as return values. */
3047 if (!reload_completed && !reload_in_progress
3048 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3049 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3051 write_complex_part (x, read_complex_part (y, false), false);
3052 write_complex_part (x, read_complex_part (y, true), true);
3054 return get_last_insn ();
3057 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3058 MODE is known to be complex. Returns the last instruction emitted. */
3060 static rtx
3061 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3063 bool try_int;
3065 /* Need to take special care for pushes, to maintain proper ordering
3066 of the data, and possibly extra padding. */
3067 if (push_operand (x, mode))
3068 return emit_move_complex_push (mode, x, y);
3070 /* See if we can coerce the target into moving both values at once. */
3072 /* Move floating point as parts. */
3073 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3074 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3075 try_int = false;
3076 /* Not possible if the values are inherently not adjacent. */
3077 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3078 try_int = false;
3079 /* Is possible if both are registers (or subregs of registers). */
3080 else if (register_operand (x, mode) && register_operand (y, mode))
3081 try_int = true;
3082 /* If one of the operands is a memory, and alignment constraints
3083 are friendly enough, we may be able to do combined memory operations.
3084 We do not attempt this if Y is a constant because that combination is
3085 usually better with the by-parts thing below. */
3086 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3087 && (!STRICT_ALIGNMENT
3088 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3089 try_int = true;
3090 else
3091 try_int = false;
3093 if (try_int)
3095 rtx ret;
3097 /* For memory to memory moves, optimal behavior can be had with the
3098 existing block move logic. */
3099 if (MEM_P (x) && MEM_P (y))
3101 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3102 BLOCK_OP_NO_LIBCALL);
3103 return get_last_insn ();
3106 ret = emit_move_via_integer (mode, x, y, true);
3107 if (ret)
3108 return ret;
3111 return emit_move_complex_parts (x, y);
3114 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3115 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3117 static rtx
3118 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3120 rtx ret;
3122 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3123 if (mode != CCmode)
3125 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3126 if (code != CODE_FOR_nothing)
3128 x = emit_move_change_mode (CCmode, mode, x, true);
3129 y = emit_move_change_mode (CCmode, mode, y, true);
3130 return emit_insn (GEN_FCN (code) (x, y));
3134 /* Otherwise, find the MODE_INT mode of the same width. */
3135 ret = emit_move_via_integer (mode, x, y, false);
3136 gcc_assert (ret != NULL);
3137 return ret;
3140 /* Return true if word I of OP lies entirely in the
3141 undefined bits of a paradoxical subreg. */
3143 static bool
3144 undefined_operand_subword_p (rtx op, int i)
3146 enum machine_mode innermode, innermostmode;
3147 int offset;
3148 if (GET_CODE (op) != SUBREG)
3149 return false;
3150 innermode = GET_MODE (op);
3151 innermostmode = GET_MODE (SUBREG_REG (op));
3152 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3153 /* The SUBREG_BYTE represents offset, as if the value were stored in
3154 memory, except for a paradoxical subreg where we define
3155 SUBREG_BYTE to be 0; undo this exception as in
3156 simplify_subreg. */
3157 if (SUBREG_BYTE (op) == 0
3158 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3160 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3161 if (WORDS_BIG_ENDIAN)
3162 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3163 if (BYTES_BIG_ENDIAN)
3164 offset += difference % UNITS_PER_WORD;
3166 if (offset >= GET_MODE_SIZE (innermostmode)
3167 || offset <= -GET_MODE_SIZE (word_mode))
3168 return true;
3169 return false;
3172 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3173 MODE is any multi-word or full-word mode that lacks a move_insn
3174 pattern. Note that you will get better code if you define such
3175 patterns, even if they must turn into multiple assembler instructions. */
3177 static rtx
3178 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3180 rtx last_insn = 0;
3181 rtx seq, inner;
3182 bool need_clobber;
3183 int i;
3185 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3187 /* If X is a push on the stack, do the push now and replace
3188 X with a reference to the stack pointer. */
3189 if (push_operand (x, mode))
3190 x = emit_move_resolve_push (mode, x);
3192 /* If we are in reload, see if either operand is a MEM whose address
3193 is scheduled for replacement. */
3194 if (reload_in_progress && MEM_P (x)
3195 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3196 x = replace_equiv_address_nv (x, inner);
3197 if (reload_in_progress && MEM_P (y)
3198 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3199 y = replace_equiv_address_nv (y, inner);
3201 start_sequence ();
3203 need_clobber = false;
3204 for (i = 0;
3205 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3206 i++)
3208 rtx xpart = operand_subword (x, i, 1, mode);
3209 rtx ypart;
3211 /* Do not generate code for a move if it would come entirely
3212 from the undefined bits of a paradoxical subreg. */
3213 if (undefined_operand_subword_p (y, i))
3214 continue;
3216 ypart = operand_subword (y, i, 1, mode);
3218 /* If we can't get a part of Y, put Y into memory if it is a
3219 constant. Otherwise, force it into a register. Then we must
3220 be able to get a part of Y. */
3221 if (ypart == 0 && CONSTANT_P (y))
3223 y = use_anchored_address (force_const_mem (mode, y));
3224 ypart = operand_subword (y, i, 1, mode);
3226 else if (ypart == 0)
3227 ypart = operand_subword_force (y, i, mode);
3229 gcc_assert (xpart && ypart);
3231 need_clobber |= (GET_CODE (xpart) == SUBREG);
3233 last_insn = emit_move_insn (xpart, ypart);
3236 seq = get_insns ();
3237 end_sequence ();
3239 /* Show the output dies here. This is necessary for SUBREGs
3240 of pseudos since we cannot track their lifetimes correctly;
3241 hard regs shouldn't appear here except as return values.
3242 We never want to emit such a clobber after reload. */
3243 if (x != y
3244 && ! (reload_in_progress || reload_completed)
3245 && need_clobber != 0)
3246 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3248 emit_insn (seq);
3250 return last_insn;
3253 /* Low level part of emit_move_insn.
3254 Called just like emit_move_insn, but assumes X and Y
3255 are basically valid. */
3258 emit_move_insn_1 (rtx x, rtx y)
3260 enum machine_mode mode = GET_MODE (x);
3261 enum insn_code code;
3263 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3265 code = mov_optab->handlers[mode].insn_code;
3266 if (code != CODE_FOR_nothing)
3267 return emit_insn (GEN_FCN (code) (x, y));
3269 /* Expand complex moves by moving real part and imag part. */
3270 if (COMPLEX_MODE_P (mode))
3271 return emit_move_complex (mode, x, y);
3273 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3275 rtx result = emit_move_via_integer (mode, x, y, true);
3277 /* If we can't find an integer mode, use multi words. */
3278 if (result)
3279 return result;
3280 else
3281 return emit_move_multi_word (mode, x, y);
3284 if (GET_MODE_CLASS (mode) == MODE_CC)
3285 return emit_move_ccmode (mode, x, y);
3287 /* Try using a move pattern for the corresponding integer mode. This is
3288 only safe when simplify_subreg can convert MODE constants into integer
3289 constants. At present, it can only do this reliably if the value
3290 fits within a HOST_WIDE_INT. */
3291 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3293 rtx ret = emit_move_via_integer (mode, x, y, false);
3294 if (ret)
3295 return ret;
3298 return emit_move_multi_word (mode, x, y);
3301 /* Generate code to copy Y into X.
3302 Both Y and X must have the same mode, except that
3303 Y can be a constant with VOIDmode.
3304 This mode cannot be BLKmode; use emit_block_move for that.
3306 Return the last instruction emitted. */
3309 emit_move_insn (rtx x, rtx y)
3311 enum machine_mode mode = GET_MODE (x);
3312 rtx y_cst = NULL_RTX;
3313 rtx last_insn, set;
3315 gcc_assert (mode != BLKmode
3316 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3318 if (CONSTANT_P (y))
3320 if (optimize
3321 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3322 && (last_insn = compress_float_constant (x, y)))
3323 return last_insn;
3325 y_cst = y;
3327 if (!LEGITIMATE_CONSTANT_P (y))
3329 y = force_const_mem (mode, y);
3331 /* If the target's cannot_force_const_mem prevented the spill,
3332 assume that the target's move expanders will also take care
3333 of the non-legitimate constant. */
3334 if (!y)
3335 y = y_cst;
3336 else
3337 y = use_anchored_address (y);
3341 /* If X or Y are memory references, verify that their addresses are valid
3342 for the machine. */
3343 if (MEM_P (x)
3344 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3345 && ! push_operand (x, GET_MODE (x)))
3346 || (flag_force_addr
3347 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3348 x = validize_mem (x);
3350 if (MEM_P (y)
3351 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3352 || (flag_force_addr
3353 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3354 y = validize_mem (y);
3356 gcc_assert (mode != BLKmode);
3358 last_insn = emit_move_insn_1 (x, y);
3360 if (y_cst && REG_P (x)
3361 && (set = single_set (last_insn)) != NULL_RTX
3362 && SET_DEST (set) == x
3363 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3364 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3366 return last_insn;
3369 /* If Y is representable exactly in a narrower mode, and the target can
3370 perform the extension directly from constant or memory, then emit the
3371 move as an extension. */
3373 static rtx
3374 compress_float_constant (rtx x, rtx y)
3376 enum machine_mode dstmode = GET_MODE (x);
3377 enum machine_mode orig_srcmode = GET_MODE (y);
3378 enum machine_mode srcmode;
3379 REAL_VALUE_TYPE r;
3380 int oldcost, newcost;
3382 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3384 if (LEGITIMATE_CONSTANT_P (y))
3385 oldcost = rtx_cost (y, SET);
3386 else
3387 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3389 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3390 srcmode != orig_srcmode;
3391 srcmode = GET_MODE_WIDER_MODE (srcmode))
3393 enum insn_code ic;
3394 rtx trunc_y, last_insn;
3396 /* Skip if the target can't extend this way. */
3397 ic = can_extend_p (dstmode, srcmode, 0);
3398 if (ic == CODE_FOR_nothing)
3399 continue;
3401 /* Skip if the narrowed value isn't exact. */
3402 if (! exact_real_truncate (srcmode, &r))
3403 continue;
3405 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3407 if (LEGITIMATE_CONSTANT_P (trunc_y))
3409 /* Skip if the target needs extra instructions to perform
3410 the extension. */
3411 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3412 continue;
3413 /* This is valid, but may not be cheaper than the original. */
3414 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3415 if (oldcost < newcost)
3416 continue;
3418 else if (float_extend_from_mem[dstmode][srcmode])
3420 trunc_y = force_const_mem (srcmode, trunc_y);
3421 /* This is valid, but may not be cheaper than the original. */
3422 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3423 if (oldcost < newcost)
3424 continue;
3425 trunc_y = validize_mem (trunc_y);
3427 else
3428 continue;
3430 /* For CSE's benefit, force the compressed constant pool entry
3431 into a new pseudo. This constant may be used in different modes,
3432 and if not, combine will put things back together for us. */
3433 trunc_y = force_reg (srcmode, trunc_y);
3434 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3435 last_insn = get_last_insn ();
3437 if (REG_P (x))
3438 set_unique_reg_note (last_insn, REG_EQUAL, y);
3440 return last_insn;
3443 return NULL_RTX;
3446 /* Pushing data onto the stack. */
3448 /* Push a block of length SIZE (perhaps variable)
3449 and return an rtx to address the beginning of the block.
3450 The value may be virtual_outgoing_args_rtx.
3452 EXTRA is the number of bytes of padding to push in addition to SIZE.
3453 BELOW nonzero means this padding comes at low addresses;
3454 otherwise, the padding comes at high addresses. */
3457 push_block (rtx size, int extra, int below)
3459 rtx temp;
3461 size = convert_modes (Pmode, ptr_mode, size, 1);
3462 if (CONSTANT_P (size))
3463 anti_adjust_stack (plus_constant (size, extra));
3464 else if (REG_P (size) && extra == 0)
3465 anti_adjust_stack (size);
3466 else
3468 temp = copy_to_mode_reg (Pmode, size);
3469 if (extra != 0)
3470 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3471 temp, 0, OPTAB_LIB_WIDEN);
3472 anti_adjust_stack (temp);
3475 #ifndef STACK_GROWS_DOWNWARD
3476 if (0)
3477 #else
3478 if (1)
3479 #endif
3481 temp = virtual_outgoing_args_rtx;
3482 if (extra != 0 && below)
3483 temp = plus_constant (temp, extra);
3485 else
3487 if (GET_CODE (size) == CONST_INT)
3488 temp = plus_constant (virtual_outgoing_args_rtx,
3489 -INTVAL (size) - (below ? 0 : extra));
3490 else if (extra != 0 && !below)
3491 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3492 negate_rtx (Pmode, plus_constant (size, extra)));
3493 else
3494 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3495 negate_rtx (Pmode, size));
3498 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3501 #ifdef PUSH_ROUNDING
3503 /* Emit single push insn. */
3505 static void
3506 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3508 rtx dest_addr;
3509 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3510 rtx dest;
3511 enum insn_code icode;
3512 insn_operand_predicate_fn pred;
3514 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3515 /* If there is push pattern, use it. Otherwise try old way of throwing
3516 MEM representing push operation to move expander. */
3517 icode = push_optab->handlers[(int) mode].insn_code;
3518 if (icode != CODE_FOR_nothing)
3520 if (((pred = insn_data[(int) icode].operand[0].predicate)
3521 && !((*pred) (x, mode))))
3522 x = force_reg (mode, x);
3523 emit_insn (GEN_FCN (icode) (x));
3524 return;
3526 if (GET_MODE_SIZE (mode) == rounded_size)
3527 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3528 /* If we are to pad downward, adjust the stack pointer first and
3529 then store X into the stack location using an offset. This is
3530 because emit_move_insn does not know how to pad; it does not have
3531 access to type. */
3532 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3534 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3535 HOST_WIDE_INT offset;
3537 emit_move_insn (stack_pointer_rtx,
3538 expand_binop (Pmode,
3539 #ifdef STACK_GROWS_DOWNWARD
3540 sub_optab,
3541 #else
3542 add_optab,
3543 #endif
3544 stack_pointer_rtx,
3545 GEN_INT (rounded_size),
3546 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3548 offset = (HOST_WIDE_INT) padding_size;
3549 #ifdef STACK_GROWS_DOWNWARD
3550 if (STACK_PUSH_CODE == POST_DEC)
3551 /* We have already decremented the stack pointer, so get the
3552 previous value. */
3553 offset += (HOST_WIDE_INT) rounded_size;
3554 #else
3555 if (STACK_PUSH_CODE == POST_INC)
3556 /* We have already incremented the stack pointer, so get the
3557 previous value. */
3558 offset -= (HOST_WIDE_INT) rounded_size;
3559 #endif
3560 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3562 else
3564 #ifdef STACK_GROWS_DOWNWARD
3565 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3566 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3567 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3568 #else
3569 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3570 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3571 GEN_INT (rounded_size));
3572 #endif
3573 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3576 dest = gen_rtx_MEM (mode, dest_addr);
3578 if (type != 0)
3580 set_mem_attributes (dest, type, 1);
3582 if (flag_optimize_sibling_calls)
3583 /* Function incoming arguments may overlap with sibling call
3584 outgoing arguments and we cannot allow reordering of reads
3585 from function arguments with stores to outgoing arguments
3586 of sibling calls. */
3587 set_mem_alias_set (dest, 0);
3589 emit_move_insn (dest, x);
3591 #endif
3593 /* Generate code to push X onto the stack, assuming it has mode MODE and
3594 type TYPE.
3595 MODE is redundant except when X is a CONST_INT (since they don't
3596 carry mode info).
3597 SIZE is an rtx for the size of data to be copied (in bytes),
3598 needed only if X is BLKmode.
3600 ALIGN (in bits) is maximum alignment we can assume.
3602 If PARTIAL and REG are both nonzero, then copy that many of the first
3603 bytes of X into registers starting with REG, and push the rest of X.
3604 The amount of space pushed is decreased by PARTIAL bytes.
3605 REG must be a hard register in this case.
3606 If REG is zero but PARTIAL is not, take any all others actions for an
3607 argument partially in registers, but do not actually load any
3608 registers.
3610 EXTRA is the amount in bytes of extra space to leave next to this arg.
3611 This is ignored if an argument block has already been allocated.
3613 On a machine that lacks real push insns, ARGS_ADDR is the address of
3614 the bottom of the argument block for this call. We use indexing off there
3615 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3616 argument block has not been preallocated.
3618 ARGS_SO_FAR is the size of args previously pushed for this call.
3620 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3621 for arguments passed in registers. If nonzero, it will be the number
3622 of bytes required. */
3624 void
3625 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3626 unsigned int align, int partial, rtx reg, int extra,
3627 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3628 rtx alignment_pad)
3630 rtx xinner;
3631 enum direction stack_direction
3632 #ifdef STACK_GROWS_DOWNWARD
3633 = downward;
3634 #else
3635 = upward;
3636 #endif
3638 /* Decide where to pad the argument: `downward' for below,
3639 `upward' for above, or `none' for don't pad it.
3640 Default is below for small data on big-endian machines; else above. */
3641 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3643 /* Invert direction if stack is post-decrement.
3644 FIXME: why? */
3645 if (STACK_PUSH_CODE == POST_DEC)
3646 if (where_pad != none)
3647 where_pad = (where_pad == downward ? upward : downward);
3649 xinner = x;
3651 if (mode == BLKmode
3652 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3654 /* Copy a block into the stack, entirely or partially. */
3656 rtx temp;
3657 int used;
3658 int offset;
3659 int skip;
3661 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3662 used = partial - offset;
3664 if (mode != BLKmode)
3666 /* A value is to be stored in an insufficiently aligned
3667 stack slot; copy via a suitably aligned slot if
3668 necessary. */
3669 size = GEN_INT (GET_MODE_SIZE (mode));
3670 if (!MEM_P (xinner))
3672 temp = assign_temp (type, 0, 1, 1);
3673 emit_move_insn (temp, xinner);
3674 xinner = temp;
3678 gcc_assert (size);
3680 /* USED is now the # of bytes we need not copy to the stack
3681 because registers will take care of them. */
3683 if (partial != 0)
3684 xinner = adjust_address (xinner, BLKmode, used);
3686 /* If the partial register-part of the arg counts in its stack size,
3687 skip the part of stack space corresponding to the registers.
3688 Otherwise, start copying to the beginning of the stack space,
3689 by setting SKIP to 0. */
3690 skip = (reg_parm_stack_space == 0) ? 0 : used;
3692 #ifdef PUSH_ROUNDING
3693 /* Do it with several push insns if that doesn't take lots of insns
3694 and if there is no difficulty with push insns that skip bytes
3695 on the stack for alignment purposes. */
3696 if (args_addr == 0
3697 && PUSH_ARGS
3698 && GET_CODE (size) == CONST_INT
3699 && skip == 0
3700 && MEM_ALIGN (xinner) >= align
3701 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3702 /* Here we avoid the case of a structure whose weak alignment
3703 forces many pushes of a small amount of data,
3704 and such small pushes do rounding that causes trouble. */
3705 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3706 || align >= BIGGEST_ALIGNMENT
3707 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3708 == (align / BITS_PER_UNIT)))
3709 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3711 /* Push padding now if padding above and stack grows down,
3712 or if padding below and stack grows up.
3713 But if space already allocated, this has already been done. */
3714 if (extra && args_addr == 0
3715 && where_pad != none && where_pad != stack_direction)
3716 anti_adjust_stack (GEN_INT (extra));
3718 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3720 else
3721 #endif /* PUSH_ROUNDING */
3723 rtx target;
3725 /* Otherwise make space on the stack and copy the data
3726 to the address of that space. */
3728 /* Deduct words put into registers from the size we must copy. */
3729 if (partial != 0)
3731 if (GET_CODE (size) == CONST_INT)
3732 size = GEN_INT (INTVAL (size) - used);
3733 else
3734 size = expand_binop (GET_MODE (size), sub_optab, size,
3735 GEN_INT (used), NULL_RTX, 0,
3736 OPTAB_LIB_WIDEN);
3739 /* Get the address of the stack space.
3740 In this case, we do not deal with EXTRA separately.
3741 A single stack adjust will do. */
3742 if (! args_addr)
3744 temp = push_block (size, extra, where_pad == downward);
3745 extra = 0;
3747 else if (GET_CODE (args_so_far) == CONST_INT)
3748 temp = memory_address (BLKmode,
3749 plus_constant (args_addr,
3750 skip + INTVAL (args_so_far)));
3751 else
3752 temp = memory_address (BLKmode,
3753 plus_constant (gen_rtx_PLUS (Pmode,
3754 args_addr,
3755 args_so_far),
3756 skip));
3758 if (!ACCUMULATE_OUTGOING_ARGS)
3760 /* If the source is referenced relative to the stack pointer,
3761 copy it to another register to stabilize it. We do not need
3762 to do this if we know that we won't be changing sp. */
3764 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3765 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3766 temp = copy_to_reg (temp);
3769 target = gen_rtx_MEM (BLKmode, temp);
3771 /* We do *not* set_mem_attributes here, because incoming arguments
3772 may overlap with sibling call outgoing arguments and we cannot
3773 allow reordering of reads from function arguments with stores
3774 to outgoing arguments of sibling calls. We do, however, want
3775 to record the alignment of the stack slot. */
3776 /* ALIGN may well be better aligned than TYPE, e.g. due to
3777 PARM_BOUNDARY. Assume the caller isn't lying. */
3778 set_mem_align (target, align);
3780 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3783 else if (partial > 0)
3785 /* Scalar partly in registers. */
3787 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3788 int i;
3789 int not_stack;
3790 /* # bytes of start of argument
3791 that we must make space for but need not store. */
3792 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3793 int args_offset = INTVAL (args_so_far);
3794 int skip;
3796 /* Push padding now if padding above and stack grows down,
3797 or if padding below and stack grows up.
3798 But if space already allocated, this has already been done. */
3799 if (extra && args_addr == 0
3800 && where_pad != none && where_pad != stack_direction)
3801 anti_adjust_stack (GEN_INT (extra));
3803 /* If we make space by pushing it, we might as well push
3804 the real data. Otherwise, we can leave OFFSET nonzero
3805 and leave the space uninitialized. */
3806 if (args_addr == 0)
3807 offset = 0;
3809 /* Now NOT_STACK gets the number of words that we don't need to
3810 allocate on the stack. Convert OFFSET to words too. */
3811 not_stack = (partial - offset) / UNITS_PER_WORD;
3812 offset /= UNITS_PER_WORD;
3814 /* If the partial register-part of the arg counts in its stack size,
3815 skip the part of stack space corresponding to the registers.
3816 Otherwise, start copying to the beginning of the stack space,
3817 by setting SKIP to 0. */
3818 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3820 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3821 x = validize_mem (force_const_mem (mode, x));
3823 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3824 SUBREGs of such registers are not allowed. */
3825 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3826 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3827 x = copy_to_reg (x);
3829 /* Loop over all the words allocated on the stack for this arg. */
3830 /* We can do it by words, because any scalar bigger than a word
3831 has a size a multiple of a word. */
3832 #ifndef PUSH_ARGS_REVERSED
3833 for (i = not_stack; i < size; i++)
3834 #else
3835 for (i = size - 1; i >= not_stack; i--)
3836 #endif
3837 if (i >= not_stack + offset)
3838 emit_push_insn (operand_subword_force (x, i, mode),
3839 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3840 0, args_addr,
3841 GEN_INT (args_offset + ((i - not_stack + skip)
3842 * UNITS_PER_WORD)),
3843 reg_parm_stack_space, alignment_pad);
3845 else
3847 rtx addr;
3848 rtx dest;
3850 /* Push padding now if padding above and stack grows down,
3851 or if padding below and stack grows up.
3852 But if space already allocated, this has already been done. */
3853 if (extra && args_addr == 0
3854 && where_pad != none && where_pad != stack_direction)
3855 anti_adjust_stack (GEN_INT (extra));
3857 #ifdef PUSH_ROUNDING
3858 if (args_addr == 0 && PUSH_ARGS)
3859 emit_single_push_insn (mode, x, type);
3860 else
3861 #endif
3863 if (GET_CODE (args_so_far) == CONST_INT)
3864 addr
3865 = memory_address (mode,
3866 plus_constant (args_addr,
3867 INTVAL (args_so_far)));
3868 else
3869 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3870 args_so_far));
3871 dest = gen_rtx_MEM (mode, addr);
3873 /* We do *not* set_mem_attributes here, because incoming arguments
3874 may overlap with sibling call outgoing arguments and we cannot
3875 allow reordering of reads from function arguments with stores
3876 to outgoing arguments of sibling calls. We do, however, want
3877 to record the alignment of the stack slot. */
3878 /* ALIGN may well be better aligned than TYPE, e.g. due to
3879 PARM_BOUNDARY. Assume the caller isn't lying. */
3880 set_mem_align (dest, align);
3882 emit_move_insn (dest, x);
3886 /* If part should go in registers, copy that part
3887 into the appropriate registers. Do this now, at the end,
3888 since mem-to-mem copies above may do function calls. */
3889 if (partial > 0 && reg != 0)
3891 /* Handle calls that pass values in multiple non-contiguous locations.
3892 The Irix 6 ABI has examples of this. */
3893 if (GET_CODE (reg) == PARALLEL)
3894 emit_group_load (reg, x, type, -1);
3895 else
3897 gcc_assert (partial % UNITS_PER_WORD == 0);
3898 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3902 if (extra && args_addr == 0 && where_pad == stack_direction)
3903 anti_adjust_stack (GEN_INT (extra));
3905 if (alignment_pad && args_addr == 0)
3906 anti_adjust_stack (alignment_pad);
3909 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3910 operations. */
3912 static rtx
3913 get_subtarget (rtx x)
3915 return (optimize
3916 || x == 0
3917 /* Only registers can be subtargets. */
3918 || !REG_P (x)
3919 /* Don't use hard regs to avoid extending their life. */
3920 || REGNO (x) < FIRST_PSEUDO_REGISTER
3921 ? 0 : x);
3924 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3925 FIELD is a bitfield. Returns true if the optimization was successful,
3926 and there's nothing else to do. */
3928 static bool
3929 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3930 unsigned HOST_WIDE_INT bitpos,
3931 enum machine_mode mode1, rtx str_rtx,
3932 tree to, tree src)
3934 enum machine_mode str_mode = GET_MODE (str_rtx);
3935 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3936 tree op0, op1;
3937 rtx value, result;
3938 optab binop;
3940 if (mode1 != VOIDmode
3941 || bitsize >= BITS_PER_WORD
3942 || str_bitsize > BITS_PER_WORD
3943 || TREE_SIDE_EFFECTS (to)
3944 || TREE_THIS_VOLATILE (to))
3945 return false;
3947 STRIP_NOPS (src);
3948 if (!BINARY_CLASS_P (src)
3949 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3950 return false;
3952 op0 = TREE_OPERAND (src, 0);
3953 op1 = TREE_OPERAND (src, 1);
3954 STRIP_NOPS (op0);
3956 if (!operand_equal_p (to, op0, 0))
3957 return false;
3959 if (MEM_P (str_rtx))
3961 unsigned HOST_WIDE_INT offset1;
3963 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3964 str_mode = word_mode;
3965 str_mode = get_best_mode (bitsize, bitpos,
3966 MEM_ALIGN (str_rtx), str_mode, 0);
3967 if (str_mode == VOIDmode)
3968 return false;
3969 str_bitsize = GET_MODE_BITSIZE (str_mode);
3971 offset1 = bitpos;
3972 bitpos %= str_bitsize;
3973 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3974 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3976 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3977 return false;
3979 /* If the bit field covers the whole REG/MEM, store_field
3980 will likely generate better code. */
3981 if (bitsize >= str_bitsize)
3982 return false;
3984 /* We can't handle fields split across multiple entities. */
3985 if (bitpos + bitsize > str_bitsize)
3986 return false;
3988 if (BYTES_BIG_ENDIAN)
3989 bitpos = str_bitsize - bitpos - bitsize;
3991 switch (TREE_CODE (src))
3993 case PLUS_EXPR:
3994 case MINUS_EXPR:
3995 /* For now, just optimize the case of the topmost bitfield
3996 where we don't need to do any masking and also
3997 1 bit bitfields where xor can be used.
3998 We might win by one instruction for the other bitfields
3999 too if insv/extv instructions aren't used, so that
4000 can be added later. */
4001 if (bitpos + bitsize != str_bitsize
4002 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4003 break;
4005 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4006 value = convert_modes (str_mode,
4007 TYPE_MODE (TREE_TYPE (op1)), value,
4008 TYPE_UNSIGNED (TREE_TYPE (op1)));
4010 /* We may be accessing data outside the field, which means
4011 we can alias adjacent data. */
4012 if (MEM_P (str_rtx))
4014 str_rtx = shallow_copy_rtx (str_rtx);
4015 set_mem_alias_set (str_rtx, 0);
4016 set_mem_expr (str_rtx, 0);
4019 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4020 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4022 value = expand_and (str_mode, value, const1_rtx, NULL);
4023 binop = xor_optab;
4025 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4026 build_int_cst (NULL_TREE, bitpos),
4027 NULL_RTX, 1);
4028 result = expand_binop (str_mode, binop, str_rtx,
4029 value, str_rtx, 1, OPTAB_WIDEN);
4030 if (result != str_rtx)
4031 emit_move_insn (str_rtx, result);
4032 return true;
4034 case BIT_IOR_EXPR:
4035 case BIT_XOR_EXPR:
4036 if (TREE_CODE (op1) != INTEGER_CST)
4037 break;
4038 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4039 value = convert_modes (GET_MODE (str_rtx),
4040 TYPE_MODE (TREE_TYPE (op1)), value,
4041 TYPE_UNSIGNED (TREE_TYPE (op1)));
4043 /* We may be accessing data outside the field, which means
4044 we can alias adjacent data. */
4045 if (MEM_P (str_rtx))
4047 str_rtx = shallow_copy_rtx (str_rtx);
4048 set_mem_alias_set (str_rtx, 0);
4049 set_mem_expr (str_rtx, 0);
4052 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4053 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4055 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4056 - 1);
4057 value = expand_and (GET_MODE (str_rtx), value, mask,
4058 NULL_RTX);
4060 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4061 build_int_cst (NULL_TREE, bitpos),
4062 NULL_RTX, 1);
4063 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4064 value, str_rtx, 1, OPTAB_WIDEN);
4065 if (result != str_rtx)
4066 emit_move_insn (str_rtx, result);
4067 return true;
4069 default:
4070 break;
4073 return false;
4077 /* Expand an assignment that stores the value of FROM into TO. */
4079 void
4080 expand_assignment (tree to, tree from)
4082 rtx to_rtx = 0;
4083 rtx result;
4085 /* Don't crash if the lhs of the assignment was erroneous. */
4086 if (TREE_CODE (to) == ERROR_MARK)
4088 result = expand_normal (from);
4089 return;
4092 /* Optimize away no-op moves without side-effects. */
4093 if (operand_equal_p (to, from, 0))
4094 return;
4096 /* Assignment of a structure component needs special treatment
4097 if the structure component's rtx is not simply a MEM.
4098 Assignment of an array element at a constant index, and assignment of
4099 an array element in an unaligned packed structure field, has the same
4100 problem. */
4101 if (handled_component_p (to)
4102 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4104 enum machine_mode mode1;
4105 HOST_WIDE_INT bitsize, bitpos;
4106 tree offset;
4107 int unsignedp;
4108 int volatilep = 0;
4109 tree tem;
4111 push_temp_slots ();
4112 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4113 &unsignedp, &volatilep, true);
4115 /* If we are going to use store_bit_field and extract_bit_field,
4116 make sure to_rtx will be safe for multiple use. */
4118 to_rtx = expand_normal (tem);
4120 if (offset != 0)
4122 rtx offset_rtx;
4124 if (!MEM_P (to_rtx))
4126 /* We can get constant negative offsets into arrays with broken
4127 user code. Translate this to a trap instead of ICEing. */
4128 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4129 expand_builtin_trap ();
4130 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4133 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4134 #ifdef POINTERS_EXTEND_UNSIGNED
4135 if (GET_MODE (offset_rtx) != Pmode)
4136 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4137 #else
4138 if (GET_MODE (offset_rtx) != ptr_mode)
4139 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4140 #endif
4142 /* A constant address in TO_RTX can have VOIDmode, we must not try
4143 to call force_reg for that case. Avoid that case. */
4144 if (MEM_P (to_rtx)
4145 && GET_MODE (to_rtx) == BLKmode
4146 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4147 && bitsize > 0
4148 && (bitpos % bitsize) == 0
4149 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4150 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4152 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4153 bitpos = 0;
4156 to_rtx = offset_address (to_rtx, offset_rtx,
4157 highest_pow2_factor_for_target (to,
4158 offset));
4161 /* Handle expand_expr of a complex value returning a CONCAT. */
4162 if (GET_CODE (to_rtx) == CONCAT)
4164 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4166 gcc_assert (bitpos == 0);
4167 result = store_expr (from, to_rtx, false);
4169 else
4171 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4172 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4175 else
4177 if (MEM_P (to_rtx))
4179 /* If the field is at offset zero, we could have been given the
4180 DECL_RTX of the parent struct. Don't munge it. */
4181 to_rtx = shallow_copy_rtx (to_rtx);
4183 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4185 /* Deal with volatile and readonly fields. The former is only
4186 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4187 if (volatilep)
4188 MEM_VOLATILE_P (to_rtx) = 1;
4189 if (component_uses_parent_alias_set (to))
4190 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4193 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4194 to_rtx, to, from))
4195 result = NULL;
4196 else
4197 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4198 TREE_TYPE (tem), get_alias_set (to));
4201 if (result)
4202 preserve_temp_slots (result);
4203 free_temp_slots ();
4204 pop_temp_slots ();
4205 return;
4208 /* If the rhs is a function call and its value is not an aggregate,
4209 call the function before we start to compute the lhs.
4210 This is needed for correct code for cases such as
4211 val = setjmp (buf) on machines where reference to val
4212 requires loading up part of an address in a separate insn.
4214 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4215 since it might be a promoted variable where the zero- or sign- extension
4216 needs to be done. Handling this in the normal way is safe because no
4217 computation is done before the call. */
4218 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4219 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4220 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4221 && REG_P (DECL_RTL (to))))
4223 rtx value;
4225 push_temp_slots ();
4226 value = expand_normal (from);
4227 if (to_rtx == 0)
4228 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4230 /* Handle calls that return values in multiple non-contiguous locations.
4231 The Irix 6 ABI has examples of this. */
4232 if (GET_CODE (to_rtx) == PARALLEL)
4233 emit_group_load (to_rtx, value, TREE_TYPE (from),
4234 int_size_in_bytes (TREE_TYPE (from)));
4235 else if (GET_MODE (to_rtx) == BLKmode)
4236 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4237 else
4239 if (POINTER_TYPE_P (TREE_TYPE (to)))
4240 value = convert_memory_address (GET_MODE (to_rtx), value);
4241 emit_move_insn (to_rtx, value);
4243 preserve_temp_slots (to_rtx);
4244 free_temp_slots ();
4245 pop_temp_slots ();
4246 return;
4249 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4250 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4252 if (to_rtx == 0)
4253 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4255 /* Don't move directly into a return register. */
4256 if (TREE_CODE (to) == RESULT_DECL
4257 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4259 rtx temp;
4261 push_temp_slots ();
4262 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4264 if (GET_CODE (to_rtx) == PARALLEL)
4265 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4266 int_size_in_bytes (TREE_TYPE (from)));
4267 else
4268 emit_move_insn (to_rtx, temp);
4270 preserve_temp_slots (to_rtx);
4271 free_temp_slots ();
4272 pop_temp_slots ();
4273 return;
4276 /* In case we are returning the contents of an object which overlaps
4277 the place the value is being stored, use a safe function when copying
4278 a value through a pointer into a structure value return block. */
4279 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4280 && current_function_returns_struct
4281 && !current_function_returns_pcc_struct)
4283 rtx from_rtx, size;
4285 push_temp_slots ();
4286 size = expr_size (from);
4287 from_rtx = expand_normal (from);
4289 emit_library_call (memmove_libfunc, LCT_NORMAL,
4290 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4291 XEXP (from_rtx, 0), Pmode,
4292 convert_to_mode (TYPE_MODE (sizetype),
4293 size, TYPE_UNSIGNED (sizetype)),
4294 TYPE_MODE (sizetype));
4296 preserve_temp_slots (to_rtx);
4297 free_temp_slots ();
4298 pop_temp_slots ();
4299 return;
4302 /* Compute FROM and store the value in the rtx we got. */
4304 push_temp_slots ();
4305 result = store_expr (from, to_rtx, 0);
4306 preserve_temp_slots (result);
4307 free_temp_slots ();
4308 pop_temp_slots ();
4309 return;
4312 /* Generate code for computing expression EXP,
4313 and storing the value into TARGET.
4315 If the mode is BLKmode then we may return TARGET itself.
4316 It turns out that in BLKmode it doesn't cause a problem.
4317 because C has no operators that could combine two different
4318 assignments into the same BLKmode object with different values
4319 with no sequence point. Will other languages need this to
4320 be more thorough?
4322 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4323 stack, and block moves may need to be treated specially. */
4326 store_expr (tree exp, rtx target, int call_param_p)
4328 rtx temp;
4329 rtx alt_rtl = NULL_RTX;
4330 int dont_return_target = 0;
4332 if (VOID_TYPE_P (TREE_TYPE (exp)))
4334 /* C++ can generate ?: expressions with a throw expression in one
4335 branch and an rvalue in the other. Here, we resolve attempts to
4336 store the throw expression's nonexistent result. */
4337 gcc_assert (!call_param_p);
4338 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4339 return NULL_RTX;
4341 if (TREE_CODE (exp) == COMPOUND_EXPR)
4343 /* Perform first part of compound expression, then assign from second
4344 part. */
4345 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4346 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4347 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4349 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4351 /* For conditional expression, get safe form of the target. Then
4352 test the condition, doing the appropriate assignment on either
4353 side. This avoids the creation of unnecessary temporaries.
4354 For non-BLKmode, it is more efficient not to do this. */
4356 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4358 do_pending_stack_adjust ();
4359 NO_DEFER_POP;
4360 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4361 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4362 emit_jump_insn (gen_jump (lab2));
4363 emit_barrier ();
4364 emit_label (lab1);
4365 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4366 emit_label (lab2);
4367 OK_DEFER_POP;
4369 return NULL_RTX;
4371 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4372 /* If this is a scalar in a register that is stored in a wider mode
4373 than the declared mode, compute the result into its declared mode
4374 and then convert to the wider mode. Our value is the computed
4375 expression. */
4377 rtx inner_target = 0;
4379 /* We can do the conversion inside EXP, which will often result
4380 in some optimizations. Do the conversion in two steps: first
4381 change the signedness, if needed, then the extend. But don't
4382 do this if the type of EXP is a subtype of something else
4383 since then the conversion might involve more than just
4384 converting modes. */
4385 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4386 && TREE_TYPE (TREE_TYPE (exp)) == 0
4387 && (!lang_hooks.reduce_bit_field_operations
4388 || (GET_MODE_PRECISION (GET_MODE (target))
4389 == TYPE_PRECISION (TREE_TYPE (exp)))))
4391 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4392 != SUBREG_PROMOTED_UNSIGNED_P (target))
4394 /* Some types, e.g. Fortran's logical*4, won't have a signed
4395 version, so use the mode instead. */
4396 tree ntype
4397 = (get_signed_or_unsigned_type
4398 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4399 if (ntype == NULL)
4400 ntype = lang_hooks.types.type_for_mode
4401 (TYPE_MODE (TREE_TYPE (exp)),
4402 SUBREG_PROMOTED_UNSIGNED_P (target));
4404 exp = fold_convert (ntype, exp);
4407 exp = fold_convert (lang_hooks.types.type_for_mode
4408 (GET_MODE (SUBREG_REG (target)),
4409 SUBREG_PROMOTED_UNSIGNED_P (target)),
4410 exp);
4412 inner_target = SUBREG_REG (target);
4415 temp = expand_expr (exp, inner_target, VOIDmode,
4416 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4418 /* If TEMP is a VOIDmode constant, use convert_modes to make
4419 sure that we properly convert it. */
4420 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4422 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4423 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4424 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4425 GET_MODE (target), temp,
4426 SUBREG_PROMOTED_UNSIGNED_P (target));
4429 convert_move (SUBREG_REG (target), temp,
4430 SUBREG_PROMOTED_UNSIGNED_P (target));
4432 return NULL_RTX;
4434 else
4436 temp = expand_expr_real (exp, target, GET_MODE (target),
4437 (call_param_p
4438 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4439 &alt_rtl);
4440 /* Return TARGET if it's a specified hardware register.
4441 If TARGET is a volatile mem ref, either return TARGET
4442 or return a reg copied *from* TARGET; ANSI requires this.
4444 Otherwise, if TEMP is not TARGET, return TEMP
4445 if it is constant (for efficiency),
4446 or if we really want the correct value. */
4447 if (!(target && REG_P (target)
4448 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4449 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4450 && ! rtx_equal_p (temp, target)
4451 && CONSTANT_P (temp))
4452 dont_return_target = 1;
4455 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4456 the same as that of TARGET, adjust the constant. This is needed, for
4457 example, in case it is a CONST_DOUBLE and we want only a word-sized
4458 value. */
4459 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4460 && TREE_CODE (exp) != ERROR_MARK
4461 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4462 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4463 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4465 /* If value was not generated in the target, store it there.
4466 Convert the value to TARGET's type first if necessary and emit the
4467 pending incrementations that have been queued when expanding EXP.
4468 Note that we cannot emit the whole queue blindly because this will
4469 effectively disable the POST_INC optimization later.
4471 If TEMP and TARGET compare equal according to rtx_equal_p, but
4472 one or both of them are volatile memory refs, we have to distinguish
4473 two cases:
4474 - expand_expr has used TARGET. In this case, we must not generate
4475 another copy. This can be detected by TARGET being equal according
4476 to == .
4477 - expand_expr has not used TARGET - that means that the source just
4478 happens to have the same RTX form. Since temp will have been created
4479 by expand_expr, it will compare unequal according to == .
4480 We must generate a copy in this case, to reach the correct number
4481 of volatile memory references. */
4483 if ((! rtx_equal_p (temp, target)
4484 || (temp != target && (side_effects_p (temp)
4485 || side_effects_p (target))))
4486 && TREE_CODE (exp) != ERROR_MARK
4487 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4488 but TARGET is not valid memory reference, TEMP will differ
4489 from TARGET although it is really the same location. */
4490 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4491 /* If there's nothing to copy, don't bother. Don't call
4492 expr_size unless necessary, because some front-ends (C++)
4493 expr_size-hook must not be given objects that are not
4494 supposed to be bit-copied or bit-initialized. */
4495 && expr_size (exp) != const0_rtx)
4497 if (GET_MODE (temp) != GET_MODE (target)
4498 && GET_MODE (temp) != VOIDmode)
4500 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4501 if (dont_return_target)
4503 /* In this case, we will return TEMP,
4504 so make sure it has the proper mode.
4505 But don't forget to store the value into TARGET. */
4506 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4507 emit_move_insn (target, temp);
4509 else if (GET_MODE (target) == BLKmode)
4510 emit_block_move (target, temp, expr_size (exp),
4511 (call_param_p
4512 ? BLOCK_OP_CALL_PARM
4513 : BLOCK_OP_NORMAL));
4514 else
4515 convert_move (target, temp, unsignedp);
4518 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4520 /* Handle copying a string constant into an array. The string
4521 constant may be shorter than the array. So copy just the string's
4522 actual length, and clear the rest. First get the size of the data
4523 type of the string, which is actually the size of the target. */
4524 rtx size = expr_size (exp);
4526 if (GET_CODE (size) == CONST_INT
4527 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4528 emit_block_move (target, temp, size,
4529 (call_param_p
4530 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4531 else
4533 /* Compute the size of the data to copy from the string. */
4534 tree copy_size
4535 = size_binop (MIN_EXPR,
4536 make_tree (sizetype, size),
4537 size_int (TREE_STRING_LENGTH (exp)));
4538 rtx copy_size_rtx
4539 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4540 (call_param_p
4541 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4542 rtx label = 0;
4544 /* Copy that much. */
4545 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4546 TYPE_UNSIGNED (sizetype));
4547 emit_block_move (target, temp, copy_size_rtx,
4548 (call_param_p
4549 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4551 /* Figure out how much is left in TARGET that we have to clear.
4552 Do all calculations in ptr_mode. */
4553 if (GET_CODE (copy_size_rtx) == CONST_INT)
4555 size = plus_constant (size, -INTVAL (copy_size_rtx));
4556 target = adjust_address (target, BLKmode,
4557 INTVAL (copy_size_rtx));
4559 else
4561 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4562 copy_size_rtx, NULL_RTX, 0,
4563 OPTAB_LIB_WIDEN);
4565 #ifdef POINTERS_EXTEND_UNSIGNED
4566 if (GET_MODE (copy_size_rtx) != Pmode)
4567 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4568 TYPE_UNSIGNED (sizetype));
4569 #endif
4571 target = offset_address (target, copy_size_rtx,
4572 highest_pow2_factor (copy_size));
4573 label = gen_label_rtx ();
4574 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4575 GET_MODE (size), 0, label);
4578 if (size != const0_rtx)
4579 clear_storage (target, size, BLOCK_OP_NORMAL);
4581 if (label)
4582 emit_label (label);
4585 /* Handle calls that return values in multiple non-contiguous locations.
4586 The Irix 6 ABI has examples of this. */
4587 else if (GET_CODE (target) == PARALLEL)
4588 emit_group_load (target, temp, TREE_TYPE (exp),
4589 int_size_in_bytes (TREE_TYPE (exp)));
4590 else if (GET_MODE (temp) == BLKmode)
4591 emit_block_move (target, temp, expr_size (exp),
4592 (call_param_p
4593 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4594 else
4596 temp = force_operand (temp, target);
4597 if (temp != target)
4598 emit_move_insn (target, temp);
4602 return NULL_RTX;
4605 /* Helper for categorize_ctor_elements. Identical interface. */
4607 static bool
4608 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4609 HOST_WIDE_INT *p_elt_count,
4610 bool *p_must_clear)
4612 unsigned HOST_WIDE_INT idx;
4613 HOST_WIDE_INT nz_elts, elt_count;
4614 tree value, purpose;
4616 /* Whether CTOR is a valid constant initializer, in accordance with what
4617 initializer_constant_valid_p does. If inferred from the constructor
4618 elements, true until proven otherwise. */
4619 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4620 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4622 nz_elts = 0;
4623 elt_count = 0;
4625 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4627 HOST_WIDE_INT mult;
4629 mult = 1;
4630 if (TREE_CODE (purpose) == RANGE_EXPR)
4632 tree lo_index = TREE_OPERAND (purpose, 0);
4633 tree hi_index = TREE_OPERAND (purpose, 1);
4635 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4636 mult = (tree_low_cst (hi_index, 1)
4637 - tree_low_cst (lo_index, 1) + 1);
4640 switch (TREE_CODE (value))
4642 case CONSTRUCTOR:
4644 HOST_WIDE_INT nz = 0, ic = 0;
4646 bool const_elt_p
4647 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4649 nz_elts += mult * nz;
4650 elt_count += mult * ic;
4652 if (const_from_elts_p && const_p)
4653 const_p = const_elt_p;
4655 break;
4657 case INTEGER_CST:
4658 case REAL_CST:
4659 if (!initializer_zerop (value))
4660 nz_elts += mult;
4661 elt_count += mult;
4662 break;
4664 case STRING_CST:
4665 nz_elts += mult * TREE_STRING_LENGTH (value);
4666 elt_count += mult * TREE_STRING_LENGTH (value);
4667 break;
4669 case COMPLEX_CST:
4670 if (!initializer_zerop (TREE_REALPART (value)))
4671 nz_elts += mult;
4672 if (!initializer_zerop (TREE_IMAGPART (value)))
4673 nz_elts += mult;
4674 elt_count += mult;
4675 break;
4677 case VECTOR_CST:
4679 tree v;
4680 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4682 if (!initializer_zerop (TREE_VALUE (v)))
4683 nz_elts += mult;
4684 elt_count += mult;
4687 break;
4689 default:
4690 nz_elts += mult;
4691 elt_count += mult;
4693 if (const_from_elts_p && const_p)
4694 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4695 != NULL_TREE;
4696 break;
4700 if (!*p_must_clear
4701 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4702 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4704 tree init_sub_type;
4705 bool clear_this = true;
4707 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4709 /* We don't expect more than one element of the union to be
4710 initialized. Not sure what we should do otherwise... */
4711 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4712 == 1);
4714 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4715 CONSTRUCTOR_ELTS (ctor),
4716 0)->value);
4718 /* ??? We could look at each element of the union, and find the
4719 largest element. Which would avoid comparing the size of the
4720 initialized element against any tail padding in the union.
4721 Doesn't seem worth the effort... */
4722 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4723 TYPE_SIZE (init_sub_type)) == 1)
4725 /* And now we have to find out if the element itself is fully
4726 constructed. E.g. for union { struct { int a, b; } s; } u
4727 = { .s = { .a = 1 } }. */
4728 if (elt_count == count_type_elements (init_sub_type, false))
4729 clear_this = false;
4733 *p_must_clear = clear_this;
4736 *p_nz_elts += nz_elts;
4737 *p_elt_count += elt_count;
4739 return const_p;
4742 /* Examine CTOR to discover:
4743 * how many scalar fields are set to nonzero values,
4744 and place it in *P_NZ_ELTS;
4745 * how many scalar fields in total are in CTOR,
4746 and place it in *P_ELT_COUNT.
4747 * if a type is a union, and the initializer from the constructor
4748 is not the largest element in the union, then set *p_must_clear.
4750 Return whether or not CTOR is a valid static constant initializer, the same
4751 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4753 bool
4754 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4755 HOST_WIDE_INT *p_elt_count,
4756 bool *p_must_clear)
4758 *p_nz_elts = 0;
4759 *p_elt_count = 0;
4760 *p_must_clear = false;
4762 return
4763 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4766 /* Count the number of scalars in TYPE. Return -1 on overflow or
4767 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
4768 array member at the end of the structure. */
4770 HOST_WIDE_INT
4771 count_type_elements (tree type, bool allow_flexarr)
4773 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4774 switch (TREE_CODE (type))
4776 case ARRAY_TYPE:
4778 tree telts = array_type_nelts (type);
4779 if (telts && host_integerp (telts, 1))
4781 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4782 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4783 if (n == 0)
4784 return 0;
4785 else if (max / n > m)
4786 return n * m;
4788 return -1;
4791 case RECORD_TYPE:
4793 HOST_WIDE_INT n = 0, t;
4794 tree f;
4796 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4797 if (TREE_CODE (f) == FIELD_DECL)
4799 t = count_type_elements (TREE_TYPE (f), false);
4800 if (t < 0)
4802 /* Check for structures with flexible array member. */
4803 tree tf = TREE_TYPE (f);
4804 if (allow_flexarr
4805 && TREE_CHAIN (f) == NULL
4806 && TREE_CODE (tf) == ARRAY_TYPE
4807 && TYPE_DOMAIN (tf)
4808 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4809 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4810 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4811 && int_size_in_bytes (type) >= 0)
4812 break;
4814 return -1;
4816 n += t;
4819 return n;
4822 case UNION_TYPE:
4823 case QUAL_UNION_TYPE:
4825 /* Ho hum. How in the world do we guess here? Clearly it isn't
4826 right to count the fields. Guess based on the number of words. */
4827 HOST_WIDE_INT n = int_size_in_bytes (type);
4828 if (n < 0)
4829 return -1;
4830 return n / UNITS_PER_WORD;
4833 case COMPLEX_TYPE:
4834 return 2;
4836 case VECTOR_TYPE:
4837 return TYPE_VECTOR_SUBPARTS (type);
4839 case INTEGER_TYPE:
4840 case REAL_TYPE:
4841 case ENUMERAL_TYPE:
4842 case BOOLEAN_TYPE:
4843 case POINTER_TYPE:
4844 case OFFSET_TYPE:
4845 case REFERENCE_TYPE:
4846 return 1;
4848 case VOID_TYPE:
4849 case METHOD_TYPE:
4850 case FUNCTION_TYPE:
4851 case LANG_TYPE:
4852 default:
4853 gcc_unreachable ();
4857 /* Return 1 if EXP contains mostly (3/4) zeros. */
4859 static int
4860 mostly_zeros_p (tree exp)
4862 if (TREE_CODE (exp) == CONSTRUCTOR)
4865 HOST_WIDE_INT nz_elts, count, elts;
4866 bool must_clear;
4868 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4869 if (must_clear)
4870 return 1;
4872 elts = count_type_elements (TREE_TYPE (exp), false);
4874 return nz_elts < elts / 4;
4877 return initializer_zerop (exp);
4880 /* Return 1 if EXP contains all zeros. */
4882 static int
4883 all_zeros_p (tree exp)
4885 if (TREE_CODE (exp) == CONSTRUCTOR)
4888 HOST_WIDE_INT nz_elts, count;
4889 bool must_clear;
4891 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4892 return nz_elts == 0;
4895 return initializer_zerop (exp);
4898 /* Helper function for store_constructor.
4899 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4900 TYPE is the type of the CONSTRUCTOR, not the element type.
4901 CLEARED is as for store_constructor.
4902 ALIAS_SET is the alias set to use for any stores.
4904 This provides a recursive shortcut back to store_constructor when it isn't
4905 necessary to go through store_field. This is so that we can pass through
4906 the cleared field to let store_constructor know that we may not have to
4907 clear a substructure if the outer structure has already been cleared. */
4909 static void
4910 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4911 HOST_WIDE_INT bitpos, enum machine_mode mode,
4912 tree exp, tree type, int cleared, int alias_set)
4914 if (TREE_CODE (exp) == CONSTRUCTOR
4915 /* We can only call store_constructor recursively if the size and
4916 bit position are on a byte boundary. */
4917 && bitpos % BITS_PER_UNIT == 0
4918 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4919 /* If we have a nonzero bitpos for a register target, then we just
4920 let store_field do the bitfield handling. This is unlikely to
4921 generate unnecessary clear instructions anyways. */
4922 && (bitpos == 0 || MEM_P (target)))
4924 if (MEM_P (target))
4925 target
4926 = adjust_address (target,
4927 GET_MODE (target) == BLKmode
4928 || 0 != (bitpos
4929 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4930 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4933 /* Update the alias set, if required. */
4934 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4935 && MEM_ALIAS_SET (target) != 0)
4937 target = copy_rtx (target);
4938 set_mem_alias_set (target, alias_set);
4941 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4943 else
4944 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4947 /* Store the value of constructor EXP into the rtx TARGET.
4948 TARGET is either a REG or a MEM; we know it cannot conflict, since
4949 safe_from_p has been called.
4950 CLEARED is true if TARGET is known to have been zero'd.
4951 SIZE is the number of bytes of TARGET we are allowed to modify: this
4952 may not be the same as the size of EXP if we are assigning to a field
4953 which has been packed to exclude padding bits. */
4955 static void
4956 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4958 tree type = TREE_TYPE (exp);
4959 #ifdef WORD_REGISTER_OPERATIONS
4960 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4961 #endif
4963 switch (TREE_CODE (type))
4965 case RECORD_TYPE:
4966 case UNION_TYPE:
4967 case QUAL_UNION_TYPE:
4969 unsigned HOST_WIDE_INT idx;
4970 tree field, value;
4972 /* If size is zero or the target is already cleared, do nothing. */
4973 if (size == 0 || cleared)
4974 cleared = 1;
4975 /* We either clear the aggregate or indicate the value is dead. */
4976 else if ((TREE_CODE (type) == UNION_TYPE
4977 || TREE_CODE (type) == QUAL_UNION_TYPE)
4978 && ! CONSTRUCTOR_ELTS (exp))
4979 /* If the constructor is empty, clear the union. */
4981 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4982 cleared = 1;
4985 /* If we are building a static constructor into a register,
4986 set the initial value as zero so we can fold the value into
4987 a constant. But if more than one register is involved,
4988 this probably loses. */
4989 else if (REG_P (target) && TREE_STATIC (exp)
4990 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4992 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4993 cleared = 1;
4996 /* If the constructor has fewer fields than the structure or
4997 if we are initializing the structure to mostly zeros, clear
4998 the whole structure first. Don't do this if TARGET is a
4999 register whose mode size isn't equal to SIZE since
5000 clear_storage can't handle this case. */
5001 else if (size > 0
5002 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5003 != fields_length (type))
5004 || mostly_zeros_p (exp))
5005 && (!REG_P (target)
5006 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5007 == size)))
5009 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5010 cleared = 1;
5013 if (! cleared)
5014 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5016 /* Store each element of the constructor into the
5017 corresponding field of TARGET. */
5018 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5020 enum machine_mode mode;
5021 HOST_WIDE_INT bitsize;
5022 HOST_WIDE_INT bitpos = 0;
5023 tree offset;
5024 rtx to_rtx = target;
5026 /* Just ignore missing fields. We cleared the whole
5027 structure, above, if any fields are missing. */
5028 if (field == 0)
5029 continue;
5031 if (cleared && initializer_zerop (value))
5032 continue;
5034 if (host_integerp (DECL_SIZE (field), 1))
5035 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5036 else
5037 bitsize = -1;
5039 mode = DECL_MODE (field);
5040 if (DECL_BIT_FIELD (field))
5041 mode = VOIDmode;
5043 offset = DECL_FIELD_OFFSET (field);
5044 if (host_integerp (offset, 0)
5045 && host_integerp (bit_position (field), 0))
5047 bitpos = int_bit_position (field);
5048 offset = 0;
5050 else
5051 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5053 if (offset)
5055 rtx offset_rtx;
5057 offset
5058 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5059 make_tree (TREE_TYPE (exp),
5060 target));
5062 offset_rtx = expand_normal (offset);
5063 gcc_assert (MEM_P (to_rtx));
5065 #ifdef POINTERS_EXTEND_UNSIGNED
5066 if (GET_MODE (offset_rtx) != Pmode)
5067 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5068 #else
5069 if (GET_MODE (offset_rtx) != ptr_mode)
5070 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5071 #endif
5073 to_rtx = offset_address (to_rtx, offset_rtx,
5074 highest_pow2_factor (offset));
5077 #ifdef WORD_REGISTER_OPERATIONS
5078 /* If this initializes a field that is smaller than a
5079 word, at the start of a word, try to widen it to a full
5080 word. This special case allows us to output C++ member
5081 function initializations in a form that the optimizers
5082 can understand. */
5083 if (REG_P (target)
5084 && bitsize < BITS_PER_WORD
5085 && bitpos % BITS_PER_WORD == 0
5086 && GET_MODE_CLASS (mode) == MODE_INT
5087 && TREE_CODE (value) == INTEGER_CST
5088 && exp_size >= 0
5089 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5091 tree type = TREE_TYPE (value);
5093 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5095 type = lang_hooks.types.type_for_size
5096 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5097 value = fold_convert (type, value);
5100 if (BYTES_BIG_ENDIAN)
5101 value
5102 = fold_build2 (LSHIFT_EXPR, type, value,
5103 build_int_cst (type,
5104 BITS_PER_WORD - bitsize));
5105 bitsize = BITS_PER_WORD;
5106 mode = word_mode;
5108 #endif
5110 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5111 && DECL_NONADDRESSABLE_P (field))
5113 to_rtx = copy_rtx (to_rtx);
5114 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5117 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5118 value, type, cleared,
5119 get_alias_set (TREE_TYPE (field)));
5121 break;
5123 case ARRAY_TYPE:
5125 tree value, index;
5126 unsigned HOST_WIDE_INT i;
5127 int need_to_clear;
5128 tree domain;
5129 tree elttype = TREE_TYPE (type);
5130 int const_bounds_p;
5131 HOST_WIDE_INT minelt = 0;
5132 HOST_WIDE_INT maxelt = 0;
5134 domain = TYPE_DOMAIN (type);
5135 const_bounds_p = (TYPE_MIN_VALUE (domain)
5136 && TYPE_MAX_VALUE (domain)
5137 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5138 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5140 /* If we have constant bounds for the range of the type, get them. */
5141 if (const_bounds_p)
5143 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5144 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5147 /* If the constructor has fewer elements than the array, clear
5148 the whole array first. Similarly if this is static
5149 constructor of a non-BLKmode object. */
5150 if (cleared)
5151 need_to_clear = 0;
5152 else if (REG_P (target) && TREE_STATIC (exp))
5153 need_to_clear = 1;
5154 else
5156 unsigned HOST_WIDE_INT idx;
5157 tree index, value;
5158 HOST_WIDE_INT count = 0, zero_count = 0;
5159 need_to_clear = ! const_bounds_p;
5161 /* This loop is a more accurate version of the loop in
5162 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5163 is also needed to check for missing elements. */
5164 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5166 HOST_WIDE_INT this_node_count;
5168 if (need_to_clear)
5169 break;
5171 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5173 tree lo_index = TREE_OPERAND (index, 0);
5174 tree hi_index = TREE_OPERAND (index, 1);
5176 if (! host_integerp (lo_index, 1)
5177 || ! host_integerp (hi_index, 1))
5179 need_to_clear = 1;
5180 break;
5183 this_node_count = (tree_low_cst (hi_index, 1)
5184 - tree_low_cst (lo_index, 1) + 1);
5186 else
5187 this_node_count = 1;
5189 count += this_node_count;
5190 if (mostly_zeros_p (value))
5191 zero_count += this_node_count;
5194 /* Clear the entire array first if there are any missing
5195 elements, or if the incidence of zero elements is >=
5196 75%. */
5197 if (! need_to_clear
5198 && (count < maxelt - minelt + 1
5199 || 4 * zero_count >= 3 * count))
5200 need_to_clear = 1;
5203 if (need_to_clear && size > 0)
5205 if (REG_P (target))
5206 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5207 else
5208 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5209 cleared = 1;
5212 if (!cleared && REG_P (target))
5213 /* Inform later passes that the old value is dead. */
5214 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5216 /* Store each element of the constructor into the
5217 corresponding element of TARGET, determined by counting the
5218 elements. */
5219 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5221 enum machine_mode mode;
5222 HOST_WIDE_INT bitsize;
5223 HOST_WIDE_INT bitpos;
5224 int unsignedp;
5225 rtx xtarget = target;
5227 if (cleared && initializer_zerop (value))
5228 continue;
5230 unsignedp = TYPE_UNSIGNED (elttype);
5231 mode = TYPE_MODE (elttype);
5232 if (mode == BLKmode)
5233 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5234 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5235 : -1);
5236 else
5237 bitsize = GET_MODE_BITSIZE (mode);
5239 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5241 tree lo_index = TREE_OPERAND (index, 0);
5242 tree hi_index = TREE_OPERAND (index, 1);
5243 rtx index_r, pos_rtx;
5244 HOST_WIDE_INT lo, hi, count;
5245 tree position;
5247 /* If the range is constant and "small", unroll the loop. */
5248 if (const_bounds_p
5249 && host_integerp (lo_index, 0)
5250 && host_integerp (hi_index, 0)
5251 && (lo = tree_low_cst (lo_index, 0),
5252 hi = tree_low_cst (hi_index, 0),
5253 count = hi - lo + 1,
5254 (!MEM_P (target)
5255 || count <= 2
5256 || (host_integerp (TYPE_SIZE (elttype), 1)
5257 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5258 <= 40 * 8)))))
5260 lo -= minelt; hi -= minelt;
5261 for (; lo <= hi; lo++)
5263 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5265 if (MEM_P (target)
5266 && !MEM_KEEP_ALIAS_SET_P (target)
5267 && TREE_CODE (type) == ARRAY_TYPE
5268 && TYPE_NONALIASED_COMPONENT (type))
5270 target = copy_rtx (target);
5271 MEM_KEEP_ALIAS_SET_P (target) = 1;
5274 store_constructor_field
5275 (target, bitsize, bitpos, mode, value, type, cleared,
5276 get_alias_set (elttype));
5279 else
5281 rtx loop_start = gen_label_rtx ();
5282 rtx loop_end = gen_label_rtx ();
5283 tree exit_cond;
5285 expand_normal (hi_index);
5286 unsignedp = TYPE_UNSIGNED (domain);
5288 index = build_decl (VAR_DECL, NULL_TREE, domain);
5290 index_r
5291 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5292 &unsignedp, 0));
5293 SET_DECL_RTL (index, index_r);
5294 store_expr (lo_index, index_r, 0);
5296 /* Build the head of the loop. */
5297 do_pending_stack_adjust ();
5298 emit_label (loop_start);
5300 /* Assign value to element index. */
5301 position =
5302 fold_convert (ssizetype,
5303 fold_build2 (MINUS_EXPR,
5304 TREE_TYPE (index),
5305 index,
5306 TYPE_MIN_VALUE (domain)));
5308 position =
5309 size_binop (MULT_EXPR, position,
5310 fold_convert (ssizetype,
5311 TYPE_SIZE_UNIT (elttype)));
5313 pos_rtx = expand_normal (position);
5314 xtarget = offset_address (target, pos_rtx,
5315 highest_pow2_factor (position));
5316 xtarget = adjust_address (xtarget, mode, 0);
5317 if (TREE_CODE (value) == CONSTRUCTOR)
5318 store_constructor (value, xtarget, cleared,
5319 bitsize / BITS_PER_UNIT);
5320 else
5321 store_expr (value, xtarget, 0);
5323 /* Generate a conditional jump to exit the loop. */
5324 exit_cond = build2 (LT_EXPR, integer_type_node,
5325 index, hi_index);
5326 jumpif (exit_cond, loop_end);
5328 /* Update the loop counter, and jump to the head of
5329 the loop. */
5330 expand_assignment (index,
5331 build2 (PLUS_EXPR, TREE_TYPE (index),
5332 index, integer_one_node));
5334 emit_jump (loop_start);
5336 /* Build the end of the loop. */
5337 emit_label (loop_end);
5340 else if ((index != 0 && ! host_integerp (index, 0))
5341 || ! host_integerp (TYPE_SIZE (elttype), 1))
5343 tree position;
5345 if (index == 0)
5346 index = ssize_int (1);
5348 if (minelt)
5349 index = fold_convert (ssizetype,
5350 fold_build2 (MINUS_EXPR,
5351 TREE_TYPE (index),
5352 index,
5353 TYPE_MIN_VALUE (domain)));
5355 position =
5356 size_binop (MULT_EXPR, index,
5357 fold_convert (ssizetype,
5358 TYPE_SIZE_UNIT (elttype)));
5359 xtarget = offset_address (target,
5360 expand_normal (position),
5361 highest_pow2_factor (position));
5362 xtarget = adjust_address (xtarget, mode, 0);
5363 store_expr (value, xtarget, 0);
5365 else
5367 if (index != 0)
5368 bitpos = ((tree_low_cst (index, 0) - minelt)
5369 * tree_low_cst (TYPE_SIZE (elttype), 1));
5370 else
5371 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5373 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5374 && TREE_CODE (type) == ARRAY_TYPE
5375 && TYPE_NONALIASED_COMPONENT (type))
5377 target = copy_rtx (target);
5378 MEM_KEEP_ALIAS_SET_P (target) = 1;
5380 store_constructor_field (target, bitsize, bitpos, mode, value,
5381 type, cleared, get_alias_set (elttype));
5384 break;
5387 case VECTOR_TYPE:
5389 unsigned HOST_WIDE_INT idx;
5390 constructor_elt *ce;
5391 int i;
5392 int need_to_clear;
5393 int icode = 0;
5394 tree elttype = TREE_TYPE (type);
5395 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5396 enum machine_mode eltmode = TYPE_MODE (elttype);
5397 HOST_WIDE_INT bitsize;
5398 HOST_WIDE_INT bitpos;
5399 rtvec vector = NULL;
5400 unsigned n_elts;
5402 gcc_assert (eltmode != BLKmode);
5404 n_elts = TYPE_VECTOR_SUBPARTS (type);
5405 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5407 enum machine_mode mode = GET_MODE (target);
5409 icode = (int) vec_init_optab->handlers[mode].insn_code;
5410 if (icode != CODE_FOR_nothing)
5412 unsigned int i;
5414 vector = rtvec_alloc (n_elts);
5415 for (i = 0; i < n_elts; i++)
5416 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5420 /* If the constructor has fewer elements than the vector,
5421 clear the whole array first. Similarly if this is static
5422 constructor of a non-BLKmode object. */
5423 if (cleared)
5424 need_to_clear = 0;
5425 else if (REG_P (target) && TREE_STATIC (exp))
5426 need_to_clear = 1;
5427 else
5429 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5430 tree value;
5432 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5434 int n_elts_here = tree_low_cst
5435 (int_const_binop (TRUNC_DIV_EXPR,
5436 TYPE_SIZE (TREE_TYPE (value)),
5437 TYPE_SIZE (elttype), 0), 1);
5439 count += n_elts_here;
5440 if (mostly_zeros_p (value))
5441 zero_count += n_elts_here;
5444 /* Clear the entire vector first if there are any missing elements,
5445 or if the incidence of zero elements is >= 75%. */
5446 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5449 if (need_to_clear && size > 0 && !vector)
5451 if (REG_P (target))
5452 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5453 else
5454 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5455 cleared = 1;
5458 /* Inform later passes that the old value is dead. */
5459 if (!cleared && !vector && REG_P (target))
5460 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5462 /* Store each element of the constructor into the corresponding
5463 element of TARGET, determined by counting the elements. */
5464 for (idx = 0, i = 0;
5465 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5466 idx++, i += bitsize / elt_size)
5468 HOST_WIDE_INT eltpos;
5469 tree value = ce->value;
5471 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5472 if (cleared && initializer_zerop (value))
5473 continue;
5475 if (ce->index)
5476 eltpos = tree_low_cst (ce->index, 1);
5477 else
5478 eltpos = i;
5480 if (vector)
5482 /* Vector CONSTRUCTORs should only be built from smaller
5483 vectors in the case of BLKmode vectors. */
5484 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5485 RTVEC_ELT (vector, eltpos)
5486 = expand_normal (value);
5488 else
5490 enum machine_mode value_mode =
5491 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5492 ? TYPE_MODE (TREE_TYPE (value))
5493 : eltmode;
5494 bitpos = eltpos * elt_size;
5495 store_constructor_field (target, bitsize, bitpos,
5496 value_mode, value, type,
5497 cleared, get_alias_set (elttype));
5501 if (vector)
5502 emit_insn (GEN_FCN (icode)
5503 (target,
5504 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5505 break;
5508 default:
5509 gcc_unreachable ();
5513 /* Store the value of EXP (an expression tree)
5514 into a subfield of TARGET which has mode MODE and occupies
5515 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5516 If MODE is VOIDmode, it means that we are storing into a bit-field.
5518 Always return const0_rtx unless we have something particular to
5519 return.
5521 TYPE is the type of the underlying object,
5523 ALIAS_SET is the alias set for the destination. This value will
5524 (in general) be different from that for TARGET, since TARGET is a
5525 reference to the containing structure. */
5527 static rtx
5528 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5529 enum machine_mode mode, tree exp, tree type, int alias_set)
5531 HOST_WIDE_INT width_mask = 0;
5533 if (TREE_CODE (exp) == ERROR_MARK)
5534 return const0_rtx;
5536 /* If we have nothing to store, do nothing unless the expression has
5537 side-effects. */
5538 if (bitsize == 0)
5539 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5540 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5541 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5543 /* If we are storing into an unaligned field of an aligned union that is
5544 in a register, we may have the mode of TARGET being an integer mode but
5545 MODE == BLKmode. In that case, get an aligned object whose size and
5546 alignment are the same as TARGET and store TARGET into it (we can avoid
5547 the store if the field being stored is the entire width of TARGET). Then
5548 call ourselves recursively to store the field into a BLKmode version of
5549 that object. Finally, load from the object into TARGET. This is not
5550 very efficient in general, but should only be slightly more expensive
5551 than the otherwise-required unaligned accesses. Perhaps this can be
5552 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5553 twice, once with emit_move_insn and once via store_field. */
5555 if (mode == BLKmode
5556 && (REG_P (target) || GET_CODE (target) == SUBREG))
5558 rtx object = assign_temp (type, 0, 1, 1);
5559 rtx blk_object = adjust_address (object, BLKmode, 0);
5561 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5562 emit_move_insn (object, target);
5564 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5566 emit_move_insn (target, object);
5568 /* We want to return the BLKmode version of the data. */
5569 return blk_object;
5572 if (GET_CODE (target) == CONCAT)
5574 /* We're storing into a struct containing a single __complex. */
5576 gcc_assert (!bitpos);
5577 return store_expr (exp, target, 0);
5580 /* If the structure is in a register or if the component
5581 is a bit field, we cannot use addressing to access it.
5582 Use bit-field techniques or SUBREG to store in it. */
5584 if (mode == VOIDmode
5585 || (mode != BLKmode && ! direct_store[(int) mode]
5586 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5587 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5588 || REG_P (target)
5589 || GET_CODE (target) == SUBREG
5590 /* If the field isn't aligned enough to store as an ordinary memref,
5591 store it as a bit field. */
5592 || (mode != BLKmode
5593 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5594 || bitpos % GET_MODE_ALIGNMENT (mode))
5595 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5596 || (bitpos % BITS_PER_UNIT != 0)))
5597 /* If the RHS and field are a constant size and the size of the
5598 RHS isn't the same size as the bitfield, we must use bitfield
5599 operations. */
5600 || (bitsize >= 0
5601 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5602 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5604 rtx temp;
5606 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5607 implies a mask operation. If the precision is the same size as
5608 the field we're storing into, that mask is redundant. This is
5609 particularly common with bit field assignments generated by the
5610 C front end. */
5611 if (TREE_CODE (exp) == NOP_EXPR)
5613 tree type = TREE_TYPE (exp);
5614 if (INTEGRAL_TYPE_P (type)
5615 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5616 && bitsize == TYPE_PRECISION (type))
5618 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5619 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5620 exp = TREE_OPERAND (exp, 0);
5624 temp = expand_normal (exp);
5626 /* If BITSIZE is narrower than the size of the type of EXP
5627 we will be narrowing TEMP. Normally, what's wanted are the
5628 low-order bits. However, if EXP's type is a record and this is
5629 big-endian machine, we want the upper BITSIZE bits. */
5630 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5631 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5632 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5633 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5634 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5635 - bitsize),
5636 NULL_RTX, 1);
5638 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5639 MODE. */
5640 if (mode != VOIDmode && mode != BLKmode
5641 && mode != TYPE_MODE (TREE_TYPE (exp)))
5642 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5644 /* If the modes of TARGET and TEMP are both BLKmode, both
5645 must be in memory and BITPOS must be aligned on a byte
5646 boundary. If so, we simply do a block copy. */
5647 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5649 gcc_assert (MEM_P (target) && MEM_P (temp)
5650 && !(bitpos % BITS_PER_UNIT));
5652 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5653 emit_block_move (target, temp,
5654 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5655 / BITS_PER_UNIT),
5656 BLOCK_OP_NORMAL);
5658 return const0_rtx;
5661 /* Store the value in the bitfield. */
5662 store_bit_field (target, bitsize, bitpos, mode, temp);
5664 return const0_rtx;
5666 else
5668 /* Now build a reference to just the desired component. */
5669 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5671 if (to_rtx == target)
5672 to_rtx = copy_rtx (to_rtx);
5674 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5675 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5676 set_mem_alias_set (to_rtx, alias_set);
5678 return store_expr (exp, to_rtx, 0);
5682 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5683 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5684 codes and find the ultimate containing object, which we return.
5686 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5687 bit position, and *PUNSIGNEDP to the signedness of the field.
5688 If the position of the field is variable, we store a tree
5689 giving the variable offset (in units) in *POFFSET.
5690 This offset is in addition to the bit position.
5691 If the position is not variable, we store 0 in *POFFSET.
5693 If any of the extraction expressions is volatile,
5694 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5696 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5697 is a mode that can be used to access the field. In that case, *PBITSIZE
5698 is redundant.
5700 If the field describes a variable-sized object, *PMODE is set to
5701 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5702 this case, but the address of the object can be found.
5704 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5705 look through nodes that serve as markers of a greater alignment than
5706 the one that can be deduced from the expression. These nodes make it
5707 possible for front-ends to prevent temporaries from being created by
5708 the middle-end on alignment considerations. For that purpose, the
5709 normal operating mode at high-level is to always pass FALSE so that
5710 the ultimate containing object is really returned; moreover, the
5711 associated predicate handled_component_p will always return TRUE
5712 on these nodes, thus indicating that they are essentially handled
5713 by get_inner_reference. TRUE should only be passed when the caller
5714 is scanning the expression in order to build another representation
5715 and specifically knows how to handle these nodes; as such, this is
5716 the normal operating mode in the RTL expanders. */
5718 tree
5719 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5720 HOST_WIDE_INT *pbitpos, tree *poffset,
5721 enum machine_mode *pmode, int *punsignedp,
5722 int *pvolatilep, bool keep_aligning)
5724 tree size_tree = 0;
5725 enum machine_mode mode = VOIDmode;
5726 tree offset = size_zero_node;
5727 tree bit_offset = bitsize_zero_node;
5728 tree tem;
5730 /* First get the mode, signedness, and size. We do this from just the
5731 outermost expression. */
5732 if (TREE_CODE (exp) == COMPONENT_REF)
5734 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5735 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5736 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5738 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5740 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5742 size_tree = TREE_OPERAND (exp, 1);
5743 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5745 /* For vector types, with the correct size of access, use the mode of
5746 inner type. */
5747 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5748 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5749 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5750 mode = TYPE_MODE (TREE_TYPE (exp));
5752 else
5754 mode = TYPE_MODE (TREE_TYPE (exp));
5755 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5757 if (mode == BLKmode)
5758 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5759 else
5760 *pbitsize = GET_MODE_BITSIZE (mode);
5763 if (size_tree != 0)
5765 if (! host_integerp (size_tree, 1))
5766 mode = BLKmode, *pbitsize = -1;
5767 else
5768 *pbitsize = tree_low_cst (size_tree, 1);
5771 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5772 and find the ultimate containing object. */
5773 while (1)
5775 switch (TREE_CODE (exp))
5777 case BIT_FIELD_REF:
5778 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5779 TREE_OPERAND (exp, 2));
5780 break;
5782 case COMPONENT_REF:
5784 tree field = TREE_OPERAND (exp, 1);
5785 tree this_offset = component_ref_field_offset (exp);
5787 /* If this field hasn't been filled in yet, don't go past it.
5788 This should only happen when folding expressions made during
5789 type construction. */
5790 if (this_offset == 0)
5791 break;
5793 offset = size_binop (PLUS_EXPR, offset, this_offset);
5794 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5795 DECL_FIELD_BIT_OFFSET (field));
5797 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5799 break;
5801 case ARRAY_REF:
5802 case ARRAY_RANGE_REF:
5804 tree index = TREE_OPERAND (exp, 1);
5805 tree low_bound = array_ref_low_bound (exp);
5806 tree unit_size = array_ref_element_size (exp);
5808 /* We assume all arrays have sizes that are a multiple of a byte.
5809 First subtract the lower bound, if any, in the type of the
5810 index, then convert to sizetype and multiply by the size of
5811 the array element. */
5812 if (! integer_zerop (low_bound))
5813 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5814 index, low_bound);
5816 offset = size_binop (PLUS_EXPR, offset,
5817 size_binop (MULT_EXPR,
5818 fold_convert (sizetype, index),
5819 unit_size));
5821 break;
5823 case REALPART_EXPR:
5824 break;
5826 case IMAGPART_EXPR:
5827 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5828 bitsize_int (*pbitsize));
5829 break;
5831 case VIEW_CONVERT_EXPR:
5832 if (keep_aligning && STRICT_ALIGNMENT
5833 && (TYPE_ALIGN (TREE_TYPE (exp))
5834 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5835 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5836 < BIGGEST_ALIGNMENT)
5837 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5838 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5839 goto done;
5840 break;
5842 default:
5843 goto done;
5846 /* If any reference in the chain is volatile, the effect is volatile. */
5847 if (TREE_THIS_VOLATILE (exp))
5848 *pvolatilep = 1;
5850 exp = TREE_OPERAND (exp, 0);
5852 done:
5854 /* If OFFSET is constant, see if we can return the whole thing as a
5855 constant bit position. Otherwise, split it up. */
5856 if (host_integerp (offset, 0)
5857 && 0 != (tem = size_binop (MULT_EXPR,
5858 fold_convert (bitsizetype, offset),
5859 bitsize_unit_node))
5860 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5861 && host_integerp (tem, 0))
5862 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5863 else
5864 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5866 *pmode = mode;
5867 return exp;
5870 /* Return a tree of sizetype representing the size, in bytes, of the element
5871 of EXP, an ARRAY_REF. */
5873 tree
5874 array_ref_element_size (tree exp)
5876 tree aligned_size = TREE_OPERAND (exp, 3);
5877 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5879 /* If a size was specified in the ARRAY_REF, it's the size measured
5880 in alignment units of the element type. So multiply by that value. */
5881 if (aligned_size)
5883 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5884 sizetype from another type of the same width and signedness. */
5885 if (TREE_TYPE (aligned_size) != sizetype)
5886 aligned_size = fold_convert (sizetype, aligned_size);
5887 return size_binop (MULT_EXPR, aligned_size,
5888 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5891 /* Otherwise, take the size from that of the element type. Substitute
5892 any PLACEHOLDER_EXPR that we have. */
5893 else
5894 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5897 /* Return a tree representing the lower bound of the array mentioned in
5898 EXP, an ARRAY_REF. */
5900 tree
5901 array_ref_low_bound (tree exp)
5903 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5905 /* If a lower bound is specified in EXP, use it. */
5906 if (TREE_OPERAND (exp, 2))
5907 return TREE_OPERAND (exp, 2);
5909 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5910 substituting for a PLACEHOLDER_EXPR as needed. */
5911 if (domain_type && TYPE_MIN_VALUE (domain_type))
5912 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5914 /* Otherwise, return a zero of the appropriate type. */
5915 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5918 /* Return a tree representing the upper bound of the array mentioned in
5919 EXP, an ARRAY_REF. */
5921 tree
5922 array_ref_up_bound (tree exp)
5924 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5926 /* If there is a domain type and it has an upper bound, use it, substituting
5927 for a PLACEHOLDER_EXPR as needed. */
5928 if (domain_type && TYPE_MAX_VALUE (domain_type))
5929 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5931 /* Otherwise fail. */
5932 return NULL_TREE;
5935 /* Return a tree representing the offset, in bytes, of the field referenced
5936 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5938 tree
5939 component_ref_field_offset (tree exp)
5941 tree aligned_offset = TREE_OPERAND (exp, 2);
5942 tree field = TREE_OPERAND (exp, 1);
5944 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5945 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5946 value. */
5947 if (aligned_offset)
5949 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5950 sizetype from another type of the same width and signedness. */
5951 if (TREE_TYPE (aligned_offset) != sizetype)
5952 aligned_offset = fold_convert (sizetype, aligned_offset);
5953 return size_binop (MULT_EXPR, aligned_offset,
5954 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5957 /* Otherwise, take the offset from that of the field. Substitute
5958 any PLACEHOLDER_EXPR that we have. */
5959 else
5960 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5963 /* Return 1 if T is an expression that get_inner_reference handles. */
5966 handled_component_p (tree t)
5968 switch (TREE_CODE (t))
5970 case BIT_FIELD_REF:
5971 case COMPONENT_REF:
5972 case ARRAY_REF:
5973 case ARRAY_RANGE_REF:
5974 case VIEW_CONVERT_EXPR:
5975 case REALPART_EXPR:
5976 case IMAGPART_EXPR:
5977 return 1;
5979 default:
5980 return 0;
5984 /* Given an rtx VALUE that may contain additions and multiplications, return
5985 an equivalent value that just refers to a register, memory, or constant.
5986 This is done by generating instructions to perform the arithmetic and
5987 returning a pseudo-register containing the value.
5989 The returned value may be a REG, SUBREG, MEM or constant. */
5992 force_operand (rtx value, rtx target)
5994 rtx op1, op2;
5995 /* Use subtarget as the target for operand 0 of a binary operation. */
5996 rtx subtarget = get_subtarget (target);
5997 enum rtx_code code = GET_CODE (value);
5999 /* Check for subreg applied to an expression produced by loop optimizer. */
6000 if (code == SUBREG
6001 && !REG_P (SUBREG_REG (value))
6002 && !MEM_P (SUBREG_REG (value)))
6004 value
6005 = simplify_gen_subreg (GET_MODE (value),
6006 force_reg (GET_MODE (SUBREG_REG (value)),
6007 force_operand (SUBREG_REG (value),
6008 NULL_RTX)),
6009 GET_MODE (SUBREG_REG (value)),
6010 SUBREG_BYTE (value));
6011 code = GET_CODE (value);
6014 /* Check for a PIC address load. */
6015 if ((code == PLUS || code == MINUS)
6016 && XEXP (value, 0) == pic_offset_table_rtx
6017 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6018 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6019 || GET_CODE (XEXP (value, 1)) == CONST))
6021 if (!subtarget)
6022 subtarget = gen_reg_rtx (GET_MODE (value));
6023 emit_move_insn (subtarget, value);
6024 return subtarget;
6027 if (ARITHMETIC_P (value))
6029 op2 = XEXP (value, 1);
6030 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6031 subtarget = 0;
6032 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6034 code = PLUS;
6035 op2 = negate_rtx (GET_MODE (value), op2);
6038 /* Check for an addition with OP2 a constant integer and our first
6039 operand a PLUS of a virtual register and something else. In that
6040 case, we want to emit the sum of the virtual register and the
6041 constant first and then add the other value. This allows virtual
6042 register instantiation to simply modify the constant rather than
6043 creating another one around this addition. */
6044 if (code == PLUS && GET_CODE (op2) == CONST_INT
6045 && GET_CODE (XEXP (value, 0)) == PLUS
6046 && REG_P (XEXP (XEXP (value, 0), 0))
6047 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6048 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6050 rtx temp = expand_simple_binop (GET_MODE (value), code,
6051 XEXP (XEXP (value, 0), 0), op2,
6052 subtarget, 0, OPTAB_LIB_WIDEN);
6053 return expand_simple_binop (GET_MODE (value), code, temp,
6054 force_operand (XEXP (XEXP (value,
6055 0), 1), 0),
6056 target, 0, OPTAB_LIB_WIDEN);
6059 op1 = force_operand (XEXP (value, 0), subtarget);
6060 op2 = force_operand (op2, NULL_RTX);
6061 switch (code)
6063 case MULT:
6064 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6065 case DIV:
6066 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6067 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6068 target, 1, OPTAB_LIB_WIDEN);
6069 else
6070 return expand_divmod (0,
6071 FLOAT_MODE_P (GET_MODE (value))
6072 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6073 GET_MODE (value), op1, op2, target, 0);
6074 case MOD:
6075 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6076 target, 0);
6077 case UDIV:
6078 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6079 target, 1);
6080 case UMOD:
6081 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6082 target, 1);
6083 case ASHIFTRT:
6084 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6085 target, 0, OPTAB_LIB_WIDEN);
6086 default:
6087 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6088 target, 1, OPTAB_LIB_WIDEN);
6091 if (UNARY_P (value))
6093 if (!target)
6094 target = gen_reg_rtx (GET_MODE (value));
6095 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6096 switch (code)
6098 case ZERO_EXTEND:
6099 case SIGN_EXTEND:
6100 case TRUNCATE:
6101 case FLOAT_EXTEND:
6102 case FLOAT_TRUNCATE:
6103 convert_move (target, op1, code == ZERO_EXTEND);
6104 return target;
6106 case FIX:
6107 case UNSIGNED_FIX:
6108 expand_fix (target, op1, code == UNSIGNED_FIX);
6109 return target;
6111 case FLOAT:
6112 case UNSIGNED_FLOAT:
6113 expand_float (target, op1, code == UNSIGNED_FLOAT);
6114 return target;
6116 default:
6117 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6121 #ifdef INSN_SCHEDULING
6122 /* On machines that have insn scheduling, we want all memory reference to be
6123 explicit, so we need to deal with such paradoxical SUBREGs. */
6124 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6125 && (GET_MODE_SIZE (GET_MODE (value))
6126 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6127 value
6128 = simplify_gen_subreg (GET_MODE (value),
6129 force_reg (GET_MODE (SUBREG_REG (value)),
6130 force_operand (SUBREG_REG (value),
6131 NULL_RTX)),
6132 GET_MODE (SUBREG_REG (value)),
6133 SUBREG_BYTE (value));
6134 #endif
6136 return value;
6139 /* Subroutine of expand_expr: return nonzero iff there is no way that
6140 EXP can reference X, which is being modified. TOP_P is nonzero if this
6141 call is going to be used to determine whether we need a temporary
6142 for EXP, as opposed to a recursive call to this function.
6144 It is always safe for this routine to return zero since it merely
6145 searches for optimization opportunities. */
6148 safe_from_p (rtx x, tree exp, int top_p)
6150 rtx exp_rtl = 0;
6151 int i, nops;
6153 if (x == 0
6154 /* If EXP has varying size, we MUST use a target since we currently
6155 have no way of allocating temporaries of variable size
6156 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6157 So we assume here that something at a higher level has prevented a
6158 clash. This is somewhat bogus, but the best we can do. Only
6159 do this when X is BLKmode and when we are at the top level. */
6160 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6161 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6162 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6163 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6164 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6165 != INTEGER_CST)
6166 && GET_MODE (x) == BLKmode)
6167 /* If X is in the outgoing argument area, it is always safe. */
6168 || (MEM_P (x)
6169 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6170 || (GET_CODE (XEXP (x, 0)) == PLUS
6171 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6172 return 1;
6174 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6175 find the underlying pseudo. */
6176 if (GET_CODE (x) == SUBREG)
6178 x = SUBREG_REG (x);
6179 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6180 return 0;
6183 /* Now look at our tree code and possibly recurse. */
6184 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6186 case tcc_declaration:
6187 exp_rtl = DECL_RTL_IF_SET (exp);
6188 break;
6190 case tcc_constant:
6191 return 1;
6193 case tcc_exceptional:
6194 if (TREE_CODE (exp) == TREE_LIST)
6196 while (1)
6198 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6199 return 0;
6200 exp = TREE_CHAIN (exp);
6201 if (!exp)
6202 return 1;
6203 if (TREE_CODE (exp) != TREE_LIST)
6204 return safe_from_p (x, exp, 0);
6207 else if (TREE_CODE (exp) == CONSTRUCTOR)
6209 constructor_elt *ce;
6210 unsigned HOST_WIDE_INT idx;
6212 for (idx = 0;
6213 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6214 idx++)
6215 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6216 || !safe_from_p (x, ce->value, 0))
6217 return 0;
6218 return 1;
6220 else if (TREE_CODE (exp) == ERROR_MARK)
6221 return 1; /* An already-visited SAVE_EXPR? */
6222 else
6223 return 0;
6225 case tcc_statement:
6226 /* The only case we look at here is the DECL_INITIAL inside a
6227 DECL_EXPR. */
6228 return (TREE_CODE (exp) != DECL_EXPR
6229 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6230 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6231 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6233 case tcc_binary:
6234 case tcc_comparison:
6235 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6236 return 0;
6237 /* Fall through. */
6239 case tcc_unary:
6240 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6242 case tcc_expression:
6243 case tcc_reference:
6244 case tcc_vl_exp:
6245 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6246 the expression. If it is set, we conflict iff we are that rtx or
6247 both are in memory. Otherwise, we check all operands of the
6248 expression recursively. */
6250 switch (TREE_CODE (exp))
6252 case ADDR_EXPR:
6253 /* If the operand is static or we are static, we can't conflict.
6254 Likewise if we don't conflict with the operand at all. */
6255 if (staticp (TREE_OPERAND (exp, 0))
6256 || TREE_STATIC (exp)
6257 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6258 return 1;
6260 /* Otherwise, the only way this can conflict is if we are taking
6261 the address of a DECL a that address if part of X, which is
6262 very rare. */
6263 exp = TREE_OPERAND (exp, 0);
6264 if (DECL_P (exp))
6266 if (!DECL_RTL_SET_P (exp)
6267 || !MEM_P (DECL_RTL (exp)))
6268 return 0;
6269 else
6270 exp_rtl = XEXP (DECL_RTL (exp), 0);
6272 break;
6274 case MISALIGNED_INDIRECT_REF:
6275 case ALIGN_INDIRECT_REF:
6276 case INDIRECT_REF:
6277 if (MEM_P (x)
6278 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6279 get_alias_set (exp)))
6280 return 0;
6281 break;
6283 case CALL_EXPR:
6284 /* Assume that the call will clobber all hard registers and
6285 all of memory. */
6286 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6287 || MEM_P (x))
6288 return 0;
6289 break;
6291 case WITH_CLEANUP_EXPR:
6292 case CLEANUP_POINT_EXPR:
6293 /* Lowered by gimplify.c. */
6294 gcc_unreachable ();
6296 case SAVE_EXPR:
6297 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6299 default:
6300 break;
6303 /* If we have an rtx, we do not need to scan our operands. */
6304 if (exp_rtl)
6305 break;
6307 nops = TREE_OPERAND_LENGTH (exp);
6308 for (i = 0; i < nops; i++)
6309 if (TREE_OPERAND (exp, i) != 0
6310 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6311 return 0;
6313 break;
6315 case tcc_type:
6316 /* Should never get a type here. */
6317 gcc_unreachable ();
6319 case tcc_gimple_stmt:
6320 gcc_unreachable ();
6323 /* If we have an rtl, find any enclosed object. Then see if we conflict
6324 with it. */
6325 if (exp_rtl)
6327 if (GET_CODE (exp_rtl) == SUBREG)
6329 exp_rtl = SUBREG_REG (exp_rtl);
6330 if (REG_P (exp_rtl)
6331 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6332 return 0;
6335 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6336 are memory and they conflict. */
6337 return ! (rtx_equal_p (x, exp_rtl)
6338 || (MEM_P (x) && MEM_P (exp_rtl)
6339 && true_dependence (exp_rtl, VOIDmode, x,
6340 rtx_addr_varies_p)));
6343 /* If we reach here, it is safe. */
6344 return 1;
6348 /* Return the highest power of two that EXP is known to be a multiple of.
6349 This is used in updating alignment of MEMs in array references. */
6351 unsigned HOST_WIDE_INT
6352 highest_pow2_factor (tree exp)
6354 unsigned HOST_WIDE_INT c0, c1;
6356 switch (TREE_CODE (exp))
6358 case INTEGER_CST:
6359 /* We can find the lowest bit that's a one. If the low
6360 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6361 We need to handle this case since we can find it in a COND_EXPR,
6362 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6363 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6364 later ICE. */
6365 if (TREE_OVERFLOW (exp))
6366 return BIGGEST_ALIGNMENT;
6367 else
6369 /* Note: tree_low_cst is intentionally not used here,
6370 we don't care about the upper bits. */
6371 c0 = TREE_INT_CST_LOW (exp);
6372 c0 &= -c0;
6373 return c0 ? c0 : BIGGEST_ALIGNMENT;
6375 break;
6377 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6378 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6379 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6380 return MIN (c0, c1);
6382 case MULT_EXPR:
6383 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6384 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6385 return c0 * c1;
6387 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6388 case CEIL_DIV_EXPR:
6389 if (integer_pow2p (TREE_OPERAND (exp, 1))
6390 && host_integerp (TREE_OPERAND (exp, 1), 1))
6392 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6393 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6394 return MAX (1, c0 / c1);
6396 break;
6398 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6399 case SAVE_EXPR:
6400 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6402 case COMPOUND_EXPR:
6403 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6405 case COND_EXPR:
6406 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6407 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6408 return MIN (c0, c1);
6410 default:
6411 break;
6414 return 1;
6417 /* Similar, except that the alignment requirements of TARGET are
6418 taken into account. Assume it is at least as aligned as its
6419 type, unless it is a COMPONENT_REF in which case the layout of
6420 the structure gives the alignment. */
6422 static unsigned HOST_WIDE_INT
6423 highest_pow2_factor_for_target (tree target, tree exp)
6425 unsigned HOST_WIDE_INT target_align, factor;
6427 factor = highest_pow2_factor (exp);
6428 if (TREE_CODE (target) == COMPONENT_REF)
6429 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6430 else
6431 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6432 return MAX (factor, target_align);
6435 /* Return &VAR expression for emulated thread local VAR. */
6437 static tree
6438 emutls_var_address (tree var)
6440 tree emuvar = emutls_decl (var);
6441 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6442 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6443 tree arglist = build_tree_list (NULL_TREE, arg);
6444 tree call = build_function_call_expr (fn, arglist);
6445 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6448 /* Expands variable VAR. */
6450 void
6451 expand_var (tree var)
6453 if (DECL_EXTERNAL (var))
6454 return;
6456 if (TREE_STATIC (var))
6457 /* If this is an inlined copy of a static local variable,
6458 look up the original decl. */
6459 var = DECL_ORIGIN (var);
6461 if (TREE_STATIC (var)
6462 ? !TREE_ASM_WRITTEN (var)
6463 : !DECL_RTL_SET_P (var))
6465 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6466 /* Should be ignored. */;
6467 else if (lang_hooks.expand_decl (var))
6468 /* OK. */;
6469 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6470 expand_decl (var);
6471 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6472 rest_of_decl_compilation (var, 0, 0);
6473 else
6474 /* No expansion needed. */
6475 gcc_assert (TREE_CODE (var) == TYPE_DECL
6476 || TREE_CODE (var) == CONST_DECL
6477 || TREE_CODE (var) == FUNCTION_DECL
6478 || TREE_CODE (var) == LABEL_DECL);
6482 /* Subroutine of expand_expr. Expand the two operands of a binary
6483 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6484 The value may be stored in TARGET if TARGET is nonzero. The
6485 MODIFIER argument is as documented by expand_expr. */
6487 static void
6488 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6489 enum expand_modifier modifier)
6491 if (! safe_from_p (target, exp1, 1))
6492 target = 0;
6493 if (operand_equal_p (exp0, exp1, 0))
6495 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6496 *op1 = copy_rtx (*op0);
6498 else
6500 /* If we need to preserve evaluation order, copy exp0 into its own
6501 temporary variable so that it can't be clobbered by exp1. */
6502 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6503 exp0 = save_expr (exp0);
6504 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6505 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6510 /* Return a MEM that contains constant EXP. DEFER is as for
6511 output_constant_def and MODIFIER is as for expand_expr. */
6513 static rtx
6514 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6516 rtx mem;
6518 mem = output_constant_def (exp, defer);
6519 if (modifier != EXPAND_INITIALIZER)
6520 mem = use_anchored_address (mem);
6521 return mem;
6524 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6525 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6527 static rtx
6528 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6529 enum expand_modifier modifier)
6531 rtx result, subtarget;
6532 tree inner, offset;
6533 HOST_WIDE_INT bitsize, bitpos;
6534 int volatilep, unsignedp;
6535 enum machine_mode mode1;
6537 /* If we are taking the address of a constant and are at the top level,
6538 we have to use output_constant_def since we can't call force_const_mem
6539 at top level. */
6540 /* ??? This should be considered a front-end bug. We should not be
6541 generating ADDR_EXPR of something that isn't an LVALUE. The only
6542 exception here is STRING_CST. */
6543 if (TREE_CODE (exp) == CONSTRUCTOR
6544 || CONSTANT_CLASS_P (exp))
6545 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6547 /* Everything must be something allowed by is_gimple_addressable. */
6548 switch (TREE_CODE (exp))
6550 case INDIRECT_REF:
6551 /* This case will happen via recursion for &a->b. */
6552 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6554 case CONST_DECL:
6555 /* Recurse and make the output_constant_def clause above handle this. */
6556 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6557 tmode, modifier);
6559 case REALPART_EXPR:
6560 /* The real part of the complex number is always first, therefore
6561 the address is the same as the address of the parent object. */
6562 offset = 0;
6563 bitpos = 0;
6564 inner = TREE_OPERAND (exp, 0);
6565 break;
6567 case IMAGPART_EXPR:
6568 /* The imaginary part of the complex number is always second.
6569 The expression is therefore always offset by the size of the
6570 scalar type. */
6571 offset = 0;
6572 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6573 inner = TREE_OPERAND (exp, 0);
6574 break;
6576 case VAR_DECL:
6577 /* TLS emulation hook - replace __thread VAR's &VAR with
6578 __emutls_get_address (&_emutls.VAR). */
6579 if (! targetm.have_tls
6580 && TREE_CODE (exp) == VAR_DECL
6581 && DECL_THREAD_LOCAL_P (exp))
6583 exp = emutls_var_address (exp);
6584 return expand_expr (exp, target, tmode, modifier);
6586 /* Fall through. */
6588 default:
6589 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6590 expand_expr, as that can have various side effects; LABEL_DECLs for
6591 example, may not have their DECL_RTL set yet. Assume language
6592 specific tree nodes can be expanded in some interesting way. */
6593 if (DECL_P (exp)
6594 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6596 result = expand_expr (exp, target, tmode,
6597 modifier == EXPAND_INITIALIZER
6598 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6600 /* If the DECL isn't in memory, then the DECL wasn't properly
6601 marked TREE_ADDRESSABLE, which will be either a front-end
6602 or a tree optimizer bug. */
6603 gcc_assert (MEM_P (result));
6604 result = XEXP (result, 0);
6606 /* ??? Is this needed anymore? */
6607 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6609 assemble_external (exp);
6610 TREE_USED (exp) = 1;
6613 if (modifier != EXPAND_INITIALIZER
6614 && modifier != EXPAND_CONST_ADDRESS)
6615 result = force_operand (result, target);
6616 return result;
6619 /* Pass FALSE as the last argument to get_inner_reference although
6620 we are expanding to RTL. The rationale is that we know how to
6621 handle "aligning nodes" here: we can just bypass them because
6622 they won't change the final object whose address will be returned
6623 (they actually exist only for that purpose). */
6624 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6625 &mode1, &unsignedp, &volatilep, false);
6626 break;
6629 /* We must have made progress. */
6630 gcc_assert (inner != exp);
6632 subtarget = offset || bitpos ? NULL_RTX : target;
6633 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6635 if (offset)
6637 rtx tmp;
6639 if (modifier != EXPAND_NORMAL)
6640 result = force_operand (result, NULL);
6641 tmp = expand_expr (offset, NULL_RTX, tmode, EXPAND_NORMAL);
6643 result = convert_memory_address (tmode, result);
6644 tmp = convert_memory_address (tmode, tmp);
6646 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6647 result = gen_rtx_PLUS (tmode, result, tmp);
6648 else
6650 subtarget = bitpos ? NULL_RTX : target;
6651 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6652 1, OPTAB_LIB_WIDEN);
6656 if (bitpos)
6658 /* Someone beforehand should have rejected taking the address
6659 of such an object. */
6660 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6662 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6663 if (modifier < EXPAND_SUM)
6664 result = force_operand (result, target);
6667 return result;
6670 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6671 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6673 static rtx
6674 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6675 enum expand_modifier modifier)
6677 enum machine_mode rmode;
6678 rtx result;
6680 /* Target mode of VOIDmode says "whatever's natural". */
6681 if (tmode == VOIDmode)
6682 tmode = TYPE_MODE (TREE_TYPE (exp));
6684 /* We can get called with some Weird Things if the user does silliness
6685 like "(short) &a". In that case, convert_memory_address won't do
6686 the right thing, so ignore the given target mode. */
6687 if (tmode != Pmode && tmode != ptr_mode)
6688 tmode = Pmode;
6690 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6691 tmode, modifier);
6693 /* Despite expand_expr claims concerning ignoring TMODE when not
6694 strictly convenient, stuff breaks if we don't honor it. Note
6695 that combined with the above, we only do this for pointer modes. */
6696 rmode = GET_MODE (result);
6697 if (rmode == VOIDmode)
6698 rmode = tmode;
6699 if (rmode != tmode)
6700 result = convert_memory_address (tmode, result);
6702 return result;
6706 /* expand_expr: generate code for computing expression EXP.
6707 An rtx for the computed value is returned. The value is never null.
6708 In the case of a void EXP, const0_rtx is returned.
6710 The value may be stored in TARGET if TARGET is nonzero.
6711 TARGET is just a suggestion; callers must assume that
6712 the rtx returned may not be the same as TARGET.
6714 If TARGET is CONST0_RTX, it means that the value will be ignored.
6716 If TMODE is not VOIDmode, it suggests generating the
6717 result in mode TMODE. But this is done only when convenient.
6718 Otherwise, TMODE is ignored and the value generated in its natural mode.
6719 TMODE is just a suggestion; callers must assume that
6720 the rtx returned may not have mode TMODE.
6722 Note that TARGET may have neither TMODE nor MODE. In that case, it
6723 probably will not be used.
6725 If MODIFIER is EXPAND_SUM then when EXP is an addition
6726 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6727 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6728 products as above, or REG or MEM, or constant.
6729 Ordinarily in such cases we would output mul or add instructions
6730 and then return a pseudo reg containing the sum.
6732 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6733 it also marks a label as absolutely required (it can't be dead).
6734 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6735 This is used for outputting expressions used in initializers.
6737 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6738 with a constant address even if that address is not normally legitimate.
6739 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6741 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6742 a call parameter. Such targets require special care as we haven't yet
6743 marked TARGET so that it's safe from being trashed by libcalls. We
6744 don't want to use TARGET for anything but the final result;
6745 Intermediate values must go elsewhere. Additionally, calls to
6746 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6748 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6749 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6750 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6751 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6752 recursively. */
6754 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6755 enum expand_modifier, rtx *);
6758 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6759 enum expand_modifier modifier, rtx *alt_rtl)
6761 int rn = -1;
6762 rtx ret, last = NULL;
6764 /* Handle ERROR_MARK before anybody tries to access its type. */
6765 if (TREE_CODE (exp) == ERROR_MARK
6766 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6768 ret = CONST0_RTX (tmode);
6769 return ret ? ret : const0_rtx;
6772 if (flag_non_call_exceptions)
6774 rn = lookup_stmt_eh_region (exp);
6775 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6776 if (rn >= 0)
6777 last = get_last_insn ();
6780 /* If this is an expression of some kind and it has an associated line
6781 number, then emit the line number before expanding the expression.
6783 We need to save and restore the file and line information so that
6784 errors discovered during expansion are emitted with the right
6785 information. It would be better of the diagnostic routines
6786 used the file/line information embedded in the tree nodes rather
6787 than globals. */
6788 if (cfun && EXPR_HAS_LOCATION (exp))
6790 location_t saved_location = input_location;
6791 input_location = EXPR_LOCATION (exp);
6792 set_curr_insn_source_location (input_location);
6794 /* Record where the insns produced belong. */
6795 set_curr_insn_block (TREE_BLOCK (exp));
6797 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6799 input_location = saved_location;
6801 else
6803 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6806 /* If using non-call exceptions, mark all insns that may trap.
6807 expand_call() will mark CALL_INSNs before we get to this code,
6808 but it doesn't handle libcalls, and these may trap. */
6809 if (rn >= 0)
6811 rtx insn;
6812 for (insn = next_real_insn (last); insn;
6813 insn = next_real_insn (insn))
6815 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6816 /* If we want exceptions for non-call insns, any
6817 may_trap_p instruction may throw. */
6818 && GET_CODE (PATTERN (insn)) != CLOBBER
6819 && GET_CODE (PATTERN (insn)) != USE
6820 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6822 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6823 REG_NOTES (insn));
6828 return ret;
6831 static rtx
6832 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6833 enum expand_modifier modifier, rtx *alt_rtl)
6835 rtx op0, op1, op2, temp, decl_rtl;
6836 tree type;
6837 int unsignedp;
6838 enum machine_mode mode;
6839 enum tree_code code = TREE_CODE (exp);
6840 optab this_optab;
6841 rtx subtarget, original_target;
6842 int ignore;
6843 tree context, subexp0, subexp1;
6844 bool reduce_bit_field = false;
6845 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6846 ? reduce_to_bit_field_precision ((expr), \
6847 target, \
6848 type) \
6849 : (expr))
6851 if (GIMPLE_STMT_P (exp))
6853 type = void_type_node;
6854 mode = VOIDmode;
6855 unsignedp = 0;
6857 else
6859 type = TREE_TYPE (exp);
6860 mode = TYPE_MODE (type);
6861 unsignedp = TYPE_UNSIGNED (type);
6863 if (lang_hooks.reduce_bit_field_operations
6864 && TREE_CODE (type) == INTEGER_TYPE
6865 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6867 /* An operation in what may be a bit-field type needs the
6868 result to be reduced to the precision of the bit-field type,
6869 which is narrower than that of the type's mode. */
6870 reduce_bit_field = true;
6871 if (modifier == EXPAND_STACK_PARM)
6872 target = 0;
6875 /* Use subtarget as the target for operand 0 of a binary operation. */
6876 subtarget = get_subtarget (target);
6877 original_target = target;
6878 ignore = (target == const0_rtx
6879 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6880 || code == CONVERT_EXPR || code == COND_EXPR
6881 || code == VIEW_CONVERT_EXPR)
6882 && TREE_CODE (type) == VOID_TYPE));
6884 /* If we are going to ignore this result, we need only do something
6885 if there is a side-effect somewhere in the expression. If there
6886 is, short-circuit the most common cases here. Note that we must
6887 not call expand_expr with anything but const0_rtx in case this
6888 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6890 if (ignore)
6892 if (! TREE_SIDE_EFFECTS (exp))
6893 return const0_rtx;
6895 /* Ensure we reference a volatile object even if value is ignored, but
6896 don't do this if all we are doing is taking its address. */
6897 if (TREE_THIS_VOLATILE (exp)
6898 && TREE_CODE (exp) != FUNCTION_DECL
6899 && mode != VOIDmode && mode != BLKmode
6900 && modifier != EXPAND_CONST_ADDRESS)
6902 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6903 if (MEM_P (temp))
6904 temp = copy_to_reg (temp);
6905 return const0_rtx;
6908 if (TREE_CODE_CLASS (code) == tcc_unary
6909 || code == COMPONENT_REF || code == INDIRECT_REF)
6910 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6911 modifier);
6913 else if (TREE_CODE_CLASS (code) == tcc_binary
6914 || TREE_CODE_CLASS (code) == tcc_comparison
6915 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6917 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6918 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6919 return const0_rtx;
6921 else if (code == BIT_FIELD_REF)
6923 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6924 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6925 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6926 return const0_rtx;
6929 target = 0;
6933 switch (code)
6935 case LABEL_DECL:
6937 tree function = decl_function_context (exp);
6939 temp = label_rtx (exp);
6940 temp = gen_rtx_LABEL_REF (Pmode, temp);
6942 if (function != current_function_decl
6943 && function != 0)
6944 LABEL_REF_NONLOCAL_P (temp) = 1;
6946 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6947 return temp;
6950 case SSA_NAME:
6951 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6952 NULL);
6954 case PARM_DECL:
6955 case VAR_DECL:
6956 /* If a static var's type was incomplete when the decl was written,
6957 but the type is complete now, lay out the decl now. */
6958 if (DECL_SIZE (exp) == 0
6959 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6960 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6961 layout_decl (exp, 0);
6963 /* TLS emulation hook - replace __thread vars with
6964 *__emutls_get_address (&_emutls.var). */
6965 if (! targetm.have_tls
6966 && TREE_CODE (exp) == VAR_DECL
6967 && DECL_THREAD_LOCAL_P (exp))
6969 exp = build_fold_indirect_ref (emutls_var_address (exp));
6970 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
6973 /* ... fall through ... */
6975 case FUNCTION_DECL:
6976 case RESULT_DECL:
6977 decl_rtl = DECL_RTL (exp);
6978 gcc_assert (decl_rtl);
6979 decl_rtl = copy_rtx (decl_rtl);
6981 /* Ensure variable marked as used even if it doesn't go through
6982 a parser. If it hasn't be used yet, write out an external
6983 definition. */
6984 if (! TREE_USED (exp))
6986 assemble_external (exp);
6987 TREE_USED (exp) = 1;
6990 /* Show we haven't gotten RTL for this yet. */
6991 temp = 0;
6993 /* Variables inherited from containing functions should have
6994 been lowered by this point. */
6995 context = decl_function_context (exp);
6996 gcc_assert (!context
6997 || context == current_function_decl
6998 || TREE_STATIC (exp)
6999 /* ??? C++ creates functions that are not TREE_STATIC. */
7000 || TREE_CODE (exp) == FUNCTION_DECL);
7002 /* This is the case of an array whose size is to be determined
7003 from its initializer, while the initializer is still being parsed.
7004 See expand_decl. */
7006 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7007 temp = validize_mem (decl_rtl);
7009 /* If DECL_RTL is memory, we are in the normal case and either
7010 the address is not valid or it is not a register and -fforce-addr
7011 is specified, get the address into a register. */
7013 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7015 if (alt_rtl)
7016 *alt_rtl = decl_rtl;
7017 decl_rtl = use_anchored_address (decl_rtl);
7018 if (modifier != EXPAND_CONST_ADDRESS
7019 && modifier != EXPAND_SUM
7020 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7021 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7022 temp = replace_equiv_address (decl_rtl,
7023 copy_rtx (XEXP (decl_rtl, 0)));
7026 /* If we got something, return it. But first, set the alignment
7027 if the address is a register. */
7028 if (temp != 0)
7030 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7031 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7033 return temp;
7036 /* If the mode of DECL_RTL does not match that of the decl, it
7037 must be a promoted value. We return a SUBREG of the wanted mode,
7038 but mark it so that we know that it was already extended. */
7040 if (REG_P (decl_rtl)
7041 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7043 enum machine_mode pmode;
7045 /* Get the signedness used for this variable. Ensure we get the
7046 same mode we got when the variable was declared. */
7047 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7048 (TREE_CODE (exp) == RESULT_DECL
7049 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7050 gcc_assert (GET_MODE (decl_rtl) == pmode);
7052 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7053 SUBREG_PROMOTED_VAR_P (temp) = 1;
7054 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7055 return temp;
7058 return decl_rtl;
7060 case INTEGER_CST:
7061 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7062 TREE_INT_CST_HIGH (exp), mode);
7064 /* ??? If overflow is set, fold will have done an incomplete job,
7065 which can result in (plus xx (const_int 0)), which can get
7066 simplified by validate_replace_rtx during virtual register
7067 instantiation, which can result in unrecognizable insns.
7068 Avoid this by forcing all overflows into registers. */
7069 if (TREE_OVERFLOW (exp)
7070 && modifier != EXPAND_INITIALIZER)
7071 temp = force_reg (mode, temp);
7073 return temp;
7075 case VECTOR_CST:
7077 tree tmp = NULL_TREE;
7078 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7079 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7080 return const_vector_from_tree (exp);
7081 if (GET_MODE_CLASS (mode) == MODE_INT)
7083 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7084 if (type_for_mode)
7085 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7087 if (!tmp)
7088 tmp = build_constructor_from_list (type,
7089 TREE_VECTOR_CST_ELTS (exp));
7090 return expand_expr (tmp, ignore ? const0_rtx : target,
7091 tmode, modifier);
7094 case CONST_DECL:
7095 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7097 case REAL_CST:
7098 /* If optimized, generate immediate CONST_DOUBLE
7099 which will be turned into memory by reload if necessary.
7101 We used to force a register so that loop.c could see it. But
7102 this does not allow gen_* patterns to perform optimizations with
7103 the constants. It also produces two insns in cases like "x = 1.0;".
7104 On most machines, floating-point constants are not permitted in
7105 many insns, so we'd end up copying it to a register in any case.
7107 Now, we do the copying in expand_binop, if appropriate. */
7108 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7109 TYPE_MODE (TREE_TYPE (exp)));
7111 case COMPLEX_CST:
7112 /* Handle evaluating a complex constant in a CONCAT target. */
7113 if (original_target && GET_CODE (original_target) == CONCAT)
7115 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7116 rtx rtarg, itarg;
7118 rtarg = XEXP (original_target, 0);
7119 itarg = XEXP (original_target, 1);
7121 /* Move the real and imaginary parts separately. */
7122 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7123 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7125 if (op0 != rtarg)
7126 emit_move_insn (rtarg, op0);
7127 if (op1 != itarg)
7128 emit_move_insn (itarg, op1);
7130 return original_target;
7133 /* ... fall through ... */
7135 case STRING_CST:
7136 temp = expand_expr_constant (exp, 1, modifier);
7138 /* temp contains a constant address.
7139 On RISC machines where a constant address isn't valid,
7140 make some insns to get that address into a register. */
7141 if (modifier != EXPAND_CONST_ADDRESS
7142 && modifier != EXPAND_INITIALIZER
7143 && modifier != EXPAND_SUM
7144 && (! memory_address_p (mode, XEXP (temp, 0))
7145 || flag_force_addr))
7146 return replace_equiv_address (temp,
7147 copy_rtx (XEXP (temp, 0)));
7148 return temp;
7150 case SAVE_EXPR:
7152 tree val = TREE_OPERAND (exp, 0);
7153 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7155 if (!SAVE_EXPR_RESOLVED_P (exp))
7157 /* We can indeed still hit this case, typically via builtin
7158 expanders calling save_expr immediately before expanding
7159 something. Assume this means that we only have to deal
7160 with non-BLKmode values. */
7161 gcc_assert (GET_MODE (ret) != BLKmode);
7163 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7164 DECL_ARTIFICIAL (val) = 1;
7165 DECL_IGNORED_P (val) = 1;
7166 TREE_OPERAND (exp, 0) = val;
7167 SAVE_EXPR_RESOLVED_P (exp) = 1;
7169 if (!CONSTANT_P (ret))
7170 ret = copy_to_reg (ret);
7171 SET_DECL_RTL (val, ret);
7174 return ret;
7177 case GOTO_EXPR:
7178 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7179 expand_goto (TREE_OPERAND (exp, 0));
7180 else
7181 expand_computed_goto (TREE_OPERAND (exp, 0));
7182 return const0_rtx;
7184 case CONSTRUCTOR:
7185 /* If we don't need the result, just ensure we evaluate any
7186 subexpressions. */
7187 if (ignore)
7189 unsigned HOST_WIDE_INT idx;
7190 tree value;
7192 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7193 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7195 return const0_rtx;
7198 /* Try to avoid creating a temporary at all. This is possible
7199 if all of the initializer is zero.
7200 FIXME: try to handle all [0..255] initializers we can handle
7201 with memset. */
7202 else if (TREE_STATIC (exp)
7203 && !TREE_ADDRESSABLE (exp)
7204 && target != 0 && mode == BLKmode
7205 && all_zeros_p (exp))
7207 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7208 return target;
7211 /* All elts simple constants => refer to a constant in memory. But
7212 if this is a non-BLKmode mode, let it store a field at a time
7213 since that should make a CONST_INT or CONST_DOUBLE when we
7214 fold. Likewise, if we have a target we can use, it is best to
7215 store directly into the target unless the type is large enough
7216 that memcpy will be used. If we are making an initializer and
7217 all operands are constant, put it in memory as well.
7219 FIXME: Avoid trying to fill vector constructors piece-meal.
7220 Output them with output_constant_def below unless we're sure
7221 they're zeros. This should go away when vector initializers
7222 are treated like VECTOR_CST instead of arrays.
7224 else if ((TREE_STATIC (exp)
7225 && ((mode == BLKmode
7226 && ! (target != 0 && safe_from_p (target, exp, 1)))
7227 || TREE_ADDRESSABLE (exp)
7228 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7229 && (! MOVE_BY_PIECES_P
7230 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7231 TYPE_ALIGN (type)))
7232 && ! mostly_zeros_p (exp))))
7233 || ((modifier == EXPAND_INITIALIZER
7234 || modifier == EXPAND_CONST_ADDRESS)
7235 && TREE_CONSTANT (exp)))
7237 rtx constructor = expand_expr_constant (exp, 1, modifier);
7239 if (modifier != EXPAND_CONST_ADDRESS
7240 && modifier != EXPAND_INITIALIZER
7241 && modifier != EXPAND_SUM)
7242 constructor = validize_mem (constructor);
7244 return constructor;
7246 else
7248 /* Handle calls that pass values in multiple non-contiguous
7249 locations. The Irix 6 ABI has examples of this. */
7250 if (target == 0 || ! safe_from_p (target, exp, 1)
7251 || GET_CODE (target) == PARALLEL
7252 || modifier == EXPAND_STACK_PARM)
7253 target
7254 = assign_temp (build_qualified_type (type,
7255 (TYPE_QUALS (type)
7256 | (TREE_READONLY (exp)
7257 * TYPE_QUAL_CONST))),
7258 0, TREE_ADDRESSABLE (exp), 1);
7260 store_constructor (exp, target, 0, int_expr_size (exp));
7261 return target;
7264 case MISALIGNED_INDIRECT_REF:
7265 case ALIGN_INDIRECT_REF:
7266 case INDIRECT_REF:
7268 tree exp1 = TREE_OPERAND (exp, 0);
7270 if (modifier != EXPAND_WRITE)
7272 tree t;
7274 t = fold_read_from_constant_string (exp);
7275 if (t)
7276 return expand_expr (t, target, tmode, modifier);
7279 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7280 op0 = memory_address (mode, op0);
7282 if (code == ALIGN_INDIRECT_REF)
7284 int align = TYPE_ALIGN_UNIT (type);
7285 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7286 op0 = memory_address (mode, op0);
7289 temp = gen_rtx_MEM (mode, op0);
7291 set_mem_attributes (temp, exp, 0);
7293 /* Resolve the misalignment now, so that we don't have to remember
7294 to resolve it later. Of course, this only works for reads. */
7295 /* ??? When we get around to supporting writes, we'll have to handle
7296 this in store_expr directly. The vectorizer isn't generating
7297 those yet, however. */
7298 if (code == MISALIGNED_INDIRECT_REF)
7300 int icode;
7301 rtx reg, insn;
7303 gcc_assert (modifier == EXPAND_NORMAL
7304 || modifier == EXPAND_STACK_PARM);
7306 /* The vectorizer should have already checked the mode. */
7307 icode = movmisalign_optab->handlers[mode].insn_code;
7308 gcc_assert (icode != CODE_FOR_nothing);
7310 /* We've already validated the memory, and we're creating a
7311 new pseudo destination. The predicates really can't fail. */
7312 reg = gen_reg_rtx (mode);
7314 /* Nor can the insn generator. */
7315 insn = GEN_FCN (icode) (reg, temp);
7316 emit_insn (insn);
7318 return reg;
7321 return temp;
7324 case TARGET_MEM_REF:
7326 struct mem_address addr;
7328 get_address_description (exp, &addr);
7329 op0 = addr_for_mem_ref (&addr, true);
7330 op0 = memory_address (mode, op0);
7331 temp = gen_rtx_MEM (mode, op0);
7332 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7334 return temp;
7336 case ARRAY_REF:
7339 tree array = TREE_OPERAND (exp, 0);
7340 tree index = TREE_OPERAND (exp, 1);
7342 /* Fold an expression like: "foo"[2].
7343 This is not done in fold so it won't happen inside &.
7344 Don't fold if this is for wide characters since it's too
7345 difficult to do correctly and this is a very rare case. */
7347 if (modifier != EXPAND_CONST_ADDRESS
7348 && modifier != EXPAND_INITIALIZER
7349 && modifier != EXPAND_MEMORY)
7351 tree t = fold_read_from_constant_string (exp);
7353 if (t)
7354 return expand_expr (t, target, tmode, modifier);
7357 /* If this is a constant index into a constant array,
7358 just get the value from the array. Handle both the cases when
7359 we have an explicit constructor and when our operand is a variable
7360 that was declared const. */
7362 if (modifier != EXPAND_CONST_ADDRESS
7363 && modifier != EXPAND_INITIALIZER
7364 && modifier != EXPAND_MEMORY
7365 && TREE_CODE (array) == CONSTRUCTOR
7366 && ! TREE_SIDE_EFFECTS (array)
7367 && TREE_CODE (index) == INTEGER_CST)
7369 unsigned HOST_WIDE_INT ix;
7370 tree field, value;
7372 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7373 field, value)
7374 if (tree_int_cst_equal (field, index))
7376 if (!TREE_SIDE_EFFECTS (value))
7377 return expand_expr (fold (value), target, tmode, modifier);
7378 break;
7382 else if (optimize >= 1
7383 && modifier != EXPAND_CONST_ADDRESS
7384 && modifier != EXPAND_INITIALIZER
7385 && modifier != EXPAND_MEMORY
7386 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7387 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7388 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7389 && targetm.binds_local_p (array))
7391 if (TREE_CODE (index) == INTEGER_CST)
7393 tree init = DECL_INITIAL (array);
7395 if (TREE_CODE (init) == CONSTRUCTOR)
7397 unsigned HOST_WIDE_INT ix;
7398 tree field, value;
7400 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7401 field, value)
7402 if (tree_int_cst_equal (field, index))
7404 if (!TREE_SIDE_EFFECTS (value))
7405 return expand_expr (fold (value), target, tmode,
7406 modifier);
7407 break;
7410 else if(TREE_CODE (init) == STRING_CST)
7412 tree index1 = index;
7413 tree low_bound = array_ref_low_bound (exp);
7414 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7416 /* Optimize the special-case of a zero lower bound.
7418 We convert the low_bound to sizetype to avoid some problems
7419 with constant folding. (E.g. suppose the lower bound is 1,
7420 and its mode is QI. Without the conversion,l (ARRAY
7421 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7422 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
7424 if (! integer_zerop (low_bound))
7425 index1 = size_diffop (index1, fold_convert (sizetype,
7426 low_bound));
7428 if (0 > compare_tree_int (index1,
7429 TREE_STRING_LENGTH (init)))
7431 tree type = TREE_TYPE (TREE_TYPE (init));
7432 enum machine_mode mode = TYPE_MODE (type);
7434 if (GET_MODE_CLASS (mode) == MODE_INT
7435 && GET_MODE_SIZE (mode) == 1)
7436 return gen_int_mode (TREE_STRING_POINTER (init)
7437 [TREE_INT_CST_LOW (index1)],
7438 mode);
7444 goto normal_inner_ref;
7446 case COMPONENT_REF:
7447 /* If the operand is a CONSTRUCTOR, we can just extract the
7448 appropriate field if it is present. */
7449 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7451 unsigned HOST_WIDE_INT idx;
7452 tree field, value;
7454 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7455 idx, field, value)
7456 if (field == TREE_OPERAND (exp, 1)
7457 /* We can normally use the value of the field in the
7458 CONSTRUCTOR. However, if this is a bitfield in
7459 an integral mode that we can fit in a HOST_WIDE_INT,
7460 we must mask only the number of bits in the bitfield,
7461 since this is done implicitly by the constructor. If
7462 the bitfield does not meet either of those conditions,
7463 we can't do this optimization. */
7464 && (! DECL_BIT_FIELD (field)
7465 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7466 && (GET_MODE_BITSIZE (DECL_MODE (field))
7467 <= HOST_BITS_PER_WIDE_INT))))
7469 if (DECL_BIT_FIELD (field)
7470 && modifier == EXPAND_STACK_PARM)
7471 target = 0;
7472 op0 = expand_expr (value, target, tmode, modifier);
7473 if (DECL_BIT_FIELD (field))
7475 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7476 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7478 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7480 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7481 op0 = expand_and (imode, op0, op1, target);
7483 else
7485 tree count
7486 = build_int_cst (NULL_TREE,
7487 GET_MODE_BITSIZE (imode) - bitsize);
7489 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7490 target, 0);
7491 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7492 target, 0);
7496 return op0;
7499 goto normal_inner_ref;
7501 case BIT_FIELD_REF:
7502 case ARRAY_RANGE_REF:
7503 normal_inner_ref:
7505 enum machine_mode mode1;
7506 HOST_WIDE_INT bitsize, bitpos;
7507 tree offset;
7508 int volatilep = 0;
7509 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7510 &mode1, &unsignedp, &volatilep, true);
7511 rtx orig_op0;
7513 /* If we got back the original object, something is wrong. Perhaps
7514 we are evaluating an expression too early. In any event, don't
7515 infinitely recurse. */
7516 gcc_assert (tem != exp);
7518 /* If TEM's type is a union of variable size, pass TARGET to the inner
7519 computation, since it will need a temporary and TARGET is known
7520 to have to do. This occurs in unchecked conversion in Ada. */
7522 orig_op0 = op0
7523 = expand_expr (tem,
7524 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7525 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7526 != INTEGER_CST)
7527 && modifier != EXPAND_STACK_PARM
7528 ? target : NULL_RTX),
7529 VOIDmode,
7530 (modifier == EXPAND_INITIALIZER
7531 || modifier == EXPAND_CONST_ADDRESS
7532 || modifier == EXPAND_STACK_PARM)
7533 ? modifier : EXPAND_NORMAL);
7535 /* If this is a constant, put it into a register if it is a legitimate
7536 constant, OFFSET is 0, and we won't try to extract outside the
7537 register (in case we were passed a partially uninitialized object
7538 or a view_conversion to a larger size). Force the constant to
7539 memory otherwise. */
7540 if (CONSTANT_P (op0))
7542 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7543 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7544 && offset == 0
7545 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7546 op0 = force_reg (mode, op0);
7547 else
7548 op0 = validize_mem (force_const_mem (mode, op0));
7551 /* Otherwise, if this object not in memory and we either have an
7552 offset, a BLKmode result, or a reference outside the object, put it
7553 there. Such cases can occur in Ada if we have unchecked conversion
7554 of an expression from a scalar type to an array or record type or
7555 for an ARRAY_RANGE_REF whose type is BLKmode. */
7556 else if (!MEM_P (op0)
7557 && (offset != 0
7558 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7559 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7561 tree nt = build_qualified_type (TREE_TYPE (tem),
7562 (TYPE_QUALS (TREE_TYPE (tem))
7563 | TYPE_QUAL_CONST));
7564 rtx memloc = assign_temp (nt, 1, 1, 1);
7566 emit_move_insn (memloc, op0);
7567 op0 = memloc;
7570 if (offset != 0)
7572 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7573 EXPAND_SUM);
7575 gcc_assert (MEM_P (op0));
7577 #ifdef POINTERS_EXTEND_UNSIGNED
7578 if (GET_MODE (offset_rtx) != Pmode)
7579 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7580 #else
7581 if (GET_MODE (offset_rtx) != ptr_mode)
7582 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7583 #endif
7585 if (GET_MODE (op0) == BLKmode
7586 /* A constant address in OP0 can have VOIDmode, we must
7587 not try to call force_reg in that case. */
7588 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7589 && bitsize != 0
7590 && (bitpos % bitsize) == 0
7591 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7592 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7594 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7595 bitpos = 0;
7598 op0 = offset_address (op0, offset_rtx,
7599 highest_pow2_factor (offset));
7602 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7603 record its alignment as BIGGEST_ALIGNMENT. */
7604 if (MEM_P (op0) && bitpos == 0 && offset != 0
7605 && is_aligning_offset (offset, tem))
7606 set_mem_align (op0, BIGGEST_ALIGNMENT);
7608 /* Don't forget about volatility even if this is a bitfield. */
7609 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7611 if (op0 == orig_op0)
7612 op0 = copy_rtx (op0);
7614 MEM_VOLATILE_P (op0) = 1;
7617 /* The following code doesn't handle CONCAT.
7618 Assume only bitpos == 0 can be used for CONCAT, due to
7619 one element arrays having the same mode as its element. */
7620 if (GET_CODE (op0) == CONCAT)
7622 gcc_assert (bitpos == 0
7623 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7624 return op0;
7627 /* In cases where an aligned union has an unaligned object
7628 as a field, we might be extracting a BLKmode value from
7629 an integer-mode (e.g., SImode) object. Handle this case
7630 by doing the extract into an object as wide as the field
7631 (which we know to be the width of a basic mode), then
7632 storing into memory, and changing the mode to BLKmode. */
7633 if (mode1 == VOIDmode
7634 || REG_P (op0) || GET_CODE (op0) == SUBREG
7635 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7636 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7637 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7638 && modifier != EXPAND_CONST_ADDRESS
7639 && modifier != EXPAND_INITIALIZER)
7640 /* If the field isn't aligned enough to fetch as a memref,
7641 fetch it as a bit field. */
7642 || (mode1 != BLKmode
7643 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7644 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7645 || (MEM_P (op0)
7646 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7647 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7648 && ((modifier == EXPAND_CONST_ADDRESS
7649 || modifier == EXPAND_INITIALIZER)
7650 ? STRICT_ALIGNMENT
7651 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7652 || (bitpos % BITS_PER_UNIT != 0)))
7653 /* If the type and the field are a constant size and the
7654 size of the type isn't the same size as the bitfield,
7655 we must use bitfield operations. */
7656 || (bitsize >= 0
7657 && TYPE_SIZE (TREE_TYPE (exp))
7658 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7659 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7660 bitsize)))
7662 enum machine_mode ext_mode = mode;
7664 if (ext_mode == BLKmode
7665 && ! (target != 0 && MEM_P (op0)
7666 && MEM_P (target)
7667 && bitpos % BITS_PER_UNIT == 0))
7668 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7670 if (ext_mode == BLKmode)
7672 if (target == 0)
7673 target = assign_temp (type, 0, 1, 1);
7675 if (bitsize == 0)
7676 return target;
7678 /* In this case, BITPOS must start at a byte boundary and
7679 TARGET, if specified, must be a MEM. */
7680 gcc_assert (MEM_P (op0)
7681 && (!target || MEM_P (target))
7682 && !(bitpos % BITS_PER_UNIT));
7684 emit_block_move (target,
7685 adjust_address (op0, VOIDmode,
7686 bitpos / BITS_PER_UNIT),
7687 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7688 / BITS_PER_UNIT),
7689 (modifier == EXPAND_STACK_PARM
7690 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7692 return target;
7695 op0 = validize_mem (op0);
7697 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7698 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7700 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7701 (modifier == EXPAND_STACK_PARM
7702 ? NULL_RTX : target),
7703 ext_mode, ext_mode);
7705 /* If the result is a record type and BITSIZE is narrower than
7706 the mode of OP0, an integral mode, and this is a big endian
7707 machine, we must put the field into the high-order bits. */
7708 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7709 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7710 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7711 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7712 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7713 - bitsize),
7714 op0, 1);
7716 /* If the result type is BLKmode, store the data into a temporary
7717 of the appropriate type, but with the mode corresponding to the
7718 mode for the data we have (op0's mode). It's tempting to make
7719 this a constant type, since we know it's only being stored once,
7720 but that can cause problems if we are taking the address of this
7721 COMPONENT_REF because the MEM of any reference via that address
7722 will have flags corresponding to the type, which will not
7723 necessarily be constant. */
7724 if (mode == BLKmode)
7726 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7727 rtx new;
7729 /* If the reference doesn't use the alias set of its type,
7730 we cannot create the temporary using that type. */
7731 if (component_uses_parent_alias_set (exp))
7733 new = assign_stack_local (ext_mode, size, 0);
7734 set_mem_alias_set (new, get_alias_set (exp));
7736 else
7737 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7739 emit_move_insn (new, op0);
7740 op0 = copy_rtx (new);
7741 PUT_MODE (op0, BLKmode);
7742 set_mem_attributes (op0, exp, 1);
7745 return op0;
7748 /* If the result is BLKmode, use that to access the object
7749 now as well. */
7750 if (mode == BLKmode)
7751 mode1 = BLKmode;
7753 /* Get a reference to just this component. */
7754 if (modifier == EXPAND_CONST_ADDRESS
7755 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7756 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7757 else
7758 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7760 if (op0 == orig_op0)
7761 op0 = copy_rtx (op0);
7763 set_mem_attributes (op0, exp, 0);
7764 if (REG_P (XEXP (op0, 0)))
7765 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7767 MEM_VOLATILE_P (op0) |= volatilep;
7768 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7769 || modifier == EXPAND_CONST_ADDRESS
7770 || modifier == EXPAND_INITIALIZER)
7771 return op0;
7772 else if (target == 0)
7773 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7775 convert_move (target, op0, unsignedp);
7776 return target;
7779 case OBJ_TYPE_REF:
7780 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7782 case CALL_EXPR:
7783 /* Check for a built-in function. */
7784 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7785 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7786 == FUNCTION_DECL)
7787 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7789 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7790 == BUILT_IN_FRONTEND)
7791 return lang_hooks.expand_expr (exp, original_target,
7792 tmode, modifier,
7793 alt_rtl);
7794 else
7795 return expand_builtin (exp, target, subtarget, tmode, ignore);
7798 return expand_call (exp, target, ignore);
7800 case NON_LVALUE_EXPR:
7801 case NOP_EXPR:
7802 case CONVERT_EXPR:
7803 if (TREE_OPERAND (exp, 0) == error_mark_node)
7804 return const0_rtx;
7806 if (TREE_CODE (type) == UNION_TYPE)
7808 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7810 /* If both input and output are BLKmode, this conversion isn't doing
7811 anything except possibly changing memory attribute. */
7812 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7814 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7815 modifier);
7817 result = copy_rtx (result);
7818 set_mem_attributes (result, exp, 0);
7819 return result;
7822 if (target == 0)
7824 if (TYPE_MODE (type) != BLKmode)
7825 target = gen_reg_rtx (TYPE_MODE (type));
7826 else
7827 target = assign_temp (type, 0, 1, 1);
7830 if (MEM_P (target))
7831 /* Store data into beginning of memory target. */
7832 store_expr (TREE_OPERAND (exp, 0),
7833 adjust_address (target, TYPE_MODE (valtype), 0),
7834 modifier == EXPAND_STACK_PARM);
7836 else
7838 gcc_assert (REG_P (target));
7840 /* Store this field into a union of the proper type. */
7841 store_field (target,
7842 MIN ((int_size_in_bytes (TREE_TYPE
7843 (TREE_OPERAND (exp, 0)))
7844 * BITS_PER_UNIT),
7845 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7846 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7847 type, 0);
7850 /* Return the entire union. */
7851 return target;
7854 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7856 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7857 modifier);
7859 /* If the signedness of the conversion differs and OP0 is
7860 a promoted SUBREG, clear that indication since we now
7861 have to do the proper extension. */
7862 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7863 && GET_CODE (op0) == SUBREG)
7864 SUBREG_PROMOTED_VAR_P (op0) = 0;
7866 return REDUCE_BIT_FIELD (op0);
7869 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7870 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7871 if (GET_MODE (op0) == mode)
7874 /* If OP0 is a constant, just convert it into the proper mode. */
7875 else if (CONSTANT_P (op0))
7877 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7878 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7880 if (modifier == EXPAND_INITIALIZER)
7881 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7882 subreg_lowpart_offset (mode,
7883 inner_mode));
7884 else
7885 op0= convert_modes (mode, inner_mode, op0,
7886 TYPE_UNSIGNED (inner_type));
7889 else if (modifier == EXPAND_INITIALIZER)
7890 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7892 else if (target == 0)
7893 op0 = convert_to_mode (mode, op0,
7894 TYPE_UNSIGNED (TREE_TYPE
7895 (TREE_OPERAND (exp, 0))));
7896 else
7898 convert_move (target, op0,
7899 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7900 op0 = target;
7903 return REDUCE_BIT_FIELD (op0);
7905 case VIEW_CONVERT_EXPR:
7906 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7908 /* If the input and output modes are both the same, we are done. */
7909 if (TYPE_MODE (type) == GET_MODE (op0))
7911 /* If neither mode is BLKmode, and both modes are the same size
7912 then we can use gen_lowpart. */
7913 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7914 && GET_MODE_SIZE (TYPE_MODE (type))
7915 == GET_MODE_SIZE (GET_MODE (op0)))
7917 if (GET_CODE (op0) == SUBREG)
7918 op0 = force_reg (GET_MODE (op0), op0);
7919 op0 = gen_lowpart (TYPE_MODE (type), op0);
7921 /* If both modes are integral, then we can convert from one to the
7922 other. */
7923 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7924 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7925 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7926 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7927 /* As a last resort, spill op0 to memory, and reload it in a
7928 different mode. */
7929 else if (!MEM_P (op0))
7931 /* If the operand is not a MEM, force it into memory. Since we
7932 are going to be changing the mode of the MEM, don't call
7933 force_const_mem for constants because we don't allow pool
7934 constants to change mode. */
7935 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7937 gcc_assert (!TREE_ADDRESSABLE (exp));
7939 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7940 target
7941 = assign_stack_temp_for_type
7942 (TYPE_MODE (inner_type),
7943 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7945 emit_move_insn (target, op0);
7946 op0 = target;
7949 /* At this point, OP0 is in the correct mode. If the output type is such
7950 that the operand is known to be aligned, indicate that it is.
7951 Otherwise, we need only be concerned about alignment for non-BLKmode
7952 results. */
7953 if (MEM_P (op0))
7955 op0 = copy_rtx (op0);
7957 if (TYPE_ALIGN_OK (type))
7958 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7959 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7960 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7962 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7963 HOST_WIDE_INT temp_size
7964 = MAX (int_size_in_bytes (inner_type),
7965 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7966 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7967 temp_size, 0, type);
7968 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7970 gcc_assert (!TREE_ADDRESSABLE (exp));
7972 if (GET_MODE (op0) == BLKmode)
7973 emit_block_move (new_with_op0_mode, op0,
7974 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7975 (modifier == EXPAND_STACK_PARM
7976 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7977 else
7978 emit_move_insn (new_with_op0_mode, op0);
7980 op0 = new;
7983 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7986 return op0;
7988 case PLUS_EXPR:
7989 /* Check if this is a case for multiplication and addition. */
7990 if (TREE_CODE (type) == INTEGER_TYPE
7991 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
7993 tree subsubexp0, subsubexp1;
7994 enum tree_code code0, code1;
7996 subexp0 = TREE_OPERAND (exp, 0);
7997 subsubexp0 = TREE_OPERAND (subexp0, 0);
7998 subsubexp1 = TREE_OPERAND (subexp0, 1);
7999 code0 = TREE_CODE (subsubexp0);
8000 code1 = TREE_CODE (subsubexp1);
8001 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8002 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8003 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8004 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8005 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8006 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8007 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8009 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8010 enum machine_mode innermode = TYPE_MODE (op0type);
8011 bool zextend_p = TYPE_UNSIGNED (op0type);
8012 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8013 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8014 && (this_optab->handlers[(int) mode].insn_code
8015 != CODE_FOR_nothing))
8017 expand_operands (TREE_OPERAND (subsubexp0, 0),
8018 TREE_OPERAND (subsubexp1, 0),
8019 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8020 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8021 VOIDmode, EXPAND_NORMAL);
8022 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8023 target, unsignedp);
8024 gcc_assert (temp);
8025 return REDUCE_BIT_FIELD (temp);
8030 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8031 something else, make sure we add the register to the constant and
8032 then to the other thing. This case can occur during strength
8033 reduction and doing it this way will produce better code if the
8034 frame pointer or argument pointer is eliminated.
8036 fold-const.c will ensure that the constant is always in the inner
8037 PLUS_EXPR, so the only case we need to do anything about is if
8038 sp, ap, or fp is our second argument, in which case we must swap
8039 the innermost first argument and our second argument. */
8041 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8042 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8043 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8044 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8045 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8046 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8048 tree t = TREE_OPERAND (exp, 1);
8050 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8051 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8054 /* If the result is to be ptr_mode and we are adding an integer to
8055 something, we might be forming a constant. So try to use
8056 plus_constant. If it produces a sum and we can't accept it,
8057 use force_operand. This allows P = &ARR[const] to generate
8058 efficient code on machines where a SYMBOL_REF is not a valid
8059 address.
8061 If this is an EXPAND_SUM call, always return the sum. */
8062 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8063 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8065 if (modifier == EXPAND_STACK_PARM)
8066 target = 0;
8067 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8068 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8069 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8071 rtx constant_part;
8073 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8074 EXPAND_SUM);
8075 /* Use immed_double_const to ensure that the constant is
8076 truncated according to the mode of OP1, then sign extended
8077 to a HOST_WIDE_INT. Using the constant directly can result
8078 in non-canonical RTL in a 64x32 cross compile. */
8079 constant_part
8080 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8081 (HOST_WIDE_INT) 0,
8082 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8083 op1 = plus_constant (op1, INTVAL (constant_part));
8084 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8085 op1 = force_operand (op1, target);
8086 return REDUCE_BIT_FIELD (op1);
8089 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8090 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8091 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8093 rtx constant_part;
8095 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8096 (modifier == EXPAND_INITIALIZER
8097 ? EXPAND_INITIALIZER : EXPAND_SUM));
8098 if (! CONSTANT_P (op0))
8100 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8101 VOIDmode, modifier);
8102 /* Return a PLUS if modifier says it's OK. */
8103 if (modifier == EXPAND_SUM
8104 || modifier == EXPAND_INITIALIZER)
8105 return simplify_gen_binary (PLUS, mode, op0, op1);
8106 goto binop2;
8108 /* Use immed_double_const to ensure that the constant is
8109 truncated according to the mode of OP1, then sign extended
8110 to a HOST_WIDE_INT. Using the constant directly can result
8111 in non-canonical RTL in a 64x32 cross compile. */
8112 constant_part
8113 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8114 (HOST_WIDE_INT) 0,
8115 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8116 op0 = plus_constant (op0, INTVAL (constant_part));
8117 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8118 op0 = force_operand (op0, target);
8119 return REDUCE_BIT_FIELD (op0);
8123 /* No sense saving up arithmetic to be done
8124 if it's all in the wrong mode to form part of an address.
8125 And force_operand won't know whether to sign-extend or
8126 zero-extend. */
8127 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8128 || mode != ptr_mode)
8130 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8131 subtarget, &op0, &op1, 0);
8132 if (op0 == const0_rtx)
8133 return op1;
8134 if (op1 == const0_rtx)
8135 return op0;
8136 goto binop2;
8139 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8140 subtarget, &op0, &op1, modifier);
8141 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8143 case MINUS_EXPR:
8144 /* Check if this is a case for multiplication and subtraction. */
8145 if (TREE_CODE (type) == INTEGER_TYPE
8146 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8148 tree subsubexp0, subsubexp1;
8149 enum tree_code code0, code1;
8151 subexp1 = TREE_OPERAND (exp, 1);
8152 subsubexp0 = TREE_OPERAND (subexp1, 0);
8153 subsubexp1 = TREE_OPERAND (subexp1, 1);
8154 code0 = TREE_CODE (subsubexp0);
8155 code1 = TREE_CODE (subsubexp1);
8156 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8157 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8158 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8159 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8160 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8161 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8162 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8164 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8165 enum machine_mode innermode = TYPE_MODE (op0type);
8166 bool zextend_p = TYPE_UNSIGNED (op0type);
8167 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8168 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8169 && (this_optab->handlers[(int) mode].insn_code
8170 != CODE_FOR_nothing))
8172 expand_operands (TREE_OPERAND (subsubexp0, 0),
8173 TREE_OPERAND (subsubexp1, 0),
8174 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8175 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8176 VOIDmode, EXPAND_NORMAL);
8177 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8178 target, unsignedp);
8179 gcc_assert (temp);
8180 return REDUCE_BIT_FIELD (temp);
8185 /* For initializers, we are allowed to return a MINUS of two
8186 symbolic constants. Here we handle all cases when both operands
8187 are constant. */
8188 /* Handle difference of two symbolic constants,
8189 for the sake of an initializer. */
8190 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8191 && really_constant_p (TREE_OPERAND (exp, 0))
8192 && really_constant_p (TREE_OPERAND (exp, 1)))
8194 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8195 NULL_RTX, &op0, &op1, modifier);
8197 /* If the last operand is a CONST_INT, use plus_constant of
8198 the negated constant. Else make the MINUS. */
8199 if (GET_CODE (op1) == CONST_INT)
8200 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8201 else
8202 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8205 /* No sense saving up arithmetic to be done
8206 if it's all in the wrong mode to form part of an address.
8207 And force_operand won't know whether to sign-extend or
8208 zero-extend. */
8209 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8210 || mode != ptr_mode)
8211 goto binop;
8213 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8214 subtarget, &op0, &op1, modifier);
8216 /* Convert A - const to A + (-const). */
8217 if (GET_CODE (op1) == CONST_INT)
8219 op1 = negate_rtx (mode, op1);
8220 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8223 goto binop2;
8225 case MULT_EXPR:
8226 /* If first operand is constant, swap them.
8227 Thus the following special case checks need only
8228 check the second operand. */
8229 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8231 tree t1 = TREE_OPERAND (exp, 0);
8232 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8233 TREE_OPERAND (exp, 1) = t1;
8236 /* Attempt to return something suitable for generating an
8237 indexed address, for machines that support that. */
8239 if (modifier == EXPAND_SUM && mode == ptr_mode
8240 && host_integerp (TREE_OPERAND (exp, 1), 0))
8242 tree exp1 = TREE_OPERAND (exp, 1);
8244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8245 EXPAND_SUM);
8247 if (!REG_P (op0))
8248 op0 = force_operand (op0, NULL_RTX);
8249 if (!REG_P (op0))
8250 op0 = copy_to_mode_reg (mode, op0);
8252 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8253 gen_int_mode (tree_low_cst (exp1, 0),
8254 TYPE_MODE (TREE_TYPE (exp1)))));
8257 if (modifier == EXPAND_STACK_PARM)
8258 target = 0;
8260 /* Check for multiplying things that have been extended
8261 from a narrower type. If this machine supports multiplying
8262 in that narrower type with a result in the desired type,
8263 do it that way, and avoid the explicit type-conversion. */
8265 subexp0 = TREE_OPERAND (exp, 0);
8266 subexp1 = TREE_OPERAND (exp, 1);
8267 /* First, check if we have a multiplication of one signed and one
8268 unsigned operand. */
8269 if (TREE_CODE (subexp0) == NOP_EXPR
8270 && TREE_CODE (subexp1) == NOP_EXPR
8271 && TREE_CODE (type) == INTEGER_TYPE
8272 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8273 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8274 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8275 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8276 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8277 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8279 enum machine_mode innermode
8280 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8281 this_optab = usmul_widen_optab;
8282 if (mode == GET_MODE_WIDER_MODE (innermode))
8284 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8286 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8287 expand_operands (TREE_OPERAND (subexp0, 0),
8288 TREE_OPERAND (subexp1, 0),
8289 NULL_RTX, &op0, &op1, 0);
8290 else
8291 expand_operands (TREE_OPERAND (subexp0, 0),
8292 TREE_OPERAND (subexp1, 0),
8293 NULL_RTX, &op1, &op0, 0);
8295 goto binop3;
8299 /* Check for a multiplication with matching signedness. */
8300 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8301 && TREE_CODE (type) == INTEGER_TYPE
8302 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8303 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8304 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8305 && int_fits_type_p (TREE_OPERAND (exp, 1),
8306 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8307 /* Don't use a widening multiply if a shift will do. */
8308 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8309 > HOST_BITS_PER_WIDE_INT)
8310 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8312 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8313 && (TYPE_PRECISION (TREE_TYPE
8314 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8315 == TYPE_PRECISION (TREE_TYPE
8316 (TREE_OPERAND
8317 (TREE_OPERAND (exp, 0), 0))))
8318 /* If both operands are extended, they must either both
8319 be zero-extended or both be sign-extended. */
8320 && (TYPE_UNSIGNED (TREE_TYPE
8321 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8322 == TYPE_UNSIGNED (TREE_TYPE
8323 (TREE_OPERAND
8324 (TREE_OPERAND (exp, 0), 0)))))))
8326 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8327 enum machine_mode innermode = TYPE_MODE (op0type);
8328 bool zextend_p = TYPE_UNSIGNED (op0type);
8329 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8330 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8332 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8334 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8336 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8337 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8338 TREE_OPERAND (exp, 1),
8339 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8340 else
8341 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8342 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8343 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8344 goto binop3;
8346 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8347 && innermode == word_mode)
8349 rtx htem, hipart;
8350 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8351 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8352 op1 = convert_modes (innermode, mode,
8353 expand_normal (TREE_OPERAND (exp, 1)),
8354 unsignedp);
8355 else
8356 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8357 temp = expand_binop (mode, other_optab, op0, op1, target,
8358 unsignedp, OPTAB_LIB_WIDEN);
8359 hipart = gen_highpart (innermode, temp);
8360 htem = expand_mult_highpart_adjust (innermode, hipart,
8361 op0, op1, hipart,
8362 zextend_p);
8363 if (htem != hipart)
8364 emit_move_insn (hipart, htem);
8365 return REDUCE_BIT_FIELD (temp);
8369 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8370 subtarget, &op0, &op1, 0);
8371 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8373 case TRUNC_DIV_EXPR:
8374 case FLOOR_DIV_EXPR:
8375 case CEIL_DIV_EXPR:
8376 case ROUND_DIV_EXPR:
8377 case EXACT_DIV_EXPR:
8378 if (modifier == EXPAND_STACK_PARM)
8379 target = 0;
8380 /* Possible optimization: compute the dividend with EXPAND_SUM
8381 then if the divisor is constant can optimize the case
8382 where some terms of the dividend have coeffs divisible by it. */
8383 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8384 subtarget, &op0, &op1, 0);
8385 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8387 case RDIV_EXPR:
8388 goto binop;
8390 case TRUNC_MOD_EXPR:
8391 case FLOOR_MOD_EXPR:
8392 case CEIL_MOD_EXPR:
8393 case ROUND_MOD_EXPR:
8394 if (modifier == EXPAND_STACK_PARM)
8395 target = 0;
8396 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8397 subtarget, &op0, &op1, 0);
8398 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8400 case FIX_TRUNC_EXPR:
8401 op0 = expand_normal (TREE_OPERAND (exp, 0));
8402 if (target == 0 || modifier == EXPAND_STACK_PARM)
8403 target = gen_reg_rtx (mode);
8404 expand_fix (target, op0, unsignedp);
8405 return target;
8407 case FLOAT_EXPR:
8408 op0 = expand_normal (TREE_OPERAND (exp, 0));
8409 if (target == 0 || modifier == EXPAND_STACK_PARM)
8410 target = gen_reg_rtx (mode);
8411 /* expand_float can't figure out what to do if FROM has VOIDmode.
8412 So give it the correct mode. With -O, cse will optimize this. */
8413 if (GET_MODE (op0) == VOIDmode)
8414 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8415 op0);
8416 expand_float (target, op0,
8417 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8418 return target;
8420 case NEGATE_EXPR:
8421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8422 VOIDmode, EXPAND_NORMAL);
8423 if (modifier == EXPAND_STACK_PARM)
8424 target = 0;
8425 temp = expand_unop (mode,
8426 optab_for_tree_code (NEGATE_EXPR, type),
8427 op0, target, 0);
8428 gcc_assert (temp);
8429 return REDUCE_BIT_FIELD (temp);
8431 case ABS_EXPR:
8432 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8433 VOIDmode, EXPAND_NORMAL);
8434 if (modifier == EXPAND_STACK_PARM)
8435 target = 0;
8437 /* ABS_EXPR is not valid for complex arguments. */
8438 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8439 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8441 /* Unsigned abs is simply the operand. Testing here means we don't
8442 risk generating incorrect code below. */
8443 if (TYPE_UNSIGNED (type))
8444 return op0;
8446 return expand_abs (mode, op0, target, unsignedp,
8447 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8449 case MAX_EXPR:
8450 case MIN_EXPR:
8451 target = original_target;
8452 if (target == 0
8453 || modifier == EXPAND_STACK_PARM
8454 || (MEM_P (target) && MEM_VOLATILE_P (target))
8455 || GET_MODE (target) != mode
8456 || (REG_P (target)
8457 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8458 target = gen_reg_rtx (mode);
8459 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8460 target, &op0, &op1, 0);
8462 /* First try to do it with a special MIN or MAX instruction.
8463 If that does not win, use a conditional jump to select the proper
8464 value. */
8465 this_optab = optab_for_tree_code (code, type);
8466 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8467 OPTAB_WIDEN);
8468 if (temp != 0)
8469 return temp;
8471 /* At this point, a MEM target is no longer useful; we will get better
8472 code without it. */
8474 if (! REG_P (target))
8475 target = gen_reg_rtx (mode);
8477 /* If op1 was placed in target, swap op0 and op1. */
8478 if (target != op0 && target == op1)
8480 temp = op0;
8481 op0 = op1;
8482 op1 = temp;
8485 /* We generate better code and avoid problems with op1 mentioning
8486 target by forcing op1 into a pseudo if it isn't a constant. */
8487 if (! CONSTANT_P (op1))
8488 op1 = force_reg (mode, op1);
8491 enum rtx_code comparison_code;
8492 rtx cmpop1 = op1;
8494 if (code == MAX_EXPR)
8495 comparison_code = unsignedp ? GEU : GE;
8496 else
8497 comparison_code = unsignedp ? LEU : LE;
8499 /* Canonicalize to comparisons against 0. */
8500 if (op1 == const1_rtx)
8502 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8503 or (a != 0 ? a : 1) for unsigned.
8504 For MIN we are safe converting (a <= 1 ? a : 1)
8505 into (a <= 0 ? a : 1) */
8506 cmpop1 = const0_rtx;
8507 if (code == MAX_EXPR)
8508 comparison_code = unsignedp ? NE : GT;
8510 if (op1 == constm1_rtx && !unsignedp)
8512 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8513 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8514 cmpop1 = const0_rtx;
8515 if (code == MIN_EXPR)
8516 comparison_code = LT;
8518 #ifdef HAVE_conditional_move
8519 /* Use a conditional move if possible. */
8520 if (can_conditionally_move_p (mode))
8522 rtx insn;
8524 /* ??? Same problem as in expmed.c: emit_conditional_move
8525 forces a stack adjustment via compare_from_rtx, and we
8526 lose the stack adjustment if the sequence we are about
8527 to create is discarded. */
8528 do_pending_stack_adjust ();
8530 start_sequence ();
8532 /* Try to emit the conditional move. */
8533 insn = emit_conditional_move (target, comparison_code,
8534 op0, cmpop1, mode,
8535 op0, op1, mode,
8536 unsignedp);
8538 /* If we could do the conditional move, emit the sequence,
8539 and return. */
8540 if (insn)
8542 rtx seq = get_insns ();
8543 end_sequence ();
8544 emit_insn (seq);
8545 return target;
8548 /* Otherwise discard the sequence and fall back to code with
8549 branches. */
8550 end_sequence ();
8552 #endif
8553 if (target != op0)
8554 emit_move_insn (target, op0);
8556 temp = gen_label_rtx ();
8557 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8558 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8560 emit_move_insn (target, op1);
8561 emit_label (temp);
8562 return target;
8564 case BIT_NOT_EXPR:
8565 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8566 VOIDmode, EXPAND_NORMAL);
8567 if (modifier == EXPAND_STACK_PARM)
8568 target = 0;
8569 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8570 gcc_assert (temp);
8571 return temp;
8573 /* ??? Can optimize bitwise operations with one arg constant.
8574 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8575 and (a bitwise1 b) bitwise2 b (etc)
8576 but that is probably not worth while. */
8578 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8579 boolean values when we want in all cases to compute both of them. In
8580 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8581 as actual zero-or-1 values and then bitwise anding. In cases where
8582 there cannot be any side effects, better code would be made by
8583 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8584 how to recognize those cases. */
8586 case TRUTH_AND_EXPR:
8587 code = BIT_AND_EXPR;
8588 case BIT_AND_EXPR:
8589 goto binop;
8591 case TRUTH_OR_EXPR:
8592 code = BIT_IOR_EXPR;
8593 case BIT_IOR_EXPR:
8594 goto binop;
8596 case TRUTH_XOR_EXPR:
8597 code = BIT_XOR_EXPR;
8598 case BIT_XOR_EXPR:
8599 goto binop;
8601 case LSHIFT_EXPR:
8602 case RSHIFT_EXPR:
8603 case LROTATE_EXPR:
8604 case RROTATE_EXPR:
8605 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8606 subtarget = 0;
8607 if (modifier == EXPAND_STACK_PARM)
8608 target = 0;
8609 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8610 VOIDmode, EXPAND_NORMAL);
8611 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8612 unsignedp);
8614 /* Could determine the answer when only additive constants differ. Also,
8615 the addition of one can be handled by changing the condition. */
8616 case LT_EXPR:
8617 case LE_EXPR:
8618 case GT_EXPR:
8619 case GE_EXPR:
8620 case EQ_EXPR:
8621 case NE_EXPR:
8622 case UNORDERED_EXPR:
8623 case ORDERED_EXPR:
8624 case UNLT_EXPR:
8625 case UNLE_EXPR:
8626 case UNGT_EXPR:
8627 case UNGE_EXPR:
8628 case UNEQ_EXPR:
8629 case LTGT_EXPR:
8630 temp = do_store_flag (exp,
8631 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8632 tmode != VOIDmode ? tmode : mode, 0);
8633 if (temp != 0)
8634 return temp;
8636 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8637 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8638 && original_target
8639 && REG_P (original_target)
8640 && (GET_MODE (original_target)
8641 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8643 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8644 VOIDmode, EXPAND_NORMAL);
8646 /* If temp is constant, we can just compute the result. */
8647 if (GET_CODE (temp) == CONST_INT)
8649 if (INTVAL (temp) != 0)
8650 emit_move_insn (target, const1_rtx);
8651 else
8652 emit_move_insn (target, const0_rtx);
8654 return target;
8657 if (temp != original_target)
8659 enum machine_mode mode1 = GET_MODE (temp);
8660 if (mode1 == VOIDmode)
8661 mode1 = tmode != VOIDmode ? tmode : mode;
8663 temp = copy_to_mode_reg (mode1, temp);
8666 op1 = gen_label_rtx ();
8667 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8668 GET_MODE (temp), unsignedp, op1);
8669 emit_move_insn (temp, const1_rtx);
8670 emit_label (op1);
8671 return temp;
8674 /* If no set-flag instruction, must generate a conditional store
8675 into a temporary variable. Drop through and handle this
8676 like && and ||. */
8678 if (! ignore
8679 && (target == 0
8680 || modifier == EXPAND_STACK_PARM
8681 || ! safe_from_p (target, exp, 1)
8682 /* Make sure we don't have a hard reg (such as function's return
8683 value) live across basic blocks, if not optimizing. */
8684 || (!optimize && REG_P (target)
8685 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8686 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8688 if (target)
8689 emit_move_insn (target, const0_rtx);
8691 op1 = gen_label_rtx ();
8692 jumpifnot (exp, op1);
8694 if (target)
8695 emit_move_insn (target, const1_rtx);
8697 emit_label (op1);
8698 return ignore ? const0_rtx : target;
8700 case TRUTH_NOT_EXPR:
8701 if (modifier == EXPAND_STACK_PARM)
8702 target = 0;
8703 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8704 VOIDmode, EXPAND_NORMAL);
8705 /* The parser is careful to generate TRUTH_NOT_EXPR
8706 only with operands that are always zero or one. */
8707 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8708 target, 1, OPTAB_LIB_WIDEN);
8709 gcc_assert (temp);
8710 return temp;
8712 case STATEMENT_LIST:
8714 tree_stmt_iterator iter;
8716 gcc_assert (ignore);
8718 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8719 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8721 return const0_rtx;
8723 case COND_EXPR:
8724 /* A COND_EXPR with its type being VOID_TYPE represents a
8725 conditional jump and is handled in
8726 expand_gimple_cond_expr. */
8727 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8729 /* Note that COND_EXPRs whose type is a structure or union
8730 are required to be constructed to contain assignments of
8731 a temporary variable, so that we can evaluate them here
8732 for side effect only. If type is void, we must do likewise. */
8734 gcc_assert (!TREE_ADDRESSABLE (type)
8735 && !ignore
8736 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8737 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8739 /* If we are not to produce a result, we have no target. Otherwise,
8740 if a target was specified use it; it will not be used as an
8741 intermediate target unless it is safe. If no target, use a
8742 temporary. */
8744 if (modifier != EXPAND_STACK_PARM
8745 && original_target
8746 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8747 && GET_MODE (original_target) == mode
8748 #ifdef HAVE_conditional_move
8749 && (! can_conditionally_move_p (mode)
8750 || REG_P (original_target))
8751 #endif
8752 && !MEM_P (original_target))
8753 temp = original_target;
8754 else
8755 temp = assign_temp (type, 0, 0, 1);
8757 do_pending_stack_adjust ();
8758 NO_DEFER_POP;
8759 op0 = gen_label_rtx ();
8760 op1 = gen_label_rtx ();
8761 jumpifnot (TREE_OPERAND (exp, 0), op0);
8762 store_expr (TREE_OPERAND (exp, 1), temp,
8763 modifier == EXPAND_STACK_PARM);
8765 emit_jump_insn (gen_jump (op1));
8766 emit_barrier ();
8767 emit_label (op0);
8768 store_expr (TREE_OPERAND (exp, 2), temp,
8769 modifier == EXPAND_STACK_PARM);
8771 emit_label (op1);
8772 OK_DEFER_POP;
8773 return temp;
8775 case VEC_COND_EXPR:
8776 target = expand_vec_cond_expr (exp, target);
8777 return target;
8779 case MODIFY_EXPR:
8781 tree lhs = TREE_OPERAND (exp, 0);
8782 tree rhs = TREE_OPERAND (exp, 1);
8783 gcc_assert (ignore);
8784 expand_assignment (lhs, rhs);
8785 return const0_rtx;
8788 case GIMPLE_MODIFY_STMT:
8790 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8791 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8793 gcc_assert (ignore);
8795 /* Check for |= or &= of a bitfield of size one into another bitfield
8796 of size 1. In this case, (unless we need the result of the
8797 assignment) we can do this more efficiently with a
8798 test followed by an assignment, if necessary.
8800 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8801 things change so we do, this code should be enhanced to
8802 support it. */
8803 if (TREE_CODE (lhs) == COMPONENT_REF
8804 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8805 || TREE_CODE (rhs) == BIT_AND_EXPR)
8806 && TREE_OPERAND (rhs, 0) == lhs
8807 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8808 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8809 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8811 rtx label = gen_label_rtx ();
8812 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8813 do_jump (TREE_OPERAND (rhs, 1),
8814 value ? label : 0,
8815 value ? 0 : label);
8816 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8817 do_pending_stack_adjust ();
8818 emit_label (label);
8819 return const0_rtx;
8822 expand_assignment (lhs, rhs);
8823 return const0_rtx;
8826 case RETURN_EXPR:
8827 if (!TREE_OPERAND (exp, 0))
8828 expand_null_return ();
8829 else
8830 expand_return (TREE_OPERAND (exp, 0));
8831 return const0_rtx;
8833 case ADDR_EXPR:
8834 return expand_expr_addr_expr (exp, target, tmode, modifier);
8836 case COMPLEX_EXPR:
8837 /* Get the rtx code of the operands. */
8838 op0 = expand_normal (TREE_OPERAND (exp, 0));
8839 op1 = expand_normal (TREE_OPERAND (exp, 1));
8841 if (!target)
8842 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8844 /* Move the real (op0) and imaginary (op1) parts to their location. */
8845 write_complex_part (target, op0, false);
8846 write_complex_part (target, op1, true);
8848 return target;
8850 case REALPART_EXPR:
8851 op0 = expand_normal (TREE_OPERAND (exp, 0));
8852 return read_complex_part (op0, false);
8854 case IMAGPART_EXPR:
8855 op0 = expand_normal (TREE_OPERAND (exp, 0));
8856 return read_complex_part (op0, true);
8858 case RESX_EXPR:
8859 expand_resx_expr (exp);
8860 return const0_rtx;
8862 case TRY_CATCH_EXPR:
8863 case CATCH_EXPR:
8864 case EH_FILTER_EXPR:
8865 case TRY_FINALLY_EXPR:
8866 /* Lowered by tree-eh.c. */
8867 gcc_unreachable ();
8869 case WITH_CLEANUP_EXPR:
8870 case CLEANUP_POINT_EXPR:
8871 case TARGET_EXPR:
8872 case CASE_LABEL_EXPR:
8873 case VA_ARG_EXPR:
8874 case BIND_EXPR:
8875 case INIT_EXPR:
8876 case CONJ_EXPR:
8877 case COMPOUND_EXPR:
8878 case PREINCREMENT_EXPR:
8879 case PREDECREMENT_EXPR:
8880 case POSTINCREMENT_EXPR:
8881 case POSTDECREMENT_EXPR:
8882 case LOOP_EXPR:
8883 case EXIT_EXPR:
8884 case TRUTH_ANDIF_EXPR:
8885 case TRUTH_ORIF_EXPR:
8886 /* Lowered by gimplify.c. */
8887 gcc_unreachable ();
8889 case EXC_PTR_EXPR:
8890 return get_exception_pointer (cfun);
8892 case FILTER_EXPR:
8893 return get_exception_filter (cfun);
8895 case FDESC_EXPR:
8896 /* Function descriptors are not valid except for as
8897 initialization constants, and should not be expanded. */
8898 gcc_unreachable ();
8900 case SWITCH_EXPR:
8901 expand_case (exp);
8902 return const0_rtx;
8904 case LABEL_EXPR:
8905 expand_label (TREE_OPERAND (exp, 0));
8906 return const0_rtx;
8908 case ASM_EXPR:
8909 expand_asm_expr (exp);
8910 return const0_rtx;
8912 case WITH_SIZE_EXPR:
8913 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8914 have pulled out the size to use in whatever context it needed. */
8915 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8916 modifier, alt_rtl);
8918 case REALIGN_LOAD_EXPR:
8920 tree oprnd0 = TREE_OPERAND (exp, 0);
8921 tree oprnd1 = TREE_OPERAND (exp, 1);
8922 tree oprnd2 = TREE_OPERAND (exp, 2);
8923 rtx op2;
8925 this_optab = optab_for_tree_code (code, type);
8926 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8927 op2 = expand_normal (oprnd2);
8928 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8929 target, unsignedp);
8930 gcc_assert (temp);
8931 return temp;
8934 case DOT_PROD_EXPR:
8936 tree oprnd0 = TREE_OPERAND (exp, 0);
8937 tree oprnd1 = TREE_OPERAND (exp, 1);
8938 tree oprnd2 = TREE_OPERAND (exp, 2);
8939 rtx op2;
8941 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8942 op2 = expand_normal (oprnd2);
8943 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8944 target, unsignedp);
8945 return target;
8948 case WIDEN_SUM_EXPR:
8950 tree oprnd0 = TREE_OPERAND (exp, 0);
8951 tree oprnd1 = TREE_OPERAND (exp, 1);
8953 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8954 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8955 target, unsignedp);
8956 return target;
8959 case REDUC_MAX_EXPR:
8960 case REDUC_MIN_EXPR:
8961 case REDUC_PLUS_EXPR:
8963 op0 = expand_normal (TREE_OPERAND (exp, 0));
8964 this_optab = optab_for_tree_code (code, type);
8965 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8966 gcc_assert (temp);
8967 return temp;
8970 case VEC_EXTRACT_EVEN_EXPR:
8971 case VEC_EXTRACT_ODD_EXPR:
8973 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8974 NULL_RTX, &op0, &op1, 0);
8975 this_optab = optab_for_tree_code (code, type);
8976 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8977 OPTAB_WIDEN);
8978 gcc_assert (temp);
8979 return temp;
8982 case VEC_INTERLEAVE_HIGH_EXPR:
8983 case VEC_INTERLEAVE_LOW_EXPR:
8985 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8986 NULL_RTX, &op0, &op1, 0);
8987 this_optab = optab_for_tree_code (code, type);
8988 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8989 OPTAB_WIDEN);
8990 gcc_assert (temp);
8991 return temp;
8994 case VEC_LSHIFT_EXPR:
8995 case VEC_RSHIFT_EXPR:
8997 target = expand_vec_shift_expr (exp, target);
8998 return target;
9001 case VEC_UNPACK_HI_EXPR:
9002 case VEC_UNPACK_LO_EXPR:
9004 op0 = expand_normal (TREE_OPERAND (exp, 0));
9005 this_optab = optab_for_tree_code (code, type);
9006 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9007 target, unsignedp);
9008 gcc_assert (temp);
9009 return temp;
9012 case VEC_UNPACK_FLOAT_HI_EXPR:
9013 case VEC_UNPACK_FLOAT_LO_EXPR:
9015 op0 = expand_normal (TREE_OPERAND (exp, 0));
9016 /* The signedness is determined from input operand. */
9017 this_optab = optab_for_tree_code (code,
9018 TREE_TYPE (TREE_OPERAND (exp, 0)));
9019 temp = expand_widen_pattern_expr
9020 (exp, op0, NULL_RTX, NULL_RTX,
9021 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9023 gcc_assert (temp);
9024 return temp;
9027 case VEC_WIDEN_MULT_HI_EXPR:
9028 case VEC_WIDEN_MULT_LO_EXPR:
9030 tree oprnd0 = TREE_OPERAND (exp, 0);
9031 tree oprnd1 = TREE_OPERAND (exp, 1);
9033 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9034 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9035 target, unsignedp);
9036 gcc_assert (target);
9037 return target;
9040 case VEC_PACK_TRUNC_EXPR:
9041 case VEC_PACK_SAT_EXPR:
9042 case VEC_PACK_FIX_TRUNC_EXPR:
9044 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9045 goto binop;
9048 default:
9049 return lang_hooks.expand_expr (exp, original_target, tmode,
9050 modifier, alt_rtl);
9053 /* Here to do an ordinary binary operator. */
9054 binop:
9055 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9056 subtarget, &op0, &op1, 0);
9057 binop2:
9058 this_optab = optab_for_tree_code (code, type);
9059 binop3:
9060 if (modifier == EXPAND_STACK_PARM)
9061 target = 0;
9062 temp = expand_binop (mode, this_optab, op0, op1, target,
9063 unsignedp, OPTAB_LIB_WIDEN);
9064 gcc_assert (temp);
9065 return REDUCE_BIT_FIELD (temp);
9067 #undef REDUCE_BIT_FIELD
9069 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9070 signedness of TYPE), possibly returning the result in TARGET. */
9071 static rtx
9072 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9074 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9075 if (target && GET_MODE (target) != GET_MODE (exp))
9076 target = 0;
9077 /* For constant values, reduce using build_int_cst_type. */
9078 if (GET_CODE (exp) == CONST_INT)
9080 HOST_WIDE_INT value = INTVAL (exp);
9081 tree t = build_int_cst_type (type, value);
9082 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9084 else if (TYPE_UNSIGNED (type))
9086 rtx mask;
9087 if (prec < HOST_BITS_PER_WIDE_INT)
9088 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9089 GET_MODE (exp));
9090 else
9091 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9092 ((unsigned HOST_WIDE_INT) 1
9093 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9094 GET_MODE (exp));
9095 return expand_and (GET_MODE (exp), exp, mask, target);
9097 else
9099 tree count = build_int_cst (NULL_TREE,
9100 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9101 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9102 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9106 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9107 when applied to the address of EXP produces an address known to be
9108 aligned more than BIGGEST_ALIGNMENT. */
9110 static int
9111 is_aligning_offset (tree offset, tree exp)
9113 /* Strip off any conversions. */
9114 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9115 || TREE_CODE (offset) == NOP_EXPR
9116 || TREE_CODE (offset) == CONVERT_EXPR)
9117 offset = TREE_OPERAND (offset, 0);
9119 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9120 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9121 if (TREE_CODE (offset) != BIT_AND_EXPR
9122 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9123 || compare_tree_int (TREE_OPERAND (offset, 1),
9124 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9125 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9126 return 0;
9128 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9129 It must be NEGATE_EXPR. Then strip any more conversions. */
9130 offset = TREE_OPERAND (offset, 0);
9131 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9132 || TREE_CODE (offset) == NOP_EXPR
9133 || TREE_CODE (offset) == CONVERT_EXPR)
9134 offset = TREE_OPERAND (offset, 0);
9136 if (TREE_CODE (offset) != NEGATE_EXPR)
9137 return 0;
9139 offset = TREE_OPERAND (offset, 0);
9140 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9141 || TREE_CODE (offset) == NOP_EXPR
9142 || TREE_CODE (offset) == CONVERT_EXPR)
9143 offset = TREE_OPERAND (offset, 0);
9145 /* This must now be the address of EXP. */
9146 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9149 /* Return the tree node if an ARG corresponds to a string constant or zero
9150 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9151 in bytes within the string that ARG is accessing. The type of the
9152 offset will be `sizetype'. */
9154 tree
9155 string_constant (tree arg, tree *ptr_offset)
9157 tree array, offset, lower_bound;
9158 STRIP_NOPS (arg);
9160 if (TREE_CODE (arg) == ADDR_EXPR)
9162 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9164 *ptr_offset = size_zero_node;
9165 return TREE_OPERAND (arg, 0);
9167 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9169 array = TREE_OPERAND (arg, 0);
9170 offset = size_zero_node;
9172 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9174 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9175 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9176 if (TREE_CODE (array) != STRING_CST
9177 && TREE_CODE (array) != VAR_DECL)
9178 return 0;
9180 /* Check if the array has a nonzero lower bound. */
9181 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9182 if (!integer_zerop (lower_bound))
9184 /* If the offset and base aren't both constants, return 0. */
9185 if (TREE_CODE (lower_bound) != INTEGER_CST)
9186 return 0;
9187 if (TREE_CODE (offset) != INTEGER_CST)
9188 return 0;
9189 /* Adjust offset by the lower bound. */
9190 offset = size_diffop (fold_convert (sizetype, offset),
9191 fold_convert (sizetype, lower_bound));
9194 else
9195 return 0;
9197 else if (TREE_CODE (arg) == PLUS_EXPR)
9199 tree arg0 = TREE_OPERAND (arg, 0);
9200 tree arg1 = TREE_OPERAND (arg, 1);
9202 STRIP_NOPS (arg0);
9203 STRIP_NOPS (arg1);
9205 if (TREE_CODE (arg0) == ADDR_EXPR
9206 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9207 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9209 array = TREE_OPERAND (arg0, 0);
9210 offset = arg1;
9212 else if (TREE_CODE (arg1) == ADDR_EXPR
9213 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9214 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9216 array = TREE_OPERAND (arg1, 0);
9217 offset = arg0;
9219 else
9220 return 0;
9222 else
9223 return 0;
9225 if (TREE_CODE (array) == STRING_CST)
9227 *ptr_offset = fold_convert (sizetype, offset);
9228 return array;
9230 else if (TREE_CODE (array) == VAR_DECL)
9232 int length;
9234 /* Variables initialized to string literals can be handled too. */
9235 if (DECL_INITIAL (array) == NULL_TREE
9236 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9237 return 0;
9239 /* If they are read-only, non-volatile and bind locally. */
9240 if (! TREE_READONLY (array)
9241 || TREE_SIDE_EFFECTS (array)
9242 || ! targetm.binds_local_p (array))
9243 return 0;
9245 /* Avoid const char foo[4] = "abcde"; */
9246 if (DECL_SIZE_UNIT (array) == NULL_TREE
9247 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9248 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9249 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9250 return 0;
9252 /* If variable is bigger than the string literal, OFFSET must be constant
9253 and inside of the bounds of the string literal. */
9254 offset = fold_convert (sizetype, offset);
9255 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9256 && (! host_integerp (offset, 1)
9257 || compare_tree_int (offset, length) >= 0))
9258 return 0;
9260 *ptr_offset = offset;
9261 return DECL_INITIAL (array);
9264 return 0;
9267 /* Generate code to calculate EXP using a store-flag instruction
9268 and return an rtx for the result. EXP is either a comparison
9269 or a TRUTH_NOT_EXPR whose operand is a comparison.
9271 If TARGET is nonzero, store the result there if convenient.
9273 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9274 cheap.
9276 Return zero if there is no suitable set-flag instruction
9277 available on this machine.
9279 Once expand_expr has been called on the arguments of the comparison,
9280 we are committed to doing the store flag, since it is not safe to
9281 re-evaluate the expression. We emit the store-flag insn by calling
9282 emit_store_flag, but only expand the arguments if we have a reason
9283 to believe that emit_store_flag will be successful. If we think that
9284 it will, but it isn't, we have to simulate the store-flag with a
9285 set/jump/set sequence. */
9287 static rtx
9288 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9290 enum rtx_code code;
9291 tree arg0, arg1, type;
9292 tree tem;
9293 enum machine_mode operand_mode;
9294 int invert = 0;
9295 int unsignedp;
9296 rtx op0, op1;
9297 enum insn_code icode;
9298 rtx subtarget = target;
9299 rtx result, label;
9301 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9302 result at the end. We can't simply invert the test since it would
9303 have already been inverted if it were valid. This case occurs for
9304 some floating-point comparisons. */
9306 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9307 invert = 1, exp = TREE_OPERAND (exp, 0);
9309 arg0 = TREE_OPERAND (exp, 0);
9310 arg1 = TREE_OPERAND (exp, 1);
9312 /* Don't crash if the comparison was erroneous. */
9313 if (arg0 == error_mark_node || arg1 == error_mark_node)
9314 return const0_rtx;
9316 type = TREE_TYPE (arg0);
9317 operand_mode = TYPE_MODE (type);
9318 unsignedp = TYPE_UNSIGNED (type);
9320 /* We won't bother with BLKmode store-flag operations because it would mean
9321 passing a lot of information to emit_store_flag. */
9322 if (operand_mode == BLKmode)
9323 return 0;
9325 /* We won't bother with store-flag operations involving function pointers
9326 when function pointers must be canonicalized before comparisons. */
9327 #ifdef HAVE_canonicalize_funcptr_for_compare
9328 if (HAVE_canonicalize_funcptr_for_compare
9329 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9330 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9331 == FUNCTION_TYPE))
9332 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9333 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9334 == FUNCTION_TYPE))))
9335 return 0;
9336 #endif
9338 STRIP_NOPS (arg0);
9339 STRIP_NOPS (arg1);
9341 /* Get the rtx comparison code to use. We know that EXP is a comparison
9342 operation of some type. Some comparisons against 1 and -1 can be
9343 converted to comparisons with zero. Do so here so that the tests
9344 below will be aware that we have a comparison with zero. These
9345 tests will not catch constants in the first operand, but constants
9346 are rarely passed as the first operand. */
9348 switch (TREE_CODE (exp))
9350 case EQ_EXPR:
9351 code = EQ;
9352 break;
9353 case NE_EXPR:
9354 code = NE;
9355 break;
9356 case LT_EXPR:
9357 if (integer_onep (arg1))
9358 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9359 else
9360 code = unsignedp ? LTU : LT;
9361 break;
9362 case LE_EXPR:
9363 if (! unsignedp && integer_all_onesp (arg1))
9364 arg1 = integer_zero_node, code = LT;
9365 else
9366 code = unsignedp ? LEU : LE;
9367 break;
9368 case GT_EXPR:
9369 if (! unsignedp && integer_all_onesp (arg1))
9370 arg1 = integer_zero_node, code = GE;
9371 else
9372 code = unsignedp ? GTU : GT;
9373 break;
9374 case GE_EXPR:
9375 if (integer_onep (arg1))
9376 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9377 else
9378 code = unsignedp ? GEU : GE;
9379 break;
9381 case UNORDERED_EXPR:
9382 code = UNORDERED;
9383 break;
9384 case ORDERED_EXPR:
9385 code = ORDERED;
9386 break;
9387 case UNLT_EXPR:
9388 code = UNLT;
9389 break;
9390 case UNLE_EXPR:
9391 code = UNLE;
9392 break;
9393 case UNGT_EXPR:
9394 code = UNGT;
9395 break;
9396 case UNGE_EXPR:
9397 code = UNGE;
9398 break;
9399 case UNEQ_EXPR:
9400 code = UNEQ;
9401 break;
9402 case LTGT_EXPR:
9403 code = LTGT;
9404 break;
9406 default:
9407 gcc_unreachable ();
9410 /* Put a constant second. */
9411 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9413 tem = arg0; arg0 = arg1; arg1 = tem;
9414 code = swap_condition (code);
9417 /* If this is an equality or inequality test of a single bit, we can
9418 do this by shifting the bit being tested to the low-order bit and
9419 masking the result with the constant 1. If the condition was EQ,
9420 we xor it with 1. This does not require an scc insn and is faster
9421 than an scc insn even if we have it.
9423 The code to make this transformation was moved into fold_single_bit_test,
9424 so we just call into the folder and expand its result. */
9426 if ((code == NE || code == EQ)
9427 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9428 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9430 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9431 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9432 arg0, arg1, type),
9433 target, VOIDmode, EXPAND_NORMAL);
9436 /* Now see if we are likely to be able to do this. Return if not. */
9437 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9438 return 0;
9440 icode = setcc_gen_code[(int) code];
9442 if (icode == CODE_FOR_nothing)
9444 enum machine_mode wmode;
9446 for (wmode = operand_mode;
9447 icode == CODE_FOR_nothing && wmode != VOIDmode;
9448 wmode = GET_MODE_WIDER_MODE (wmode))
9449 icode = cstore_optab->handlers[(int) wmode].insn_code;
9452 if (icode == CODE_FOR_nothing
9453 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9455 /* We can only do this if it is one of the special cases that
9456 can be handled without an scc insn. */
9457 if ((code == LT && integer_zerop (arg1))
9458 || (! only_cheap && code == GE && integer_zerop (arg1)))
9460 else if (! only_cheap && (code == NE || code == EQ)
9461 && TREE_CODE (type) != REAL_TYPE
9462 && ((abs_optab->handlers[(int) operand_mode].insn_code
9463 != CODE_FOR_nothing)
9464 || (ffs_optab->handlers[(int) operand_mode].insn_code
9465 != CODE_FOR_nothing)))
9467 else
9468 return 0;
9471 if (! get_subtarget (target)
9472 || GET_MODE (subtarget) != operand_mode)
9473 subtarget = 0;
9475 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9477 if (target == 0)
9478 target = gen_reg_rtx (mode);
9480 result = emit_store_flag (target, code, op0, op1,
9481 operand_mode, unsignedp, 1);
9483 if (result)
9485 if (invert)
9486 result = expand_binop (mode, xor_optab, result, const1_rtx,
9487 result, 0, OPTAB_LIB_WIDEN);
9488 return result;
9491 /* If this failed, we have to do this with set/compare/jump/set code. */
9492 if (!REG_P (target)
9493 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9494 target = gen_reg_rtx (GET_MODE (target));
9496 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9497 label = gen_label_rtx ();
9498 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9499 NULL_RTX, label);
9501 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9502 emit_label (label);
9504 return target;
9508 /* Stubs in case we haven't got a casesi insn. */
9509 #ifndef HAVE_casesi
9510 # define HAVE_casesi 0
9511 # define gen_casesi(a, b, c, d, e) (0)
9512 # define CODE_FOR_casesi CODE_FOR_nothing
9513 #endif
9515 /* If the machine does not have a case insn that compares the bounds,
9516 this means extra overhead for dispatch tables, which raises the
9517 threshold for using them. */
9518 #ifndef CASE_VALUES_THRESHOLD
9519 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9520 #endif /* CASE_VALUES_THRESHOLD */
9522 unsigned int
9523 case_values_threshold (void)
9525 return CASE_VALUES_THRESHOLD;
9528 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9529 0 otherwise (i.e. if there is no casesi instruction). */
9531 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9532 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9534 enum machine_mode index_mode = SImode;
9535 int index_bits = GET_MODE_BITSIZE (index_mode);
9536 rtx op1, op2, index;
9537 enum machine_mode op_mode;
9539 if (! HAVE_casesi)
9540 return 0;
9542 /* Convert the index to SImode. */
9543 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9545 enum machine_mode omode = TYPE_MODE (index_type);
9546 rtx rangertx = expand_normal (range);
9548 /* We must handle the endpoints in the original mode. */
9549 index_expr = build2 (MINUS_EXPR, index_type,
9550 index_expr, minval);
9551 minval = integer_zero_node;
9552 index = expand_normal (index_expr);
9553 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9554 omode, 1, default_label);
9555 /* Now we can safely truncate. */
9556 index = convert_to_mode (index_mode, index, 0);
9558 else
9560 if (TYPE_MODE (index_type) != index_mode)
9562 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9563 index_expr = fold_convert (index_type, index_expr);
9566 index = expand_normal (index_expr);
9569 do_pending_stack_adjust ();
9571 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9572 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9573 (index, op_mode))
9574 index = copy_to_mode_reg (op_mode, index);
9576 op1 = expand_normal (minval);
9578 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9579 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9580 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9581 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9582 (op1, op_mode))
9583 op1 = copy_to_mode_reg (op_mode, op1);
9585 op2 = expand_normal (range);
9587 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9588 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9589 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9590 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9591 (op2, op_mode))
9592 op2 = copy_to_mode_reg (op_mode, op2);
9594 emit_jump_insn (gen_casesi (index, op1, op2,
9595 table_label, default_label));
9596 return 1;
9599 /* Attempt to generate a tablejump instruction; same concept. */
9600 #ifndef HAVE_tablejump
9601 #define HAVE_tablejump 0
9602 #define gen_tablejump(x, y) (0)
9603 #endif
9605 /* Subroutine of the next function.
9607 INDEX is the value being switched on, with the lowest value
9608 in the table already subtracted.
9609 MODE is its expected mode (needed if INDEX is constant).
9610 RANGE is the length of the jump table.
9611 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9613 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9614 index value is out of range. */
9616 static void
9617 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9618 rtx default_label)
9620 rtx temp, vector;
9622 if (INTVAL (range) > cfun->max_jumptable_ents)
9623 cfun->max_jumptable_ents = INTVAL (range);
9625 /* Do an unsigned comparison (in the proper mode) between the index
9626 expression and the value which represents the length of the range.
9627 Since we just finished subtracting the lower bound of the range
9628 from the index expression, this comparison allows us to simultaneously
9629 check that the original index expression value is both greater than
9630 or equal to the minimum value of the range and less than or equal to
9631 the maximum value of the range. */
9633 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9634 default_label);
9636 /* If index is in range, it must fit in Pmode.
9637 Convert to Pmode so we can index with it. */
9638 if (mode != Pmode)
9639 index = convert_to_mode (Pmode, index, 1);
9641 /* Don't let a MEM slip through, because then INDEX that comes
9642 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9643 and break_out_memory_refs will go to work on it and mess it up. */
9644 #ifdef PIC_CASE_VECTOR_ADDRESS
9645 if (flag_pic && !REG_P (index))
9646 index = copy_to_mode_reg (Pmode, index);
9647 #endif
9649 /* If flag_force_addr were to affect this address
9650 it could interfere with the tricky assumptions made
9651 about addresses that contain label-refs,
9652 which may be valid only very near the tablejump itself. */
9653 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9654 GET_MODE_SIZE, because this indicates how large insns are. The other
9655 uses should all be Pmode, because they are addresses. This code
9656 could fail if addresses and insns are not the same size. */
9657 index = gen_rtx_PLUS (Pmode,
9658 gen_rtx_MULT (Pmode, index,
9659 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9660 gen_rtx_LABEL_REF (Pmode, table_label));
9661 #ifdef PIC_CASE_VECTOR_ADDRESS
9662 if (flag_pic)
9663 index = PIC_CASE_VECTOR_ADDRESS (index);
9664 else
9665 #endif
9666 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9667 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9668 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9669 convert_move (temp, vector, 0);
9671 emit_jump_insn (gen_tablejump (temp, table_label));
9673 /* If we are generating PIC code or if the table is PC-relative, the
9674 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9675 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9676 emit_barrier ();
9680 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9681 rtx table_label, rtx default_label)
9683 rtx index;
9685 if (! HAVE_tablejump)
9686 return 0;
9688 index_expr = fold_build2 (MINUS_EXPR, index_type,
9689 fold_convert (index_type, index_expr),
9690 fold_convert (index_type, minval));
9691 index = expand_normal (index_expr);
9692 do_pending_stack_adjust ();
9694 do_tablejump (index, TYPE_MODE (index_type),
9695 convert_modes (TYPE_MODE (index_type),
9696 TYPE_MODE (TREE_TYPE (range)),
9697 expand_normal (range),
9698 TYPE_UNSIGNED (TREE_TYPE (range))),
9699 table_label, default_label);
9700 return 1;
9703 /* Nonzero if the mode is a valid vector mode for this architecture.
9704 This returns nonzero even if there is no hardware support for the
9705 vector mode, but we can emulate with narrower modes. */
9708 vector_mode_valid_p (enum machine_mode mode)
9710 enum mode_class class = GET_MODE_CLASS (mode);
9711 enum machine_mode innermode;
9713 /* Doh! What's going on? */
9714 if (class != MODE_VECTOR_INT
9715 && class != MODE_VECTOR_FLOAT)
9716 return 0;
9718 /* Hardware support. Woo hoo! */
9719 if (targetm.vector_mode_supported_p (mode))
9720 return 1;
9722 innermode = GET_MODE_INNER (mode);
9724 /* We should probably return 1 if requesting V4DI and we have no DI,
9725 but we have V2DI, but this is probably very unlikely. */
9727 /* If we have support for the inner mode, we can safely emulate it.
9728 We may not have V2DI, but me can emulate with a pair of DIs. */
9729 return targetm.scalar_mode_supported_p (innermode);
9732 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9733 static rtx
9734 const_vector_from_tree (tree exp)
9736 rtvec v;
9737 int units, i;
9738 tree link, elt;
9739 enum machine_mode inner, mode;
9741 mode = TYPE_MODE (TREE_TYPE (exp));
9743 if (initializer_zerop (exp))
9744 return CONST0_RTX (mode);
9746 units = GET_MODE_NUNITS (mode);
9747 inner = GET_MODE_INNER (mode);
9749 v = rtvec_alloc (units);
9751 link = TREE_VECTOR_CST_ELTS (exp);
9752 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9754 elt = TREE_VALUE (link);
9756 if (TREE_CODE (elt) == REAL_CST)
9757 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9758 inner);
9759 else
9760 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9761 TREE_INT_CST_HIGH (elt),
9762 inner);
9765 /* Initialize remaining elements to 0. */
9766 for (; i < units; ++i)
9767 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9769 return gen_rtx_CONST_VECTOR (mode, v);
9771 #include "gt-expr.h"