1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
110 struct store_by_pieces
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, alias_set_type);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, alias_set_type, bool);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
149 static int is_aligning_offset (const_tree, const_tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* Record for each mode whether we can float-extend from memory. */
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero. */
190 #ifndef SET_BY_PIECES_P
191 #define SET_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
193 < (unsigned int) SET_RATIO)
194 #endif
196 /* This macro is used to determine whether store_by_pieces should be
197 called to "memcpy" storage when the source is a constant string. */
198 #ifndef STORE_BY_PIECES_P
199 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
201 < (unsigned int) MOVE_RATIO)
202 #endif
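/* Illustrative sketch (not part of the original file): these *_BY_PIECES_P
   predicates gate the piecewise expanders.  For instance, further down in
   this file emit_block_move_hints does roughly

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   so a constant-size copy is expanded inline only while the estimated
   insn count stays below the target's MOVE_RATIO.  */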
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movmem_optab[NUM_MACHINE_MODES];
207 /* This array records the insn_code of insns to perform block sets. */
208 enum insn_code setmem_optab[NUM_MACHINE_MODES];
210 /* These arrays record the insn_code of three different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
214 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
216 /* Synchronization primitives. */
217 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
236 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
237 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
238 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
240 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
242 #ifndef SLOW_UNALIGNED_ACCESS
243 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
244 #endif
246 /* This is run to set up which modes can be used
247 directly in memory and to initialize the block move optab. It is run
248 at the beginning of compilation and when the target is reinitialized. */
250 void
251 init_expr_target (void)
253 rtx insn, pat;
254 enum machine_mode mode;
255 int num_clobbers;
256 rtx mem, mem1;
257 rtx reg;
259 /* Try indexing by frame ptr and try by stack ptr.
260 It is known that on the Convex the stack ptr isn't a valid index.
261 With luck, one or the other is valid on any machine. */
262 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
263 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
265 /* A scratch register we can modify in-place below to avoid
266 useless RTL allocations. */
267 reg = gen_rtx_REG (VOIDmode, -1);
269 insn = rtx_alloc (INSN);
270 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
271 PATTERN (insn) = pat;
273 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
274 mode = (enum machine_mode) ((int) mode + 1))
276 int regno;
278 direct_load[(int) mode] = direct_store[(int) mode] = 0;
279 PUT_MODE (mem, mode);
280 PUT_MODE (mem1, mode);
281 PUT_MODE (reg, mode);
283 /* See if there is some register that can be used in this mode and
284 directly loaded or stored from memory. */
286 if (mode != VOIDmode && mode != BLKmode)
287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
288 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
289 regno++)
291 if (! HARD_REGNO_MODE_OK (regno, mode))
292 continue;
294 SET_REGNO (reg, regno);
296 SET_SRC (pat) = mem;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
301 SET_SRC (pat) = mem1;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
306 SET_SRC (pat) = reg;
307 SET_DEST (pat) = mem;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem1;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
318 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
320 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
321 mode = GET_MODE_WIDER_MODE (mode))
323 enum machine_mode srcmode;
324 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
325 srcmode = GET_MODE_WIDER_MODE (srcmode))
327 enum insn_code ic;
329 ic = can_extend_p (mode, srcmode, 0);
330 if (ic == CODE_FOR_nothing)
331 continue;
333 PUT_MODE (mem, srcmode);
335 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
336 float_extend_from_mem[mode][srcmode] = true;
341 /* This is run at the start of compiling a function. */
343 void
344 init_expr (void)
346 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
349 /* Copy data from FROM to TO, where the machine modes are not the same.
350 Both modes may be integer, or both may be floating.
351 UNSIGNEDP should be nonzero if FROM is an unsigned type.
352 This causes zero-extension instead of sign-extension. */
354 void
355 convert_move (rtx to, rtx from, int unsignedp)
357 enum machine_mode to_mode = GET_MODE (to);
358 enum machine_mode from_mode = GET_MODE (from);
359 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
360 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
361 enum insn_code code;
362 rtx libcall;
364 /* rtx code for making an equivalent value. */
365 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
366 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
369 gcc_assert (to_real == from_real);
370 gcc_assert (to_mode != BLKmode);
371 gcc_assert (from_mode != BLKmode);
373 /* If the source and destination are already the same, then there's
374 nothing to do. */
375 if (to == from)
376 return;
378 /* If FROM is a SUBREG that indicates that we have already done at least
379 the required extension, strip it. We don't handle such SUBREGs as
380 TO here. */
382 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
383 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
384 >= GET_MODE_SIZE (to_mode))
385 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
386 from = gen_lowpart (to_mode, from), from_mode = to_mode;
388 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
390 if (to_mode == from_mode
391 || (from_mode == VOIDmode && CONSTANT_P (from)))
393 emit_move_insn (to, from);
394 return;
397 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
399 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
401 if (VECTOR_MODE_P (to_mode))
402 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
403 else
404 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
406 emit_move_insn (to, from);
407 return;
410 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
412 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
413 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
414 return;
417 if (to_real)
419 rtx value, insns;
420 convert_optab tab;
422 gcc_assert ((GET_MODE_PRECISION (from_mode)
423 != GET_MODE_PRECISION (to_mode))
424 || (DECIMAL_FLOAT_MODE_P (from_mode)
425 != DECIMAL_FLOAT_MODE_P (to_mode)));
427 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
428 /* Conversion between decimal float and binary float, same size. */
429 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
430 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
431 tab = sext_optab;
432 else
433 tab = trunc_optab;
435 /* Try converting directly if the insn is supported. */
437 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
438 if (code != CODE_FOR_nothing)
440 emit_unop_insn (code, to, from,
441 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
442 return;
445 /* Otherwise use a libcall. */
446 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
448 /* Is this conversion implemented yet? */
449 gcc_assert (libcall);
451 start_sequence ();
452 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
453 1, from, from_mode);
454 insns = get_insns ();
455 end_sequence ();
456 emit_libcall_block (insns, to, value,
457 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
458 from)
459 : gen_rtx_FLOAT_EXTEND (to_mode, from));
460 return;
463 /* Handle pointer conversion. */ /* SPEE 900220. */
464 /* Targets are expected to provide conversion insns between PxImode and
465 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
466 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
468 enum machine_mode full_mode
469 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
471 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
472 != CODE_FOR_nothing);
474 if (full_mode != from_mode)
475 from = convert_to_mode (full_mode, from, unsignedp);
476 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
477 to, from, UNKNOWN);
478 return;
480 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
482 rtx new_from;
483 enum machine_mode full_mode
484 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
486 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
487 != CODE_FOR_nothing);
489 if (to_mode == full_mode)
491 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
492 to, from, UNKNOWN);
493 return;
496 new_from = gen_reg_rtx (full_mode);
497 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
498 new_from, from, UNKNOWN);
500 /* else proceed to integer conversions below. */
501 from_mode = full_mode;
502 from = new_from;
505 /* Now both modes are integers. */
507 /* Handle expanding beyond a word. */
508 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
509 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
511 rtx insns;
512 rtx lowpart;
513 rtx fill_value;
514 rtx lowfrom;
515 int i;
516 enum machine_mode lowpart_mode;
517 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
519 /* Try converting directly if the insn is supported. */
520 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
521 != CODE_FOR_nothing)
523 /* If FROM is a SUBREG, put it into a register. Do this
524 so that we always generate the same set of insns for
525 better cse'ing; if an intermediate assignment occurred,
526 we won't be doing the operation directly on the SUBREG. */
527 if (optimize > 0 && GET_CODE (from) == SUBREG)
528 from = force_reg (from_mode, from);
529 emit_unop_insn (code, to, from, equiv_code);
530 return;
532 /* Next, try converting via full word. */
533 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
534 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
535 != CODE_FOR_nothing))
537 if (REG_P (to))
539 if (reg_overlap_mentioned_p (to, from))
540 from = force_reg (from_mode, from);
541 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
543 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
544 emit_unop_insn (code, to,
545 gen_lowpart (word_mode, to), equiv_code);
546 return;
549 /* No special multiword conversion insn; do it by hand. */
550 start_sequence ();
552 /* Since we will turn this into a no conflict block, we must ensure
553 that the source does not overlap the target. */
555 if (reg_overlap_mentioned_p (to, from))
556 from = force_reg (from_mode, from);
558 /* Get a copy of FROM widened to a word, if necessary. */
559 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
560 lowpart_mode = word_mode;
561 else
562 lowpart_mode = from_mode;
564 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
566 lowpart = gen_lowpart (lowpart_mode, to);
567 emit_move_insn (lowpart, lowfrom);
569 /* Compute the value to put in each remaining word. */
570 if (unsignedp)
571 fill_value = const0_rtx;
572 else
574 #ifdef HAVE_slt
575 if (HAVE_slt
576 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
577 && STORE_FLAG_VALUE == -1)
579 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
580 lowpart_mode, 0);
581 fill_value = gen_reg_rtx (word_mode);
582 emit_insn (gen_slt (fill_value));
584 else
585 #endif
587 fill_value
588 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
589 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
590 NULL_RTX, 0);
591 fill_value = convert_to_mode (word_mode, fill_value, 1);
595 /* Fill the remaining words. */
596 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
598 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
599 rtx subword = operand_subword (to, index, 1, to_mode);
601 gcc_assert (subword);
603 if (fill_value != subword)
604 emit_move_insn (subword, fill_value);
607 insns = get_insns ();
608 end_sequence ();
610 emit_no_conflict_block (insns, to, from, NULL_RTX,
611 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
612 return;
615 /* Truncating multi-word to a word or less. */
616 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
617 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
619 if (!((MEM_P (from)
620 && ! MEM_VOLATILE_P (from)
621 && direct_load[(int) to_mode]
622 && ! mode_dependent_address_p (XEXP (from, 0)))
623 || REG_P (from)
624 || GET_CODE (from) == SUBREG))
625 from = force_reg (from_mode, from);
626 convert_move (to, gen_lowpart (word_mode, from), 0);
627 return;
630 /* Now follow all the conversions between integers
631 no more than a word long. */
633 /* For truncation, usually we can just refer to FROM in a narrower mode. */
634 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
635 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
636 GET_MODE_BITSIZE (from_mode)))
638 if (!((MEM_P (from)
639 && ! MEM_VOLATILE_P (from)
640 && direct_load[(int) to_mode]
641 && ! mode_dependent_address_p (XEXP (from, 0)))
642 || REG_P (from)
643 || GET_CODE (from) == SUBREG))
644 from = force_reg (from_mode, from);
645 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
646 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
647 from = copy_to_reg (from);
648 emit_move_insn (to, gen_lowpart (to_mode, from));
649 return;
652 /* Handle extension. */
653 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
655 /* Convert directly if that works. */
656 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
657 != CODE_FOR_nothing)
659 emit_unop_insn (code, to, from, equiv_code);
660 return;
662 else
664 enum machine_mode intermediate;
665 rtx tmp;
666 tree shift_amount;
668 /* Search for a mode to convert via. */
669 for (intermediate = from_mode; intermediate != VOIDmode;
670 intermediate = GET_MODE_WIDER_MODE (intermediate))
671 if (((can_extend_p (to_mode, intermediate, unsignedp)
672 != CODE_FOR_nothing)
673 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
674 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
675 GET_MODE_BITSIZE (intermediate))))
676 && (can_extend_p (intermediate, from_mode, unsignedp)
677 != CODE_FOR_nothing))
679 convert_move (to, convert_to_mode (intermediate, from,
680 unsignedp), unsignedp);
681 return;
684 /* No suitable intermediate mode.
685 Generate what we need with shifts. */
686 shift_amount = build_int_cst (NULL_TREE,
687 GET_MODE_BITSIZE (to_mode)
688 - GET_MODE_BITSIZE (from_mode));
689 from = gen_lowpart (to_mode, force_reg (from_mode, from));
690 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
691 to, unsignedp);
692 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
693 to, unsignedp);
694 if (tmp != to)
695 emit_move_insn (to, tmp);
696 return;
700 /* Support special truncate insns for certain modes. */
701 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
703 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
704 to, from, UNKNOWN);
705 return;
708 /* Handle truncation of volatile memrefs, and so on;
709 the things that couldn't be truncated directly,
710 and for which there was no special instruction.
712 ??? Code above formerly short-circuited this, for most integer
713 mode pairs, with a force_reg in from_mode followed by a recursive
714 call to this routine. Appears always to have been wrong. */
715 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
717 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
718 emit_move_insn (to, temp);
719 return;
722 /* Mode combination is not recognized. */
723 gcc_unreachable ();
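/* Illustrative sketch (not part of the original file): a typical caller of
   convert_move widens a value into a fresh pseudo of the destination mode:

     rtx wide = gen_reg_rtx (SImode);
     convert_move (wide, narrow, 1);

   Passing UNSIGNEDP == 1 requests zero-extension; NARROW is assumed to be
   an rtx already holding a value in some narrower integer mode.  */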
726 /* Return an rtx for a value that would result
727 from converting X to mode MODE.
728 Both X and MODE may be floating, or both integer.
729 UNSIGNEDP is nonzero if X is an unsigned value.
730 This can be done by referring to a part of X in place
731 or by copying to a new temporary with conversion. */
734 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
736 return convert_modes (mode, VOIDmode, x, unsignedp);
739 /* Return an rtx for a value that would result
740 from converting X from mode OLDMODE to mode MODE.
741 Both modes may be floating, or both integer.
742 UNSIGNEDP is nonzero if X is an unsigned value.
744 This can be done by referring to a part of X in place
745 or by copying to a new temporary with conversion.
747 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
749 rtx
750 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
752 rtx temp;
754 /* If FROM is a SUBREG that indicates that we have already done at least
755 the required extension, strip it. */
757 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
758 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
759 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
760 x = gen_lowpart (mode, x);
762 if (GET_MODE (x) != VOIDmode)
763 oldmode = GET_MODE (x);
765 if (mode == oldmode)
766 return x;
768 /* There is one case that we must handle specially: If we are converting
769 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
770 we are to interpret the constant as unsigned, gen_lowpart will do
771 the wrong thing if the constant appears negative. What we want to do is
772 make the high-order word of the constant zero, not all ones. */
774 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
775 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
776 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
778 HOST_WIDE_INT val = INTVAL (x);
780 if (oldmode != VOIDmode
781 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
783 int width = GET_MODE_BITSIZE (oldmode);
785 /* We need to zero extend VAL. */
786 val &= ((HOST_WIDE_INT) 1 << width) - 1;
789 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
792 /* We can do this with a gen_lowpart if both desired and current modes
793 are integer, and this is either a constant integer, a register, or a
794 non-volatile MEM. Except for the constant case where MODE is no
795 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
797 if ((GET_CODE (x) == CONST_INT
798 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
799 || (GET_MODE_CLASS (mode) == MODE_INT
800 && GET_MODE_CLASS (oldmode) == MODE_INT
801 && (GET_CODE (x) == CONST_DOUBLE
802 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
803 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
804 && direct_load[(int) mode])
805 || (REG_P (x)
806 && (! HARD_REGISTER_P (x)
807 || HARD_REGNO_MODE_OK (REGNO (x), mode))
808 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
809 GET_MODE_BITSIZE (GET_MODE (x)))))))))
811 /* ?? If we don't know OLDMODE, we have to assume here that
812 X does not need sign- or zero-extension. This may not be
813 the case, but it's the best we can do. */
814 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
815 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
817 HOST_WIDE_INT val = INTVAL (x);
818 int width = GET_MODE_BITSIZE (oldmode);
820 /* We must sign or zero-extend in this case. Start by
821 zero-extending, then sign extend if we need to. */
822 val &= ((HOST_WIDE_INT) 1 << width) - 1;
823 if (! unsignedp
824 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
825 val |= (HOST_WIDE_INT) (-1) << width;
827 return gen_int_mode (val, mode);
830 return gen_lowpart (mode, x);
833 /* Converting an integer constant into a vector mode is always
834 equivalent to a subreg operation. */
835 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
837 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
838 return simplify_gen_subreg (mode, x, oldmode, 0);
841 temp = gen_reg_rtx (mode);
842 convert_move (temp, x, unsignedp);
843 return temp;
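/* Illustrative sketch (not part of the original file): convert_modes is
   handy when the operand may be a VOIDmode constant, e.g.

     rtx op = convert_modes (SImode, QImode, GEN_INT (0xff), 1);

   folds the conversion at compile time (zero-extending because UNSIGNEDP
   is 1), while a non-constant operand falls through to a new pseudo and
   convert_move.  */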
846 /* STORE_MAX_PIECES is the number of bytes at a time that we can
847 store efficiently. Due to internal GCC limitations, this is
848 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
849 for an immediate constant. */
851 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
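/* Worked example (an assumption, since the values are target- and
   host-dependent): with MOVE_MAX_PIECES == 8 and a 64-bit HOST_WIDE_INT,
   STORE_MAX_PIECES is MIN (8, 2 * 8) == 8 bytes per piece.  */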
853 /* Determine whether the LEN bytes can be moved by using several move
854 instructions. Return nonzero if a call to move_by_pieces should
855 succeed. */
857 int
858 can_move_by_pieces (unsigned HOST_WIDE_INT len,
859 unsigned int align ATTRIBUTE_UNUSED)
861 return MOVE_BY_PIECES_P (len, align);
864 /* Generate several move instructions to copy LEN bytes from block FROM to
865 block TO. (These are MEM rtx's with BLKmode).
867 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
868 used to push FROM to the stack.
870 ALIGN is the maximum stack alignment we can assume.
872 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
873 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
874 stpcpy. */
876 rtx
877 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
878 unsigned int align, int endp)
880 struct move_by_pieces data;
881 rtx to_addr, from_addr = XEXP (from, 0);
882 unsigned int max_size = MOVE_MAX_PIECES + 1;
883 enum machine_mode mode = VOIDmode, tmode;
884 enum insn_code icode;
886 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
888 data.offset = 0;
889 data.from_addr = from_addr;
890 if (to)
892 to_addr = XEXP (to, 0);
893 data.to = to;
894 data.autinc_to
895 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
896 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
897 data.reverse
898 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
900 else
902 to_addr = NULL_RTX;
903 data.to = NULL_RTX;
904 data.autinc_to = 1;
905 #ifdef STACK_GROWS_DOWNWARD
906 data.reverse = 1;
907 #else
908 data.reverse = 0;
909 #endif
911 data.to_addr = to_addr;
912 data.from = from;
913 data.autinc_from
914 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
915 || GET_CODE (from_addr) == POST_INC
916 || GET_CODE (from_addr) == POST_DEC);
918 data.explicit_inc_from = 0;
919 data.explicit_inc_to = 0;
920 if (data.reverse) data.offset = len;
921 data.len = len;
923 /* If copying requires more than two move insns,
924 copy addresses to registers (to make displacements shorter)
925 and use post-increment if available. */
926 if (!(data.autinc_from && data.autinc_to)
927 && move_by_pieces_ninsns (len, align, max_size) > 2)
929 /* Find the mode of the largest move... */
930 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
931 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
932 if (GET_MODE_SIZE (tmode) < max_size)
933 mode = tmode;
935 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
937 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
938 data.autinc_from = 1;
939 data.explicit_inc_from = -1;
941 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
943 data.from_addr = copy_addr_to_reg (from_addr);
944 data.autinc_from = 1;
945 data.explicit_inc_from = 1;
947 if (!data.autinc_from && CONSTANT_P (from_addr))
948 data.from_addr = copy_addr_to_reg (from_addr);
949 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
951 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
952 data.autinc_to = 1;
953 data.explicit_inc_to = -1;
955 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
957 data.to_addr = copy_addr_to_reg (to_addr);
958 data.autinc_to = 1;
959 data.explicit_inc_to = 1;
961 if (!data.autinc_to && CONSTANT_P (to_addr))
962 data.to_addr = copy_addr_to_reg (to_addr);
965 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
966 if (align >= GET_MODE_ALIGNMENT (tmode))
967 align = GET_MODE_ALIGNMENT (tmode);
968 else
970 enum machine_mode xmode;
972 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
973 tmode != VOIDmode;
974 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
975 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
976 || SLOW_UNALIGNED_ACCESS (tmode, align))
977 break;
979 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
982 /* First move what we can in the largest integer mode, then go to
983 successively smaller modes. */
985 while (max_size > 1)
987 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
988 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
989 if (GET_MODE_SIZE (tmode) < max_size)
990 mode = tmode;
992 if (mode == VOIDmode)
993 break;
995 icode = optab_handler (mov_optab, mode)->insn_code;
996 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
997 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
999 max_size = GET_MODE_SIZE (mode);
1002 /* The code above should have handled everything. */
1003 gcc_assert (!data.len);
1005 if (endp)
1007 rtx to1;
1009 gcc_assert (!data.reverse);
1010 if (data.autinc_to)
1012 if (endp == 2)
1014 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1015 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1016 else
1017 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1018 -1));
1020 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1021 data.offset);
1023 else
1025 if (endp == 2)
1026 --data.offset;
1027 to1 = adjust_address (data.to, QImode, data.offset);
1029 return to1;
1031 else
1032 return data.to;
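/* Illustrative sketch (not part of the original file): the ENDP argument
   mirrors the memcpy/mempcpy/stpcpy family.  A plain copy would be

     move_by_pieces (dst_mem, src_mem, nbytes, align, 0);

   whereas ENDP == 1 also returns a MEM addressing the byte just past the
   copied block and ENDP == 2 the last copied byte.  DST_MEM, SRC_MEM,
   NBYTES and ALIGN are assumed to be supplied by the caller.  */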
1035 /* Return the number of insns required to move L bytes by pieces.
1036 ALIGN (in bits) is the maximum alignment we can assume. */
1038 static unsigned HOST_WIDE_INT
1039 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1040 unsigned int max_size)
1042 unsigned HOST_WIDE_INT n_insns = 0;
1043 enum machine_mode tmode;
1045 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1046 if (align >= GET_MODE_ALIGNMENT (tmode))
1047 align = GET_MODE_ALIGNMENT (tmode);
1048 else
1050 enum machine_mode tmode, xmode;
1052 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1053 tmode != VOIDmode;
1054 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1055 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1056 || SLOW_UNALIGNED_ACCESS (tmode, align))
1057 break;
1059 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1062 while (max_size > 1)
1064 enum machine_mode mode = VOIDmode;
1065 enum insn_code icode;
1067 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1068 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1069 if (GET_MODE_SIZE (tmode) < max_size)
1070 mode = tmode;
1072 if (mode == VOIDmode)
1073 break;
1075 icode = optab_handler (mov_optab, mode)->insn_code;
1076 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1077 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1079 max_size = GET_MODE_SIZE (mode);
1082 gcc_assert (!l);
1083 return n_insns;
1086 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1087 with move instructions for mode MODE. GENFUN is the gen_... function
1088 to make a move insn for that mode. DATA has all the other info. */
1090 static void
1091 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1092 struct move_by_pieces *data)
1094 unsigned int size = GET_MODE_SIZE (mode);
1095 rtx to1 = NULL_RTX, from1;
1097 while (data->len >= size)
1099 if (data->reverse)
1100 data->offset -= size;
1102 if (data->to)
1104 if (data->autinc_to)
1105 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1106 data->offset);
1107 else
1108 to1 = adjust_address (data->to, mode, data->offset);
1111 if (data->autinc_from)
1112 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1113 data->offset);
1114 else
1115 from1 = adjust_address (data->from, mode, data->offset);
1117 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1118 emit_insn (gen_add2_insn (data->to_addr,
1119 GEN_INT (-(HOST_WIDE_INT)size)));
1120 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1121 emit_insn (gen_add2_insn (data->from_addr,
1122 GEN_INT (-(HOST_WIDE_INT)size)));
1124 if (data->to)
1125 emit_insn ((*genfun) (to1, from1));
1126 else
1128 #ifdef PUSH_ROUNDING
1129 emit_single_push_insn (mode, from1, NULL);
1130 #else
1131 gcc_unreachable ();
1132 #endif
1135 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1136 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1137 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1138 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1140 if (! data->reverse)
1141 data->offset += size;
1143 data->len -= size;
1147 /* Emit code to move a block Y to a block X. This may be done with
1148 string-move instructions, with multiple scalar move instructions,
1149 or with a library call.
1151 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1152 SIZE is an rtx that says how long they are.
1153 ALIGN is the maximum alignment we can assume they have.
1154 METHOD describes what kind of copy this is, and what mechanisms may be used.
1156 Return the address of the new block, if memcpy is called and returns it,
1157 0 otherwise. */
1159 rtx
1160 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1161 unsigned int expected_align, HOST_WIDE_INT expected_size)
1163 bool may_use_call;
1164 rtx retval = 0;
1165 unsigned int align;
1167 switch (method)
1169 case BLOCK_OP_NORMAL:
1170 case BLOCK_OP_TAILCALL:
1171 may_use_call = true;
1172 break;
1174 case BLOCK_OP_CALL_PARM:
1175 may_use_call = block_move_libcall_safe_for_call_parm ();
1177 /* Make inhibit_defer_pop nonzero around the library call
1178 to force it to pop the arguments right away. */
1179 NO_DEFER_POP;
1180 break;
1182 case BLOCK_OP_NO_LIBCALL:
1183 may_use_call = false;
1184 break;
1186 default:
1187 gcc_unreachable ();
1190 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1192 gcc_assert (MEM_P (x));
1193 gcc_assert (MEM_P (y));
1194 gcc_assert (size);
1196 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1197 block copy is more efficient for other large modes, e.g. DCmode. */
1198 x = adjust_address (x, BLKmode, 0);
1199 y = adjust_address (y, BLKmode, 0);
1201 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1202 can be incorrect is coming from __builtin_memcpy. */
1203 if (GET_CODE (size) == CONST_INT)
1205 if (INTVAL (size) == 0)
1206 return 0;
1208 x = shallow_copy_rtx (x);
1209 y = shallow_copy_rtx (y);
1210 set_mem_size (x, size);
1211 set_mem_size (y, size);
1214 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1215 move_by_pieces (x, y, INTVAL (size), align, 0);
1216 else if (emit_block_move_via_movmem (x, y, size, align,
1217 expected_align, expected_size))
1219 else if (may_use_call)
1220 retval = emit_block_move_via_libcall (x, y, size,
1221 method == BLOCK_OP_TAILCALL);
1222 else
1223 emit_block_move_via_loop (x, y, size, align);
1225 if (method == BLOCK_OP_CALL_PARM)
1226 OK_DEFER_POP;
1228 return retval;
1231 rtx
1232 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1234 return emit_block_move_hints (x, y, size, method, 0, -1);
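/* Illustrative sketch (not part of the original file): a typical aggregate
   copy in the expanders looks like

     emit_block_move (target_mem, source_mem, GEN_INT (size_in_bytes),
                      BLOCK_OP_NORMAL);

   where TARGET_MEM and SOURCE_MEM are assumed to be BLKmode MEMs.  The
   helper then tries move_by_pieces, a movmem pattern, a memcpy libcall and
   finally an explicit loop, in that order.  */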
1237 /* A subroutine of emit_block_move. Returns true if calling the
1238 block move libcall will not clobber any parameters which may have
1239 already been placed on the stack. */
1241 static bool
1242 block_move_libcall_safe_for_call_parm (void)
1244 /* If arguments are pushed on the stack, then they're safe. */
1245 if (PUSH_ARGS)
1246 return true;
1248 /* If registers go on the stack anyway, any argument is sure to clobber
1249 an outgoing argument. */
1250 #if defined (REG_PARM_STACK_SPACE)
1251 if (OUTGOING_REG_PARM_STACK_SPACE)
1253 tree fn;
1254 fn = emit_block_move_libcall_fn (false);
1255 if (REG_PARM_STACK_SPACE (fn) != 0)
1256 return false;
1258 #endif
1260 /* If any argument goes in memory, then it might clobber an outgoing
1261 argument. */
1263 CUMULATIVE_ARGS args_so_far;
1264 tree fn, arg;
1266 fn = emit_block_move_libcall_fn (false);
1267 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1269 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1270 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1272 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1273 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1274 if (!tmp || !REG_P (tmp))
1275 return false;
1276 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1277 return false;
1278 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1281 return true;
1284 /* A subroutine of emit_block_move. Expand a movmem pattern;
1285 return true if successful. */
1287 static bool
1288 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1289 unsigned int expected_align, HOST_WIDE_INT expected_size)
1291 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1292 int save_volatile_ok = volatile_ok;
1293 enum machine_mode mode;
1295 if (expected_align < align)
1296 expected_align = align;
1298 /* Since this is a move insn, we don't care about volatility. */
1299 volatile_ok = 1;
1301 /* Try the most limited insn first, because there's no point
1302 including more than one in the machine description unless
1303 the more limited one has some advantage. */
1305 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1306 mode = GET_MODE_WIDER_MODE (mode))
1308 enum insn_code code = movmem_optab[(int) mode];
1309 insn_operand_predicate_fn pred;
1311 if (code != CODE_FOR_nothing
1312 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1313 here because if SIZE is less than the mode mask, as it is
1314 returned by the macro, it will definitely be less than the
1315 actual mode mask. */
1316 && ((GET_CODE (size) == CONST_INT
1317 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1318 <= (GET_MODE_MASK (mode) >> 1)))
1319 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1320 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1321 || (*pred) (x, BLKmode))
1322 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1323 || (*pred) (y, BLKmode))
1324 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1325 || (*pred) (opalign, VOIDmode)))
1327 rtx op2;
1328 rtx last = get_last_insn ();
1329 rtx pat;
1331 op2 = convert_to_mode (mode, size, 1);
1332 pred = insn_data[(int) code].operand[2].predicate;
1333 if (pred != 0 && ! (*pred) (op2, mode))
1334 op2 = copy_to_mode_reg (mode, op2);
1336 /* ??? When called via emit_block_move_for_call, it'd be
1337 nice if there were some way to inform the backend, so
1338 that it doesn't fail the expansion because it thinks
1339 emitting the libcall would be more efficient. */
1341 if (insn_data[(int) code].n_operands == 4)
1342 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1343 else
1344 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1345 GEN_INT (expected_align),
1346 GEN_INT (expected_size));
1347 if (pat)
1349 emit_insn (pat);
1350 volatile_ok = save_volatile_ok;
1351 return true;
1353 else
1354 delete_insns_since (last);
1358 volatile_ok = save_volatile_ok;
1359 return false;
1362 /* A subroutine of emit_block_move. Expand a call to memcpy.
1363 Return the return value from memcpy, 0 otherwise. */
1365 rtx
1366 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1368 rtx dst_addr, src_addr;
1369 tree call_expr, fn, src_tree, dst_tree, size_tree;
1370 enum machine_mode size_mode;
1371 rtx retval;
1373 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1374 pseudos. We can then place those new pseudos into a VAR_DECL and
1375 use them later. */
1377 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1378 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1380 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1381 src_addr = convert_memory_address (ptr_mode, src_addr);
1383 dst_tree = make_tree (ptr_type_node, dst_addr);
1384 src_tree = make_tree (ptr_type_node, src_addr);
1386 size_mode = TYPE_MODE (sizetype);
1388 size = convert_to_mode (size_mode, size, 1);
1389 size = copy_to_mode_reg (size_mode, size);
1391 /* It is incorrect to use the libcall calling conventions to call
1392 memcpy in this context. This could be a user call to memcpy and
1393 the user may wish to examine the return value from memcpy. For
1394 targets where libcalls and normal calls have different conventions
1395 for returning pointers, we could end up generating incorrect code. */
1397 size_tree = make_tree (sizetype, size);
1399 fn = emit_block_move_libcall_fn (true);
1400 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1401 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1403 retval = expand_normal (call_expr);
1405 return retval;
1408 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1409 for the function we use for block copies. The first time FOR_CALL
1410 is true, we call assemble_external. */
1412 static GTY(()) tree block_move_fn;
1414 void
1415 init_block_move_fn (const char *asmspec)
1417 if (!block_move_fn)
1419 tree args, fn;
1421 fn = get_identifier ("memcpy");
1422 args = build_function_type_list (ptr_type_node, ptr_type_node,
1423 const_ptr_type_node, sizetype,
1424 NULL_TREE);
1426 fn = build_decl (FUNCTION_DECL, fn, args);
1427 DECL_EXTERNAL (fn) = 1;
1428 TREE_PUBLIC (fn) = 1;
1429 DECL_ARTIFICIAL (fn) = 1;
1430 TREE_NOTHROW (fn) = 1;
1431 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1432 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1434 block_move_fn = fn;
1437 if (asmspec)
1438 set_user_assembler_name (block_move_fn, asmspec);
1441 static tree
1442 emit_block_move_libcall_fn (int for_call)
1444 static bool emitted_extern;
1446 if (!block_move_fn)
1447 init_block_move_fn (NULL);
1449 if (for_call && !emitted_extern)
1451 emitted_extern = true;
1452 make_decl_rtl (block_move_fn);
1453 assemble_external (block_move_fn);
1456 return block_move_fn;
1459 /* A subroutine of emit_block_move. Copy the data via an explicit
1460 loop. This is used only when libcalls are forbidden. */
1461 /* ??? It'd be nice to copy in hunks larger than QImode. */
1463 static void
1464 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1465 unsigned int align ATTRIBUTE_UNUSED)
1467 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1468 enum machine_mode iter_mode;
1470 iter_mode = GET_MODE (size);
1471 if (iter_mode == VOIDmode)
1472 iter_mode = word_mode;
1474 top_label = gen_label_rtx ();
1475 cmp_label = gen_label_rtx ();
1476 iter = gen_reg_rtx (iter_mode);
1478 emit_move_insn (iter, const0_rtx);
1480 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1481 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1482 do_pending_stack_adjust ();
1484 emit_jump (cmp_label);
1485 emit_label (top_label);
1487 tmp = convert_modes (Pmode, iter_mode, iter, true);
1488 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1489 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1490 x = change_address (x, QImode, x_addr);
1491 y = change_address (y, QImode, y_addr);
1493 emit_move_insn (x, y);
1495 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1496 true, OPTAB_LIB_WIDEN);
1497 if (tmp != iter)
1498 emit_move_insn (iter, tmp);
1500 emit_label (cmp_label);
1502 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1503 true, top_label);
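/* Illustrative sketch (not part of the original file): the RTL emitted by
   emit_block_move_via_loop behaves like the C loop

     for (i = 0; i < size; i++)
       ((char *) x)[i] = ((char *) y)[i];

   i.e. a byte-at-a-time copy that first jumps to the comparison, so a
   zero SIZE copies nothing.  */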
1506 /* Copy all or part of a value X into registers starting at REGNO.
1507 The number of registers to be filled is NREGS. */
1509 void
1510 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1512 int i;
1513 #ifdef HAVE_load_multiple
1514 rtx pat;
1515 rtx last;
1516 #endif
1518 if (nregs == 0)
1519 return;
1521 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1522 x = validize_mem (force_const_mem (mode, x));
1524 /* See if the machine can do this with a load multiple insn. */
1525 #ifdef HAVE_load_multiple
1526 if (HAVE_load_multiple)
1528 last = get_last_insn ();
1529 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1530 GEN_INT (nregs));
1531 if (pat)
1533 emit_insn (pat);
1534 return;
1536 else
1537 delete_insns_since (last);
1539 #endif
1541 for (i = 0; i < nregs; i++)
1542 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1543 operand_subword_force (x, i, mode));
1546 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1547 The number of registers to be filled is NREGS. */
1549 void
1550 move_block_from_reg (int regno, rtx x, int nregs)
1552 int i;
1554 if (nregs == 0)
1555 return;
1557 /* See if the machine can do this with a store multiple insn. */
1558 #ifdef HAVE_store_multiple
1559 if (HAVE_store_multiple)
1561 rtx last = get_last_insn ();
1562 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1563 GEN_INT (nregs));
1564 if (pat)
1566 emit_insn (pat);
1567 return;
1569 else
1570 delete_insns_since (last);
1572 #endif
1574 for (i = 0; i < nregs; i++)
1576 rtx tem = operand_subword (x, i, 1, BLKmode);
1578 gcc_assert (tem);
1580 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1584 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1585 ORIG, where ORIG is a non-consecutive group of registers represented by
1586 a PARALLEL. The clone is identical to the original except in that the
1587 original set of registers is replaced by a new set of pseudo registers.
1588 The new set has the same modes as the original set. */
1590 rtx
1591 gen_group_rtx (rtx orig)
1593 int i, length;
1594 rtx *tmps;
1596 gcc_assert (GET_CODE (orig) == PARALLEL);
1598 length = XVECLEN (orig, 0);
1599 tmps = alloca (sizeof (rtx) * length);
1601 /* Skip a NULL entry in first slot. */
1602 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1604 if (i)
1605 tmps[0] = 0;
1607 for (; i < length; i++)
1609 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1610 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1612 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1615 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1618 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1619 except that values are placed in TMPS[i], and must later be moved
1620 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1622 static void
1623 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1625 rtx src;
1626 int start, i;
1627 enum machine_mode m = GET_MODE (orig_src);
1629 gcc_assert (GET_CODE (dst) == PARALLEL);
1631 if (m != VOIDmode
1632 && !SCALAR_INT_MODE_P (m)
1633 && !MEM_P (orig_src)
1634 && GET_CODE (orig_src) != CONCAT)
1636 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1637 if (imode == BLKmode)
1638 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1639 else
1640 src = gen_reg_rtx (imode);
1641 if (imode != BLKmode)
1642 src = gen_lowpart (GET_MODE (orig_src), src);
1643 emit_move_insn (src, orig_src);
1644 /* ...and back again. */
1645 if (imode != BLKmode)
1646 src = gen_lowpart (imode, src);
1647 emit_group_load_1 (tmps, dst, src, type, ssize);
1648 return;
1651 /* Check for a NULL entry, used to indicate that the parameter goes
1652 both on the stack and in registers. */
1653 if (XEXP (XVECEXP (dst, 0, 0), 0))
1654 start = 0;
1655 else
1656 start = 1;
1658 /* Process the pieces. */
1659 for (i = start; i < XVECLEN (dst, 0); i++)
1661 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1662 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1663 unsigned int bytelen = GET_MODE_SIZE (mode);
1664 int shift = 0;
1666 /* Handle trailing fragments that run over the size of the struct. */
1667 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1669 /* Arrange to shift the fragment to where it belongs.
1670 extract_bit_field loads to the lsb of the reg. */
1671 if (
1672 #ifdef BLOCK_REG_PADDING
1673 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1674 == (BYTES_BIG_ENDIAN ? upward : downward)
1675 #else
1676 BYTES_BIG_ENDIAN
1677 #endif
1679 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1680 bytelen = ssize - bytepos;
1681 gcc_assert (bytelen > 0);
1684 /* If we won't be loading directly from memory, protect the real source
1685 from strange tricks we might play; but make sure that the source can
1686 be loaded directly into the destination. */
1687 src = orig_src;
1688 if (!MEM_P (orig_src)
1689 && (!CONSTANT_P (orig_src)
1690 || (GET_MODE (orig_src) != mode
1691 && GET_MODE (orig_src) != VOIDmode)))
1693 if (GET_MODE (orig_src) == VOIDmode)
1694 src = gen_reg_rtx (mode);
1695 else
1696 src = gen_reg_rtx (GET_MODE (orig_src));
1698 emit_move_insn (src, orig_src);
1701 /* Optimize the access just a bit. */
1702 if (MEM_P (src)
1703 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1704 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1705 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1706 && bytelen == GET_MODE_SIZE (mode))
1708 tmps[i] = gen_reg_rtx (mode);
1709 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1711 else if (COMPLEX_MODE_P (mode)
1712 && GET_MODE (src) == mode
1713 && bytelen == GET_MODE_SIZE (mode))
1714 /* Let emit_move_complex do the bulk of the work. */
1715 tmps[i] = src;
1716 else if (GET_CODE (src) == CONCAT)
1718 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1719 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1721 if ((bytepos == 0 && bytelen == slen0)
1722 || (bytepos != 0 && bytepos + bytelen <= slen))
1724 /* The following assumes that the concatenated objects all
1725 have the same size. In this case, a simple calculation
1726 can be used to determine the object and the bit field
1727 to be extracted. */
1728 tmps[i] = XEXP (src, bytepos / slen0);
1729 if (! CONSTANT_P (tmps[i])
1730 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1731 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1732 (bytepos % slen0) * BITS_PER_UNIT,
1733 1, NULL_RTX, mode, mode);
1735 else
1737 rtx mem;
1739 gcc_assert (!bytepos);
1740 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1741 emit_move_insn (mem, src);
1742 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1743 0, 1, NULL_RTX, mode, mode);
1746 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1747 SIMD register, which is currently broken. Until we get GCC
1748 to emit proper RTL for these cases, let's dump to memory. */
1749 else if (VECTOR_MODE_P (GET_MODE (dst))
1750 && REG_P (src))
1752 int slen = GET_MODE_SIZE (GET_MODE (src));
1753 rtx mem;
1755 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1756 emit_move_insn (mem, src);
1757 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1759 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1760 && XVECLEN (dst, 0) > 1)
1761 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1762 else if (CONSTANT_P (src)
1763 || (REG_P (src) && GET_MODE (src) == mode))
1764 tmps[i] = src;
1765 else
1766 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1767 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1768 mode, mode);
1770 if (shift)
1771 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1772 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1776 /* Emit code to move a block SRC of type TYPE to a block DST,
1777 where DST is non-consecutive registers represented by a PARALLEL.
1778 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1779 if not known. */
1781 void
1782 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1784 rtx *tmps;
1785 int i;
1787 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1788 emit_group_load_1 (tmps, dst, src, type, ssize);
1790 /* Copy the extracted pieces into the proper (probable) hard regs. */
1791 for (i = 0; i < XVECLEN (dst, 0); i++)
1793 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1794 if (d == NULL)
1795 continue;
1796 emit_move_insn (d, tmps[i]);
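/* Illustrative sketch (not part of the original file): DST here is a
   PARALLEL such as (registers and offsets are made-up examples)

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   meaning bytes 0-7 of SRC belong in (reg:DI 100) and bytes 8-15 in
   (reg:DI 101); emit_group_load extracts and moves each piece.  */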
1800 /* Similar, but load SRC into new pseudos in a format that looks like
1801 PARALLEL. This can later be fed to emit_group_move to get things
1802 in the right place. */
1804 rtx
1805 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1807 rtvec vec;
1808 int i;
1810 vec = rtvec_alloc (XVECLEN (parallel, 0));
1811 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1813 /* Convert the vector to look just like the original PARALLEL, except
1814 with the computed values. */
1815 for (i = 0; i < XVECLEN (parallel, 0); i++)
1817 rtx e = XVECEXP (parallel, 0, i);
1818 rtx d = XEXP (e, 0);
1820 if (d)
1822 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1823 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1825 RTVEC_ELT (vec, i) = e;
1828 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1831 /* Emit code to move a block SRC to block DST, where SRC and DST are
1832 non-consecutive groups of registers, each represented by a PARALLEL. */
1834 void
1835 emit_group_move (rtx dst, rtx src)
1837 int i;
1839 gcc_assert (GET_CODE (src) == PARALLEL
1840 && GET_CODE (dst) == PARALLEL
1841 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1843 /* Skip first entry if NULL. */
1844 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1845 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1846 XEXP (XVECEXP (src, 0, i), 0));
1849 /* Move a group of registers represented by a PARALLEL into pseudos. */
1851 rtx
1852 emit_group_move_into_temps (rtx src)
1854 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1855 int i;
1857 for (i = 0; i < XVECLEN (src, 0); i++)
1859 rtx e = XVECEXP (src, 0, i);
1860 rtx d = XEXP (e, 0);
1862 if (d)
1863 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1864 RTVEC_ELT (vec, i) = e;
1867 return gen_rtx_PARALLEL (GET_MODE (src), vec);
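/* Illustrative sketch, not part of the original file: the two-step
   variant.  First extract SRC into fresh pseudos shaped like the given
   PARALLEL, then move that temporary group into the real (likely
   hard-register) destination group, so the final registers are not
   touched until every piece has been computed.  */

static void
example_group_move_via_temps (rtx dst_parallel, rtx src, tree type, int ssize)
{
  rtx temps = emit_group_load_into_temps (dst_parallel, src, type, ssize);
  emit_group_move (dst_parallel, temps);
}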
1870 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1871 where SRC is non-consecutive registers represented by a PARALLEL.
1872 SSIZE represents the total size of block ORIG_DST, or -1 if not
1873 known. */
1875 void
1876 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1878 rtx *tmps, dst;
1879 int start, finish, i;
1880 enum machine_mode m = GET_MODE (orig_dst);
1882 gcc_assert (GET_CODE (src) == PARALLEL);
1884 if (!SCALAR_INT_MODE_P (m)
1885 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1887 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1888 if (imode == BLKmode)
1889 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1890 else
1891 dst = gen_reg_rtx (imode);
1892 emit_group_store (dst, src, type, ssize);
1893 if (imode != BLKmode)
1894 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1895 emit_move_insn (orig_dst, dst);
1896 return;
1899 /* Check for a NULL entry, used to indicate that the parameter goes
1900 both on the stack and in registers. */
1901 if (XEXP (XVECEXP (src, 0, 0), 0))
1902 start = 0;
1903 else
1904 start = 1;
1905 finish = XVECLEN (src, 0);
1907 tmps = alloca (sizeof (rtx) * finish);
1909 /* Copy the (probable) hard regs into pseudos. */
1910 for (i = start; i < finish; i++)
1912 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1913 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1915 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1916 emit_move_insn (tmps[i], reg);
1918 else
1919 tmps[i] = reg;
1922 /* If we won't be storing directly into memory, protect the real destination
1923 from strange tricks we might play. */
1924 dst = orig_dst;
1925 if (GET_CODE (dst) == PARALLEL)
1927 rtx temp;
1929 /* We can get a PARALLEL dst if there is a conditional expression in
1930 a return statement. In that case, the dst and src are the same,
1931 so no action is necessary. */
1932 if (rtx_equal_p (dst, src))
1933 return;
1935 /* It is unclear if we can ever reach here, but we may as well handle
1936 it. Allocate a temporary, and split this into a store/load to/from
1937 the temporary. */
1939 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1940 emit_group_store (temp, src, type, ssize);
1941 emit_group_load (dst, temp, type, ssize);
1942 return;
1944 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1946 enum machine_mode outer = GET_MODE (dst);
1947 enum machine_mode inner;
1948 HOST_WIDE_INT bytepos;
1949 bool done = false;
1950 rtx temp;
1952 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1953 dst = gen_reg_rtx (outer);
1955 /* Make life a bit easier for combine. */
1956 /* If the first element of the vector is the low part
1957 of the destination mode, use a paradoxical subreg to
1958 initialize the destination. */
1959 if (start < finish)
1961 inner = GET_MODE (tmps[start]);
1962 bytepos = subreg_lowpart_offset (inner, outer);
1963 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1965 temp = simplify_gen_subreg (outer, tmps[start],
1966 inner, 0);
1967 if (temp)
1969 emit_move_insn (dst, temp);
1970 done = true;
1971 start++;
1976 /* If the first element wasn't the low part, try the last. */
1977 if (!done
1978 && start < finish - 1)
1980 inner = GET_MODE (tmps[finish - 1]);
1981 bytepos = subreg_lowpart_offset (inner, outer);
1982 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1984 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1985 inner, 0);
1986 if (temp)
1988 emit_move_insn (dst, temp);
1989 done = true;
1990 finish--;
1995 /* Otherwise, simply initialize the result to zero. */
1996 if (!done)
1997 emit_move_insn (dst, CONST0_RTX (outer));
2000 /* Process the pieces. */
2001 for (i = start; i < finish; i++)
2003 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2004 enum machine_mode mode = GET_MODE (tmps[i]);
2005 unsigned int bytelen = GET_MODE_SIZE (mode);
2006 rtx dest = dst;
2008 /* Handle trailing fragments that run over the size of the struct. */
2009 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2011 /* store_bit_field always takes its value from the lsb.
2012 Move the fragment to the lsb if it's not already there. */
2013 if (
2014 #ifdef BLOCK_REG_PADDING
2015 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2016 == (BYTES_BIG_ENDIAN ? upward : downward)
2017 #else
2018 BYTES_BIG_ENDIAN
2019 #endif
2022 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2023 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2024 build_int_cst (NULL_TREE, shift),
2025 tmps[i], 0);
2027 bytelen = ssize - bytepos;
2030 if (GET_CODE (dst) == CONCAT)
2032 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2033 dest = XEXP (dst, 0);
2034 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2036 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2037 dest = XEXP (dst, 1);
2039 else
2041 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2042 dest = assign_stack_temp (GET_MODE (dest),
2043 GET_MODE_SIZE (GET_MODE (dest)), 0);
2044 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2045 tmps[i]);
2046 dst = dest;
2047 break;
2051 /* Optimize the access just a bit. */
2052 if (MEM_P (dest)
2053 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2054 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2055 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2056 && bytelen == GET_MODE_SIZE (mode))
2057 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2058 else
2059 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2060 mode, tmps[i]);
2063 /* Copy from the pseudo into the (probable) hard reg. */
2064 if (orig_dst != dst)
2065 emit_move_insn (orig_dst, dst);
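/* Illustrative sketch, not part of the original file: spill a group of
   registers described by a PARALLEL into a BLKmode stack temporary.
   SRC_PARALLEL is assumed to use the EXPR_LIST-of-(reg, offset) layout
   shown above and to cover SIZE bytes.  */

static rtx
example_group_store (rtx src_parallel, tree type, int size)
{
  rtx slot = assign_stack_temp (BLKmode, size, 0);

  emit_group_store (slot, src_parallel, type, size);
  return slot;
}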
2068 /* Generate code to copy a BLKmode object of TYPE out of a
2069 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2070 is null, a stack temporary is created. TGTBLK is returned.
2072 The purpose of this routine is to handle functions that return
2073 BLKmode structures in registers. Some machines (the PA for example)
2074 want to return all small structures in registers regardless of the
2075 structure's alignment. */
2078 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2080 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2081 rtx src = NULL, dst = NULL;
2082 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2083 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2085 if (tgtblk == 0)
2087 tgtblk = assign_temp (build_qualified_type (type,
2088 (TYPE_QUALS (type)
2089 | TYPE_QUAL_CONST)),
2090 0, 1, 1);
2091 preserve_temp_slots (tgtblk);
2094 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2095 into a new pseudo which is a full word. */
2097 if (GET_MODE (srcreg) != BLKmode
2098 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2099 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2101 /* If the structure doesn't take up a whole number of words, see whether
2102 SRCREG is padded on the left or on the right. If it's on the left,
2103 set PADDING_CORRECTION to the number of bits to skip.
2105 In most ABIs, the structure will be returned at the least significant end of
2106 the register, which translates to right padding on little-endian
2107 targets and left padding on big-endian targets. The opposite
2108 holds if the structure is returned at the most significant
2109 end of the register. */
2110 if (bytes % UNITS_PER_WORD != 0
2111 && (targetm.calls.return_in_msb (type)
2112 ? !BYTES_BIG_ENDIAN
2113 : BYTES_BIG_ENDIAN))
2114 padding_correction
2115 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2117 /* Copy the structure BITSIZE bits at a time.
2119 We could probably emit more efficient code for machines which do not use
2120 strict alignment, but it doesn't seem worth the effort at the current
2121 time. */
2122 for (bitpos = 0, xbitpos = padding_correction;
2123 bitpos < bytes * BITS_PER_UNIT;
2124 bitpos += bitsize, xbitpos += bitsize)
2126 /* We need a new source operand each time xbitpos is on a
2127 word boundary and when xbitpos == padding_correction
2128 (the first time through). */
2129 if (xbitpos % BITS_PER_WORD == 0
2130 || xbitpos == padding_correction)
2131 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2132 GET_MODE (srcreg));
2134 /* We need a new destination operand each time bitpos is on
2135 a word boundary. */
2136 if (bitpos % BITS_PER_WORD == 0)
2137 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2139 /* Use xbitpos for the source extraction (right justified) and
2140 bitpos for the destination store (left justified). */
2141 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2142 extract_bit_field (src, bitsize,
2143 xbitpos % BITS_PER_WORD, 1,
2144 NULL_RTX, word_mode, word_mode));
2147 return tgtblk;
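/* Illustrative sketch, not part of the original file: a caller that has
   the (hard) return register SRCREG of a function returning a small
   BLKmode struct of type TYPE can spill the value to memory like this.
   Both arguments are assumed to come from the caller.  */

static rtx
example_copy_return_value (rtx srcreg, tree type)
{
  /* Passing a null target makes copy_blkmode_from_reg allocate a
     stack temporary of TYPE for us.  */
  return copy_blkmode_from_reg (NULL_RTX, srcreg, type);
}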
2150 /* Add a USE expression for REG to the (possibly empty) list pointed
2151 to by CALL_FUSAGE. REG must denote a hard register. */
2153 void
2154 use_reg (rtx *call_fusage, rtx reg)
2156 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2158 *call_fusage
2159 = gen_rtx_EXPR_LIST (VOIDmode,
2160 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2163 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2164 starting at REGNO. All of these registers must be hard registers. */
2166 void
2167 use_regs (rtx *call_fusage, int regno, int nregs)
2169 int i;
2171 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2173 for (i = 0; i < nregs; i++)
2174 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2177 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2178 PARALLEL REGS. This is for calls that pass values in multiple
2179 non-contiguous locations. The Irix 6 ABI has examples of this. */
2181 void
2182 use_group_regs (rtx *call_fusage, rtx regs)
2184 int i;
2186 for (i = 0; i < XVECLEN (regs, 0); i++)
2188 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2190 /* A NULL entry means the parameter goes both on the stack and in
2191 registers. This can also be a MEM for targets that pass values
2192 partially on the stack and partially in registers. */
2193 if (reg != 0 && REG_P (reg))
2194 use_reg (call_fusage, reg);
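/* Illustrative sketch, not part of the original file: building the
   CALL_INSN_FUNCTION_USAGE list for a call whose argument is passed in
   two consecutive hard registers.  ARG_REGNO is a hypothetical hard
   register number supplied by the caller.  */

static rtx
example_call_fusage (int arg_regno)
{
  rtx call_fusage = NULL_RTX;

  /* Record that the call reads both registers of the pair.  */
  use_regs (&call_fusage, arg_regno, 2);
  return call_fusage;
}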
2199 /* Determine whether the LEN bytes generated by CONSTFUN can be
2200 stored to memory using several move instructions. CONSTFUNDATA is
2201 a pointer which will be passed as argument in every CONSTFUN call.
2202 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2203 a memset operation and false if it's a copy of a constant string.
2204 Return nonzero if a call to store_by_pieces should succeed. */
2207 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2208 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2209 void *constfundata, unsigned int align, bool memsetp)
2211 unsigned HOST_WIDE_INT l;
2212 unsigned int max_size;
2213 HOST_WIDE_INT offset = 0;
2214 enum machine_mode mode, tmode;
2215 enum insn_code icode;
2216 int reverse;
2217 rtx cst;
2219 if (len == 0)
2220 return 1;
2222 if (! (memsetp
2223 ? SET_BY_PIECES_P (len, align)
2224 : STORE_BY_PIECES_P (len, align)))
2225 return 0;
2227 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2228 if (align >= GET_MODE_ALIGNMENT (tmode))
2229 align = GET_MODE_ALIGNMENT (tmode);
2230 else
2232 enum machine_mode xmode;
2234 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2235 tmode != VOIDmode;
2236 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2237 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2238 || SLOW_UNALIGNED_ACCESS (tmode, align))
2239 break;
2241 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2244 /* We would first store what we can in the largest integer mode, then go to
2245 successively smaller modes. */
2247 for (reverse = 0;
2248 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2249 reverse++)
2251 l = len;
2252 mode = VOIDmode;
2253 max_size = STORE_MAX_PIECES + 1;
2254 while (max_size > 1)
2256 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2257 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2258 if (GET_MODE_SIZE (tmode) < max_size)
2259 mode = tmode;
2261 if (mode == VOIDmode)
2262 break;
2264 icode = optab_handler (mov_optab, mode)->insn_code;
2265 if (icode != CODE_FOR_nothing
2266 && align >= GET_MODE_ALIGNMENT (mode))
2268 unsigned int size = GET_MODE_SIZE (mode);
2270 while (l >= size)
2272 if (reverse)
2273 offset -= size;
2275 cst = (*constfun) (constfundata, offset, mode);
2276 if (!LEGITIMATE_CONSTANT_P (cst))
2277 return 0;
2279 if (!reverse)
2280 offset += size;
2282 l -= size;
2286 max_size = GET_MODE_SIZE (mode);
2289 /* The code above should have handled everything. */
2290 gcc_assert (!l);
2293 return 1;
2296 /* Generate several move instructions to store LEN bytes generated by
2297 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2298 pointer which will be passed as argument in every CONSTFUN call.
2299 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2300 a memset operation and false if it's a copy of a constant string.
2301 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2302 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2303 stpcpy. */
2306 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2307 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2308 void *constfundata, unsigned int align, bool memsetp, int endp)
2310 struct store_by_pieces data;
2312 if (len == 0)
2314 gcc_assert (endp != 2);
2315 return to;
2318 gcc_assert (memsetp
2319 ? SET_BY_PIECES_P (len, align)
2320 : STORE_BY_PIECES_P (len, align));
2321 data.constfun = constfun;
2322 data.constfundata = constfundata;
2323 data.len = len;
2324 data.to = to;
2325 store_by_pieces_1 (&data, align);
2326 if (endp)
2328 rtx to1;
2330 gcc_assert (!data.reverse);
2331 if (data.autinc_to)
2333 if (endp == 2)
2335 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2336 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2337 else
2338 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2339 -1));
2341 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2342 data.offset);
2344 else
2346 if (endp == 2)
2347 --data.offset;
2348 to1 = adjust_address (data.to, QImode, data.offset);
2350 return to1;
2352 else
2353 return data.to;
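/* Illustrative sketch, not part of the original file: zero-fill LEN
   bytes of TO by pieces, guarded by can_store_by_pieces.  The callback
   mirrors clear_by_pieces_1 below: it simply produces a zero of the
   requested mode for every offset.  */

static rtx
example_zero_fill_constfun (void *data ATTRIBUTE_UNUSED,
                            HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                            enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
example_zero_fill (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, example_zero_fill_constfun, NULL, align, true))
    store_by_pieces (to, len, example_zero_fill_constfun, NULL, align,
                     true, 0);
}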
2356 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2357 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2359 static void
2360 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2362 struct store_by_pieces data;
2364 if (len == 0)
2365 return;
2367 data.constfun = clear_by_pieces_1;
2368 data.constfundata = NULL;
2369 data.len = len;
2370 data.to = to;
2371 store_by_pieces_1 (&data, align);
2374 /* Callback routine for clear_by_pieces.
2375 Return const0_rtx unconditionally. */
2377 static rtx
2378 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2379 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2380 enum machine_mode mode ATTRIBUTE_UNUSED)
2382 return const0_rtx;
2385 /* Subroutine of clear_by_pieces and store_by_pieces.
2386 Generate several move instructions to store LEN bytes of block TO. (A MEM
2387 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2389 static void
2390 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2391 unsigned int align ATTRIBUTE_UNUSED)
2393 rtx to_addr = XEXP (data->to, 0);
2394 unsigned int max_size = STORE_MAX_PIECES + 1;
2395 enum machine_mode mode = VOIDmode, tmode;
2396 enum insn_code icode;
2398 data->offset = 0;
2399 data->to_addr = to_addr;
2400 data->autinc_to
2401 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2402 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2404 data->explicit_inc_to = 0;
2405 data->reverse
2406 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2407 if (data->reverse)
2408 data->offset = data->len;
2410 /* If storing requires more than two move insns,
2411 copy addresses to registers (to make displacements shorter)
2412 and use post-increment if available. */
2413 if (!data->autinc_to
2414 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2416 /* Determine the main mode we'll be using. */
2417 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2418 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2419 if (GET_MODE_SIZE (tmode) < max_size)
2420 mode = tmode;
2422 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2424 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2425 data->autinc_to = 1;
2426 data->explicit_inc_to = -1;
2429 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2430 && ! data->autinc_to)
2432 data->to_addr = copy_addr_to_reg (to_addr);
2433 data->autinc_to = 1;
2434 data->explicit_inc_to = 1;
2437 if ( !data->autinc_to && CONSTANT_P (to_addr))
2438 data->to_addr = copy_addr_to_reg (to_addr);
2441 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2442 if (align >= GET_MODE_ALIGNMENT (tmode))
2443 align = GET_MODE_ALIGNMENT (tmode);
2444 else
2446 enum machine_mode xmode;
2448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2449 tmode != VOIDmode;
2450 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2451 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2452 || SLOW_UNALIGNED_ACCESS (tmode, align))
2453 break;
2455 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2458 /* First store what we can in the largest integer mode, then go to
2459 successively smaller modes. */
2461 while (max_size > 1)
2463 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2464 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2465 if (GET_MODE_SIZE (tmode) < max_size)
2466 mode = tmode;
2468 if (mode == VOIDmode)
2469 break;
2471 icode = optab_handler (mov_optab, mode)->insn_code;
2472 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2473 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2475 max_size = GET_MODE_SIZE (mode);
2478 /* The code above should have handled everything. */
2479 gcc_assert (!data->len);
2482 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2483 with move instructions for mode MODE. GENFUN is the gen_... function
2484 to make a move insn for that mode. DATA has all the other info. */
2486 static void
2487 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2488 struct store_by_pieces *data)
2490 unsigned int size = GET_MODE_SIZE (mode);
2491 rtx to1, cst;
2493 while (data->len >= size)
2495 if (data->reverse)
2496 data->offset -= size;
2498 if (data->autinc_to)
2499 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2500 data->offset);
2501 else
2502 to1 = adjust_address (data->to, mode, data->offset);
2504 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2505 emit_insn (gen_add2_insn (data->to_addr,
2506 GEN_INT (-(HOST_WIDE_INT) size)));
2508 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2509 emit_insn ((*genfun) (to1, cst));
2511 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2512 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2514 if (! data->reverse)
2515 data->offset += size;
2517 data->len -= size;
2521 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2522 its length in bytes. */
2525 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2526 unsigned int expected_align, HOST_WIDE_INT expected_size)
2528 enum machine_mode mode = GET_MODE (object);
2529 unsigned int align;
2531 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2533 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2534 just move a zero. Otherwise, do this a piece at a time. */
2535 if (mode != BLKmode
2536 && GET_CODE (size) == CONST_INT
2537 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2539 rtx zero = CONST0_RTX (mode);
2540 if (zero != NULL)
2542 emit_move_insn (object, zero);
2543 return NULL;
2546 if (COMPLEX_MODE_P (mode))
2548 zero = CONST0_RTX (GET_MODE_INNER (mode));
2549 if (zero != NULL)
2551 write_complex_part (object, zero, 0);
2552 write_complex_part (object, zero, 1);
2553 return NULL;
2558 if (size == const0_rtx)
2559 return NULL;
2561 align = MEM_ALIGN (object);
2563 if (GET_CODE (size) == CONST_INT
2564 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2565 clear_by_pieces (object, INTVAL (size), align);
2566 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2567 expected_align, expected_size))
2569 else
2570 return set_storage_via_libcall (object, size, const0_rtx,
2571 method == BLOCK_OP_TAILCALL);
2573 return NULL;
2577 clear_storage (rtx object, rtx size, enum block_op_methods method)
2579 return clear_storage_hints (object, size, method, 0, -1);
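/* Illustrative sketch, not part of the original file: zero a BLKmode
   stack temporary of SIZE bytes.  clear_storage then chooses between a
   plain zero move, clear_by_pieces, a setmem pattern and a memset
   libcall, depending on the target.  */

static rtx
example_clear_temp (HOST_WIDE_INT size)
{
  rtx slot = assign_stack_temp (BLKmode, size, 0);

  clear_storage (slot, GEN_INT (size), BLOCK_OP_NORMAL);
  return slot;
}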
2583 /* A subroutine of clear_storage. Expand a call to memset.
2584 Return the return value of memset, 0 otherwise. */
2587 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2589 tree call_expr, fn, object_tree, size_tree, val_tree;
2590 enum machine_mode size_mode;
2591 rtx retval;
2593 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2594 wrap those pseudos in tree nodes and use them in the call built below. */
2596 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2598 size_mode = TYPE_MODE (sizetype);
2599 size = convert_to_mode (size_mode, size, 1);
2600 size = copy_to_mode_reg (size_mode, size);
2602 /* It is incorrect to use the libcall calling conventions to call
2603 memset in this context. This could be a user call to memset and
2604 the user may wish to examine the return value from memset. For
2605 targets where libcalls and normal calls have different conventions
2606 for returning pointers, we could end up generating incorrect code. */
2608 object_tree = make_tree (ptr_type_node, object);
2609 if (GET_CODE (val) != CONST_INT)
2610 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2611 size_tree = make_tree (sizetype, size);
2612 val_tree = make_tree (integer_type_node, val);
2614 fn = clear_storage_libcall_fn (true);
2615 call_expr = build_call_expr (fn, 3,
2616 object_tree, integer_zero_node, size_tree);
2617 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2619 retval = expand_normal (call_expr);
2621 return retval;
2624 /* A subroutine of set_storage_via_libcall. Create the tree node
2625 for the function we use for block clears. The first time FOR_CALL
2626 is true, we call assemble_external. */
2628 static GTY(()) tree block_clear_fn;
2630 void
2631 init_block_clear_fn (const char *asmspec)
2633 if (!block_clear_fn)
2635 tree fn, args;
2637 fn = get_identifier ("memset");
2638 args = build_function_type_list (ptr_type_node, ptr_type_node,
2639 integer_type_node, sizetype,
2640 NULL_TREE);
2642 fn = build_decl (FUNCTION_DECL, fn, args);
2643 DECL_EXTERNAL (fn) = 1;
2644 TREE_PUBLIC (fn) = 1;
2645 DECL_ARTIFICIAL (fn) = 1;
2646 TREE_NOTHROW (fn) = 1;
2647 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2648 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2650 block_clear_fn = fn;
2653 if (asmspec)
2654 set_user_assembler_name (block_clear_fn, asmspec);
2657 static tree
2658 clear_storage_libcall_fn (int for_call)
2660 static bool emitted_extern;
2662 if (!block_clear_fn)
2663 init_block_clear_fn (NULL);
2665 if (for_call && !emitted_extern)
2667 emitted_extern = true;
2668 make_decl_rtl (block_clear_fn);
2669 assemble_external (block_clear_fn);
2672 return block_clear_fn;
2675 /* Expand a setmem pattern; return true if successful. */
2677 bool
2678 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2679 unsigned int expected_align, HOST_WIDE_INT expected_size)
2681 /* Try the most limited insn first, because there's no point
2682 including more than one in the machine description unless
2683 the more limited one has some advantage. */
2685 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2686 enum machine_mode mode;
2688 if (expected_align < align)
2689 expected_align = align;
2691 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2692 mode = GET_MODE_WIDER_MODE (mode))
2694 enum insn_code code = setmem_optab[(int) mode];
2695 insn_operand_predicate_fn pred;
2697 if (code != CODE_FOR_nothing
2698 /* We don't need MODE to be narrower than
2699 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2700 the mode mask, as it is returned by the macro, it will
2701 definitely be less than the actual mode mask. */
2702 && ((GET_CODE (size) == CONST_INT
2703 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2704 <= (GET_MODE_MASK (mode) >> 1)))
2705 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2706 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2707 || (*pred) (object, BLKmode))
2708 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2709 || (*pred) (opalign, VOIDmode)))
2711 rtx opsize, opchar;
2712 enum machine_mode char_mode;
2713 rtx last = get_last_insn ();
2714 rtx pat;
2716 opsize = convert_to_mode (mode, size, 1);
2717 pred = insn_data[(int) code].operand[1].predicate;
2718 if (pred != 0 && ! (*pred) (opsize, mode))
2719 opsize = copy_to_mode_reg (mode, opsize);
2721 opchar = val;
2722 char_mode = insn_data[(int) code].operand[2].mode;
2723 if (char_mode != VOIDmode)
2725 opchar = convert_to_mode (char_mode, opchar, 1);
2726 pred = insn_data[(int) code].operand[2].predicate;
2727 if (pred != 0 && ! (*pred) (opchar, char_mode))
2728 opchar = copy_to_mode_reg (char_mode, opchar);
2731 if (insn_data[(int) code].n_operands == 4)
2732 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2733 else
2734 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2735 GEN_INT (expected_align),
2736 GEN_INT (expected_size));
2737 if (pat)
2739 emit_insn (pat);
2740 return true;
2742 else
2743 delete_insns_since (last);
2747 return false;
2751 /* Write to one of the components of the complex value CPLX. Write VAL to
2752 the real part if IMAG_P is false, and the imaginary part if it's true. */
2754 static void
2755 write_complex_part (rtx cplx, rtx val, bool imag_p)
2757 enum machine_mode cmode;
2758 enum machine_mode imode;
2759 unsigned ibitsize;
2761 if (GET_CODE (cplx) == CONCAT)
2763 emit_move_insn (XEXP (cplx, imag_p), val);
2764 return;
2767 cmode = GET_MODE (cplx);
2768 imode = GET_MODE_INNER (cmode);
2769 ibitsize = GET_MODE_BITSIZE (imode);
2771 /* For MEMs simplify_gen_subreg may generate an invalid new address
2772 because, e.g., the original address is considered mode-dependent
2773 by the target, which restricts simplify_subreg from invoking
2774 adjust_address_nv. Instead of preparing fallback support for an
2775 invalid address, we call adjust_address_nv directly. */
2776 if (MEM_P (cplx))
2778 emit_move_insn (adjust_address_nv (cplx, imode,
2779 imag_p ? GET_MODE_SIZE (imode) : 0),
2780 val);
2781 return;
2784 /* If the sub-object is at least word sized, then we know that subregging
2785 will work. This special case is important, since store_bit_field
2786 wants to operate on integer modes, and there's rarely an OImode to
2787 correspond to TCmode. */
2788 if (ibitsize >= BITS_PER_WORD
2789 /* For hard regs we have exact predicates. Assume we can split
2790 the original object if it spans an even number of hard regs.
2791 This special case is important for SCmode on 64-bit platforms
2792 where the natural size of floating-point regs is 32-bit. */
2793 || (REG_P (cplx)
2794 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2795 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2797 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2798 imag_p ? GET_MODE_SIZE (imode) : 0);
2799 if (part)
2801 emit_move_insn (part, val);
2802 return;
2804 else
2805 /* simplify_gen_subreg may fail for sub-word MEMs. */
2806 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2809 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2812 /* Extract one of the components of the complex value CPLX. Extract the
2813 real part if IMAG_P is false, and the imaginary part if it's true. */
2815 static rtx
2816 read_complex_part (rtx cplx, bool imag_p)
2818 enum machine_mode cmode, imode;
2819 unsigned ibitsize;
2821 if (GET_CODE (cplx) == CONCAT)
2822 return XEXP (cplx, imag_p);
2824 cmode = GET_MODE (cplx);
2825 imode = GET_MODE_INNER (cmode);
2826 ibitsize = GET_MODE_BITSIZE (imode);
2828 /* Special case reads from complex constants that got spilled to memory. */
2829 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2831 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2832 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2834 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2835 if (CONSTANT_CLASS_P (part))
2836 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2840 /* For MEMs simplify_gen_subreg may generate an invalid new address
2841 because, e.g., the original address is considered mode-dependent
2842 by the target, which restricts simplify_subreg from invoking
2843 adjust_address_nv. Instead of preparing fallback support for an
2844 invalid address, we call adjust_address_nv directly. */
2845 if (MEM_P (cplx))
2846 return adjust_address_nv (cplx, imode,
2847 imag_p ? GET_MODE_SIZE (imode) : 0);
2849 /* If the sub-object is at least word sized, then we know that subregging
2850 will work. This special case is important, since extract_bit_field
2851 wants to operate on integer modes, and there's rarely an OImode to
2852 correspond to TCmode. */
2853 if (ibitsize >= BITS_PER_WORD
2854 /* For hard regs we have exact predicates. Assume we can split
2855 the original object if it spans an even number of hard regs.
2856 This special case is important for SCmode on 64-bit platforms
2857 where the natural size of floating-point regs is 32-bit. */
2858 || (REG_P (cplx)
2859 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2860 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2862 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2863 imag_p ? GET_MODE_SIZE (imode) : 0);
2864 if (ret)
2865 return ret;
2866 else
2867 /* simplify_gen_subreg may fail for sub-word MEMs. */
2868 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2871 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2872 true, NULL_RTX, imode, imode);
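/* Illustrative sketch, not part of the original file: swap the real and
   imaginary parts of the complex value CPLX into a fresh pseudo of the
   same complex mode, using the two helpers above.  */

static rtx
example_swap_complex_parts (rtx cplx)
{
  rtx tmp = gen_reg_rtx (GET_MODE (cplx));

  write_complex_part (tmp, read_complex_part (cplx, true), false);
  write_complex_part (tmp, read_complex_part (cplx, false), true);
  return tmp;
}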
2875 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2876 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2877 represented in NEW_MODE. If FORCE is true, this will never happen, as
2878 we'll force-create a SUBREG if needed. */
2880 static rtx
2881 emit_move_change_mode (enum machine_mode new_mode,
2882 enum machine_mode old_mode, rtx x, bool force)
2884 rtx ret;
2886 if (push_operand (x, GET_MODE (x)))
2888 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2889 MEM_COPY_ATTRIBUTES (ret, x);
2891 else if (MEM_P (x))
2893 /* We don't have to worry about changing the address since the
2894 size in bytes is supposed to be the same. */
2895 if (reload_in_progress)
2897 /* Copy the MEM to change the mode and move any
2898 substitutions from the old MEM to the new one. */
2899 ret = adjust_address_nv (x, new_mode, 0);
2900 copy_replacements (x, ret);
2902 else
2903 ret = adjust_address (x, new_mode, 0);
2905 else
2907 /* Note that we do want simplify_subreg's behavior of validating
2908 that the new mode is ok for a hard register. If we were to use
2909 simplify_gen_subreg, we would create the subreg, but would
2910 probably run into the target not being able to implement it. */
2911 /* Except, of course, when FORCE is true, when this is exactly what
2912 we want. Which is needed for CCmodes on some targets. */
2913 if (force)
2914 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2915 else
2916 ret = simplify_subreg (new_mode, x, old_mode, 0);
2919 return ret;
2922 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2923 an integer mode of the same size as MODE. Returns the instruction
2924 emitted, or NULL if such a move could not be generated. */
2926 static rtx
2927 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2929 enum machine_mode imode;
2930 enum insn_code code;
2932 /* There must exist a mode of the exact size we require. */
2933 imode = int_mode_for_mode (mode);
2934 if (imode == BLKmode)
2935 return NULL_RTX;
2937 /* The target must support moves in this mode. */
2938 code = optab_handler (mov_optab, imode)->insn_code;
2939 if (code == CODE_FOR_nothing)
2940 return NULL_RTX;
2942 x = emit_move_change_mode (imode, mode, x, force);
2943 if (x == NULL_RTX)
2944 return NULL_RTX;
2945 y = emit_move_change_mode (imode, mode, y, force);
2946 if (y == NULL_RTX)
2947 return NULL_RTX;
2948 return emit_insn (GEN_FCN (code) (x, y));
2951 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2952 Return an equivalent MEM that does not use an auto-increment. */
2954 static rtx
2955 emit_move_resolve_push (enum machine_mode mode, rtx x)
2957 enum rtx_code code = GET_CODE (XEXP (x, 0));
2958 HOST_WIDE_INT adjust;
2959 rtx temp;
2961 adjust = GET_MODE_SIZE (mode);
2962 #ifdef PUSH_ROUNDING
2963 adjust = PUSH_ROUNDING (adjust);
2964 #endif
2965 if (code == PRE_DEC || code == POST_DEC)
2966 adjust = -adjust;
2967 else if (code == PRE_MODIFY || code == POST_MODIFY)
2969 rtx expr = XEXP (XEXP (x, 0), 1);
2970 HOST_WIDE_INT val;
2972 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2973 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2974 val = INTVAL (XEXP (expr, 1));
2975 if (GET_CODE (expr) == MINUS)
2976 val = -val;
2977 gcc_assert (adjust == val || adjust == -val);
2978 adjust = val;
2981 /* Do not use anti_adjust_stack, since we don't want to update
2982 stack_pointer_delta. */
2983 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2984 GEN_INT (adjust), stack_pointer_rtx,
2985 0, OPTAB_LIB_WIDEN);
2986 if (temp != stack_pointer_rtx)
2987 emit_move_insn (stack_pointer_rtx, temp);
2989 switch (code)
2991 case PRE_INC:
2992 case PRE_DEC:
2993 case PRE_MODIFY:
2994 temp = stack_pointer_rtx;
2995 break;
2996 case POST_INC:
2997 case POST_DEC:
2998 case POST_MODIFY:
2999 temp = plus_constant (stack_pointer_rtx, -adjust);
3000 break;
3001 default:
3002 gcc_unreachable ();
3005 return replace_equiv_address (x, temp);
3008 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3009 X is known to satisfy push_operand, and MODE is known to be complex.
3010 Returns the last instruction emitted. */
3013 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3015 enum machine_mode submode = GET_MODE_INNER (mode);
3016 bool imag_first;
3018 #ifdef PUSH_ROUNDING
3019 unsigned int submodesize = GET_MODE_SIZE (submode);
3021 /* In case we output to the stack, but the size is smaller than the
3022 machine can push exactly, we need to use move instructions. */
3023 if (PUSH_ROUNDING (submodesize) != submodesize)
3025 x = emit_move_resolve_push (mode, x);
3026 return emit_move_insn (x, y);
3028 #endif
3030 /* Note that the real part always precedes the imag part in memory
3031 regardless of the machine's endianness. */
3032 switch (GET_CODE (XEXP (x, 0)))
3034 case PRE_DEC:
3035 case POST_DEC:
3036 imag_first = true;
3037 break;
3038 case PRE_INC:
3039 case POST_INC:
3040 imag_first = false;
3041 break;
3042 default:
3043 gcc_unreachable ();
3046 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3047 read_complex_part (y, imag_first));
3048 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3049 read_complex_part (y, !imag_first));
3052 /* A subroutine of emit_move_complex. Perform the move from Y to X
3053 via two moves of the parts. Returns the last instruction emitted. */
3056 emit_move_complex_parts (rtx x, rtx y)
3058 /* Show the output dies here. This is necessary for SUBREGs
3059 of pseudos since we cannot track their lifetimes correctly;
3060 hard regs shouldn't appear here except as return values. */
3061 if (!reload_completed && !reload_in_progress
3062 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3063 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3065 write_complex_part (x, read_complex_part (y, false), false);
3066 write_complex_part (x, read_complex_part (y, true), true);
3068 return get_last_insn ();
3071 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3072 MODE is known to be complex. Returns the last instruction emitted. */
3074 static rtx
3075 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3077 bool try_int;
3079 /* Need to take special care for pushes, to maintain proper ordering
3080 of the data, and possibly extra padding. */
3081 if (push_operand (x, mode))
3082 return emit_move_complex_push (mode, x, y);
3084 /* See if we can coerce the target into moving both values at once. */
3086 /* Move floating point as parts. */
3087 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3088 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3089 try_int = false;
3090 /* Not possible if the values are inherently not adjacent. */
3091 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3092 try_int = false;
3093 /* Is possible if both are registers (or subregs of registers). */
3094 else if (register_operand (x, mode) && register_operand (y, mode))
3095 try_int = true;
3096 /* If one of the operands is a memory, and alignment constraints
3097 are friendly enough, we may be able to do combined memory operations.
3098 We do not attempt this if Y is a constant because that combination is
3099 usually better with the by-parts thing below. */
3100 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3101 && (!STRICT_ALIGNMENT
3102 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3103 try_int = true;
3104 else
3105 try_int = false;
3107 if (try_int)
3109 rtx ret;
3111 /* For memory to memory moves, optimal behavior can be had with the
3112 existing block move logic. */
3113 if (MEM_P (x) && MEM_P (y))
3115 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3116 BLOCK_OP_NO_LIBCALL);
3117 return get_last_insn ();
3120 ret = emit_move_via_integer (mode, x, y, true);
3121 if (ret)
3122 return ret;
3125 return emit_move_complex_parts (x, y);
3128 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3129 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3131 static rtx
3132 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3134 rtx ret;
3136 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3137 if (mode != CCmode)
3139 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3140 if (code != CODE_FOR_nothing)
3142 x = emit_move_change_mode (CCmode, mode, x, true);
3143 y = emit_move_change_mode (CCmode, mode, y, true);
3144 return emit_insn (GEN_FCN (code) (x, y));
3148 /* Otherwise, find the MODE_INT mode of the same width. */
3149 ret = emit_move_via_integer (mode, x, y, false);
3150 gcc_assert (ret != NULL);
3151 return ret;
3154 /* Return true if word I of OP lies entirely in the
3155 undefined bits of a paradoxical subreg. */
3157 static bool
3158 undefined_operand_subword_p (const_rtx op, int i)
3160 enum machine_mode innermode, innermostmode;
3161 int offset;
3162 if (GET_CODE (op) != SUBREG)
3163 return false;
3164 innermode = GET_MODE (op);
3165 innermostmode = GET_MODE (SUBREG_REG (op));
3166 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3167 /* The SUBREG_BYTE represents offset, as if the value were stored in
3168 memory, except for a paradoxical subreg where we define
3169 SUBREG_BYTE to be 0; undo this exception as in
3170 simplify_subreg. */
3171 if (SUBREG_BYTE (op) == 0
3172 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3174 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3175 if (WORDS_BIG_ENDIAN)
3176 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3177 if (BYTES_BIG_ENDIAN)
3178 offset += difference % UNITS_PER_WORD;
3180 if (offset >= GET_MODE_SIZE (innermostmode)
3181 || offset <= -GET_MODE_SIZE (word_mode))
3182 return true;
3183 return false;
3186 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3187 MODE is any multi-word or full-word mode that lacks a move_insn
3188 pattern. Note that you will get better code if you define such
3189 patterns, even if they must turn into multiple assembler instructions. */
3191 static rtx
3192 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3194 rtx last_insn = 0;
3195 rtx seq, inner;
3196 bool need_clobber;
3197 int i;
3199 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3201 /* If X is a push on the stack, do the push now and replace
3202 X with a reference to the stack pointer. */
3203 if (push_operand (x, mode))
3204 x = emit_move_resolve_push (mode, x);
3206 /* If we are in reload, see if either operand is a MEM whose address
3207 is scheduled for replacement. */
3208 if (reload_in_progress && MEM_P (x)
3209 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3210 x = replace_equiv_address_nv (x, inner);
3211 if (reload_in_progress && MEM_P (y)
3212 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3213 y = replace_equiv_address_nv (y, inner);
3215 start_sequence ();
3217 need_clobber = false;
3218 for (i = 0;
3219 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3220 i++)
3222 rtx xpart = operand_subword (x, i, 1, mode);
3223 rtx ypart;
3225 /* Do not generate code for a move if it would come entirely
3226 from the undefined bits of a paradoxical subreg. */
3227 if (undefined_operand_subword_p (y, i))
3228 continue;
3230 ypart = operand_subword (y, i, 1, mode);
3232 /* If we can't get a part of Y, put Y into memory if it is a
3233 constant. Otherwise, force it into a register. Then we must
3234 be able to get a part of Y. */
3235 if (ypart == 0 && CONSTANT_P (y))
3237 y = use_anchored_address (force_const_mem (mode, y));
3238 ypart = operand_subword (y, i, 1, mode);
3240 else if (ypart == 0)
3241 ypart = operand_subword_force (y, i, mode);
3243 gcc_assert (xpart && ypart);
3245 need_clobber |= (GET_CODE (xpart) == SUBREG);
3247 last_insn = emit_move_insn (xpart, ypart);
3250 seq = get_insns ();
3251 end_sequence ();
3253 /* Show the output dies here. This is necessary for SUBREGs
3254 of pseudos since we cannot track their lifetimes correctly;
3255 hard regs shouldn't appear here except as return values.
3256 We never want to emit such a clobber after reload. */
3257 if (x != y
3258 && ! (reload_in_progress || reload_completed)
3259 && need_clobber != 0)
3260 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3262 emit_insn (seq);
3264 return last_insn;
3267 /* Low level part of emit_move_insn.
3268 Called just like emit_move_insn, but assumes X and Y
3269 are basically valid. */
3272 emit_move_insn_1 (rtx x, rtx y)
3274 enum machine_mode mode = GET_MODE (x);
3275 enum insn_code code;
3277 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3279 code = optab_handler (mov_optab, mode)->insn_code;
3280 if (code != CODE_FOR_nothing)
3281 return emit_insn (GEN_FCN (code) (x, y));
3283 /* Expand complex moves by moving real part and imag part. */
3284 if (COMPLEX_MODE_P (mode))
3285 return emit_move_complex (mode, x, y);
3287 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3289 rtx result = emit_move_via_integer (mode, x, y, true);
3291 /* If we can't find an integer mode, use multi words. */
3292 if (result)
3293 return result;
3294 else
3295 return emit_move_multi_word (mode, x, y);
3298 if (GET_MODE_CLASS (mode) == MODE_CC)
3299 return emit_move_ccmode (mode, x, y);
3301 /* Try using a move pattern for the corresponding integer mode. This is
3302 only safe when simplify_subreg can convert MODE constants into integer
3303 constants. At present, it can only do this reliably if the value
3304 fits within a HOST_WIDE_INT. */
3305 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3307 rtx ret = emit_move_via_integer (mode, x, y, false);
3308 if (ret)
3309 return ret;
3312 return emit_move_multi_word (mode, x, y);
3315 /* Generate code to copy Y into X.
3316 Both Y and X must have the same mode, except that
3317 Y can be a constant with VOIDmode.
3318 This mode cannot be BLKmode; use emit_block_move for that.
3320 Return the last instruction emitted. */
3323 emit_move_insn (rtx x, rtx y)
3325 enum machine_mode mode = GET_MODE (x);
3326 rtx y_cst = NULL_RTX;
3327 rtx last_insn, set;
3329 gcc_assert (mode != BLKmode
3330 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3332 if (CONSTANT_P (y))
3334 if (optimize
3335 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3336 && (last_insn = compress_float_constant (x, y)))
3337 return last_insn;
3339 y_cst = y;
3341 if (!LEGITIMATE_CONSTANT_P (y))
3343 y = force_const_mem (mode, y);
3345 /* If the target's cannot_force_const_mem prevented the spill,
3346 assume that the target's move expanders will also take care
3347 of the non-legitimate constant. */
3348 if (!y)
3349 y = y_cst;
3350 else
3351 y = use_anchored_address (y);
3355 /* If X or Y are memory references, verify that their addresses are valid
3356 for the machine. */
3357 if (MEM_P (x)
3358 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3359 && ! push_operand (x, GET_MODE (x)))
3360 || (flag_force_addr
3361 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3362 x = validize_mem (x);
3364 if (MEM_P (y)
3365 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3366 || (flag_force_addr
3367 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3368 y = validize_mem (y);
3370 gcc_assert (mode != BLKmode);
3372 last_insn = emit_move_insn_1 (x, y);
3374 if (y_cst && REG_P (x)
3375 && (set = single_set (last_insn)) != NULL_RTX
3376 && SET_DEST (set) == x
3377 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3378 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3380 return last_insn;
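/* Illustrative sketch, not part of the original file: the typical use of
   emit_move_insn is simply to copy one operand into another of the same
   mode; here an SImode constant (a VOIDmode CONST_INT, which the
   interface allows) into a fresh pseudo.  */

static rtx
example_load_constant (HOST_WIDE_INT value)
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (value));
  return reg;
}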
3383 /* If Y is representable exactly in a narrower mode, and the target can
3384 perform the extension directly from constant or memory, then emit the
3385 move as an extension. */
3387 static rtx
3388 compress_float_constant (rtx x, rtx y)
3390 enum machine_mode dstmode = GET_MODE (x);
3391 enum machine_mode orig_srcmode = GET_MODE (y);
3392 enum machine_mode srcmode;
3393 REAL_VALUE_TYPE r;
3394 int oldcost, newcost;
3396 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3398 if (LEGITIMATE_CONSTANT_P (y))
3399 oldcost = rtx_cost (y, SET);
3400 else
3401 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3403 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3404 srcmode != orig_srcmode;
3405 srcmode = GET_MODE_WIDER_MODE (srcmode))
3407 enum insn_code ic;
3408 rtx trunc_y, last_insn;
3410 /* Skip if the target can't extend this way. */
3411 ic = can_extend_p (dstmode, srcmode, 0);
3412 if (ic == CODE_FOR_nothing)
3413 continue;
3415 /* Skip if the narrowed value isn't exact. */
3416 if (! exact_real_truncate (srcmode, &r))
3417 continue;
3419 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3421 if (LEGITIMATE_CONSTANT_P (trunc_y))
3423 /* Skip if the target needs extra instructions to perform
3424 the extension. */
3425 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3426 continue;
3427 /* This is valid, but may not be cheaper than the original. */
3428 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3429 if (oldcost < newcost)
3430 continue;
3432 else if (float_extend_from_mem[dstmode][srcmode])
3434 trunc_y = force_const_mem (srcmode, trunc_y);
3435 /* This is valid, but may not be cheaper than the original. */
3436 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3437 if (oldcost < newcost)
3438 continue;
3439 trunc_y = validize_mem (trunc_y);
3441 else
3442 continue;
3444 /* For CSE's benefit, force the compressed constant pool entry
3445 into a new pseudo. This constant may be used in different modes,
3446 and if not, combine will put things back together for us. */
3447 trunc_y = force_reg (srcmode, trunc_y);
3448 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3449 last_insn = get_last_insn ();
3451 if (REG_P (x))
3452 set_unique_reg_note (last_insn, REG_EQUAL, y);
3454 return last_insn;
3457 return NULL_RTX;
3460 /* Pushing data onto the stack. */
3462 /* Push a block of length SIZE (perhaps variable)
3463 and return an rtx to address the beginning of the block.
3464 The value may be virtual_outgoing_args_rtx.
3466 EXTRA is the number of bytes of padding to push in addition to SIZE.
3467 BELOW nonzero means this padding comes at low addresses;
3468 otherwise, the padding comes at high addresses. */
3471 push_block (rtx size, int extra, int below)
3473 rtx temp;
3475 size = convert_modes (Pmode, ptr_mode, size, 1);
3476 if (CONSTANT_P (size))
3477 anti_adjust_stack (plus_constant (size, extra));
3478 else if (REG_P (size) && extra == 0)
3479 anti_adjust_stack (size);
3480 else
3482 temp = copy_to_mode_reg (Pmode, size);
3483 if (extra != 0)
3484 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3485 temp, 0, OPTAB_LIB_WIDEN);
3486 anti_adjust_stack (temp);
3489 #ifndef STACK_GROWS_DOWNWARD
3490 if (0)
3491 #else
3492 if (1)
3493 #endif
3495 temp = virtual_outgoing_args_rtx;
3496 if (extra != 0 && below)
3497 temp = plus_constant (temp, extra);
3499 else
3501 if (GET_CODE (size) == CONST_INT)
3502 temp = plus_constant (virtual_outgoing_args_rtx,
3503 -INTVAL (size) - (below ? 0 : extra));
3504 else if (extra != 0 && !below)
3505 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3506 negate_rtx (Pmode, plus_constant (size, extra)));
3507 else
3508 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3509 negate_rtx (Pmode, size));
3512 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
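/* Illustrative sketch, not part of the original file: reserve SIZE bytes
   of outgoing-argument space and obtain a BLKmode MEM addressing it,
   ready for a block copy of an argument.  No extra padding is requested.  */

static rtx
example_push_block (rtx size)
{
  rtx addr = push_block (size, 0, 0);

  return gen_rtx_MEM (BLKmode, addr);
}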
3515 #ifdef PUSH_ROUNDING
3517 /* Emit single push insn. */
3519 static void
3520 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3522 rtx dest_addr;
3523 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3524 rtx dest;
3525 enum insn_code icode;
3526 insn_operand_predicate_fn pred;
3528 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3529 /* If there is a push pattern, use it. Otherwise try the old way of
3530 throwing a MEM representing the push operation to the move expander. */
3531 icode = optab_handler (push_optab, mode)->insn_code;
3532 if (icode != CODE_FOR_nothing)
3534 if (((pred = insn_data[(int) icode].operand[0].predicate)
3535 && !((*pred) (x, mode))))
3536 x = force_reg (mode, x);
3537 emit_insn (GEN_FCN (icode) (x));
3538 return;
3540 if (GET_MODE_SIZE (mode) == rounded_size)
3541 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3542 /* If we are to pad downward, adjust the stack pointer first and
3543 then store X into the stack location using an offset. This is
3544 because emit_move_insn does not know how to pad; it does not have
3545 access to type. */
3546 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3548 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3549 HOST_WIDE_INT offset;
3551 emit_move_insn (stack_pointer_rtx,
3552 expand_binop (Pmode,
3553 #ifdef STACK_GROWS_DOWNWARD
3554 sub_optab,
3555 #else
3556 add_optab,
3557 #endif
3558 stack_pointer_rtx,
3559 GEN_INT (rounded_size),
3560 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3562 offset = (HOST_WIDE_INT) padding_size;
3563 #ifdef STACK_GROWS_DOWNWARD
3564 if (STACK_PUSH_CODE == POST_DEC)
3565 /* We have already decremented the stack pointer, so get the
3566 previous value. */
3567 offset += (HOST_WIDE_INT) rounded_size;
3568 #else
3569 if (STACK_PUSH_CODE == POST_INC)
3570 /* We have already incremented the stack pointer, so get the
3571 previous value. */
3572 offset -= (HOST_WIDE_INT) rounded_size;
3573 #endif
3574 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3576 else
3578 #ifdef STACK_GROWS_DOWNWARD
3579 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3580 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3581 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3582 #else
3583 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3584 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3585 GEN_INT (rounded_size));
3586 #endif
3587 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3590 dest = gen_rtx_MEM (mode, dest_addr);
3592 if (type != 0)
3594 set_mem_attributes (dest, type, 1);
3596 if (flag_optimize_sibling_calls)
3597 /* Function incoming arguments may overlap with sibling call
3598 outgoing arguments and we cannot allow reordering of reads
3599 from function arguments with stores to outgoing arguments
3600 of sibling calls. */
3601 set_mem_alias_set (dest, 0);
3603 emit_move_insn (dest, x);
3605 #endif
3607 /* Generate code to push X onto the stack, assuming it has mode MODE and
3608 type TYPE.
3609 MODE is redundant except when X is a CONST_INT (since they don't
3610 carry mode info).
3611 SIZE is an rtx for the size of data to be copied (in bytes),
3612 needed only if X is BLKmode.
3614 ALIGN (in bits) is maximum alignment we can assume.
3616 If PARTIAL and REG are both nonzero, then copy that many of the first
3617 bytes of X into registers starting with REG, and push the rest of X.
3618 The amount of space pushed is decreased by PARTIAL bytes.
3619 REG must be a hard register in this case.
3620 If REG is zero but PARTIAL is not, take all other actions for an
3621 argument partially in registers, but do not actually load any
3622 registers.
3624 EXTRA is the amount in bytes of extra space to leave next to this arg.
3625 This is ignored if an argument block has already been allocated.
3627 On a machine that lacks real push insns, ARGS_ADDR is the address of
3628 the bottom of the argument block for this call. We use indexing off there
3629 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3630 argument block has not been preallocated.
3632 ARGS_SO_FAR is the size of args previously pushed for this call.
3634 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3635 for arguments passed in registers. If nonzero, it will be the number
3636 of bytes required. */
3638 void
3639 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3640 unsigned int align, int partial, rtx reg, int extra,
3641 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3642 rtx alignment_pad)
3644 rtx xinner;
3645 enum direction stack_direction
3646 #ifdef STACK_GROWS_DOWNWARD
3647 = downward;
3648 #else
3649 = upward;
3650 #endif
3652 /* Decide where to pad the argument: `downward' for below,
3653 `upward' for above, or `none' for don't pad it.
3654 Default is below for small data on big-endian machines; else above. */
3655 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3657 /* Invert direction if stack is post-decrement.
3658 FIXME: why? */
3659 if (STACK_PUSH_CODE == POST_DEC)
3660 if (where_pad != none)
3661 where_pad = (where_pad == downward ? upward : downward);
3663 xinner = x;
3665 if (mode == BLKmode
3666 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3668 /* Copy a block into the stack, entirely or partially. */
3670 rtx temp;
3671 int used;
3672 int offset;
3673 int skip;
3675 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3676 used = partial - offset;
3678 if (mode != BLKmode)
3680 /* A value is to be stored in an insufficiently aligned
3681 stack slot; copy via a suitably aligned slot if
3682 necessary. */
3683 size = GEN_INT (GET_MODE_SIZE (mode));
3684 if (!MEM_P (xinner))
3686 temp = assign_temp (type, 0, 1, 1);
3687 emit_move_insn (temp, xinner);
3688 xinner = temp;
3692 gcc_assert (size);
3694 /* USED is now the # of bytes we need not copy to the stack
3695 because registers will take care of them. */
3697 if (partial != 0)
3698 xinner = adjust_address (xinner, BLKmode, used);
3700 /* If the partial register-part of the arg counts in its stack size,
3701 skip the part of stack space corresponding to the registers.
3702 Otherwise, start copying to the beginning of the stack space,
3703 by setting SKIP to 0. */
3704 skip = (reg_parm_stack_space == 0) ? 0 : used;
3706 #ifdef PUSH_ROUNDING
3707 /* Do it with several push insns if that doesn't take lots of insns
3708 and if there is no difficulty with push insns that skip bytes
3709 on the stack for alignment purposes. */
3710 if (args_addr == 0
3711 && PUSH_ARGS
3712 && GET_CODE (size) == CONST_INT
3713 && skip == 0
3714 && MEM_ALIGN (xinner) >= align
3715 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3716 /* Here we avoid the case of a structure whose weak alignment
3717 forces many pushes of a small amount of data,
3718 and such small pushes do rounding that causes trouble. */
3719 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3720 || align >= BIGGEST_ALIGNMENT
3721 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3722 == (align / BITS_PER_UNIT)))
3723 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3725 /* Push padding now if padding above and stack grows down,
3726 or if padding below and stack grows up.
3727 But if space already allocated, this has already been done. */
3728 if (extra && args_addr == 0
3729 && where_pad != none && where_pad != stack_direction)
3730 anti_adjust_stack (GEN_INT (extra));
3732 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3734 else
3735 #endif /* PUSH_ROUNDING */
3737 rtx target;
3739 /* Otherwise make space on the stack and copy the data
3740 to the address of that space. */
3742 /* Deduct the bytes put into registers from the size we must copy. */
3743 if (partial != 0)
3745 if (GET_CODE (size) == CONST_INT)
3746 size = GEN_INT (INTVAL (size) - used);
3747 else
3748 size = expand_binop (GET_MODE (size), sub_optab, size,
3749 GEN_INT (used), NULL_RTX, 0,
3750 OPTAB_LIB_WIDEN);
3753 /* Get the address of the stack space.
3754 In this case, we do not deal with EXTRA separately.
3755 A single stack adjust will do. */
3756 if (! args_addr)
3758 temp = push_block (size, extra, where_pad == downward);
3759 extra = 0;
3761 else if (GET_CODE (args_so_far) == CONST_INT)
3762 temp = memory_address (BLKmode,
3763 plus_constant (args_addr,
3764 skip + INTVAL (args_so_far)));
3765 else
3766 temp = memory_address (BLKmode,
3767 plus_constant (gen_rtx_PLUS (Pmode,
3768 args_addr,
3769 args_so_far),
3770 skip));
3772 if (!ACCUMULATE_OUTGOING_ARGS)
3774 /* If the source is referenced relative to the stack pointer,
3775 copy it to another register to stabilize it. We do not need
3776 to do this if we know that we won't be changing sp. */
3778 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3779 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3780 temp = copy_to_reg (temp);
3783 target = gen_rtx_MEM (BLKmode, temp);
3785 /* We do *not* set_mem_attributes here, because incoming arguments
3786 may overlap with sibling call outgoing arguments and we cannot
3787 allow reordering of reads from function arguments with stores
3788 to outgoing arguments of sibling calls. We do, however, want
3789 to record the alignment of the stack slot. */
3790 /* ALIGN may well be better aligned than TYPE, e.g. due to
3791 PARM_BOUNDARY. Assume the caller isn't lying. */
3792 set_mem_align (target, align);
3794 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3797 else if (partial > 0)
3799 /* Scalar partly in registers. */
3801 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3802 int i;
3803 int not_stack;
3804 /* # bytes of start of argument
3805 that we must make space for but need not store. */
3806 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3807 int args_offset = INTVAL (args_so_far);
3808 int skip;
3810 /* Push padding now if padding above and stack grows down,
3811 or if padding below and stack grows up.
3812 But if space already allocated, this has already been done. */
3813 if (extra && args_addr == 0
3814 && where_pad != none && where_pad != stack_direction)
3815 anti_adjust_stack (GEN_INT (extra));
3817 /* If we make space by pushing it, we might as well push
3818 the real data. Otherwise, we can leave OFFSET nonzero
3819 and leave the space uninitialized. */
3820 if (args_addr == 0)
3821 offset = 0;
3823 /* Now NOT_STACK gets the number of words that we don't need to
3824 allocate on the stack. Convert OFFSET to words too. */
3825 not_stack = (partial - offset) / UNITS_PER_WORD;
3826 offset /= UNITS_PER_WORD;
3828 /* If the partial register-part of the arg counts in its stack size,
3829 skip the part of stack space corresponding to the registers.
3830 Otherwise, start copying to the beginning of the stack space,
3831 by setting SKIP to 0. */
3832 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3834 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3835 x = validize_mem (force_const_mem (mode, x));
3837 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3838 SUBREGs of such registers are not allowed. */
3839 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3840 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3841 x = copy_to_reg (x);
3843 /* Loop over all the words allocated on the stack for this arg. */
3844 /* We can do it by words, because any scalar bigger than a word
3845 has a size that is a multiple of a word. */
3846 #ifndef PUSH_ARGS_REVERSED
3847 for (i = not_stack; i < size; i++)
3848 #else
3849 for (i = size - 1; i >= not_stack; i--)
3850 #endif
3851 if (i >= not_stack + offset)
3852 emit_push_insn (operand_subword_force (x, i, mode),
3853 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3854 0, args_addr,
3855 GEN_INT (args_offset + ((i - not_stack + skip)
3856 * UNITS_PER_WORD)),
3857 reg_parm_stack_space, alignment_pad);
3859 else
3861 rtx addr;
3862 rtx dest;
3864 /* Push padding now if padding above and stack grows down,
3865 or if padding below and stack grows up.
3866 But if space already allocated, this has already been done. */
3867 if (extra && args_addr == 0
3868 && where_pad != none && where_pad != stack_direction)
3869 anti_adjust_stack (GEN_INT (extra));
3871 #ifdef PUSH_ROUNDING
3872 if (args_addr == 0 && PUSH_ARGS)
3873 emit_single_push_insn (mode, x, type);
3874 else
3875 #endif
3877 if (GET_CODE (args_so_far) == CONST_INT)
3878 addr
3879 = memory_address (mode,
3880 plus_constant (args_addr,
3881 INTVAL (args_so_far)));
3882 else
3883 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3884 args_so_far));
3885 dest = gen_rtx_MEM (mode, addr);
3887 /* We do *not* set_mem_attributes here, because incoming arguments
3888 may overlap with sibling call outgoing arguments and we cannot
3889 allow reordering of reads from function arguments with stores
3890 to outgoing arguments of sibling calls. We do, however, want
3891 to record the alignment of the stack slot. */
3892 /* ALIGN may well be better aligned than TYPE, e.g. due to
3893 PARM_BOUNDARY. Assume the caller isn't lying. */
3894 set_mem_align (dest, align);
3896 emit_move_insn (dest, x);
3900 /* If part should go in registers, copy that part
3901 into the appropriate registers. Do this now, at the end,
3902 since mem-to-mem copies above may do function calls. */
3903 if (partial > 0 && reg != 0)
3905 /* Handle calls that pass values in multiple non-contiguous locations.
3906 The Irix 6 ABI has examples of this. */
3907 if (GET_CODE (reg) == PARALLEL)
3908 emit_group_load (reg, x, type, -1);
3909 else
3911 gcc_assert (partial % UNITS_PER_WORD == 0);
3912 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3916 if (extra && args_addr == 0 && where_pad == stack_direction)
3917 anti_adjust_stack (GEN_INT (extra));
3919 if (alignment_pad && args_addr == 0)
3920 anti_adjust_stack (alignment_pad);
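/* A standalone illustrative sketch (not part of expr.c) of the byte
   accounting performed above when PARTIAL bytes of an argument are
   already in registers.  OFFSET is the sub-boundary remainder for which
   stack space is reserved but nothing is stored, USED is the amount the
   stack copy may skip because registers cover it, and NOT_STACK is USED
   expressed in words.  The 4-byte word and parameter boundary in the
   example are illustrative assumptions, not statements about any target.  */

struct partial_split { int offset; int used; int not_stack_words; };

static struct partial_split
split_partial_bytes (int partial, int parm_boundary_bytes, int word_bytes)
{
  struct partial_split s;
  s.offset = partial % parm_boundary_bytes;  /* space reserved, not stored  */
  s.used = partial - s.offset;               /* bytes the copy may skip     */
  s.not_stack_words = s.used / word_bytes;   /* words needing no stack copy */
  return s;
}

/* Example: partial = 10 with a 4-byte boundary and word size gives
   offset = 2, used = 8, not_stack_words = 2.  */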
3923 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3924 operations. */
3926 static rtx
3927 get_subtarget (rtx x)
3929 return (optimize
3930 || x == 0
3931 /* Only registers can be subtargets. */
3932 || !REG_P (x)
3933 /* Don't use hard regs to avoid extending their life. */
3934 || REGNO (x) < FIRST_PSEUDO_REGISTER
3935 ? 0 : x);
3938 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3939 FIELD is a bitfield. Returns true if the optimization was successful,
3940 and there's nothing else to do. */
3942 static bool
3943 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3944 unsigned HOST_WIDE_INT bitpos,
3945 enum machine_mode mode1, rtx str_rtx,
3946 tree to, tree src)
3948 enum machine_mode str_mode = GET_MODE (str_rtx);
3949 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3950 tree op0, op1;
3951 rtx value, result;
3952 optab binop;
3954 if (mode1 != VOIDmode
3955 || bitsize >= BITS_PER_WORD
3956 || str_bitsize > BITS_PER_WORD
3957 || TREE_SIDE_EFFECTS (to)
3958 || TREE_THIS_VOLATILE (to))
3959 return false;
3961 STRIP_NOPS (src);
3962 if (!BINARY_CLASS_P (src)
3963 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3964 return false;
3966 op0 = TREE_OPERAND (src, 0);
3967 op1 = TREE_OPERAND (src, 1);
3968 STRIP_NOPS (op0);
3970 if (!operand_equal_p (to, op0, 0))
3971 return false;
3973 if (MEM_P (str_rtx))
3975 unsigned HOST_WIDE_INT offset1;
3977 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3978 str_mode = word_mode;
3979 str_mode = get_best_mode (bitsize, bitpos,
3980 MEM_ALIGN (str_rtx), str_mode, 0);
3981 if (str_mode == VOIDmode)
3982 return false;
3983 str_bitsize = GET_MODE_BITSIZE (str_mode);
3985 offset1 = bitpos;
3986 bitpos %= str_bitsize;
3987 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3988 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3990 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3991 return false;
3993 /* If the bit field covers the whole REG/MEM, store_field
3994 will likely generate better code. */
3995 if (bitsize >= str_bitsize)
3996 return false;
3998 /* We can't handle fields split across multiple entities. */
3999 if (bitpos + bitsize > str_bitsize)
4000 return false;
4002 if (BYTES_BIG_ENDIAN)
4003 bitpos = str_bitsize - bitpos - bitsize;
4005 switch (TREE_CODE (src))
4007 case PLUS_EXPR:
4008 case MINUS_EXPR:
4009 /* For now, just optimize the case of the topmost bitfield,
4010 where we don't need to do any masking, and also
4011 1-bit bitfields, where xor can be used.
4012 We might win by one instruction for the other bitfields
4013 too if insv/extv instructions aren't used, so that
4014 can be added later. */
4015 if (bitpos + bitsize != str_bitsize
4016 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4017 break;
4019 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4020 value = convert_modes (str_mode,
4021 TYPE_MODE (TREE_TYPE (op1)), value,
4022 TYPE_UNSIGNED (TREE_TYPE (op1)));
4024 /* We may be accessing data outside the field, which means
4025 we can alias adjacent data. */
4026 if (MEM_P (str_rtx))
4028 str_rtx = shallow_copy_rtx (str_rtx);
4029 set_mem_alias_set (str_rtx, 0);
4030 set_mem_expr (str_rtx, 0);
4033 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4034 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4036 value = expand_and (str_mode, value, const1_rtx, NULL);
4037 binop = xor_optab;
4039 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4040 build_int_cst (NULL_TREE, bitpos),
4041 NULL_RTX, 1);
4042 result = expand_binop (str_mode, binop, str_rtx,
4043 value, str_rtx, 1, OPTAB_WIDEN);
4044 if (result != str_rtx)
4045 emit_move_insn (str_rtx, result);
4046 return true;
4048 case BIT_IOR_EXPR:
4049 case BIT_XOR_EXPR:
4050 if (TREE_CODE (op1) != INTEGER_CST)
4051 break;
4052 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4053 value = convert_modes (GET_MODE (str_rtx),
4054 TYPE_MODE (TREE_TYPE (op1)), value,
4055 TYPE_UNSIGNED (TREE_TYPE (op1)));
4057 /* We may be accessing data outside the field, which means
4058 we can alias adjacent data. */
4059 if (MEM_P (str_rtx))
4061 str_rtx = shallow_copy_rtx (str_rtx);
4062 set_mem_alias_set (str_rtx, 0);
4063 set_mem_expr (str_rtx, 0);
4066 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4067 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4069 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4070 - 1);
4071 value = expand_and (GET_MODE (str_rtx), value, mask,
4072 NULL_RTX);
4074 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4075 build_int_cst (NULL_TREE, bitpos),
4076 NULL_RTX, 1);
4077 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4078 value, str_rtx, 1, OPTAB_WIDEN);
4079 if (result != str_rtx)
4080 emit_move_insn (str_rtx, result);
4081 return true;
4083 default:
4084 break;
4087 return false;
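/* A standalone illustration (not part of expr.c) of the two cases the
   routine above handles for FIELD += VAL.  WORD is the containing
   object; the field lives at BITPOS.  For the topmost field no masking
   is needed, because carries simply fall off the top of the word; for a
   1-bit field only the parity of the addend matters, so an xor
   suffices.  The 32-bit unsigned word is an illustrative assumption.  */

static unsigned int
topmost_field_plus (unsigned int word, unsigned int val, int bitpos)
{
  /* The field occupies the bits from BITPOS up to the top of the word.  */
  return word + (val << bitpos);
}

static unsigned int
one_bit_field_plus (unsigned int word, unsigned int val, int bitpos)
{
  /* Adding an odd value flips the single bit; an even value leaves it.  */
  return word ^ ((val & 1u) << bitpos);
}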
4091 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4092 is true, try generating a nontemporal store. */
4094 void
4095 expand_assignment (tree to, tree from, bool nontemporal)
4097 rtx to_rtx = 0;
4098 rtx result;
4100 /* Don't crash if the lhs of the assignment was erroneous. */
4101 if (TREE_CODE (to) == ERROR_MARK)
4103 result = expand_normal (from);
4104 return;
4107 /* Optimize away no-op moves without side-effects. */
4108 if (operand_equal_p (to, from, 0))
4109 return;
4111 /* Assignment of a structure component needs special treatment
4112 if the structure component's rtx is not simply a MEM.
4113 Assignment of an array element at a constant index, and assignment of
4114 an array element in an unaligned packed structure field, has the same
4115 problem. */
4116 if (handled_component_p (to)
4117 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4119 enum machine_mode mode1;
4120 HOST_WIDE_INT bitsize, bitpos;
4121 tree offset;
4122 int unsignedp;
4123 int volatilep = 0;
4124 tree tem;
4126 push_temp_slots ();
4127 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4128 &unsignedp, &volatilep, true);
4130 /* If we are going to use store_bit_field and extract_bit_field,
4131 make sure to_rtx will be safe for multiple use. */
4133 to_rtx = expand_normal (tem);
4135 if (offset != 0)
4137 rtx offset_rtx;
4139 if (!MEM_P (to_rtx))
4141 /* We can get constant negative offsets into arrays with broken
4142 user code. Translate this to a trap instead of ICEing. */
4143 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4144 expand_builtin_trap ();
4145 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4148 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4149 #ifdef POINTERS_EXTEND_UNSIGNED
4150 if (GET_MODE (offset_rtx) != Pmode)
4151 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4152 #else
4153 if (GET_MODE (offset_rtx) != ptr_mode)
4154 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4155 #endif
4157 /* A constant address in TO_RTX can have VOIDmode; we must not try
4158 to call force_reg in that case, so avoid it. */
4159 if (MEM_P (to_rtx)
4160 && GET_MODE (to_rtx) == BLKmode
4161 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4162 && bitsize > 0
4163 && (bitpos % bitsize) == 0
4164 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4165 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4167 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4168 bitpos = 0;
4171 to_rtx = offset_address (to_rtx, offset_rtx,
4172 highest_pow2_factor_for_target (to,
4173 offset));
4176 /* Handle expand_expr of a complex value returning a CONCAT. */
4177 if (GET_CODE (to_rtx) == CONCAT)
4179 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4181 gcc_assert (bitpos == 0);
4182 result = store_expr (from, to_rtx, false, nontemporal);
4184 else
4186 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4187 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4188 nontemporal);
4191 else
4193 if (MEM_P (to_rtx))
4195 /* If the field is at offset zero, we could have been given the
4196 DECL_RTX of the parent struct. Don't munge it. */
4197 to_rtx = shallow_copy_rtx (to_rtx);
4199 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4201 /* Deal with volatile and readonly fields. The former is only
4202 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4203 if (volatilep)
4204 MEM_VOLATILE_P (to_rtx) = 1;
4205 if (component_uses_parent_alias_set (to))
4206 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4209 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4210 to_rtx, to, from))
4211 result = NULL;
4212 else
4213 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4214 TREE_TYPE (tem), get_alias_set (to),
4215 nontemporal);
4218 if (result)
4219 preserve_temp_slots (result);
4220 free_temp_slots ();
4221 pop_temp_slots ();
4222 return;
4225 /* If the rhs is a function call and its value is not an aggregate,
4226 call the function before we start to compute the lhs.
4227 This is needed for correct code for cases such as
4228 val = setjmp (buf) on machines where reference to val
4229 requires loading up part of an address in a separate insn.
4231 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4232 since it might be a promoted variable where the zero- or sign- extension
4233 needs to be done. Handling this in the normal way is safe because no
4234 computation is done before the call. */
4235 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4236 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4237 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4238 && REG_P (DECL_RTL (to))))
4240 rtx value;
4242 push_temp_slots ();
4243 value = expand_normal (from);
4244 if (to_rtx == 0)
4245 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4247 /* Handle calls that return values in multiple non-contiguous locations.
4248 The Irix 6 ABI has examples of this. */
4249 if (GET_CODE (to_rtx) == PARALLEL)
4250 emit_group_load (to_rtx, value, TREE_TYPE (from),
4251 int_size_in_bytes (TREE_TYPE (from)));
4252 else if (GET_MODE (to_rtx) == BLKmode)
4253 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4254 else
4256 if (POINTER_TYPE_P (TREE_TYPE (to)))
4257 value = convert_memory_address (GET_MODE (to_rtx), value);
4258 emit_move_insn (to_rtx, value);
4260 preserve_temp_slots (to_rtx);
4261 free_temp_slots ();
4262 pop_temp_slots ();
4263 return;
4266 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4267 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4269 if (to_rtx == 0)
4270 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4272 /* Don't move directly into a return register. */
4273 if (TREE_CODE (to) == RESULT_DECL
4274 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4276 rtx temp;
4278 push_temp_slots ();
4279 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4281 if (GET_CODE (to_rtx) == PARALLEL)
4282 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4283 int_size_in_bytes (TREE_TYPE (from)));
4284 else
4285 emit_move_insn (to_rtx, temp);
4287 preserve_temp_slots (to_rtx);
4288 free_temp_slots ();
4289 pop_temp_slots ();
4290 return;
4293 /* In case we are returning the contents of an object which overlaps
4294 the place the value is being stored, use a safe function when copying
4295 a value through a pointer into a structure value return block. */
4296 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4297 && current_function_returns_struct
4298 && !current_function_returns_pcc_struct)
4300 rtx from_rtx, size;
4302 push_temp_slots ();
4303 size = expr_size (from);
4304 from_rtx = expand_normal (from);
4306 emit_library_call (memmove_libfunc, LCT_NORMAL,
4307 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4308 XEXP (from_rtx, 0), Pmode,
4309 convert_to_mode (TYPE_MODE (sizetype),
4310 size, TYPE_UNSIGNED (sizetype)),
4311 TYPE_MODE (sizetype));
4313 preserve_temp_slots (to_rtx);
4314 free_temp_slots ();
4315 pop_temp_slots ();
4316 return;
4319 /* Compute FROM and store the value in the rtx we got. */
4321 push_temp_slots ();
4322 result = store_expr (from, to_rtx, 0, nontemporal);
4323 preserve_temp_slots (result);
4324 free_temp_slots ();
4325 pop_temp_slots ();
4326 return;
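/* A source-level illustration (not part of expr.c) of the CONCAT case
   handled in expand_assignment above: assigning to one half of a complex
   lvalue stores into the matching half of the (real, imaginary) pair,
   selected by whether BITPOS is zero.  GCC's __real__/__imag__ lvalue
   extension is used; the function and variable names are made up.  */

static _Complex double
set_complex_halves (_Complex double z)
{
  __real__ z = 1.0;  /* bitpos == 0: first element of the CONCAT          */
  __imag__ z = 2.0;  /* bitpos == GET_MODE_BITSIZE (mode1): second element */
  return z;
}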
4329 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
4330 succeeded, false otherwise. */
4332 static bool
4333 emit_storent_insn (rtx to, rtx from)
4335 enum machine_mode mode = GET_MODE (to), imode;
4336 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4337 rtx pattern;
4339 if (code == CODE_FOR_nothing)
4340 return false;
4342 imode = insn_data[code].operand[0].mode;
4343 if (!insn_data[code].operand[0].predicate (to, imode))
4344 return false;
4346 imode = insn_data[code].operand[1].mode;
4347 if (!insn_data[code].operand[1].predicate (from, imode))
4349 from = copy_to_mode_reg (imode, from);
4350 if (!insn_data[code].operand[1].predicate (from, imode))
4351 return false;
4354 pattern = GEN_FCN (code) (to, from);
4355 if (pattern == NULL_RTX)
4356 return false;
4358 emit_insn (pattern);
4359 return true;
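/* A standalone analogy (not part of expr.c, and not a claim about any
   particular backend): a "nontemporal" store bypasses the cache
   hierarchy, which is the property the storent pattern emitted above is
   meant to expose.  On x86 with SSE2 the same idea is available from C
   through _mm_stream_si32.  */

#include <emmintrin.h>

static void
fill_nontemporal (int *dst, int n, int value)
{
  int i;
  for (i = 0; i < n; i++)
    _mm_stream_si32 (&dst[i], value);  /* streaming, cache-bypassing store  */
  _mm_sfence ();                       /* make the streaming stores visible */
}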
4362 /* Generate code for computing expression EXP,
4363 and storing the value into TARGET.
4365 If the mode is BLKmode then we may return TARGET itself.
4366 It turns out that in BLKmode it doesn't cause a problem,
4367 because C has no operators that could combine two different
4368 assignments into the same BLKmode object with different values
4369 with no sequence point. Will other languages need this to
4370 be more thorough?
4372 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4373 stack, and block moves may need to be treated specially.
4375 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4377 rtx
4378 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4380 rtx temp;
4381 rtx alt_rtl = NULL_RTX;
4382 int dont_return_target = 0;
4384 if (VOID_TYPE_P (TREE_TYPE (exp)))
4386 /* C++ can generate ?: expressions with a throw expression in one
4387 branch and an rvalue in the other. Here, we resolve attempts to
4388 store the throw expression's nonexistent result. */
4389 gcc_assert (!call_param_p);
4390 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4391 return NULL_RTX;
4393 if (TREE_CODE (exp) == COMPOUND_EXPR)
4395 /* Perform first part of compound expression, then assign from second
4396 part. */
4397 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4398 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4399 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4400 nontemporal);
4402 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4404 /* For conditional expression, get safe form of the target. Then
4405 test the condition, doing the appropriate assignment on either
4406 side. This avoids the creation of unnecessary temporaries.
4407 For non-BLKmode, it is more efficient not to do this. */
4409 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4411 do_pending_stack_adjust ();
4412 NO_DEFER_POP;
4413 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4414 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4415 nontemporal);
4416 emit_jump_insn (gen_jump (lab2));
4417 emit_barrier ();
4418 emit_label (lab1);
4419 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4420 nontemporal);
4421 emit_label (lab2);
4422 OK_DEFER_POP;
4424 return NULL_RTX;
4426 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4427 /* If this is a scalar in a register that is stored in a wider mode
4428 than the declared mode, compute the result into its declared mode
4429 and then convert to the wider mode. Our value is the computed
4430 expression. */
4432 rtx inner_target = 0;
4434 /* We can do the conversion inside EXP, which will often result
4435 in some optimizations. Do the conversion in two steps: first
4436 change the signedness, if needed, then the extend. But don't
4437 do this if the type of EXP is a subtype of something else
4438 since then the conversion might involve more than just
4439 converting modes. */
4440 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4441 && TREE_TYPE (TREE_TYPE (exp)) == 0
4442 && (!lang_hooks.reduce_bit_field_operations
4443 || (GET_MODE_PRECISION (GET_MODE (target))
4444 == TYPE_PRECISION (TREE_TYPE (exp)))))
4446 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4447 != SUBREG_PROMOTED_UNSIGNED_P (target))
4449 /* Some types, e.g. Fortran's logical*4, won't have a signed
4450 version, so use the mode instead. */
4451 tree ntype
4452 = (signed_or_unsigned_type_for
4453 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4454 if (ntype == NULL)
4455 ntype = lang_hooks.types.type_for_mode
4456 (TYPE_MODE (TREE_TYPE (exp)),
4457 SUBREG_PROMOTED_UNSIGNED_P (target));
4459 exp = fold_convert (ntype, exp);
4462 exp = fold_convert (lang_hooks.types.type_for_mode
4463 (GET_MODE (SUBREG_REG (target)),
4464 SUBREG_PROMOTED_UNSIGNED_P (target)),
4465 exp);
4467 inner_target = SUBREG_REG (target);
4470 temp = expand_expr (exp, inner_target, VOIDmode,
4471 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4473 /* If TEMP is a VOIDmode constant, use convert_modes to make
4474 sure that we properly convert it. */
4475 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4477 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4478 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4479 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4480 GET_MODE (target), temp,
4481 SUBREG_PROMOTED_UNSIGNED_P (target));
4484 convert_move (SUBREG_REG (target), temp,
4485 SUBREG_PROMOTED_UNSIGNED_P (target));
4487 return NULL_RTX;
4489 else if (TREE_CODE (exp) == STRING_CST
4490 && !nontemporal && !call_param_p
4491 && TREE_STRING_LENGTH (exp) > 0
4492 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4494 /* Optimize initialization of an array with a STRING_CST. */
4495 HOST_WIDE_INT exp_len, str_copy_len;
4496 rtx dest_mem;
4498 exp_len = int_expr_size (exp);
4499 if (exp_len <= 0)
4500 goto normal_expr;
4502 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4503 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4504 goto normal_expr;
4506 str_copy_len = TREE_STRING_LENGTH (exp);
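/* STORE_MAX_PIECES is checked to be a power of two below; if it is,
   STR_COPY_LEN is rounded up to a multiple of it, and the MIN that
   follows keeps the rounded length within the object being
   initialized.  */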
4507 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4509 str_copy_len += STORE_MAX_PIECES - 1;
4510 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4512 str_copy_len = MIN (str_copy_len, exp_len);
4513 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4514 (void *) TREE_STRING_POINTER (exp),
4515 MEM_ALIGN (target), false))
4516 goto normal_expr;
4518 dest_mem = target;
4520 dest_mem = store_by_pieces (dest_mem,
4521 str_copy_len, builtin_strncpy_read_str,
4522 (void *) TREE_STRING_POINTER (exp),
4523 MEM_ALIGN (target), false,
4524 exp_len > str_copy_len ? 1 : 0);
4525 if (exp_len > str_copy_len)
4526 clear_storage (dest_mem, GEN_INT (exp_len - str_copy_len),
4527 BLOCK_OP_NORMAL);
4528 return NULL_RTX;
4530 else
4532 rtx tmp_target;
4534 normal_expr:
4535 /* If we want to use a nontemporal store, force the value into
4536 a register first. */
4537 tmp_target = nontemporal ? NULL_RTX : target;
4538 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4539 (call_param_p
4540 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4541 &alt_rtl);
4542 /* Return TARGET if it's a specified hardware register.
4543 If TARGET is a volatile mem ref, either return TARGET
4544 or return a reg copied *from* TARGET; ANSI requires this.
4546 Otherwise, if TEMP is not TARGET, return TEMP
4547 if it is constant (for efficiency),
4548 or if we really want the correct value. */
4549 if (!(target && REG_P (target)
4550 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4551 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4552 && ! rtx_equal_p (temp, target)
4553 && CONSTANT_P (temp))
4554 dont_return_target = 1;
4557 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4558 the same as that of TARGET, adjust the constant. This is needed, for
4559 example, in case it is a CONST_DOUBLE and we want only a word-sized
4560 value. */
4561 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4562 && TREE_CODE (exp) != ERROR_MARK
4563 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4564 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4565 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4567 /* If value was not generated in the target, store it there.
4568 Convert the value to TARGET's type first if necessary and emit the
4569 pending incrementations that have been queued when expanding EXP.
4570 Note that we cannot emit the whole queue blindly because this will
4571 effectively disable the POST_INC optimization later.
4573 If TEMP and TARGET compare equal according to rtx_equal_p, but
4574 one or both of them are volatile memory refs, we have to distinguish
4575 two cases:
4576 - expand_expr has used TARGET. In this case, we must not generate
4577 another copy. This can be detected by TARGET being equal according
4578 to == .
4579 - expand_expr has not used TARGET - that means that the source just
4580 happens to have the same RTX form. Since temp will have been created
4581 by expand_expr, it will compare unequal according to == .
4582 We must generate a copy in this case, to reach the correct number
4583 of volatile memory references. */
4585 if ((! rtx_equal_p (temp, target)
4586 || (temp != target && (side_effects_p (temp)
4587 || side_effects_p (target))))
4588 && TREE_CODE (exp) != ERROR_MARK
4589 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4590 but TARGET is not a valid memory reference, TEMP will differ
4591 from TARGET although it is really the same location. */
4592 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4593 /* If there's nothing to copy, don't bother. Don't call
4594 expr_size unless necessary, because some front ends' (C++)
4595 expr_size hook must not be given objects that are not
4596 supposed to be bit-copied or bit-initialized. */
4597 && expr_size (exp) != const0_rtx)
4599 if (GET_MODE (temp) != GET_MODE (target)
4600 && GET_MODE (temp) != VOIDmode)
4602 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4603 if (dont_return_target)
4605 /* In this case, we will return TEMP,
4606 so make sure it has the proper mode.
4607 But don't forget to store the value into TARGET. */
4608 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4609 emit_move_insn (target, temp);
4611 else if (GET_MODE (target) == BLKmode)
4612 emit_block_move (target, temp, expr_size (exp),
4613 (call_param_p
4614 ? BLOCK_OP_CALL_PARM
4615 : BLOCK_OP_NORMAL));
4616 else
4617 convert_move (target, temp, unsignedp);
4620 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4622 /* Handle copying a string constant into an array. The string
4623 constant may be shorter than the array. So copy just the string's
4624 actual length, and clear the rest. First get the size of the data
4625 type of the string, which is actually the size of the target. */
4626 rtx size = expr_size (exp);
4628 if (GET_CODE (size) == CONST_INT
4629 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4630 emit_block_move (target, temp, size,
4631 (call_param_p
4632 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4633 else
4635 /* Compute the size of the data to copy from the string. */
4636 tree copy_size
4637 = size_binop (MIN_EXPR,
4638 make_tree (sizetype, size),
4639 size_int (TREE_STRING_LENGTH (exp)));
4640 rtx copy_size_rtx
4641 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4642 (call_param_p
4643 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4644 rtx label = 0;
4646 /* Copy that much. */
4647 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4648 TYPE_UNSIGNED (sizetype));
4649 emit_block_move (target, temp, copy_size_rtx,
4650 (call_param_p
4651 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4653 /* Figure out how much is left in TARGET that we have to clear.
4654 Do all calculations in ptr_mode. */
4655 if (GET_CODE (copy_size_rtx) == CONST_INT)
4657 size = plus_constant (size, -INTVAL (copy_size_rtx));
4658 target = adjust_address (target, BLKmode,
4659 INTVAL (copy_size_rtx));
4661 else
4663 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4664 copy_size_rtx, NULL_RTX, 0,
4665 OPTAB_LIB_WIDEN);
4667 #ifdef POINTERS_EXTEND_UNSIGNED
4668 if (GET_MODE (copy_size_rtx) != Pmode)
4669 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4670 TYPE_UNSIGNED (sizetype));
4671 #endif
4673 target = offset_address (target, copy_size_rtx,
4674 highest_pow2_factor (copy_size));
4675 label = gen_label_rtx ();
4676 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4677 GET_MODE (size), 0, label);
4680 if (size != const0_rtx)
4681 clear_storage (target, size, BLOCK_OP_NORMAL);
4683 if (label)
4684 emit_label (label);
4687 /* Handle calls that return values in multiple non-contiguous locations.
4688 The Irix 6 ABI has examples of this. */
4689 else if (GET_CODE (target) == PARALLEL)
4690 emit_group_load (target, temp, TREE_TYPE (exp),
4691 int_size_in_bytes (TREE_TYPE (exp)));
4692 else if (GET_MODE (temp) == BLKmode)
4693 emit_block_move (target, temp, expr_size (exp),
4694 (call_param_p
4695 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4696 else if (nontemporal
4697 && emit_storent_insn (target, temp))
4698 /* If we managed to emit a nontemporal store, there is nothing else to
4699 do. */
4701 else
4703 temp = force_operand (temp, target);
4704 if (temp != target)
4705 emit_move_insn (target, temp);
4709 return NULL_RTX;
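/* A standalone sketch (not part of expr.c) of the effect of the
   STRING_CST handling in store_expr above when a char array is
   initialized from a shorter string constant: copy the string's bytes,
   then clear the remainder of the object.  memcpy and memset stand in
   for the block move and clear_storage calls actually emitted; the
   function name is made up.  */

#include <string.h>

static void
init_array_from_string (char *dst, size_t dst_size,
                        const char *str, size_t str_len)
{
  size_t copy = str_len < dst_size ? str_len : dst_size;  /* the MIN_EXPR */
  memcpy (dst, str, copy);                     /* emit_block_move */
  if (copy < dst_size)
    memset (dst + copy, 0, dst_size - copy);   /* clear_storage   */
}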
4712 /* Helper for categorize_ctor_elements. Identical interface. */
4714 static bool
4715 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4716 HOST_WIDE_INT *p_elt_count,
4717 bool *p_must_clear)
4719 unsigned HOST_WIDE_INT idx;
4720 HOST_WIDE_INT nz_elts, elt_count;
4721 tree value, purpose;
4723 /* Whether CTOR is a valid constant initializer, in accordance with what
4724 initializer_constant_valid_p does. If inferred from the constructor
4725 elements, true until proven otherwise. */
4726 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4727 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4729 nz_elts = 0;
4730 elt_count = 0;
4732 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4734 HOST_WIDE_INT mult;
4736 mult = 1;
4737 if (TREE_CODE (purpose) == RANGE_EXPR)
4739 tree lo_index = TREE_OPERAND (purpose, 0);
4740 tree hi_index = TREE_OPERAND (purpose, 1);
4742 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4743 mult = (tree_low_cst (hi_index, 1)
4744 - tree_low_cst (lo_index, 1) + 1);
4747 switch (TREE_CODE (value))
4749 case CONSTRUCTOR:
4751 HOST_WIDE_INT nz = 0, ic = 0;
4753 bool const_elt_p
4754 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4756 nz_elts += mult * nz;
4757 elt_count += mult * ic;
4759 if (const_from_elts_p && const_p)
4760 const_p = const_elt_p;
4762 break;
4764 case INTEGER_CST:
4765 case REAL_CST:
4766 if (!initializer_zerop (value))
4767 nz_elts += mult;
4768 elt_count += mult;
4769 break;
4771 case STRING_CST:
4772 nz_elts += mult * TREE_STRING_LENGTH (value);
4773 elt_count += mult * TREE_STRING_LENGTH (value);
4774 break;
4776 case COMPLEX_CST:
4777 if (!initializer_zerop (TREE_REALPART (value)))
4778 nz_elts += mult;
4779 if (!initializer_zerop (TREE_IMAGPART (value)))
4780 nz_elts += mult;
4781 elt_count += mult;
4782 break;
4784 case VECTOR_CST:
4786 tree v;
4787 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4789 if (!initializer_zerop (TREE_VALUE (v)))
4790 nz_elts += mult;
4791 elt_count += mult;
4794 break;
4796 default:
4797 nz_elts += mult;
4798 elt_count += mult;
4800 if (const_from_elts_p && const_p)
4801 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4802 != NULL_TREE;
4803 break;
4807 if (!*p_must_clear
4808 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4809 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4811 tree init_sub_type;
4812 bool clear_this = true;
4814 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4816 /* We don't expect more than one element of the union to be
4817 initialized. Not sure what we should do otherwise... */
4818 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4819 == 1);
4821 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4822 CONSTRUCTOR_ELTS (ctor),
4823 0)->value);
4825 /* ??? We could look at each element of the union, and find the
4826 largest element. Which would avoid comparing the size of the
4827 initialized element against any tail padding in the union.
4828 Doesn't seem worth the effort... */
4829 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4830 TYPE_SIZE (init_sub_type)) == 1)
4832 /* And now we have to find out if the element itself is fully
4833 constructed. E.g. for union { struct { int a, b; } s; } u
4834 = { .s = { .a = 1 } }. */
4835 if (elt_count == count_type_elements (init_sub_type, false))
4836 clear_this = false;
4840 *p_must_clear = clear_this;
4843 *p_nz_elts += nz_elts;
4844 *p_elt_count += elt_count;
4846 return const_p;
4849 /* Examine CTOR to discover:
4850 * how many scalar fields are set to nonzero values,
4851 and place it in *P_NZ_ELTS;
4852 * how many scalar fields in total are in CTOR,
4853 and place it in *P_ELT_COUNT.
4854 * if a type is a union, and the initializer from the constructor
4855 is not the largest element in the union, then set *p_must_clear.
4857 Return whether or not CTOR is a valid static constant initializer, the same
4858 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4860 bool
4861 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4862 HOST_WIDE_INT *p_elt_count,
4863 bool *p_must_clear)
4865 *p_nz_elts = 0;
4866 *p_elt_count = 0;
4867 *p_must_clear = false;
4869 return
4870 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
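/* Standalone examples (not part of expr.c) of the counts computed above,
   written as the initializers a C front end would hand to this code.
   The variable and type names are made up.  */

static int example_array[6] = { 1, 0, 2 };
/* The constructor contributes elt_count = 3 scalars, of which
   nz_elts = 2 are nonzero; the last three array elements are simply
   absent from the constructor.  */

union example_union { struct { int a, b; } s; };
static union example_union example_u = { .s = { .a = 1 } };
/* The initialized member is not fully constructed (1 of its 2 scalars),
   so *P_MUST_CLEAR is set and the whole union is cleared first.  */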
4873 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4874 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4875 flexible array member at the end of the structure. */
4877 HOST_WIDE_INT
4878 count_type_elements (const_tree type, bool allow_flexarr)
4880 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4881 switch (TREE_CODE (type))
4883 case ARRAY_TYPE:
4885 tree telts = array_type_nelts (type);
4886 if (telts && host_integerp (telts, 1))
4888 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4889 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4890 if (n == 0)
4891 return 0;
4892 else if (max / n > m)
4893 return n * m;
4895 return -1;
4898 case RECORD_TYPE:
4900 HOST_WIDE_INT n = 0, t;
4901 tree f;
4903 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4904 if (TREE_CODE (f) == FIELD_DECL)
4906 t = count_type_elements (TREE_TYPE (f), false);
4907 if (t < 0)
4909 /* Check for structures with flexible array member. */
4910 tree tf = TREE_TYPE (f);
4911 if (allow_flexarr
4912 && TREE_CHAIN (f) == NULL
4913 && TREE_CODE (tf) == ARRAY_TYPE
4914 && TYPE_DOMAIN (tf)
4915 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4916 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4917 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4918 && int_size_in_bytes (type) >= 0)
4919 break;
4921 return -1;
4923 n += t;
4926 return n;
4929 case UNION_TYPE:
4930 case QUAL_UNION_TYPE:
4932 /* Ho hum. How in the world do we guess here? Clearly it isn't
4933 right to count the fields. Guess based on the number of words. */
4934 HOST_WIDE_INT n = int_size_in_bytes (type);
4935 if (n < 0)
4936 return -1;
4937 return n / UNITS_PER_WORD;
4940 case COMPLEX_TYPE:
4941 return 2;
4943 case VECTOR_TYPE:
4944 return TYPE_VECTOR_SUBPARTS (type);
4946 case INTEGER_TYPE:
4947 case REAL_TYPE:
4948 case ENUMERAL_TYPE:
4949 case BOOLEAN_TYPE:
4950 case POINTER_TYPE:
4951 case OFFSET_TYPE:
4952 case REFERENCE_TYPE:
4953 return 1;
4955 case VOID_TYPE:
4956 case METHOD_TYPE:
4957 case FUNCTION_TYPE:
4958 case LANG_TYPE:
4959 default:
4960 gcc_unreachable ();
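/* A standalone example (not part of expr.c) of the scalar counts the
   switch above produces; the struct itself is made up.  Arrays multiply
   the element count, records sum their fields, COMPLEX_TYPE counts as 2
   and ordinary scalars as 1.  */

struct count_example
{
  int i;             /* INTEGER_TYPE         -> 1 */
  double d[4];       /* ARRAY_TYPE, 4 * 1    -> 4 */
  _Complex float c;  /* COMPLEX_TYPE         -> 2 */
  char *p;           /* POINTER_TYPE         -> 1 */
};                   /* RECORD_TYPE total    -> 8 */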
4964 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4966 static int
4967 mostly_zeros_p (const_tree exp)
4969 if (TREE_CODE (exp) == CONSTRUCTOR)
4972 HOST_WIDE_INT nz_elts, count, elts;
4973 bool must_clear;
4975 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4976 if (must_clear)
4977 return 1;
4979 elts = count_type_elements (TREE_TYPE (exp), false);
4981 return nz_elts < elts / 4;
4984 return initializer_zerop (exp);
4987 /* Return 1 if EXP contains all zeros. */
4989 static int
4990 all_zeros_p (const_tree exp)
4992 if (TREE_CODE (exp) == CONSTRUCTOR)
4995 HOST_WIDE_INT nz_elts, count;
4996 bool must_clear;
4998 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4999 return nz_elts == 0;
5002 return initializer_zerop (exp);
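/* A standalone restatement (not part of expr.c) of the heuristic used by
   mostly_zeros_p above: an aggregate counts as mostly zero when fewer
   than a quarter of its scalar elements are nonzero (or when the
   categorization already demands clearing).  */

static int
mostly_zeros_by_count (long nz_elts, long total_elts)
{
  return nz_elts < total_elts / 4;
}

/* E.g. 20 nonzero entries out of 100 qualify (20 < 25), so the caller
   clears the whole object once and stores only the nonzero elements.  */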
5005 /* Helper function for store_constructor.
5006 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5007 TYPE is the type of the CONSTRUCTOR, not the element type.
5008 CLEARED is as for store_constructor.
5009 ALIAS_SET is the alias set to use for any stores.
5011 This provides a recursive shortcut back to store_constructor when it isn't
5012 necessary to go through store_field. This is so that we can pass through
5013 the cleared field to let store_constructor know that we may not have to
5014 clear a substructure if the outer structure has already been cleared. */
5016 static void
5017 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5018 HOST_WIDE_INT bitpos, enum machine_mode mode,
5019 tree exp, tree type, int cleared,
5020 alias_set_type alias_set)
5022 if (TREE_CODE (exp) == CONSTRUCTOR
5023 /* We can only call store_constructor recursively if the size and
5024 bit position are on a byte boundary. */
5025 && bitpos % BITS_PER_UNIT == 0
5026 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5027 /* If we have a nonzero bitpos for a register target, then we just
5028 let store_field do the bitfield handling. This is unlikely to
5029 generate unnecessary clear instructions anyways. */
5030 && (bitpos == 0 || MEM_P (target)))
5032 if (MEM_P (target))
5033 target
5034 = adjust_address (target,
5035 GET_MODE (target) == BLKmode
5036 || 0 != (bitpos
5037 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5038 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5041 /* Update the alias set, if required. */
5042 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5043 && MEM_ALIAS_SET (target) != 0)
5045 target = copy_rtx (target);
5046 set_mem_alias_set (target, alias_set);
5049 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5051 else
5052 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5055 /* Store the value of constructor EXP into the rtx TARGET.
5056 TARGET is either a REG or a MEM; we know it cannot conflict, since
5057 safe_from_p has been called.
5058 CLEARED is true if TARGET is known to have been zero'd.
5059 SIZE is the number of bytes of TARGET we are allowed to modify: this
5060 may not be the same as the size of EXP if we are assigning to a field
5061 which has been packed to exclude padding bits. */
5063 static void
5064 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5066 tree type = TREE_TYPE (exp);
5067 #ifdef WORD_REGISTER_OPERATIONS
5068 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5069 #endif
5071 switch (TREE_CODE (type))
5073 case RECORD_TYPE:
5074 case UNION_TYPE:
5075 case QUAL_UNION_TYPE:
5077 unsigned HOST_WIDE_INT idx;
5078 tree field, value;
5080 /* If size is zero or the target is already cleared, do nothing. */
5081 if (size == 0 || cleared)
5082 cleared = 1;
5083 /* We either clear the aggregate or indicate the value is dead. */
5084 else if ((TREE_CODE (type) == UNION_TYPE
5085 || TREE_CODE (type) == QUAL_UNION_TYPE)
5086 && ! CONSTRUCTOR_ELTS (exp))
5087 /* If the constructor is empty, clear the union. */
5089 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5090 cleared = 1;
5093 /* If we are building a static constructor into a register,
5094 set the initial value as zero so we can fold the value into
5095 a constant. But if more than one register is involved,
5096 this probably loses. */
5097 else if (REG_P (target) && TREE_STATIC (exp)
5098 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5100 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5101 cleared = 1;
5104 /* If the constructor has fewer fields than the structure or
5105 if we are initializing the structure to mostly zeros, clear
5106 the whole structure first. Don't do this if TARGET is a
5107 register whose mode size isn't equal to SIZE since
5108 clear_storage can't handle this case. */
5109 else if (size > 0
5110 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5111 != fields_length (type))
5112 || mostly_zeros_p (exp))
5113 && (!REG_P (target)
5114 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5115 == size)))
5117 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5118 cleared = 1;
5121 if (REG_P (target) && !cleared)
5122 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5124 /* Store each element of the constructor into the
5125 corresponding field of TARGET. */
5126 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5128 enum machine_mode mode;
5129 HOST_WIDE_INT bitsize;
5130 HOST_WIDE_INT bitpos = 0;
5131 tree offset;
5132 rtx to_rtx = target;
5134 /* Just ignore missing fields. We cleared the whole
5135 structure, above, if any fields are missing. */
5136 if (field == 0)
5137 continue;
5139 if (cleared && initializer_zerop (value))
5140 continue;
5142 if (host_integerp (DECL_SIZE (field), 1))
5143 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5144 else
5145 bitsize = -1;
5147 mode = DECL_MODE (field);
5148 if (DECL_BIT_FIELD (field))
5149 mode = VOIDmode;
5151 offset = DECL_FIELD_OFFSET (field);
5152 if (host_integerp (offset, 0)
5153 && host_integerp (bit_position (field), 0))
5155 bitpos = int_bit_position (field);
5156 offset = 0;
5158 else
5159 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5161 if (offset)
5163 rtx offset_rtx;
5165 offset
5166 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5167 make_tree (TREE_TYPE (exp),
5168 target));
5170 offset_rtx = expand_normal (offset);
5171 gcc_assert (MEM_P (to_rtx));
5173 #ifdef POINTERS_EXTEND_UNSIGNED
5174 if (GET_MODE (offset_rtx) != Pmode)
5175 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5176 #else
5177 if (GET_MODE (offset_rtx) != ptr_mode)
5178 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5179 #endif
5181 to_rtx = offset_address (to_rtx, offset_rtx,
5182 highest_pow2_factor (offset));
5185 #ifdef WORD_REGISTER_OPERATIONS
5186 /* If this initializes a field that is smaller than a
5187 word, at the start of a word, try to widen it to a full
5188 word. This special case allows us to output C++ member
5189 function initializations in a form that the optimizers
5190 can understand. */
5191 if (REG_P (target)
5192 && bitsize < BITS_PER_WORD
5193 && bitpos % BITS_PER_WORD == 0
5194 && GET_MODE_CLASS (mode) == MODE_INT
5195 && TREE_CODE (value) == INTEGER_CST
5196 && exp_size >= 0
5197 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5199 tree type = TREE_TYPE (value);
5201 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5203 type = lang_hooks.types.type_for_size
5204 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5205 value = fold_convert (type, value);
5208 if (BYTES_BIG_ENDIAN)
5209 value
5210 = fold_build2 (LSHIFT_EXPR, type, value,
5211 build_int_cst (type,
5212 BITS_PER_WORD - bitsize));
5213 bitsize = BITS_PER_WORD;
5214 mode = word_mode;
5216 #endif
5218 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5219 && DECL_NONADDRESSABLE_P (field))
5221 to_rtx = copy_rtx (to_rtx);
5222 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5225 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5226 value, type, cleared,
5227 get_alias_set (TREE_TYPE (field)));
5229 break;
5231 case ARRAY_TYPE:
5233 tree value, index;
5234 unsigned HOST_WIDE_INT i;
5235 int need_to_clear;
5236 tree domain;
5237 tree elttype = TREE_TYPE (type);
5238 int const_bounds_p;
5239 HOST_WIDE_INT minelt = 0;
5240 HOST_WIDE_INT maxelt = 0;
5242 domain = TYPE_DOMAIN (type);
5243 const_bounds_p = (TYPE_MIN_VALUE (domain)
5244 && TYPE_MAX_VALUE (domain)
5245 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5246 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5248 /* If we have constant bounds for the range of the type, get them. */
5249 if (const_bounds_p)
5251 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5252 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5255 /* If the constructor has fewer elements than the array, clear
5256 the whole array first. Similarly if this is a static
5257 constructor of a non-BLKmode object. */
5258 if (cleared)
5259 need_to_clear = 0;
5260 else if (REG_P (target) && TREE_STATIC (exp))
5261 need_to_clear = 1;
5262 else
5264 unsigned HOST_WIDE_INT idx;
5265 tree index, value;
5266 HOST_WIDE_INT count = 0, zero_count = 0;
5267 need_to_clear = ! const_bounds_p;
5269 /* This loop is a more accurate version of the loop in
5270 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5271 is also needed to check for missing elements. */
5272 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5274 HOST_WIDE_INT this_node_count;
5276 if (need_to_clear)
5277 break;
5279 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5281 tree lo_index = TREE_OPERAND (index, 0);
5282 tree hi_index = TREE_OPERAND (index, 1);
5284 if (! host_integerp (lo_index, 1)
5285 || ! host_integerp (hi_index, 1))
5287 need_to_clear = 1;
5288 break;
5291 this_node_count = (tree_low_cst (hi_index, 1)
5292 - tree_low_cst (lo_index, 1) + 1);
5294 else
5295 this_node_count = 1;
5297 count += this_node_count;
5298 if (mostly_zeros_p (value))
5299 zero_count += this_node_count;
5302 /* Clear the entire array first if there are any missing
5303 elements, or if the incidence of zero elements is >=
5304 75%. */
5305 if (! need_to_clear
5306 && (count < maxelt - minelt + 1
5307 || 4 * zero_count >= 3 * count))
5308 need_to_clear = 1;
5311 if (need_to_clear && size > 0)
5313 if (REG_P (target))
5314 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5315 else
5316 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5317 cleared = 1;
5320 if (!cleared && REG_P (target))
5321 /* Inform later passes that the old value is dead. */
5322 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5324 /* Store each element of the constructor into the
5325 corresponding element of TARGET, determined by counting the
5326 elements. */
5327 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5329 enum machine_mode mode;
5330 HOST_WIDE_INT bitsize;
5331 HOST_WIDE_INT bitpos;
5332 int unsignedp;
5333 rtx xtarget = target;
5335 if (cleared && initializer_zerop (value))
5336 continue;
5338 unsignedp = TYPE_UNSIGNED (elttype);
5339 mode = TYPE_MODE (elttype);
5340 if (mode == BLKmode)
5341 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5342 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5343 : -1);
5344 else
5345 bitsize = GET_MODE_BITSIZE (mode);
5347 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5349 tree lo_index = TREE_OPERAND (index, 0);
5350 tree hi_index = TREE_OPERAND (index, 1);
5351 rtx index_r, pos_rtx;
5352 HOST_WIDE_INT lo, hi, count;
5353 tree position;
5355 /* If the range is constant and "small", unroll the loop. */
5356 if (const_bounds_p
5357 && host_integerp (lo_index, 0)
5358 && host_integerp (hi_index, 0)
5359 && (lo = tree_low_cst (lo_index, 0),
5360 hi = tree_low_cst (hi_index, 0),
5361 count = hi - lo + 1,
5362 (!MEM_P (target)
5363 || count <= 2
5364 || (host_integerp (TYPE_SIZE (elttype), 1)
5365 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5366 <= 40 * 8)))))
5368 lo -= minelt; hi -= minelt;
5369 for (; lo <= hi; lo++)
5371 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5373 if (MEM_P (target)
5374 && !MEM_KEEP_ALIAS_SET_P (target)
5375 && TREE_CODE (type) == ARRAY_TYPE
5376 && TYPE_NONALIASED_COMPONENT (type))
5378 target = copy_rtx (target);
5379 MEM_KEEP_ALIAS_SET_P (target) = 1;
5382 store_constructor_field
5383 (target, bitsize, bitpos, mode, value, type, cleared,
5384 get_alias_set (elttype));
5387 else
5389 rtx loop_start = gen_label_rtx ();
5390 rtx loop_end = gen_label_rtx ();
5391 tree exit_cond;
5393 expand_normal (hi_index);
5394 unsignedp = TYPE_UNSIGNED (domain);
5396 index = build_decl (VAR_DECL, NULL_TREE, domain);
5398 index_r
5399 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5400 &unsignedp, 0));
5401 SET_DECL_RTL (index, index_r);
5402 store_expr (lo_index, index_r, 0, false);
5404 /* Build the head of the loop. */
5405 do_pending_stack_adjust ();
5406 emit_label (loop_start);
5408 /* Assign value to element index. */
5409 position =
5410 fold_convert (ssizetype,
5411 fold_build2 (MINUS_EXPR,
5412 TREE_TYPE (index),
5413 index,
5414 TYPE_MIN_VALUE (domain)));
5416 position =
5417 size_binop (MULT_EXPR, position,
5418 fold_convert (ssizetype,
5419 TYPE_SIZE_UNIT (elttype)));
5421 pos_rtx = expand_normal (position);
5422 xtarget = offset_address (target, pos_rtx,
5423 highest_pow2_factor (position));
5424 xtarget = adjust_address (xtarget, mode, 0);
5425 if (TREE_CODE (value) == CONSTRUCTOR)
5426 store_constructor (value, xtarget, cleared,
5427 bitsize / BITS_PER_UNIT);
5428 else
5429 store_expr (value, xtarget, 0, false);
5431 /* Generate a conditional jump to exit the loop. */
5432 exit_cond = build2 (LT_EXPR, integer_type_node,
5433 index, hi_index);
5434 jumpif (exit_cond, loop_end);
5436 /* Update the loop counter, and jump to the head of
5437 the loop. */
5438 expand_assignment (index,
5439 build2 (PLUS_EXPR, TREE_TYPE (index),
5440 index, integer_one_node),
5441 false);
5443 emit_jump (loop_start);
5445 /* Build the end of the loop. */
5446 emit_label (loop_end);
5449 else if ((index != 0 && ! host_integerp (index, 0))
5450 || ! host_integerp (TYPE_SIZE (elttype), 1))
5452 tree position;
5454 if (index == 0)
5455 index = ssize_int (1);
5457 if (minelt)
5458 index = fold_convert (ssizetype,
5459 fold_build2 (MINUS_EXPR,
5460 TREE_TYPE (index),
5461 index,
5462 TYPE_MIN_VALUE (domain)));
5464 position =
5465 size_binop (MULT_EXPR, index,
5466 fold_convert (ssizetype,
5467 TYPE_SIZE_UNIT (elttype)));
5468 xtarget = offset_address (target,
5469 expand_normal (position),
5470 highest_pow2_factor (position));
5471 xtarget = adjust_address (xtarget, mode, 0);
5472 store_expr (value, xtarget, 0, false);
5474 else
5476 if (index != 0)
5477 bitpos = ((tree_low_cst (index, 0) - minelt)
5478 * tree_low_cst (TYPE_SIZE (elttype), 1));
5479 else
5480 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5482 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5483 && TREE_CODE (type) == ARRAY_TYPE
5484 && TYPE_NONALIASED_COMPONENT (type))
5486 target = copy_rtx (target);
5487 MEM_KEEP_ALIAS_SET_P (target) = 1;
5489 store_constructor_field (target, bitsize, bitpos, mode, value,
5490 type, cleared, get_alias_set (elttype));
5493 break;
5496 case VECTOR_TYPE:
5498 unsigned HOST_WIDE_INT idx;
5499 constructor_elt *ce;
5500 int i;
5501 int need_to_clear;
5502 int icode = 0;
5503 tree elttype = TREE_TYPE (type);
5504 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5505 enum machine_mode eltmode = TYPE_MODE (elttype);
5506 HOST_WIDE_INT bitsize;
5507 HOST_WIDE_INT bitpos;
5508 rtvec vector = NULL;
5509 unsigned n_elts;
5511 gcc_assert (eltmode != BLKmode);
5513 n_elts = TYPE_VECTOR_SUBPARTS (type);
5514 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5516 enum machine_mode mode = GET_MODE (target);
5518 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5519 if (icode != CODE_FOR_nothing)
5521 unsigned int i;
5523 vector = rtvec_alloc (n_elts);
5524 for (i = 0; i < n_elts; i++)
5525 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5529 /* If the constructor has fewer elements than the vector,
5530 clear the whole vector first. Similarly if this is a static
5531 constructor of a non-BLKmode object. */
5532 if (cleared)
5533 need_to_clear = 0;
5534 else if (REG_P (target) && TREE_STATIC (exp))
5535 need_to_clear = 1;
5536 else
5538 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5539 tree value;
5541 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5543 int n_elts_here = tree_low_cst
5544 (int_const_binop (TRUNC_DIV_EXPR,
5545 TYPE_SIZE (TREE_TYPE (value)),
5546 TYPE_SIZE (elttype), 0), 1);
5548 count += n_elts_here;
5549 if (mostly_zeros_p (value))
5550 zero_count += n_elts_here;
5553 /* Clear the entire vector first if there are any missing elements,
5554 or if the incidence of zero elements is >= 75%. */
5555 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5558 if (need_to_clear && size > 0 && !vector)
5560 if (REG_P (target))
5561 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5562 else
5563 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5564 cleared = 1;
5567 /* Inform later passes that the old value is dead. */
5568 if (!cleared && !vector && REG_P (target))
5569 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5571 /* Store each element of the constructor into the corresponding
5572 element of TARGET, determined by counting the elements. */
5573 for (idx = 0, i = 0;
5574 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5575 idx++, i += bitsize / elt_size)
5577 HOST_WIDE_INT eltpos;
5578 tree value = ce->value;
5580 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5581 if (cleared && initializer_zerop (value))
5582 continue;
5584 if (ce->index)
5585 eltpos = tree_low_cst (ce->index, 1);
5586 else
5587 eltpos = i;
5589 if (vector)
5591 /* Vector CONSTRUCTORs should only be built from smaller
5592 vectors in the case of BLKmode vectors. */
5593 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5594 RTVEC_ELT (vector, eltpos)
5595 = expand_normal (value);
5597 else
5599 enum machine_mode value_mode =
5600 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5601 ? TYPE_MODE (TREE_TYPE (value))
5602 : eltmode;
5603 bitpos = eltpos * elt_size;
5604 store_constructor_field (target, bitsize, bitpos,
5605 value_mode, value, type,
5606 cleared, get_alias_set (elttype));
5610 if (vector)
5611 emit_insn (GEN_FCN (icode)
5612 (target,
5613 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5614 break;
5617 default:
5618 gcc_unreachable ();
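/* Illustrative sketch (editorial example, not part of the GCC sources):
   a GNU C vector initializer such as the one below is the kind of
   construct handled by the VECTOR_TYPE case of store_constructor above;
   when the target provides a vec_init pattern, the elements are emitted
   as a single PARALLEL move.  */
#if 0
typedef int v4si __attribute__ ((vector_size (16)));

v4si
example_vector_constructor (int a, int b, int c, int d)
{
  v4si v = { a, b, c, d };	/* expanded through store_constructor  */
  return v;
}
#endif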
5622 /* Store the value of EXP (an expression tree)
5623 into a subfield of TARGET which has mode MODE and occupies
5624 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5625 If MODE is VOIDmode, it means that we are storing into a bit-field.
5627 Always return const0_rtx unless we have something particular to
5628 return.
5630 TYPE is the type of the underlying object,
5632 ALIAS_SET is the alias set for the destination. This value will
5633 (in general) be different from that for TARGET, since TARGET is a
5634 reference to the containing structure.
5636 If NONTEMPORAL is true, try generating a nontemporal store. */
5638 static rtx
5639 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5640 enum machine_mode mode, tree exp, tree type,
5641 alias_set_type alias_set, bool nontemporal)
5643 HOST_WIDE_INT width_mask = 0;
5645 if (TREE_CODE (exp) == ERROR_MARK)
5646 return const0_rtx;
5648 /* If we have nothing to store, do nothing unless the expression has
5649 side-effects. */
5650 if (bitsize == 0)
5651 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5652 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5653 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5655 /* If we are storing into an unaligned field of an aligned union that is
5656 in a register, we may have the mode of TARGET being an integer mode but
5657 MODE == BLKmode. In that case, get an aligned object whose size and
5658 alignment are the same as TARGET and store TARGET into it (we can avoid
5659 the store if the field being stored is the entire width of TARGET). Then
5660 call ourselves recursively to store the field into a BLKmode version of
5661 that object. Finally, load from the object into TARGET. This is not
5662 very efficient in general, but should only be slightly more expensive
5663 than the otherwise-required unaligned accesses. Perhaps this can be
5664 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5665 twice, once with emit_move_insn and once via store_field. */
5667 if (mode == BLKmode
5668 && (REG_P (target) || GET_CODE (target) == SUBREG))
5670 rtx object = assign_temp (type, 0, 1, 1);
5671 rtx blk_object = adjust_address (object, BLKmode, 0);
5673 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5674 emit_move_insn (object, target);
5676 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5677 nontemporal);
5679 emit_move_insn (target, object);
5681 /* We want to return the BLKmode version of the data. */
5682 return blk_object;
5685 if (GET_CODE (target) == CONCAT)
5687 /* We're storing into a struct containing a single __complex. */
5689 gcc_assert (!bitpos);
5690 return store_expr (exp, target, 0, nontemporal);
5693 /* If the structure is in a register or if the component
5694 is a bit field, we cannot use addressing to access it.
5695 Use bit-field techniques or SUBREG to store in it. */
5697 if (mode == VOIDmode
5698 || (mode != BLKmode && ! direct_store[(int) mode]
5699 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5700 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5701 || REG_P (target)
5702 || GET_CODE (target) == SUBREG
5703 /* If the field isn't aligned enough to store as an ordinary memref,
5704 store it as a bit field. */
5705 || (mode != BLKmode
5706 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5707 || bitpos % GET_MODE_ALIGNMENT (mode))
5708 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5709 || (bitpos % BITS_PER_UNIT != 0)))
5710 /* If the RHS and field are a constant size and the size of the
5711 RHS isn't the same size as the bitfield, we must use bitfield
5712 operations. */
5713 || (bitsize >= 0
5714 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5715 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5717 rtx temp;
5719 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5720 implies a mask operation. If the precision is the same size as
5721 the field we're storing into, that mask is redundant. This is
5722 particularly common with bit field assignments generated by the
5723 C front end. */
5724 if (TREE_CODE (exp) == NOP_EXPR)
5726 tree type = TREE_TYPE (exp);
5727 if (INTEGRAL_TYPE_P (type)
5728 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5729 && bitsize == TYPE_PRECISION (type))
5731 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5732 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5733 exp = TREE_OPERAND (exp, 0);
5737 temp = expand_normal (exp);
5739 /* If BITSIZE is narrower than the size of the type of EXP
5740 we will be narrowing TEMP. Normally, what's wanted are the
5741 low-order bits. However, if EXP's type is a record and this is
5742 a big-endian machine, we want the upper BITSIZE bits. */
5743 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5744 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5745 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5746 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5747 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5748 - bitsize),
5749 NULL_RTX, 1);
5751 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5752 MODE. */
5753 if (mode != VOIDmode && mode != BLKmode
5754 && mode != TYPE_MODE (TREE_TYPE (exp)))
5755 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5757 /* If the modes of TARGET and TEMP are both BLKmode, both
5758 must be in memory and BITPOS must be aligned on a byte
5759 boundary. If so, we simply do a block copy. */
5760 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5762 gcc_assert (MEM_P (target) && MEM_P (temp)
5763 && !(bitpos % BITS_PER_UNIT));
5765 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5766 emit_block_move (target, temp,
5767 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5768 / BITS_PER_UNIT),
5769 BLOCK_OP_NORMAL);
5771 return const0_rtx;
5774 /* Store the value in the bitfield. */
5775 store_bit_field (target, bitsize, bitpos, mode, temp);
5777 return const0_rtx;
5779 else
5781 /* Now build a reference to just the desired component. */
5782 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5784 if (to_rtx == target)
5785 to_rtx = copy_rtx (to_rtx);
5787 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5788 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5789 set_mem_alias_set (to_rtx, alias_set);
5791 return store_expr (exp, to_rtx, 0, nontemporal);
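/* Illustrative sketch (editorial example, not part of the GCC sources):
   on typical targets the assignment below stores to a bit-field that is
   not byte aligned, so store_field above has to take the store_bit_field
   branch rather than build an ordinary memref.  */
#if 0
struct example_s { unsigned a : 3; unsigned b : 5; };

void
example_bitfield_store (struct example_s *p, unsigned val)
{
  p->b = val;	/* bitpos % BITS_PER_UNIT != 0 => store_bit_field  */
}
#endif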
5795 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5796 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5797 codes and find the ultimate containing object, which we return.
5799 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5800 bit position, and *PUNSIGNEDP to the signedness of the field.
5801 If the position of the field is variable, we store a tree
5802 giving the variable offset (in units) in *POFFSET.
5803 This offset is in addition to the bit position.
5804 If the position is not variable, we store 0 in *POFFSET.
5806 If any of the extraction expressions is volatile,
5807 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5809 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5810 is a mode that can be used to access the field. In that case, *PBITSIZE
5811 is redundant.
5813 If the field describes a variable-sized object, *PMODE is set to
5814 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5815 this case, but the address of the object can be found.
5817 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5818 look through nodes that serve as markers of a greater alignment than
5819 the one that can be deduced from the expression. These nodes make it
5820 possible for front-ends to prevent temporaries from being created by
5821 the middle-end on alignment considerations. For that purpose, the
5822 normal operating mode at high-level is to always pass FALSE so that
5823 the ultimate containing object is really returned; moreover, the
5824 associated predicate handled_component_p will always return TRUE
5825 on these nodes, thus indicating that they are essentially handled
5826 by get_inner_reference. TRUE should only be passed when the caller
5827 is scanning the expression in order to build another representation
5828 and specifically knows how to handle these nodes; as such, this is
5829 the normal operating mode in the RTL expanders. */
5831 tree
5832 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5833 HOST_WIDE_INT *pbitpos, tree *poffset,
5834 enum machine_mode *pmode, int *punsignedp,
5835 int *pvolatilep, bool keep_aligning)
5837 tree size_tree = 0;
5838 enum machine_mode mode = VOIDmode;
5839 tree offset = size_zero_node;
5840 tree bit_offset = bitsize_zero_node;
5842 /* First get the mode, signedness, and size. We do this from just the
5843 outermost expression. */
5844 if (TREE_CODE (exp) == COMPONENT_REF)
5846 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5847 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5848 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5850 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5852 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5854 size_tree = TREE_OPERAND (exp, 1);
5855 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5857 /* For vector types, with the correct size of access, use the mode of
5858 inner type. */
5859 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5860 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5861 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5862 mode = TYPE_MODE (TREE_TYPE (exp));
5864 else
5866 mode = TYPE_MODE (TREE_TYPE (exp));
5867 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5869 if (mode == BLKmode)
5870 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5871 else
5872 *pbitsize = GET_MODE_BITSIZE (mode);
5875 if (size_tree != 0)
5877 if (! host_integerp (size_tree, 1))
5878 mode = BLKmode, *pbitsize = -1;
5879 else
5880 *pbitsize = tree_low_cst (size_tree, 1);
5883 *pmode = mode;
5885 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5886 and find the ultimate containing object. */
5887 while (1)
5889 switch (TREE_CODE (exp))
5891 case BIT_FIELD_REF:
5892 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5893 TREE_OPERAND (exp, 2));
5894 break;
5896 case COMPONENT_REF:
5898 tree field = TREE_OPERAND (exp, 1);
5899 tree this_offset = component_ref_field_offset (exp);
5901 /* If this field hasn't been filled in yet, don't go past it.
5902 This should only happen when folding expressions made during
5903 type construction. */
5904 if (this_offset == 0)
5905 break;
5907 offset = size_binop (PLUS_EXPR, offset, this_offset);
5908 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5909 DECL_FIELD_BIT_OFFSET (field));
5911 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5913 break;
5915 case ARRAY_REF:
5916 case ARRAY_RANGE_REF:
5918 tree index = TREE_OPERAND (exp, 1);
5919 tree low_bound = array_ref_low_bound (exp);
5920 tree unit_size = array_ref_element_size (exp);
5922 /* We assume all arrays have sizes that are a multiple of a byte.
5923 First subtract the lower bound, if any, in the type of the
5924 index, then convert to sizetype and multiply by the size of
5925 the array element. */
5926 if (! integer_zerop (low_bound))
5927 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5928 index, low_bound);
5930 offset = size_binop (PLUS_EXPR, offset,
5931 size_binop (MULT_EXPR,
5932 fold_convert (sizetype, index),
5933 unit_size));
5935 break;
5937 case REALPART_EXPR:
5938 break;
5940 case IMAGPART_EXPR:
5941 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5942 bitsize_int (*pbitsize));
5943 break;
5945 case VIEW_CONVERT_EXPR:
5946 if (keep_aligning && STRICT_ALIGNMENT
5947 && (TYPE_ALIGN (TREE_TYPE (exp))
5948 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5949 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5950 < BIGGEST_ALIGNMENT)
5951 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5952 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5953 goto done;
5954 break;
5956 default:
5957 goto done;
5960 /* If any reference in the chain is volatile, the effect is volatile. */
5961 if (TREE_THIS_VOLATILE (exp))
5962 *pvolatilep = 1;
5964 exp = TREE_OPERAND (exp, 0);
5966 done:
5968 /* If OFFSET is constant, see if we can return the whole thing as a
5969 constant bit position. Make sure to handle overflow during
5970 this conversion. */
5971 if (host_integerp (offset, 0))
5973 double_int tem = double_int_mul (tree_to_double_int (offset),
5974 uhwi_to_double_int (BITS_PER_UNIT));
5975 tem = double_int_add (tem, tree_to_double_int (bit_offset));
5976 if (double_int_fits_in_shwi_p (tem))
5978 *pbitpos = double_int_to_shwi (tem);
5979 *poffset = NULL_TREE;
5980 return exp;
5984 /* Otherwise, split it up. */
5985 *pbitpos = tree_low_cst (bit_offset, 0);
5986 *poffset = offset;
5988 return exp;
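/* Illustrative sketch (editorial example, not part of the GCC sources):
   the usual calling convention for get_inner_reference.  The helper
   name is made up for the example.  */
#if 0
static tree
example_decompose_reference (tree ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  /* For a reference like s.a[i].b this returns the base object (the
     decl for s); BITPOS and OFFSET then describe where the referenced
     bits live inside that object.  */
  return get_inner_reference (ref, &bitsize, &bitpos, &offset,
			      &mode, &unsignedp, &volatilep, false);
}
#endif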
5991 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5992 look for whether EXP or any nested component-refs within EXP is marked
5993 as PACKED. */
5995 bool
5996 contains_packed_reference (const_tree exp)
5998 bool packed_p = false;
6000 while (1)
6002 switch (TREE_CODE (exp))
6004 case COMPONENT_REF:
6006 tree field = TREE_OPERAND (exp, 1);
6007 packed_p = DECL_PACKED (field)
6008 || TYPE_PACKED (TREE_TYPE (field))
6009 || TYPE_PACKED (TREE_TYPE (exp));
6010 if (packed_p)
6011 goto done;
6013 break;
6015 case BIT_FIELD_REF:
6016 case ARRAY_REF:
6017 case ARRAY_RANGE_REF:
6018 case REALPART_EXPR:
6019 case IMAGPART_EXPR:
6020 case VIEW_CONVERT_EXPR:
6021 break;
6023 default:
6024 goto done;
6026 exp = TREE_OPERAND (exp, 0);
6028 done:
6029 return packed_p;
6032 /* Return a tree of sizetype representing the size, in bytes, of the element
6033 of EXP, an ARRAY_REF. */
6035 tree
6036 array_ref_element_size (tree exp)
6038 tree aligned_size = TREE_OPERAND (exp, 3);
6039 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6041 /* If a size was specified in the ARRAY_REF, it's the size measured
6042 in alignment units of the element type. So multiply by that value. */
6043 if (aligned_size)
6045 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6046 sizetype from another type of the same width and signedness. */
6047 if (TREE_TYPE (aligned_size) != sizetype)
6048 aligned_size = fold_convert (sizetype, aligned_size);
6049 return size_binop (MULT_EXPR, aligned_size,
6050 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6053 /* Otherwise, take the size from that of the element type. Substitute
6054 any PLACEHOLDER_EXPR that we have. */
6055 else
6056 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
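/* Worked example (editorial, not from the sources): if operand 3 of the
   ARRAY_REF is 2 and the element type is aligned to 8 bytes, the element
   size computed above is 2 * 8 = 16 bytes.  */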
6059 /* Return a tree representing the lower bound of the array mentioned in
6060 EXP, an ARRAY_REF. */
6062 tree
6063 array_ref_low_bound (tree exp)
6065 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6067 /* If a lower bound is specified in EXP, use it. */
6068 if (TREE_OPERAND (exp, 2))
6069 return TREE_OPERAND (exp, 2);
6071 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6072 substituting for a PLACEHOLDER_EXPR as needed. */
6073 if (domain_type && TYPE_MIN_VALUE (domain_type))
6074 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6076 /* Otherwise, return a zero of the appropriate type. */
6077 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6080 /* Return a tree representing the upper bound of the array mentioned in
6081 EXP, an ARRAY_REF. */
6083 tree
6084 array_ref_up_bound (tree exp)
6086 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6088 /* If there is a domain type and it has an upper bound, use it, substituting
6089 for a PLACEHOLDER_EXPR as needed. */
6090 if (domain_type && TYPE_MAX_VALUE (domain_type))
6091 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6093 /* Otherwise fail. */
6094 return NULL_TREE;
6097 /* Return a tree representing the offset, in bytes, of the field referenced
6098 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6100 tree
6101 component_ref_field_offset (tree exp)
6103 tree aligned_offset = TREE_OPERAND (exp, 2);
6104 tree field = TREE_OPERAND (exp, 1);
6106 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6107 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6108 value. */
6109 if (aligned_offset)
6111 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6112 sizetype from another type of the same width and signedness. */
6113 if (TREE_TYPE (aligned_offset) != sizetype)
6114 aligned_offset = fold_convert (sizetype, aligned_offset);
6115 return size_binop (MULT_EXPR, aligned_offset,
6116 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6119 /* Otherwise, take the offset from that of the field. Substitute
6120 any PLACEHOLDER_EXPR that we have. */
6121 else
6122 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6125 /* Return 1 if T is an expression that get_inner_reference handles. */
6127 int
6128 handled_component_p (const_tree t)
6130 switch (TREE_CODE (t))
6132 case BIT_FIELD_REF:
6133 case COMPONENT_REF:
6134 case ARRAY_REF:
6135 case ARRAY_RANGE_REF:
6136 case VIEW_CONVERT_EXPR:
6137 case REALPART_EXPR:
6138 case IMAGPART_EXPR:
6139 return 1;
6141 default:
6142 return 0;
6146 /* Given an rtx VALUE that may contain additions and multiplications, return
6147 an equivalent value that just refers to a register, memory, or constant.
6148 This is done by generating instructions to perform the arithmetic and
6149 returning a pseudo-register containing the value.
6151 The returned value may be a REG, SUBREG, MEM or constant. */
6153 rtx
6154 force_operand (rtx value, rtx target)
6156 rtx op1, op2;
6157 /* Use subtarget as the target for operand 0 of a binary operation. */
6158 rtx subtarget = get_subtarget (target);
6159 enum rtx_code code = GET_CODE (value);
6161 /* Check for subreg applied to an expression produced by loop optimizer. */
6162 if (code == SUBREG
6163 && !REG_P (SUBREG_REG (value))
6164 && !MEM_P (SUBREG_REG (value)))
6166 value
6167 = simplify_gen_subreg (GET_MODE (value),
6168 force_reg (GET_MODE (SUBREG_REG (value)),
6169 force_operand (SUBREG_REG (value),
6170 NULL_RTX)),
6171 GET_MODE (SUBREG_REG (value)),
6172 SUBREG_BYTE (value));
6173 code = GET_CODE (value);
6176 /* Check for a PIC address load. */
6177 if ((code == PLUS || code == MINUS)
6178 && XEXP (value, 0) == pic_offset_table_rtx
6179 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6180 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6181 || GET_CODE (XEXP (value, 1)) == CONST))
6183 if (!subtarget)
6184 subtarget = gen_reg_rtx (GET_MODE (value));
6185 emit_move_insn (subtarget, value);
6186 return subtarget;
6189 if (ARITHMETIC_P (value))
6191 op2 = XEXP (value, 1);
6192 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6193 subtarget = 0;
6194 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6196 code = PLUS;
6197 op2 = negate_rtx (GET_MODE (value), op2);
6200 /* Check for an addition with OP2 a constant integer and our first
6201 operand a PLUS of a virtual register and something else. In that
6202 case, we want to emit the sum of the virtual register and the
6203 constant first and then add the other value. This allows virtual
6204 register instantiation to simply modify the constant rather than
6205 creating another one around this addition. */
6206 if (code == PLUS && GET_CODE (op2) == CONST_INT
6207 && GET_CODE (XEXP (value, 0)) == PLUS
6208 && REG_P (XEXP (XEXP (value, 0), 0))
6209 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6210 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6212 rtx temp = expand_simple_binop (GET_MODE (value), code,
6213 XEXP (XEXP (value, 0), 0), op2,
6214 subtarget, 0, OPTAB_LIB_WIDEN);
6215 return expand_simple_binop (GET_MODE (value), code, temp,
6216 force_operand (XEXP (XEXP (value,
6217 0), 1), 0),
6218 target, 0, OPTAB_LIB_WIDEN);
6221 op1 = force_operand (XEXP (value, 0), subtarget);
6222 op2 = force_operand (op2, NULL_RTX);
6223 switch (code)
6225 case MULT:
6226 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6227 case DIV:
6228 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6229 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6230 target, 1, OPTAB_LIB_WIDEN);
6231 else
6232 return expand_divmod (0,
6233 FLOAT_MODE_P (GET_MODE (value))
6234 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6235 GET_MODE (value), op1, op2, target, 0);
6236 case MOD:
6237 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6238 target, 0);
6239 case UDIV:
6240 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6241 target, 1);
6242 case UMOD:
6243 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6244 target, 1);
6245 case ASHIFTRT:
6246 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6247 target, 0, OPTAB_LIB_WIDEN);
6248 default:
6249 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6250 target, 1, OPTAB_LIB_WIDEN);
6253 if (UNARY_P (value))
6255 if (!target)
6256 target = gen_reg_rtx (GET_MODE (value));
6257 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6258 switch (code)
6260 case ZERO_EXTEND:
6261 case SIGN_EXTEND:
6262 case TRUNCATE:
6263 case FLOAT_EXTEND:
6264 case FLOAT_TRUNCATE:
6265 convert_move (target, op1, code == ZERO_EXTEND);
6266 return target;
6268 case FIX:
6269 case UNSIGNED_FIX:
6270 expand_fix (target, op1, code == UNSIGNED_FIX);
6271 return target;
6273 case FLOAT:
6274 case UNSIGNED_FLOAT:
6275 expand_float (target, op1, code == UNSIGNED_FLOAT);
6276 return target;
6278 default:
6279 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6283 #ifdef INSN_SCHEDULING
6284 /* On machines that have insn scheduling, we want all memory references to be
6285 explicit, so we need to deal with such paradoxical SUBREGs. */
6286 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6287 && (GET_MODE_SIZE (GET_MODE (value))
6288 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6289 value
6290 = simplify_gen_subreg (GET_MODE (value),
6291 force_reg (GET_MODE (SUBREG_REG (value)),
6292 force_operand (SUBREG_REG (value),
6293 NULL_RTX)),
6294 GET_MODE (SUBREG_REG (value)),
6295 SUBREG_BYTE (value));
6296 #endif
6298 return value;
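/* Illustrative sketch (editorial example, not part of the GCC sources):
   a typical use of force_operand is to reduce address arithmetic built
   up as rtl into something a later memory reference can accept.  */
#if 0
static rtx
example_force_address (void)
{
  rtx base = gen_reg_rtx (Pmode);
  rtx addr = gen_rtx_PLUS (Pmode, base, GEN_INT (16));

  /* Emits an add insn if needed; the result is a REG, SUBREG, MEM
     or constant.  */
  return force_operand (addr, NULL_RTX);
}
#endif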
6301 /* Subroutine of expand_expr: return nonzero iff there is no way that
6302 EXP can reference X, which is being modified. TOP_P is nonzero if this
6303 call is going to be used to determine whether we need a temporary
6304 for EXP, as opposed to a recursive call to this function.
6306 It is always safe for this routine to return zero since it merely
6307 searches for optimization opportunities. */
6309 static int
6310 safe_from_p (const_rtx x, tree exp, int top_p)
6312 rtx exp_rtl = 0;
6313 int i, nops;
6315 if (x == 0
6316 /* If EXP has varying size, we MUST use a target since we currently
6317 have no way of allocating temporaries of variable size
6318 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6319 So we assume here that something at a higher level has prevented a
6320 clash. This is somewhat bogus, but the best we can do. Only
6321 do this when X is BLKmode and when we are at the top level. */
6322 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6323 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6324 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6325 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6326 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6327 != INTEGER_CST)
6328 && GET_MODE (x) == BLKmode)
6329 /* If X is in the outgoing argument area, it is always safe. */
6330 || (MEM_P (x)
6331 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6332 || (GET_CODE (XEXP (x, 0)) == PLUS
6333 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6334 return 1;
6336 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6337 find the underlying pseudo. */
6338 if (GET_CODE (x) == SUBREG)
6340 x = SUBREG_REG (x);
6341 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6342 return 0;
6345 /* Now look at our tree code and possibly recurse. */
6346 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6348 case tcc_declaration:
6349 exp_rtl = DECL_RTL_IF_SET (exp);
6350 break;
6352 case tcc_constant:
6353 return 1;
6355 case tcc_exceptional:
6356 if (TREE_CODE (exp) == TREE_LIST)
6358 while (1)
6360 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6361 return 0;
6362 exp = TREE_CHAIN (exp);
6363 if (!exp)
6364 return 1;
6365 if (TREE_CODE (exp) != TREE_LIST)
6366 return safe_from_p (x, exp, 0);
6369 else if (TREE_CODE (exp) == CONSTRUCTOR)
6371 constructor_elt *ce;
6372 unsigned HOST_WIDE_INT idx;
6374 for (idx = 0;
6375 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6376 idx++)
6377 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6378 || !safe_from_p (x, ce->value, 0))
6379 return 0;
6380 return 1;
6382 else if (TREE_CODE (exp) == ERROR_MARK)
6383 return 1; /* An already-visited SAVE_EXPR? */
6384 else
6385 return 0;
6387 case tcc_statement:
6388 /* The only case we look at here is the DECL_INITIAL inside a
6389 DECL_EXPR. */
6390 return (TREE_CODE (exp) != DECL_EXPR
6391 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6392 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6393 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6395 case tcc_binary:
6396 case tcc_comparison:
6397 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6398 return 0;
6399 /* Fall through. */
6401 case tcc_unary:
6402 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6404 case tcc_expression:
6405 case tcc_reference:
6406 case tcc_vl_exp:
6407 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6408 the expression. If it is set, we conflict iff we are that rtx or
6409 both are in memory. Otherwise, we check all operands of the
6410 expression recursively. */
6412 switch (TREE_CODE (exp))
6414 case ADDR_EXPR:
6415 /* If the operand is static or we are static, we can't conflict.
6416 Likewise if we don't conflict with the operand at all. */
6417 if (staticp (TREE_OPERAND (exp, 0))
6418 || TREE_STATIC (exp)
6419 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6420 return 1;
6422 /* Otherwise, the only way this can conflict is if we are taking
6423 the address of a DECL and that address is part of X, which is
6424 very rare. */
6425 exp = TREE_OPERAND (exp, 0);
6426 if (DECL_P (exp))
6428 if (!DECL_RTL_SET_P (exp)
6429 || !MEM_P (DECL_RTL (exp)))
6430 return 0;
6431 else
6432 exp_rtl = XEXP (DECL_RTL (exp), 0);
6434 break;
6436 case MISALIGNED_INDIRECT_REF:
6437 case ALIGN_INDIRECT_REF:
6438 case INDIRECT_REF:
6439 if (MEM_P (x)
6440 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6441 get_alias_set (exp)))
6442 return 0;
6443 break;
6445 case CALL_EXPR:
6446 /* Assume that the call will clobber all hard registers and
6447 all of memory. */
6448 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6449 || MEM_P (x))
6450 return 0;
6451 break;
6453 case WITH_CLEANUP_EXPR:
6454 case CLEANUP_POINT_EXPR:
6455 /* Lowered by gimplify.c. */
6456 gcc_unreachable ();
6458 case SAVE_EXPR:
6459 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6461 default:
6462 break;
6465 /* If we have an rtx, we do not need to scan our operands. */
6466 if (exp_rtl)
6467 break;
6469 nops = TREE_OPERAND_LENGTH (exp);
6470 for (i = 0; i < nops; i++)
6471 if (TREE_OPERAND (exp, i) != 0
6472 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6473 return 0;
6475 break;
6477 case tcc_type:
6478 /* Should never get a type here. */
6479 gcc_unreachable ();
6481 case tcc_gimple_stmt:
6482 gcc_unreachable ();
6485 /* If we have an rtl, find any enclosed object. Then see if we conflict
6486 with it. */
6487 if (exp_rtl)
6489 if (GET_CODE (exp_rtl) == SUBREG)
6491 exp_rtl = SUBREG_REG (exp_rtl);
6492 if (REG_P (exp_rtl)
6493 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6494 return 0;
6497 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6498 are memory and they conflict. */
6499 return ! (rtx_equal_p (x, exp_rtl)
6500 || (MEM_P (x) && MEM_P (exp_rtl)
6501 && true_dependence (exp_rtl, VOIDmode, x,
6502 rtx_addr_varies_p)));
6505 /* If we reach here, it is safe. */
6506 return 1;
6510 /* Return the highest power of two that EXP is known to be a multiple of.
6511 This is used in updating alignment of MEMs in array references. */
6513 unsigned HOST_WIDE_INT
6514 highest_pow2_factor (const_tree exp)
6516 unsigned HOST_WIDE_INT c0, c1;
6518 switch (TREE_CODE (exp))
6520 case INTEGER_CST:
6521 /* We can find the lowest bit that's a one. If the low
6522 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6523 We need to handle this case since we can find it in a COND_EXPR,
6524 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6525 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6526 later ICE. */
6527 if (TREE_OVERFLOW (exp))
6528 return BIGGEST_ALIGNMENT;
6529 else
6531 /* Note: tree_low_cst is intentionally not used here,
6532 we don't care about the upper bits. */
6533 c0 = TREE_INT_CST_LOW (exp);
6534 c0 &= -c0;
6535 return c0 ? c0 : BIGGEST_ALIGNMENT;
6537 break;
6539 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6540 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6541 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6542 return MIN (c0, c1);
6544 case MULT_EXPR:
6545 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6546 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6547 return c0 * c1;
6549 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6550 case CEIL_DIV_EXPR:
6551 if (integer_pow2p (TREE_OPERAND (exp, 1))
6552 && host_integerp (TREE_OPERAND (exp, 1), 1))
6554 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6555 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6556 return MAX (1, c0 / c1);
6558 break;
6560 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6561 case SAVE_EXPR:
6562 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6564 case COMPOUND_EXPR:
6565 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6567 case COND_EXPR:
6568 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6569 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6570 return MIN (c0, c1);
6572 default:
6573 break;
6576 return 1;
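/* Worked example (editorial, not from the sources): for the expression
   i * 12 + 8, the MULT_EXPR case yields 1 * 4 = 4 (the factor of the
   variable i defaults to 1, and 12 & -12 is 4), the constant 8
   contributes 8, and the PLUS_EXPR case returns MIN (4, 8) = 4, so the
   whole expression is known to be a multiple of 4.  */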
6579 /* Similar, except that the alignment requirements of TARGET are
6580 taken into account. Assume it is at least as aligned as its
6581 type, unless it is a COMPONENT_REF in which case the layout of
6582 the structure gives the alignment. */
6584 static unsigned HOST_WIDE_INT
6585 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6587 unsigned HOST_WIDE_INT target_align, factor;
6589 factor = highest_pow2_factor (exp);
6590 if (TREE_CODE (target) == COMPONENT_REF)
6591 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6592 else
6593 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6594 return MAX (factor, target_align);
6597 /* Return &VAR expression for emulated thread local VAR. */
6599 static tree
6600 emutls_var_address (tree var)
6602 tree emuvar = emutls_decl (var);
6603 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6604 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6605 tree arglist = build_tree_list (NULL_TREE, arg);
6606 tree call = build_function_call_expr (fn, arglist);
6607 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6610 /* Expands variable VAR. */
6612 void
6613 expand_var (tree var)
6615 if (DECL_EXTERNAL (var))
6616 return;
6618 if (TREE_STATIC (var))
6619 /* If this is an inlined copy of a static local variable,
6620 look up the original decl. */
6621 var = DECL_ORIGIN (var);
6623 if (TREE_STATIC (var)
6624 ? !TREE_ASM_WRITTEN (var)
6625 : !DECL_RTL_SET_P (var))
6627 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6628 /* Should be ignored. */;
6629 else if (lang_hooks.expand_decl (var))
6630 /* OK. */;
6631 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6632 expand_decl (var);
6633 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6634 rest_of_decl_compilation (var, 0, 0);
6635 else
6636 /* No expansion needed. */
6637 gcc_assert (TREE_CODE (var) == TYPE_DECL
6638 || TREE_CODE (var) == CONST_DECL
6639 || TREE_CODE (var) == FUNCTION_DECL
6640 || TREE_CODE (var) == LABEL_DECL);
6644 /* Subroutine of expand_expr. Expand the two operands of a binary
6645 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6646 The value may be stored in TARGET if TARGET is nonzero. The
6647 MODIFIER argument is as documented by expand_expr. */
6649 static void
6650 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6651 enum expand_modifier modifier)
6653 if (! safe_from_p (target, exp1, 1))
6654 target = 0;
6655 if (operand_equal_p (exp0, exp1, 0))
6657 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6658 *op1 = copy_rtx (*op0);
6660 else
6662 /* If we need to preserve evaluation order, copy exp0 into its own
6663 temporary variable so that it can't be clobbered by exp1. */
6664 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6665 exp0 = save_expr (exp0);
6666 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6667 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6672 /* Return a MEM that contains constant EXP. DEFER is as for
6673 output_constant_def and MODIFIER is as for expand_expr. */
6675 static rtx
6676 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6678 rtx mem;
6680 mem = output_constant_def (exp, defer);
6681 if (modifier != EXPAND_INITIALIZER)
6682 mem = use_anchored_address (mem);
6683 return mem;
6686 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6687 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6689 static rtx
6690 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6691 enum expand_modifier modifier)
6693 rtx result, subtarget;
6694 tree inner, offset;
6695 HOST_WIDE_INT bitsize, bitpos;
6696 int volatilep, unsignedp;
6697 enum machine_mode mode1;
6699 /* If we are taking the address of a constant and are at the top level,
6700 we have to use output_constant_def since we can't call force_const_mem
6701 at top level. */
6702 /* ??? This should be considered a front-end bug. We should not be
6703 generating ADDR_EXPR of something that isn't an LVALUE. The only
6704 exception here is STRING_CST. */
6705 if (TREE_CODE (exp) == CONSTRUCTOR
6706 || CONSTANT_CLASS_P (exp))
6707 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6709 /* Everything must be something allowed by is_gimple_addressable. */
6710 switch (TREE_CODE (exp))
6712 case INDIRECT_REF:
6713 /* This case will happen via recursion for &a->b. */
6714 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6716 case CONST_DECL:
6717 /* Recurse and make the output_constant_def clause above handle this. */
6718 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6719 tmode, modifier);
6721 case REALPART_EXPR:
6722 /* The real part of the complex number is always first, therefore
6723 the address is the same as the address of the parent object. */
6724 offset = 0;
6725 bitpos = 0;
6726 inner = TREE_OPERAND (exp, 0);
6727 break;
6729 case IMAGPART_EXPR:
6730 /* The imaginary part of the complex number is always second.
6731 The expression is therefore always offset by the size of the
6732 scalar type. */
6733 offset = 0;
6734 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6735 inner = TREE_OPERAND (exp, 0);
6736 break;
6738 case VAR_DECL:
6739 /* TLS emulation hook - replace __thread VAR's &VAR with
6740 __emutls_get_address (&_emutls.VAR). */
6741 if (! targetm.have_tls
6742 && TREE_CODE (exp) == VAR_DECL
6743 && DECL_THREAD_LOCAL_P (exp))
6745 exp = emutls_var_address (exp);
6746 return expand_expr (exp, target, tmode, modifier);
6748 /* Fall through. */
6750 default:
6751 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6752 expand_expr, as that can have various side effects; LABEL_DECLs for
6753 example, may not have their DECL_RTL set yet. Assume language
6754 specific tree nodes can be expanded in some interesting way. */
6755 if (DECL_P (exp)
6756 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6758 result = expand_expr (exp, target, tmode,
6759 modifier == EXPAND_INITIALIZER
6760 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6762 /* If the DECL isn't in memory, then the DECL wasn't properly
6763 marked TREE_ADDRESSABLE, which will be either a front-end
6764 or a tree optimizer bug. */
6765 gcc_assert (MEM_P (result));
6766 result = XEXP (result, 0);
6768 /* ??? Is this needed anymore? */
6769 if (DECL_P (exp) && ! TREE_USED (exp))
6771 assemble_external (exp);
6772 TREE_USED (exp) = 1;
6775 if (modifier != EXPAND_INITIALIZER
6776 && modifier != EXPAND_CONST_ADDRESS)
6777 result = force_operand (result, target);
6778 return result;
6781 /* Pass FALSE as the last argument to get_inner_reference although
6782 we are expanding to RTL. The rationale is that we know how to
6783 handle "aligning nodes" here: we can just bypass them because
6784 they won't change the final object whose address will be returned
6785 (they actually exist only for that purpose). */
6786 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6787 &mode1, &unsignedp, &volatilep, false);
6788 break;
6791 /* We must have made progress. */
6792 gcc_assert (inner != exp);
6794 subtarget = offset || bitpos ? NULL_RTX : target;
6795 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6797 if (offset)
6799 rtx tmp;
6801 if (modifier != EXPAND_NORMAL)
6802 result = force_operand (result, NULL);
6803 tmp = expand_expr (offset, NULL_RTX, tmode,
6804 modifier == EXPAND_INITIALIZER
6805 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6807 result = convert_memory_address (tmode, result);
6808 tmp = convert_memory_address (tmode, tmp);
6810 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6811 result = gen_rtx_PLUS (tmode, result, tmp);
6812 else
6814 subtarget = bitpos ? NULL_RTX : target;
6815 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6816 1, OPTAB_LIB_WIDEN);
6820 if (bitpos)
6822 /* Someone beforehand should have rejected taking the address
6823 of such an object. */
6824 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6826 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6827 if (modifier < EXPAND_SUM)
6828 result = force_operand (result, target);
6831 return result;
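/* Illustrative sketch (editorial, not from the sources): for an address
   like &p->f where the field f lives at byte 8 of *p, the code above
   expands p itself, finds no variable offset, and folds the constant
   part in with plus_constant (result, 8).  */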
6834 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6835 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6837 static rtx
6838 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6839 enum expand_modifier modifier)
6841 enum machine_mode rmode;
6842 rtx result;
6844 /* Target mode of VOIDmode says "whatever's natural". */
6845 if (tmode == VOIDmode)
6846 tmode = TYPE_MODE (TREE_TYPE (exp));
6848 /* We can get called with some Weird Things if the user does silliness
6849 like "(short) &a". In that case, convert_memory_address won't do
6850 the right thing, so ignore the given target mode. */
6851 if (tmode != Pmode && tmode != ptr_mode)
6852 tmode = Pmode;
6854 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6855 tmode, modifier);
6857 /* Despite expand_expr's claims about ignoring TMODE when not
6858 strictly convenient, stuff breaks if we don't honor it. Note
6859 that combined with the above, we only do this for pointer modes. */
6860 rmode = GET_MODE (result);
6861 if (rmode == VOIDmode)
6862 rmode = tmode;
6863 if (rmode != tmode)
6864 result = convert_memory_address (tmode, result);
6866 return result;
6870 /* expand_expr: generate code for computing expression EXP.
6871 An rtx for the computed value is returned. The value is never null.
6872 In the case of a void EXP, const0_rtx is returned.
6874 The value may be stored in TARGET if TARGET is nonzero.
6875 TARGET is just a suggestion; callers must assume that
6876 the rtx returned may not be the same as TARGET.
6878 If TARGET is CONST0_RTX, it means that the value will be ignored.
6880 If TMODE is not VOIDmode, it suggests generating the
6881 result in mode TMODE. But this is done only when convenient.
6882 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6883 TMODE is just a suggestion; callers must assume that
6884 the rtx returned may not have mode TMODE.
6886 Note that TARGET may have neither TMODE nor MODE. In that case, it
6887 probably will not be used.
6889 If MODIFIER is EXPAND_SUM then when EXP is an addition
6890 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6891 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6892 products as above, or REG or MEM, or constant.
6893 Ordinarily in such cases we would output mul or add instructions
6894 and then return a pseudo reg containing the sum.
6896 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6897 it also marks a label as absolutely required (it can't be dead).
6898 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6899 This is used for outputting expressions used in initializers.
6901 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6902 with a constant address even if that address is not normally legitimate.
6903 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6905 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6906 a call parameter. Such targets require special care as we haven't yet
6907 marked TARGET so that it's safe from being trashed by libcalls. We
6908 don't want to use TARGET for anything but the final result;
6909 Intermediate values must go elsewhere. Additionally, calls to
6910 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6912 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6913 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6914 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6915 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6916 recursively. */
6918 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6919 enum expand_modifier, rtx *);
6921 rtx
6922 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6923 enum expand_modifier modifier, rtx *alt_rtl)
6925 int rn = -1;
6926 rtx ret, last = NULL;
6928 /* Handle ERROR_MARK before anybody tries to access its type. */
6929 if (TREE_CODE (exp) == ERROR_MARK
6930 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6932 ret = CONST0_RTX (tmode);
6933 return ret ? ret : const0_rtx;
6936 if (flag_non_call_exceptions)
6938 rn = lookup_stmt_eh_region (exp);
6939 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6940 if (rn >= 0)
6941 last = get_last_insn ();
6944 /* If this is an expression of some kind and it has an associated line
6945 number, then emit the line number before expanding the expression.
6947 We need to save and restore the file and line information so that
6948 errors discovered during expansion are emitted with the right
6949 information. It would be better if the diagnostic routines
6950 used the file/line information embedded in the tree nodes rather
6951 than globals. */
6952 if (cfun && EXPR_HAS_LOCATION (exp))
6954 location_t saved_location = input_location;
6955 input_location = EXPR_LOCATION (exp);
6956 set_curr_insn_source_location (input_location);
6958 /* Record where the insns produced belong. */
6959 set_curr_insn_block (TREE_BLOCK (exp));
6961 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6963 input_location = saved_location;
6965 else
6967 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6970 /* If using non-call exceptions, mark all insns that may trap.
6971 expand_call() will mark CALL_INSNs before we get to this code,
6972 but it doesn't handle libcalls, and these may trap. */
6973 if (rn >= 0)
6975 rtx insn;
6976 for (insn = next_real_insn (last); insn;
6977 insn = next_real_insn (insn))
6979 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6980 /* If we want exceptions for non-call insns, any
6981 may_trap_p instruction may throw. */
6982 && GET_CODE (PATTERN (insn)) != CLOBBER
6983 && GET_CODE (PATTERN (insn)) != USE
6984 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6986 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6987 REG_NOTES (insn));
6992 return ret;
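/* Illustrative sketch (editorial example, not part of the GCC sources;
   the fragment below uses placeholder variables): the two common ways
   this entry point is reached.  expand_normal is assumed to be the
   usual wrapper passing NULL_RTX, VOIDmode and EXPAND_NORMAL.  */
#if 0
  /* Force the value into a fresh or suggested target.  */
  op0 = expand_expr (exp, target, mode, EXPAND_NORMAL);

  /* Allow an un-forced (PLUS ...) form, useful for address arithmetic
     that memory_address will legitimize later.  */
  op1 = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
#endif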
6995 static rtx
6996 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6997 enum expand_modifier modifier, rtx *alt_rtl)
6999 rtx op0, op1, op2, temp, decl_rtl;
7000 tree type;
7001 int unsignedp;
7002 enum machine_mode mode;
7003 enum tree_code code = TREE_CODE (exp);
7004 optab this_optab;
7005 rtx subtarget, original_target;
7006 int ignore;
7007 tree context, subexp0, subexp1;
7008 bool reduce_bit_field = false;
7009 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
7010 ? reduce_to_bit_field_precision ((expr), \
7011 target, \
7012 type) \
7013 : (expr))
7015 if (GIMPLE_STMT_P (exp))
7017 type = void_type_node;
7018 mode = VOIDmode;
7019 unsignedp = 0;
7021 else
7023 type = TREE_TYPE (exp);
7024 mode = TYPE_MODE (type);
7025 unsignedp = TYPE_UNSIGNED (type);
7027 if (lang_hooks.reduce_bit_field_operations
7028 && TREE_CODE (type) == INTEGER_TYPE
7029 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
7031 /* An operation in what may be a bit-field type needs the
7032 result to be reduced to the precision of the bit-field type,
7033 which is narrower than that of the type's mode. */
7034 reduce_bit_field = true;
7035 if (modifier == EXPAND_STACK_PARM)
7036 target = 0;
7039 /* Use subtarget as the target for operand 0 of a binary operation. */
7040 subtarget = get_subtarget (target);
7041 original_target = target;
7042 ignore = (target == const0_rtx
7043 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
7044 || code == CONVERT_EXPR || code == COND_EXPR
7045 || code == VIEW_CONVERT_EXPR)
7046 && TREE_CODE (type) == VOID_TYPE));
7048 /* If we are going to ignore this result, we need only do something
7049 if there is a side-effect somewhere in the expression. If there
7050 is, short-circuit the most common cases here. Note that we must
7051 not call expand_expr with anything but const0_rtx in case this
7052 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7054 if (ignore)
7056 if (! TREE_SIDE_EFFECTS (exp))
7057 return const0_rtx;
7059 /* Ensure we reference a volatile object even if value is ignored, but
7060 don't do this if all we are doing is taking its address. */
7061 if (TREE_THIS_VOLATILE (exp)
7062 && TREE_CODE (exp) != FUNCTION_DECL
7063 && mode != VOIDmode && mode != BLKmode
7064 && modifier != EXPAND_CONST_ADDRESS)
7066 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7067 if (MEM_P (temp))
7068 temp = copy_to_reg (temp);
7069 return const0_rtx;
7072 if (TREE_CODE_CLASS (code) == tcc_unary
7073 || code == COMPONENT_REF || code == INDIRECT_REF)
7074 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7075 modifier);
7077 else if (TREE_CODE_CLASS (code) == tcc_binary
7078 || TREE_CODE_CLASS (code) == tcc_comparison
7079 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7081 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7082 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7083 return const0_rtx;
7085 else if (code == BIT_FIELD_REF)
7087 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7088 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7089 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7090 return const0_rtx;
7093 target = 0;
7097 switch (code)
7099 case LABEL_DECL:
7101 tree function = decl_function_context (exp);
7103 temp = label_rtx (exp);
7104 temp = gen_rtx_LABEL_REF (Pmode, temp);
7106 if (function != current_function_decl
7107 && function != 0)
7108 LABEL_REF_NONLOCAL_P (temp) = 1;
7110 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7111 return temp;
7114 case SSA_NAME:
7115 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7116 NULL);
7118 case PARM_DECL:
7119 case VAR_DECL:
7120 /* If a static var's type was incomplete when the decl was written,
7121 but the type is complete now, lay out the decl now. */
7122 if (DECL_SIZE (exp) == 0
7123 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7124 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7125 layout_decl (exp, 0);
7127 /* TLS emulation hook - replace __thread vars with
7128 *__emutls_get_address (&_emutls.var). */
7129 if (! targetm.have_tls
7130 && TREE_CODE (exp) == VAR_DECL
7131 && DECL_THREAD_LOCAL_P (exp))
7133 exp = build_fold_indirect_ref (emutls_var_address (exp));
7134 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7137 /* ... fall through ... */
7139 case FUNCTION_DECL:
7140 case RESULT_DECL:
7141 decl_rtl = DECL_RTL (exp);
7142 gcc_assert (decl_rtl);
7143 decl_rtl = copy_rtx (decl_rtl);
7145 /* Ensure the variable is marked as used even if it doesn't go through
7146 a parser. If it hasn't been used yet, write out an external
7147 definition. */
7148 if (! TREE_USED (exp))
7150 assemble_external (exp);
7151 TREE_USED (exp) = 1;
7154 /* Show we haven't gotten RTL for this yet. */
7155 temp = 0;
7157 /* Variables inherited from containing functions should have
7158 been lowered by this point. */
7159 context = decl_function_context (exp);
7160 gcc_assert (!context
7161 || context == current_function_decl
7162 || TREE_STATIC (exp)
7163 /* ??? C++ creates functions that are not TREE_STATIC. */
7164 || TREE_CODE (exp) == FUNCTION_DECL);
7166 /* This is the case of an array whose size is to be determined
7167 from its initializer, while the initializer is still being parsed.
7168 See expand_decl. */
7170 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7171 temp = validize_mem (decl_rtl);
7173 /* If DECL_RTL is memory, we are in the normal case: if either
7174 the address is not valid, or it is not a register and -fforce-addr
7175 is specified, get the address into a register. */
7177 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7179 if (alt_rtl)
7180 *alt_rtl = decl_rtl;
7181 decl_rtl = use_anchored_address (decl_rtl);
7182 if (modifier != EXPAND_CONST_ADDRESS
7183 && modifier != EXPAND_SUM
7184 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7185 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7186 temp = replace_equiv_address (decl_rtl,
7187 copy_rtx (XEXP (decl_rtl, 0)));
7190 /* If we got something, return it. But first, set the alignment
7191 if the address is a register. */
7192 if (temp != 0)
7194 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7195 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7197 return temp;
7200 /* If the mode of DECL_RTL does not match that of the decl, it
7201 must be a promoted value. We return a SUBREG of the wanted mode,
7202 but mark it so that we know that it was already extended. */
7204 if (REG_P (decl_rtl)
7205 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7207 enum machine_mode pmode;
7209 /* Get the signedness used for this variable. Ensure we get the
7210 same mode we got when the variable was declared. */
7211 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7212 (TREE_CODE (exp) == RESULT_DECL
7213 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7214 gcc_assert (GET_MODE (decl_rtl) == pmode);
7216 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7217 SUBREG_PROMOTED_VAR_P (temp) = 1;
7218 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7219 return temp;
7222 return decl_rtl;
7224 case INTEGER_CST:
7225 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7226 TREE_INT_CST_HIGH (exp), mode);
7228 return temp;
7230 case VECTOR_CST:
7232 tree tmp = NULL_TREE;
7233 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7234 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7235 return const_vector_from_tree (exp);
7236 if (GET_MODE_CLASS (mode) == MODE_INT)
7238 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7239 if (type_for_mode)
7240 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7242 if (!tmp)
7243 tmp = build_constructor_from_list (type,
7244 TREE_VECTOR_CST_ELTS (exp));
7245 return expand_expr (tmp, ignore ? const0_rtx : target,
7246 tmode, modifier);
7249 case CONST_DECL:
7250 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7252 case REAL_CST:
7253 /* If optimized, generate immediate CONST_DOUBLE
7254 which will be turned into memory by reload if necessary.
7256 We used to force a register so that loop.c could see it. But
7257 this does not allow gen_* patterns to perform optimizations with
7258 the constants. It also produces two insns in cases like "x = 1.0;".
7259 On most machines, floating-point constants are not permitted in
7260 many insns, so we'd end up copying it to a register in any case.
7262 Now, we do the copying in expand_binop, if appropriate. */
7263 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7264 TYPE_MODE (TREE_TYPE (exp)));
7266 case COMPLEX_CST:
7267 /* Handle evaluating a complex constant in a CONCAT target. */
7268 if (original_target && GET_CODE (original_target) == CONCAT)
7270 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7271 rtx rtarg, itarg;
7273 rtarg = XEXP (original_target, 0);
7274 itarg = XEXP (original_target, 1);
7276 /* Move the real and imaginary parts separately. */
7277 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7278 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7280 if (op0 != rtarg)
7281 emit_move_insn (rtarg, op0);
7282 if (op1 != itarg)
7283 emit_move_insn (itarg, op1);
7285 return original_target;
7288 /* ... fall through ... */
7290 case STRING_CST:
7291 temp = expand_expr_constant (exp, 1, modifier);
7293 /* temp contains a constant address.
7294 On RISC machines where a constant address isn't valid,
7295 make some insns to get that address into a register. */
7296 if (modifier != EXPAND_CONST_ADDRESS
7297 && modifier != EXPAND_INITIALIZER
7298 && modifier != EXPAND_SUM
7299 && (! memory_address_p (mode, XEXP (temp, 0))
7300 || flag_force_addr))
7301 return replace_equiv_address (temp,
7302 copy_rtx (XEXP (temp, 0)));
7303 return temp;
7305 case SAVE_EXPR:
7307 tree val = TREE_OPERAND (exp, 0);
7308 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7310 if (!SAVE_EXPR_RESOLVED_P (exp))
7312 /* We can indeed still hit this case, typically via builtin
7313 expanders calling save_expr immediately before expanding
7314 something. Assume this means that we only have to deal
7315 with non-BLKmode values. */
7316 gcc_assert (GET_MODE (ret) != BLKmode);
7318 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7319 DECL_ARTIFICIAL (val) = 1;
7320 DECL_IGNORED_P (val) = 1;
7321 TREE_OPERAND (exp, 0) = val;
7322 SAVE_EXPR_RESOLVED_P (exp) = 1;
7324 if (!CONSTANT_P (ret))
7325 ret = copy_to_reg (ret);
7326 SET_DECL_RTL (val, ret);
7329 return ret;
7332 case GOTO_EXPR:
7333 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7334 expand_goto (TREE_OPERAND (exp, 0));
7335 else
7336 expand_computed_goto (TREE_OPERAND (exp, 0));
7337 return const0_rtx;
7339 case CONSTRUCTOR:
7340 /* If we don't need the result, just ensure we evaluate any
7341 subexpressions. */
7342 if (ignore)
7344 unsigned HOST_WIDE_INT idx;
7345 tree value;
7347 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7348 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7350 return const0_rtx;
7353 /* Try to avoid creating a temporary at all. This is possible
7354 if all of the initializer is zero.
7355 FIXME: try to handle all [0..255] initializers we can handle
7356 with memset. */
7357 else if (TREE_STATIC (exp)
7358 && !TREE_ADDRESSABLE (exp)
7359 && target != 0 && mode == BLKmode
7360 && all_zeros_p (exp))
7362 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7363 return target;
7366 /* All elts simple constants => refer to a constant in memory. But
7367 if this is a non-BLKmode mode, let it store a field at a time
7368 since that should make a CONST_INT or CONST_DOUBLE when we
7369 fold. Likewise, if we have a target we can use, it is best to
7370 store directly into the target unless the type is large enough
7371 that memcpy will be used. If we are making an initializer and
7372 all operands are constant, put it in memory as well.
7374 FIXME: Avoid trying to fill vector constructors piecemeal.
7375 Output them with output_constant_def below unless we're sure
7376 they're zeros. This should go away when vector initializers
7377 are treated like VECTOR_CST instead of arrays. */
7379 else if ((TREE_STATIC (exp)
7380 && ((mode == BLKmode
7381 && ! (target != 0 && safe_from_p (target, exp, 1)))
7382 || TREE_ADDRESSABLE (exp)
7383 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7384 && (! MOVE_BY_PIECES_P
7385 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7386 TYPE_ALIGN (type)))
7387 && ! mostly_zeros_p (exp))))
7388 || ((modifier == EXPAND_INITIALIZER
7389 || modifier == EXPAND_CONST_ADDRESS)
7390 && TREE_CONSTANT (exp)))
7392 rtx constructor = expand_expr_constant (exp, 1, modifier);
7394 if (modifier != EXPAND_CONST_ADDRESS
7395 && modifier != EXPAND_INITIALIZER
7396 && modifier != EXPAND_SUM)
7397 constructor = validize_mem (constructor);
7399 return constructor;
7401 else
7403 /* Handle calls that pass values in multiple non-contiguous
7404 locations. The Irix 6 ABI has examples of this. */
7405 if (target == 0 || ! safe_from_p (target, exp, 1)
7406 || GET_CODE (target) == PARALLEL
7407 || modifier == EXPAND_STACK_PARM)
7408 target
7409 = assign_temp (build_qualified_type (type,
7410 (TYPE_QUALS (type)
7411 | (TREE_READONLY (exp)
7412 * TYPE_QUAL_CONST))),
7413 0, TREE_ADDRESSABLE (exp), 1);
7415 store_constructor (exp, target, 0, int_expr_size (exp));
7416 return target;
7419 case MISALIGNED_INDIRECT_REF:
7420 case ALIGN_INDIRECT_REF:
7421 case INDIRECT_REF:
7423 tree exp1 = TREE_OPERAND (exp, 0);
7425 if (modifier != EXPAND_WRITE)
7427 tree t;
7429 t = fold_read_from_constant_string (exp);
7430 if (t)
7431 return expand_expr (t, target, tmode, modifier);
7434 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7435 op0 = memory_address (mode, op0);
7437 if (code == ALIGN_INDIRECT_REF)
7439 int align = TYPE_ALIGN_UNIT (type);
7440 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7441 op0 = memory_address (mode, op0);
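/* Editorial note (illustrative sketch, not part of the original source):
   assuming ALIGN is a power of two, ANDing an address with -ALIGN rounds
   it down to an ALIGN-byte boundary, which is what the AND above does in
   RTL.  The same trick in plain C, using uintptr_t from <stdint.h>:

     uintptr_t align_down (uintptr_t addr, uintptr_t align)
     {
       return addr & -align;    // for align == 16 this clears the low 4 bits
     }
 */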
7444 temp = gen_rtx_MEM (mode, op0);
7446 set_mem_attributes (temp, exp, 0);
7448 /* Resolve the misalignment now, so that we don't have to remember
7449 to resolve it later. Of course, this only works for reads. */
7450 /* ??? When we get around to supporting writes, we'll have to handle
7451 this in store_expr directly. The vectorizer isn't generating
7452 those yet, however. */
7453 if (code == MISALIGNED_INDIRECT_REF)
7455 int icode;
7456 rtx reg, insn;
7458 gcc_assert (modifier == EXPAND_NORMAL
7459 || modifier == EXPAND_STACK_PARM);
7461 /* The vectorizer should have already checked the mode. */
7462 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7463 gcc_assert (icode != CODE_FOR_nothing);
7465 /* We've already validated the memory, and we're creating a
7466 new pseudo destination. The predicates really can't fail. */
7467 reg = gen_reg_rtx (mode);
7469 /* Nor can the insn generator. */
7470 insn = GEN_FCN (icode) (reg, temp);
7471 emit_insn (insn);
7473 return reg;
7476 return temp;
7479 case TARGET_MEM_REF:
7481 struct mem_address addr;
7483 get_address_description (exp, &addr);
7484 op0 = addr_for_mem_ref (&addr, true);
7485 op0 = memory_address (mode, op0);
7486 temp = gen_rtx_MEM (mode, op0);
7487 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7489 return temp;
7491 case ARRAY_REF:
7494 tree array = TREE_OPERAND (exp, 0);
7495 tree index = TREE_OPERAND (exp, 1);
7497 /* Fold an expression like: "foo"[2].
7498 This is not done in fold so it won't happen inside &.
7499 Don't fold if this is for wide characters since it's too
7500 difficult to do correctly and this is a very rare case. */
7502 if (modifier != EXPAND_CONST_ADDRESS
7503 && modifier != EXPAND_INITIALIZER
7504 && modifier != EXPAND_MEMORY)
7506 tree t = fold_read_from_constant_string (exp);
7508 if (t)
7509 return expand_expr (t, target, tmode, modifier);
7512 /* If this is a constant index into a constant array,
7513 just get the value from the array. Handle both cases: when
7514 we have an explicit constructor and when our operand is a variable
7515 that was declared const. */
7517 if (modifier != EXPAND_CONST_ADDRESS
7518 && modifier != EXPAND_INITIALIZER
7519 && modifier != EXPAND_MEMORY
7520 && TREE_CODE (array) == CONSTRUCTOR
7521 && ! TREE_SIDE_EFFECTS (array)
7522 && TREE_CODE (index) == INTEGER_CST)
7524 unsigned HOST_WIDE_INT ix;
7525 tree field, value;
7527 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7528 field, value)
7529 if (tree_int_cst_equal (field, index))
7531 if (!TREE_SIDE_EFFECTS (value))
7532 return expand_expr (fold (value), target, tmode, modifier);
7533 break;
7537 else if (optimize >= 1
7538 && modifier != EXPAND_CONST_ADDRESS
7539 && modifier != EXPAND_INITIALIZER
7540 && modifier != EXPAND_MEMORY
7541 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7542 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7543 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7544 && targetm.binds_local_p (array))
7546 if (TREE_CODE (index) == INTEGER_CST)
7548 tree init = DECL_INITIAL (array);
7550 if (TREE_CODE (init) == CONSTRUCTOR)
7552 unsigned HOST_WIDE_INT ix;
7553 tree field, value;
7555 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7556 field, value)
7557 if (tree_int_cst_equal (field, index))
7559 if (!TREE_SIDE_EFFECTS (value))
7560 return expand_expr (fold (value), target, tmode,
7561 modifier);
7562 break;
7565 else if (TREE_CODE (init) == STRING_CST)
7567 tree index1 = index;
7568 tree low_bound = array_ref_low_bound (exp);
7569 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7571 /* Optimize the special-case of a zero lower bound.
7573 We convert the low_bound to sizetype to avoid some problems
7574 with constant folding. (E.g. suppose the lower bound is 1,
7575 and its mode is QI. Without the conversion, (ARRAY
7576 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7577 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7579 if (! integer_zerop (low_bound))
7580 index1 = size_diffop (index1, fold_convert (sizetype,
7581 low_bound));
7583 if (0 > compare_tree_int (index1,
7584 TREE_STRING_LENGTH (init)))
7586 tree type = TREE_TYPE (TREE_TYPE (init));
7587 enum machine_mode mode = TYPE_MODE (type);
7589 if (GET_MODE_CLASS (mode) == MODE_INT
7590 && GET_MODE_SIZE (mode) == 1)
7591 return gen_int_mode (TREE_STRING_POINTER (init)
7592 [TREE_INT_CST_LOW (index1)],
7593 mode);
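/* Editorial sketch (not part of the original source; the example is
   illustrative): with optimization enabled, this path folds a constant
   index into a read-only STRING_CST-initialized array directly to the
   character value, e.g.

     static const char msg[] = "abc";
     int second (void) { return msg[1]; }

   can expand straight to the constant 'b' (98) rather than a load.  */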
7599 goto normal_inner_ref;
7601 case COMPONENT_REF:
7602 /* If the operand is a CONSTRUCTOR, we can just extract the
7603 appropriate field if it is present. */
7604 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7606 unsigned HOST_WIDE_INT idx;
7607 tree field, value;
7609 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7610 idx, field, value)
7611 if (field == TREE_OPERAND (exp, 1)
7612 /* We can normally use the value of the field in the
7613 CONSTRUCTOR. However, if this is a bitfield in
7614 an integral mode that we can fit in a HOST_WIDE_INT,
7615 we must mask only the number of bits in the bitfield,
7616 since this is done implicitly by the constructor. If
7617 the bitfield does not meet either of those conditions,
7618 we can't do this optimization. */
7619 && (! DECL_BIT_FIELD (field)
7620 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7621 && (GET_MODE_BITSIZE (DECL_MODE (field))
7622 <= HOST_BITS_PER_WIDE_INT))))
7624 if (DECL_BIT_FIELD (field)
7625 && modifier == EXPAND_STACK_PARM)
7626 target = 0;
7627 op0 = expand_expr (value, target, tmode, modifier);
7628 if (DECL_BIT_FIELD (field))
7630 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7631 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7633 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7635 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7636 op0 = expand_and (imode, op0, op1, target);
7638 else
7640 tree count
7641 = build_int_cst (NULL_TREE,
7642 GET_MODE_BITSIZE (imode) - bitsize);
7644 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7645 target, 0);
7646 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7647 target, 0);
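/* Editorial illustration (not part of the original source): the shift
   pair above is the usual sign-extension idiom for an N-bit field.  A
   plain-C sketch of the same arithmetic, assuming a 32-bit int with
   arithmetic right shifts:

     int sign_extend (unsigned raw, int nbits)
     {
       int count = 32 - nbits;
       return ((int) (raw << count)) >> count;
     }

   e.g. sign_extend (0x5, 3) == -3.  */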
7651 return op0;
7654 goto normal_inner_ref;
7656 case BIT_FIELD_REF:
7657 case ARRAY_RANGE_REF:
7658 normal_inner_ref:
7660 enum machine_mode mode1;
7661 HOST_WIDE_INT bitsize, bitpos;
7662 tree offset;
7663 int volatilep = 0;
7664 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7665 &mode1, &unsignedp, &volatilep, true);
7666 rtx orig_op0;
7668 /* If we got back the original object, something is wrong. Perhaps
7669 we are evaluating an expression too early. In any event, don't
7670 infinitely recurse. */
7671 gcc_assert (tem != exp);
7673 /* If TEM's type is a union of variable size, pass TARGET to the inner
7674 computation, since it will need a temporary and TARGET is known
7675 to suffice. This occurs in unchecked conversion in Ada. */
7677 orig_op0 = op0
7678 = expand_expr (tem,
7679 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7680 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7681 != INTEGER_CST)
7682 && modifier != EXPAND_STACK_PARM
7683 ? target : NULL_RTX),
7684 VOIDmode,
7685 (modifier == EXPAND_INITIALIZER
7686 || modifier == EXPAND_CONST_ADDRESS
7687 || modifier == EXPAND_STACK_PARM)
7688 ? modifier : EXPAND_NORMAL);
7690 /* If this is a constant, put it into a register if it is a legitimate
7691 constant, OFFSET is 0, and we won't try to extract outside the
7692 register (in case we were passed a partially uninitialized object
7693 or a view_conversion to a larger size). Force the constant to
7694 memory otherwise. */
7695 if (CONSTANT_P (op0))
7697 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7698 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7699 && offset == 0
7700 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7701 op0 = force_reg (mode, op0);
7702 else
7703 op0 = validize_mem (force_const_mem (mode, op0));
7706 /* Otherwise, if this object is not in memory and we either have an
7707 offset, a BLKmode result, or a reference outside the object, put it
7708 there. Such cases can occur in Ada if we have unchecked conversion
7709 of an expression from a scalar type to an array or record type or
7710 for an ARRAY_RANGE_REF whose type is BLKmode. */
7711 else if (!MEM_P (op0)
7712 && (offset != 0
7713 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7714 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7716 tree nt = build_qualified_type (TREE_TYPE (tem),
7717 (TYPE_QUALS (TREE_TYPE (tem))
7718 | TYPE_QUAL_CONST));
7719 rtx memloc = assign_temp (nt, 1, 1, 1);
7721 emit_move_insn (memloc, op0);
7722 op0 = memloc;
7725 if (offset != 0)
7727 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7728 EXPAND_SUM);
7730 gcc_assert (MEM_P (op0));
7732 #ifdef POINTERS_EXTEND_UNSIGNED
7733 if (GET_MODE (offset_rtx) != Pmode)
7734 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7735 #else
7736 if (GET_MODE (offset_rtx) != ptr_mode)
7737 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7738 #endif
7740 if (GET_MODE (op0) == BLKmode
7741 /* A constant address in OP0 can have VOIDmode; we must
7742 not try to call force_reg in that case. */
7743 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7744 && bitsize != 0
7745 && (bitpos % bitsize) == 0
7746 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7747 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7749 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7750 bitpos = 0;
7753 op0 = offset_address (op0, offset_rtx,
7754 highest_pow2_factor (offset));
7757 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7758 record its alignment as BIGGEST_ALIGNMENT. */
7759 if (MEM_P (op0) && bitpos == 0 && offset != 0
7760 && is_aligning_offset (offset, tem))
7761 set_mem_align (op0, BIGGEST_ALIGNMENT);
7763 /* Don't forget about volatility even if this is a bitfield. */
7764 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7766 if (op0 == orig_op0)
7767 op0 = copy_rtx (op0);
7769 MEM_VOLATILE_P (op0) = 1;
7772 /* The following code doesn't handle CONCAT.
7773 Assume only bitpos == 0 can be used for CONCAT, due to
7774 one-element arrays having the same mode as their element. */
7775 if (GET_CODE (op0) == CONCAT)
7777 gcc_assert (bitpos == 0
7778 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7779 return op0;
7782 /* In cases where an aligned union has an unaligned object
7783 as a field, we might be extracting a BLKmode value from
7784 an integer-mode (e.g., SImode) object. Handle this case
7785 by doing the extract into an object as wide as the field
7786 (which we know to be the width of a basic mode), then
7787 storing into memory, and changing the mode to BLKmode. */
7788 if (mode1 == VOIDmode
7789 || REG_P (op0) || GET_CODE (op0) == SUBREG
7790 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7791 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7792 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7793 && modifier != EXPAND_CONST_ADDRESS
7794 && modifier != EXPAND_INITIALIZER)
7795 /* If the field isn't aligned enough to fetch as a memref,
7796 fetch it as a bit field. */
7797 || (mode1 != BLKmode
7798 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7799 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7800 || (MEM_P (op0)
7801 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7802 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7803 && ((modifier == EXPAND_CONST_ADDRESS
7804 || modifier == EXPAND_INITIALIZER)
7805 ? STRICT_ALIGNMENT
7806 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7807 || (bitpos % BITS_PER_UNIT != 0)))
7808 /* If the type and the field are a constant size and the
7809 size of the type isn't the same size as the bitfield,
7810 we must use bitfield operations. */
7811 || (bitsize >= 0
7812 && TYPE_SIZE (TREE_TYPE (exp))
7813 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7814 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7815 bitsize)))
7817 enum machine_mode ext_mode = mode;
7819 if (ext_mode == BLKmode
7820 && ! (target != 0 && MEM_P (op0)
7821 && MEM_P (target)
7822 && bitpos % BITS_PER_UNIT == 0))
7823 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7825 if (ext_mode == BLKmode)
7827 if (target == 0)
7828 target = assign_temp (type, 0, 1, 1);
7830 if (bitsize == 0)
7831 return target;
7833 /* In this case, BITPOS must start at a byte boundary and
7834 TARGET, if specified, must be a MEM. */
7835 gcc_assert (MEM_P (op0)
7836 && (!target || MEM_P (target))
7837 && !(bitpos % BITS_PER_UNIT));
7839 emit_block_move (target,
7840 adjust_address (op0, VOIDmode,
7841 bitpos / BITS_PER_UNIT),
7842 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7843 / BITS_PER_UNIT),
7844 (modifier == EXPAND_STACK_PARM
7845 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7847 return target;
7850 op0 = validize_mem (op0);
7852 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7853 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7855 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7856 (modifier == EXPAND_STACK_PARM
7857 ? NULL_RTX : target),
7858 ext_mode, ext_mode);
7860 /* If the result is a record type and BITSIZE is narrower than
7861 the mode of OP0, an integral mode, and this is a big endian
7862 machine, we must put the field into the high-order bits. */
7863 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7864 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7865 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7866 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7867 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7868 - bitsize),
7869 op0, 1);
7871 /* If the result type is BLKmode, store the data into a temporary
7872 of the appropriate type, but with the mode corresponding to the
7873 mode for the data we have (op0's mode). It's tempting to make
7874 this a constant type, since we know it's only being stored once,
7875 but that can cause problems if we are taking the address of this
7876 COMPONENT_REF because the MEM of any reference via that address
7877 will have flags corresponding to the type, which will not
7878 necessarily be constant. */
7879 if (mode == BLKmode)
7881 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7882 rtx new;
7884 /* If the reference doesn't use the alias set of its type,
7885 we cannot create the temporary using that type. */
7886 if (component_uses_parent_alias_set (exp))
7888 new = assign_stack_local (ext_mode, size, 0);
7889 set_mem_alias_set (new, get_alias_set (exp));
7891 else
7892 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7894 emit_move_insn (new, op0);
7895 op0 = copy_rtx (new);
7896 PUT_MODE (op0, BLKmode);
7897 set_mem_attributes (op0, exp, 1);
7900 return op0;
7903 /* If the result is BLKmode, use that to access the object
7904 now as well. */
7905 if (mode == BLKmode)
7906 mode1 = BLKmode;
7908 /* Get a reference to just this component. */
7909 if (modifier == EXPAND_CONST_ADDRESS
7910 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7911 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7912 else
7913 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7915 if (op0 == orig_op0)
7916 op0 = copy_rtx (op0);
7918 set_mem_attributes (op0, exp, 0);
7919 if (REG_P (XEXP (op0, 0)))
7920 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7922 MEM_VOLATILE_P (op0) |= volatilep;
7923 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7924 || modifier == EXPAND_CONST_ADDRESS
7925 || modifier == EXPAND_INITIALIZER)
7926 return op0;
7927 else if (target == 0)
7928 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7930 convert_move (target, op0, unsignedp);
7931 return target;
7934 case OBJ_TYPE_REF:
7935 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7937 case CALL_EXPR:
7938 /* All valid uses of __builtin_va_arg_pack () are removed during
7939 inlining. */
7940 if (CALL_EXPR_VA_ARG_PACK (exp))
7941 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7942 /* Check for a built-in function. */
7943 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7944 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7945 == FUNCTION_DECL)
7946 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7948 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7949 == BUILT_IN_FRONTEND)
7950 return lang_hooks.expand_expr (exp, original_target,
7951 tmode, modifier,
7952 alt_rtl);
7953 else
7954 return expand_builtin (exp, target, subtarget, tmode, ignore);
7957 return expand_call (exp, target, ignore);
7959 case NON_LVALUE_EXPR:
7960 case NOP_EXPR:
7961 case CONVERT_EXPR:
7962 if (TREE_OPERAND (exp, 0) == error_mark_node)
7963 return const0_rtx;
7965 if (TREE_CODE (type) == UNION_TYPE)
7967 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7969 /* If both input and output are BLKmode, this conversion isn't doing
7970 anything except possibly changing memory attributes. */
7971 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7973 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7974 modifier);
7976 result = copy_rtx (result);
7977 set_mem_attributes (result, exp, 0);
7978 return result;
7981 if (target == 0)
7983 if (TYPE_MODE (type) != BLKmode)
7984 target = gen_reg_rtx (TYPE_MODE (type));
7985 else
7986 target = assign_temp (type, 0, 1, 1);
7989 if (MEM_P (target))
7990 /* Store data into beginning of memory target. */
7991 store_expr (TREE_OPERAND (exp, 0),
7992 adjust_address (target, TYPE_MODE (valtype), 0),
7993 modifier == EXPAND_STACK_PARM,
7994 false);
7996 else
7998 gcc_assert (REG_P (target));
8000 /* Store this field into a union of the proper type. */
8001 store_field (target,
8002 MIN ((int_size_in_bytes (TREE_TYPE
8003 (TREE_OPERAND (exp, 0)))
8004 * BITS_PER_UNIT),
8005 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8006 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8007 type, 0, false);
8010 /* Return the entire union. */
8011 return target;
8014 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8016 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8017 modifier);
8019 /* If the signedness of the conversion differs and OP0 is
8020 a promoted SUBREG, clear that indication since we now
8021 have to do the proper extension. */
8022 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8023 && GET_CODE (op0) == SUBREG)
8024 SUBREG_PROMOTED_VAR_P (op0) = 0;
8026 return REDUCE_BIT_FIELD (op0);
8029 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8030 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8031 if (GET_MODE (op0) == mode)
8034 /* If OP0 is a constant, just convert it into the proper mode. */
8035 else if (CONSTANT_P (op0))
8037 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8038 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8040 if (modifier == EXPAND_INITIALIZER)
8041 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8042 subreg_lowpart_offset (mode,
8043 inner_mode));
8044 else
8045 op0 = convert_modes (mode, inner_mode, op0,
8046 TYPE_UNSIGNED (inner_type));
8049 else if (modifier == EXPAND_INITIALIZER)
8050 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8052 else if (target == 0)
8053 op0 = convert_to_mode (mode, op0,
8054 TYPE_UNSIGNED (TREE_TYPE
8055 (TREE_OPERAND (exp, 0))));
8056 else
8058 convert_move (target, op0,
8059 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8060 op0 = target;
8063 return REDUCE_BIT_FIELD (op0);
8065 case VIEW_CONVERT_EXPR:
8066 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8068 /* If the input and output modes are both the same, we are done. */
8069 if (TYPE_MODE (type) == GET_MODE (op0))
8071 /* If neither mode is BLKmode and both modes are the same size,
8072 then we can use gen_lowpart. */
8073 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8074 && GET_MODE_SIZE (TYPE_MODE (type))
8075 == GET_MODE_SIZE (GET_MODE (op0)))
8077 if (GET_CODE (op0) == SUBREG)
8078 op0 = force_reg (GET_MODE (op0), op0);
8079 op0 = gen_lowpart (TYPE_MODE (type), op0);
8081 /* If both modes are integral, then we can convert from one to the
8082 other. */
8083 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8084 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8085 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8086 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8087 /* As a last resort, spill op0 to memory, and reload it in a
8088 different mode. */
8089 else if (!MEM_P (op0))
8091 /* If the operand is not a MEM, force it into memory. Since we
8092 are going to be changing the mode of the MEM, don't call
8093 force_const_mem for constants because we don't allow pool
8094 constants to change mode. */
8095 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8097 gcc_assert (!TREE_ADDRESSABLE (exp));
8099 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8100 target
8101 = assign_stack_temp_for_type
8102 (TYPE_MODE (inner_type),
8103 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8105 emit_move_insn (target, op0);
8106 op0 = target;
8109 /* At this point, OP0 is in the correct mode. If the output type is such
8110 that the operand is known to be aligned, indicate that it is.
8111 Otherwise, we need only be concerned about alignment for non-BLKmode
8112 results. */
8113 if (MEM_P (op0))
8115 op0 = copy_rtx (op0);
8117 if (TYPE_ALIGN_OK (type))
8118 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8119 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8120 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8122 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8123 HOST_WIDE_INT temp_size
8124 = MAX (int_size_in_bytes (inner_type),
8125 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8126 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8127 temp_size, 0, type);
8128 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8130 gcc_assert (!TREE_ADDRESSABLE (exp));
8132 if (GET_MODE (op0) == BLKmode)
8133 emit_block_move (new_with_op0_mode, op0,
8134 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8135 (modifier == EXPAND_STACK_PARM
8136 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8137 else
8138 emit_move_insn (new_with_op0_mode, op0);
8140 op0 = new;
8143 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8146 return op0;
8148 case POINTER_PLUS_EXPR:
8149 /* Even though the sizetype mode and the pointer's mode can be different,
8150 expand is able to handle this correctly and get the correct result out
8151 of the PLUS_EXPR code. */
8152 case PLUS_EXPR:
8154 /* Check if this is a case for multiplication and addition. */
8155 if (TREE_CODE (type) == INTEGER_TYPE
8156 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8158 tree subsubexp0, subsubexp1;
8159 enum tree_code code0, code1;
8161 subexp0 = TREE_OPERAND (exp, 0);
8162 subsubexp0 = TREE_OPERAND (subexp0, 0);
8163 subsubexp1 = TREE_OPERAND (subexp0, 1);
8164 code0 = TREE_CODE (subsubexp0);
8165 code1 = TREE_CODE (subsubexp1);
8166 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8167 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8168 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8169 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8170 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8171 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8172 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8174 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8175 enum machine_mode innermode = TYPE_MODE (op0type);
8176 bool zextend_p = TYPE_UNSIGNED (op0type);
8177 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8178 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8179 && (optab_handler (this_optab, mode)->insn_code
8180 != CODE_FOR_nothing))
8182 expand_operands (TREE_OPERAND (subsubexp0, 0),
8183 TREE_OPERAND (subsubexp1, 0),
8184 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8185 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8186 VOIDmode, EXPAND_NORMAL);
8187 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8188 target, unsignedp);
8189 gcc_assert (temp);
8190 return REDUCE_BIT_FIELD (temp);
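/* Editorial sketch (not part of the original source; f and the type
   widths are illustrative): the shape matched above corresponds to
   source such as

     long f (int a, int b, long c)
     {
       return (long) a * (long) b + c;
     }

   on a target where long is exactly twice as wide as int and a widening
   multiply-accumulate pattern (umadd/smadd_widen) is available.  */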
8195 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8196 something else, make sure we add the register to the constant and
8197 then to the other thing. This case can occur during strength
8198 reduction and doing it this way will produce better code if the
8199 frame pointer or argument pointer is eliminated.
8201 fold-const.c will ensure that the constant is always in the inner
8202 PLUS_EXPR, so the only case we need to do anything about is if
8203 sp, ap, or fp is our second argument, in which case we must swap
8204 the innermost first argument and our second argument. */
8206 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8207 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8208 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8209 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8210 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8211 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8213 tree t = TREE_OPERAND (exp, 1);
8215 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8216 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8219 /* If the result is to be ptr_mode and we are adding an integer to
8220 something, we might be forming a constant. So try to use
8221 plus_constant. If it produces a sum and we can't accept it,
8222 use force_operand. This allows P = &ARR[const] to generate
8223 efficient code on machines where a SYMBOL_REF is not a valid
8224 address.
8226 If this is an EXPAND_SUM call, always return the sum. */
8227 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8228 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8230 if (modifier == EXPAND_STACK_PARM)
8231 target = 0;
8232 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8233 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8234 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8236 rtx constant_part;
8238 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8239 EXPAND_SUM);
8240 /* Use immed_double_const to ensure that the constant is
8241 truncated according to the mode of OP1, then sign extended
8242 to a HOST_WIDE_INT. Using the constant directly can result
8243 in non-canonical RTL in a 64x32 cross compile. */
8244 constant_part
8245 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8246 (HOST_WIDE_INT) 0,
8247 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8248 op1 = plus_constant (op1, INTVAL (constant_part));
8249 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8250 op1 = force_operand (op1, target);
8251 return REDUCE_BIT_FIELD (op1);
8254 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8255 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8256 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8258 rtx constant_part;
8260 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8261 (modifier == EXPAND_INITIALIZER
8262 ? EXPAND_INITIALIZER : EXPAND_SUM));
8263 if (! CONSTANT_P (op0))
8265 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8266 VOIDmode, modifier);
8267 /* Return a PLUS if modifier says it's OK. */
8268 if (modifier == EXPAND_SUM
8269 || modifier == EXPAND_INITIALIZER)
8270 return simplify_gen_binary (PLUS, mode, op0, op1);
8271 goto binop2;
8273 /* Use immed_double_const to ensure that the constant is
8274 truncated according to the mode of OP1, then sign extended
8275 to a HOST_WIDE_INT. Using the constant directly can result
8276 in non-canonical RTL in a 64x32 cross compile. */
8277 constant_part
8278 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8279 (HOST_WIDE_INT) 0,
8280 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8281 op0 = plus_constant (op0, INTVAL (constant_part));
8282 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8283 op0 = force_operand (op0, target);
8284 return REDUCE_BIT_FIELD (op0);
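/* Editorial note (not part of the original source; the concrete RTL is
   only indicative): for something like "p = &arr[3]" with 4-byte
   elements, the plus_constant paths above can fold the addition into a
   single constant address of the form
   (const (plus (symbol_ref "arr") (const_int 12))) instead of emitting
   a run-time add.  */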
8288 /* No sense saving up arithmetic to be done
8289 if it's all in the wrong mode to form part of an address.
8290 And force_operand won't know whether to sign-extend or
8291 zero-extend. */
8292 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8293 || mode != ptr_mode)
8295 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8296 subtarget, &op0, &op1, 0);
8297 if (op0 == const0_rtx)
8298 return op1;
8299 if (op1 == const0_rtx)
8300 return op0;
8301 goto binop2;
8304 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8305 subtarget, &op0, &op1, modifier);
8306 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8308 case MINUS_EXPR:
8309 /* Check if this is a case for multiplication and subtraction. */
8310 if (TREE_CODE (type) == INTEGER_TYPE
8311 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8313 tree subsubexp0, subsubexp1;
8314 enum tree_code code0, code1;
8316 subexp1 = TREE_OPERAND (exp, 1);
8317 subsubexp0 = TREE_OPERAND (subexp1, 0);
8318 subsubexp1 = TREE_OPERAND (subexp1, 1);
8319 code0 = TREE_CODE (subsubexp0);
8320 code1 = TREE_CODE (subsubexp1);
8321 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8322 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8323 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8324 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8325 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8326 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8327 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8329 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8330 enum machine_mode innermode = TYPE_MODE (op0type);
8331 bool zextend_p = TYPE_UNSIGNED (op0type);
8332 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8333 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8334 && (optab_handler (this_optab, mode)->insn_code
8335 != CODE_FOR_nothing))
8337 expand_operands (TREE_OPERAND (subsubexp0, 0),
8338 TREE_OPERAND (subsubexp1, 0),
8339 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8340 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8341 VOIDmode, EXPAND_NORMAL);
8342 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8343 target, unsignedp);
8344 gcc_assert (temp);
8345 return REDUCE_BIT_FIELD (temp);
8350 /* For initializers, we are allowed to return a MINUS of two
8351 symbolic constants. Here we handle all cases when both operands
8352 are constant. */
8353 /* Handle difference of two symbolic constants,
8354 for the sake of an initializer. */
8355 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8356 && really_constant_p (TREE_OPERAND (exp, 0))
8357 && really_constant_p (TREE_OPERAND (exp, 1)))
8359 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8360 NULL_RTX, &op0, &op1, modifier);
8362 /* If the last operand is a CONST_INT, use plus_constant of
8363 the negated constant. Else make the MINUS. */
8364 if (GET_CODE (op1) == CONST_INT)
8365 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8366 else
8367 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8370 /* No sense saving up arithmetic to be done
8371 if it's all in the wrong mode to form part of an address.
8372 And force_operand won't know whether to sign-extend or
8373 zero-extend. */
8374 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8375 || mode != ptr_mode)
8376 goto binop;
8378 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8379 subtarget, &op0, &op1, modifier);
8381 /* Convert A - const to A + (-const). */
8382 if (GET_CODE (op1) == CONST_INT)
8384 op1 = negate_rtx (mode, op1);
8385 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8388 goto binop2;
8390 case MULT_EXPR:
8391 /* If first operand is constant, swap them.
8392 Thus the following special case checks need only
8393 check the second operand. */
8394 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8396 tree t1 = TREE_OPERAND (exp, 0);
8397 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8398 TREE_OPERAND (exp, 1) = t1;
8401 /* Attempt to return something suitable for generating an
8402 indexed address, for machines that support that. */
8404 if (modifier == EXPAND_SUM && mode == ptr_mode
8405 && host_integerp (TREE_OPERAND (exp, 1), 0))
8407 tree exp1 = TREE_OPERAND (exp, 1);
8409 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8410 EXPAND_SUM);
8412 if (!REG_P (op0))
8413 op0 = force_operand (op0, NULL_RTX);
8414 if (!REG_P (op0))
8415 op0 = copy_to_mode_reg (mode, op0);
8417 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8418 gen_int_mode (tree_low_cst (exp1, 0),
8419 TYPE_MODE (TREE_TYPE (exp1)))));
8422 if (modifier == EXPAND_STACK_PARM)
8423 target = 0;
8425 /* Check for multiplying things that have been extended
8426 from a narrower type. If this machine supports multiplying
8427 in that narrower type with a result in the desired type,
8428 do it that way, and avoid the explicit type-conversion. */
8430 subexp0 = TREE_OPERAND (exp, 0);
8431 subexp1 = TREE_OPERAND (exp, 1);
8432 /* First, check if we have a multiplication of one signed and one
8433 unsigned operand. */
8434 if (TREE_CODE (subexp0) == NOP_EXPR
8435 && TREE_CODE (subexp1) == NOP_EXPR
8436 && TREE_CODE (type) == INTEGER_TYPE
8437 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8438 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8439 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8440 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8441 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8442 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8444 enum machine_mode innermode
8445 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8446 this_optab = usmul_widen_optab;
8447 if (mode == GET_MODE_WIDER_MODE (innermode))
8449 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8451 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8452 expand_operands (TREE_OPERAND (subexp0, 0),
8453 TREE_OPERAND (subexp1, 0),
8454 NULL_RTX, &op0, &op1, 0);
8455 else
8456 expand_operands (TREE_OPERAND (subexp0, 0),
8457 TREE_OPERAND (subexp1, 0),
8458 NULL_RTX, &op1, &op0, 0);
8460 goto binop3;
8464 /* Check for a multiplication with matching signedness. */
8465 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8466 && TREE_CODE (type) == INTEGER_TYPE
8467 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8468 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8469 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8470 && int_fits_type_p (TREE_OPERAND (exp, 1),
8471 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8472 /* Don't use a widening multiply if a shift will do. */
8473 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8474 > HOST_BITS_PER_WIDE_INT)
8475 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8477 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8478 && (TYPE_PRECISION (TREE_TYPE
8479 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8480 == TYPE_PRECISION (TREE_TYPE
8481 (TREE_OPERAND
8482 (TREE_OPERAND (exp, 0), 0))))
8483 /* If both operands are extended, they must either both
8484 be zero-extended or both be sign-extended. */
8485 && (TYPE_UNSIGNED (TREE_TYPE
8486 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8487 == TYPE_UNSIGNED (TREE_TYPE
8488 (TREE_OPERAND
8489 (TREE_OPERAND (exp, 0), 0)))))))
8491 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8492 enum machine_mode innermode = TYPE_MODE (op0type);
8493 bool zextend_p = TYPE_UNSIGNED (op0type);
8494 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8495 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8497 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8499 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8501 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8502 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8503 TREE_OPERAND (exp, 1),
8504 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8505 else
8506 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8507 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8508 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8509 goto binop3;
8511 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8512 && innermode == word_mode)
8514 rtx htem, hipart;
8515 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8516 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8517 op1 = convert_modes (innermode, mode,
8518 expand_normal (TREE_OPERAND (exp, 1)),
8519 unsignedp);
8520 else
8521 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8522 temp = expand_binop (mode, other_optab, op0, op1, target,
8523 unsignedp, OPTAB_LIB_WIDEN);
8524 hipart = gen_highpart (innermode, temp);
8525 htem = expand_mult_highpart_adjust (innermode, hipart,
8526 op0, op1, hipart,
8527 zextend_p);
8528 if (htem != hipart)
8529 emit_move_insn (hipart, htem);
8530 return REDUCE_BIT_FIELD (temp);
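/* Editorial sketch (not part of the original source; g and the type
   widths are illustrative): the widening multiply recognized above
   corresponds to source such as

     long long g (int a, int b)
     {
       return (long long) a * (long long) b;
     }

   where both operands are extended from the same narrower type with the
   same signedness; when only the other-signedness umul/smul_widen
   pattern exists, the high part is fixed up afterwards.  */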
8534 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8535 subtarget, &op0, &op1, 0);
8536 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8538 case TRUNC_DIV_EXPR:
8539 case FLOOR_DIV_EXPR:
8540 case CEIL_DIV_EXPR:
8541 case ROUND_DIV_EXPR:
8542 case EXACT_DIV_EXPR:
8543 if (modifier == EXPAND_STACK_PARM)
8544 target = 0;
8545 /* Possible optimization: compute the dividend with EXPAND_SUM;
8546 then, if the divisor is constant, we can optimize the case
8547 where some terms of the dividend have coefficients divisible by it. */
8548 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8549 subtarget, &op0, &op1, 0);
8550 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8552 case RDIV_EXPR:
8553 goto binop;
8555 case TRUNC_MOD_EXPR:
8556 case FLOOR_MOD_EXPR:
8557 case CEIL_MOD_EXPR:
8558 case ROUND_MOD_EXPR:
8559 if (modifier == EXPAND_STACK_PARM)
8560 target = 0;
8561 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8562 subtarget, &op0, &op1, 0);
8563 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8565 case FIX_TRUNC_EXPR:
8566 op0 = expand_normal (TREE_OPERAND (exp, 0));
8567 if (target == 0 || modifier == EXPAND_STACK_PARM)
8568 target = gen_reg_rtx (mode);
8569 expand_fix (target, op0, unsignedp);
8570 return target;
8572 case FLOAT_EXPR:
8573 op0 = expand_normal (TREE_OPERAND (exp, 0));
8574 if (target == 0 || modifier == EXPAND_STACK_PARM)
8575 target = gen_reg_rtx (mode);
8576 /* expand_float can't figure out what to do if FROM has VOIDmode.
8577 So give it the correct mode. With -O, cse will optimize this. */
8578 if (GET_MODE (op0) == VOIDmode)
8579 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8580 op0);
8581 expand_float (target, op0,
8582 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8583 return target;
8585 case NEGATE_EXPR:
8586 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8587 VOIDmode, EXPAND_NORMAL);
8588 if (modifier == EXPAND_STACK_PARM)
8589 target = 0;
8590 temp = expand_unop (mode,
8591 optab_for_tree_code (NEGATE_EXPR, type),
8592 op0, target, 0);
8593 gcc_assert (temp);
8594 return REDUCE_BIT_FIELD (temp);
8596 case ABS_EXPR:
8597 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8598 VOIDmode, EXPAND_NORMAL);
8599 if (modifier == EXPAND_STACK_PARM)
8600 target = 0;
8602 /* ABS_EXPR is not valid for complex arguments. */
8603 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8604 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8606 /* Unsigned abs is simply the operand. Testing here means we don't
8607 risk generating incorrect code below. */
8608 if (TYPE_UNSIGNED (type))
8609 return op0;
8611 return expand_abs (mode, op0, target, unsignedp,
8612 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8614 case MAX_EXPR:
8615 case MIN_EXPR:
8616 target = original_target;
8617 if (target == 0
8618 || modifier == EXPAND_STACK_PARM
8619 || (MEM_P (target) && MEM_VOLATILE_P (target))
8620 || GET_MODE (target) != mode
8621 || (REG_P (target)
8622 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8623 target = gen_reg_rtx (mode);
8624 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8625 target, &op0, &op1, 0);
8627 /* First try to do it with a special MIN or MAX instruction.
8628 If that does not win, use a conditional jump to select the proper
8629 value. */
8630 this_optab = optab_for_tree_code (code, type);
8631 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8632 OPTAB_WIDEN);
8633 if (temp != 0)
8634 return temp;
8636 /* At this point, a MEM target is no longer useful; we will get better
8637 code without it. */
8639 if (! REG_P (target))
8640 target = gen_reg_rtx (mode);
8642 /* If op1 was placed in target, swap op0 and op1. */
8643 if (target != op0 && target == op1)
8645 temp = op0;
8646 op0 = op1;
8647 op1 = temp;
8650 /* We generate better code and avoid problems with op1 mentioning
8651 target by forcing op1 into a pseudo if it isn't a constant. */
8652 if (! CONSTANT_P (op1))
8653 op1 = force_reg (mode, op1);
8656 enum rtx_code comparison_code;
8657 rtx cmpop1 = op1;
8659 if (code == MAX_EXPR)
8660 comparison_code = unsignedp ? GEU : GE;
8661 else
8662 comparison_code = unsignedp ? LEU : LE;
8664 /* Canonicalize to comparisons against 0. */
8665 if (op1 == const1_rtx)
8667 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8668 or (a != 0 ? a : 1) for unsigned.
8669 For MIN we are safe converting (a <= 1 ? a : 1)
8670 into (a <= 0 ? a : 1) */
8671 cmpop1 = const0_rtx;
8672 if (code == MAX_EXPR)
8673 comparison_code = unsignedp ? NE : GT;
8675 if (op1 == constm1_rtx && !unsignedp)
8677 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8678 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8679 cmpop1 = const0_rtx;
8680 if (code == MIN_EXPR)
8681 comparison_code = LT;
8683 #ifdef HAVE_conditional_move
8684 /* Use a conditional move if possible. */
8685 if (can_conditionally_move_p (mode))
8687 rtx insn;
8689 /* ??? Same problem as in expmed.c: emit_conditional_move
8690 forces a stack adjustment via compare_from_rtx, and we
8691 lose the stack adjustment if the sequence we are about
8692 to create is discarded. */
8693 do_pending_stack_adjust ();
8695 start_sequence ();
8697 /* Try to emit the conditional move. */
8698 insn = emit_conditional_move (target, comparison_code,
8699 op0, cmpop1, mode,
8700 op0, op1, mode,
8701 unsignedp);
8703 /* If we could do the conditional move, emit the sequence,
8704 and return. */
8705 if (insn)
8707 rtx seq = get_insns ();
8708 end_sequence ();
8709 emit_insn (seq);
8710 return target;
8713 /* Otherwise discard the sequence and fall back to code with
8714 branches. */
8715 end_sequence ();
8717 #endif
8718 if (target != op0)
8719 emit_move_insn (target, op0);
8721 temp = gen_label_rtx ();
8722 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8723 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8725 emit_move_insn (target, op1);
8726 emit_label (temp);
8727 return target;
8729 case BIT_NOT_EXPR:
8730 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8731 VOIDmode, EXPAND_NORMAL);
8732 if (modifier == EXPAND_STACK_PARM)
8733 target = 0;
8734 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8735 gcc_assert (temp);
8736 return temp;
8738 /* ??? Can optimize bitwise operations with one constant argument.
8739 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8740 and (a bitwise1 b) bitwise2 b (etc)
8741 but that is probably not worthwhile. */
8743 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8744 boolean values when we want in all cases to compute both of them. In
8745 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8746 as actual zero-or-1 values and then bitwise anding. In cases where
8747 there cannot be any side effects, better code would be made by
8748 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8749 how to recognize those cases. */
8751 case TRUTH_AND_EXPR:
8752 code = BIT_AND_EXPR;
8753 case BIT_AND_EXPR:
8754 goto binop;
8756 case TRUTH_OR_EXPR:
8757 code = BIT_IOR_EXPR;
8758 case BIT_IOR_EXPR:
8759 goto binop;
8761 case TRUTH_XOR_EXPR:
8762 code = BIT_XOR_EXPR;
8763 case BIT_XOR_EXPR:
8764 goto binop;
8766 case LSHIFT_EXPR:
8767 case RSHIFT_EXPR:
8768 case LROTATE_EXPR:
8769 case RROTATE_EXPR:
8770 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8771 subtarget = 0;
8772 if (modifier == EXPAND_STACK_PARM)
8773 target = 0;
8774 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8775 VOIDmode, EXPAND_NORMAL);
8776 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8777 unsignedp);
8779 /* Could determine the answer when only additive constants differ. Also,
8780 the addition of one can be handled by changing the condition. */
8781 case LT_EXPR:
8782 case LE_EXPR:
8783 case GT_EXPR:
8784 case GE_EXPR:
8785 case EQ_EXPR:
8786 case NE_EXPR:
8787 case UNORDERED_EXPR:
8788 case ORDERED_EXPR:
8789 case UNLT_EXPR:
8790 case UNLE_EXPR:
8791 case UNGT_EXPR:
8792 case UNGE_EXPR:
8793 case UNEQ_EXPR:
8794 case LTGT_EXPR:
8795 temp = do_store_flag (exp,
8796 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8797 tmode != VOIDmode ? tmode : mode, 0);
8798 if (temp != 0)
8799 return temp;
8801 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8802 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8803 && original_target
8804 && REG_P (original_target)
8805 && (GET_MODE (original_target)
8806 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8808 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8809 VOIDmode, EXPAND_NORMAL);
8811 /* If temp is constant, we can just compute the result. */
8812 if (GET_CODE (temp) == CONST_INT)
8814 if (INTVAL (temp) != 0)
8815 emit_move_insn (target, const1_rtx);
8816 else
8817 emit_move_insn (target, const0_rtx);
8819 return target;
8822 if (temp != original_target)
8824 enum machine_mode mode1 = GET_MODE (temp);
8825 if (mode1 == VOIDmode)
8826 mode1 = tmode != VOIDmode ? tmode : mode;
8828 temp = copy_to_mode_reg (mode1, temp);
8831 op1 = gen_label_rtx ();
8832 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8833 GET_MODE (temp), unsignedp, op1);
8834 emit_move_insn (temp, const1_rtx);
8835 emit_label (op1);
8836 return temp;
8839 /* If no set-flag instruction, must generate a conditional store
8840 into a temporary variable. Drop through and handle this
8841 like && and ||. */
8843 if (! ignore
8844 && (target == 0
8845 || modifier == EXPAND_STACK_PARM
8846 || ! safe_from_p (target, exp, 1)
8847 /* Make sure we don't have a hard reg (such as the function's return
8848 value) live across basic blocks, if not optimizing. */
8849 || (!optimize && REG_P (target)
8850 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8851 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8853 if (target)
8854 emit_move_insn (target, const0_rtx);
8856 op1 = gen_label_rtx ();
8857 jumpifnot (exp, op1);
8859 if (target)
8860 emit_move_insn (target, const1_rtx);
8862 emit_label (op1);
8863 return ignore ? const0_rtx : target;
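/* Editorial sketch (not part of the original source): with no usable
   set-flag instruction, the fallback above emits roughly

       target = 0;
       if (!<comparison>) goto L;
       target = 1;
     L:

   so the comparison result still ends up as a 0/1 value in TARGET.  */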
8865 case TRUTH_NOT_EXPR:
8866 if (modifier == EXPAND_STACK_PARM)
8867 target = 0;
8868 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8869 VOIDmode, EXPAND_NORMAL);
8870 /* The parser is careful to generate TRUTH_NOT_EXPR
8871 only with operands that are always zero or one. */
8872 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8873 target, 1, OPTAB_LIB_WIDEN);
8874 gcc_assert (temp);
8875 return temp;
8877 case STATEMENT_LIST:
8879 tree_stmt_iterator iter;
8881 gcc_assert (ignore);
8883 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8884 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8886 return const0_rtx;
8888 case COND_EXPR:
8889 /* A COND_EXPR with its type being VOID_TYPE represents a
8890 conditional jump and is handled in
8891 expand_gimple_cond_expr. */
8892 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8894 /* Note that COND_EXPRs whose type is a structure or union
8895 are required to be constructed to contain assignments of
8896 a temporary variable, so that we can evaluate them here
8897 for side effect only. If type is void, we must do likewise. */
8899 gcc_assert (!TREE_ADDRESSABLE (type)
8900 && !ignore
8901 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8902 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8904 /* If we are not to produce a result, we have no target. Otherwise,
8905 if a target was specified, use it; it will not be used as an
8906 intermediate target unless it is safe. If no target, use a
8907 temporary. */
8909 if (modifier != EXPAND_STACK_PARM
8910 && original_target
8911 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8912 && GET_MODE (original_target) == mode
8913 #ifdef HAVE_conditional_move
8914 && (! can_conditionally_move_p (mode)
8915 || REG_P (original_target))
8916 #endif
8917 && !MEM_P (original_target))
8918 temp = original_target;
8919 else
8920 temp = assign_temp (type, 0, 0, 1);
8922 do_pending_stack_adjust ();
8923 NO_DEFER_POP;
8924 op0 = gen_label_rtx ();
8925 op1 = gen_label_rtx ();
8926 jumpifnot (TREE_OPERAND (exp, 0), op0);
8927 store_expr (TREE_OPERAND (exp, 1), temp,
8928 modifier == EXPAND_STACK_PARM,
8929 false);
8931 emit_jump_insn (gen_jump (op1));
8932 emit_barrier ();
8933 emit_label (op0);
8934 store_expr (TREE_OPERAND (exp, 2), temp,
8935 modifier == EXPAND_STACK_PARM,
8936 false);
8938 emit_label (op1);
8939 OK_DEFER_POP;
8940 return temp;
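/* Editorial sketch (not part of the original source): for a value
   COND_EXPR "c ? a : b" the code above emits roughly

       if (!c) goto L0;
       temp = a;
       goto L1;
     L0:
       temp = b;
     L1:

   and returns TEMP.  */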
8942 case VEC_COND_EXPR:
8943 target = expand_vec_cond_expr (exp, target);
8944 return target;
8946 case MODIFY_EXPR:
8948 tree lhs = TREE_OPERAND (exp, 0);
8949 tree rhs = TREE_OPERAND (exp, 1);
8950 gcc_assert (ignore);
8951 expand_assignment (lhs, rhs, false);
8952 return const0_rtx;
8955 case GIMPLE_MODIFY_STMT:
8957 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8958 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8960 gcc_assert (ignore);
8962 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8963 of size 1. In this case (unless we need the result of the
8964 assignment) we can do this more efficiently with a
8965 test followed by an assignment, if necessary.
8967 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8968 things change so we do, this code should be enhanced to
8969 support it. */
8970 if (TREE_CODE (lhs) == COMPONENT_REF
8971 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8972 || TREE_CODE (rhs) == BIT_AND_EXPR)
8973 && TREE_OPERAND (rhs, 0) == lhs
8974 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8975 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8976 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8978 rtx label = gen_label_rtx ();
8979 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8980 do_jump (TREE_OPERAND (rhs, 1),
8981 value ? label : 0,
8982 value ? 0 : label);
8983 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
8984 MOVE_NONTEMPORAL (exp));
8985 do_pending_stack_adjust ();
8986 emit_label (label);
8987 return const0_rtx;
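/* Editorial sketch (not part of the original source; struct s is
   illustrative): the special case above turns, e.g.,

     struct s { unsigned a : 1, b : 1; } x;
     ...
     x.a |= x.b;

   into the equivalent of "if (x.b) x.a = 1;", avoiding a
   read-modify-write of the destination bitfield.  */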
8990 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
8991 return const0_rtx;
8994 case RETURN_EXPR:
8995 if (!TREE_OPERAND (exp, 0))
8996 expand_null_return ();
8997 else
8998 expand_return (TREE_OPERAND (exp, 0));
8999 return const0_rtx;
9001 case ADDR_EXPR:
9002 return expand_expr_addr_expr (exp, target, tmode, modifier);
9004 case COMPLEX_EXPR:
9005 /* Get the rtx for each of the operands. */
9006 op0 = expand_normal (TREE_OPERAND (exp, 0));
9007 op1 = expand_normal (TREE_OPERAND (exp, 1));
9009 if (!target)
9010 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9012 /* Move the real (op0) and imaginary (op1) parts to their locations. */
9013 write_complex_part (target, op0, false);
9014 write_complex_part (target, op1, true);
9016 return target;
9018 case REALPART_EXPR:
9019 op0 = expand_normal (TREE_OPERAND (exp, 0));
9020 return read_complex_part (op0, false);
9022 case IMAGPART_EXPR:
9023 op0 = expand_normal (TREE_OPERAND (exp, 0));
9024 return read_complex_part (op0, true);
9026 case RESX_EXPR:
9027 expand_resx_expr (exp);
9028 return const0_rtx;
9030 case TRY_CATCH_EXPR:
9031 case CATCH_EXPR:
9032 case EH_FILTER_EXPR:
9033 case TRY_FINALLY_EXPR:
9034 /* Lowered by tree-eh.c. */
9035 gcc_unreachable ();
9037 case WITH_CLEANUP_EXPR:
9038 case CLEANUP_POINT_EXPR:
9039 case TARGET_EXPR:
9040 case CASE_LABEL_EXPR:
9041 case VA_ARG_EXPR:
9042 case BIND_EXPR:
9043 case INIT_EXPR:
9044 case CONJ_EXPR:
9045 case COMPOUND_EXPR:
9046 case PREINCREMENT_EXPR:
9047 case PREDECREMENT_EXPR:
9048 case POSTINCREMENT_EXPR:
9049 case POSTDECREMENT_EXPR:
9050 case LOOP_EXPR:
9051 case EXIT_EXPR:
9052 case TRUTH_ANDIF_EXPR:
9053 case TRUTH_ORIF_EXPR:
9054 /* Lowered by gimplify.c. */
9055 gcc_unreachable ();
9057 case CHANGE_DYNAMIC_TYPE_EXPR:
9058 /* This is ignored at the RTL level. The tree level sets
9059 DECL_POINTER_ALIAS_SET of any variable to 0, which is
9060 overkill for the RTL layer but is all that we can
9061 represent. */
9062 return const0_rtx;
9064 case EXC_PTR_EXPR:
9065 return get_exception_pointer (cfun);
9067 case FILTER_EXPR:
9068 return get_exception_filter (cfun);
9070 case FDESC_EXPR:
9071 /* Function descriptors are not valid except as
9072 initialization constants, and should not be expanded. */
9073 gcc_unreachable ();
9075 case SWITCH_EXPR:
9076 expand_case (exp);
9077 return const0_rtx;
9079 case LABEL_EXPR:
9080 expand_label (TREE_OPERAND (exp, 0));
9081 return const0_rtx;
9083 case ASM_EXPR:
9084 expand_asm_expr (exp);
9085 return const0_rtx;
9087 case WITH_SIZE_EXPR:
9088 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9089 have pulled out the size to use in whatever context it needed. */
9090 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9091 modifier, alt_rtl);
9093 case REALIGN_LOAD_EXPR:
9095 tree oprnd0 = TREE_OPERAND (exp, 0);
9096 tree oprnd1 = TREE_OPERAND (exp, 1);
9097 tree oprnd2 = TREE_OPERAND (exp, 2);
9098 rtx op2;
9100 this_optab = optab_for_tree_code (code, type);
9101 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9102 op2 = expand_normal (oprnd2);
9103 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9104 target, unsignedp);
9105 gcc_assert (temp);
9106 return temp;
9109 case DOT_PROD_EXPR:
9111 tree oprnd0 = TREE_OPERAND (exp, 0);
9112 tree oprnd1 = TREE_OPERAND (exp, 1);
9113 tree oprnd2 = TREE_OPERAND (exp, 2);
9114 rtx op2;
9116 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9117 op2 = expand_normal (oprnd2);
9118 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9119 target, unsignedp);
9120 return target;
9123 case WIDEN_SUM_EXPR:
9125 tree oprnd0 = TREE_OPERAND (exp, 0);
9126 tree oprnd1 = TREE_OPERAND (exp, 1);
9128 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9129 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9130 target, unsignedp);
9131 return target;
9134 case REDUC_MAX_EXPR:
9135 case REDUC_MIN_EXPR:
9136 case REDUC_PLUS_EXPR:
9138 op0 = expand_normal (TREE_OPERAND (exp, 0));
9139 this_optab = optab_for_tree_code (code, type);
9140 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9141 gcc_assert (temp);
9142 return temp;
9145 case VEC_EXTRACT_EVEN_EXPR:
9146 case VEC_EXTRACT_ODD_EXPR:
9148 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9149 NULL_RTX, &op0, &op1, 0);
9150 this_optab = optab_for_tree_code (code, type);
9151 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9152 OPTAB_WIDEN);
9153 gcc_assert (temp);
9154 return temp;
9157 case VEC_INTERLEAVE_HIGH_EXPR:
9158 case VEC_INTERLEAVE_LOW_EXPR:
9160 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9161 NULL_RTX, &op0, &op1, 0);
9162 this_optab = optab_for_tree_code (code, type);
9163 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9164 OPTAB_WIDEN);
9165 gcc_assert (temp);
9166 return temp;
9169 case VEC_LSHIFT_EXPR:
9170 case VEC_RSHIFT_EXPR:
9172 target = expand_vec_shift_expr (exp, target);
9173 return target;
9176 case VEC_UNPACK_HI_EXPR:
9177 case VEC_UNPACK_LO_EXPR:
9179 op0 = expand_normal (TREE_OPERAND (exp, 0));
9180 this_optab = optab_for_tree_code (code, type);
9181 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9182 target, unsignedp);
9183 gcc_assert (temp);
9184 return temp;
9187 case VEC_UNPACK_FLOAT_HI_EXPR:
9188 case VEC_UNPACK_FLOAT_LO_EXPR:
9190 op0 = expand_normal (TREE_OPERAND (exp, 0));
9191 /* The signedness is determined from the input operand. */
9192 this_optab = optab_for_tree_code (code,
9193 TREE_TYPE (TREE_OPERAND (exp, 0)));
9194 temp = expand_widen_pattern_expr
9195 (exp, op0, NULL_RTX, NULL_RTX,
9196 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9198 gcc_assert (temp);
9199 return temp;
9202 case VEC_WIDEN_MULT_HI_EXPR:
9203 case VEC_WIDEN_MULT_LO_EXPR:
9205 tree oprnd0 = TREE_OPERAND (exp, 0);
9206 tree oprnd1 = TREE_OPERAND (exp, 1);
9208 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9209 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9210 target, unsignedp);
9211 gcc_assert (target);
9212 return target;
9215 case VEC_PACK_TRUNC_EXPR:
9216 case VEC_PACK_SAT_EXPR:
9217 case VEC_PACK_FIX_TRUNC_EXPR:
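/* Illustrative note: for the VEC_PACK_* codes two wide vectors are
   narrowed into one result (e.g. two V4SI operands packed into a
   single V8HI result), so the mode used for the optab lookup below
   is taken from the operands' type, not from the result type.  */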
9219 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9220 goto binop;
9223 default:
9224 return lang_hooks.expand_expr (exp, original_target, tmode,
9225 modifier, alt_rtl);
9228 /* Here to do an ordinary binary operator. */
9229 binop:
9230 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9231 subtarget, &op0, &op1, 0);
9232 binop2:
9233 this_optab = optab_for_tree_code (code, type);
9234 binop3:
9235 if (modifier == EXPAND_STACK_PARM)
9236 target = 0;
9237 temp = expand_binop (mode, this_optab, op0, op1, target,
9238 unsignedp, OPTAB_LIB_WIDEN);
9239 gcc_assert (temp);
9240 return REDUCE_BIT_FIELD (temp);
9242 #undef REDUCE_BIT_FIELD
9244 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9245 signedness of TYPE), possibly returning the result in TARGET. */
9246 static rtx
9247 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9249 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9250 if (target && GET_MODE (target) != GET_MODE (exp))
9251 target = 0;
9252 /* For constant values, reduce using build_int_cst_type. */
9253 if (GET_CODE (exp) == CONST_INT)
9255 HOST_WIDE_INT value = INTVAL (exp);
9256 tree t = build_int_cst_type (type, value);
9257 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9259 else if (TYPE_UNSIGNED (type))
9261 rtx mask;
9262 if (prec < HOST_BITS_PER_WIDE_INT)
9263 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9264 GET_MODE (exp));
9265 else
9266 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9267 ((unsigned HOST_WIDE_INT) 1
9268 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9269 GET_MODE (exp));
9270 return expand_and (GET_MODE (exp), exp, mask, target);
9272 else
9274 tree count = build_int_cst (NULL_TREE,
9275 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9276 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9277 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
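/* Worked example (illustrative): reducing to a 5-bit precision inside a
   32-bit mode.

     unsigned TYPE:   x & ((1 << 5) - 1)     i.e. mask with 0x1f
     signed TYPE:     (x << 27) >> 27        27 == 32 - 5; the arithmetic
                                             right shift replicates bit 4,
                                             sign-extending the field

   For x == 19 (binary 10011) the unsigned reduction yields 19, while the
   signed reduction yields -13 (0xfffffff3 in SImode).  */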
9281 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9282 when applied to the address of EXP produces an address known to be
9283 aligned more than BIGGEST_ALIGNMENT. */
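/* For illustration, the OFFSET shape recognized here exploits the
   identity

     addr + ((- addr) & MASK) == (addr + MASK) & ~MASK

   for MASK one less than a power of two, i.e. such an offset rounds the
   address of EXP up to the next multiple of MASK + 1.  */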
9285 static int
9286 is_aligning_offset (const_tree offset, const_tree exp)
9288 /* Strip off any conversions. */
9289 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9290 || TREE_CODE (offset) == NOP_EXPR
9291 || TREE_CODE (offset) == CONVERT_EXPR)
9292 offset = TREE_OPERAND (offset, 0);
9294 /* We must now have a BIT_AND_EXPR with a constant that is one less
9295 than a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
9296 if (TREE_CODE (offset) != BIT_AND_EXPR
9297 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9298 || compare_tree_int (TREE_OPERAND (offset, 1),
9299 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9300 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9301 return 0;
9303 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9304 It must be NEGATE_EXPR. Then strip any more conversions. */
9305 offset = TREE_OPERAND (offset, 0);
9306 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9307 || TREE_CODE (offset) == NOP_EXPR
9308 || TREE_CODE (offset) == CONVERT_EXPR)
9309 offset = TREE_OPERAND (offset, 0);
9311 if (TREE_CODE (offset) != NEGATE_EXPR)
9312 return 0;
9314 offset = TREE_OPERAND (offset, 0);
9315 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9316 || TREE_CODE (offset) == NOP_EXPR
9317 || TREE_CODE (offset) == CONVERT_EXPR)
9318 offset = TREE_OPERAND (offset, 0);
9320 /* This must now be the address of EXP. */
9321 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9324 /* Return the STRING_CST node if ARG corresponds to a string constant,
9325 or zero if it doesn't.  If nonzero is returned, set *PTR_OFFSET to the
9326 offset in bytes within the string that ARG is accessing.  The type of
9327 the offset will be `sizetype'. */
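/* Illustrative examples, assuming a hypothetical declaration
     static const char msg[] = "hi";

     arg == &"hello"[0]   returns the STRING_CST "hello", *PTR_OFFSET == 0
     arg == "hello" + 2   returns the STRING_CST "hello", *PTR_OFFSET == 2
     arg == &msg[1]       returns DECL_INITIAL (msg),     *PTR_OFFSET == 1

   (the arguments are shown in source form; ARG itself is the
   corresponding ADDR_EXPR or POINTER_PLUS_EXPR tree).  */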
9329 tree
9330 string_constant (tree arg, tree *ptr_offset)
9332 tree array, offset, lower_bound;
9333 STRIP_NOPS (arg);
9335 if (TREE_CODE (arg) == ADDR_EXPR)
9337 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9339 *ptr_offset = size_zero_node;
9340 return TREE_OPERAND (arg, 0);
9342 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9344 array = TREE_OPERAND (arg, 0);
9345 offset = size_zero_node;
9347 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9349 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9350 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9351 if (TREE_CODE (array) != STRING_CST
9352 && TREE_CODE (array) != VAR_DECL)
9353 return 0;
9355 /* Check if the array has a nonzero lower bound. */
9356 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9357 if (!integer_zerop (lower_bound))
9359 /* If the lower bound and the offset aren't both constants, return 0. */
9360 if (TREE_CODE (lower_bound) != INTEGER_CST)
9361 return 0;
9362 if (TREE_CODE (offset) != INTEGER_CST)
9363 return 0;
9364 /* Adjust offset by the lower bound. */
9365 offset = size_diffop (fold_convert (sizetype, offset),
9366 fold_convert (sizetype, lower_bound));
9369 else
9370 return 0;
9372 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9374 tree arg0 = TREE_OPERAND (arg, 0);
9375 tree arg1 = TREE_OPERAND (arg, 1);
9377 STRIP_NOPS (arg0);
9378 STRIP_NOPS (arg1);
9380 if (TREE_CODE (arg0) == ADDR_EXPR
9381 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9382 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9384 array = TREE_OPERAND (arg0, 0);
9385 offset = arg1;
9387 else if (TREE_CODE (arg1) == ADDR_EXPR
9388 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9389 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9391 array = TREE_OPERAND (arg1, 0);
9392 offset = arg0;
9394 else
9395 return 0;
9397 else
9398 return 0;
9400 if (TREE_CODE (array) == STRING_CST)
9402 *ptr_offset = fold_convert (sizetype, offset);
9403 return array;
9405 else if (TREE_CODE (array) == VAR_DECL)
9407 int length;
9409 /* Variables initialized to string literals can be handled too. */
9410 if (DECL_INITIAL (array) == NULL_TREE
9411 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9412 return 0;
9414 /* Only accept variables that are read-only, non-volatile and bind locally. */
9415 if (! TREE_READONLY (array)
9416 || TREE_SIDE_EFFECTS (array)
9417 || ! targetm.binds_local_p (array))
9418 return 0;
9420 /* Avoid const char foo[4] = "abcde"; */
9421 if (DECL_SIZE_UNIT (array) == NULL_TREE
9422 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9423 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9424 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9425 return 0;
9427 /* If the variable is bigger than the string literal, OFFSET must be
9428 constant and within the bounds of the string literal. */
9429 offset = fold_convert (sizetype, offset);
9430 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9431 && (! host_integerp (offset, 1)
9432 || compare_tree_int (offset, length) >= 0))
9433 return 0;
9435 *ptr_offset = offset;
9436 return DECL_INITIAL (array);
9439 return 0;
9442 /* Generate code to calculate EXP using a store-flag instruction
9443 and return an rtx for the result. EXP is either a comparison
9444 or a TRUTH_NOT_EXPR whose operand is a comparison.
9446 If TARGET is nonzero, store the result there if convenient.
9448 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9449 cheap.
9451 Return zero if there is no suitable set-flag instruction
9452 available on this machine.
9454 Once expand_expr has been called on the arguments of the comparison,
9455 we are committed to doing the store flag, since it is not safe to
9456 re-evaluate the expression. We emit the store-flag insn by calling
9457 emit_store_flag, but only expand the arguments if we have a reason
9458 to believe that emit_store_flag will be successful. If we think that
9459 it will, but it isn't, we have to simulate the store-flag with a
9460 set/jump/set sequence. */
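/* For illustration, the set/jump/set fallback mentioned above has
   roughly this shape for "target = (a < b)":

     target = 1;
     if (a < b) goto done;
     target = 0;
   done:

   with the two constants swapped when the result has to be inverted.  */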
9462 static rtx
9463 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9465 enum rtx_code code;
9466 tree arg0, arg1, type;
9467 tree tem;
9468 enum machine_mode operand_mode;
9469 int invert = 0;
9470 int unsignedp;
9471 rtx op0, op1;
9472 enum insn_code icode;
9473 rtx subtarget = target;
9474 rtx result, label;
9476 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9477 result at the end. We can't simply invert the test since it would
9478 have already been inverted if it were valid. This case occurs for
9479 some floating-point comparisons. */
9481 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9482 invert = 1, exp = TREE_OPERAND (exp, 0);
9484 arg0 = TREE_OPERAND (exp, 0);
9485 arg1 = TREE_OPERAND (exp, 1);
9487 /* Don't crash if the comparison was erroneous. */
9488 if (arg0 == error_mark_node || arg1 == error_mark_node)
9489 return const0_rtx;
9491 type = TREE_TYPE (arg0);
9492 operand_mode = TYPE_MODE (type);
9493 unsignedp = TYPE_UNSIGNED (type);
9495 /* We won't bother with BLKmode store-flag operations because it would mean
9496 passing a lot of information to emit_store_flag. */
9497 if (operand_mode == BLKmode)
9498 return 0;
9500 /* We won't bother with store-flag operations involving function pointers
9501 when function pointers must be canonicalized before comparisons. */
9502 #ifdef HAVE_canonicalize_funcptr_for_compare
9503 if (HAVE_canonicalize_funcptr_for_compare
9504 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9505 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9506 == FUNCTION_TYPE))
9507 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9508 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9509 == FUNCTION_TYPE))))
9510 return 0;
9511 #endif
9513 STRIP_NOPS (arg0);
9514 STRIP_NOPS (arg1);
9516 /* Get the rtx comparison code to use. We know that EXP is a comparison
9517 operation of some type. Some comparisons against 1 and -1 can be
9518 converted to comparisons with zero. Do so here so that the tests
9519 below will be aware that we have a comparison with zero. These
9520 tests will not catch constants in the first operand, but constants
9521 are rarely passed as the first operand. */
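/* For example (illustrative):

     x <  1           becomes   x <= 0   (LE, or LEU if unsigned)
     x >= 1           becomes   x >  0   (GT, or GTU if unsigned)
     signed x <= -1   becomes   x <  0   (LT)
     signed x >  -1   becomes   x >= 0   (GE)  */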
9523 switch (TREE_CODE (exp))
9525 case EQ_EXPR:
9526 code = EQ;
9527 break;
9528 case NE_EXPR:
9529 code = NE;
9530 break;
9531 case LT_EXPR:
9532 if (integer_onep (arg1))
9533 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9534 else
9535 code = unsignedp ? LTU : LT;
9536 break;
9537 case LE_EXPR:
9538 if (! unsignedp && integer_all_onesp (arg1))
9539 arg1 = integer_zero_node, code = LT;
9540 else
9541 code = unsignedp ? LEU : LE;
9542 break;
9543 case GT_EXPR:
9544 if (! unsignedp && integer_all_onesp (arg1))
9545 arg1 = integer_zero_node, code = GE;
9546 else
9547 code = unsignedp ? GTU : GT;
9548 break;
9549 case GE_EXPR:
9550 if (integer_onep (arg1))
9551 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9552 else
9553 code = unsignedp ? GEU : GE;
9554 break;
9556 case UNORDERED_EXPR:
9557 code = UNORDERED;
9558 break;
9559 case ORDERED_EXPR:
9560 code = ORDERED;
9561 break;
9562 case UNLT_EXPR:
9563 code = UNLT;
9564 break;
9565 case UNLE_EXPR:
9566 code = UNLE;
9567 break;
9568 case UNGT_EXPR:
9569 code = UNGT;
9570 break;
9571 case UNGE_EXPR:
9572 code = UNGE;
9573 break;
9574 case UNEQ_EXPR:
9575 code = UNEQ;
9576 break;
9577 case LTGT_EXPR:
9578 code = LTGT;
9579 break;
9581 default:
9582 gcc_unreachable ();
9585 /* Put a constant second. */
9586 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9588 tem = arg0; arg0 = arg1; arg1 = tem;
9589 code = swap_condition (code);
9592 /* If this is an equality or inequality test of a single bit, we can
9593 do this by shifting the bit being tested to the low-order bit and
9594 masking the result with the constant 1. If the condition was EQ,
9595 we xor it with 1. This does not require an scc insn and is faster
9596 than an scc insn even if we have it.
9598 The code to make this transformation was moved into fold_single_bit_test,
9599 so we just call into the folder and expand its result. */
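/* For example (illustrative), with a power-of-two mask:

     (x & 8) != 0   is folded to   (x >> 3) & 1
     (x & 8) == 0   is folded to   ((x >> 3) & 1) ^ 1

   so no scc instruction is required at all.  */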
9601 if ((code == NE || code == EQ)
9602 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9603 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9605 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9606 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9607 arg0, arg1, type),
9608 target, VOIDmode, EXPAND_NORMAL);
9611 /* Now see if we are likely to be able to do this. Return if not. */
9612 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9613 return 0;
9615 icode = setcc_gen_code[(int) code];
9617 if (icode == CODE_FOR_nothing)
9619 enum machine_mode wmode;
9621 for (wmode = operand_mode;
9622 icode == CODE_FOR_nothing && wmode != VOIDmode;
9623 wmode = GET_MODE_WIDER_MODE (wmode))
9624 icode = optab_handler (cstore_optab, wmode)->insn_code;
9627 if (icode == CODE_FOR_nothing
9628 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9630 /* We can only do this if it is one of the special cases that
9631 can be handled without an scc insn. */
9632 if ((code == LT && integer_zerop (arg1))
9633 || (! only_cheap && code == GE && integer_zerop (arg1)))
9635 else if (! only_cheap && (code == NE || code == EQ)
9636 && TREE_CODE (type) != REAL_TYPE
9637 && ((optab_handler (abs_optab, operand_mode)->insn_code
9638 != CODE_FOR_nothing)
9639 || (optab_handler (ffs_optab, operand_mode)->insn_code
9640 != CODE_FOR_nothing)))
9642 else
9643 return 0;
9646 if (! get_subtarget (target)
9647 || GET_MODE (subtarget) != operand_mode)
9648 subtarget = 0;
9650 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9652 if (target == 0)
9653 target = gen_reg_rtx (mode);
9655 result = emit_store_flag (target, code, op0, op1,
9656 operand_mode, unsignedp, 1);
9658 if (result)
9660 if (invert)
9661 result = expand_binop (mode, xor_optab, result, const1_rtx,
9662 result, 0, OPTAB_LIB_WIDEN);
9663 return result;
9666 /* If this failed, we have to do this with set/compare/jump/set code. */
9667 if (!REG_P (target)
9668 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9669 target = gen_reg_rtx (GET_MODE (target));
9671 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9672 label = gen_label_rtx ();
9673 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9674 NULL_RTX, label);
9676 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9677 emit_label (label);
9679 return target;
9683 /* Stubs in case we haven't got a casesi insn. */
9684 #ifndef HAVE_casesi
9685 # define HAVE_casesi 0
9686 # define gen_casesi(a, b, c, d, e) (0)
9687 # define CODE_FOR_casesi CODE_FOR_nothing
9688 #endif
9690 /* If the machine does not have a case insn that compares the bounds,
9691 this means extra overhead for dispatch tables, which raises the
9692 threshold for using them. */
9693 #ifndef CASE_VALUES_THRESHOLD
9694 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9695 #endif /* CASE_VALUES_THRESHOLD */
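/* A port may override the default in its target header, e.g. the
   hypothetical

     #define CASE_VALUES_THRESHOLD 8

   which makes switches with fewer than 8 distinct case values expand
   as a tree of comparisons rather than a dispatch table.  */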
9697 unsigned int
9698 case_values_threshold (void)
9700 return CASE_VALUES_THRESHOLD;
9703 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9704 0 otherwise (i.e. if there is no casesi instruction). */
9705 int
9706 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9707 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9709 enum machine_mode index_mode = SImode;
9710 int index_bits = GET_MODE_BITSIZE (index_mode);
9711 rtx op1, op2, index;
9712 enum machine_mode op_mode;
9714 if (! HAVE_casesi)
9715 return 0;
9717 /* Convert the index to SImode. */
9718 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9720 enum machine_mode omode = TYPE_MODE (index_type);
9721 rtx rangertx = expand_normal (range);
9723 /* We must handle the endpoints in the original mode. */
9724 index_expr = build2 (MINUS_EXPR, index_type,
9725 index_expr, minval);
9726 minval = integer_zero_node;
9727 index = expand_normal (index_expr);
9728 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9729 omode, 1, default_label);
9730 /* Now we can safely truncate. */
9731 index = convert_to_mode (index_mode, index, 0);
9733 else
9735 if (TYPE_MODE (index_type) != index_mode)
9737 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9738 index_expr = fold_convert (index_type, index_expr);
9741 index = expand_normal (index_expr);
9744 do_pending_stack_adjust ();
9746 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9747 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9748 (index, op_mode))
9749 index = copy_to_mode_reg (op_mode, index);
9751 op1 = expand_normal (minval);
9753 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9754 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9755 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9756 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9757 (op1, op_mode))
9758 op1 = copy_to_mode_reg (op_mode, op1);
9760 op2 = expand_normal (range);
9762 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9763 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9764 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9765 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9766 (op2, op_mode))
9767 op2 = copy_to_mode_reg (op_mode, op2);
9769 emit_jump_insn (gen_casesi (index, op1, op2,
9770 table_label, default_label));
9771 return 1;
9774 /* Attempt to generate a tablejump instruction; same concept. */
9775 #ifndef HAVE_tablejump
9776 #define HAVE_tablejump 0
9777 #define gen_tablejump(x, y) (0)
9778 #endif
9780 /* Subroutine of the next function.
9782 INDEX is the value being switched on, with the lowest value
9783 in the table already subtracted.
9784 MODE is its expected mode (needed if INDEX is constant).
9785 RANGE is the length of the jump table.
9786 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9788 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9789 index value is out of range. */
9791 static void
9792 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9793 rtx default_label)
9795 rtx temp, vector;
9797 if (INTVAL (range) > cfun->max_jumptable_ents)
9798 cfun->max_jumptable_ents = INTVAL (range);
9800 /* Do an unsigned comparison (in the proper mode) between the index
9801 expression and the value which represents the length of the range.
9802 Since we just finished subtracting the lower bound of the range
9803 from the index expression, this comparison allows us to simultaneously
9804 check that the original index expression value is both greater than
9805 or equal to the minimum value of the range and less than or equal to
9806 the maximum value of the range. */
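/* Concretely (illustrative): for a switch over i with case values
   5 .. 12, INDEX is i - 5 and RANGE is 7, so the single unsigned test

     if ((unsigned) (i - 5) > 7U) goto default_label;

   catches both i < 5 (the subtraction wraps to a large unsigned value)
   and i > 12 in one comparison.  */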
9808 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9809 default_label);
9811 /* If index is in range, it must fit in Pmode.
9812 Convert to Pmode so we can index with it. */
9813 if (mode != Pmode)
9814 index = convert_to_mode (Pmode, index, 1);
9816 /* Don't let a MEM slip through, because then the INDEX that comes
9817 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9818 and break_out_memory_refs will go to work on it and mess it up. */
9819 #ifdef PIC_CASE_VECTOR_ADDRESS
9820 if (flag_pic && !REG_P (index))
9821 index = copy_to_mode_reg (Pmode, index);
9822 #endif
9824 /* If flag_force_addr were to affect this address
9825 it could interfere with the tricky assumptions made
9826 about addresses that contain label-refs,
9827 which may be valid only very near the tablejump itself. */
9828 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9829 GET_MODE_SIZE, because this indicates how large insns are. The other
9830 uses should all be Pmode, because they are addresses. This code
9831 could fail if addresses and insns are not the same size. */
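/* The address built below is

     (plus (mult index (const_int entry_size)) (label_ref table_label))

   in Pmode, where entry_size is GET_MODE_SIZE (CASE_VECTOR_MODE),
   i.e. table_label + index * size of one table entry.  */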
9832 index = gen_rtx_PLUS (Pmode,
9833 gen_rtx_MULT (Pmode, index,
9834 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9835 gen_rtx_LABEL_REF (Pmode, table_label));
9836 #ifdef PIC_CASE_VECTOR_ADDRESS
9837 if (flag_pic)
9838 index = PIC_CASE_VECTOR_ADDRESS (index);
9839 else
9840 #endif
9841 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9842 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9843 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9844 convert_move (temp, vector, 0);
9846 emit_jump_insn (gen_tablejump (temp, table_label));
9848 /* If we are generating PIC code or if the table is PC-relative, the
9849 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9850 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9851 emit_barrier ();
9854 int
9855 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9856 rtx table_label, rtx default_label)
9858 rtx index;
9860 if (! HAVE_tablejump)
9861 return 0;
9863 index_expr = fold_build2 (MINUS_EXPR, index_type,
9864 fold_convert (index_type, index_expr),
9865 fold_convert (index_type, minval));
9866 index = expand_normal (index_expr);
9867 do_pending_stack_adjust ();
9869 do_tablejump (index, TYPE_MODE (index_type),
9870 convert_modes (TYPE_MODE (index_type),
9871 TYPE_MODE (TREE_TYPE (range)),
9872 expand_normal (range),
9873 TYPE_UNSIGNED (TREE_TYPE (range))),
9874 table_label, default_label);
9875 return 1;
9878 /* Nonzero if the mode is a valid vector mode for this architecture.
9879 This returns nonzero even if there is no hardware support for the
9880 vector mode, but we can emulate with narrower modes. */
9882 int
9883 vector_mode_valid_p (enum machine_mode mode)
9885 enum mode_class class = GET_MODE_CLASS (mode);
9886 enum machine_mode innermode;
9888 /* Doh! What's going on? */
9889 if (class != MODE_VECTOR_INT
9890 && class != MODE_VECTOR_FLOAT)
9891 return 0;
9893 /* Hardware support. Woo hoo! */
9894 if (targetm.vector_mode_supported_p (mode))
9895 return 1;
9897 innermode = GET_MODE_INNER (mode);
9899 /* We should probably return 1 if requesting V4DI and we have no DI,
9900 but do have V2DI; such a case is probably very unlikely, though. */
9902 /* If we have support for the inner mode, we can safely emulate it.
9903 We may not have V2DI, but we can emulate with a pair of DIs. */
9904 return targetm.scalar_mode_supported_p (innermode);
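/* E.g. (illustrative): on a target without V2DImode support but with
   DImode support this still returns nonzero for V2DImode; the generic
   vector lowering code then emulates each V2DI operation with a pair
   of DImode operations.  */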
9907 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9908 static rtx
9909 const_vector_from_tree (tree exp)
9911 rtvec v;
9912 int units, i;
9913 tree link, elt;
9914 enum machine_mode inner, mode;
9916 mode = TYPE_MODE (TREE_TYPE (exp));
9918 if (initializer_zerop (exp))
9919 return CONST0_RTX (mode);
9921 units = GET_MODE_NUNITS (mode);
9922 inner = GET_MODE_INNER (mode);
9924 v = rtvec_alloc (units);
9926 link = TREE_VECTOR_CST_ELTS (exp);
9927 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9929 elt = TREE_VALUE (link);
9931 if (TREE_CODE (elt) == REAL_CST)
9932 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9933 inner);
9934 else
9935 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9936 TREE_INT_CST_HIGH (elt),
9937 inner);
9940 /* Initialize remaining elements to 0. */
9941 for (; i < units; ++i)
9942 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9944 return gen_rtx_CONST_VECTOR (mode, v);
9946 #include "gt-expr.h"