* cfgcleanup.c, config/dfp-bit.c, expr.c, fold-const.c,
[official-gcc.git] / gcc / expr.c
blob8d2839f8d6d0223066331e4d0ac95d15a266bcd0
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
110 struct store_by_pieces
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
130 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces *);
138 static rtx clear_storage_via_libcall (rtx, rtx, bool);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, int);
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
151 static int is_aligning_offset (tree, tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
180 #endif
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
188 #endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
197 #endif
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
202 /* This array records the insn_code of insns to perform block sets. */
203 enum insn_code setmem_optab[NUM_MACHINE_MODES];
205 /* These arrays record the insn_code of three different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
209 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
211 /* Synchronization primitives. */
212 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
231 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
233 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
235 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
237 #ifndef SLOW_UNALIGNED_ACCESS
238 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
239 #endif
241 /* This is run once per compilation to set up which modes can be used
242 directly in memory and to initialize the block move optab. */
244 void
245 init_expr_once (void)
247 rtx insn, pat;
248 enum machine_mode mode;
249 int num_clobbers;
250 rtx mem, mem1;
251 rtx reg;
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
256 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
257 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
259 /* A scratch register we can modify in-place below to avoid
260 useless RTL allocations. */
261 reg = gen_rtx_REG (VOIDmode, -1);
263 insn = rtx_alloc (INSN);
264 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
265 PATTERN (insn) = pat;
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
270 int regno;
272 direct_load[(int) mode] = direct_store[(int) mode] = 0;
273 PUT_MODE (mem, mode);
274 PUT_MODE (mem1, mode);
275 PUT_MODE (reg, mode);
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283 regno++)
285 if (! HARD_REGNO_MODE_OK (regno, mode))
286 continue;
288 REGNO (reg) = regno;
290 SET_SRC (pat) = mem;
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
300 SET_SRC (pat) = reg;
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
305 SET_SRC (pat) = reg;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
312 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
315 mode = GET_MODE_WIDER_MODE (mode))
317 enum machine_mode srcmode;
318 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
319 srcmode = GET_MODE_WIDER_MODE (srcmode))
321 enum insn_code ic;
323 ic = can_extend_p (mode, srcmode, 0);
324 if (ic == CODE_FOR_nothing)
325 continue;
327 PUT_MODE (mem, srcmode);
329 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
330 float_extend_from_mem[mode][srcmode] = true;
335 /* This is run at the start of compiling a function. */
337 void
338 init_expr (void)
340 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
343 /* Copy data from FROM to TO, where the machine modes are not the same.
344 Both modes may be integer, or both may be floating.
345 UNSIGNEDP should be nonzero if FROM is an unsigned type.
346 This causes zero-extension instead of sign-extension. */
348 void
349 convert_move (rtx to, rtx from, int unsignedp)
351 enum machine_mode to_mode = GET_MODE (to);
352 enum machine_mode from_mode = GET_MODE (from);
353 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
354 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
355 enum insn_code code;
356 rtx libcall;
358 /* rtx code for making an equivalent value. */
359 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
363 gcc_assert (to_real == from_real);
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
385 emit_move_insn (to, from);
386 return;
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
393 if (VECTOR_MODE_P (to_mode))
394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
395 else
396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
398 emit_move_insn (to, from);
399 return;
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
409 if (to_real)
411 rtx value, insns;
412 convert_optab tab;
414 gcc_assert ((GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode))
416 || (DECIMAL_FLOAT_MODE_P (from_mode)
417 != DECIMAL_FLOAT_MODE_P (to_mode)));
419 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 /* Conversion between decimal float and binary float, same size. */
421 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
423 tab = sext_optab;
424 else
425 tab = trunc_optab;
427 /* Try converting directly if the insn is supported. */
429 code = tab->handlers[to_mode][from_mode].insn_code;
430 if (code != CODE_FOR_nothing)
432 emit_unop_insn (code, to, from,
433 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
434 return;
437 /* Otherwise use a libcall. */
438 libcall = tab->handlers[to_mode][from_mode].libfunc;
440 /* Is this conversion implemented yet? */
441 gcc_assert (libcall);
443 start_sequence ();
444 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
445 1, from, from_mode);
446 insns = get_insns ();
447 end_sequence ();
448 emit_libcall_block (insns, to, value,
449 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
450 from)
451 : gen_rtx_FLOAT_EXTEND (to_mode, from));
452 return;
455 /* Handle pointer conversion. */ /* SPEE 900220. */
456 /* Targets are expected to provide conversion insns between PxImode and
457 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
458 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
460 enum machine_mode full_mode
461 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
463 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
464 != CODE_FOR_nothing);
466 if (full_mode != from_mode)
467 from = convert_to_mode (full_mode, from, unsignedp);
468 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
469 to, from, UNKNOWN);
470 return;
472 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
474 rtx new_from;
475 enum machine_mode full_mode
476 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
478 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
479 != CODE_FOR_nothing);
481 if (to_mode == full_mode)
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 to, from, UNKNOWN);
485 return;
488 new_from = gen_reg_rtx (full_mode);
489 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
490 new_from, from, UNKNOWN);
492 /* else proceed to integer conversions below. */
493 from_mode = full_mode;
494 from = new_from;
497 /* Now both modes are integers. */
499 /* Handle expanding beyond a word. */
500 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
503 rtx insns;
504 rtx lowpart;
505 rtx fill_value;
506 rtx lowfrom;
507 int i;
508 enum machine_mode lowpart_mode;
509 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
511 /* Try converting directly if the insn is supported. */
512 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
513 != CODE_FOR_nothing)
515 /* If FROM is a SUBREG, put it into a register. Do this
516 so that we always generate the same set of insns for
517 better cse'ing; if an intermediate assignment occurred,
518 we won't be doing the operation directly on the SUBREG. */
519 if (optimize > 0 && GET_CODE (from) == SUBREG)
520 from = force_reg (from_mode, from);
521 emit_unop_insn (code, to, from, equiv_code);
522 return;
524 /* Next, try converting via full word. */
525 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 != CODE_FOR_nothing))
529 if (REG_P (to))
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
535 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 emit_unop_insn (code, to,
537 gen_lowpart (word_mode, to), equiv_code);
538 return;
541 /* No special multiword conversion insn; do it by hand. */
542 start_sequence ();
544 /* Since we will turn this into a no conflict block, we must ensure
545 that the source does not overlap the target. */
547 if (reg_overlap_mentioned_p (to, from))
548 from = force_reg (from_mode, from);
550 /* Get a copy of FROM widened to a word, if necessary. */
551 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 lowpart_mode = word_mode;
553 else
554 lowpart_mode = from_mode;
556 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
558 lowpart = gen_lowpart (lowpart_mode, to);
559 emit_move_insn (lowpart, lowfrom);
561 /* Compute the value to put in each remaining word. */
562 if (unsignedp)
563 fill_value = const0_rtx;
564 else
566 #ifdef HAVE_slt
567 if (HAVE_slt
568 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
569 && STORE_FLAG_VALUE == -1)
571 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
572 lowpart_mode, 0);
573 fill_value = gen_reg_rtx (word_mode);
574 emit_insn (gen_slt (fill_value));
576 else
577 #endif
579 fill_value
580 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
582 NULL_RTX, 0);
583 fill_value = convert_to_mode (word_mode, fill_value, 1);
587 /* Fill the remaining words. */
588 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
590 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 rtx subword = operand_subword (to, index, 1, to_mode);
593 gcc_assert (subword);
595 if (fill_value != subword)
596 emit_move_insn (subword, fill_value);
599 insns = get_insns ();
600 end_sequence ();
602 emit_no_conflict_block (insns, to, from, NULL_RTX,
603 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
604 return;
607 /* Truncating multi-word to a word or less. */
608 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
611 if (!((MEM_P (from)
612 && ! MEM_VOLATILE_P (from)
613 && direct_load[(int) to_mode]
614 && ! mode_dependent_address_p (XEXP (from, 0)))
615 || REG_P (from)
616 || GET_CODE (from) == SUBREG))
617 from = force_reg (from_mode, from);
618 convert_move (to, gen_lowpart (word_mode, from), 0);
619 return;
622 /* Now follow all the conversions between integers
623 no more than a word long. */
625 /* For truncation, usually we can just refer to FROM in a narrower mode. */
626 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (from_mode)))
630 if (!((MEM_P (from)
631 && ! MEM_VOLATILE_P (from)
632 && direct_load[(int) to_mode]
633 && ! mode_dependent_address_p (XEXP (from, 0)))
634 || REG_P (from)
635 || GET_CODE (from) == SUBREG))
636 from = force_reg (from_mode, from);
637 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
638 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 from = copy_to_reg (from);
640 emit_move_insn (to, gen_lowpart (to_mode, from));
641 return;
644 /* Handle extension. */
645 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
647 /* Convert directly if that works. */
648 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
649 != CODE_FOR_nothing)
651 emit_unop_insn (code, to, from, equiv_code);
652 return;
654 else
656 enum machine_mode intermediate;
657 rtx tmp;
658 tree shift_amount;
660 /* Search for a mode to convert via. */
661 for (intermediate = from_mode; intermediate != VOIDmode;
662 intermediate = GET_MODE_WIDER_MODE (intermediate))
663 if (((can_extend_p (to_mode, intermediate, unsignedp)
664 != CODE_FOR_nothing)
665 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
666 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 GET_MODE_BITSIZE (intermediate))))
668 && (can_extend_p (intermediate, from_mode, unsignedp)
669 != CODE_FOR_nothing))
671 convert_move (to, convert_to_mode (intermediate, from,
672 unsignedp), unsignedp);
673 return;
676 /* No suitable intermediate mode.
677 Generate what we need with shifts. */
678 shift_amount = build_int_cst (NULL_TREE,
679 GET_MODE_BITSIZE (to_mode)
680 - GET_MODE_BITSIZE (from_mode));
681 from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
683 to, unsignedp);
684 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
685 to, unsignedp);
686 if (tmp != to)
687 emit_move_insn (to, tmp);
688 return;
692 /* Support special truncate insns for certain modes. */
693 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
695 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
696 to, from, UNKNOWN);
697 return;
700 /* Handle truncation of volatile memrefs, and so on;
701 the things that couldn't be truncated directly,
702 and for which there was no special instruction.
704 ??? Code above formerly short-circuited this, for most integer
705 mode pairs, with a force_reg in from_mode followed by a recursive
706 call to this routine. Appears always to have been wrong. */
707 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
709 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710 emit_move_insn (to, temp);
711 return;
714 /* Mode combination is not recognized. */
715 gcc_unreachable ();
718 /* Return an rtx for a value that would result
719 from converting X to mode MODE.
720 Both X and MODE may be floating, or both integer.
721 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
723 or by copying to a new temporary with conversion. */
726 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
728 return convert_modes (mode, VOIDmode, x, unsignedp);
731 /* Return an rtx for a value that would result
732 from converting X from mode OLDMODE to mode MODE.
733 Both modes may be floating, or both integer.
734 UNSIGNEDP is nonzero if X is an unsigned value.
736 This can be done by referring to a part of X in place
737 or by copying to a new temporary with conversion.
739 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
742 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
744 rtx temp;
746 /* If FROM is a SUBREG that indicates that we have already done at least
747 the required extension, strip it. */
749 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752 x = gen_lowpart (mode, x);
754 if (GET_MODE (x) != VOIDmode)
755 oldmode = GET_MODE (x);
757 if (mode == oldmode)
758 return x;
760 /* There is one case that we must handle specially: If we are converting
761 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
762 we are to interpret the constant as unsigned, gen_lowpart will do
763 the wrong if the constant appears negative. What we want to do is
764 make the high-order word of the constant zero, not all ones. */
766 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
767 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
768 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
770 HOST_WIDE_INT val = INTVAL (x);
772 if (oldmode != VOIDmode
773 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
775 int width = GET_MODE_BITSIZE (oldmode);
777 /* We need to zero extend VAL. */
778 val &= ((HOST_WIDE_INT) 1 << width) - 1;
781 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
789 if ((GET_CODE (x) == CONST_INT
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
791 || (GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_CLASS (oldmode) == MODE_INT
793 && (GET_CODE (x) == CONST_DOUBLE
794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
796 && direct_load[(int) mode])
797 || (REG_P (x)
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
803 /* ?? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 if (! unsignedp
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
819 return gen_int_mode (val, mode);
822 return gen_lowpart (mode, x);
825 /* Converting from integer constant into mode is always equivalent to an
826 subreg operation. */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
830 return simplify_gen_subreg (mode, x, oldmode, 0);
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
835 return temp;
838 /* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
843 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
845 /* Determine whether the LEN bytes can be moved by using several move
846 instructions. Return nonzero if a call to move_by_pieces should
847 succeed. */
850 can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
853 return MOVE_BY_PIECES_P (len, align);
856 /* Generate several move instructions to copy LEN bytes from block FROM to
857 block TO. (These are MEM rtx's with BLKmode).
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
862 ALIGN is maximum stack alignment we can assume.
864 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
865 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
866 stpcpy. */
869 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
872 struct move_by_pieces data;
873 rtx to_addr, from_addr = XEXP (from, 0);
874 unsigned int max_size = MOVE_MAX_PIECES + 1;
875 enum machine_mode mode = VOIDmode, tmode;
876 enum insn_code icode;
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
880 data.offset = 0;
881 data.from_addr = from_addr;
882 if (to)
884 to_addr = XEXP (to, 0);
885 data.to = to;
886 data.autinc_to
887 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889 data.reverse
890 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
892 else
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897 #ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899 #else
900 data.reverse = 0;
901 #endif
903 data.to_addr = to_addr;
904 data.from = from;
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
912 if (data.reverse) data.offset = len;
913 data.len = len;
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
919 && move_by_pieces_ninsns (len, align, max_size) > 2)
921 /* Find the mode of the largest move... */
922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 if (GET_MODE_SIZE (tmode) < max_size)
925 mode = tmode;
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
929 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 data.autinc_from = 1;
931 data.explicit_inc_from = -1;
933 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
935 data.from_addr = copy_addr_to_reg (from_addr);
936 data.autinc_from = 1;
937 data.explicit_inc_from = 1;
939 if (!data.autinc_from && CONSTANT_P (from_addr))
940 data.from_addr = copy_addr_to_reg (from_addr);
941 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
943 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
944 data.autinc_to = 1;
945 data.explicit_inc_to = -1;
947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
949 data.to_addr = copy_addr_to_reg (to_addr);
950 data.autinc_to = 1;
951 data.explicit_inc_to = 1;
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_addr_to_reg (to_addr);
957 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958 if (align >= GET_MODE_ALIGNMENT (tmode))
959 align = GET_MODE_ALIGNMENT (tmode);
960 else
962 enum machine_mode xmode;
964 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
965 tmode != VOIDmode;
966 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 || SLOW_UNALIGNED_ACCESS (tmode, align))
969 break;
971 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
977 while (max_size > 1)
979 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 if (GET_MODE_SIZE (tmode) < max_size)
982 mode = tmode;
984 if (mode == VOIDmode)
985 break;
987 icode = mov_optab->handlers[(int) mode].insn_code;
988 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
989 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
991 max_size = GET_MODE_SIZE (mode);
994 /* The code above should have handled everything. */
995 gcc_assert (!data.len);
997 if (endp)
999 rtx to1;
1001 gcc_assert (!data.reverse);
1002 if (data.autinc_to)
1004 if (endp == 2)
1006 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1008 else
1009 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1010 -1));
1012 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1013 data.offset);
1015 else
1017 if (endp == 2)
1018 --data.offset;
1019 to1 = adjust_address (data.to, QImode, data.offset);
1021 return to1;
1023 else
1024 return data.to;
1027 /* Return number of insns required to move L bytes by pieces.
1028 ALIGN (in bits) is maximum alignment we can assume. */
1030 static unsigned HOST_WIDE_INT
1031 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 unsigned int max_size)
1034 unsigned HOST_WIDE_INT n_insns = 0;
1035 enum machine_mode tmode;
1037 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038 if (align >= GET_MODE_ALIGNMENT (tmode))
1039 align = GET_MODE_ALIGNMENT (tmode);
1040 else
1042 enum machine_mode tmode, xmode;
1044 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1045 tmode != VOIDmode;
1046 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 || SLOW_UNALIGNED_ACCESS (tmode, align))
1049 break;
1051 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1054 while (max_size > 1)
1056 enum machine_mode mode = VOIDmode;
1057 enum insn_code icode;
1059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) < max_size)
1062 mode = tmode;
1064 if (mode == VOIDmode)
1065 break;
1067 icode = mov_optab->handlers[(int) mode].insn_code;
1068 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1069 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1071 max_size = GET_MODE_SIZE (mode);
1074 gcc_assert (!l);
1075 return n_insns;
1078 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1079 with move instructions for mode MODE. GENFUN is the gen_... function
1080 to make a move insn for that mode. DATA has all the other info. */
1082 static void
1083 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 struct move_by_pieces *data)
1086 unsigned int size = GET_MODE_SIZE (mode);
1087 rtx to1 = NULL_RTX, from1;
1089 while (data->len >= size)
1091 if (data->reverse)
1092 data->offset -= size;
1094 if (data->to)
1096 if (data->autinc_to)
1097 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1098 data->offset);
1099 else
1100 to1 = adjust_address (data->to, mode, data->offset);
1103 if (data->autinc_from)
1104 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1105 data->offset);
1106 else
1107 from1 = adjust_address (data->from, mode, data->offset);
1109 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 GEN_INT (-(HOST_WIDE_INT)size)));
1112 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1113 emit_insn (gen_add2_insn (data->from_addr,
1114 GEN_INT (-(HOST_WIDE_INT)size)));
1116 if (data->to)
1117 emit_insn ((*genfun) (to1, from1));
1118 else
1120 #ifdef PUSH_ROUNDING
1121 emit_single_push_insn (mode, from1, NULL);
1122 #else
1123 gcc_unreachable ();
1124 #endif
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1128 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1129 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1130 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1132 if (! data->reverse)
1133 data->offset += size;
1135 data->len -= size;
1139 /* Emit code to move a block Y to a block X. This may be done with
1140 string-move instructions, with multiple scalar move instructions,
1141 or with a library call.
1143 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1144 SIZE is an rtx that says how long they are.
1145 ALIGN is the maximum alignment we can assume they have.
1146 METHOD describes what kind of copy this is, and what mechanisms may be used.
1148 Return the address of the new block, if memcpy is called and returns it,
1149 0 otherwise. */
1152 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1154 bool may_use_call;
1155 rtx retval = 0;
1156 unsigned int align;
1158 switch (method)
1160 case BLOCK_OP_NORMAL:
1161 case BLOCK_OP_TAILCALL:
1162 may_use_call = true;
1163 break;
1165 case BLOCK_OP_CALL_PARM:
1166 may_use_call = block_move_libcall_safe_for_call_parm ();
1168 /* Make inhibit_defer_pop nonzero around the library call
1169 to force it to pop the arguments right away. */
1170 NO_DEFER_POP;
1171 break;
1173 case BLOCK_OP_NO_LIBCALL:
1174 may_use_call = false;
1175 break;
1177 default:
1178 gcc_unreachable ();
1181 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1183 gcc_assert (MEM_P (x));
1184 gcc_assert (MEM_P (y));
1185 gcc_assert (size);
1187 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1188 block copy is more efficient for other large modes, e.g. DCmode. */
1189 x = adjust_address (x, BLKmode, 0);
1190 y = adjust_address (y, BLKmode, 0);
1192 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1193 can be incorrect is coming from __builtin_memcpy. */
1194 if (GET_CODE (size) == CONST_INT)
1196 if (INTVAL (size) == 0)
1197 return 0;
1199 x = shallow_copy_rtx (x);
1200 y = shallow_copy_rtx (y);
1201 set_mem_size (x, size);
1202 set_mem_size (y, size);
1205 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1206 move_by_pieces (x, y, INTVAL (size), align, 0);
1207 else if (emit_block_move_via_movmem (x, y, size, align))
1209 else if (may_use_call)
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
1218 return retval;
1221 /* A subroutine of emit_block_move. Returns true if calling the
1222 block move libcall will not clobber any parameters which may have
1223 already been placed on the stack. */
1225 static bool
1226 block_move_libcall_safe_for_call_parm (void)
1228 /* If arguments are pushed on the stack, then they're safe. */
1229 if (PUSH_ARGS)
1230 return true;
1232 /* If registers go on the stack anyway, any argument is sure to clobber
1233 an outgoing argument. */
1234 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1236 tree fn = emit_block_move_libcall_fn (false);
1237 (void) fn;
1238 if (REG_PARM_STACK_SPACE (fn) != 0)
1239 return false;
1241 #endif
1243 /* If any argument goes in memory, then it might clobber an outgoing
1244 argument. */
1246 CUMULATIVE_ARGS args_so_far;
1247 tree fn, arg;
1249 fn = emit_block_move_libcall_fn (false);
1250 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1252 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1253 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1255 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1256 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1257 if (!tmp || !REG_P (tmp))
1258 return false;
1259 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1260 return false;
1261 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1264 return true;
1267 /* A subroutine of emit_block_move. Expand a movmem pattern;
1268 return true if successful. */
1270 static bool
1271 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1273 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1274 int save_volatile_ok = volatile_ok;
1275 enum machine_mode mode;
1277 /* Since this is a move insn, we don't care about volatility. */
1278 volatile_ok = 1;
1280 /* Try the most limited insn first, because there's no point
1281 including more than one in the machine description unless
1282 the more limited one has some advantage. */
1284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1285 mode = GET_MODE_WIDER_MODE (mode))
1287 enum insn_code code = movmem_optab[(int) mode];
1288 insn_operand_predicate_fn pred;
1290 if (code != CODE_FOR_nothing
1291 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1292 here because if SIZE is less than the mode mask, as it is
1293 returned by the macro, it will definitely be less than the
1294 actual mode mask. */
1295 && ((GET_CODE (size) == CONST_INT
1296 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1297 <= (GET_MODE_MASK (mode) >> 1)))
1298 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1299 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1300 || (*pred) (x, BLKmode))
1301 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1302 || (*pred) (y, BLKmode))
1303 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1304 || (*pred) (opalign, VOIDmode)))
1306 rtx op2;
1307 rtx last = get_last_insn ();
1308 rtx pat;
1310 op2 = convert_to_mode (mode, size, 1);
1311 pred = insn_data[(int) code].operand[2].predicate;
1312 if (pred != 0 && ! (*pred) (op2, mode))
1313 op2 = copy_to_mode_reg (mode, op2);
1315 /* ??? When called via emit_block_move_for_call, it'd be
1316 nice if there were some way to inform the backend, so
1317 that it doesn't fail the expansion because it thinks
1318 emitting the libcall would be more efficient. */
1320 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1321 if (pat)
1323 emit_insn (pat);
1324 volatile_ok = save_volatile_ok;
1325 return true;
1327 else
1328 delete_insns_since (last);
1332 volatile_ok = save_volatile_ok;
1333 return false;
1336 /* A subroutine of emit_block_move. Expand a call to memcpy.
1337 Return the return value from memcpy, 0 otherwise. */
1339 static rtx
1340 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1342 rtx dst_addr, src_addr;
1343 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1344 enum machine_mode size_mode;
1345 rtx retval;
1347 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348 pseudos. We can then place those new pseudos into a VAR_DECL and
1349 use them later. */
1351 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1352 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1354 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1355 src_addr = convert_memory_address (ptr_mode, src_addr);
1357 dst_tree = make_tree (ptr_type_node, dst_addr);
1358 src_tree = make_tree (ptr_type_node, src_addr);
1360 size_mode = TYPE_MODE (sizetype);
1362 size = convert_to_mode (size_mode, size, 1);
1363 size = copy_to_mode_reg (size_mode, size);
1365 /* It is incorrect to use the libcall calling conventions to call
1366 memcpy in this context. This could be a user call to memcpy and
1367 the user may wish to examine the return value from memcpy. For
1368 targets where libcalls and normal calls have different conventions
1369 for returning pointers, we could end up generating incorrect code. */
1371 size_tree = make_tree (sizetype, size);
1373 fn = emit_block_move_libcall_fn (true);
1374 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1375 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1376 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1378 /* Now we have to build up the CALL_EXPR itself. */
1379 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1380 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1381 call_expr, arg_list, NULL_TREE);
1382 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1384 retval = expand_normal (call_expr);
1386 return retval;
1389 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1390 for the function we use for block copies. The first time FOR_CALL
1391 is true, we call assemble_external. */
1393 static GTY(()) tree block_move_fn;
1395 void
1396 init_block_move_fn (const char *asmspec)
1398 if (!block_move_fn)
1400 tree args, fn;
1402 fn = get_identifier ("memcpy");
1403 args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 const_ptr_type_node, sizetype,
1405 NULL_TREE);
1407 fn = build_decl (FUNCTION_DECL, fn, args);
1408 DECL_EXTERNAL (fn) = 1;
1409 TREE_PUBLIC (fn) = 1;
1410 DECL_ARTIFICIAL (fn) = 1;
1411 TREE_NOTHROW (fn) = 1;
1412 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1413 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1415 block_move_fn = fn;
1418 if (asmspec)
1419 set_user_assembler_name (block_move_fn, asmspec);
1422 static tree
1423 emit_block_move_libcall_fn (int for_call)
1425 static bool emitted_extern;
1427 if (!block_move_fn)
1428 init_block_move_fn (NULL);
1430 if (for_call && !emitted_extern)
1432 emitted_extern = true;
1433 make_decl_rtl (block_move_fn);
1434 assemble_external (block_move_fn);
1437 return block_move_fn;
1440 /* A subroutine of emit_block_move. Copy the data via an explicit
1441 loop. This is used only when libcalls are forbidden. */
1442 /* ??? It'd be nice to copy in hunks larger than QImode. */
1444 static void
1445 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1446 unsigned int align ATTRIBUTE_UNUSED)
1448 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1449 enum machine_mode iter_mode;
1451 iter_mode = GET_MODE (size);
1452 if (iter_mode == VOIDmode)
1453 iter_mode = word_mode;
1455 top_label = gen_label_rtx ();
1456 cmp_label = gen_label_rtx ();
1457 iter = gen_reg_rtx (iter_mode);
1459 emit_move_insn (iter, const0_rtx);
1461 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1462 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1463 do_pending_stack_adjust ();
1465 emit_jump (cmp_label);
1466 emit_label (top_label);
1468 tmp = convert_modes (Pmode, iter_mode, iter, true);
1469 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1470 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1471 x = change_address (x, QImode, x_addr);
1472 y = change_address (y, QImode, y_addr);
1474 emit_move_insn (x, y);
1476 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1477 true, OPTAB_LIB_WIDEN);
1478 if (tmp != iter)
1479 emit_move_insn (iter, tmp);
1481 emit_label (cmp_label);
1483 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1484 true, top_label);
1487 /* Copy all or part of a value X into registers starting at REGNO.
1488 The number of registers to be filled is NREGS. */
1490 void
1491 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1493 int i;
1494 #ifdef HAVE_load_multiple
1495 rtx pat;
1496 rtx last;
1497 #endif
1499 if (nregs == 0)
1500 return;
1502 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1503 x = validize_mem (force_const_mem (mode, x));
1505 /* See if the machine can do this with a load multiple insn. */
1506 #ifdef HAVE_load_multiple
1507 if (HAVE_load_multiple)
1509 last = get_last_insn ();
1510 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1511 GEN_INT (nregs));
1512 if (pat)
1514 emit_insn (pat);
1515 return;
1517 else
1518 delete_insns_since (last);
1520 #endif
1522 for (i = 0; i < nregs; i++)
1523 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1524 operand_subword_force (x, i, mode));
1527 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1528 The number of registers to be filled is NREGS. */
1530 void
1531 move_block_from_reg (int regno, rtx x, int nregs)
1533 int i;
1535 if (nregs == 0)
1536 return;
1538 /* See if the machine can do this with a store multiple insn. */
1539 #ifdef HAVE_store_multiple
1540 if (HAVE_store_multiple)
1542 rtx last = get_last_insn ();
1543 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1544 GEN_INT (nregs));
1545 if (pat)
1547 emit_insn (pat);
1548 return;
1550 else
1551 delete_insns_since (last);
1553 #endif
1555 for (i = 0; i < nregs; i++)
1557 rtx tem = operand_subword (x, i, 1, BLKmode);
1559 gcc_assert (tem);
1561 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1565 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1566 ORIG, where ORIG is a non-consecutive group of registers represented by
1567 a PARALLEL. The clone is identical to the original except in that the
1568 original set of registers is replaced by a new set of pseudo registers.
1569 The new set has the same modes as the original set. */
1572 gen_group_rtx (rtx orig)
1574 int i, length;
1575 rtx *tmps;
1577 gcc_assert (GET_CODE (orig) == PARALLEL);
1579 length = XVECLEN (orig, 0);
1580 tmps = alloca (sizeof (rtx) * length);
1582 /* Skip a NULL entry in first slot. */
1583 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1585 if (i)
1586 tmps[0] = 0;
1588 for (; i < length; i++)
1590 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1591 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1593 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1596 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1599 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1600 except that values are placed in TMPS[i], and must later be moved
1601 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1603 static void
1604 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1606 rtx src;
1607 int start, i;
1608 enum machine_mode m = GET_MODE (orig_src);
1610 gcc_assert (GET_CODE (dst) == PARALLEL);
1612 if (m != VOIDmode
1613 && !SCALAR_INT_MODE_P (m)
1614 && !MEM_P (orig_src)
1615 && GET_CODE (orig_src) != CONCAT)
1617 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1618 if (imode == BLKmode)
1619 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1620 else
1621 src = gen_reg_rtx (imode);
1622 if (imode != BLKmode)
1623 src = gen_lowpart (GET_MODE (orig_src), src);
1624 emit_move_insn (src, orig_src);
1625 /* ...and back again. */
1626 if (imode != BLKmode)
1627 src = gen_lowpart (imode, src);
1628 emit_group_load_1 (tmps, dst, src, type, ssize);
1629 return;
1632 /* Check for a NULL entry, used to indicate that the parameter goes
1633 both on the stack and in registers. */
1634 if (XEXP (XVECEXP (dst, 0, 0), 0))
1635 start = 0;
1636 else
1637 start = 1;
1639 /* Process the pieces. */
1640 for (i = start; i < XVECLEN (dst, 0); i++)
1642 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1643 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1644 unsigned int bytelen = GET_MODE_SIZE (mode);
1645 int shift = 0;
1647 /* Handle trailing fragments that run over the size of the struct. */
1648 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1650 /* Arrange to shift the fragment to where it belongs.
1651 extract_bit_field loads to the lsb of the reg. */
1652 if (
1653 #ifdef BLOCK_REG_PADDING
1654 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1655 == (BYTES_BIG_ENDIAN ? upward : downward)
1656 #else
1657 BYTES_BIG_ENDIAN
1658 #endif
1660 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1661 bytelen = ssize - bytepos;
1662 gcc_assert (bytelen > 0);
1665 /* If we won't be loading directly from memory, protect the real source
1666 from strange tricks we might play; but make sure that the source can
1667 be loaded directly into the destination. */
1668 src = orig_src;
1669 if (!MEM_P (orig_src)
1670 && (!CONSTANT_P (orig_src)
1671 || (GET_MODE (orig_src) != mode
1672 && GET_MODE (orig_src) != VOIDmode)))
1674 if (GET_MODE (orig_src) == VOIDmode)
1675 src = gen_reg_rtx (mode);
1676 else
1677 src = gen_reg_rtx (GET_MODE (orig_src));
1679 emit_move_insn (src, orig_src);
1682 /* Optimize the access just a bit. */
1683 if (MEM_P (src)
1684 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1685 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1686 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1687 && bytelen == GET_MODE_SIZE (mode))
1689 tmps[i] = gen_reg_rtx (mode);
1690 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1692 else if (COMPLEX_MODE_P (mode)
1693 && GET_MODE (src) == mode
1694 && bytelen == GET_MODE_SIZE (mode))
1695 /* Let emit_move_complex do the bulk of the work. */
1696 tmps[i] = src;
1697 else if (GET_CODE (src) == CONCAT)
1699 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1700 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1702 if ((bytepos == 0 && bytelen == slen0)
1703 || (bytepos != 0 && bytepos + bytelen <= slen))
1705 /* The following assumes that the concatenated objects all
1706 have the same size. In this case, a simple calculation
1707 can be used to determine the object and the bit field
1708 to be extracted. */
1709 tmps[i] = XEXP (src, bytepos / slen0);
1710 if (! CONSTANT_P (tmps[i])
1711 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1712 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1713 (bytepos % slen0) * BITS_PER_UNIT,
1714 1, NULL_RTX, mode, mode);
1716 else
1718 rtx mem;
1720 gcc_assert (!bytepos);
1721 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1722 emit_move_insn (mem, src);
1723 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1724 0, 1, NULL_RTX, mode, mode);
1727 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1728 SIMD register, which is currently broken. While we get GCC
1729 to emit proper RTL for these cases, let's dump to memory. */
1730 else if (VECTOR_MODE_P (GET_MODE (dst))
1731 && REG_P (src))
1733 int slen = GET_MODE_SIZE (GET_MODE (src));
1734 rtx mem;
1736 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1737 emit_move_insn (mem, src);
1738 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1740 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1741 && XVECLEN (dst, 0) > 1)
1742 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1743 else if (CONSTANT_P (src)
1744 || (REG_P (src) && GET_MODE (src) == mode))
1745 tmps[i] = src;
1746 else
1747 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1748 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1749 mode, mode);
1751 if (shift)
1752 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1753 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1757 /* Emit code to move a block SRC of type TYPE to a block DST,
1758 where DST is non-consecutive registers represented by a PARALLEL.
1759 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1760 if not known. */
1762 void
1763 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1765 rtx *tmps;
1766 int i;
1768 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1769 emit_group_load_1 (tmps, dst, src, type, ssize);
1771 /* Copy the extracted pieces into the proper (probable) hard regs. */
1772 for (i = 0; i < XVECLEN (dst, 0); i++)
1774 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1775 if (d == NULL)
1776 continue;
1777 emit_move_insn (d, tmps[i]);
1781 /* Similar, but load SRC into new pseudos in a format that looks like
1782 PARALLEL. This can later be fed to emit_group_move to get things
1783 in the right place. */
1786 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1788 rtvec vec;
1789 int i;
1791 vec = rtvec_alloc (XVECLEN (parallel, 0));
1792 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1794 /* Convert the vector to look just like the original PARALLEL, except
1795 with the computed values. */
1796 for (i = 0; i < XVECLEN (parallel, 0); i++)
1798 rtx e = XVECEXP (parallel, 0, i);
1799 rtx d = XEXP (e, 0);
1801 if (d)
1803 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1804 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1806 RTVEC_ELT (vec, i) = e;
1809 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1812 /* Emit code to move a block SRC to block DST, where SRC and DST are
1813 non-consecutive groups of registers, each represented by a PARALLEL. */
1815 void
1816 emit_group_move (rtx dst, rtx src)
1818 int i;
1820 gcc_assert (GET_CODE (src) == PARALLEL
1821 && GET_CODE (dst) == PARALLEL
1822 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1824 /* Skip first entry if NULL. */
1825 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1826 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1827 XEXP (XVECEXP (src, 0, i), 0));
1830 /* Move a group of registers represented by a PARALLEL into pseudos. */
1833 emit_group_move_into_temps (rtx src)
1835 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1836 int i;
1838 for (i = 0; i < XVECLEN (src, 0); i++)
1840 rtx e = XVECEXP (src, 0, i);
1841 rtx d = XEXP (e, 0);
1843 if (d)
1844 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1845 RTVEC_ELT (vec, i) = e;
1848 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1851 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1852 where SRC is non-consecutive registers represented by a PARALLEL.
1853 SSIZE represents the total size of block ORIG_DST, or -1 if not
1854 known. */
1856 void
1857 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1859 rtx *tmps, dst;
1860 int start, finish, i;
1861 enum machine_mode m = GET_MODE (orig_dst);
1863 gcc_assert (GET_CODE (src) == PARALLEL);
1865 if (!SCALAR_INT_MODE_P (m)
1866 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1868 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1869 if (imode == BLKmode)
1870 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1871 else
1872 dst = gen_reg_rtx (imode);
1873 emit_group_store (dst, src, type, ssize);
1874 if (imode != BLKmode)
1875 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1876 emit_move_insn (orig_dst, dst);
1877 return;
1880 /* Check for a NULL entry, used to indicate that the parameter goes
1881 both on the stack and in registers. */
1882 if (XEXP (XVECEXP (src, 0, 0), 0))
1883 start = 0;
1884 else
1885 start = 1;
1886 finish = XVECLEN (src, 0);
1888 tmps = alloca (sizeof (rtx) * finish);
1890 /* Copy the (probable) hard regs into pseudos. */
1891 for (i = start; i < finish; i++)
1893 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1894 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1896 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1897 emit_move_insn (tmps[i], reg);
1899 else
1900 tmps[i] = reg;
1903 /* If we won't be storing directly into memory, protect the real destination
1904 from strange tricks we might play. */
1905 dst = orig_dst;
1906 if (GET_CODE (dst) == PARALLEL)
1908 rtx temp;
1910 /* We can get a PARALLEL dst if there is a conditional expression in
1911 a return statement. In that case, the dst and src are the same,
1912 so no action is necessary. */
1913 if (rtx_equal_p (dst, src))
1914 return;
1916 /* It is unclear if we can ever reach here, but we may as well handle
1917 it. Allocate a temporary, and split this into a store/load to/from
1918 the temporary. */
1920 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1921 emit_group_store (temp, src, type, ssize);
1922 emit_group_load (dst, temp, type, ssize);
1923 return;
1925 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1927 enum machine_mode outer = GET_MODE (dst);
1928 enum machine_mode inner;
1929 HOST_WIDE_INT bytepos;
1930 bool done = false;
1931 rtx temp;
1933 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1934 dst = gen_reg_rtx (outer);
1936 /* Make life a bit easier for combine. */
1937 /* If the first element of the vector is the low part
1938 of the destination mode, use a paradoxical subreg to
1939 initialize the destination. */
1940 if (start < finish)
1942 inner = GET_MODE (tmps[start]);
1943 bytepos = subreg_lowpart_offset (inner, outer);
1944 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1946 temp = simplify_gen_subreg (outer, tmps[start],
1947 inner, 0);
1948 if (temp)
1950 emit_move_insn (dst, temp);
1951 done = true;
1952 start++;
1957 /* If the first element wasn't the low part, try the last. */
1958 if (!done
1959 && start < finish - 1)
1961 inner = GET_MODE (tmps[finish - 1]);
1962 bytepos = subreg_lowpart_offset (inner, outer);
1963 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1965 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1966 inner, 0);
1967 if (temp)
1969 emit_move_insn (dst, temp);
1970 done = true;
1971 finish--;
1976 /* Otherwise, simply initialize the result to zero. */
1977 if (!done)
1978 emit_move_insn (dst, CONST0_RTX (outer));
1981 /* Process the pieces. */
1982 for (i = start; i < finish; i++)
1984 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1985 enum machine_mode mode = GET_MODE (tmps[i]);
1986 unsigned int bytelen = GET_MODE_SIZE (mode);
1987 rtx dest = dst;
1989 /* Handle trailing fragments that run over the size of the struct. */
1990 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1992 /* store_bit_field always takes its value from the lsb.
1993 Move the fragment to the lsb if it's not already there. */
1994 if (
1995 #ifdef BLOCK_REG_PADDING
1996 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1997 == (BYTES_BIG_ENDIAN ? upward : downward)
1998 #else
1999 BYTES_BIG_ENDIAN
2000 #endif
2003 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2004 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2005 build_int_cst (NULL_TREE, shift),
2006 tmps[i], 0);
2008 bytelen = ssize - bytepos;
2011 if (GET_CODE (dst) == CONCAT)
2013 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2020 else
2022 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2023 dest = assign_stack_temp (GET_MODE (dest),
2024 GET_MODE_SIZE (GET_MODE (dest)), 0);
2025 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2026 tmps[i]);
2027 dst = dest;
2028 break;
2032 /* Optimize the access just a bit. */
2033 if (MEM_P (dest)
2034 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2035 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2036 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2037 && bytelen == GET_MODE_SIZE (mode))
2038 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2039 else
2040 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2041 mode, tmps[i]);
2044 /* Copy from the pseudo into the (probable) hard reg. */
2045 if (orig_dst != dst)
2046 emit_move_insn (orig_dst, dst);
2049 /* Generate code to copy a BLKmode object of TYPE out of a
2050 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2051 is null, a stack temporary is created. TGTBLK is returned.
2053 The purpose of this routine is to handle functions that return
2054 BLKmode structures in registers. Some machines (the PA for example)
2055 want to return all small structures in registers regardless of the
2056 structure's alignment. */
2059 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2061 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2062 rtx src = NULL, dst = NULL;
2063 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2064 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2066 if (tgtblk == 0)
2068 tgtblk = assign_temp (build_qualified_type (type,
2069 (TYPE_QUALS (type)
2070 | TYPE_QUAL_CONST)),
2071 0, 1, 1);
2072 preserve_temp_slots (tgtblk);
2075 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2076 into a new pseudo which is a full word. */
2078 if (GET_MODE (srcreg) != BLKmode
2079 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2080 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2082 /* If the structure doesn't take up a whole number of words, see whether
2083 SRCREG is padded on the left or on the right. If it's on the left,
2084 set PADDING_CORRECTION to the number of bits to skip.
2086 In most ABIs, the structure will be returned at the least end of
2087 the register, which translates to right padding on little-endian
2088 targets and left padding on big-endian targets. The opposite
2089 holds if the structure is returned at the most significant
2090 end of the register. */
2091 if (bytes % UNITS_PER_WORD != 0
2092 && (targetm.calls.return_in_msb (type)
2093 ? !BYTES_BIG_ENDIAN
2094 : BYTES_BIG_ENDIAN))
2095 padding_correction
2096 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2098 /* Copy the structure BITSIZE bites at a time.
2100 We could probably emit more efficient code for machines which do not use
2101 strict alignment, but it doesn't seem worth the effort at the current
2102 time. */
2103 for (bitpos = 0, xbitpos = padding_correction;
2104 bitpos < bytes * BITS_PER_UNIT;
2105 bitpos += bitsize, xbitpos += bitsize)
2107 /* We need a new source operand each time xbitpos is on a
2108 word boundary and when xbitpos == padding_correction
2109 (the first time through). */
2110 if (xbitpos % BITS_PER_WORD == 0
2111 || xbitpos == padding_correction)
2112 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2113 GET_MODE (srcreg));
2115 /* We need a new destination operand each time bitpos is on
2116 a word boundary. */
2117 if (bitpos % BITS_PER_WORD == 0)
2118 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2120 /* Use xbitpos for the source extraction (right justified) and
2121 xbitpos for the destination store (left justified). */
2122 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2123 extract_bit_field (src, bitsize,
2124 xbitpos % BITS_PER_WORD, 1,
2125 NULL_RTX, word_mode, word_mode));
2128 return tgtblk;
2131 /* Add a USE expression for REG to the (possibly empty) list pointed
2132 to by CALL_FUSAGE. REG must denote a hard register. */
2134 void
2135 use_reg (rtx *call_fusage, rtx reg)
2137 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2139 *call_fusage
2140 = gen_rtx_EXPR_LIST (VOIDmode,
2141 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2144 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2145 starting at REGNO. All of these registers must be hard registers. */
2147 void
2148 use_regs (rtx *call_fusage, int regno, int nregs)
2150 int i;
2152 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2154 for (i = 0; i < nregs; i++)
2155 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2158 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2159 PARALLEL REGS. This is for calls that pass values in multiple
2160 non-contiguous locations. The Irix 6 ABI has examples of this. */
2162 void
2163 use_group_regs (rtx *call_fusage, rtx regs)
2165 int i;
2167 for (i = 0; i < XVECLEN (regs, 0); i++)
2169 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2171 /* A NULL entry means the parameter goes both on the stack and in
2172 registers. This can also be a MEM for targets that pass values
2173 partially on the stack and partially in registers. */
2174 if (reg != 0 && REG_P (reg))
2175 use_reg (call_fusage, reg);
2180 /* Determine whether the LEN bytes generated by CONSTFUN can be
2181 stored to memory using several move instructions. CONSTFUNDATA is
2182 a pointer which will be passed as argument in every CONSTFUN call.
2183 ALIGN is maximum alignment we can assume. Return nonzero if a
2184 call to store_by_pieces should succeed. */
2187 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2188 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2189 void *constfundata, unsigned int align)
2191 unsigned HOST_WIDE_INT l;
2192 unsigned int max_size;
2193 HOST_WIDE_INT offset = 0;
2194 enum machine_mode mode, tmode;
2195 enum insn_code icode;
2196 int reverse;
2197 rtx cst;
2199 if (len == 0)
2200 return 1;
2202 if (! STORE_BY_PIECES_P (len, align))
2203 return 0;
2205 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2206 if (align >= GET_MODE_ALIGNMENT (tmode))
2207 align = GET_MODE_ALIGNMENT (tmode);
2208 else
2210 enum machine_mode xmode;
2212 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2213 tmode != VOIDmode;
2214 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2215 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2216 || SLOW_UNALIGNED_ACCESS (tmode, align))
2217 break;
2219 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2222 /* We would first store what we can in the largest integer mode, then go to
2223 successively smaller modes. */
2225 for (reverse = 0;
2226 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2227 reverse++)
2229 l = len;
2230 mode = VOIDmode;
2231 max_size = STORE_MAX_PIECES + 1;
2232 while (max_size > 1)
2234 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2235 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2236 if (GET_MODE_SIZE (tmode) < max_size)
2237 mode = tmode;
2239 if (mode == VOIDmode)
2240 break;
2242 icode = mov_optab->handlers[(int) mode].insn_code;
2243 if (icode != CODE_FOR_nothing
2244 && align >= GET_MODE_ALIGNMENT (mode))
2246 unsigned int size = GET_MODE_SIZE (mode);
2248 while (l >= size)
2250 if (reverse)
2251 offset -= size;
2253 cst = (*constfun) (constfundata, offset, mode);
2254 if (!LEGITIMATE_CONSTANT_P (cst))
2255 return 0;
2257 if (!reverse)
2258 offset += size;
2260 l -= size;
2264 max_size = GET_MODE_SIZE (mode);
2267 /* The code above should have handled everything. */
2268 gcc_assert (!l);
2271 return 1;
2274 /* Generate several move instructions to store LEN bytes generated by
2275 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2276 pointer which will be passed as argument in every CONSTFUN call.
2277 ALIGN is maximum alignment we can assume.
2278 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2279 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2280 stpcpy. */
2283 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2284 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2285 void *constfundata, unsigned int align, int endp)
2287 struct store_by_pieces data;
2289 if (len == 0)
2291 gcc_assert (endp != 2);
2292 return to;
2295 gcc_assert (STORE_BY_PIECES_P (len, align));
2296 data.constfun = constfun;
2297 data.constfundata = constfundata;
2298 data.len = len;
2299 data.to = to;
2300 store_by_pieces_1 (&data, align);
2301 if (endp)
2303 rtx to1;
2305 gcc_assert (!data.reverse);
2306 if (data.autinc_to)
2308 if (endp == 2)
2310 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2311 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2312 else
2313 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2314 -1));
2316 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2317 data.offset);
2319 else
2321 if (endp == 2)
2322 --data.offset;
2323 to1 = adjust_address (data.to, QImode, data.offset);
2325 return to1;
2327 else
2328 return data.to;
2331 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2332 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2334 static void
2335 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2337 struct store_by_pieces data;
2339 if (len == 0)
2340 return;
2342 data.constfun = clear_by_pieces_1;
2343 data.constfundata = NULL;
2344 data.len = len;
2345 data.to = to;
2346 store_by_pieces_1 (&data, align);
2349 /* Callback routine for clear_by_pieces.
2350 Return const0_rtx unconditionally. */
2352 static rtx
2353 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2354 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2355 enum machine_mode mode ATTRIBUTE_UNUSED)
2357 return const0_rtx;
2360 /* Subroutine of clear_by_pieces and store_by_pieces.
2361 Generate several move instructions to store LEN bytes of block TO. (A MEM
2362 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2364 static void
2365 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2366 unsigned int align ATTRIBUTE_UNUSED)
2368 rtx to_addr = XEXP (data->to, 0);
2369 unsigned int max_size = STORE_MAX_PIECES + 1;
2370 enum machine_mode mode = VOIDmode, tmode;
2371 enum insn_code icode;
2373 data->offset = 0;
2374 data->to_addr = to_addr;
2375 data->autinc_to
2376 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2377 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2379 data->explicit_inc_to = 0;
2380 data->reverse
2381 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2382 if (data->reverse)
2383 data->offset = data->len;
2385 /* If storing requires more than two move insns,
2386 copy addresses to registers (to make displacements shorter)
2387 and use post-increment if available. */
2388 if (!data->autinc_to
2389 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2391 /* Determine the main mode we'll be using. */
2392 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2393 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2394 if (GET_MODE_SIZE (tmode) < max_size)
2395 mode = tmode;
2397 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2399 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2400 data->autinc_to = 1;
2401 data->explicit_inc_to = -1;
2404 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2405 && ! data->autinc_to)
2407 data->to_addr = copy_addr_to_reg (to_addr);
2408 data->autinc_to = 1;
2409 data->explicit_inc_to = 1;
2412 if ( !data->autinc_to && CONSTANT_P (to_addr))
2413 data->to_addr = copy_addr_to_reg (to_addr);
2416 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2417 if (align >= GET_MODE_ALIGNMENT (tmode))
2418 align = GET_MODE_ALIGNMENT (tmode);
2419 else
2421 enum machine_mode xmode;
2423 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2424 tmode != VOIDmode;
2425 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2426 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2427 || SLOW_UNALIGNED_ACCESS (tmode, align))
2428 break;
2430 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2433 /* First store what we can in the largest integer mode, then go to
2434 successively smaller modes. */
2436 while (max_size > 1)
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2441 mode = tmode;
2443 if (mode == VOIDmode)
2444 break;
2446 icode = mov_optab->handlers[(int) mode].insn_code;
2447 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2448 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2450 max_size = GET_MODE_SIZE (mode);
2453 /* The code above should have handled everything. */
2454 gcc_assert (!data->len);
2457 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2458 with move instructions for mode MODE. GENFUN is the gen_... function
2459 to make a move insn for that mode. DATA has all the other info. */
2461 static void
2462 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2463 struct store_by_pieces *data)
2465 unsigned int size = GET_MODE_SIZE (mode);
2466 rtx to1, cst;
2468 while (data->len >= size)
2470 if (data->reverse)
2471 data->offset -= size;
2473 if (data->autinc_to)
2474 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2475 data->offset);
2476 else
2477 to1 = adjust_address (data->to, mode, data->offset);
2479 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2480 emit_insn (gen_add2_insn (data->to_addr,
2481 GEN_INT (-(HOST_WIDE_INT) size)));
2483 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2484 emit_insn ((*genfun) (to1, cst));
2486 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2487 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2489 if (! data->reverse)
2490 data->offset += size;
2492 data->len -= size;
2496 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2497 its length in bytes. */
2500 clear_storage (rtx object, rtx size, enum block_op_methods method)
2502 enum machine_mode mode = GET_MODE (object);
2503 unsigned int align;
2505 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2507 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2508 just move a zero. Otherwise, do this a piece at a time. */
2509 if (mode != BLKmode
2510 && GET_CODE (size) == CONST_INT
2511 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2513 rtx zero = CONST0_RTX (mode);
2514 if (zero != NULL)
2516 emit_move_insn (object, zero);
2517 return NULL;
2520 if (COMPLEX_MODE_P (mode))
2522 zero = CONST0_RTX (GET_MODE_INNER (mode));
2523 if (zero != NULL)
2525 write_complex_part (object, zero, 0);
2526 write_complex_part (object, zero, 1);
2527 return NULL;
2532 if (size == const0_rtx)
2533 return NULL;
2535 align = MEM_ALIGN (object);
2537 if (GET_CODE (size) == CONST_INT
2538 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2539 clear_by_pieces (object, INTVAL (size), align);
2540 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2542 else
2543 return clear_storage_via_libcall (object, size,
2544 method == BLOCK_OP_TAILCALL);
2546 return NULL;
2549 /* A subroutine of clear_storage. Expand a call to memset.
2550 Return the return value of memset, 0 otherwise. */
2552 static rtx
2553 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2555 tree call_expr, arg_list, fn, object_tree, size_tree;
2556 enum machine_mode size_mode;
2557 rtx retval;
2559 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2560 place those into new pseudos into a VAR_DECL and use them later. */
2562 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2564 size_mode = TYPE_MODE (sizetype);
2565 size = convert_to_mode (size_mode, size, 1);
2566 size = copy_to_mode_reg (size_mode, size);
2568 /* It is incorrect to use the libcall calling conventions to call
2569 memset in this context. This could be a user call to memset and
2570 the user may wish to examine the return value from memset. For
2571 targets where libcalls and normal calls have different conventions
2572 for returning pointers, we could end up generating incorrect code. */
2574 object_tree = make_tree (ptr_type_node, object);
2575 size_tree = make_tree (sizetype, size);
2577 fn = clear_storage_libcall_fn (true);
2578 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2579 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2580 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2582 /* Now we have to build up the CALL_EXPR itself. */
2583 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2584 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2585 call_expr, arg_list, NULL_TREE);
2586 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2588 retval = expand_normal (call_expr);
2590 return retval;
2593 /* A subroutine of clear_storage_via_libcall. Create the tree node
2594 for the function we use for block clears. The first time FOR_CALL
2595 is true, we call assemble_external. */
2597 static GTY(()) tree block_clear_fn;
2599 void
2600 init_block_clear_fn (const char *asmspec)
2602 if (!block_clear_fn)
2604 tree fn, args;
2606 fn = get_identifier ("memset");
2607 args = build_function_type_list (ptr_type_node, ptr_type_node,
2608 integer_type_node, sizetype,
2609 NULL_TREE);
2611 fn = build_decl (FUNCTION_DECL, fn, args);
2612 DECL_EXTERNAL (fn) = 1;
2613 TREE_PUBLIC (fn) = 1;
2614 DECL_ARTIFICIAL (fn) = 1;
2615 TREE_NOTHROW (fn) = 1;
2616 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2617 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2619 block_clear_fn = fn;
2622 if (asmspec)
2623 set_user_assembler_name (block_clear_fn, asmspec);
2626 static tree
2627 clear_storage_libcall_fn (int for_call)
2629 static bool emitted_extern;
2631 if (!block_clear_fn)
2632 init_block_clear_fn (NULL);
2634 if (for_call && !emitted_extern)
2636 emitted_extern = true;
2637 make_decl_rtl (block_clear_fn);
2638 assemble_external (block_clear_fn);
2641 return block_clear_fn;
2644 /* Expand a setmem pattern; return true if successful. */
2646 bool
2647 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2649 /* Try the most limited insn first, because there's no point
2650 including more than one in the machine description unless
2651 the more limited one has some advantage. */
2653 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2654 enum machine_mode mode;
2656 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2657 mode = GET_MODE_WIDER_MODE (mode))
2659 enum insn_code code = setmem_optab[(int) mode];
2660 insn_operand_predicate_fn pred;
2662 if (code != CODE_FOR_nothing
2663 /* We don't need MODE to be narrower than
2664 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2665 the mode mask, as it is returned by the macro, it will
2666 definitely be less than the actual mode mask. */
2667 && ((GET_CODE (size) == CONST_INT
2668 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2669 <= (GET_MODE_MASK (mode) >> 1)))
2670 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2671 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2672 || (*pred) (object, BLKmode))
2673 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2674 || (*pred) (opalign, VOIDmode)))
2676 rtx opsize, opchar;
2677 enum machine_mode char_mode;
2678 rtx last = get_last_insn ();
2679 rtx pat;
2681 opsize = convert_to_mode (mode, size, 1);
2682 pred = insn_data[(int) code].operand[1].predicate;
2683 if (pred != 0 && ! (*pred) (opsize, mode))
2684 opsize = copy_to_mode_reg (mode, opsize);
2686 opchar = val;
2687 char_mode = insn_data[(int) code].operand[2].mode;
2688 if (char_mode != VOIDmode)
2690 opchar = convert_to_mode (char_mode, opchar, 1);
2691 pred = insn_data[(int) code].operand[2].predicate;
2692 if (pred != 0 && ! (*pred) (opchar, char_mode))
2693 opchar = copy_to_mode_reg (char_mode, opchar);
2696 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2697 if (pat)
2699 emit_insn (pat);
2700 return true;
2702 else
2703 delete_insns_since (last);
2707 return false;
2711 /* Write to one of the components of the complex value CPLX. Write VAL to
2712 the real part if IMAG_P is false, and the imaginary part if its true. */
2714 static void
2715 write_complex_part (rtx cplx, rtx val, bool imag_p)
2717 enum machine_mode cmode;
2718 enum machine_mode imode;
2719 unsigned ibitsize;
2721 if (GET_CODE (cplx) == CONCAT)
2723 emit_move_insn (XEXP (cplx, imag_p), val);
2724 return;
2727 cmode = GET_MODE (cplx);
2728 imode = GET_MODE_INNER (cmode);
2729 ibitsize = GET_MODE_BITSIZE (imode);
2731 /* For MEMs simplify_gen_subreg may generate an invalid new address
2732 because, e.g., the original address is considered mode-dependent
2733 by the target, which restricts simplify_subreg from invoking
2734 adjust_address_nv. Instead of preparing fallback support for an
2735 invalid address, we call adjust_address_nv directly. */
2736 if (MEM_P (cplx))
2738 emit_move_insn (adjust_address_nv (cplx, imode,
2739 imag_p ? GET_MODE_SIZE (imode) : 0),
2740 val);
2741 return;
2744 /* If the sub-object is at least word sized, then we know that subregging
2745 will work. This special case is important, since store_bit_field
2746 wants to operate on integer modes, and there's rarely an OImode to
2747 correspond to TCmode. */
2748 if (ibitsize >= BITS_PER_WORD
2749 /* For hard regs we have exact predicates. Assume we can split
2750 the original object if it spans an even number of hard regs.
2751 This special case is important for SCmode on 64-bit platforms
2752 where the natural size of floating-point regs is 32-bit. */
2753 || (REG_P (cplx)
2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2757 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2758 imag_p ? GET_MODE_SIZE (imode) : 0);
2759 if (part)
2761 emit_move_insn (part, val);
2762 return;
2764 else
2765 /* simplify_gen_subreg may fail for sub-word MEMs. */
2766 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2769 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2772 /* Extract one of the components of the complex value CPLX. Extract the
2773 real part if IMAG_P is false, and the imaginary part if it's true. */
2775 static rtx
2776 read_complex_part (rtx cplx, bool imag_p)
2778 enum machine_mode cmode, imode;
2779 unsigned ibitsize;
2781 if (GET_CODE (cplx) == CONCAT)
2782 return XEXP (cplx, imag_p);
2784 cmode = GET_MODE (cplx);
2785 imode = GET_MODE_INNER (cmode);
2786 ibitsize = GET_MODE_BITSIZE (imode);
2788 /* Special case reads from complex constants that got spilled to memory. */
2789 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2791 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2792 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2794 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2795 if (CONSTANT_CLASS_P (part))
2796 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2800 /* For MEMs simplify_gen_subreg may generate an invalid new address
2801 because, e.g., the original address is considered mode-dependent
2802 by the target, which restricts simplify_subreg from invoking
2803 adjust_address_nv. Instead of preparing fallback support for an
2804 invalid address, we call adjust_address_nv directly. */
2805 if (MEM_P (cplx))
2806 return adjust_address_nv (cplx, imode,
2807 imag_p ? GET_MODE_SIZE (imode) : 0);
2809 /* If the sub-object is at least word sized, then we know that subregging
2810 will work. This special case is important, since extract_bit_field
2811 wants to operate on integer modes, and there's rarely an OImode to
2812 correspond to TCmode. */
2813 if (ibitsize >= BITS_PER_WORD
2814 /* For hard regs we have exact predicates. Assume we can split
2815 the original object if it spans an even number of hard regs.
2816 This special case is important for SCmode on 64-bit platforms
2817 where the natural size of floating-point regs is 32-bit. */
2818 || (REG_P (cplx)
2819 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2820 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2822 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2823 imag_p ? GET_MODE_SIZE (imode) : 0);
2824 if (ret)
2825 return ret;
2826 else
2827 /* simplify_gen_subreg may fail for sub-word MEMs. */
2828 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2831 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2832 true, NULL_RTX, imode, imode);
2835 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2836 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2837 represented in NEW_MODE. If FORCE is true, this will never happen, as
2838 we'll force-create a SUBREG if needed. */
2840 static rtx
2841 emit_move_change_mode (enum machine_mode new_mode,
2842 enum machine_mode old_mode, rtx x, bool force)
2844 rtx ret;
2846 if (MEM_P (x))
2848 /* We don't have to worry about changing the address since the
2849 size in bytes is supposed to be the same. */
2850 if (reload_in_progress)
2852 /* Copy the MEM to change the mode and move any
2853 substitutions from the old MEM to the new one. */
2854 ret = adjust_address_nv (x, new_mode, 0);
2855 copy_replacements (x, ret);
2857 else
2858 ret = adjust_address (x, new_mode, 0);
2860 else
2862 /* Note that we do want simplify_subreg's behavior of validating
2863 that the new mode is ok for a hard register. If we were to use
2864 simplify_gen_subreg, we would create the subreg, but would
2865 probably run into the target not being able to implement it. */
2866 /* Except, of course, when FORCE is true, when this is exactly what
2867 we want. Which is needed for CCmodes on some targets. */
2868 if (force)
2869 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2870 else
2871 ret = simplify_subreg (new_mode, x, old_mode, 0);
2874 return ret;
2877 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2878 an integer mode of the same size as MODE. Returns the instruction
2879 emitted, or NULL if such a move could not be generated. */
2881 static rtx
2882 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2884 enum machine_mode imode;
2885 enum insn_code code;
2887 /* There must exist a mode of the exact size we require. */
2888 imode = int_mode_for_mode (mode);
2889 if (imode == BLKmode)
2890 return NULL_RTX;
2892 /* The target must support moves in this mode. */
2893 code = mov_optab->handlers[imode].insn_code;
2894 if (code == CODE_FOR_nothing)
2895 return NULL_RTX;
2897 x = emit_move_change_mode (imode, mode, x, force);
2898 if (x == NULL_RTX)
2899 return NULL_RTX;
2900 y = emit_move_change_mode (imode, mode, y, force);
2901 if (y == NULL_RTX)
2902 return NULL_RTX;
2903 return emit_insn (GEN_FCN (code) (x, y));
2906 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2907 Return an equivalent MEM that does not use an auto-increment. */
2909 static rtx
2910 emit_move_resolve_push (enum machine_mode mode, rtx x)
2912 enum rtx_code code = GET_CODE (XEXP (x, 0));
2913 HOST_WIDE_INT adjust;
2914 rtx temp;
2916 adjust = GET_MODE_SIZE (mode);
2917 #ifdef PUSH_ROUNDING
2918 adjust = PUSH_ROUNDING (adjust);
2919 #endif
2920 if (code == PRE_DEC || code == POST_DEC)
2921 adjust = -adjust;
2922 else if (code == PRE_MODIFY || code == POST_MODIFY)
2924 rtx expr = XEXP (XEXP (x, 0), 1);
2925 HOST_WIDE_INT val;
2927 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2928 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2929 val = INTVAL (XEXP (expr, 1));
2930 if (GET_CODE (expr) == MINUS)
2931 val = -val;
2932 gcc_assert (adjust == val || adjust == -val);
2933 adjust = val;
2936 /* Do not use anti_adjust_stack, since we don't want to update
2937 stack_pointer_delta. */
2938 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2939 GEN_INT (adjust), stack_pointer_rtx,
2940 0, OPTAB_LIB_WIDEN);
2941 if (temp != stack_pointer_rtx)
2942 emit_move_insn (stack_pointer_rtx, temp);
2944 switch (code)
2946 case PRE_INC:
2947 case PRE_DEC:
2948 case PRE_MODIFY:
2949 temp = stack_pointer_rtx;
2950 break;
2951 case POST_INC:
2952 case POST_DEC:
2953 case POST_MODIFY:
2954 temp = plus_constant (stack_pointer_rtx, -adjust);
2955 break;
2956 default:
2957 gcc_unreachable ();
2960 return replace_equiv_address (x, temp);
2963 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2964 X is known to satisfy push_operand, and MODE is known to be complex.
2965 Returns the last instruction emitted. */
2967 static rtx
2968 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2970 enum machine_mode submode = GET_MODE_INNER (mode);
2971 bool imag_first;
2973 #ifdef PUSH_ROUNDING
2974 unsigned int submodesize = GET_MODE_SIZE (submode);
2976 /* In case we output to the stack, but the size is smaller than the
2977 machine can push exactly, we need to use move instructions. */
2978 if (PUSH_ROUNDING (submodesize) != submodesize)
2980 x = emit_move_resolve_push (mode, x);
2981 return emit_move_insn (x, y);
2983 #endif
2985 /* Note that the real part always precedes the imag part in memory
2986 regardless of machine's endianness. */
2987 switch (GET_CODE (XEXP (x, 0)))
2989 case PRE_DEC:
2990 case POST_DEC:
2991 imag_first = true;
2992 break;
2993 case PRE_INC:
2994 case POST_INC:
2995 imag_first = false;
2996 break;
2997 default:
2998 gcc_unreachable ();
3001 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3002 read_complex_part (y, imag_first));
3003 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3004 read_complex_part (y, !imag_first));
3007 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3008 MODE is known to be complex. Returns the last instruction emitted. */
3010 static rtx
3011 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3013 bool try_int;
3015 /* Need to take special care for pushes, to maintain proper ordering
3016 of the data, and possibly extra padding. */
3017 if (push_operand (x, mode))
3018 return emit_move_complex_push (mode, x, y);
3020 /* See if we can coerce the target into moving both values at once. */
3022 /* Move floating point as parts. */
3023 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3024 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3025 try_int = false;
3026 /* Not possible if the values are inherently not adjacent. */
3027 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3028 try_int = false;
3029 /* Is possible if both are registers (or subregs of registers). */
3030 else if (register_operand (x, mode) && register_operand (y, mode))
3031 try_int = true;
3032 /* If one of the operands is a memory, and alignment constraints
3033 are friendly enough, we may be able to do combined memory operations.
3034 We do not attempt this if Y is a constant because that combination is
3035 usually better with the by-parts thing below. */
3036 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3037 && (!STRICT_ALIGNMENT
3038 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3039 try_int = true;
3040 else
3041 try_int = false;
3043 if (try_int)
3045 rtx ret;
3047 /* For memory to memory moves, optimal behavior can be had with the
3048 existing block move logic. */
3049 if (MEM_P (x) && MEM_P (y))
3051 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3052 BLOCK_OP_NO_LIBCALL);
3053 return get_last_insn ();
3056 ret = emit_move_via_integer (mode, x, y, true);
3057 if (ret)
3058 return ret;
3061 /* Show the output dies here. This is necessary for SUBREGs
3062 of pseudos since we cannot track their lifetimes correctly;
3063 hard regs shouldn't appear here except as return values. */
3064 if (!reload_completed && !reload_in_progress
3065 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3066 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3068 write_complex_part (x, read_complex_part (y, false), false);
3069 write_complex_part (x, read_complex_part (y, true), true);
3070 return get_last_insn ();
3073 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3074 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3076 static rtx
3077 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3079 rtx ret;
3081 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3082 if (mode != CCmode)
3084 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3085 if (code != CODE_FOR_nothing)
3087 x = emit_move_change_mode (CCmode, mode, x, true);
3088 y = emit_move_change_mode (CCmode, mode, y, true);
3089 return emit_insn (GEN_FCN (code) (x, y));
3093 /* Otherwise, find the MODE_INT mode of the same width. */
3094 ret = emit_move_via_integer (mode, x, y, false);
3095 gcc_assert (ret != NULL);
3096 return ret;
3099 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3100 MODE is any multi-word or full-word mode that lacks a move_insn
3101 pattern. Note that you will get better code if you define such
3102 patterns, even if they must turn into multiple assembler instructions. */
3104 static rtx
3105 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3107 rtx last_insn = 0;
3108 rtx seq, inner;
3109 bool need_clobber;
3110 int i;
3112 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3114 /* If X is a push on the stack, do the push now and replace
3115 X with a reference to the stack pointer. */
3116 if (push_operand (x, mode))
3117 x = emit_move_resolve_push (mode, x);
3119 /* If we are in reload, see if either operand is a MEM whose address
3120 is scheduled for replacement. */
3121 if (reload_in_progress && MEM_P (x)
3122 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3123 x = replace_equiv_address_nv (x, inner);
3124 if (reload_in_progress && MEM_P (y)
3125 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3126 y = replace_equiv_address_nv (y, inner);
3128 start_sequence ();
3130 need_clobber = false;
3131 for (i = 0;
3132 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3133 i++)
3135 rtx xpart = operand_subword (x, i, 1, mode);
3136 rtx ypart = operand_subword (y, i, 1, mode);
3138 /* If we can't get a part of Y, put Y into memory if it is a
3139 constant. Otherwise, force it into a register. Then we must
3140 be able to get a part of Y. */
3141 if (ypart == 0 && CONSTANT_P (y))
3143 y = use_anchored_address (force_const_mem (mode, y));
3144 ypart = operand_subword (y, i, 1, mode);
3146 else if (ypart == 0)
3147 ypart = operand_subword_force (y, i, mode);
3149 gcc_assert (xpart && ypart);
3151 need_clobber |= (GET_CODE (xpart) == SUBREG);
3153 last_insn = emit_move_insn (xpart, ypart);
3156 seq = get_insns ();
3157 end_sequence ();
3159 /* Show the output dies here. This is necessary for SUBREGs
3160 of pseudos since we cannot track their lifetimes correctly;
3161 hard regs shouldn't appear here except as return values.
3162 We never want to emit such a clobber after reload. */
3163 if (x != y
3164 && ! (reload_in_progress || reload_completed)
3165 && need_clobber != 0)
3166 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3168 emit_insn (seq);
3170 return last_insn;
3173 /* Low level part of emit_move_insn.
3174 Called just like emit_move_insn, but assumes X and Y
3175 are basically valid. */
3178 emit_move_insn_1 (rtx x, rtx y)
3180 enum machine_mode mode = GET_MODE (x);
3181 enum insn_code code;
3183 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3185 code = mov_optab->handlers[mode].insn_code;
3186 if (code != CODE_FOR_nothing)
3187 return emit_insn (GEN_FCN (code) (x, y));
3189 /* Expand complex moves by moving real part and imag part. */
3190 if (COMPLEX_MODE_P (mode))
3191 return emit_move_complex (mode, x, y);
3193 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3195 rtx result = emit_move_via_integer (mode, x, y, true);
3197 /* If we can't find an integer mode, use multi words. */
3198 if (result)
3199 return result;
3200 else
3201 return emit_move_multi_word (mode, x, y);
3204 if (GET_MODE_CLASS (mode) == MODE_CC)
3205 return emit_move_ccmode (mode, x, y);
3207 /* Try using a move pattern for the corresponding integer mode. This is
3208 only safe when simplify_subreg can convert MODE constants into integer
3209 constants. At present, it can only do this reliably if the value
3210 fits within a HOST_WIDE_INT. */
3211 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3213 rtx ret = emit_move_via_integer (mode, x, y, false);
3214 if (ret)
3215 return ret;
3218 return emit_move_multi_word (mode, x, y);
3221 /* Generate code to copy Y into X.
3222 Both Y and X must have the same mode, except that
3223 Y can be a constant with VOIDmode.
3224 This mode cannot be BLKmode; use emit_block_move for that.
3226 Return the last instruction emitted. */
3229 emit_move_insn (rtx x, rtx y)
3231 enum machine_mode mode = GET_MODE (x);
3232 rtx y_cst = NULL_RTX;
3233 rtx last_insn, set;
3235 gcc_assert (mode != BLKmode
3236 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3238 if (CONSTANT_P (y))
3240 if (optimize
3241 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3242 && (last_insn = compress_float_constant (x, y)))
3243 return last_insn;
3245 y_cst = y;
3247 if (!LEGITIMATE_CONSTANT_P (y))
3249 y = force_const_mem (mode, y);
3251 /* If the target's cannot_force_const_mem prevented the spill,
3252 assume that the target's move expanders will also take care
3253 of the non-legitimate constant. */
3254 if (!y)
3255 y = y_cst;
3256 else
3257 y = use_anchored_address (y);
3261 /* If X or Y are memory references, verify that their addresses are valid
3262 for the machine. */
3263 if (MEM_P (x)
3264 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3265 && ! push_operand (x, GET_MODE (x)))
3266 || (flag_force_addr
3267 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3268 x = validize_mem (x);
3270 if (MEM_P (y)
3271 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3272 || (flag_force_addr
3273 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3274 y = validize_mem (y);
3276 gcc_assert (mode != BLKmode);
3278 last_insn = emit_move_insn_1 (x, y);
3280 if (y_cst && REG_P (x)
3281 && (set = single_set (last_insn)) != NULL_RTX
3282 && SET_DEST (set) == x
3283 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3284 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3286 return last_insn;
3289 /* If Y is representable exactly in a narrower mode, and the target can
3290 perform the extension directly from constant or memory, then emit the
3291 move as an extension. */
3293 static rtx
3294 compress_float_constant (rtx x, rtx y)
3296 enum machine_mode dstmode = GET_MODE (x);
3297 enum machine_mode orig_srcmode = GET_MODE (y);
3298 enum machine_mode srcmode;
3299 REAL_VALUE_TYPE r;
3300 int oldcost, newcost;
3302 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3304 if (LEGITIMATE_CONSTANT_P (y))
3305 oldcost = rtx_cost (y, SET);
3306 else
3307 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3309 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3310 srcmode != orig_srcmode;
3311 srcmode = GET_MODE_WIDER_MODE (srcmode))
3313 enum insn_code ic;
3314 rtx trunc_y, last_insn;
3316 /* Skip if the target can't extend this way. */
3317 ic = can_extend_p (dstmode, srcmode, 0);
3318 if (ic == CODE_FOR_nothing)
3319 continue;
3321 /* Skip if the narrowed value isn't exact. */
3322 if (! exact_real_truncate (srcmode, &r))
3323 continue;
3325 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3327 if (LEGITIMATE_CONSTANT_P (trunc_y))
3329 /* Skip if the target needs extra instructions to perform
3330 the extension. */
3331 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3332 continue;
3333 /* This is valid, but may not be cheaper than the original. */
3334 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3335 if (oldcost < newcost)
3336 continue;
3338 else if (float_extend_from_mem[dstmode][srcmode])
3340 trunc_y = force_const_mem (srcmode, trunc_y);
3341 /* This is valid, but may not be cheaper than the original. */
3342 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3343 if (oldcost < newcost)
3344 continue;
3345 trunc_y = validize_mem (trunc_y);
3347 else
3348 continue;
3350 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3351 last_insn = get_last_insn ();
3353 if (REG_P (x))
3354 set_unique_reg_note (last_insn, REG_EQUAL, y);
3356 return last_insn;
3359 return NULL_RTX;
3362 /* Pushing data onto the stack. */
3364 /* Push a block of length SIZE (perhaps variable)
3365 and return an rtx to address the beginning of the block.
3366 The value may be virtual_outgoing_args_rtx.
3368 EXTRA is the number of bytes of padding to push in addition to SIZE.
3369 BELOW nonzero means this padding comes at low addresses;
3370 otherwise, the padding comes at high addresses. */
3373 push_block (rtx size, int extra, int below)
3375 rtx temp;
3377 size = convert_modes (Pmode, ptr_mode, size, 1);
3378 if (CONSTANT_P (size))
3379 anti_adjust_stack (plus_constant (size, extra));
3380 else if (REG_P (size) && extra == 0)
3381 anti_adjust_stack (size);
3382 else
3384 temp = copy_to_mode_reg (Pmode, size);
3385 if (extra != 0)
3386 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3387 temp, 0, OPTAB_LIB_WIDEN);
3388 anti_adjust_stack (temp);
3391 #ifndef STACK_GROWS_DOWNWARD
3392 if (0)
3393 #else
3394 if (1)
3395 #endif
3397 temp = virtual_outgoing_args_rtx;
3398 if (extra != 0 && below)
3399 temp = plus_constant (temp, extra);
3401 else
3403 if (GET_CODE (size) == CONST_INT)
3404 temp = plus_constant (virtual_outgoing_args_rtx,
3405 -INTVAL (size) - (below ? 0 : extra));
3406 else if (extra != 0 && !below)
3407 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3408 negate_rtx (Pmode, plus_constant (size, extra)));
3409 else
3410 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3411 negate_rtx (Pmode, size));
3414 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3417 #ifdef PUSH_ROUNDING
3419 /* Emit single push insn. */
3421 static void
3422 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3424 rtx dest_addr;
3425 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3426 rtx dest;
3427 enum insn_code icode;
3428 insn_operand_predicate_fn pred;
3430 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3431 /* If there is push pattern, use it. Otherwise try old way of throwing
3432 MEM representing push operation to move expander. */
3433 icode = push_optab->handlers[(int) mode].insn_code;
3434 if (icode != CODE_FOR_nothing)
3436 if (((pred = insn_data[(int) icode].operand[0].predicate)
3437 && !((*pred) (x, mode))))
3438 x = force_reg (mode, x);
3439 emit_insn (GEN_FCN (icode) (x));
3440 return;
3442 if (GET_MODE_SIZE (mode) == rounded_size)
3443 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3444 /* If we are to pad downward, adjust the stack pointer first and
3445 then store X into the stack location using an offset. This is
3446 because emit_move_insn does not know how to pad; it does not have
3447 access to type. */
3448 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3450 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3451 HOST_WIDE_INT offset;
3453 emit_move_insn (stack_pointer_rtx,
3454 expand_binop (Pmode,
3455 #ifdef STACK_GROWS_DOWNWARD
3456 sub_optab,
3457 #else
3458 add_optab,
3459 #endif
3460 stack_pointer_rtx,
3461 GEN_INT (rounded_size),
3462 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3464 offset = (HOST_WIDE_INT) padding_size;
3465 #ifdef STACK_GROWS_DOWNWARD
3466 if (STACK_PUSH_CODE == POST_DEC)
3467 /* We have already decremented the stack pointer, so get the
3468 previous value. */
3469 offset += (HOST_WIDE_INT) rounded_size;
3470 #else
3471 if (STACK_PUSH_CODE == POST_INC)
3472 /* We have already incremented the stack pointer, so get the
3473 previous value. */
3474 offset -= (HOST_WIDE_INT) rounded_size;
3475 #endif
3476 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3478 else
3480 #ifdef STACK_GROWS_DOWNWARD
3481 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3482 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3483 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3484 #else
3485 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3486 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3487 GEN_INT (rounded_size));
3488 #endif
3489 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3492 dest = gen_rtx_MEM (mode, dest_addr);
3494 if (type != 0)
3496 set_mem_attributes (dest, type, 1);
3498 if (flag_optimize_sibling_calls)
3499 /* Function incoming arguments may overlap with sibling call
3500 outgoing arguments and we cannot allow reordering of reads
3501 from function arguments with stores to outgoing arguments
3502 of sibling calls. */
3503 set_mem_alias_set (dest, 0);
3505 emit_move_insn (dest, x);
3507 #endif
3509 /* Generate code to push X onto the stack, assuming it has mode MODE and
3510 type TYPE.
3511 MODE is redundant except when X is a CONST_INT (since they don't
3512 carry mode info).
3513 SIZE is an rtx for the size of data to be copied (in bytes),
3514 needed only if X is BLKmode.
3516 ALIGN (in bits) is maximum alignment we can assume.
3518 If PARTIAL and REG are both nonzero, then copy that many of the first
3519 bytes of X into registers starting with REG, and push the rest of X.
3520 The amount of space pushed is decreased by PARTIAL bytes.
3521 REG must be a hard register in this case.
3522 If REG is zero but PARTIAL is not, take any all others actions for an
3523 argument partially in registers, but do not actually load any
3524 registers.
3526 EXTRA is the amount in bytes of extra space to leave next to this arg.
3527 This is ignored if an argument block has already been allocated.
3529 On a machine that lacks real push insns, ARGS_ADDR is the address of
3530 the bottom of the argument block for this call. We use indexing off there
3531 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3532 argument block has not been preallocated.
3534 ARGS_SO_FAR is the size of args previously pushed for this call.
3536 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3537 for arguments passed in registers. If nonzero, it will be the number
3538 of bytes required. */
3540 void
3541 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3542 unsigned int align, int partial, rtx reg, int extra,
3543 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3544 rtx alignment_pad)
3546 rtx xinner;
3547 enum direction stack_direction
3548 #ifdef STACK_GROWS_DOWNWARD
3549 = downward;
3550 #else
3551 = upward;
3552 #endif
3554 /* Decide where to pad the argument: `downward' for below,
3555 `upward' for above, or `none' for don't pad it.
3556 Default is below for small data on big-endian machines; else above. */
3557 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3559 /* Invert direction if stack is post-decrement.
3560 FIXME: why? */
3561 if (STACK_PUSH_CODE == POST_DEC)
3562 if (where_pad != none)
3563 where_pad = (where_pad == downward ? upward : downward);
3565 xinner = x;
3567 if (mode == BLKmode)
3569 /* Copy a block into the stack, entirely or partially. */
3571 rtx temp;
3572 int used;
3573 int offset;
3574 int skip;
3576 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3577 used = partial - offset;
3579 gcc_assert (size);
3581 /* USED is now the # of bytes we need not copy to the stack
3582 because registers will take care of them. */
3584 if (partial != 0)
3585 xinner = adjust_address (xinner, BLKmode, used);
3587 /* If the partial register-part of the arg counts in its stack size,
3588 skip the part of stack space corresponding to the registers.
3589 Otherwise, start copying to the beginning of the stack space,
3590 by setting SKIP to 0. */
3591 skip = (reg_parm_stack_space == 0) ? 0 : used;
3593 #ifdef PUSH_ROUNDING
3594 /* Do it with several push insns if that doesn't take lots of insns
3595 and if there is no difficulty with push insns that skip bytes
3596 on the stack for alignment purposes. */
3597 if (args_addr == 0
3598 && PUSH_ARGS
3599 && GET_CODE (size) == CONST_INT
3600 && skip == 0
3601 && MEM_ALIGN (xinner) >= align
3602 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3603 /* Here we avoid the case of a structure whose weak alignment
3604 forces many pushes of a small amount of data,
3605 and such small pushes do rounding that causes trouble. */
3606 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3607 || align >= BIGGEST_ALIGNMENT
3608 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3609 == (align / BITS_PER_UNIT)))
3610 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3612 /* Push padding now if padding above and stack grows down,
3613 or if padding below and stack grows up.
3614 But if space already allocated, this has already been done. */
3615 if (extra && args_addr == 0
3616 && where_pad != none && where_pad != stack_direction)
3617 anti_adjust_stack (GEN_INT (extra));
3619 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3621 else
3622 #endif /* PUSH_ROUNDING */
3624 rtx target;
3626 /* Otherwise make space on the stack and copy the data
3627 to the address of that space. */
3629 /* Deduct words put into registers from the size we must copy. */
3630 if (partial != 0)
3632 if (GET_CODE (size) == CONST_INT)
3633 size = GEN_INT (INTVAL (size) - used);
3634 else
3635 size = expand_binop (GET_MODE (size), sub_optab, size,
3636 GEN_INT (used), NULL_RTX, 0,
3637 OPTAB_LIB_WIDEN);
3640 /* Get the address of the stack space.
3641 In this case, we do not deal with EXTRA separately.
3642 A single stack adjust will do. */
3643 if (! args_addr)
3645 temp = push_block (size, extra, where_pad == downward);
3646 extra = 0;
3648 else if (GET_CODE (args_so_far) == CONST_INT)
3649 temp = memory_address (BLKmode,
3650 plus_constant (args_addr,
3651 skip + INTVAL (args_so_far)));
3652 else
3653 temp = memory_address (BLKmode,
3654 plus_constant (gen_rtx_PLUS (Pmode,
3655 args_addr,
3656 args_so_far),
3657 skip));
3659 if (!ACCUMULATE_OUTGOING_ARGS)
3661 /* If the source is referenced relative to the stack pointer,
3662 copy it to another register to stabilize it. We do not need
3663 to do this if we know that we won't be changing sp. */
3665 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3666 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3667 temp = copy_to_reg (temp);
3670 target = gen_rtx_MEM (BLKmode, temp);
3672 /* We do *not* set_mem_attributes here, because incoming arguments
3673 may overlap with sibling call outgoing arguments and we cannot
3674 allow reordering of reads from function arguments with stores
3675 to outgoing arguments of sibling calls. We do, however, want
3676 to record the alignment of the stack slot. */
3677 /* ALIGN may well be better aligned than TYPE, e.g. due to
3678 PARM_BOUNDARY. Assume the caller isn't lying. */
3679 set_mem_align (target, align);
3681 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3684 else if (partial > 0)
3686 /* Scalar partly in registers. */
3688 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3689 int i;
3690 int not_stack;
3691 /* # bytes of start of argument
3692 that we must make space for but need not store. */
3693 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3694 int args_offset = INTVAL (args_so_far);
3695 int skip;
3697 /* Push padding now if padding above and stack grows down,
3698 or if padding below and stack grows up.
3699 But if space already allocated, this has already been done. */
3700 if (extra && args_addr == 0
3701 && where_pad != none && where_pad != stack_direction)
3702 anti_adjust_stack (GEN_INT (extra));
3704 /* If we make space by pushing it, we might as well push
3705 the real data. Otherwise, we can leave OFFSET nonzero
3706 and leave the space uninitialized. */
3707 if (args_addr == 0)
3708 offset = 0;
3710 /* Now NOT_STACK gets the number of words that we don't need to
3711 allocate on the stack. Convert OFFSET to words too. */
3712 not_stack = (partial - offset) / UNITS_PER_WORD;
3713 offset /= UNITS_PER_WORD;
3715 /* If the partial register-part of the arg counts in its stack size,
3716 skip the part of stack space corresponding to the registers.
3717 Otherwise, start copying to the beginning of the stack space,
3718 by setting SKIP to 0. */
3719 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3721 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3722 x = validize_mem (force_const_mem (mode, x));
3724 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3725 SUBREGs of such registers are not allowed. */
3726 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3727 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3728 x = copy_to_reg (x);
3730 /* Loop over all the words allocated on the stack for this arg. */
3731 /* We can do it by words, because any scalar bigger than a word
3732 has a size a multiple of a word. */
3733 #ifndef PUSH_ARGS_REVERSED
3734 for (i = not_stack; i < size; i++)
3735 #else
3736 for (i = size - 1; i >= not_stack; i--)
3737 #endif
3738 if (i >= not_stack + offset)
3739 emit_push_insn (operand_subword_force (x, i, mode),
3740 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3741 0, args_addr,
3742 GEN_INT (args_offset + ((i - not_stack + skip)
3743 * UNITS_PER_WORD)),
3744 reg_parm_stack_space, alignment_pad);
3746 else
3748 rtx addr;
3749 rtx dest;
3751 /* Push padding now if padding above and stack grows down,
3752 or if padding below and stack grows up.
3753 But if space already allocated, this has already been done. */
3754 if (extra && args_addr == 0
3755 && where_pad != none && where_pad != stack_direction)
3756 anti_adjust_stack (GEN_INT (extra));
3758 #ifdef PUSH_ROUNDING
3759 if (args_addr == 0 && PUSH_ARGS)
3760 emit_single_push_insn (mode, x, type);
3761 else
3762 #endif
3764 if (GET_CODE (args_so_far) == CONST_INT)
3765 addr
3766 = memory_address (mode,
3767 plus_constant (args_addr,
3768 INTVAL (args_so_far)));
3769 else
3770 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3771 args_so_far));
3772 dest = gen_rtx_MEM (mode, addr);
3774 /* We do *not* set_mem_attributes here, because incoming arguments
3775 may overlap with sibling call outgoing arguments and we cannot
3776 allow reordering of reads from function arguments with stores
3777 to outgoing arguments of sibling calls. We do, however, want
3778 to record the alignment of the stack slot. */
3779 /* ALIGN may well be better aligned than TYPE, e.g. due to
3780 PARM_BOUNDARY. Assume the caller isn't lying. */
3781 set_mem_align (dest, align);
3783 emit_move_insn (dest, x);
3787 /* If part should go in registers, copy that part
3788 into the appropriate registers. Do this now, at the end,
3789 since mem-to-mem copies above may do function calls. */
3790 if (partial > 0 && reg != 0)
3792 /* Handle calls that pass values in multiple non-contiguous locations.
3793 The Irix 6 ABI has examples of this. */
3794 if (GET_CODE (reg) == PARALLEL)
3795 emit_group_load (reg, x, type, -1);
3796 else
3798 gcc_assert (partial % UNITS_PER_WORD == 0);
3799 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3803 if (extra && args_addr == 0 && where_pad == stack_direction)
3804 anti_adjust_stack (GEN_INT (extra));
3806 if (alignment_pad && args_addr == 0)
3807 anti_adjust_stack (alignment_pad);
3810 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3811 operations. */
3813 static rtx
3814 get_subtarget (rtx x)
3816 return (optimize
3817 || x == 0
3818 /* Only registers can be subtargets. */
3819 || !REG_P (x)
3820 /* Don't use hard regs to avoid extending their life. */
3821 || REGNO (x) < FIRST_PSEUDO_REGISTER
3822 ? 0 : x);
3825 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3826 FIELD is a bitfield. Returns true if the optimization was successful,
3827 and there's nothing else to do. */
3829 static bool
3830 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3831 unsigned HOST_WIDE_INT bitpos,
3832 enum machine_mode mode1, rtx str_rtx,
3833 tree to, tree src)
3835 enum machine_mode str_mode = GET_MODE (str_rtx);
3836 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3837 tree op0, op1;
3838 rtx value, result;
3839 optab binop;
3841 if (mode1 != VOIDmode
3842 || bitsize >= BITS_PER_WORD
3843 || str_bitsize > BITS_PER_WORD
3844 || TREE_SIDE_EFFECTS (to)
3845 || TREE_THIS_VOLATILE (to))
3846 return false;
3848 STRIP_NOPS (src);
3849 if (!BINARY_CLASS_P (src)
3850 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3851 return false;
3853 op0 = TREE_OPERAND (src, 0);
3854 op1 = TREE_OPERAND (src, 1);
3855 STRIP_NOPS (op0);
3857 if (!operand_equal_p (to, op0, 0))
3858 return false;
3860 if (MEM_P (str_rtx))
3862 unsigned HOST_WIDE_INT offset1;
3864 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3865 str_mode = word_mode;
3866 str_mode = get_best_mode (bitsize, bitpos,
3867 MEM_ALIGN (str_rtx), str_mode, 0);
3868 if (str_mode == VOIDmode)
3869 return false;
3870 str_bitsize = GET_MODE_BITSIZE (str_mode);
3872 offset1 = bitpos;
3873 bitpos %= str_bitsize;
3874 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3875 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3877 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3878 return false;
3880 /* If the bit field covers the whole REG/MEM, store_field
3881 will likely generate better code. */
3882 if (bitsize >= str_bitsize)
3883 return false;
3885 /* We can't handle fields split across multiple entities. */
3886 if (bitpos + bitsize > str_bitsize)
3887 return false;
3889 if (BYTES_BIG_ENDIAN)
3890 bitpos = str_bitsize - bitpos - bitsize;
3892 switch (TREE_CODE (src))
3894 case PLUS_EXPR:
3895 case MINUS_EXPR:
3896 /* For now, just optimize the case of the topmost bitfield
3897 where we don't need to do any masking and also
3898 1 bit bitfields where xor can be used.
3899 We might win by one instruction for the other bitfields
3900 too if insv/extv instructions aren't used, so that
3901 can be added later. */
3902 if (bitpos + bitsize != str_bitsize
3903 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3904 break;
3906 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3907 value = convert_modes (str_mode,
3908 TYPE_MODE (TREE_TYPE (op1)), value,
3909 TYPE_UNSIGNED (TREE_TYPE (op1)));
3911 /* We may be accessing data outside the field, which means
3912 we can alias adjacent data. */
3913 if (MEM_P (str_rtx))
3915 str_rtx = shallow_copy_rtx (str_rtx);
3916 set_mem_alias_set (str_rtx, 0);
3917 set_mem_expr (str_rtx, 0);
3920 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3921 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3923 value = expand_and (str_mode, value, const1_rtx, NULL);
3924 binop = xor_optab;
3926 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3927 build_int_cst (NULL_TREE, bitpos),
3928 NULL_RTX, 1);
3929 result = expand_binop (str_mode, binop, str_rtx,
3930 value, str_rtx, 1, OPTAB_WIDEN);
3931 if (result != str_rtx)
3932 emit_move_insn (str_rtx, result);
3933 return true;
3935 case BIT_IOR_EXPR:
3936 case BIT_XOR_EXPR:
3937 if (TREE_CODE (op1) != INTEGER_CST)
3938 break;
3939 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3940 value = convert_modes (GET_MODE (str_rtx),
3941 TYPE_MODE (TREE_TYPE (op1)), value,
3942 TYPE_UNSIGNED (TREE_TYPE (op1)));
3944 /* We may be accessing data outside the field, which means
3945 we can alias adjacent data. */
3946 if (MEM_P (str_rtx))
3948 str_rtx = shallow_copy_rtx (str_rtx);
3949 set_mem_alias_set (str_rtx, 0);
3950 set_mem_expr (str_rtx, 0);
3953 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3954 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3956 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3957 - 1);
3958 value = expand_and (GET_MODE (str_rtx), value, mask,
3959 NULL_RTX);
3961 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3962 build_int_cst (NULL_TREE, bitpos),
3963 NULL_RTX, 1);
3964 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3965 value, str_rtx, 1, OPTAB_WIDEN);
3966 if (result != str_rtx)
3967 emit_move_insn (str_rtx, result);
3968 return true;
3970 default:
3971 break;
3974 return false;
3978 /* Expand an assignment that stores the value of FROM into TO. */
3980 void
3981 expand_assignment (tree to, tree from)
3983 rtx to_rtx = 0;
3984 rtx result;
3986 /* Don't crash if the lhs of the assignment was erroneous. */
3988 if (TREE_CODE (to) == ERROR_MARK)
3990 result = expand_normal (from);
3991 return;
3994 /* Assignment of a structure component needs special treatment
3995 if the structure component's rtx is not simply a MEM.
3996 Assignment of an array element at a constant index, and assignment of
3997 an array element in an unaligned packed structure field, has the same
3998 problem. */
3999 if (handled_component_p (to)
4000 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4002 enum machine_mode mode1;
4003 HOST_WIDE_INT bitsize, bitpos;
4004 tree offset;
4005 int unsignedp;
4006 int volatilep = 0;
4007 tree tem;
4009 push_temp_slots ();
4010 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4011 &unsignedp, &volatilep, true);
4013 /* If we are going to use store_bit_field and extract_bit_field,
4014 make sure to_rtx will be safe for multiple use. */
4016 to_rtx = expand_normal (tem);
4018 if (offset != 0)
4020 rtx offset_rtx;
4022 if (!MEM_P (to_rtx))
4024 /* We can get constant negative offsets into arrays with broken
4025 user code. Translate this to a trap instead of ICEing. */
4026 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4027 expand_builtin_trap ();
4028 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4031 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4032 #ifdef POINTERS_EXTEND_UNSIGNED
4033 if (GET_MODE (offset_rtx) != Pmode)
4034 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4035 #else
4036 if (GET_MODE (offset_rtx) != ptr_mode)
4037 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4038 #endif
4040 /* A constant address in TO_RTX can have VOIDmode, we must not try
4041 to call force_reg for that case. Avoid that case. */
4042 if (MEM_P (to_rtx)
4043 && GET_MODE (to_rtx) == BLKmode
4044 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4045 && bitsize > 0
4046 && (bitpos % bitsize) == 0
4047 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4048 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4050 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4051 bitpos = 0;
4054 to_rtx = offset_address (to_rtx, offset_rtx,
4055 highest_pow2_factor_for_target (to,
4056 offset));
4059 /* Handle expand_expr of a complex value returning a CONCAT. */
4060 if (GET_CODE (to_rtx) == CONCAT)
4062 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4064 gcc_assert (bitpos == 0);
4065 result = store_expr (from, to_rtx, false);
4067 else
4069 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4070 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4073 else
4075 if (MEM_P (to_rtx))
4077 /* If the field is at offset zero, we could have been given the
4078 DECL_RTX of the parent struct. Don't munge it. */
4079 to_rtx = shallow_copy_rtx (to_rtx);
4081 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4083 /* Deal with volatile and readonly fields. The former is only
4084 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4085 if (volatilep)
4086 MEM_VOLATILE_P (to_rtx) = 1;
4087 if (component_uses_parent_alias_set (to))
4088 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4091 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4092 to_rtx, to, from))
4093 result = NULL;
4094 else
4095 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4096 TREE_TYPE (tem), get_alias_set (to));
4099 if (result)
4100 preserve_temp_slots (result);
4101 free_temp_slots ();
4102 pop_temp_slots ();
4103 return;
4106 /* If the rhs is a function call and its value is not an aggregate,
4107 call the function before we start to compute the lhs.
4108 This is needed for correct code for cases such as
4109 val = setjmp (buf) on machines where reference to val
4110 requires loading up part of an address in a separate insn.
4112 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4113 since it might be a promoted variable where the zero- or sign- extension
4114 needs to be done. Handling this in the normal way is safe because no
4115 computation is done before the call. */
4116 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4117 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4118 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4119 && REG_P (DECL_RTL (to))))
4121 rtx value;
4123 push_temp_slots ();
4124 value = expand_normal (from);
4125 if (to_rtx == 0)
4126 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4128 /* Handle calls that return values in multiple non-contiguous locations.
4129 The Irix 6 ABI has examples of this. */
4130 if (GET_CODE (to_rtx) == PARALLEL)
4131 emit_group_load (to_rtx, value, TREE_TYPE (from),
4132 int_size_in_bytes (TREE_TYPE (from)));
4133 else if (GET_MODE (to_rtx) == BLKmode)
4134 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4135 else
4137 if (POINTER_TYPE_P (TREE_TYPE (to)))
4138 value = convert_memory_address (GET_MODE (to_rtx), value);
4139 emit_move_insn (to_rtx, value);
4141 preserve_temp_slots (to_rtx);
4142 free_temp_slots ();
4143 pop_temp_slots ();
4144 return;
4147 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4148 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4150 if (to_rtx == 0)
4151 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4153 /* Don't move directly into a return register. */
4154 if (TREE_CODE (to) == RESULT_DECL
4155 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4157 rtx temp;
4159 push_temp_slots ();
4160 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4162 if (GET_CODE (to_rtx) == PARALLEL)
4163 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4164 int_size_in_bytes (TREE_TYPE (from)));
4165 else
4166 emit_move_insn (to_rtx, temp);
4168 preserve_temp_slots (to_rtx);
4169 free_temp_slots ();
4170 pop_temp_slots ();
4171 return;
4174 /* In case we are returning the contents of an object which overlaps
4175 the place the value is being stored, use a safe function when copying
4176 a value through a pointer into a structure value return block. */
4177 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4178 && current_function_returns_struct
4179 && !current_function_returns_pcc_struct)
4181 rtx from_rtx, size;
4183 push_temp_slots ();
4184 size = expr_size (from);
4185 from_rtx = expand_normal (from);
4187 emit_library_call (memmove_libfunc, LCT_NORMAL,
4188 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4189 XEXP (from_rtx, 0), Pmode,
4190 convert_to_mode (TYPE_MODE (sizetype),
4191 size, TYPE_UNSIGNED (sizetype)),
4192 TYPE_MODE (sizetype));
4194 preserve_temp_slots (to_rtx);
4195 free_temp_slots ();
4196 pop_temp_slots ();
4197 return;
4200 /* Compute FROM and store the value in the rtx we got. */
4202 push_temp_slots ();
4203 result = store_expr (from, to_rtx, 0);
4204 preserve_temp_slots (result);
4205 free_temp_slots ();
4206 pop_temp_slots ();
4207 return;
4210 /* Generate code for computing expression EXP,
4211 and storing the value into TARGET.
4213 If the mode is BLKmode then we may return TARGET itself.
4214 It turns out that in BLKmode it doesn't cause a problem.
4215 because C has no operators that could combine two different
4216 assignments into the same BLKmode object with different values
4217 with no sequence point. Will other languages need this to
4218 be more thorough?
4220 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4221 stack, and block moves may need to be treated specially. */
4224 store_expr (tree exp, rtx target, int call_param_p)
4226 rtx temp;
4227 rtx alt_rtl = NULL_RTX;
4228 int dont_return_target = 0;
4230 if (VOID_TYPE_P (TREE_TYPE (exp)))
4232 /* C++ can generate ?: expressions with a throw expression in one
4233 branch and an rvalue in the other. Here, we resolve attempts to
4234 store the throw expression's nonexistent result. */
4235 gcc_assert (!call_param_p);
4236 expand_expr (exp, const0_rtx, VOIDmode, 0);
4237 return NULL_RTX;
4239 if (TREE_CODE (exp) == COMPOUND_EXPR)
4241 /* Perform first part of compound expression, then assign from second
4242 part. */
4243 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4244 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4245 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4247 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4249 /* For conditional expression, get safe form of the target. Then
4250 test the condition, doing the appropriate assignment on either
4251 side. This avoids the creation of unnecessary temporaries.
4252 For non-BLKmode, it is more efficient not to do this. */
4254 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4256 do_pending_stack_adjust ();
4257 NO_DEFER_POP;
4258 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4259 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4260 emit_jump_insn (gen_jump (lab2));
4261 emit_barrier ();
4262 emit_label (lab1);
4263 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4264 emit_label (lab2);
4265 OK_DEFER_POP;
4267 return NULL_RTX;
4269 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4270 /* If this is a scalar in a register that is stored in a wider mode
4271 than the declared mode, compute the result into its declared mode
4272 and then convert to the wider mode. Our value is the computed
4273 expression. */
4275 rtx inner_target = 0;
4277 /* We can do the conversion inside EXP, which will often result
4278 in some optimizations. Do the conversion in two steps: first
4279 change the signedness, if needed, then the extend. But don't
4280 do this if the type of EXP is a subtype of something else
4281 since then the conversion might involve more than just
4282 converting modes. */
4283 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4284 && TREE_TYPE (TREE_TYPE (exp)) == 0
4285 && (!lang_hooks.reduce_bit_field_operations
4286 || (GET_MODE_PRECISION (GET_MODE (target))
4287 == TYPE_PRECISION (TREE_TYPE (exp)))))
4289 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4290 != SUBREG_PROMOTED_UNSIGNED_P (target))
4291 exp = fold_convert
4292 (lang_hooks.types.signed_or_unsigned_type
4293 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4295 exp = fold_convert (lang_hooks.types.type_for_mode
4296 (GET_MODE (SUBREG_REG (target)),
4297 SUBREG_PROMOTED_UNSIGNED_P (target)),
4298 exp);
4300 inner_target = SUBREG_REG (target);
4303 temp = expand_expr (exp, inner_target, VOIDmode,
4304 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4306 /* If TEMP is a VOIDmode constant, use convert_modes to make
4307 sure that we properly convert it. */
4308 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4310 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4311 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4312 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4313 GET_MODE (target), temp,
4314 SUBREG_PROMOTED_UNSIGNED_P (target));
4317 convert_move (SUBREG_REG (target), temp,
4318 SUBREG_PROMOTED_UNSIGNED_P (target));
4320 return NULL_RTX;
4322 else
4324 temp = expand_expr_real (exp, target, GET_MODE (target),
4325 (call_param_p
4326 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4327 &alt_rtl);
4328 /* Return TARGET if it's a specified hardware register.
4329 If TARGET is a volatile mem ref, either return TARGET
4330 or return a reg copied *from* TARGET; ANSI requires this.
4332 Otherwise, if TEMP is not TARGET, return TEMP
4333 if it is constant (for efficiency),
4334 or if we really want the correct value. */
4335 if (!(target && REG_P (target)
4336 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4337 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4338 && ! rtx_equal_p (temp, target)
4339 && CONSTANT_P (temp))
4340 dont_return_target = 1;
4343 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4344 the same as that of TARGET, adjust the constant. This is needed, for
4345 example, in case it is a CONST_DOUBLE and we want only a word-sized
4346 value. */
4347 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4348 && TREE_CODE (exp) != ERROR_MARK
4349 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4350 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4351 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4353 /* If value was not generated in the target, store it there.
4354 Convert the value to TARGET's type first if necessary and emit the
4355 pending incrementations that have been queued when expanding EXP.
4356 Note that we cannot emit the whole queue blindly because this will
4357 effectively disable the POST_INC optimization later.
4359 If TEMP and TARGET compare equal according to rtx_equal_p, but
4360 one or both of them are volatile memory refs, we have to distinguish
4361 two cases:
4362 - expand_expr has used TARGET. In this case, we must not generate
4363 another copy. This can be detected by TARGET being equal according
4364 to == .
4365 - expand_expr has not used TARGET - that means that the source just
4366 happens to have the same RTX form. Since temp will have been created
4367 by expand_expr, it will compare unequal according to == .
4368 We must generate a copy in this case, to reach the correct number
4369 of volatile memory references. */
4371 if ((! rtx_equal_p (temp, target)
4372 || (temp != target && (side_effects_p (temp)
4373 || side_effects_p (target))))
4374 && TREE_CODE (exp) != ERROR_MARK
4375 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4376 but TARGET is not valid memory reference, TEMP will differ
4377 from TARGET although it is really the same location. */
4378 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4379 /* If there's nothing to copy, don't bother. Don't call
4380 expr_size unless necessary, because some front-ends (C++)
4381 expr_size-hook must not be given objects that are not
4382 supposed to be bit-copied or bit-initialized. */
4383 && expr_size (exp) != const0_rtx)
4385 if (GET_MODE (temp) != GET_MODE (target)
4386 && GET_MODE (temp) != VOIDmode)
4388 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4389 if (dont_return_target)
4391 /* In this case, we will return TEMP,
4392 so make sure it has the proper mode.
4393 But don't forget to store the value into TARGET. */
4394 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4395 emit_move_insn (target, temp);
4397 else
4398 convert_move (target, temp, unsignedp);
4401 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4403 /* Handle copying a string constant into an array. The string
4404 constant may be shorter than the array. So copy just the string's
4405 actual length, and clear the rest. First get the size of the data
4406 type of the string, which is actually the size of the target. */
4407 rtx size = expr_size (exp);
4409 if (GET_CODE (size) == CONST_INT
4410 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4411 emit_block_move (target, temp, size,
4412 (call_param_p
4413 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4414 else
4416 /* Compute the size of the data to copy from the string. */
4417 tree copy_size
4418 = size_binop (MIN_EXPR,
4419 make_tree (sizetype, size),
4420 size_int (TREE_STRING_LENGTH (exp)));
4421 rtx copy_size_rtx
4422 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4423 (call_param_p
4424 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4425 rtx label = 0;
4427 /* Copy that much. */
4428 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4429 TYPE_UNSIGNED (sizetype));
4430 emit_block_move (target, temp, copy_size_rtx,
4431 (call_param_p
4432 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4434 /* Figure out how much is left in TARGET that we have to clear.
4435 Do all calculations in ptr_mode. */
4436 if (GET_CODE (copy_size_rtx) == CONST_INT)
4438 size = plus_constant (size, -INTVAL (copy_size_rtx));
4439 target = adjust_address (target, BLKmode,
4440 INTVAL (copy_size_rtx));
4442 else
4444 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4445 copy_size_rtx, NULL_RTX, 0,
4446 OPTAB_LIB_WIDEN);
4448 #ifdef POINTERS_EXTEND_UNSIGNED
4449 if (GET_MODE (copy_size_rtx) != Pmode)
4450 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4451 TYPE_UNSIGNED (sizetype));
4452 #endif
4454 target = offset_address (target, copy_size_rtx,
4455 highest_pow2_factor (copy_size));
4456 label = gen_label_rtx ();
4457 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4458 GET_MODE (size), 0, label);
4461 if (size != const0_rtx)
4462 clear_storage (target, size, BLOCK_OP_NORMAL);
4464 if (label)
4465 emit_label (label);
4468 /* Handle calls that return values in multiple non-contiguous locations.
4469 The Irix 6 ABI has examples of this. */
4470 else if (GET_CODE (target) == PARALLEL)
4471 emit_group_load (target, temp, TREE_TYPE (exp),
4472 int_size_in_bytes (TREE_TYPE (exp)));
4473 else if (GET_MODE (temp) == BLKmode)
4474 emit_block_move (target, temp, expr_size (exp),
4475 (call_param_p
4476 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4477 else
4479 temp = force_operand (temp, target);
4480 if (temp != target)
4481 emit_move_insn (target, temp);
4485 return NULL_RTX;
4488 /* Examine CTOR to discover:
4489 * how many scalar fields are set to nonzero values,
4490 and place it in *P_NZ_ELTS;
4491 * how many scalar fields are set to non-constant values,
4492 and place it in *P_NC_ELTS; and
4493 * how many scalar fields in total are in CTOR,
4494 and place it in *P_ELT_COUNT.
4495 * if a type is a union, and the initializer from the constructor
4496 is not the largest element in the union, then set *p_must_clear. */
4498 static void
4499 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4500 HOST_WIDE_INT *p_nc_elts,
4501 HOST_WIDE_INT *p_elt_count,
4502 bool *p_must_clear)
4504 unsigned HOST_WIDE_INT idx;
4505 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4506 tree value, purpose;
4508 nz_elts = 0;
4509 nc_elts = 0;
4510 elt_count = 0;
4512 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4514 HOST_WIDE_INT mult;
4516 mult = 1;
4517 if (TREE_CODE (purpose) == RANGE_EXPR)
4519 tree lo_index = TREE_OPERAND (purpose, 0);
4520 tree hi_index = TREE_OPERAND (purpose, 1);
4522 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4523 mult = (tree_low_cst (hi_index, 1)
4524 - tree_low_cst (lo_index, 1) + 1);
4527 switch (TREE_CODE (value))
4529 case CONSTRUCTOR:
4531 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4532 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4533 nz_elts += mult * nz;
4534 nc_elts += mult * nc;
4535 elt_count += mult * ic;
4537 break;
4539 case INTEGER_CST:
4540 case REAL_CST:
4541 if (!initializer_zerop (value))
4542 nz_elts += mult;
4543 elt_count += mult;
4544 break;
4546 case STRING_CST:
4547 nz_elts += mult * TREE_STRING_LENGTH (value);
4548 elt_count += mult * TREE_STRING_LENGTH (value);
4549 break;
4551 case COMPLEX_CST:
4552 if (!initializer_zerop (TREE_REALPART (value)))
4553 nz_elts += mult;
4554 if (!initializer_zerop (TREE_IMAGPART (value)))
4555 nz_elts += mult;
4556 elt_count += mult;
4557 break;
4559 case VECTOR_CST:
4561 tree v;
4562 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4564 if (!initializer_zerop (TREE_VALUE (v)))
4565 nz_elts += mult;
4566 elt_count += mult;
4569 break;
4571 default:
4572 nz_elts += mult;
4573 elt_count += mult;
4574 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4575 nc_elts += mult;
4576 break;
4580 if (!*p_must_clear
4581 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4582 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4584 tree init_sub_type;
4585 bool clear_this = true;
4587 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4589 /* We don't expect more than one element of the union to be
4590 initialized. Not sure what we should do otherwise... */
4591 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4592 == 1);
4594 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4595 CONSTRUCTOR_ELTS (ctor),
4596 0)->value);
4598 /* ??? We could look at each element of the union, and find the
4599 largest element. Which would avoid comparing the size of the
4600 initialized element against any tail padding in the union.
4601 Doesn't seem worth the effort... */
4602 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4603 TYPE_SIZE (init_sub_type)) == 1)
4605 /* And now we have to find out if the element itself is fully
4606 constructed. E.g. for union { struct { int a, b; } s; } u
4607 = { .s = { .a = 1 } }. */
4608 if (elt_count == count_type_elements (init_sub_type, false))
4609 clear_this = false;
4613 *p_must_clear = clear_this;
4616 *p_nz_elts += nz_elts;
4617 *p_nc_elts += nc_elts;
4618 *p_elt_count += elt_count;
4621 void
4622 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4623 HOST_WIDE_INT *p_nc_elts,
4624 HOST_WIDE_INT *p_elt_count,
4625 bool *p_must_clear)
4627 *p_nz_elts = 0;
4628 *p_nc_elts = 0;
4629 *p_elt_count = 0;
4630 *p_must_clear = false;
4631 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4632 p_must_clear);
4635 /* Count the number of scalars in TYPE. Return -1 on overflow or
4636 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
4637 array member at the end of the structure. */
4639 HOST_WIDE_INT
4640 count_type_elements (tree type, bool allow_flexarr)
4642 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4643 switch (TREE_CODE (type))
4645 case ARRAY_TYPE:
4647 tree telts = array_type_nelts (type);
4648 if (telts && host_integerp (telts, 1))
4650 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4651 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4652 if (n == 0)
4653 return 0;
4654 else if (max / n > m)
4655 return n * m;
4657 return -1;
4660 case RECORD_TYPE:
4662 HOST_WIDE_INT n = 0, t;
4663 tree f;
4665 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4666 if (TREE_CODE (f) == FIELD_DECL)
4668 t = count_type_elements (TREE_TYPE (f), false);
4669 if (t < 0)
4671 /* Check for structures with flexible array member. */
4672 tree tf = TREE_TYPE (f);
4673 if (allow_flexarr
4674 && TREE_CHAIN (f) == NULL
4675 && TREE_CODE (tf) == ARRAY_TYPE
4676 && TYPE_DOMAIN (tf)
4677 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4678 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4679 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4680 && int_size_in_bytes (type) >= 0)
4681 break;
4683 return -1;
4685 n += t;
4688 return n;
4691 case UNION_TYPE:
4692 case QUAL_UNION_TYPE:
4694 /* Ho hum. How in the world do we guess here? Clearly it isn't
4695 right to count the fields. Guess based on the number of words. */
4696 HOST_WIDE_INT n = int_size_in_bytes (type);
4697 if (n < 0)
4698 return -1;
4699 return n / UNITS_PER_WORD;
4702 case COMPLEX_TYPE:
4703 return 2;
4705 case VECTOR_TYPE:
4706 return TYPE_VECTOR_SUBPARTS (type);
4708 case INTEGER_TYPE:
4709 case REAL_TYPE:
4710 case ENUMERAL_TYPE:
4711 case BOOLEAN_TYPE:
4712 case POINTER_TYPE:
4713 case OFFSET_TYPE:
4714 case REFERENCE_TYPE:
4715 return 1;
4717 case VOID_TYPE:
4718 case METHOD_TYPE:
4719 case FUNCTION_TYPE:
4720 case LANG_TYPE:
4721 default:
4722 gcc_unreachable ();
4726 /* Return 1 if EXP contains mostly (3/4) zeros. */
4728 static int
4729 mostly_zeros_p (tree exp)
4731 if (TREE_CODE (exp) == CONSTRUCTOR)
4734 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4735 bool must_clear;
4737 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4738 if (must_clear)
4739 return 1;
4741 elts = count_type_elements (TREE_TYPE (exp), false);
4743 return nz_elts < elts / 4;
4746 return initializer_zerop (exp);
4749 /* Return 1 if EXP contains all zeros. */
4751 static int
4752 all_zeros_p (tree exp)
4754 if (TREE_CODE (exp) == CONSTRUCTOR)
4757 HOST_WIDE_INT nz_elts, nc_elts, count;
4758 bool must_clear;
4760 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4761 return nz_elts == 0;
4764 return initializer_zerop (exp);
4767 /* Helper function for store_constructor.
4768 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4769 TYPE is the type of the CONSTRUCTOR, not the element type.
4770 CLEARED is as for store_constructor.
4771 ALIAS_SET is the alias set to use for any stores.
4773 This provides a recursive shortcut back to store_constructor when it isn't
4774 necessary to go through store_field. This is so that we can pass through
4775 the cleared field to let store_constructor know that we may not have to
4776 clear a substructure if the outer structure has already been cleared. */
4778 static void
4779 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4780 HOST_WIDE_INT bitpos, enum machine_mode mode,
4781 tree exp, tree type, int cleared, int alias_set)
4783 if (TREE_CODE (exp) == CONSTRUCTOR
4784 /* We can only call store_constructor recursively if the size and
4785 bit position are on a byte boundary. */
4786 && bitpos % BITS_PER_UNIT == 0
4787 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4788 /* If we have a nonzero bitpos for a register target, then we just
4789 let store_field do the bitfield handling. This is unlikely to
4790 generate unnecessary clear instructions anyways. */
4791 && (bitpos == 0 || MEM_P (target)))
4793 if (MEM_P (target))
4794 target
4795 = adjust_address (target,
4796 GET_MODE (target) == BLKmode
4797 || 0 != (bitpos
4798 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4799 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4802 /* Update the alias set, if required. */
4803 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4804 && MEM_ALIAS_SET (target) != 0)
4806 target = copy_rtx (target);
4807 set_mem_alias_set (target, alias_set);
4810 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4812 else
4813 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4816 /* Store the value of constructor EXP into the rtx TARGET.
4817 TARGET is either a REG or a MEM; we know it cannot conflict, since
4818 safe_from_p has been called.
4819 CLEARED is true if TARGET is known to have been zero'd.
4820 SIZE is the number of bytes of TARGET we are allowed to modify: this
4821 may not be the same as the size of EXP if we are assigning to a field
4822 which has been packed to exclude padding bits. */
4824 static void
4825 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4827 tree type = TREE_TYPE (exp);
4828 #ifdef WORD_REGISTER_OPERATIONS
4829 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4830 #endif
4832 switch (TREE_CODE (type))
4834 case RECORD_TYPE:
4835 case UNION_TYPE:
4836 case QUAL_UNION_TYPE:
4838 unsigned HOST_WIDE_INT idx;
4839 tree field, value;
4841 /* If size is zero or the target is already cleared, do nothing. */
4842 if (size == 0 || cleared)
4843 cleared = 1;
4844 /* We either clear the aggregate or indicate the value is dead. */
4845 else if ((TREE_CODE (type) == UNION_TYPE
4846 || TREE_CODE (type) == QUAL_UNION_TYPE)
4847 && ! CONSTRUCTOR_ELTS (exp))
4848 /* If the constructor is empty, clear the union. */
4850 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4851 cleared = 1;
4854 /* If we are building a static constructor into a register,
4855 set the initial value as zero so we can fold the value into
4856 a constant. But if more than one register is involved,
4857 this probably loses. */
4858 else if (REG_P (target) && TREE_STATIC (exp)
4859 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4861 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4862 cleared = 1;
4865 /* If the constructor has fewer fields than the structure or
4866 if we are initializing the structure to mostly zeros, clear
4867 the whole structure first. Don't do this if TARGET is a
4868 register whose mode size isn't equal to SIZE since
4869 clear_storage can't handle this case. */
4870 else if (size > 0
4871 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4872 != fields_length (type))
4873 || mostly_zeros_p (exp))
4874 && (!REG_P (target)
4875 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4876 == size)))
4878 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4879 cleared = 1;
4882 if (! cleared)
4883 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4885 /* Store each element of the constructor into the
4886 corresponding field of TARGET. */
4887 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4889 enum machine_mode mode;
4890 HOST_WIDE_INT bitsize;
4891 HOST_WIDE_INT bitpos = 0;
4892 tree offset;
4893 rtx to_rtx = target;
4895 /* Just ignore missing fields. We cleared the whole
4896 structure, above, if any fields are missing. */
4897 if (field == 0)
4898 continue;
4900 if (cleared && initializer_zerop (value))
4901 continue;
4903 if (host_integerp (DECL_SIZE (field), 1))
4904 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4905 else
4906 bitsize = -1;
4908 mode = DECL_MODE (field);
4909 if (DECL_BIT_FIELD (field))
4910 mode = VOIDmode;
4912 offset = DECL_FIELD_OFFSET (field);
4913 if (host_integerp (offset, 0)
4914 && host_integerp (bit_position (field), 0))
4916 bitpos = int_bit_position (field);
4917 offset = 0;
4919 else
4920 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4922 if (offset)
4924 rtx offset_rtx;
4926 offset
4927 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4928 make_tree (TREE_TYPE (exp),
4929 target));
4931 offset_rtx = expand_normal (offset);
4932 gcc_assert (MEM_P (to_rtx));
4934 #ifdef POINTERS_EXTEND_UNSIGNED
4935 if (GET_MODE (offset_rtx) != Pmode)
4936 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4937 #else
4938 if (GET_MODE (offset_rtx) != ptr_mode)
4939 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4940 #endif
4942 to_rtx = offset_address (to_rtx, offset_rtx,
4943 highest_pow2_factor (offset));
4946 #ifdef WORD_REGISTER_OPERATIONS
4947 /* If this initializes a field that is smaller than a
4948 word, at the start of a word, try to widen it to a full
4949 word. This special case allows us to output C++ member
4950 function initializations in a form that the optimizers
4951 can understand. */
4952 if (REG_P (target)
4953 && bitsize < BITS_PER_WORD
4954 && bitpos % BITS_PER_WORD == 0
4955 && GET_MODE_CLASS (mode) == MODE_INT
4956 && TREE_CODE (value) == INTEGER_CST
4957 && exp_size >= 0
4958 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4960 tree type = TREE_TYPE (value);
4962 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4964 type = lang_hooks.types.type_for_size
4965 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4966 value = fold_convert (type, value);
4969 if (BYTES_BIG_ENDIAN)
4970 value
4971 = fold_build2 (LSHIFT_EXPR, type, value,
4972 build_int_cst (type,
4973 BITS_PER_WORD - bitsize));
4974 bitsize = BITS_PER_WORD;
4975 mode = word_mode;
4977 #endif
4979 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4980 && DECL_NONADDRESSABLE_P (field))
4982 to_rtx = copy_rtx (to_rtx);
4983 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4986 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4987 value, type, cleared,
4988 get_alias_set (TREE_TYPE (field)));
4990 break;
4992 case ARRAY_TYPE:
4994 tree value, index;
4995 unsigned HOST_WIDE_INT i;
4996 int need_to_clear;
4997 tree domain;
4998 tree elttype = TREE_TYPE (type);
4999 int const_bounds_p;
5000 HOST_WIDE_INT minelt = 0;
5001 HOST_WIDE_INT maxelt = 0;
5003 domain = TYPE_DOMAIN (type);
5004 const_bounds_p = (TYPE_MIN_VALUE (domain)
5005 && TYPE_MAX_VALUE (domain)
5006 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5007 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5009 /* If we have constant bounds for the range of the type, get them. */
5010 if (const_bounds_p)
5012 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5013 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5016 /* If the constructor has fewer elements than the array, clear
5017 the whole array first. Similarly if this is static
5018 constructor of a non-BLKmode object. */
5019 if (cleared)
5020 need_to_clear = 0;
5021 else if (REG_P (target) && TREE_STATIC (exp))
5022 need_to_clear = 1;
5023 else
5025 unsigned HOST_WIDE_INT idx;
5026 tree index, value;
5027 HOST_WIDE_INT count = 0, zero_count = 0;
5028 need_to_clear = ! const_bounds_p;
5030 /* This loop is a more accurate version of the loop in
5031 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5032 is also needed to check for missing elements. */
5033 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5035 HOST_WIDE_INT this_node_count;
5037 if (need_to_clear)
5038 break;
5040 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5042 tree lo_index = TREE_OPERAND (index, 0);
5043 tree hi_index = TREE_OPERAND (index, 1);
5045 if (! host_integerp (lo_index, 1)
5046 || ! host_integerp (hi_index, 1))
5048 need_to_clear = 1;
5049 break;
5052 this_node_count = (tree_low_cst (hi_index, 1)
5053 - tree_low_cst (lo_index, 1) + 1);
5055 else
5056 this_node_count = 1;
5058 count += this_node_count;
5059 if (mostly_zeros_p (value))
5060 zero_count += this_node_count;
5063 /* Clear the entire array first if there are any missing
5064 elements, or if the incidence of zero elements is >=
5065 75%. */
5066 if (! need_to_clear
5067 && (count < maxelt - minelt + 1
5068 || 4 * zero_count >= 3 * count))
5069 need_to_clear = 1;
5072 if (need_to_clear && size > 0)
5074 if (REG_P (target))
5075 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5076 else
5077 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5078 cleared = 1;
5081 if (!cleared && REG_P (target))
5082 /* Inform later passes that the old value is dead. */
5083 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5085 /* Store each element of the constructor into the
5086 corresponding element of TARGET, determined by counting the
5087 elements. */
5088 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5090 enum machine_mode mode;
5091 HOST_WIDE_INT bitsize;
5092 HOST_WIDE_INT bitpos;
5093 int unsignedp;
5094 rtx xtarget = target;
5096 if (cleared && initializer_zerop (value))
5097 continue;
5099 unsignedp = TYPE_UNSIGNED (elttype);
5100 mode = TYPE_MODE (elttype);
5101 if (mode == BLKmode)
5102 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5103 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5104 : -1);
5105 else
5106 bitsize = GET_MODE_BITSIZE (mode);
5108 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5110 tree lo_index = TREE_OPERAND (index, 0);
5111 tree hi_index = TREE_OPERAND (index, 1);
5112 rtx index_r, pos_rtx;
5113 HOST_WIDE_INT lo, hi, count;
5114 tree position;
5116 /* If the range is constant and "small", unroll the loop. */
5117 if (const_bounds_p
5118 && host_integerp (lo_index, 0)
5119 && host_integerp (hi_index, 0)
5120 && (lo = tree_low_cst (lo_index, 0),
5121 hi = tree_low_cst (hi_index, 0),
5122 count = hi - lo + 1,
5123 (!MEM_P (target)
5124 || count <= 2
5125 || (host_integerp (TYPE_SIZE (elttype), 1)
5126 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5127 <= 40 * 8)))))
5129 lo -= minelt; hi -= minelt;
5130 for (; lo <= hi; lo++)
5132 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5134 if (MEM_P (target)
5135 && !MEM_KEEP_ALIAS_SET_P (target)
5136 && TREE_CODE (type) == ARRAY_TYPE
5137 && TYPE_NONALIASED_COMPONENT (type))
5139 target = copy_rtx (target);
5140 MEM_KEEP_ALIAS_SET_P (target) = 1;
5143 store_constructor_field
5144 (target, bitsize, bitpos, mode, value, type, cleared,
5145 get_alias_set (elttype));
5148 else
5150 rtx loop_start = gen_label_rtx ();
5151 rtx loop_end = gen_label_rtx ();
5152 tree exit_cond;
5154 expand_normal (hi_index);
5155 unsignedp = TYPE_UNSIGNED (domain);
5157 index = build_decl (VAR_DECL, NULL_TREE, domain);
5159 index_r
5160 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5161 &unsignedp, 0));
5162 SET_DECL_RTL (index, index_r);
5163 store_expr (lo_index, index_r, 0);
5165 /* Build the head of the loop. */
5166 do_pending_stack_adjust ();
5167 emit_label (loop_start);
5169 /* Assign value to element index. */
5170 position =
5171 fold_convert (ssizetype,
5172 fold_build2 (MINUS_EXPR,
5173 TREE_TYPE (index),
5174 index,
5175 TYPE_MIN_VALUE (domain)));
5177 position =
5178 size_binop (MULT_EXPR, position,
5179 fold_convert (ssizetype,
5180 TYPE_SIZE_UNIT (elttype)));
5182 pos_rtx = expand_normal (position);
5183 xtarget = offset_address (target, pos_rtx,
5184 highest_pow2_factor (position));
5185 xtarget = adjust_address (xtarget, mode, 0);
5186 if (TREE_CODE (value) == CONSTRUCTOR)
5187 store_constructor (value, xtarget, cleared,
5188 bitsize / BITS_PER_UNIT);
5189 else
5190 store_expr (value, xtarget, 0);
5192 /* Generate a conditional jump to exit the loop. */
5193 exit_cond = build2 (LT_EXPR, integer_type_node,
5194 index, hi_index);
5195 jumpif (exit_cond, loop_end);
5197 /* Update the loop counter, and jump to the head of
5198 the loop. */
5199 expand_assignment (index,
5200 build2 (PLUS_EXPR, TREE_TYPE (index),
5201 index, integer_one_node));
5203 emit_jump (loop_start);
5205 /* Build the end of the loop. */
5206 emit_label (loop_end);
5209 else if ((index != 0 && ! host_integerp (index, 0))
5210 || ! host_integerp (TYPE_SIZE (elttype), 1))
5212 tree position;
5214 if (index == 0)
5215 index = ssize_int (1);
5217 if (minelt)
5218 index = fold_convert (ssizetype,
5219 fold_build2 (MINUS_EXPR,
5220 TREE_TYPE (index),
5221 index,
5222 TYPE_MIN_VALUE (domain)));
5224 position =
5225 size_binop (MULT_EXPR, index,
5226 fold_convert (ssizetype,
5227 TYPE_SIZE_UNIT (elttype)));
5228 xtarget = offset_address (target,
5229 expand_normal (position),
5230 highest_pow2_factor (position));
5231 xtarget = adjust_address (xtarget, mode, 0);
5232 store_expr (value, xtarget, 0);
5234 else
5236 if (index != 0)
5237 bitpos = ((tree_low_cst (index, 0) - minelt)
5238 * tree_low_cst (TYPE_SIZE (elttype), 1));
5239 else
5240 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5242 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5243 && TREE_CODE (type) == ARRAY_TYPE
5244 && TYPE_NONALIASED_COMPONENT (type))
5246 target = copy_rtx (target);
5247 MEM_KEEP_ALIAS_SET_P (target) = 1;
5249 store_constructor_field (target, bitsize, bitpos, mode, value,
5250 type, cleared, get_alias_set (elttype));
5253 break;
5256 case VECTOR_TYPE:
5258 unsigned HOST_WIDE_INT idx;
5259 constructor_elt *ce;
5260 int i;
5261 int need_to_clear;
5262 int icode = 0;
5263 tree elttype = TREE_TYPE (type);
5264 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5265 enum machine_mode eltmode = TYPE_MODE (elttype);
5266 HOST_WIDE_INT bitsize;
5267 HOST_WIDE_INT bitpos;
5268 rtvec vector = NULL;
5269 unsigned n_elts;
5271 gcc_assert (eltmode != BLKmode);
5273 n_elts = TYPE_VECTOR_SUBPARTS (type);
5274 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5276 enum machine_mode mode = GET_MODE (target);
5278 icode = (int) vec_init_optab->handlers[mode].insn_code;
5279 if (icode != CODE_FOR_nothing)
5281 unsigned int i;
5283 vector = rtvec_alloc (n_elts);
5284 for (i = 0; i < n_elts; i++)
5285 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5289 /* If the constructor has fewer elements than the vector,
5290 clear the whole array first. Similarly if this is static
5291 constructor of a non-BLKmode object. */
5292 if (cleared)
5293 need_to_clear = 0;
5294 else if (REG_P (target) && TREE_STATIC (exp))
5295 need_to_clear = 1;
5296 else
5298 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5299 tree value;
5301 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5303 int n_elts_here = tree_low_cst
5304 (int_const_binop (TRUNC_DIV_EXPR,
5305 TYPE_SIZE (TREE_TYPE (value)),
5306 TYPE_SIZE (elttype), 0), 1);
5308 count += n_elts_here;
5309 if (mostly_zeros_p (value))
5310 zero_count += n_elts_here;
5313 /* Clear the entire vector first if there are any missing elements,
5314 or if the incidence of zero elements is >= 75%. */
5315 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5318 if (need_to_clear && size > 0 && !vector)
5320 if (REG_P (target))
5321 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5322 else
5323 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5324 cleared = 1;
5327 /* Inform later passes that the old value is dead. */
5328 if (!cleared && !vector && REG_P (target))
5329 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5331 /* Store each element of the constructor into the corresponding
5332 element of TARGET, determined by counting the elements. */
5333 for (idx = 0, i = 0;
5334 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5335 idx++, i += bitsize / elt_size)
5337 HOST_WIDE_INT eltpos;
5338 tree value = ce->value;
5340 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5341 if (cleared && initializer_zerop (value))
5342 continue;
5344 if (ce->index)
5345 eltpos = tree_low_cst (ce->index, 1);
5346 else
5347 eltpos = i;
5349 if (vector)
5351 /* Vector CONSTRUCTORs should only be built from smaller
5352 vectors in the case of BLKmode vectors. */
5353 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5354 RTVEC_ELT (vector, eltpos)
5355 = expand_normal (value);
5357 else
5359 enum machine_mode value_mode =
5360 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5361 ? TYPE_MODE (TREE_TYPE (value))
5362 : eltmode;
5363 bitpos = eltpos * elt_size;
5364 store_constructor_field (target, bitsize, bitpos,
5365 value_mode, value, type,
5366 cleared, get_alias_set (elttype));
5370 if (vector)
5371 emit_insn (GEN_FCN (icode)
5372 (target,
5373 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5374 break;
5377 default:
5378 gcc_unreachable ();
5382 /* Store the value of EXP (an expression tree)
5383 into a subfield of TARGET which has mode MODE and occupies
5384 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5385 If MODE is VOIDmode, it means that we are storing into a bit-field.
5387 Always return const0_rtx unless we have something particular to
5388 return.
5390 TYPE is the type of the underlying object,
5392 ALIAS_SET is the alias set for the destination. This value will
5393 (in general) be different from that for TARGET, since TARGET is a
5394 reference to the containing structure. */
5396 static rtx
5397 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5398 enum machine_mode mode, tree exp, tree type, int alias_set)
5400 HOST_WIDE_INT width_mask = 0;
5402 if (TREE_CODE (exp) == ERROR_MARK)
5403 return const0_rtx;
5405 /* If we have nothing to store, do nothing unless the expression has
5406 side-effects. */
5407 if (bitsize == 0)
5408 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5409 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5410 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5412 /* If we are storing into an unaligned field of an aligned union that is
5413 in a register, we may have the mode of TARGET being an integer mode but
5414 MODE == BLKmode. In that case, get an aligned object whose size and
5415 alignment are the same as TARGET and store TARGET into it (we can avoid
5416 the store if the field being stored is the entire width of TARGET). Then
5417 call ourselves recursively to store the field into a BLKmode version of
5418 that object. Finally, load from the object into TARGET. This is not
5419 very efficient in general, but should only be slightly more expensive
5420 than the otherwise-required unaligned accesses. Perhaps this can be
5421 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5422 twice, once with emit_move_insn and once via store_field. */
5424 if (mode == BLKmode
5425 && (REG_P (target) || GET_CODE (target) == SUBREG))
5427 rtx object = assign_temp (type, 0, 1, 1);
5428 rtx blk_object = adjust_address (object, BLKmode, 0);
5430 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5431 emit_move_insn (object, target);
5433 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5435 emit_move_insn (target, object);
5437 /* We want to return the BLKmode version of the data. */
5438 return blk_object;
5441 if (GET_CODE (target) == CONCAT)
5443 /* We're storing into a struct containing a single __complex. */
5445 gcc_assert (!bitpos);
5446 return store_expr (exp, target, 0);
5449 /* If the structure is in a register or if the component
5450 is a bit field, we cannot use addressing to access it.
5451 Use bit-field techniques or SUBREG to store in it. */
5453 if (mode == VOIDmode
5454 || (mode != BLKmode && ! direct_store[(int) mode]
5455 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5456 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5457 || REG_P (target)
5458 || GET_CODE (target) == SUBREG
5459 /* If the field isn't aligned enough to store as an ordinary memref,
5460 store it as a bit field. */
5461 || (mode != BLKmode
5462 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5463 || bitpos % GET_MODE_ALIGNMENT (mode))
5464 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5465 || (bitpos % BITS_PER_UNIT != 0)))
5466 /* If the RHS and field are a constant size and the size of the
5467 RHS isn't the same size as the bitfield, we must use bitfield
5468 operations. */
5469 || (bitsize >= 0
5470 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5471 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5473 rtx temp;
5475 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5476 implies a mask operation. If the precision is the same size as
5477 the field we're storing into, that mask is redundant. This is
5478 particularly common with bit field assignments generated by the
5479 C front end. */
5480 if (TREE_CODE (exp) == NOP_EXPR)
5482 tree type = TREE_TYPE (exp);
5483 if (INTEGRAL_TYPE_P (type)
5484 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5485 && bitsize == TYPE_PRECISION (type))
5487 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5488 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5489 exp = TREE_OPERAND (exp, 0);
5493 temp = expand_normal (exp);
5495 /* If BITSIZE is narrower than the size of the type of EXP
5496 we will be narrowing TEMP. Normally, what's wanted are the
5497 low-order bits. However, if EXP's type is a record and this is
5498 big-endian machine, we want the upper BITSIZE bits. */
5499 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5500 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5501 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5502 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5503 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5504 - bitsize),
5505 NULL_RTX, 1);
5507 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5508 MODE. */
5509 if (mode != VOIDmode && mode != BLKmode
5510 && mode != TYPE_MODE (TREE_TYPE (exp)))
5511 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5513 /* If the modes of TARGET and TEMP are both BLKmode, both
5514 must be in memory and BITPOS must be aligned on a byte
5515 boundary. If so, we simply do a block copy. */
5516 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5518 gcc_assert (MEM_P (target) && MEM_P (temp)
5519 && !(bitpos % BITS_PER_UNIT));
5521 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5522 emit_block_move (target, temp,
5523 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5524 / BITS_PER_UNIT),
5525 BLOCK_OP_NORMAL);
5527 return const0_rtx;
5530 /* Store the value in the bitfield. */
5531 store_bit_field (target, bitsize, bitpos, mode, temp);
5533 return const0_rtx;
5535 else
5537 /* Now build a reference to just the desired component. */
5538 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5540 if (to_rtx == target)
5541 to_rtx = copy_rtx (to_rtx);
5543 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5544 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5545 set_mem_alias_set (to_rtx, alias_set);
5547 return store_expr (exp, to_rtx, 0);
5551 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5552 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5553 codes and find the ultimate containing object, which we return.
5555 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5556 bit position, and *PUNSIGNEDP to the signedness of the field.
5557 If the position of the field is variable, we store a tree
5558 giving the variable offset (in units) in *POFFSET.
5559 This offset is in addition to the bit position.
5560 If the position is not variable, we store 0 in *POFFSET.
5562 If any of the extraction expressions is volatile,
5563 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5565 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5566 is a mode that can be used to access the field. In that case, *PBITSIZE
5567 is redundant.
5569 If the field describes a variable-sized object, *PMODE is set to
5570 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5571 this case, but the address of the object can be found.
5573 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5574 look through nodes that serve as markers of a greater alignment than
5575 the one that can be deduced from the expression. These nodes make it
5576 possible for front-ends to prevent temporaries from being created by
5577 the middle-end on alignment considerations. For that purpose, the
5578 normal operating mode at high-level is to always pass FALSE so that
5579 the ultimate containing object is really returned; moreover, the
5580 associated predicate handled_component_p will always return TRUE
5581 on these nodes, thus indicating that they are essentially handled
5582 by get_inner_reference. TRUE should only be passed when the caller
5583 is scanning the expression in order to build another representation
5584 and specifically knows how to handle these nodes; as such, this is
5585 the normal operating mode in the RTL expanders. */
5587 tree
5588 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5589 HOST_WIDE_INT *pbitpos, tree *poffset,
5590 enum machine_mode *pmode, int *punsignedp,
5591 int *pvolatilep, bool keep_aligning)
5593 tree size_tree = 0;
5594 enum machine_mode mode = VOIDmode;
5595 tree offset = size_zero_node;
5596 tree bit_offset = bitsize_zero_node;
5597 tree tem;
5599 /* First get the mode, signedness, and size. We do this from just the
5600 outermost expression. */
5601 if (TREE_CODE (exp) == COMPONENT_REF)
5603 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5604 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5605 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5607 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5609 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5611 size_tree = TREE_OPERAND (exp, 1);
5612 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5614 else
5616 mode = TYPE_MODE (TREE_TYPE (exp));
5617 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5619 if (mode == BLKmode)
5620 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5621 else
5622 *pbitsize = GET_MODE_BITSIZE (mode);
5625 if (size_tree != 0)
5627 if (! host_integerp (size_tree, 1))
5628 mode = BLKmode, *pbitsize = -1;
5629 else
5630 *pbitsize = tree_low_cst (size_tree, 1);
5633 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5634 and find the ultimate containing object. */
5635 while (1)
5637 switch (TREE_CODE (exp))
5639 case BIT_FIELD_REF:
5640 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5641 TREE_OPERAND (exp, 2));
5642 break;
5644 case COMPONENT_REF:
5646 tree field = TREE_OPERAND (exp, 1);
5647 tree this_offset = component_ref_field_offset (exp);
5649 /* If this field hasn't been filled in yet, don't go past it.
5650 This should only happen when folding expressions made during
5651 type construction. */
5652 if (this_offset == 0)
5653 break;
5655 offset = size_binop (PLUS_EXPR, offset, this_offset);
5656 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5657 DECL_FIELD_BIT_OFFSET (field));
5659 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5661 break;
5663 case ARRAY_REF:
5664 case ARRAY_RANGE_REF:
5666 tree index = TREE_OPERAND (exp, 1);
5667 tree low_bound = array_ref_low_bound (exp);
5668 tree unit_size = array_ref_element_size (exp);
5670 /* We assume all arrays have sizes that are a multiple of a byte.
5671 First subtract the lower bound, if any, in the type of the
5672 index, then convert to sizetype and multiply by the size of
5673 the array element. */
5674 if (! integer_zerop (low_bound))
5675 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5676 index, low_bound);
5678 offset = size_binop (PLUS_EXPR, offset,
5679 size_binop (MULT_EXPR,
5680 fold_convert (sizetype, index),
5681 unit_size));
5683 break;
5685 case REALPART_EXPR:
5686 break;
5688 case IMAGPART_EXPR:
5689 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5690 bitsize_int (*pbitsize));
5691 break;
5693 case VIEW_CONVERT_EXPR:
5694 if (keep_aligning && STRICT_ALIGNMENT
5695 && (TYPE_ALIGN (TREE_TYPE (exp))
5696 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5697 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5698 < BIGGEST_ALIGNMENT)
5699 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5700 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5701 goto done;
5702 break;
5704 default:
5705 goto done;
5708 /* If any reference in the chain is volatile, the effect is volatile. */
5709 if (TREE_THIS_VOLATILE (exp))
5710 *pvolatilep = 1;
5712 exp = TREE_OPERAND (exp, 0);
5714 done:
5716 /* If OFFSET is constant, see if we can return the whole thing as a
5717 constant bit position. Otherwise, split it up. */
5718 if (host_integerp (offset, 0)
5719 && 0 != (tem = size_binop (MULT_EXPR,
5720 fold_convert (bitsizetype, offset),
5721 bitsize_unit_node))
5722 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5723 && host_integerp (tem, 0))
5724 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5725 else
5726 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5728 *pmode = mode;
5729 return exp;
5732 /* Return a tree of sizetype representing the size, in bytes, of the element
5733 of EXP, an ARRAY_REF. */
5735 tree
5736 array_ref_element_size (tree exp)
5738 tree aligned_size = TREE_OPERAND (exp, 3);
5739 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5741 /* If a size was specified in the ARRAY_REF, it's the size measured
5742 in alignment units of the element type. So multiply by that value. */
5743 if (aligned_size)
5745 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5746 sizetype from another type of the same width and signedness. */
5747 if (TREE_TYPE (aligned_size) != sizetype)
5748 aligned_size = fold_convert (sizetype, aligned_size);
5749 return size_binop (MULT_EXPR, aligned_size,
5750 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5753 /* Otherwise, take the size from that of the element type. Substitute
5754 any PLACEHOLDER_EXPR that we have. */
5755 else
5756 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5759 /* Return a tree representing the lower bound of the array mentioned in
5760 EXP, an ARRAY_REF. */
5762 tree
5763 array_ref_low_bound (tree exp)
5765 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5767 /* If a lower bound is specified in EXP, use it. */
5768 if (TREE_OPERAND (exp, 2))
5769 return TREE_OPERAND (exp, 2);
5771 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5772 substituting for a PLACEHOLDER_EXPR as needed. */
5773 if (domain_type && TYPE_MIN_VALUE (domain_type))
5774 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5776 /* Otherwise, return a zero of the appropriate type. */
5777 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5780 /* Return a tree representing the upper bound of the array mentioned in
5781 EXP, an ARRAY_REF. */
5783 tree
5784 array_ref_up_bound (tree exp)
5786 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5788 /* If there is a domain type and it has an upper bound, use it, substituting
5789 for a PLACEHOLDER_EXPR as needed. */
5790 if (domain_type && TYPE_MAX_VALUE (domain_type))
5791 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5793 /* Otherwise fail. */
5794 return NULL_TREE;
5797 /* Return a tree representing the offset, in bytes, of the field referenced
5798 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5800 tree
5801 component_ref_field_offset (tree exp)
5803 tree aligned_offset = TREE_OPERAND (exp, 2);
5804 tree field = TREE_OPERAND (exp, 1);
5806 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5807 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5808 value. */
5809 if (aligned_offset)
5811 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5812 sizetype from another type of the same width and signedness. */
5813 if (TREE_TYPE (aligned_offset) != sizetype)
5814 aligned_offset = fold_convert (sizetype, aligned_offset);
5815 return size_binop (MULT_EXPR, aligned_offset,
5816 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5819 /* Otherwise, take the offset from that of the field. Substitute
5820 any PLACEHOLDER_EXPR that we have. */
5821 else
5822 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5825 /* Return 1 if T is an expression that get_inner_reference handles. */
5828 handled_component_p (tree t)
5830 switch (TREE_CODE (t))
5832 case BIT_FIELD_REF:
5833 case COMPONENT_REF:
5834 case ARRAY_REF:
5835 case ARRAY_RANGE_REF:
5836 case VIEW_CONVERT_EXPR:
5837 case REALPART_EXPR:
5838 case IMAGPART_EXPR:
5839 return 1;
5841 default:
5842 return 0;
5846 /* Given an rtx VALUE that may contain additions and multiplications, return
5847 an equivalent value that just refers to a register, memory, or constant.
5848 This is done by generating instructions to perform the arithmetic and
5849 returning a pseudo-register containing the value.
5851 The returned value may be a REG, SUBREG, MEM or constant. */
5854 force_operand (rtx value, rtx target)
5856 rtx op1, op2;
5857 /* Use subtarget as the target for operand 0 of a binary operation. */
5858 rtx subtarget = get_subtarget (target);
5859 enum rtx_code code = GET_CODE (value);
5861 /* Check for subreg applied to an expression produced by loop optimizer. */
5862 if (code == SUBREG
5863 && !REG_P (SUBREG_REG (value))
5864 && !MEM_P (SUBREG_REG (value)))
5866 value = simplify_gen_subreg (GET_MODE (value),
5867 force_reg (GET_MODE (SUBREG_REG (value)),
5868 force_operand (SUBREG_REG (value),
5869 NULL_RTX)),
5870 GET_MODE (SUBREG_REG (value)),
5871 SUBREG_BYTE (value));
5872 code = GET_CODE (value);
5875 /* Check for a PIC address load. */
5876 if ((code == PLUS || code == MINUS)
5877 && XEXP (value, 0) == pic_offset_table_rtx
5878 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5879 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5880 || GET_CODE (XEXP (value, 1)) == CONST))
5882 if (!subtarget)
5883 subtarget = gen_reg_rtx (GET_MODE (value));
5884 emit_move_insn (subtarget, value);
5885 return subtarget;
5888 if (ARITHMETIC_P (value))
5890 op2 = XEXP (value, 1);
5891 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5892 subtarget = 0;
5893 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5895 code = PLUS;
5896 op2 = negate_rtx (GET_MODE (value), op2);
5899 /* Check for an addition with OP2 a constant integer and our first
5900 operand a PLUS of a virtual register and something else. In that
5901 case, we want to emit the sum of the virtual register and the
5902 constant first and then add the other value. This allows virtual
5903 register instantiation to simply modify the constant rather than
5904 creating another one around this addition. */
5905 if (code == PLUS && GET_CODE (op2) == CONST_INT
5906 && GET_CODE (XEXP (value, 0)) == PLUS
5907 && REG_P (XEXP (XEXP (value, 0), 0))
5908 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5909 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5911 rtx temp = expand_simple_binop (GET_MODE (value), code,
5912 XEXP (XEXP (value, 0), 0), op2,
5913 subtarget, 0, OPTAB_LIB_WIDEN);
5914 return expand_simple_binop (GET_MODE (value), code, temp,
5915 force_operand (XEXP (XEXP (value,
5916 0), 1), 0),
5917 target, 0, OPTAB_LIB_WIDEN);
5920 op1 = force_operand (XEXP (value, 0), subtarget);
5921 op2 = force_operand (op2, NULL_RTX);
5922 switch (code)
5924 case MULT:
5925 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5926 case DIV:
5927 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5928 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5929 target, 1, OPTAB_LIB_WIDEN);
5930 else
5931 return expand_divmod (0,
5932 FLOAT_MODE_P (GET_MODE (value))
5933 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5934 GET_MODE (value), op1, op2, target, 0);
5935 break;
5936 case MOD:
5937 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5938 target, 0);
5939 break;
5940 case UDIV:
5941 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5942 target, 1);
5943 break;
5944 case UMOD:
5945 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5946 target, 1);
5947 break;
5948 case ASHIFTRT:
5949 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5950 target, 0, OPTAB_LIB_WIDEN);
5951 break;
5952 default:
5953 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5954 target, 1, OPTAB_LIB_WIDEN);
5957 if (UNARY_P (value))
5959 if (!target)
5960 target = gen_reg_rtx (GET_MODE (value));
5961 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5962 switch (code)
5964 case ZERO_EXTEND:
5965 case SIGN_EXTEND:
5966 case TRUNCATE:
5967 convert_move (target, op1, code == ZERO_EXTEND);
5968 return target;
5970 case FIX:
5971 case UNSIGNED_FIX:
5972 expand_fix (target, op1, code == UNSIGNED_FIX);
5973 return target;
5975 case FLOAT:
5976 case UNSIGNED_FLOAT:
5977 expand_float (target, op1, code == UNSIGNED_FLOAT);
5978 return target;
5980 default:
5981 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5985 #ifdef INSN_SCHEDULING
5986 /* On machines that have insn scheduling, we want all memory reference to be
5987 explicit, so we need to deal with such paradoxical SUBREGs. */
5988 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5989 && (GET_MODE_SIZE (GET_MODE (value))
5990 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5991 value
5992 = simplify_gen_subreg (GET_MODE (value),
5993 force_reg (GET_MODE (SUBREG_REG (value)),
5994 force_operand (SUBREG_REG (value),
5995 NULL_RTX)),
5996 GET_MODE (SUBREG_REG (value)),
5997 SUBREG_BYTE (value));
5998 #endif
6000 return value;
6003 /* Subroutine of expand_expr: return nonzero iff there is no way that
6004 EXP can reference X, which is being modified. TOP_P is nonzero if this
6005 call is going to be used to determine whether we need a temporary
6006 for EXP, as opposed to a recursive call to this function.
6008 It is always safe for this routine to return zero since it merely
6009 searches for optimization opportunities. */
6012 safe_from_p (rtx x, tree exp, int top_p)
6014 rtx exp_rtl = 0;
6015 int i, nops;
6017 if (x == 0
6018 /* If EXP has varying size, we MUST use a target since we currently
6019 have no way of allocating temporaries of variable size
6020 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6021 So we assume here that something at a higher level has prevented a
6022 clash. This is somewhat bogus, but the best we can do. Only
6023 do this when X is BLKmode and when we are at the top level. */
6024 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6025 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6026 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6027 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6028 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6029 != INTEGER_CST)
6030 && GET_MODE (x) == BLKmode)
6031 /* If X is in the outgoing argument area, it is always safe. */
6032 || (MEM_P (x)
6033 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6034 || (GET_CODE (XEXP (x, 0)) == PLUS
6035 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6036 return 1;
6038 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6039 find the underlying pseudo. */
6040 if (GET_CODE (x) == SUBREG)
6042 x = SUBREG_REG (x);
6043 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6044 return 0;
6047 /* Now look at our tree code and possibly recurse. */
6048 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6050 case tcc_declaration:
6051 exp_rtl = DECL_RTL_IF_SET (exp);
6052 break;
6054 case tcc_constant:
6055 return 1;
6057 case tcc_exceptional:
6058 if (TREE_CODE (exp) == TREE_LIST)
6060 while (1)
6062 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6063 return 0;
6064 exp = TREE_CHAIN (exp);
6065 if (!exp)
6066 return 1;
6067 if (TREE_CODE (exp) != TREE_LIST)
6068 return safe_from_p (x, exp, 0);
6071 else if (TREE_CODE (exp) == ERROR_MARK)
6072 return 1; /* An already-visited SAVE_EXPR? */
6073 else
6074 return 0;
6076 case tcc_statement:
6077 /* The only case we look at here is the DECL_INITIAL inside a
6078 DECL_EXPR. */
6079 return (TREE_CODE (exp) != DECL_EXPR
6080 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6081 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6082 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6084 case tcc_binary:
6085 case tcc_comparison:
6086 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6087 return 0;
6088 /* Fall through. */
6090 case tcc_unary:
6091 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6093 case tcc_expression:
6094 case tcc_reference:
6095 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6096 the expression. If it is set, we conflict iff we are that rtx or
6097 both are in memory. Otherwise, we check all operands of the
6098 expression recursively. */
6100 switch (TREE_CODE (exp))
6102 case ADDR_EXPR:
6103 /* If the operand is static or we are static, we can't conflict.
6104 Likewise if we don't conflict with the operand at all. */
6105 if (staticp (TREE_OPERAND (exp, 0))
6106 || TREE_STATIC (exp)
6107 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6108 return 1;
6110 /* Otherwise, the only way this can conflict is if we are taking
6111 the address of a DECL a that address if part of X, which is
6112 very rare. */
6113 exp = TREE_OPERAND (exp, 0);
6114 if (DECL_P (exp))
6116 if (!DECL_RTL_SET_P (exp)
6117 || !MEM_P (DECL_RTL (exp)))
6118 return 0;
6119 else
6120 exp_rtl = XEXP (DECL_RTL (exp), 0);
6122 break;
6124 case MISALIGNED_INDIRECT_REF:
6125 case ALIGN_INDIRECT_REF:
6126 case INDIRECT_REF:
6127 if (MEM_P (x)
6128 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6129 get_alias_set (exp)))
6130 return 0;
6131 break;
6133 case CALL_EXPR:
6134 /* Assume that the call will clobber all hard registers and
6135 all of memory. */
6136 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6137 || MEM_P (x))
6138 return 0;
6139 break;
6141 case WITH_CLEANUP_EXPR:
6142 case CLEANUP_POINT_EXPR:
6143 /* Lowered by gimplify.c. */
6144 gcc_unreachable ();
6146 case SAVE_EXPR:
6147 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6149 default:
6150 break;
6153 /* If we have an rtx, we do not need to scan our operands. */
6154 if (exp_rtl)
6155 break;
6157 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6158 for (i = 0; i < nops; i++)
6159 if (TREE_OPERAND (exp, i) != 0
6160 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6161 return 0;
6163 /* If this is a language-specific tree code, it may require
6164 special handling. */
6165 if ((unsigned int) TREE_CODE (exp)
6166 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6167 && !lang_hooks.safe_from_p (x, exp))
6168 return 0;
6169 break;
6171 case tcc_type:
6172 /* Should never get a type here. */
6173 gcc_unreachable ();
6176 /* If we have an rtl, find any enclosed object. Then see if we conflict
6177 with it. */
6178 if (exp_rtl)
6180 if (GET_CODE (exp_rtl) == SUBREG)
6182 exp_rtl = SUBREG_REG (exp_rtl);
6183 if (REG_P (exp_rtl)
6184 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6185 return 0;
6188 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6189 are memory and they conflict. */
6190 return ! (rtx_equal_p (x, exp_rtl)
6191 || (MEM_P (x) && MEM_P (exp_rtl)
6192 && true_dependence (exp_rtl, VOIDmode, x,
6193 rtx_addr_varies_p)));
6196 /* If we reach here, it is safe. */
6197 return 1;
6201 /* Return the highest power of two that EXP is known to be a multiple of.
6202 This is used in updating alignment of MEMs in array references. */
6204 unsigned HOST_WIDE_INT
6205 highest_pow2_factor (tree exp)
6207 unsigned HOST_WIDE_INT c0, c1;
6209 switch (TREE_CODE (exp))
6211 case INTEGER_CST:
6212 /* We can find the lowest bit that's a one. If the low
6213 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6214 We need to handle this case since we can find it in a COND_EXPR,
6215 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6216 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6217 later ICE. */
6218 if (TREE_CONSTANT_OVERFLOW (exp))
6219 return BIGGEST_ALIGNMENT;
6220 else
6222 /* Note: tree_low_cst is intentionally not used here,
6223 we don't care about the upper bits. */
6224 c0 = TREE_INT_CST_LOW (exp);
6225 c0 &= -c0;
6226 return c0 ? c0 : BIGGEST_ALIGNMENT;
6228 break;
6230 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6231 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6232 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6233 return MIN (c0, c1);
6235 case MULT_EXPR:
6236 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6237 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6238 return c0 * c1;
6240 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6241 case CEIL_DIV_EXPR:
6242 if (integer_pow2p (TREE_OPERAND (exp, 1))
6243 && host_integerp (TREE_OPERAND (exp, 1), 1))
6245 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6246 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6247 return MAX (1, c0 / c1);
6249 break;
6251 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6252 case SAVE_EXPR:
6253 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6255 case COMPOUND_EXPR:
6256 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6258 case COND_EXPR:
6259 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6260 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6261 return MIN (c0, c1);
6263 default:
6264 break;
6267 return 1;
6270 /* Similar, except that the alignment requirements of TARGET are
6271 taken into account. Assume it is at least as aligned as its
6272 type, unless it is a COMPONENT_REF in which case the layout of
6273 the structure gives the alignment. */
6275 static unsigned HOST_WIDE_INT
6276 highest_pow2_factor_for_target (tree target, tree exp)
6278 unsigned HOST_WIDE_INT target_align, factor;
6280 factor = highest_pow2_factor (exp);
6281 if (TREE_CODE (target) == COMPONENT_REF)
6282 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6283 else
6284 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6285 return MAX (factor, target_align);
6288 /* Expands variable VAR. */
6290 void
6291 expand_var (tree var)
6293 if (DECL_EXTERNAL (var))
6294 return;
6296 if (TREE_STATIC (var))
6297 /* If this is an inlined copy of a static local variable,
6298 look up the original decl. */
6299 var = DECL_ORIGIN (var);
6301 if (TREE_STATIC (var)
6302 ? !TREE_ASM_WRITTEN (var)
6303 : !DECL_RTL_SET_P (var))
6305 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6306 /* Should be ignored. */;
6307 else if (lang_hooks.expand_decl (var))
6308 /* OK. */;
6309 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6310 expand_decl (var);
6311 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6312 rest_of_decl_compilation (var, 0, 0);
6313 else
6314 /* No expansion needed. */
6315 gcc_assert (TREE_CODE (var) == TYPE_DECL
6316 || TREE_CODE (var) == CONST_DECL
6317 || TREE_CODE (var) == FUNCTION_DECL
6318 || TREE_CODE (var) == LABEL_DECL);
6322 /* Subroutine of expand_expr. Expand the two operands of a binary
6323 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6324 The value may be stored in TARGET if TARGET is nonzero. The
6325 MODIFIER argument is as documented by expand_expr. */
6327 static void
6328 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6329 enum expand_modifier modifier)
6331 if (! safe_from_p (target, exp1, 1))
6332 target = 0;
6333 if (operand_equal_p (exp0, exp1, 0))
6335 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6336 *op1 = copy_rtx (*op0);
6338 else
6340 /* If we need to preserve evaluation order, copy exp0 into its own
6341 temporary variable so that it can't be clobbered by exp1. */
6342 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6343 exp0 = save_expr (exp0);
6344 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6345 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6350 /* Return a MEM that contains constant EXP. DEFER is as for
6351 output_constant_def and MODIFIER is as for expand_expr. */
6353 static rtx
6354 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6356 rtx mem;
6358 mem = output_constant_def (exp, defer);
6359 if (modifier != EXPAND_INITIALIZER)
6360 mem = use_anchored_address (mem);
6361 return mem;
6364 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6365 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6367 static rtx
6368 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6369 enum expand_modifier modifier)
6371 rtx result, subtarget;
6372 tree inner, offset;
6373 HOST_WIDE_INT bitsize, bitpos;
6374 int volatilep, unsignedp;
6375 enum machine_mode mode1;
6377 /* If we are taking the address of a constant and are at the top level,
6378 we have to use output_constant_def since we can't call force_const_mem
6379 at top level. */
6380 /* ??? This should be considered a front-end bug. We should not be
6381 generating ADDR_EXPR of something that isn't an LVALUE. The only
6382 exception here is STRING_CST. */
6383 if (TREE_CODE (exp) == CONSTRUCTOR
6384 || CONSTANT_CLASS_P (exp))
6385 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6387 /* Everything must be something allowed by is_gimple_addressable. */
6388 switch (TREE_CODE (exp))
6390 case INDIRECT_REF:
6391 /* This case will happen via recursion for &a->b. */
6392 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6394 case CONST_DECL:
6395 /* Recurse and make the output_constant_def clause above handle this. */
6396 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6397 tmode, modifier);
6399 case REALPART_EXPR:
6400 /* The real part of the complex number is always first, therefore
6401 the address is the same as the address of the parent object. */
6402 offset = 0;
6403 bitpos = 0;
6404 inner = TREE_OPERAND (exp, 0);
6405 break;
6407 case IMAGPART_EXPR:
6408 /* The imaginary part of the complex number is always second.
6409 The expression is therefore always offset by the size of the
6410 scalar type. */
6411 offset = 0;
6412 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6413 inner = TREE_OPERAND (exp, 0);
6414 break;
6416 default:
6417 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6418 expand_expr, as that can have various side effects; LABEL_DECLs for
6419 example, may not have their DECL_RTL set yet. Assume language
6420 specific tree nodes can be expanded in some interesting way. */
6421 if (DECL_P (exp)
6422 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6424 result = expand_expr (exp, target, tmode,
6425 modifier == EXPAND_INITIALIZER
6426 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6428 /* If the DECL isn't in memory, then the DECL wasn't properly
6429 marked TREE_ADDRESSABLE, which will be either a front-end
6430 or a tree optimizer bug. */
6431 gcc_assert (MEM_P (result));
6432 result = XEXP (result, 0);
6434 /* ??? Is this needed anymore? */
6435 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6437 assemble_external (exp);
6438 TREE_USED (exp) = 1;
6441 if (modifier != EXPAND_INITIALIZER
6442 && modifier != EXPAND_CONST_ADDRESS)
6443 result = force_operand (result, target);
6444 return result;
6447 /* Pass FALSE as the last argument to get_inner_reference although
6448 we are expanding to RTL. The rationale is that we know how to
6449 handle "aligning nodes" here: we can just bypass them because
6450 they won't change the final object whose address will be returned
6451 (they actually exist only for that purpose). */
6452 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6453 &mode1, &unsignedp, &volatilep, false);
6454 break;
6457 /* We must have made progress. */
6458 gcc_assert (inner != exp);
6460 subtarget = offset || bitpos ? NULL_RTX : target;
6461 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6463 if (offset)
6465 rtx tmp;
6467 if (modifier != EXPAND_NORMAL)
6468 result = force_operand (result, NULL);
6469 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6471 result = convert_memory_address (tmode, result);
6472 tmp = convert_memory_address (tmode, tmp);
6474 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6475 result = gen_rtx_PLUS (tmode, result, tmp);
6476 else
6478 subtarget = bitpos ? NULL_RTX : target;
6479 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6480 1, OPTAB_LIB_WIDEN);
6484 if (bitpos)
6486 /* Someone beforehand should have rejected taking the address
6487 of such an object. */
6488 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6490 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6491 if (modifier < EXPAND_SUM)
6492 result = force_operand (result, target);
6495 return result;
6498 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6499 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6501 static rtx
6502 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6503 enum expand_modifier modifier)
6505 enum machine_mode rmode;
6506 rtx result;
6508 /* Target mode of VOIDmode says "whatever's natural". */
6509 if (tmode == VOIDmode)
6510 tmode = TYPE_MODE (TREE_TYPE (exp));
6512 /* We can get called with some Weird Things if the user does silliness
6513 like "(short) &a". In that case, convert_memory_address won't do
6514 the right thing, so ignore the given target mode. */
6515 if (tmode != Pmode && tmode != ptr_mode)
6516 tmode = Pmode;
6518 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6519 tmode, modifier);
6521 /* Despite expand_expr claims concerning ignoring TMODE when not
6522 strictly convenient, stuff breaks if we don't honor it. Note
6523 that combined with the above, we only do this for pointer modes. */
6524 rmode = GET_MODE (result);
6525 if (rmode == VOIDmode)
6526 rmode = tmode;
6527 if (rmode != tmode)
6528 result = convert_memory_address (tmode, result);
6530 return result;
6534 /* expand_expr: generate code for computing expression EXP.
6535 An rtx for the computed value is returned. The value is never null.
6536 In the case of a void EXP, const0_rtx is returned.
6538 The value may be stored in TARGET if TARGET is nonzero.
6539 TARGET is just a suggestion; callers must assume that
6540 the rtx returned may not be the same as TARGET.
6542 If TARGET is CONST0_RTX, it means that the value will be ignored.
6544 If TMODE is not VOIDmode, it suggests generating the
6545 result in mode TMODE. But this is done only when convenient.
6546 Otherwise, TMODE is ignored and the value generated in its natural mode.
6547 TMODE is just a suggestion; callers must assume that
6548 the rtx returned may not have mode TMODE.
6550 Note that TARGET may have neither TMODE nor MODE. In that case, it
6551 probably will not be used.
6553 If MODIFIER is EXPAND_SUM then when EXP is an addition
6554 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6555 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6556 products as above, or REG or MEM, or constant.
6557 Ordinarily in such cases we would output mul or add instructions
6558 and then return a pseudo reg containing the sum.
6560 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6561 it also marks a label as absolutely required (it can't be dead).
6562 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6563 This is used for outputting expressions used in initializers.
6565 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6566 with a constant address even if that address is not normally legitimate.
6567 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6569 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6570 a call parameter. Such targets require special care as we haven't yet
6571 marked TARGET so that it's safe from being trashed by libcalls. We
6572 don't want to use TARGET for anything but the final result;
6573 Intermediate values must go elsewhere. Additionally, calls to
6574 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6576 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6577 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6578 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6579 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6580 recursively. */
6582 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6583 enum expand_modifier, rtx *);
6586 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6587 enum expand_modifier modifier, rtx *alt_rtl)
6589 int rn = -1;
6590 rtx ret, last = NULL;
6592 /* Handle ERROR_MARK before anybody tries to access its type. */
6593 if (TREE_CODE (exp) == ERROR_MARK
6594 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6596 ret = CONST0_RTX (tmode);
6597 return ret ? ret : const0_rtx;
6600 if (flag_non_call_exceptions)
6602 rn = lookup_stmt_eh_region (exp);
6603 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6604 if (rn >= 0)
6605 last = get_last_insn ();
6608 /* If this is an expression of some kind and it has an associated line
6609 number, then emit the line number before expanding the expression.
6611 We need to save and restore the file and line information so that
6612 errors discovered during expansion are emitted with the right
6613 information. It would be better of the diagnostic routines
6614 used the file/line information embedded in the tree nodes rather
6615 than globals. */
6616 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6618 location_t saved_location = input_location;
6619 input_location = EXPR_LOCATION (exp);
6620 emit_line_note (input_location);
6622 /* Record where the insns produced belong. */
6623 record_block_change (TREE_BLOCK (exp));
6625 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6627 input_location = saved_location;
6629 else
6631 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6634 /* If using non-call exceptions, mark all insns that may trap.
6635 expand_call() will mark CALL_INSNs before we get to this code,
6636 but it doesn't handle libcalls, and these may trap. */
6637 if (rn >= 0)
6639 rtx insn;
6640 for (insn = next_real_insn (last); insn;
6641 insn = next_real_insn (insn))
6643 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6644 /* If we want exceptions for non-call insns, any
6645 may_trap_p instruction may throw. */
6646 && GET_CODE (PATTERN (insn)) != CLOBBER
6647 && GET_CODE (PATTERN (insn)) != USE
6648 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6650 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6651 REG_NOTES (insn));
6656 return ret;
6659 static rtx
6660 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6661 enum expand_modifier modifier, rtx *alt_rtl)
6663 rtx op0, op1, temp, decl_rtl;
6664 tree type = TREE_TYPE (exp);
6665 int unsignedp;
6666 enum machine_mode mode;
6667 enum tree_code code = TREE_CODE (exp);
6668 optab this_optab;
6669 rtx subtarget, original_target;
6670 int ignore;
6671 tree context, subexp0, subexp1;
6672 bool reduce_bit_field = false;
6673 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6674 ? reduce_to_bit_field_precision ((expr), \
6675 target, \
6676 type) \
6677 : (expr))
6679 mode = TYPE_MODE (type);
6680 unsignedp = TYPE_UNSIGNED (type);
6681 if (lang_hooks.reduce_bit_field_operations
6682 && TREE_CODE (type) == INTEGER_TYPE
6683 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6685 /* An operation in what may be a bit-field type needs the
6686 result to be reduced to the precision of the bit-field type,
6687 which is narrower than that of the type's mode. */
6688 reduce_bit_field = true;
6689 if (modifier == EXPAND_STACK_PARM)
6690 target = 0;
6693 /* Use subtarget as the target for operand 0 of a binary operation. */
6694 subtarget = get_subtarget (target);
6695 original_target = target;
6696 ignore = (target == const0_rtx
6697 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6698 || code == CONVERT_EXPR || code == COND_EXPR
6699 || code == VIEW_CONVERT_EXPR)
6700 && TREE_CODE (type) == VOID_TYPE));
6702 /* If we are going to ignore this result, we need only do something
6703 if there is a side-effect somewhere in the expression. If there
6704 is, short-circuit the most common cases here. Note that we must
6705 not call expand_expr with anything but const0_rtx in case this
6706 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6708 if (ignore)
6710 if (! TREE_SIDE_EFFECTS (exp))
6711 return const0_rtx;
6713 /* Ensure we reference a volatile object even if value is ignored, but
6714 don't do this if all we are doing is taking its address. */
6715 if (TREE_THIS_VOLATILE (exp)
6716 && TREE_CODE (exp) != FUNCTION_DECL
6717 && mode != VOIDmode && mode != BLKmode
6718 && modifier != EXPAND_CONST_ADDRESS)
6720 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6721 if (MEM_P (temp))
6722 temp = copy_to_reg (temp);
6723 return const0_rtx;
6726 if (TREE_CODE_CLASS (code) == tcc_unary
6727 || code == COMPONENT_REF || code == INDIRECT_REF)
6728 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6729 modifier);
6731 else if (TREE_CODE_CLASS (code) == tcc_binary
6732 || TREE_CODE_CLASS (code) == tcc_comparison
6733 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6735 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6736 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6737 return const0_rtx;
6739 else if (code == BIT_FIELD_REF)
6741 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6742 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6743 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6744 return const0_rtx;
6747 target = 0;
6751 switch (code)
6753 case LABEL_DECL:
6755 tree function = decl_function_context (exp);
6757 temp = label_rtx (exp);
6758 temp = gen_rtx_LABEL_REF (Pmode, temp);
6760 if (function != current_function_decl
6761 && function != 0)
6762 LABEL_REF_NONLOCAL_P (temp) = 1;
6764 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6765 return temp;
6768 case SSA_NAME:
6769 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6770 NULL);
6772 case PARM_DECL:
6773 case VAR_DECL:
6774 /* If a static var's type was incomplete when the decl was written,
6775 but the type is complete now, lay out the decl now. */
6776 if (DECL_SIZE (exp) == 0
6777 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6778 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6779 layout_decl (exp, 0);
6781 /* ... fall through ... */
6783 case FUNCTION_DECL:
6784 case RESULT_DECL:
6785 decl_rtl = DECL_RTL (exp);
6786 gcc_assert (decl_rtl);
6788 /* Ensure variable marked as used even if it doesn't go through
6789 a parser. If it hasn't be used yet, write out an external
6790 definition. */
6791 if (! TREE_USED (exp))
6793 assemble_external (exp);
6794 TREE_USED (exp) = 1;
6797 /* Show we haven't gotten RTL for this yet. */
6798 temp = 0;
6800 /* Variables inherited from containing functions should have
6801 been lowered by this point. */
6802 context = decl_function_context (exp);
6803 gcc_assert (!context
6804 || context == current_function_decl
6805 || TREE_STATIC (exp)
6806 /* ??? C++ creates functions that are not TREE_STATIC. */
6807 || TREE_CODE (exp) == FUNCTION_DECL);
6809 /* This is the case of an array whose size is to be determined
6810 from its initializer, while the initializer is still being parsed.
6811 See expand_decl. */
6813 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6814 temp = validize_mem (decl_rtl);
6816 /* If DECL_RTL is memory, we are in the normal case and either
6817 the address is not valid or it is not a register and -fforce-addr
6818 is specified, get the address into a register. */
6820 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6822 if (alt_rtl)
6823 *alt_rtl = decl_rtl;
6824 decl_rtl = use_anchored_address (decl_rtl);
6825 if (modifier != EXPAND_CONST_ADDRESS
6826 && modifier != EXPAND_SUM
6827 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6828 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6829 temp = replace_equiv_address (decl_rtl,
6830 copy_rtx (XEXP (decl_rtl, 0)));
6833 /* If we got something, return it. But first, set the alignment
6834 if the address is a register. */
6835 if (temp != 0)
6837 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6838 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6840 return temp;
6843 /* If the mode of DECL_RTL does not match that of the decl, it
6844 must be a promoted value. We return a SUBREG of the wanted mode,
6845 but mark it so that we know that it was already extended. */
6847 if (REG_P (decl_rtl)
6848 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6850 enum machine_mode pmode;
6852 /* Get the signedness used for this variable. Ensure we get the
6853 same mode we got when the variable was declared. */
6854 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6855 (TREE_CODE (exp) == RESULT_DECL
6856 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6857 gcc_assert (GET_MODE (decl_rtl) == pmode);
6859 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6860 SUBREG_PROMOTED_VAR_P (temp) = 1;
6861 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6862 return temp;
6865 return decl_rtl;
6867 case INTEGER_CST:
6868 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6869 TREE_INT_CST_HIGH (exp), mode);
6871 /* ??? If overflow is set, fold will have done an incomplete job,
6872 which can result in (plus xx (const_int 0)), which can get
6873 simplified by validate_replace_rtx during virtual register
6874 instantiation, which can result in unrecognizable insns.
6875 Avoid this by forcing all overflows into registers. */
6876 if (TREE_CONSTANT_OVERFLOW (exp)
6877 && modifier != EXPAND_INITIALIZER)
6878 temp = force_reg (mode, temp);
6880 return temp;
6882 case VECTOR_CST:
6883 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6884 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6885 return const_vector_from_tree (exp);
6886 else
6887 return expand_expr (build_constructor_from_list
6888 (TREE_TYPE (exp),
6889 TREE_VECTOR_CST_ELTS (exp)),
6890 ignore ? const0_rtx : target, tmode, modifier);
6892 case CONST_DECL:
6893 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6895 case REAL_CST:
6896 /* If optimized, generate immediate CONST_DOUBLE
6897 which will be turned into memory by reload if necessary.
6899 We used to force a register so that loop.c could see it. But
6900 this does not allow gen_* patterns to perform optimizations with
6901 the constants. It also produces two insns in cases like "x = 1.0;".
6902 On most machines, floating-point constants are not permitted in
6903 many insns, so we'd end up copying it to a register in any case.
6905 Now, we do the copying in expand_binop, if appropriate. */
6906 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6907 TYPE_MODE (TREE_TYPE (exp)));
6909 case COMPLEX_CST:
6910 /* Handle evaluating a complex constant in a CONCAT target. */
6911 if (original_target && GET_CODE (original_target) == CONCAT)
6913 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6914 rtx rtarg, itarg;
6916 rtarg = XEXP (original_target, 0);
6917 itarg = XEXP (original_target, 1);
6919 /* Move the real and imaginary parts separately. */
6920 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6921 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6923 if (op0 != rtarg)
6924 emit_move_insn (rtarg, op0);
6925 if (op1 != itarg)
6926 emit_move_insn (itarg, op1);
6928 return original_target;
6931 /* ... fall through ... */
6933 case STRING_CST:
6934 temp = expand_expr_constant (exp, 1, modifier);
6936 /* temp contains a constant address.
6937 On RISC machines where a constant address isn't valid,
6938 make some insns to get that address into a register. */
6939 if (modifier != EXPAND_CONST_ADDRESS
6940 && modifier != EXPAND_INITIALIZER
6941 && modifier != EXPAND_SUM
6942 && (! memory_address_p (mode, XEXP (temp, 0))
6943 || flag_force_addr))
6944 return replace_equiv_address (temp,
6945 copy_rtx (XEXP (temp, 0)));
6946 return temp;
6948 case SAVE_EXPR:
6950 tree val = TREE_OPERAND (exp, 0);
6951 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6953 if (!SAVE_EXPR_RESOLVED_P (exp))
6955 /* We can indeed still hit this case, typically via builtin
6956 expanders calling save_expr immediately before expanding
6957 something. Assume this means that we only have to deal
6958 with non-BLKmode values. */
6959 gcc_assert (GET_MODE (ret) != BLKmode);
6961 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6962 DECL_ARTIFICIAL (val) = 1;
6963 DECL_IGNORED_P (val) = 1;
6964 TREE_OPERAND (exp, 0) = val;
6965 SAVE_EXPR_RESOLVED_P (exp) = 1;
6967 if (!CONSTANT_P (ret))
6968 ret = copy_to_reg (ret);
6969 SET_DECL_RTL (val, ret);
6972 return ret;
6975 case GOTO_EXPR:
6976 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6977 expand_goto (TREE_OPERAND (exp, 0));
6978 else
6979 expand_computed_goto (TREE_OPERAND (exp, 0));
6980 return const0_rtx;
6982 case CONSTRUCTOR:
6983 /* If we don't need the result, just ensure we evaluate any
6984 subexpressions. */
6985 if (ignore)
6987 unsigned HOST_WIDE_INT idx;
6988 tree value;
6990 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6991 expand_expr (value, const0_rtx, VOIDmode, 0);
6993 return const0_rtx;
6996 /* Try to avoid creating a temporary at all. This is possible
6997 if all of the initializer is zero.
6998 FIXME: try to handle all [0..255] initializers we can handle
6999 with memset. */
7000 else if (TREE_STATIC (exp)
7001 && !TREE_ADDRESSABLE (exp)
7002 && target != 0 && mode == BLKmode
7003 && all_zeros_p (exp))
7005 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7006 return target;
7009 /* All elts simple constants => refer to a constant in memory. But
7010 if this is a non-BLKmode mode, let it store a field at a time
7011 since that should make a CONST_INT or CONST_DOUBLE when we
7012 fold. Likewise, if we have a target we can use, it is best to
7013 store directly into the target unless the type is large enough
7014 that memcpy will be used. If we are making an initializer and
7015 all operands are constant, put it in memory as well.
7017 FIXME: Avoid trying to fill vector constructors piece-meal.
7018 Output them with output_constant_def below unless we're sure
7019 they're zeros. This should go away when vector initializers
7020 are treated like VECTOR_CST instead of arrays.
7022 else if ((TREE_STATIC (exp)
7023 && ((mode == BLKmode
7024 && ! (target != 0 && safe_from_p (target, exp, 1)))
7025 || TREE_ADDRESSABLE (exp)
7026 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7027 && (! MOVE_BY_PIECES_P
7028 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7029 TYPE_ALIGN (type)))
7030 && ! mostly_zeros_p (exp))))
7031 || ((modifier == EXPAND_INITIALIZER
7032 || modifier == EXPAND_CONST_ADDRESS)
7033 && TREE_CONSTANT (exp)))
7035 rtx constructor = expand_expr_constant (exp, 1, modifier);
7037 if (modifier != EXPAND_CONST_ADDRESS
7038 && modifier != EXPAND_INITIALIZER
7039 && modifier != EXPAND_SUM)
7040 constructor = validize_mem (constructor);
7042 return constructor;
7044 else
7046 /* Handle calls that pass values in multiple non-contiguous
7047 locations. The Irix 6 ABI has examples of this. */
7048 if (target == 0 || ! safe_from_p (target, exp, 1)
7049 || GET_CODE (target) == PARALLEL
7050 || modifier == EXPAND_STACK_PARM)
7051 target
7052 = assign_temp (build_qualified_type (type,
7053 (TYPE_QUALS (type)
7054 | (TREE_READONLY (exp)
7055 * TYPE_QUAL_CONST))),
7056 0, TREE_ADDRESSABLE (exp), 1);
7058 store_constructor (exp, target, 0, int_expr_size (exp));
7059 return target;
7062 case MISALIGNED_INDIRECT_REF:
7063 case ALIGN_INDIRECT_REF:
7064 case INDIRECT_REF:
7066 tree exp1 = TREE_OPERAND (exp, 0);
7068 if (modifier != EXPAND_WRITE)
7070 tree t;
7072 t = fold_read_from_constant_string (exp);
7073 if (t)
7074 return expand_expr (t, target, tmode, modifier);
7077 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7078 op0 = memory_address (mode, op0);
7080 if (code == ALIGN_INDIRECT_REF)
7082 int align = TYPE_ALIGN_UNIT (type);
7083 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7084 op0 = memory_address (mode, op0);
7087 temp = gen_rtx_MEM (mode, op0);
7089 set_mem_attributes (temp, exp, 0);
7091 /* Resolve the misalignment now, so that we don't have to remember
7092 to resolve it later. Of course, this only works for reads. */
7093 /* ??? When we get around to supporting writes, we'll have to handle
7094 this in store_expr directly. The vectorizer isn't generating
7095 those yet, however. */
7096 if (code == MISALIGNED_INDIRECT_REF)
7098 int icode;
7099 rtx reg, insn;
7101 gcc_assert (modifier == EXPAND_NORMAL
7102 || modifier == EXPAND_STACK_PARM);
7104 /* The vectorizer should have already checked the mode. */
7105 icode = movmisalign_optab->handlers[mode].insn_code;
7106 gcc_assert (icode != CODE_FOR_nothing);
7108 /* We've already validated the memory, and we're creating a
7109 new pseudo destination. The predicates really can't fail. */
7110 reg = gen_reg_rtx (mode);
7112 /* Nor can the insn generator. */
7113 insn = GEN_FCN (icode) (reg, temp);
7114 emit_insn (insn);
7116 return reg;
7119 return temp;
7122 case TARGET_MEM_REF:
7124 struct mem_address addr;
7126 get_address_description (exp, &addr);
7127 op0 = addr_for_mem_ref (&addr, true);
7128 op0 = memory_address (mode, op0);
7129 temp = gen_rtx_MEM (mode, op0);
7130 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7132 return temp;
7134 case ARRAY_REF:
7137 tree array = TREE_OPERAND (exp, 0);
7138 tree index = TREE_OPERAND (exp, 1);
7140 /* Fold an expression like: "foo"[2].
7141 This is not done in fold so it won't happen inside &.
7142 Don't fold if this is for wide characters since it's too
7143 difficult to do correctly and this is a very rare case. */
7145 if (modifier != EXPAND_CONST_ADDRESS
7146 && modifier != EXPAND_INITIALIZER
7147 && modifier != EXPAND_MEMORY)
7149 tree t = fold_read_from_constant_string (exp);
7151 if (t)
7152 return expand_expr (t, target, tmode, modifier);
7155 /* If this is a constant index into a constant array,
7156 just get the value from the array. Handle both the cases when
7157 we have an explicit constructor and when our operand is a variable
7158 that was declared const. */
7160 if (modifier != EXPAND_CONST_ADDRESS
7161 && modifier != EXPAND_INITIALIZER
7162 && modifier != EXPAND_MEMORY
7163 && TREE_CODE (array) == CONSTRUCTOR
7164 && ! TREE_SIDE_EFFECTS (array)
7165 && TREE_CODE (index) == INTEGER_CST)
7167 unsigned HOST_WIDE_INT ix;
7168 tree field, value;
7170 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7171 field, value)
7172 if (tree_int_cst_equal (field, index))
7174 if (!TREE_SIDE_EFFECTS (value))
7175 return expand_expr (fold (value), target, tmode, modifier);
7176 break;
7180 else if (optimize >= 1
7181 && modifier != EXPAND_CONST_ADDRESS
7182 && modifier != EXPAND_INITIALIZER
7183 && modifier != EXPAND_MEMORY
7184 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7185 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7186 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7187 && targetm.binds_local_p (array))
7189 if (TREE_CODE (index) == INTEGER_CST)
7191 tree init = DECL_INITIAL (array);
7193 if (TREE_CODE (init) == CONSTRUCTOR)
7195 unsigned HOST_WIDE_INT ix;
7196 tree field, value;
7198 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7199 field, value)
7200 if (tree_int_cst_equal (field, index))
7202 if (!TREE_SIDE_EFFECTS (value))
7203 return expand_expr (fold (value), target, tmode,
7204 modifier);
7205 break;
7208 else if(TREE_CODE (init) == STRING_CST)
7210 tree index1 = index;
7211 tree low_bound = array_ref_low_bound (exp);
7212 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7214 /* Optimize the special-case of a zero lower bound.
7216 We convert the low_bound to sizetype to avoid some problems
7217 with constant folding. (E.g. suppose the lower bound is 1,
7218 and its mode is QI. Without the conversion,l (ARRAY
7219 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7220 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
7222 if (! integer_zerop (low_bound))
7223 index1 = size_diffop (index1, fold_convert (sizetype,
7224 low_bound));
7226 if (0 > compare_tree_int (index1,
7227 TREE_STRING_LENGTH (init)))
7229 tree type = TREE_TYPE (TREE_TYPE (init));
7230 enum machine_mode mode = TYPE_MODE (type);
7232 if (GET_MODE_CLASS (mode) == MODE_INT
7233 && GET_MODE_SIZE (mode) == 1)
7234 return gen_int_mode (TREE_STRING_POINTER (init)
7235 [TREE_INT_CST_LOW (index1)],
7236 mode);
7242 goto normal_inner_ref;
7244 case COMPONENT_REF:
7245 /* If the operand is a CONSTRUCTOR, we can just extract the
7246 appropriate field if it is present. */
7247 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7249 unsigned HOST_WIDE_INT idx;
7250 tree field, value;
7252 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7253 idx, field, value)
7254 if (field == TREE_OPERAND (exp, 1)
7255 /* We can normally use the value of the field in the
7256 CONSTRUCTOR. However, if this is a bitfield in
7257 an integral mode that we can fit in a HOST_WIDE_INT,
7258 we must mask only the number of bits in the bitfield,
7259 since this is done implicitly by the constructor. If
7260 the bitfield does not meet either of those conditions,
7261 we can't do this optimization. */
7262 && (! DECL_BIT_FIELD (field)
7263 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7264 && (GET_MODE_BITSIZE (DECL_MODE (field))
7265 <= HOST_BITS_PER_WIDE_INT))))
7267 if (DECL_BIT_FIELD (field)
7268 && modifier == EXPAND_STACK_PARM)
7269 target = 0;
7270 op0 = expand_expr (value, target, tmode, modifier);
7271 if (DECL_BIT_FIELD (field))
7273 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7274 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7276 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7278 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7279 op0 = expand_and (imode, op0, op1, target);
7281 else
7283 tree count
7284 = build_int_cst (NULL_TREE,
7285 GET_MODE_BITSIZE (imode) - bitsize);
7287 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7288 target, 0);
7289 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7290 target, 0);
7294 return op0;
7297 goto normal_inner_ref;
7299 case BIT_FIELD_REF:
7300 case ARRAY_RANGE_REF:
7301 normal_inner_ref:
7303 enum machine_mode mode1;
7304 HOST_WIDE_INT bitsize, bitpos;
7305 tree offset;
7306 int volatilep = 0;
7307 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7308 &mode1, &unsignedp, &volatilep, true);
7309 rtx orig_op0;
7311 /* If we got back the original object, something is wrong. Perhaps
7312 we are evaluating an expression too early. In any event, don't
7313 infinitely recurse. */
7314 gcc_assert (tem != exp);
7316 /* If TEM's type is a union of variable size, pass TARGET to the inner
7317 computation, since it will need a temporary and TARGET is known
7318 to have to do. This occurs in unchecked conversion in Ada. */
7320 orig_op0 = op0
7321 = expand_expr (tem,
7322 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7323 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7324 != INTEGER_CST)
7325 && modifier != EXPAND_STACK_PARM
7326 ? target : NULL_RTX),
7327 VOIDmode,
7328 (modifier == EXPAND_INITIALIZER
7329 || modifier == EXPAND_CONST_ADDRESS
7330 || modifier == EXPAND_STACK_PARM)
7331 ? modifier : EXPAND_NORMAL);
7333 /* If this is a constant, put it into a register if it is a legitimate
7334 constant, OFFSET is 0, and we won't try to extract outside the
7335 register (in case we were passed a partially uninitialized object
7336 or a view_conversion to a larger size). Force the constant to
7337 memory otherwise. */
7338 if (CONSTANT_P (op0))
7340 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7341 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7342 && offset == 0
7343 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7344 op0 = force_reg (mode, op0);
7345 else
7346 op0 = validize_mem (force_const_mem (mode, op0));
7349 /* Otherwise, if this object not in memory and we either have an
7350 offset, a BLKmode result, or a reference outside the object, put it
7351 there. Such cases can occur in Ada if we have unchecked conversion
7352 of an expression from a scalar type to an array or record type or
7353 for an ARRAY_RANGE_REF whose type is BLKmode. */
7354 else if (!MEM_P (op0)
7355 && (offset != 0
7356 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7357 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7359 tree nt = build_qualified_type (TREE_TYPE (tem),
7360 (TYPE_QUALS (TREE_TYPE (tem))
7361 | TYPE_QUAL_CONST));
7362 rtx memloc = assign_temp (nt, 1, 1, 1);
7364 emit_move_insn (memloc, op0);
7365 op0 = memloc;
7368 if (offset != 0)
7370 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7371 EXPAND_SUM);
7373 gcc_assert (MEM_P (op0));
7375 #ifdef POINTERS_EXTEND_UNSIGNED
7376 if (GET_MODE (offset_rtx) != Pmode)
7377 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7378 #else
7379 if (GET_MODE (offset_rtx) != ptr_mode)
7380 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7381 #endif
7383 if (GET_MODE (op0) == BLKmode
7384 /* A constant address in OP0 can have VOIDmode, we must
7385 not try to call force_reg in that case. */
7386 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7387 && bitsize != 0
7388 && (bitpos % bitsize) == 0
7389 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7390 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7392 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7393 bitpos = 0;
7396 op0 = offset_address (op0, offset_rtx,
7397 highest_pow2_factor (offset));
7400 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7401 record its alignment as BIGGEST_ALIGNMENT. */
7402 if (MEM_P (op0) && bitpos == 0 && offset != 0
7403 && is_aligning_offset (offset, tem))
7404 set_mem_align (op0, BIGGEST_ALIGNMENT);
7406 /* Don't forget about volatility even if this is a bitfield. */
7407 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7409 if (op0 == orig_op0)
7410 op0 = copy_rtx (op0);
7412 MEM_VOLATILE_P (op0) = 1;
7415 /* The following code doesn't handle CONCAT.
7416 Assume only bitpos == 0 can be used for CONCAT, due to
7417 one element arrays having the same mode as its element. */
7418 if (GET_CODE (op0) == CONCAT)
7420 gcc_assert (bitpos == 0
7421 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7422 return op0;
7425 /* In cases where an aligned union has an unaligned object
7426 as a field, we might be extracting a BLKmode value from
7427 an integer-mode (e.g., SImode) object. Handle this case
7428 by doing the extract into an object as wide as the field
7429 (which we know to be the width of a basic mode), then
7430 storing into memory, and changing the mode to BLKmode. */
7431 if (mode1 == VOIDmode
7432 || REG_P (op0) || GET_CODE (op0) == SUBREG
7433 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7434 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7435 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7436 && modifier != EXPAND_CONST_ADDRESS
7437 && modifier != EXPAND_INITIALIZER)
7438 /* If the field isn't aligned enough to fetch as a memref,
7439 fetch it as a bit field. */
7440 || (mode1 != BLKmode
7441 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7442 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7443 || (MEM_P (op0)
7444 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7445 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7446 && ((modifier == EXPAND_CONST_ADDRESS
7447 || modifier == EXPAND_INITIALIZER)
7448 ? STRICT_ALIGNMENT
7449 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7450 || (bitpos % BITS_PER_UNIT != 0)))
7451 /* If the type and the field are a constant size and the
7452 size of the type isn't the same size as the bitfield,
7453 we must use bitfield operations. */
7454 || (bitsize >= 0
7455 && TYPE_SIZE (TREE_TYPE (exp))
7456 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7457 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7458 bitsize)))
7460 enum machine_mode ext_mode = mode;
7462 if (ext_mode == BLKmode
7463 && ! (target != 0 && MEM_P (op0)
7464 && MEM_P (target)
7465 && bitpos % BITS_PER_UNIT == 0))
7466 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7468 if (ext_mode == BLKmode)
7470 if (target == 0)
7471 target = assign_temp (type, 0, 1, 1);
7473 if (bitsize == 0)
7474 return target;
7476 /* In this case, BITPOS must start at a byte boundary and
7477 TARGET, if specified, must be a MEM. */
7478 gcc_assert (MEM_P (op0)
7479 && (!target || MEM_P (target))
7480 && !(bitpos % BITS_PER_UNIT));
7482 emit_block_move (target,
7483 adjust_address (op0, VOIDmode,
7484 bitpos / BITS_PER_UNIT),
7485 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7486 / BITS_PER_UNIT),
7487 (modifier == EXPAND_STACK_PARM
7488 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7490 return target;
7493 op0 = validize_mem (op0);
7495 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7496 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7498 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7499 (modifier == EXPAND_STACK_PARM
7500 ? NULL_RTX : target),
7501 ext_mode, ext_mode);
7503 /* If the result is a record type and BITSIZE is narrower than
7504 the mode of OP0, an integral mode, and this is a big endian
7505 machine, we must put the field into the high-order bits. */
7506 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7507 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7508 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7509 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7510 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7511 - bitsize),
7512 op0, 1);
7514 /* If the result type is BLKmode, store the data into a temporary
7515 of the appropriate type, but with the mode corresponding to the
7516 mode for the data we have (op0's mode). It's tempting to make
7517 this a constant type, since we know it's only being stored once,
7518 but that can cause problems if we are taking the address of this
7519 COMPONENT_REF because the MEM of any reference via that address
7520 will have flags corresponding to the type, which will not
7521 necessarily be constant. */
7522 if (mode == BLKmode)
7524 rtx new
7525 = assign_stack_temp_for_type
7526 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7528 emit_move_insn (new, op0);
7529 op0 = copy_rtx (new);
7530 PUT_MODE (op0, BLKmode);
7531 set_mem_attributes (op0, exp, 1);
7534 return op0;
7537 /* If the result is BLKmode, use that to access the object
7538 now as well. */
7539 if (mode == BLKmode)
7540 mode1 = BLKmode;
7542 /* Get a reference to just this component. */
7543 if (modifier == EXPAND_CONST_ADDRESS
7544 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7545 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7546 else
7547 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7549 if (op0 == orig_op0)
7550 op0 = copy_rtx (op0);
7552 set_mem_attributes (op0, exp, 0);
7553 if (REG_P (XEXP (op0, 0)))
7554 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7556 MEM_VOLATILE_P (op0) |= volatilep;
7557 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7558 || modifier == EXPAND_CONST_ADDRESS
7559 || modifier == EXPAND_INITIALIZER)
7560 return op0;
7561 else if (target == 0)
7562 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7564 convert_move (target, op0, unsignedp);
7565 return target;
7568 case OBJ_TYPE_REF:
7569 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7571 case CALL_EXPR:
7572 /* Check for a built-in function. */
7573 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7574 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7575 == FUNCTION_DECL)
7576 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7578 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7579 == BUILT_IN_FRONTEND)
7580 return lang_hooks.expand_expr (exp, original_target,
7581 tmode, modifier,
7582 alt_rtl);
7583 else
7584 return expand_builtin (exp, target, subtarget, tmode, ignore);
7587 return expand_call (exp, target, ignore);
7589 case NON_LVALUE_EXPR:
7590 case NOP_EXPR:
7591 case CONVERT_EXPR:
7592 if (TREE_OPERAND (exp, 0) == error_mark_node)
7593 return const0_rtx;
7595 if (TREE_CODE (type) == UNION_TYPE)
7597 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7599 /* If both input and output are BLKmode, this conversion isn't doing
7600 anything except possibly changing memory attribute. */
7601 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7603 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7604 modifier);
7606 result = copy_rtx (result);
7607 set_mem_attributes (result, exp, 0);
7608 return result;
7611 if (target == 0)
7613 if (TYPE_MODE (type) != BLKmode)
7614 target = gen_reg_rtx (TYPE_MODE (type));
7615 else
7616 target = assign_temp (type, 0, 1, 1);
7619 if (MEM_P (target))
7620 /* Store data into beginning of memory target. */
7621 store_expr (TREE_OPERAND (exp, 0),
7622 adjust_address (target, TYPE_MODE (valtype), 0),
7623 modifier == EXPAND_STACK_PARM);
7625 else
7627 gcc_assert (REG_P (target));
7629 /* Store this field into a union of the proper type. */
7630 store_field (target,
7631 MIN ((int_size_in_bytes (TREE_TYPE
7632 (TREE_OPERAND (exp, 0)))
7633 * BITS_PER_UNIT),
7634 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7635 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7636 type, 0);
7639 /* Return the entire union. */
7640 return target;
7643 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7645 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7646 modifier);
7648 /* If the signedness of the conversion differs and OP0 is
7649 a promoted SUBREG, clear that indication since we now
7650 have to do the proper extension. */
7651 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7652 && GET_CODE (op0) == SUBREG)
7653 SUBREG_PROMOTED_VAR_P (op0) = 0;
7655 return REDUCE_BIT_FIELD (op0);
7658 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7659 if (GET_MODE (op0) == mode)
7662 /* If OP0 is a constant, just convert it into the proper mode. */
7663 else if (CONSTANT_P (op0))
7665 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7666 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7668 if (modifier == EXPAND_INITIALIZER)
7669 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7670 subreg_lowpart_offset (mode,
7671 inner_mode));
7672 else
7673 op0= convert_modes (mode, inner_mode, op0,
7674 TYPE_UNSIGNED (inner_type));
7677 else if (modifier == EXPAND_INITIALIZER)
7678 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7680 else if (target == 0)
7681 op0 = convert_to_mode (mode, op0,
7682 TYPE_UNSIGNED (TREE_TYPE
7683 (TREE_OPERAND (exp, 0))));
7684 else
7686 convert_move (target, op0,
7687 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7688 op0 = target;
7691 return REDUCE_BIT_FIELD (op0);
7693 case VIEW_CONVERT_EXPR:
7694 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7696 /* If the input and output modes are both the same, we are done. */
7697 if (TYPE_MODE (type) == GET_MODE (op0))
7699 /* If neither mode is BLKmode, and both modes are the same size
7700 then we can use gen_lowpart. */
7701 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7702 && GET_MODE_SIZE (TYPE_MODE (type))
7703 == GET_MODE_SIZE (GET_MODE (op0)))
7705 if (GET_CODE (op0) == SUBREG)
7706 op0 = force_reg (GET_MODE (op0), op0);
7707 op0 = gen_lowpart (TYPE_MODE (type), op0);
7709 /* If both modes are integral, then we can convert from one to the
7710 other. */
7711 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7712 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7713 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7714 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7715 /* As a last resort, spill op0 to memory, and reload it in a
7716 different mode. */
7717 else if (!MEM_P (op0))
7719 /* If the operand is not a MEM, force it into memory. Since we
7720 are going to be changing the mode of the MEM, don't call
7721 force_const_mem for constants because we don't allow pool
7722 constants to change mode. */
7723 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7725 gcc_assert (!TREE_ADDRESSABLE (exp));
7727 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7728 target
7729 = assign_stack_temp_for_type
7730 (TYPE_MODE (inner_type),
7731 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7733 emit_move_insn (target, op0);
7734 op0 = target;
7737 /* At this point, OP0 is in the correct mode. If the output type is such
7738 that the operand is known to be aligned, indicate that it is.
7739 Otherwise, we need only be concerned about alignment for non-BLKmode
7740 results. */
7741 if (MEM_P (op0))
7743 op0 = copy_rtx (op0);
7745 if (TYPE_ALIGN_OK (type))
7746 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7747 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7748 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7750 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7751 HOST_WIDE_INT temp_size
7752 = MAX (int_size_in_bytes (inner_type),
7753 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7754 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7755 temp_size, 0, type);
7756 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7758 gcc_assert (!TREE_ADDRESSABLE (exp));
7760 if (GET_MODE (op0) == BLKmode)
7761 emit_block_move (new_with_op0_mode, op0,
7762 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7763 (modifier == EXPAND_STACK_PARM
7764 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7765 else
7766 emit_move_insn (new_with_op0_mode, op0);
7768 op0 = new;
7771 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7774 return op0;
7776 case PLUS_EXPR:
7777 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7778 something else, make sure we add the register to the constant and
7779 then to the other thing. This case can occur during strength
7780 reduction and doing it this way will produce better code if the
7781 frame pointer or argument pointer is eliminated.
7783 fold-const.c will ensure that the constant is always in the inner
7784 PLUS_EXPR, so the only case we need to do anything about is if
7785 sp, ap, or fp is our second argument, in which case we must swap
7786 the innermost first argument and our second argument. */
7788 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7789 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7790 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7791 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7792 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7793 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7795 tree t = TREE_OPERAND (exp, 1);
7797 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7798 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7801 /* If the result is to be ptr_mode and we are adding an integer to
7802 something, we might be forming a constant. So try to use
7803 plus_constant. If it produces a sum and we can't accept it,
7804 use force_operand. This allows P = &ARR[const] to generate
7805 efficient code on machines where a SYMBOL_REF is not a valid
7806 address.
7808 If this is an EXPAND_SUM call, always return the sum. */
7809 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7810 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7812 if (modifier == EXPAND_STACK_PARM)
7813 target = 0;
7814 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7815 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7816 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7818 rtx constant_part;
7820 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7821 EXPAND_SUM);
7822 /* Use immed_double_const to ensure that the constant is
7823 truncated according to the mode of OP1, then sign extended
7824 to a HOST_WIDE_INT. Using the constant directly can result
7825 in non-canonical RTL in a 64x32 cross compile. */
7826 constant_part
7827 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7828 (HOST_WIDE_INT) 0,
7829 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7830 op1 = plus_constant (op1, INTVAL (constant_part));
7831 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7832 op1 = force_operand (op1, target);
7833 return REDUCE_BIT_FIELD (op1);
7836 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7837 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7838 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7840 rtx constant_part;
7842 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7843 (modifier == EXPAND_INITIALIZER
7844 ? EXPAND_INITIALIZER : EXPAND_SUM));
7845 if (! CONSTANT_P (op0))
7847 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7848 VOIDmode, modifier);
7849 /* Return a PLUS if modifier says it's OK. */
7850 if (modifier == EXPAND_SUM
7851 || modifier == EXPAND_INITIALIZER)
7852 return simplify_gen_binary (PLUS, mode, op0, op1);
7853 goto binop2;
7855 /* Use immed_double_const to ensure that the constant is
7856 truncated according to the mode of OP1, then sign extended
7857 to a HOST_WIDE_INT. Using the constant directly can result
7858 in non-canonical RTL in a 64x32 cross compile. */
7859 constant_part
7860 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7861 (HOST_WIDE_INT) 0,
7862 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7863 op0 = plus_constant (op0, INTVAL (constant_part));
7864 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7865 op0 = force_operand (op0, target);
7866 return REDUCE_BIT_FIELD (op0);
7870 /* No sense saving up arithmetic to be done
7871 if it's all in the wrong mode to form part of an address.
7872 And force_operand won't know whether to sign-extend or
7873 zero-extend. */
7874 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7875 || mode != ptr_mode)
7877 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7878 subtarget, &op0, &op1, 0);
7879 if (op0 == const0_rtx)
7880 return op1;
7881 if (op1 == const0_rtx)
7882 return op0;
7883 goto binop2;
7886 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7887 subtarget, &op0, &op1, modifier);
7888 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7890 case MINUS_EXPR:
7891 /* For initializers, we are allowed to return a MINUS of two
7892 symbolic constants. Here we handle all cases when both operands
7893 are constant. */
7894 /* Handle difference of two symbolic constants,
7895 for the sake of an initializer. */
7896 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7897 && really_constant_p (TREE_OPERAND (exp, 0))
7898 && really_constant_p (TREE_OPERAND (exp, 1)))
7900 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7901 NULL_RTX, &op0, &op1, modifier);
7903 /* If the last operand is a CONST_INT, use plus_constant of
7904 the negated constant. Else make the MINUS. */
7905 if (GET_CODE (op1) == CONST_INT)
7906 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7907 else
7908 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7911 /* No sense saving up arithmetic to be done
7912 if it's all in the wrong mode to form part of an address.
7913 And force_operand won't know whether to sign-extend or
7914 zero-extend. */
7915 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7916 || mode != ptr_mode)
7917 goto binop;
7919 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7920 subtarget, &op0, &op1, modifier);
7922 /* Convert A - const to A + (-const). */
7923 if (GET_CODE (op1) == CONST_INT)
7925 op1 = negate_rtx (mode, op1);
7926 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7929 goto binop2;
7931 case MULT_EXPR:
7932 /* If first operand is constant, swap them.
7933 Thus the following special case checks need only
7934 check the second operand. */
7935 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7937 tree t1 = TREE_OPERAND (exp, 0);
7938 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7939 TREE_OPERAND (exp, 1) = t1;
7942 /* Attempt to return something suitable for generating an
7943 indexed address, for machines that support that. */
7945 if (modifier == EXPAND_SUM && mode == ptr_mode
7946 && host_integerp (TREE_OPERAND (exp, 1), 0))
7948 tree exp1 = TREE_OPERAND (exp, 1);
7950 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7951 EXPAND_SUM);
7953 if (!REG_P (op0))
7954 op0 = force_operand (op0, NULL_RTX);
7955 if (!REG_P (op0))
7956 op0 = copy_to_mode_reg (mode, op0);
7958 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7959 gen_int_mode (tree_low_cst (exp1, 0),
7960 TYPE_MODE (TREE_TYPE (exp1)))));
7963 if (modifier == EXPAND_STACK_PARM)
7964 target = 0;
7966 /* Check for multiplying things that have been extended
7967 from a narrower type. If this machine supports multiplying
7968 in that narrower type with a result in the desired type,
7969 do it that way, and avoid the explicit type-conversion. */
7971 subexp0 = TREE_OPERAND (exp, 0);
7972 subexp1 = TREE_OPERAND (exp, 1);
7973 /* First, check if we have a multiplication of one signed and one
7974 unsigned operand. */
7975 if (TREE_CODE (subexp0) == NOP_EXPR
7976 && TREE_CODE (subexp1) == NOP_EXPR
7977 && TREE_CODE (type) == INTEGER_TYPE
7978 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7979 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7980 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7981 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
7982 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7983 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
7985 enum machine_mode innermode
7986 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
7987 this_optab = usmul_widen_optab;
7988 if (mode == GET_MODE_WIDER_MODE (innermode))
7990 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7992 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
7993 expand_operands (TREE_OPERAND (subexp0, 0),
7994 TREE_OPERAND (subexp1, 0),
7995 NULL_RTX, &op0, &op1, 0);
7996 else
7997 expand_operands (TREE_OPERAND (subexp0, 0),
7998 TREE_OPERAND (subexp1, 0),
7999 NULL_RTX, &op1, &op0, 0);
8001 goto binop3;
8005 /* Check for a multiplication with matching signedness. */
8006 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8007 && TREE_CODE (type) == INTEGER_TYPE
8008 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8009 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8010 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8011 && int_fits_type_p (TREE_OPERAND (exp, 1),
8012 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8013 /* Don't use a widening multiply if a shift will do. */
8014 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8015 > HOST_BITS_PER_WIDE_INT)
8016 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8018 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8019 && (TYPE_PRECISION (TREE_TYPE
8020 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8021 == TYPE_PRECISION (TREE_TYPE
8022 (TREE_OPERAND
8023 (TREE_OPERAND (exp, 0), 0))))
8024 /* If both operands are extended, they must either both
8025 be zero-extended or both be sign-extended. */
8026 && (TYPE_UNSIGNED (TREE_TYPE
8027 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8028 == TYPE_UNSIGNED (TREE_TYPE
8029 (TREE_OPERAND
8030 (TREE_OPERAND (exp, 0), 0)))))))
8032 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8033 enum machine_mode innermode = TYPE_MODE (op0type);
8034 bool zextend_p = TYPE_UNSIGNED (op0type);
8035 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8036 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8038 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8040 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8042 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8043 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8044 TREE_OPERAND (exp, 1),
8045 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8046 else
8047 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8048 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8049 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8050 goto binop3;
8052 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8053 && innermode == word_mode)
8055 rtx htem, hipart;
8056 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8057 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8058 op1 = convert_modes (innermode, mode,
8059 expand_normal (TREE_OPERAND (exp, 1)),
8060 unsignedp);
8061 else
8062 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8063 temp = expand_binop (mode, other_optab, op0, op1, target,
8064 unsignedp, OPTAB_LIB_WIDEN);
8065 hipart = gen_highpart (innermode, temp);
8066 htem = expand_mult_highpart_adjust (innermode, hipart,
8067 op0, op1, hipart,
8068 zextend_p);
8069 if (htem != hipart)
8070 emit_move_insn (hipart, htem);
8071 return REDUCE_BIT_FIELD (temp);
8075 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8076 subtarget, &op0, &op1, 0);
8077 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8079 case TRUNC_DIV_EXPR:
8080 case FLOOR_DIV_EXPR:
8081 case CEIL_DIV_EXPR:
8082 case ROUND_DIV_EXPR:
8083 case EXACT_DIV_EXPR:
8084 if (modifier == EXPAND_STACK_PARM)
8085 target = 0;
8086 /* Possible optimization: compute the dividend with EXPAND_SUM
8087 then if the divisor is constant can optimize the case
8088 where some terms of the dividend have coeffs divisible by it. */
8089 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8090 subtarget, &op0, &op1, 0);
8091 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8093 case RDIV_EXPR:
8094 goto binop;
8096 case TRUNC_MOD_EXPR:
8097 case FLOOR_MOD_EXPR:
8098 case CEIL_MOD_EXPR:
8099 case ROUND_MOD_EXPR:
8100 if (modifier == EXPAND_STACK_PARM)
8101 target = 0;
8102 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8103 subtarget, &op0, &op1, 0);
8104 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8106 case FIX_ROUND_EXPR:
8107 case FIX_FLOOR_EXPR:
8108 case FIX_CEIL_EXPR:
8109 gcc_unreachable (); /* Not used for C. */
8111 case FIX_TRUNC_EXPR:
8112 op0 = expand_normal (TREE_OPERAND (exp, 0));
8113 if (target == 0 || modifier == EXPAND_STACK_PARM)
8114 target = gen_reg_rtx (mode);
8115 expand_fix (target, op0, unsignedp);
8116 return target;
8118 case FLOAT_EXPR:
8119 op0 = expand_normal (TREE_OPERAND (exp, 0));
8120 if (target == 0 || modifier == EXPAND_STACK_PARM)
8121 target = gen_reg_rtx (mode);
8122 /* expand_float can't figure out what to do if FROM has VOIDmode.
8123 So give it the correct mode. With -O, cse will optimize this. */
8124 if (GET_MODE (op0) == VOIDmode)
8125 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8126 op0);
8127 expand_float (target, op0,
8128 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8129 return target;
8131 case NEGATE_EXPR:
8132 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8133 if (modifier == EXPAND_STACK_PARM)
8134 target = 0;
8135 temp = expand_unop (mode,
8136 optab_for_tree_code (NEGATE_EXPR, type),
8137 op0, target, 0);
8138 gcc_assert (temp);
8139 return REDUCE_BIT_FIELD (temp);
8141 case ABS_EXPR:
8142 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8143 if (modifier == EXPAND_STACK_PARM)
8144 target = 0;
8146 /* ABS_EXPR is not valid for complex arguments. */
8147 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8148 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8150 /* Unsigned abs is simply the operand. Testing here means we don't
8151 risk generating incorrect code below. */
8152 if (TYPE_UNSIGNED (type))
8153 return op0;
8155 return expand_abs (mode, op0, target, unsignedp,
8156 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8158 case MAX_EXPR:
8159 case MIN_EXPR:
8160 target = original_target;
8161 if (target == 0
8162 || modifier == EXPAND_STACK_PARM
8163 || (MEM_P (target) && MEM_VOLATILE_P (target))
8164 || GET_MODE (target) != mode
8165 || (REG_P (target)
8166 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8167 target = gen_reg_rtx (mode);
8168 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8169 target, &op0, &op1, 0);
8171 /* First try to do it with a special MIN or MAX instruction.
8172 If that does not win, use a conditional jump to select the proper
8173 value. */
8174 this_optab = optab_for_tree_code (code, type);
8175 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8176 OPTAB_WIDEN);
8177 if (temp != 0)
8178 return temp;
8180 /* At this point, a MEM target is no longer useful; we will get better
8181 code without it. */
8183 if (! REG_P (target))
8184 target = gen_reg_rtx (mode);
8186 /* If op1 was placed in target, swap op0 and op1. */
8187 if (target != op0 && target == op1)
8189 temp = op0;
8190 op0 = op1;
8191 op1 = temp;
8194 /* We generate better code and avoid problems with op1 mentioning
8195 target by forcing op1 into a pseudo if it isn't a constant. */
8196 if (! CONSTANT_P (op1))
8197 op1 = force_reg (mode, op1);
8200 enum rtx_code comparison_code;
8201 rtx cmpop1 = op1;
8203 if (code == MAX_EXPR)
8204 comparison_code = unsignedp ? GEU : GE;
8205 else
8206 comparison_code = unsignedp ? LEU : LE;
8208 /* Canonicalize to comparisons against 0. */
8209 if (op1 == const1_rtx)
8211 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8212 or (a != 0 ? a : 1) for unsigned.
8213 For MIN we are safe converting (a <= 1 ? a : 1)
8214 into (a <= 0 ? a : 1) */
8215 cmpop1 = const0_rtx;
8216 if (code == MAX_EXPR)
8217 comparison_code = unsignedp ? NE : GT;
8219 if (op1 == constm1_rtx && !unsignedp)
8221 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8222 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8223 cmpop1 = const0_rtx;
8224 if (code == MIN_EXPR)
8225 comparison_code = LT;
8227 #ifdef HAVE_conditional_move
8228 /* Use a conditional move if possible. */
8229 if (can_conditionally_move_p (mode))
8231 rtx insn;
8233 /* ??? Same problem as in expmed.c: emit_conditional_move
8234 forces a stack adjustment via compare_from_rtx, and we
8235 lose the stack adjustment if the sequence we are about
8236 to create is discarded. */
8237 do_pending_stack_adjust ();
8239 start_sequence ();
8241 /* Try to emit the conditional move. */
8242 insn = emit_conditional_move (target, comparison_code,
8243 op0, cmpop1, mode,
8244 op0, op1, mode,
8245 unsignedp);
8247 /* If we could do the conditional move, emit the sequence,
8248 and return. */
8249 if (insn)
8251 rtx seq = get_insns ();
8252 end_sequence ();
8253 emit_insn (seq);
8254 return target;
8257 /* Otherwise discard the sequence and fall back to code with
8258 branches. */
8259 end_sequence ();
8261 #endif
8262 if (target != op0)
8263 emit_move_insn (target, op0);
8265 temp = gen_label_rtx ();
8266 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8267 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8269 emit_move_insn (target, op1);
8270 emit_label (temp);
8271 return target;
8273 case BIT_NOT_EXPR:
8274 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8275 if (modifier == EXPAND_STACK_PARM)
8276 target = 0;
8277 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8278 gcc_assert (temp);
8279 return temp;
8281 /* ??? Can optimize bitwise operations with one arg constant.
8282 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8283 and (a bitwise1 b) bitwise2 b (etc)
8284 but that is probably not worth while. */
8286 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8287 boolean values when we want in all cases to compute both of them. In
8288 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8289 as actual zero-or-1 values and then bitwise anding. In cases where
8290 there cannot be any side effects, better code would be made by
8291 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8292 how to recognize those cases. */
8294 case TRUTH_AND_EXPR:
8295 code = BIT_AND_EXPR;
8296 case BIT_AND_EXPR:
8297 goto binop;
8299 case TRUTH_OR_EXPR:
8300 code = BIT_IOR_EXPR;
8301 case BIT_IOR_EXPR:
8302 goto binop;
8304 case TRUTH_XOR_EXPR:
8305 code = BIT_XOR_EXPR;
8306 case BIT_XOR_EXPR:
8307 goto binop;
8309 case LSHIFT_EXPR:
8310 case RSHIFT_EXPR:
8311 case LROTATE_EXPR:
8312 case RROTATE_EXPR:
8313 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8314 subtarget = 0;
8315 if (modifier == EXPAND_STACK_PARM)
8316 target = 0;
8317 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8318 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8319 unsignedp);
8321 /* Could determine the answer when only additive constants differ. Also,
8322 the addition of one can be handled by changing the condition. */
8323 case LT_EXPR:
8324 case LE_EXPR:
8325 case GT_EXPR:
8326 case GE_EXPR:
8327 case EQ_EXPR:
8328 case NE_EXPR:
8329 case UNORDERED_EXPR:
8330 case ORDERED_EXPR:
8331 case UNLT_EXPR:
8332 case UNLE_EXPR:
8333 case UNGT_EXPR:
8334 case UNGE_EXPR:
8335 case UNEQ_EXPR:
8336 case LTGT_EXPR:
8337 temp = do_store_flag (exp,
8338 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8339 tmode != VOIDmode ? tmode : mode, 0);
8340 if (temp != 0)
8341 return temp;
8343 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8344 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8345 && original_target
8346 && REG_P (original_target)
8347 && (GET_MODE (original_target)
8348 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8350 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8351 VOIDmode, 0);
8353 /* If temp is constant, we can just compute the result. */
8354 if (GET_CODE (temp) == CONST_INT)
8356 if (INTVAL (temp) != 0)
8357 emit_move_insn (target, const1_rtx);
8358 else
8359 emit_move_insn (target, const0_rtx);
8361 return target;
8364 if (temp != original_target)
8366 enum machine_mode mode1 = GET_MODE (temp);
8367 if (mode1 == VOIDmode)
8368 mode1 = tmode != VOIDmode ? tmode : mode;
8370 temp = copy_to_mode_reg (mode1, temp);
8373 op1 = gen_label_rtx ();
8374 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8375 GET_MODE (temp), unsignedp, op1);
8376 emit_move_insn (temp, const1_rtx);
8377 emit_label (op1);
8378 return temp;
8381 /* If no set-flag instruction, must generate a conditional store
8382 into a temporary variable. Drop through and handle this
8383 like && and ||. */
8385 if (! ignore
8386 && (target == 0
8387 || modifier == EXPAND_STACK_PARM
8388 || ! safe_from_p (target, exp, 1)
8389 /* Make sure we don't have a hard reg (such as function's return
8390 value) live across basic blocks, if not optimizing. */
8391 || (!optimize && REG_P (target)
8392 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8393 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8395 if (target)
8396 emit_move_insn (target, const0_rtx);
8398 op1 = gen_label_rtx ();
8399 jumpifnot (exp, op1);
8401 if (target)
8402 emit_move_insn (target, const1_rtx);
8404 emit_label (op1);
8405 return ignore ? const0_rtx : target;
8407 case TRUTH_NOT_EXPR:
8408 if (modifier == EXPAND_STACK_PARM)
8409 target = 0;
8410 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8411 /* The parser is careful to generate TRUTH_NOT_EXPR
8412 only with operands that are always zero or one. */
8413 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8414 target, 1, OPTAB_LIB_WIDEN);
8415 gcc_assert (temp);
8416 return temp;
8418 case STATEMENT_LIST:
8420 tree_stmt_iterator iter;
8422 gcc_assert (ignore);
8424 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8425 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8427 return const0_rtx;
8429 case COND_EXPR:
8430 /* A COND_EXPR with its type being VOID_TYPE represents a
8431 conditional jump and is handled in
8432 expand_gimple_cond_expr. */
8433 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8435 /* Note that COND_EXPRs whose type is a structure or union
8436 are required to be constructed to contain assignments of
8437 a temporary variable, so that we can evaluate them here
8438 for side effect only. If type is void, we must do likewise. */
8440 gcc_assert (!TREE_ADDRESSABLE (type)
8441 && !ignore
8442 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8443 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8445 /* If we are not to produce a result, we have no target. Otherwise,
8446 if a target was specified use it; it will not be used as an
8447 intermediate target unless it is safe. If no target, use a
8448 temporary. */
8450 if (modifier != EXPAND_STACK_PARM
8451 && original_target
8452 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8453 && GET_MODE (original_target) == mode
8454 #ifdef HAVE_conditional_move
8455 && (! can_conditionally_move_p (mode)
8456 || REG_P (original_target))
8457 #endif
8458 && !MEM_P (original_target))
8459 temp = original_target;
8460 else
8461 temp = assign_temp (type, 0, 0, 1);
8463 do_pending_stack_adjust ();
8464 NO_DEFER_POP;
8465 op0 = gen_label_rtx ();
8466 op1 = gen_label_rtx ();
8467 jumpifnot (TREE_OPERAND (exp, 0), op0);
8468 store_expr (TREE_OPERAND (exp, 1), temp,
8469 modifier == EXPAND_STACK_PARM);
8471 emit_jump_insn (gen_jump (op1));
8472 emit_barrier ();
8473 emit_label (op0);
8474 store_expr (TREE_OPERAND (exp, 2), temp,
8475 modifier == EXPAND_STACK_PARM);
8477 emit_label (op1);
8478 OK_DEFER_POP;
8479 return temp;
8481 case VEC_COND_EXPR:
8482 target = expand_vec_cond_expr (exp, target);
8483 return target;
8485 case MODIFY_EXPR:
8487 tree lhs = TREE_OPERAND (exp, 0);
8488 tree rhs = TREE_OPERAND (exp, 1);
8490 gcc_assert (ignore);
8492 /* Check for |= or &= of a bitfield of size one into another bitfield
8493 of size 1. In this case, (unless we need the result of the
8494 assignment) we can do this more efficiently with a
8495 test followed by an assignment, if necessary.
8497 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8498 things change so we do, this code should be enhanced to
8499 support it. */
8500 if (TREE_CODE (lhs) == COMPONENT_REF
8501 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8502 || TREE_CODE (rhs) == BIT_AND_EXPR)
8503 && TREE_OPERAND (rhs, 0) == lhs
8504 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8505 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8506 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8508 rtx label = gen_label_rtx ();
8509 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8510 do_jump (TREE_OPERAND (rhs, 1),
8511 value ? label : 0,
8512 value ? 0 : label);
8513 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8514 do_pending_stack_adjust ();
8515 emit_label (label);
8516 return const0_rtx;
8519 expand_assignment (lhs, rhs);
8521 return const0_rtx;
8524 case RETURN_EXPR:
8525 if (!TREE_OPERAND (exp, 0))
8526 expand_null_return ();
8527 else
8528 expand_return (TREE_OPERAND (exp, 0));
8529 return const0_rtx;
8531 case ADDR_EXPR:
8532 return expand_expr_addr_expr (exp, target, tmode, modifier);
8534 case COMPLEX_EXPR:
8535 /* Get the rtx code of the operands. */
8536 op0 = expand_normal (TREE_OPERAND (exp, 0));
8537 op1 = expand_normal (TREE_OPERAND (exp, 1));
8539 if (!target)
8540 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8542 /* Move the real (op0) and imaginary (op1) parts to their location. */
8543 write_complex_part (target, op0, false);
8544 write_complex_part (target, op1, true);
8546 return target;
8548 case REALPART_EXPR:
8549 op0 = expand_normal (TREE_OPERAND (exp, 0));
8550 return read_complex_part (op0, false);
8552 case IMAGPART_EXPR:
8553 op0 = expand_normal (TREE_OPERAND (exp, 0));
8554 return read_complex_part (op0, true);
8556 case RESX_EXPR:
8557 expand_resx_expr (exp);
8558 return const0_rtx;
8560 case TRY_CATCH_EXPR:
8561 case CATCH_EXPR:
8562 case EH_FILTER_EXPR:
8563 case TRY_FINALLY_EXPR:
8564 /* Lowered by tree-eh.c. */
8565 gcc_unreachable ();
8567 case WITH_CLEANUP_EXPR:
8568 case CLEANUP_POINT_EXPR:
8569 case TARGET_EXPR:
8570 case CASE_LABEL_EXPR:
8571 case VA_ARG_EXPR:
8572 case BIND_EXPR:
8573 case INIT_EXPR:
8574 case CONJ_EXPR:
8575 case COMPOUND_EXPR:
8576 case PREINCREMENT_EXPR:
8577 case PREDECREMENT_EXPR:
8578 case POSTINCREMENT_EXPR:
8579 case POSTDECREMENT_EXPR:
8580 case LOOP_EXPR:
8581 case EXIT_EXPR:
8582 case TRUTH_ANDIF_EXPR:
8583 case TRUTH_ORIF_EXPR:
8584 /* Lowered by gimplify.c. */
8585 gcc_unreachable ();
8587 case EXC_PTR_EXPR:
8588 return get_exception_pointer (cfun);
8590 case FILTER_EXPR:
8591 return get_exception_filter (cfun);
8593 case FDESC_EXPR:
8594 /* Function descriptors are not valid except for as
8595 initialization constants, and should not be expanded. */
8596 gcc_unreachable ();
8598 case SWITCH_EXPR:
8599 expand_case (exp);
8600 return const0_rtx;
8602 case LABEL_EXPR:
8603 expand_label (TREE_OPERAND (exp, 0));
8604 return const0_rtx;
8606 case ASM_EXPR:
8607 expand_asm_expr (exp);
8608 return const0_rtx;
8610 case WITH_SIZE_EXPR:
8611 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8612 have pulled out the size to use in whatever context it needed. */
8613 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8614 modifier, alt_rtl);
8616 case REALIGN_LOAD_EXPR:
8618 tree oprnd0 = TREE_OPERAND (exp, 0);
8619 tree oprnd1 = TREE_OPERAND (exp, 1);
8620 tree oprnd2 = TREE_OPERAND (exp, 2);
8621 rtx op2;
8623 this_optab = optab_for_tree_code (code, type);
8624 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8625 op2 = expand_normal (oprnd2);
8626 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8627 target, unsignedp);
8628 gcc_assert (temp);
8629 return temp;
8632 case DOT_PROD_EXPR:
8634 tree oprnd0 = TREE_OPERAND (exp, 0);
8635 tree oprnd1 = TREE_OPERAND (exp, 1);
8636 tree oprnd2 = TREE_OPERAND (exp, 2);
8637 rtx op2;
8639 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8640 op2 = expand_normal (oprnd2);
8641 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8642 target, unsignedp);
8643 return target;
8646 case WIDEN_SUM_EXPR:
8648 tree oprnd0 = TREE_OPERAND (exp, 0);
8649 tree oprnd1 = TREE_OPERAND (exp, 1);
8651 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8652 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8653 target, unsignedp);
8654 return target;
8657 case REDUC_MAX_EXPR:
8658 case REDUC_MIN_EXPR:
8659 case REDUC_PLUS_EXPR:
8661 op0 = expand_normal (TREE_OPERAND (exp, 0));
8662 this_optab = optab_for_tree_code (code, type);
8663 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8664 gcc_assert (temp);
8665 return temp;
8668 case VEC_LSHIFT_EXPR:
8669 case VEC_RSHIFT_EXPR:
8671 target = expand_vec_shift_expr (exp, target);
8672 return target;
8675 default:
8676 return lang_hooks.expand_expr (exp, original_target, tmode,
8677 modifier, alt_rtl);
8680 /* Here to do an ordinary binary operator. */
8681 binop:
8682 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8683 subtarget, &op0, &op1, 0);
8684 binop2:
8685 this_optab = optab_for_tree_code (code, type);
8686 binop3:
8687 if (modifier == EXPAND_STACK_PARM)
8688 target = 0;
8689 temp = expand_binop (mode, this_optab, op0, op1, target,
8690 unsignedp, OPTAB_LIB_WIDEN);
8691 gcc_assert (temp);
8692 return REDUCE_BIT_FIELD (temp);
8694 #undef REDUCE_BIT_FIELD
8696 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8697 signedness of TYPE), possibly returning the result in TARGET. */
8698 static rtx
8699 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8701 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8702 if (target && GET_MODE (target) != GET_MODE (exp))
8703 target = 0;
8704 if (TYPE_UNSIGNED (type))
8706 rtx mask;
8707 if (prec < HOST_BITS_PER_WIDE_INT)
8708 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8709 GET_MODE (exp));
8710 else
8711 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8712 ((unsigned HOST_WIDE_INT) 1
8713 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8714 GET_MODE (exp));
8715 return expand_and (GET_MODE (exp), exp, mask, target);
8717 else
8719 tree count = build_int_cst (NULL_TREE,
8720 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8721 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8722 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8726 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8727 when applied to the address of EXP produces an address known to be
8728 aligned more than BIGGEST_ALIGNMENT. */
8730 static int
8731 is_aligning_offset (tree offset, tree exp)
8733 /* Strip off any conversions. */
8734 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8735 || TREE_CODE (offset) == NOP_EXPR
8736 || TREE_CODE (offset) == CONVERT_EXPR)
8737 offset = TREE_OPERAND (offset, 0);
8739 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8740 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8741 if (TREE_CODE (offset) != BIT_AND_EXPR
8742 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8743 || compare_tree_int (TREE_OPERAND (offset, 1),
8744 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8745 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8746 return 0;
8748 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8749 It must be NEGATE_EXPR. Then strip any more conversions. */
8750 offset = TREE_OPERAND (offset, 0);
8751 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8752 || TREE_CODE (offset) == NOP_EXPR
8753 || TREE_CODE (offset) == CONVERT_EXPR)
8754 offset = TREE_OPERAND (offset, 0);
8756 if (TREE_CODE (offset) != NEGATE_EXPR)
8757 return 0;
8759 offset = TREE_OPERAND (offset, 0);
8760 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8761 || TREE_CODE (offset) == NOP_EXPR
8762 || TREE_CODE (offset) == CONVERT_EXPR)
8763 offset = TREE_OPERAND (offset, 0);
8765 /* This must now be the address of EXP. */
8766 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8769 /* Return the tree node if an ARG corresponds to a string constant or zero
8770 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8771 in bytes within the string that ARG is accessing. The type of the
8772 offset will be `sizetype'. */
8774 tree
8775 string_constant (tree arg, tree *ptr_offset)
8777 tree array, offset;
8778 STRIP_NOPS (arg);
8780 if (TREE_CODE (arg) == ADDR_EXPR)
8782 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8784 *ptr_offset = size_zero_node;
8785 return TREE_OPERAND (arg, 0);
8787 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8789 array = TREE_OPERAND (arg, 0);
8790 offset = size_zero_node;
8792 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8794 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8795 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8796 if (TREE_CODE (array) != STRING_CST
8797 && TREE_CODE (array) != VAR_DECL)
8798 return 0;
8800 else
8801 return 0;
8803 else if (TREE_CODE (arg) == PLUS_EXPR)
8805 tree arg0 = TREE_OPERAND (arg, 0);
8806 tree arg1 = TREE_OPERAND (arg, 1);
8808 STRIP_NOPS (arg0);
8809 STRIP_NOPS (arg1);
8811 if (TREE_CODE (arg0) == ADDR_EXPR
8812 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8813 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8815 array = TREE_OPERAND (arg0, 0);
8816 offset = arg1;
8818 else if (TREE_CODE (arg1) == ADDR_EXPR
8819 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8820 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8822 array = TREE_OPERAND (arg1, 0);
8823 offset = arg0;
8825 else
8826 return 0;
8828 else
8829 return 0;
8831 if (TREE_CODE (array) == STRING_CST)
8833 *ptr_offset = fold_convert (sizetype, offset);
8834 return array;
8836 else if (TREE_CODE (array) == VAR_DECL)
8838 int length;
8840 /* Variables initialized to string literals can be handled too. */
8841 if (DECL_INITIAL (array) == NULL_TREE
8842 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8843 return 0;
8845 /* If they are read-only, non-volatile and bind locally. */
8846 if (! TREE_READONLY (array)
8847 || TREE_SIDE_EFFECTS (array)
8848 || ! targetm.binds_local_p (array))
8849 return 0;
8851 /* Avoid const char foo[4] = "abcde"; */
8852 if (DECL_SIZE_UNIT (array) == NULL_TREE
8853 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8854 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8855 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8856 return 0;
8858 /* If variable is bigger than the string literal, OFFSET must be constant
8859 and inside of the bounds of the string literal. */
8860 offset = fold_convert (sizetype, offset);
8861 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8862 && (! host_integerp (offset, 1)
8863 || compare_tree_int (offset, length) >= 0))
8864 return 0;
8866 *ptr_offset = offset;
8867 return DECL_INITIAL (array);
8870 return 0;
8873 /* Generate code to calculate EXP using a store-flag instruction
8874 and return an rtx for the result. EXP is either a comparison
8875 or a TRUTH_NOT_EXPR whose operand is a comparison.
8877 If TARGET is nonzero, store the result there if convenient.
8879 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8880 cheap.
8882 Return zero if there is no suitable set-flag instruction
8883 available on this machine.
8885 Once expand_expr has been called on the arguments of the comparison,
8886 we are committed to doing the store flag, since it is not safe to
8887 re-evaluate the expression. We emit the store-flag insn by calling
8888 emit_store_flag, but only expand the arguments if we have a reason
8889 to believe that emit_store_flag will be successful. If we think that
8890 it will, but it isn't, we have to simulate the store-flag with a
8891 set/jump/set sequence. */
8893 static rtx
8894 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8896 enum rtx_code code;
8897 tree arg0, arg1, type;
8898 tree tem;
8899 enum machine_mode operand_mode;
8900 int invert = 0;
8901 int unsignedp;
8902 rtx op0, op1;
8903 enum insn_code icode;
8904 rtx subtarget = target;
8905 rtx result, label;
8907 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8908 result at the end. We can't simply invert the test since it would
8909 have already been inverted if it were valid. This case occurs for
8910 some floating-point comparisons. */
8912 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8913 invert = 1, exp = TREE_OPERAND (exp, 0);
8915 arg0 = TREE_OPERAND (exp, 0);
8916 arg1 = TREE_OPERAND (exp, 1);
8918 /* Don't crash if the comparison was erroneous. */
8919 if (arg0 == error_mark_node || arg1 == error_mark_node)
8920 return const0_rtx;
8922 type = TREE_TYPE (arg0);
8923 operand_mode = TYPE_MODE (type);
8924 unsignedp = TYPE_UNSIGNED (type);
8926 /* We won't bother with BLKmode store-flag operations because it would mean
8927 passing a lot of information to emit_store_flag. */
8928 if (operand_mode == BLKmode)
8929 return 0;
8931 /* We won't bother with store-flag operations involving function pointers
8932 when function pointers must be canonicalized before comparisons. */
8933 #ifdef HAVE_canonicalize_funcptr_for_compare
8934 if (HAVE_canonicalize_funcptr_for_compare
8935 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8936 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8937 == FUNCTION_TYPE))
8938 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8939 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8940 == FUNCTION_TYPE))))
8941 return 0;
8942 #endif
8944 STRIP_NOPS (arg0);
8945 STRIP_NOPS (arg1);
8947 /* Get the rtx comparison code to use. We know that EXP is a comparison
8948 operation of some type. Some comparisons against 1 and -1 can be
8949 converted to comparisons with zero. Do so here so that the tests
8950 below will be aware that we have a comparison with zero. These
8951 tests will not catch constants in the first operand, but constants
8952 are rarely passed as the first operand. */
8954 switch (TREE_CODE (exp))
8956 case EQ_EXPR:
8957 code = EQ;
8958 break;
8959 case NE_EXPR:
8960 code = NE;
8961 break;
8962 case LT_EXPR:
8963 if (integer_onep (arg1))
8964 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8965 else
8966 code = unsignedp ? LTU : LT;
8967 break;
8968 case LE_EXPR:
8969 if (! unsignedp && integer_all_onesp (arg1))
8970 arg1 = integer_zero_node, code = LT;
8971 else
8972 code = unsignedp ? LEU : LE;
8973 break;
8974 case GT_EXPR:
8975 if (! unsignedp && integer_all_onesp (arg1))
8976 arg1 = integer_zero_node, code = GE;
8977 else
8978 code = unsignedp ? GTU : GT;
8979 break;
8980 case GE_EXPR:
8981 if (integer_onep (arg1))
8982 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8983 else
8984 code = unsignedp ? GEU : GE;
8985 break;
8987 case UNORDERED_EXPR:
8988 code = UNORDERED;
8989 break;
8990 case ORDERED_EXPR:
8991 code = ORDERED;
8992 break;
8993 case UNLT_EXPR:
8994 code = UNLT;
8995 break;
8996 case UNLE_EXPR:
8997 code = UNLE;
8998 break;
8999 case UNGT_EXPR:
9000 code = UNGT;
9001 break;
9002 case UNGE_EXPR:
9003 code = UNGE;
9004 break;
9005 case UNEQ_EXPR:
9006 code = UNEQ;
9007 break;
9008 case LTGT_EXPR:
9009 code = LTGT;
9010 break;
9012 default:
9013 gcc_unreachable ();
9016 /* Put a constant second. */
9017 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9019 tem = arg0; arg0 = arg1; arg1 = tem;
9020 code = swap_condition (code);
9023 /* If this is an equality or inequality test of a single bit, we can
9024 do this by shifting the bit being tested to the low-order bit and
9025 masking the result with the constant 1. If the condition was EQ,
9026 we xor it with 1. This does not require an scc insn and is faster
9027 than an scc insn even if we have it.
9029 The code to make this transformation was moved into fold_single_bit_test,
9030 so we just call into the folder and expand its result. */
9032 if ((code == NE || code == EQ)
9033 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9034 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9036 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9037 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9038 arg0, arg1, type),
9039 target, VOIDmode, EXPAND_NORMAL);
9042 /* Now see if we are likely to be able to do this. Return if not. */
9043 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9044 return 0;
9046 icode = setcc_gen_code[(int) code];
9047 if (icode == CODE_FOR_nothing
9048 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9050 /* We can only do this if it is one of the special cases that
9051 can be handled without an scc insn. */
9052 if ((code == LT && integer_zerop (arg1))
9053 || (! only_cheap && code == GE && integer_zerop (arg1)))
9055 else if (! only_cheap && (code == NE || code == EQ)
9056 && TREE_CODE (type) != REAL_TYPE
9057 && ((abs_optab->handlers[(int) operand_mode].insn_code
9058 != CODE_FOR_nothing)
9059 || (ffs_optab->handlers[(int) operand_mode].insn_code
9060 != CODE_FOR_nothing)))
9062 else
9063 return 0;
9066 if (! get_subtarget (target)
9067 || GET_MODE (subtarget) != operand_mode)
9068 subtarget = 0;
9070 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9072 if (target == 0)
9073 target = gen_reg_rtx (mode);
9075 result = emit_store_flag (target, code, op0, op1,
9076 operand_mode, unsignedp, 1);
9078 if (result)
9080 if (invert)
9081 result = expand_binop (mode, xor_optab, result, const1_rtx,
9082 result, 0, OPTAB_LIB_WIDEN);
9083 return result;
9086 /* If this failed, we have to do this with set/compare/jump/set code. */
9087 if (!REG_P (target)
9088 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9089 target = gen_reg_rtx (GET_MODE (target));
9091 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9092 result = compare_from_rtx (op0, op1, code, unsignedp,
9093 operand_mode, NULL_RTX);
9094 if (GET_CODE (result) == CONST_INT)
9095 return (((result == const0_rtx && ! invert)
9096 || (result != const0_rtx && invert))
9097 ? const0_rtx : const1_rtx);
9099 /* The code of RESULT may not match CODE if compare_from_rtx
9100 decided to swap its operands and reverse the original code.
9102 We know that compare_from_rtx returns either a CONST_INT or
9103 a new comparison code, so it is safe to just extract the
9104 code from RESULT. */
9105 code = GET_CODE (result);
9107 label = gen_label_rtx ();
9108 gcc_assert (bcc_gen_fctn[(int) code]);
9110 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9111 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9112 emit_label (label);
9114 return target;
9118 /* Stubs in case we haven't got a casesi insn. */
9119 #ifndef HAVE_casesi
9120 # define HAVE_casesi 0
9121 # define gen_casesi(a, b, c, d, e) (0)
9122 # define CODE_FOR_casesi CODE_FOR_nothing
9123 #endif
9125 /* If the machine does not have a case insn that compares the bounds,
9126 this means extra overhead for dispatch tables, which raises the
9127 threshold for using them. */
9128 #ifndef CASE_VALUES_THRESHOLD
9129 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9130 #endif /* CASE_VALUES_THRESHOLD */
9132 unsigned int
9133 case_values_threshold (void)
9135 return CASE_VALUES_THRESHOLD;
9138 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9139 0 otherwise (i.e. if there is no casesi instruction). */
9141 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9142 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9144 enum machine_mode index_mode = SImode;
9145 int index_bits = GET_MODE_BITSIZE (index_mode);
9146 rtx op1, op2, index;
9147 enum machine_mode op_mode;
9149 if (! HAVE_casesi)
9150 return 0;
9152 /* Convert the index to SImode. */
9153 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9155 enum machine_mode omode = TYPE_MODE (index_type);
9156 rtx rangertx = expand_normal (range);
9158 /* We must handle the endpoints in the original mode. */
9159 index_expr = build2 (MINUS_EXPR, index_type,
9160 index_expr, minval);
9161 minval = integer_zero_node;
9162 index = expand_normal (index_expr);
9163 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9164 omode, 1, default_label);
9165 /* Now we can safely truncate. */
9166 index = convert_to_mode (index_mode, index, 0);
9168 else
9170 if (TYPE_MODE (index_type) != index_mode)
9172 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9173 index_expr = fold_convert (index_type, index_expr);
9176 index = expand_normal (index_expr);
9179 do_pending_stack_adjust ();
9181 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9182 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9183 (index, op_mode))
9184 index = copy_to_mode_reg (op_mode, index);
9186 op1 = expand_normal (minval);
9188 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9189 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9190 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9191 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9192 (op1, op_mode))
9193 op1 = copy_to_mode_reg (op_mode, op1);
9195 op2 = expand_normal (range);
9197 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9198 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9199 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9200 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9201 (op2, op_mode))
9202 op2 = copy_to_mode_reg (op_mode, op2);
9204 emit_jump_insn (gen_casesi (index, op1, op2,
9205 table_label, default_label));
9206 return 1;
9209 /* Attempt to generate a tablejump instruction; same concept. */
9210 #ifndef HAVE_tablejump
9211 #define HAVE_tablejump 0
9212 #define gen_tablejump(x, y) (0)
9213 #endif
9215 /* Subroutine of the next function.
9217 INDEX is the value being switched on, with the lowest value
9218 in the table already subtracted.
9219 MODE is its expected mode (needed if INDEX is constant).
9220 RANGE is the length of the jump table.
9221 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9223 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9224 index value is out of range. */
9226 static void
9227 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9228 rtx default_label)
9230 rtx temp, vector;
9232 if (INTVAL (range) > cfun->max_jumptable_ents)
9233 cfun->max_jumptable_ents = INTVAL (range);
9235 /* Do an unsigned comparison (in the proper mode) between the index
9236 expression and the value which represents the length of the range.
9237 Since we just finished subtracting the lower bound of the range
9238 from the index expression, this comparison allows us to simultaneously
9239 check that the original index expression value is both greater than
9240 or equal to the minimum value of the range and less than or equal to
9241 the maximum value of the range. */
9243 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9244 default_label);
9246 /* If index is in range, it must fit in Pmode.
9247 Convert to Pmode so we can index with it. */
9248 if (mode != Pmode)
9249 index = convert_to_mode (Pmode, index, 1);
9251 /* Don't let a MEM slip through, because then INDEX that comes
9252 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9253 and break_out_memory_refs will go to work on it and mess it up. */
9254 #ifdef PIC_CASE_VECTOR_ADDRESS
9255 if (flag_pic && !REG_P (index))
9256 index = copy_to_mode_reg (Pmode, index);
9257 #endif
9259 /* If flag_force_addr were to affect this address
9260 it could interfere with the tricky assumptions made
9261 about addresses that contain label-refs,
9262 which may be valid only very near the tablejump itself. */
9263 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9264 GET_MODE_SIZE, because this indicates how large insns are. The other
9265 uses should all be Pmode, because they are addresses. This code
9266 could fail if addresses and insns are not the same size. */
9267 index = gen_rtx_PLUS (Pmode,
9268 gen_rtx_MULT (Pmode, index,
9269 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9270 gen_rtx_LABEL_REF (Pmode, table_label));
9271 #ifdef PIC_CASE_VECTOR_ADDRESS
9272 if (flag_pic)
9273 index = PIC_CASE_VECTOR_ADDRESS (index);
9274 else
9275 #endif
9276 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9277 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9278 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9279 convert_move (temp, vector, 0);
9281 emit_jump_insn (gen_tablejump (temp, table_label));
9283 /* If we are generating PIC code or if the table is PC-relative, the
9284 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9285 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9286 emit_barrier ();
9290 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9291 rtx table_label, rtx default_label)
9293 rtx index;
9295 if (! HAVE_tablejump)
9296 return 0;
9298 index_expr = fold_build2 (MINUS_EXPR, index_type,
9299 fold_convert (index_type, index_expr),
9300 fold_convert (index_type, minval));
9301 index = expand_normal (index_expr);
9302 do_pending_stack_adjust ();
9304 do_tablejump (index, TYPE_MODE (index_type),
9305 convert_modes (TYPE_MODE (index_type),
9306 TYPE_MODE (TREE_TYPE (range)),
9307 expand_normal (range),
9308 TYPE_UNSIGNED (TREE_TYPE (range))),
9309 table_label, default_label);
9310 return 1;
9313 /* Nonzero if the mode is a valid vector mode for this architecture.
9314 This returns nonzero even if there is no hardware support for the
9315 vector mode, but we can emulate with narrower modes. */
9318 vector_mode_valid_p (enum machine_mode mode)
9320 enum mode_class class = GET_MODE_CLASS (mode);
9321 enum machine_mode innermode;
9323 /* Doh! What's going on? */
9324 if (class != MODE_VECTOR_INT
9325 && class != MODE_VECTOR_FLOAT)
9326 return 0;
9328 /* Hardware support. Woo hoo! */
9329 if (targetm.vector_mode_supported_p (mode))
9330 return 1;
9332 innermode = GET_MODE_INNER (mode);
9334 /* We should probably return 1 if requesting V4DI and we have no DI,
9335 but we have V2DI, but this is probably very unlikely. */
9337 /* If we have support for the inner mode, we can safely emulate it.
9338 We may not have V2DI, but me can emulate with a pair of DIs. */
9339 return targetm.scalar_mode_supported_p (innermode);
9342 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9343 static rtx
9344 const_vector_from_tree (tree exp)
9346 rtvec v;
9347 int units, i;
9348 tree link, elt;
9349 enum machine_mode inner, mode;
9351 mode = TYPE_MODE (TREE_TYPE (exp));
9353 if (initializer_zerop (exp))
9354 return CONST0_RTX (mode);
9356 units = GET_MODE_NUNITS (mode);
9357 inner = GET_MODE_INNER (mode);
9359 v = rtvec_alloc (units);
9361 link = TREE_VECTOR_CST_ELTS (exp);
9362 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9364 elt = TREE_VALUE (link);
9366 if (TREE_CODE (elt) == REAL_CST)
9367 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9368 inner);
9369 else
9370 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9371 TREE_INT_CST_HIGH (elt),
9372 inner);
9375 /* Initialize remaining elements to 0. */
9376 for (; i < units; ++i)
9377 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9379 return gen_rtx_CONST_VECTOR (mode, v);
9381 #include "gt-expr.h"