official-gcc.git / old-autovect-branch / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
104 };
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
110 {
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
120 };
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static rtx clear_storage_via_libcall (rtx, rtx, bool);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, int);
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
150 static int is_aligning_offset (tree, tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
169 /* Record for each mode whether we can float-extend from memory. */
171 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173 /* This macro is used to determine whether move_by_pieces should be called
174 to perform a structure copy. */
175 #ifndef MOVE_BY_PIECES_P
176 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
178 < (unsigned int) MOVE_RATIO)
179 #endif
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
186 < (unsigned int) CLEAR_RATIO)
187 #endif
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero, or
191 to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) MOVE_RATIO)
196 #endif
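/* Editor's sketch (not part of the original file): how the *_BY_PIECES_P
   predicates above are typically consulted when choosing between an inline
   expansion and a block-move pattern or libcall.  The helper name
   choose_inline_copy_p is hypothetical.  */
#if 0
static bool
choose_inline_copy_p (unsigned HOST_WIDE_INT size, unsigned int align)
{
  /* Cheap enough to expand as a short sequence of scalar moves?  */
  if (MOVE_BY_PIECES_P (size, align))
    return true;
  /* Otherwise fall back to a movmem pattern or a memcpy libcall.  */
  return false;
}
#endif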
198 /* This array records the insn_code of insns to perform block moves. */
199 enum insn_code movmem_optab[NUM_MACHINE_MODES];
201 /* This array records the insn_code of insns to perform block sets. */
202 enum insn_code setmem_optab[NUM_MACHINE_MODES];
204 /* These arrays record the insn_code of three different kinds of insns
205 to perform block compares. */
206 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* Synchronization primitives. */
211 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
236 #ifndef SLOW_UNALIGNED_ACCESS
237 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
238 #endif
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
243 void
244 init_expr_once (void)
246 rtx insn, pat;
247 enum machine_mode mode;
248 int num_clobbers;
249 rtx mem, mem1;
250 rtx reg;
252 /* Try indexing by frame ptr and try by stack ptr.
253 It is known that on the Convex the stack ptr isn't a valid index.
254 With luck, one or the other is valid on any machine. */
255 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
256 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258 /* A scratch register we can modify in-place below to avoid
259 useless RTL allocations. */
260 reg = gen_rtx_REG (VOIDmode, -1);
262 insn = rtx_alloc (INSN);
263 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
264 PATTERN (insn) = pat;
266 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
267 mode = (enum machine_mode) ((int) mode + 1))
269 int regno;
271 direct_load[(int) mode] = direct_store[(int) mode] = 0;
272 PUT_MODE (mem, mode);
273 PUT_MODE (mem1, mode);
274 PUT_MODE (reg, mode);
276 /* See if there is some register that can be used in this mode and
277 directly loaded or stored from memory. */
279 if (mode != VOIDmode && mode != BLKmode)
280 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
281 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282 regno++)
284 if (! HARD_REGNO_MODE_OK (regno, mode))
285 continue;
287 REGNO (reg) = regno;
289 SET_SRC (pat) = mem;
290 SET_DEST (pat) = reg;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_load[(int) mode] = 1;
294 SET_SRC (pat) = mem1;
295 SET_DEST (pat) = reg;
296 if (recog (pat, insn, &num_clobbers) >= 0)
297 direct_load[(int) mode] = 1;
299 SET_SRC (pat) = reg;
300 SET_DEST (pat) = mem;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_store[(int) mode] = 1;
304 SET_SRC (pat) = reg;
305 SET_DEST (pat) = mem1;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_store[(int) mode] = 1;
311 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
314 mode = GET_MODE_WIDER_MODE (mode))
316 enum machine_mode srcmode;
317 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
318 srcmode = GET_MODE_WIDER_MODE (srcmode))
320 enum insn_code ic;
322 ic = can_extend_p (mode, srcmode, 0);
323 if (ic == CODE_FOR_nothing)
324 continue;
326 PUT_MODE (mem, srcmode);
328 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
329 float_extend_from_mem[mode][srcmode] = true;
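/* Editor's sketch (not part of the original file): the direct_load and
   direct_store tables filled in by init_expr_once above are later consulted
   roughly like this; the helper name is hypothetical.  */
#if 0
static bool
mode_loads_directly_p (enum machine_mode mode)
{
  /* Nonzero when init_expr_once found some hard register of MODE that a
     recognized insn can load straight from memory.  */
  return direct_load[(int) mode] != 0;
}
#endif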
334 /* This is run at the start of compiling a function. */
336 void
337 init_expr (void)
339 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
342 /* Copy data from FROM to TO, where the machine modes are not the same.
343 Both modes may be integer, or both may be floating.
344 UNSIGNEDP should be nonzero if FROM is an unsigned type.
345 This causes zero-extension instead of sign-extension. */
347 void
348 convert_move (rtx to, rtx from, int unsignedp)
350 enum machine_mode to_mode = GET_MODE (to);
351 enum machine_mode from_mode = GET_MODE (from);
352 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
353 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
354 enum insn_code code;
355 rtx libcall;
357 /* rtx code for making an equivalent value. */
358 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
359 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
362 gcc_assert (to_real == from_real);
364 /* If the source and destination are already the same, then there's
365 nothing to do. */
366 if (to == from)
367 return;
369 /* If FROM is a SUBREG that indicates that we have already done at least
370 the required extension, strip it. We don't handle such SUBREGs as
371 TO here. */
373 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
374 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
375 >= GET_MODE_SIZE (to_mode))
376 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
377 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
381 if (to_mode == from_mode
382 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 emit_move_insn (to, from);
385 return;
388 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
392 if (VECTOR_MODE_P (to_mode))
393 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
394 else
395 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
397 emit_move_insn (to, from);
398 return;
401 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
404 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
405 return;
408 if (to_real)
410 rtx value, insns;
411 convert_optab tab;
413 gcc_assert (GET_MODE_PRECISION (from_mode)
414 != GET_MODE_PRECISION (to_mode));
416 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
417 tab = sext_optab;
418 else
419 tab = trunc_optab;
421 /* Try converting directly if the insn is supported. */
423 code = tab->handlers[to_mode][from_mode].insn_code;
424 if (code != CODE_FOR_nothing)
426 emit_unop_insn (code, to, from,
427 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
428 return;
431 /* Otherwise use a libcall. */
432 libcall = tab->handlers[to_mode][from_mode].libfunc;
434 /* Is this conversion implemented yet? */
435 gcc_assert (libcall);
437 start_sequence ();
438 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
439 1, from, from_mode);
440 insns = get_insns ();
441 end_sequence ();
442 emit_libcall_block (insns, to, value,
443 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
444 from)
445 : gen_rtx_FLOAT_EXTEND (to_mode, from));
446 return;
449 /* Handle pointer conversion. */ /* SPEE 900220. */
450 /* Targets are expected to provide conversion insns between PxImode and
451 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
452 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
454 enum machine_mode full_mode
455 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
457 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
458 != CODE_FOR_nothing);
460 if (full_mode != from_mode)
461 from = convert_to_mode (full_mode, from, unsignedp);
462 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
463 to, from, UNKNOWN);
464 return;
466 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
468 rtx new_from;
469 enum machine_mode full_mode
470 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
472 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
473 != CODE_FOR_nothing);
475 if (to_mode == full_mode)
477 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
478 to, from, UNKNOWN);
479 return;
482 new_from = gen_reg_rtx (full_mode);
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 new_from, from, UNKNOWN);
486 /* else proceed to integer conversions below. */
487 from_mode = full_mode;
488 from = new_from;
491 /* Now both modes are integers. */
493 /* Handle expanding beyond a word. */
494 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
495 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
497 rtx insns;
498 rtx lowpart;
499 rtx fill_value;
500 rtx lowfrom;
501 int i;
502 enum machine_mode lowpart_mode;
503 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
505 /* Try converting directly if the insn is supported. */
506 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
507 != CODE_FOR_nothing)
509 /* If FROM is a SUBREG, put it into a register. Do this
510 so that we always generate the same set of insns for
511 better cse'ing; if an intermediate assignment occurred,
512 we won't be doing the operation directly on the SUBREG. */
513 if (optimize > 0 && GET_CODE (from) == SUBREG)
514 from = force_reg (from_mode, from);
515 emit_unop_insn (code, to, from, equiv_code);
516 return;
518 /* Next, try converting via full word. */
519 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
520 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
521 != CODE_FOR_nothing))
523 if (REG_P (to))
525 if (reg_overlap_mentioned_p (to, from))
526 from = force_reg (from_mode, from);
527 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
529 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
530 emit_unop_insn (code, to,
531 gen_lowpart (word_mode, to), equiv_code);
532 return;
535 /* No special multiword conversion insn; do it by hand. */
536 start_sequence ();
538 /* Since we will turn this into a no conflict block, we must ensure
539 that the source does not overlap the target. */
541 if (reg_overlap_mentioned_p (to, from))
542 from = force_reg (from_mode, from);
544 /* Get a copy of FROM widened to a word, if necessary. */
545 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
546 lowpart_mode = word_mode;
547 else
548 lowpart_mode = from_mode;
550 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
552 lowpart = gen_lowpart (lowpart_mode, to);
553 emit_move_insn (lowpart, lowfrom);
555 /* Compute the value to put in each remaining word. */
556 if (unsignedp)
557 fill_value = const0_rtx;
558 else
560 #ifdef HAVE_slt
561 if (HAVE_slt
562 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
563 && STORE_FLAG_VALUE == -1)
565 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
566 lowpart_mode, 0);
567 fill_value = gen_reg_rtx (word_mode);
568 emit_insn (gen_slt (fill_value));
570 else
571 #endif
573 fill_value
574 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
575 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
576 NULL_RTX, 0);
577 fill_value = convert_to_mode (word_mode, fill_value, 1);
581 /* Fill the remaining words. */
582 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
584 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
585 rtx subword = operand_subword (to, index, 1, to_mode);
587 gcc_assert (subword);
589 if (fill_value != subword)
590 emit_move_insn (subword, fill_value);
593 insns = get_insns ();
594 end_sequence ();
596 emit_no_conflict_block (insns, to, from, NULL_RTX,
597 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
598 return;
601 /* Truncating multi-word to a word or less. */
602 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
603 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
605 if (!((MEM_P (from)
606 && ! MEM_VOLATILE_P (from)
607 && direct_load[(int) to_mode]
608 && ! mode_dependent_address_p (XEXP (from, 0)))
609 || REG_P (from)
610 || GET_CODE (from) == SUBREG))
611 from = force_reg (from_mode, from);
612 convert_move (to, gen_lowpart (word_mode, from), 0);
613 return;
616 /* Now follow all the conversions between integers
617 no more than a word long. */
619 /* For truncation, usually we can just refer to FROM in a narrower mode. */
620 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
621 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
622 GET_MODE_BITSIZE (from_mode)))
624 if (!((MEM_P (from)
625 && ! MEM_VOLATILE_P (from)
626 && direct_load[(int) to_mode]
627 && ! mode_dependent_address_p (XEXP (from, 0)))
628 || REG_P (from)
629 || GET_CODE (from) == SUBREG))
630 from = force_reg (from_mode, from);
631 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
632 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
633 from = copy_to_reg (from);
634 emit_move_insn (to, gen_lowpart (to_mode, from));
635 return;
638 /* Handle extension. */
639 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
641 /* Convert directly if that works. */
642 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
643 != CODE_FOR_nothing)
645 emit_unop_insn (code, to, from, equiv_code);
646 return;
648 else
650 enum machine_mode intermediate;
651 rtx tmp;
652 tree shift_amount;
654 /* Search for a mode to convert via. */
655 for (intermediate = from_mode; intermediate != VOIDmode;
656 intermediate = GET_MODE_WIDER_MODE (intermediate))
657 if (((can_extend_p (to_mode, intermediate, unsignedp)
658 != CODE_FOR_nothing)
659 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
660 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
661 GET_MODE_BITSIZE (intermediate))))
662 && (can_extend_p (intermediate, from_mode, unsignedp)
663 != CODE_FOR_nothing))
665 convert_move (to, convert_to_mode (intermediate, from,
666 unsignedp), unsignedp);
667 return;
670 /* No suitable intermediate mode.
671 Generate what we need with shifts. */
672 shift_amount = build_int_cst (NULL_TREE,
673 GET_MODE_BITSIZE (to_mode)
674 - GET_MODE_BITSIZE (from_mode));
675 from = gen_lowpart (to_mode, force_reg (from_mode, from));
676 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
677 to, unsignedp);
678 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
679 to, unsignedp);
680 if (tmp != to)
681 emit_move_insn (to, tmp);
682 return;
686 /* Support special truncate insns for certain modes. */
687 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
689 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
690 to, from, UNKNOWN);
691 return;
694 /* Handle truncation of volatile memrefs, and so on;
695 the things that couldn't be truncated directly,
696 and for which there was no special instruction.
698 ??? Code above formerly short-circuited this, for most integer
699 mode pairs, with a force_reg in from_mode followed by a recursive
700 call to this routine. Appears always to have been wrong. */
701 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
703 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
704 emit_move_insn (to, temp);
705 return;
708 /* Mode combination is not recognized. */
709 gcc_unreachable ();
712 /* Return an rtx for a value that would result
713 from converting X to mode MODE.
714 Both X and MODE may be floating, or both integer.
715 UNSIGNEDP is nonzero if X is an unsigned value.
716 This can be done by referring to a part of X in place
717 or by copying to a new temporary with conversion. */
719 rtx
720 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
722 return convert_modes (mode, VOIDmode, x, unsignedp);
725 /* Return an rtx for a value that would result
726 from converting X from mode OLDMODE to mode MODE.
727 Both modes may be floating, or both integer.
728 UNSIGNEDP is nonzero if X is an unsigned value.
730 This can be done by referring to a part of X in place
731 or by copying to a new temporary with conversion.
733 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
735 rtx
736 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
738 rtx temp;
740 /* If FROM is a SUBREG that indicates that we have already done at least
741 the required extension, strip it. */
743 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
744 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
745 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
746 x = gen_lowpart (mode, x);
748 if (GET_MODE (x) != VOIDmode)
749 oldmode = GET_MODE (x);
751 if (mode == oldmode)
752 return x;
754 /* There is one case that we must handle specially: If we are converting
755 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
756 we are to interpret the constant as unsigned, gen_lowpart will do
757 the wrong if the constant appears negative. What we want to do is
758 make the high-order word of the constant zero, not all ones. */
760 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
761 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
762 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
764 HOST_WIDE_INT val = INTVAL (x);
766 if (oldmode != VOIDmode
767 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
769 int width = GET_MODE_BITSIZE (oldmode);
771 /* We need to zero extend VAL. */
772 val &= ((HOST_WIDE_INT) 1 << width) - 1;
775 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
778 /* We can do this with a gen_lowpart if both desired and current modes
779 are integer, and this is either a constant integer, a register, or a
780 non-volatile MEM. Except for the constant case where MODE is no
781 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
783 if ((GET_CODE (x) == CONST_INT
784 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
785 || (GET_MODE_CLASS (mode) == MODE_INT
786 && GET_MODE_CLASS (oldmode) == MODE_INT
787 && (GET_CODE (x) == CONST_DOUBLE
788 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
789 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
790 && direct_load[(int) mode])
791 || (REG_P (x)
792 && (! HARD_REGISTER_P (x)
793 || HARD_REGNO_MODE_OK (REGNO (x), mode))
794 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
795 GET_MODE_BITSIZE (GET_MODE (x)))))))))
797 /* ?? If we don't know OLDMODE, we have to assume here that
798 X does not need sign- or zero-extension. This may not be
799 the case, but it's the best we can do. */
800 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
801 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
803 HOST_WIDE_INT val = INTVAL (x);
804 int width = GET_MODE_BITSIZE (oldmode);
806 /* We must sign or zero-extend in this case. Start by
807 zero-extending, then sign extend if we need to. */
808 val &= ((HOST_WIDE_INT) 1 << width) - 1;
809 if (! unsignedp
810 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
811 val |= (HOST_WIDE_INT) (-1) << width;
813 return gen_int_mode (val, mode);
816 return gen_lowpart (mode, x);
819 /* Converting from an integer constant into MODE is always equivalent to a
820 subreg operation. */
821 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
823 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
824 return simplify_gen_subreg (mode, x, oldmode, 0);
827 temp = gen_reg_rtx (mode);
828 convert_move (temp, x, unsignedp);
829 return temp;
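/* Editor's sketch (not part of the original file): a fragment, as it might
   appear inside an expander, showing typical use of convert_to_mode: widen
   an SImode value to DImode with zero extension because the source is
   treated as unsigned.  The variable names are hypothetical.  */
#if 0
  rtx si_val = gen_reg_rtx (SImode);
  rtx di_val = convert_to_mode (DImode, si_val, /*unsignedp=*/1);
#endif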
832 /* STORE_MAX_PIECES is the number of bytes at a time that we can
833 store efficiently. Due to internal GCC limitations, this is
834 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
835 for an immediate constant. */
837 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
839 /* Determine whether the LEN bytes can be moved by using several move
840 instructions. Return nonzero if a call to move_by_pieces should
841 succeed. */
843 int
844 can_move_by_pieces (unsigned HOST_WIDE_INT len,
845 unsigned int align ATTRIBUTE_UNUSED)
847 return MOVE_BY_PIECES_P (len, align);
850 /* Generate several move instructions to copy LEN bytes from block FROM to
851 block TO. (These are MEM rtx's with BLKmode).
853 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
854 used to push FROM to the stack.
856 ALIGN is the maximum stack alignment we can assume.
858 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
859 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
860 stpcpy. */
862 rtx
863 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
864 unsigned int align, int endp)
866 struct move_by_pieces data;
867 rtx to_addr, from_addr = XEXP (from, 0);
868 unsigned int max_size = MOVE_MAX_PIECES + 1;
869 enum machine_mode mode = VOIDmode, tmode;
870 enum insn_code icode;
872 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
874 data.offset = 0;
875 data.from_addr = from_addr;
876 if (to)
878 to_addr = XEXP (to, 0);
879 data.to = to;
880 data.autinc_to
881 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
882 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
883 data.reverse
884 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
886 else
888 to_addr = NULL_RTX;
889 data.to = NULL_RTX;
890 data.autinc_to = 1;
891 #ifdef STACK_GROWS_DOWNWARD
892 data.reverse = 1;
893 #else
894 data.reverse = 0;
895 #endif
897 data.to_addr = to_addr;
898 data.from = from;
899 data.autinc_from
900 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
901 || GET_CODE (from_addr) == POST_INC
902 || GET_CODE (from_addr) == POST_DEC);
904 data.explicit_inc_from = 0;
905 data.explicit_inc_to = 0;
906 if (data.reverse) data.offset = len;
907 data.len = len;
909 /* If copying requires more than two move insns,
910 copy addresses to registers (to make displacements shorter)
911 and use post-increment if available. */
912 if (!(data.autinc_from && data.autinc_to)
913 && move_by_pieces_ninsns (len, align, max_size) > 2)
915 /* Find the mode of the largest move... */
916 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
917 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
918 if (GET_MODE_SIZE (tmode) < max_size)
919 mode = tmode;
921 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
923 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
924 data.autinc_from = 1;
925 data.explicit_inc_from = -1;
927 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
929 data.from_addr = copy_addr_to_reg (from_addr);
930 data.autinc_from = 1;
931 data.explicit_inc_from = 1;
933 if (!data.autinc_from && CONSTANT_P (from_addr))
934 data.from_addr = copy_addr_to_reg (from_addr);
935 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
937 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
938 data.autinc_to = 1;
939 data.explicit_inc_to = -1;
941 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
943 data.to_addr = copy_addr_to_reg (to_addr);
944 data.autinc_to = 1;
945 data.explicit_inc_to = 1;
947 if (!data.autinc_to && CONSTANT_P (to_addr))
948 data.to_addr = copy_addr_to_reg (to_addr);
951 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
952 if (align >= GET_MODE_ALIGNMENT (tmode))
953 align = GET_MODE_ALIGNMENT (tmode);
954 else
956 enum machine_mode xmode;
958 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
959 tmode != VOIDmode;
960 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
961 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
962 || SLOW_UNALIGNED_ACCESS (tmode, align))
963 break;
965 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
968 /* First move what we can in the largest integer mode, then go to
969 successively smaller modes. */
971 while (max_size > 1)
973 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
974 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
975 if (GET_MODE_SIZE (tmode) < max_size)
976 mode = tmode;
978 if (mode == VOIDmode)
979 break;
981 icode = mov_optab->handlers[(int) mode].insn_code;
982 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
983 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
985 max_size = GET_MODE_SIZE (mode);
988 /* The code above should have handled everything. */
989 gcc_assert (!data.len);
991 if (endp)
993 rtx to1;
995 gcc_assert (!data.reverse);
996 if (data.autinc_to)
998 if (endp == 2)
1000 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1001 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1002 else
1003 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1004 -1));
1006 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1007 data.offset);
1009 else
1011 if (endp == 2)
1012 --data.offset;
1013 to1 = adjust_address (data.to, QImode, data.offset);
1015 return to1;
1017 else
1018 return data.to;
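/* Editor's note (not part of the original file): a worked example of the
   loop above.  Assuming MOVE_MAX_PIECES == 4, sufficient alignment and
   available move patterns, a 7-byte copy is emitted as one SImode move
   (4 bytes), then one HImode move (2 bytes), then one QImode move
   (1 byte), at which point data.len reaches zero.  */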
1021 /* Return number of insns required to move L bytes by pieces.
1022 ALIGN (in bits) is maximum alignment we can assume. */
1024 static unsigned HOST_WIDE_INT
1025 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1026 unsigned int max_size)
1028 unsigned HOST_WIDE_INT n_insns = 0;
1029 enum machine_mode tmode;
1031 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1032 if (align >= GET_MODE_ALIGNMENT (tmode))
1033 align = GET_MODE_ALIGNMENT (tmode);
1034 else
1036 enum machine_mode tmode, xmode;
1038 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1039 tmode != VOIDmode;
1040 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1041 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1042 || SLOW_UNALIGNED_ACCESS (tmode, align))
1043 break;
1045 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1048 while (max_size > 1)
1050 enum machine_mode mode = VOIDmode;
1051 enum insn_code icode;
1053 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1054 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1055 if (GET_MODE_SIZE (tmode) < max_size)
1056 mode = tmode;
1058 if (mode == VOIDmode)
1059 break;
1061 icode = mov_optab->handlers[(int) mode].insn_code;
1062 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1063 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1065 max_size = GET_MODE_SIZE (mode);
1068 gcc_assert (!l);
1069 return n_insns;
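/* Editor's note (not part of the original file): a worked count for the
   function above.  With l == 11, MOVE_MAX_PIECES == 4 and sufficient
   alignment, the loop tallies 11/4 = 2 SImode moves (3 bytes left),
   3/2 = 1 HImode move (1 byte left) and 1 QImode move, so
   move_by_pieces_ninsns returns 4.  */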
1072 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1073 with move instructions for mode MODE. GENFUN is the gen_... function
1074 to make a move insn for that mode. DATA has all the other info. */
1076 static void
1077 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1078 struct move_by_pieces *data)
1080 unsigned int size = GET_MODE_SIZE (mode);
1081 rtx to1 = NULL_RTX, from1;
1083 while (data->len >= size)
1085 if (data->reverse)
1086 data->offset -= size;
1088 if (data->to)
1090 if (data->autinc_to)
1091 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1092 data->offset);
1093 else
1094 to1 = adjust_address (data->to, mode, data->offset);
1097 if (data->autinc_from)
1098 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1099 data->offset);
1100 else
1101 from1 = adjust_address (data->from, mode, data->offset);
1103 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1104 emit_insn (gen_add2_insn (data->to_addr,
1105 GEN_INT (-(HOST_WIDE_INT)size)));
1106 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1107 emit_insn (gen_add2_insn (data->from_addr,
1108 GEN_INT (-(HOST_WIDE_INT)size)));
1110 if (data->to)
1111 emit_insn ((*genfun) (to1, from1));
1112 else
1114 #ifdef PUSH_ROUNDING
1115 emit_single_push_insn (mode, from1, NULL);
1116 #else
1117 gcc_unreachable ();
1118 #endif
1121 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1122 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1123 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1124 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1126 if (! data->reverse)
1127 data->offset += size;
1129 data->len -= size;
1133 /* Emit code to move a block Y to a block X. This may be done with
1134 string-move instructions, with multiple scalar move instructions,
1135 or with a library call.
1137 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1138 SIZE is an rtx that says how long they are.
1139 ALIGN is the maximum alignment we can assume they have.
1140 METHOD describes what kind of copy this is, and what mechanisms may be used.
1142 Return the address of the new block, if memcpy is called and returns it,
1143 0 otherwise. */
1145 rtx
1146 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1148 bool may_use_call;
1149 rtx retval = 0;
1150 unsigned int align;
1152 switch (method)
1154 case BLOCK_OP_NORMAL:
1155 case BLOCK_OP_TAILCALL:
1156 may_use_call = true;
1157 break;
1159 case BLOCK_OP_CALL_PARM:
1160 may_use_call = block_move_libcall_safe_for_call_parm ();
1162 /* Make inhibit_defer_pop nonzero around the library call
1163 to force it to pop the arguments right away. */
1164 NO_DEFER_POP;
1165 break;
1167 case BLOCK_OP_NO_LIBCALL:
1168 may_use_call = false;
1169 break;
1171 default:
1172 gcc_unreachable ();
1175 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1177 gcc_assert (MEM_P (x));
1178 gcc_assert (MEM_P (y));
1179 gcc_assert (size);
1181 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1182 block copy is more efficient for other large modes, e.g. DCmode. */
1183 x = adjust_address (x, BLKmode, 0);
1184 y = adjust_address (y, BLKmode, 0);
1186 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1187 can be incorrect is coming from __builtin_memcpy. */
1188 if (GET_CODE (size) == CONST_INT)
1190 if (INTVAL (size) == 0)
1191 return 0;
1193 x = shallow_copy_rtx (x);
1194 y = shallow_copy_rtx (y);
1195 set_mem_size (x, size);
1196 set_mem_size (y, size);
1199 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1200 move_by_pieces (x, y, INTVAL (size), align, 0);
1201 else if (emit_block_move_via_movmem (x, y, size, align))
1203 else if (may_use_call)
1204 retval = emit_block_move_via_libcall (x, y, size,
1205 method == BLOCK_OP_TAILCALL);
1206 else
1207 emit_block_move_via_loop (x, y, size, align);
1209 if (method == BLOCK_OP_CALL_PARM)
1210 OK_DEFER_POP;
1212 return retval;
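/* Editor's sketch (not part of the original file): a fragment showing a
   typical call to emit_block_move as defined above, copying SIZE bytes
   between two BLKmode MEMs.  X, Y and SIZE are hypothetical.  */
#if 0
  rtx ret = emit_block_move (x, y, GEN_INT (size), BLOCK_OP_NORMAL);
#endif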
1215 /* A subroutine of emit_block_move. Returns true if calling the
1216 block move libcall will not clobber any parameters which may have
1217 already been placed on the stack. */
1219 static bool
1220 block_move_libcall_safe_for_call_parm (void)
1222 /* If arguments are pushed on the stack, then they're safe. */
1223 if (PUSH_ARGS)
1224 return true;
1226 /* If registers go on the stack anyway, any argument is sure to clobber
1227 an outgoing argument. */
1228 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1230 tree fn = emit_block_move_libcall_fn (false);
1231 (void) fn;
1232 if (REG_PARM_STACK_SPACE (fn) != 0)
1233 return false;
1235 #endif
1237 /* If any argument goes in memory, then it might clobber an outgoing
1238 argument. */
1240 CUMULATIVE_ARGS args_so_far;
1241 tree fn, arg;
1243 fn = emit_block_move_libcall_fn (false);
1244 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1246 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1247 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1249 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1250 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1251 if (!tmp || !REG_P (tmp))
1252 return false;
1253 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1254 return false;
1255 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1258 return true;
1261 /* A subroutine of emit_block_move. Expand a movmem pattern;
1262 return true if successful. */
1264 static bool
1265 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1267 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1268 int save_volatile_ok = volatile_ok;
1269 enum machine_mode mode;
1271 /* Since this is a move insn, we don't care about volatility. */
1272 volatile_ok = 1;
1274 /* Try the most limited insn first, because there's no point
1275 including more than one in the machine description unless
1276 the more limited one has some advantage. */
1278 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1279 mode = GET_MODE_WIDER_MODE (mode))
1281 enum insn_code code = movmem_optab[(int) mode];
1282 insn_operand_predicate_fn pred;
1284 if (code != CODE_FOR_nothing
1285 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1286 here because if SIZE is less than the mode mask, as it is
1287 returned by the macro, it will definitely be less than the
1288 actual mode mask. */
1289 && ((GET_CODE (size) == CONST_INT
1290 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1291 <= (GET_MODE_MASK (mode) >> 1)))
1292 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1293 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1294 || (*pred) (x, BLKmode))
1295 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1296 || (*pred) (y, BLKmode))
1297 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1298 || (*pred) (opalign, VOIDmode)))
1300 rtx op2;
1301 rtx last = get_last_insn ();
1302 rtx pat;
1304 op2 = convert_to_mode (mode, size, 1);
1305 pred = insn_data[(int) code].operand[2].predicate;
1306 if (pred != 0 && ! (*pred) (op2, mode))
1307 op2 = copy_to_mode_reg (mode, op2);
1309 /* ??? When called via emit_block_move_for_call, it'd be
1310 nice if there were some way to inform the backend, so
1311 that it doesn't fail the expansion because it thinks
1312 emitting the libcall would be more efficient. */
1314 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1315 if (pat)
1317 emit_insn (pat);
1318 volatile_ok = save_volatile_ok;
1319 return true;
1321 else
1322 delete_insns_since (last);
1326 volatile_ok = save_volatile_ok;
1327 return false;
1330 /* A subroutine of emit_block_move. Expand a call to memcpy.
1331 Return the return value from memcpy, 0 otherwise. */
1333 static rtx
1334 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1336 rtx dst_addr, src_addr;
1337 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1338 enum machine_mode size_mode;
1339 rtx retval;
1341 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1342 pseudos. We can then place those new pseudos into a VAR_DECL and
1343 use them later. */
1345 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1346 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1348 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1349 src_addr = convert_memory_address (ptr_mode, src_addr);
1351 dst_tree = make_tree (ptr_type_node, dst_addr);
1352 src_tree = make_tree (ptr_type_node, src_addr);
1354 size_mode = TYPE_MODE (sizetype);
1356 size = convert_to_mode (size_mode, size, 1);
1357 size = copy_to_mode_reg (size_mode, size);
1359 /* It is incorrect to use the libcall calling conventions to call
1360 memcpy in this context. This could be a user call to memcpy and
1361 the user may wish to examine the return value from memcpy. For
1362 targets where libcalls and normal calls have different conventions
1363 for returning pointers, we could end up generating incorrect code. */
1365 size_tree = make_tree (sizetype, size);
1367 fn = emit_block_move_libcall_fn (true);
1368 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1369 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1370 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1372 /* Now we have to build up the CALL_EXPR itself. */
1373 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1374 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1375 call_expr, arg_list, NULL_TREE);
1376 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1378 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1380 return retval;
1383 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1384 for the function we use for block copies. The first time FOR_CALL
1385 is true, we call assemble_external. */
1387 static GTY(()) tree block_move_fn;
1389 void
1390 init_block_move_fn (const char *asmspec)
1392 if (!block_move_fn)
1394 tree args, fn;
1396 fn = get_identifier ("memcpy");
1397 args = build_function_type_list (ptr_type_node, ptr_type_node,
1398 const_ptr_type_node, sizetype,
1399 NULL_TREE);
1401 fn = build_decl (FUNCTION_DECL, fn, args);
1402 DECL_EXTERNAL (fn) = 1;
1403 TREE_PUBLIC (fn) = 1;
1404 DECL_ARTIFICIAL (fn) = 1;
1405 TREE_NOTHROW (fn) = 1;
1407 block_move_fn = fn;
1410 if (asmspec)
1411 set_user_assembler_name (block_move_fn, asmspec);
1414 static tree
1415 emit_block_move_libcall_fn (int for_call)
1417 static bool emitted_extern;
1419 if (!block_move_fn)
1420 init_block_move_fn (NULL);
1422 if (for_call && !emitted_extern)
1424 emitted_extern = true;
1425 make_decl_rtl (block_move_fn);
1426 assemble_external (block_move_fn);
1429 return block_move_fn;
1432 /* A subroutine of emit_block_move. Copy the data via an explicit
1433 loop. This is used only when libcalls are forbidden. */
1434 /* ??? It'd be nice to copy in hunks larger than QImode. */
1436 static void
1437 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1438 unsigned int align ATTRIBUTE_UNUSED)
1440 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1441 enum machine_mode iter_mode;
1443 iter_mode = GET_MODE (size);
1444 if (iter_mode == VOIDmode)
1445 iter_mode = word_mode;
1447 top_label = gen_label_rtx ();
1448 cmp_label = gen_label_rtx ();
1449 iter = gen_reg_rtx (iter_mode);
1451 emit_move_insn (iter, const0_rtx);
1453 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1454 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1455 do_pending_stack_adjust ();
1457 emit_jump (cmp_label);
1458 emit_label (top_label);
1460 tmp = convert_modes (Pmode, iter_mode, iter, true);
1461 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1462 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1463 x = change_address (x, QImode, x_addr);
1464 y = change_address (y, QImode, y_addr);
1466 emit_move_insn (x, y);
1468 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1469 true, OPTAB_LIB_WIDEN);
1470 if (tmp != iter)
1471 emit_move_insn (iter, tmp);
1473 emit_label (cmp_label);
1475 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1476 true, top_label);
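/* Editor's note (not part of the original file): the RTL emitted by
   emit_block_move_via_loop above behaves like this C loop; x_bytes and
   y_bytes are hypothetical names for the two blocks viewed as byte
   arrays.  */
#if 0
  for (iter = 0; iter < size; iter++)
    x_bytes[iter] = y_bytes[iter];   /* one QImode move per iteration */
#endif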
1479 /* Copy all or part of a value X into registers starting at REGNO.
1480 The number of registers to be filled is NREGS. */
1482 void
1483 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1485 int i;
1486 #ifdef HAVE_load_multiple
1487 rtx pat;
1488 rtx last;
1489 #endif
1491 if (nregs == 0)
1492 return;
1494 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1495 x = validize_mem (force_const_mem (mode, x));
1497 /* See if the machine can do this with a load multiple insn. */
1498 #ifdef HAVE_load_multiple
1499 if (HAVE_load_multiple)
1501 last = get_last_insn ();
1502 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1503 GEN_INT (nregs));
1504 if (pat)
1506 emit_insn (pat);
1507 return;
1509 else
1510 delete_insns_since (last);
1512 #endif
1514 for (i = 0; i < nregs; i++)
1515 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1516 operand_subword_force (x, i, mode));
1519 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1520 The number of registers to be filled is NREGS. */
1522 void
1523 move_block_from_reg (int regno, rtx x, int nregs)
1525 int i;
1527 if (nregs == 0)
1528 return;
1530 /* See if the machine can do this with a store multiple insn. */
1531 #ifdef HAVE_store_multiple
1532 if (HAVE_store_multiple)
1534 rtx last = get_last_insn ();
1535 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1536 GEN_INT (nregs));
1537 if (pat)
1539 emit_insn (pat);
1540 return;
1542 else
1543 delete_insns_since (last);
1545 #endif
1547 for (i = 0; i < nregs; i++)
1549 rtx tem = operand_subword (x, i, 1, BLKmode);
1551 gcc_assert (tem);
1553 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1557 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1558 ORIG, where ORIG is a non-consecutive group of registers represented by
1559 a PARALLEL. The clone is identical to the original except in that the
1560 original set of registers is replaced by a new set of pseudo registers.
1561 The new set has the same modes as the original set. */
1563 rtx
1564 gen_group_rtx (rtx orig)
1566 int i, length;
1567 rtx *tmps;
1569 gcc_assert (GET_CODE (orig) == PARALLEL);
1571 length = XVECLEN (orig, 0);
1572 tmps = alloca (sizeof (rtx) * length);
1574 /* Skip a NULL entry in first slot. */
1575 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1577 if (i)
1578 tmps[0] = 0;
1580 for (; i < length; i++)
1582 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1583 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1585 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1588 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1591 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1592 except that values are placed in TMPS[i], and must later be moved
1593 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1595 static void
1596 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1598 rtx src;
1599 int start, i;
1600 enum machine_mode m = GET_MODE (orig_src);
1602 gcc_assert (GET_CODE (dst) == PARALLEL);
1604 if (m != VOIDmode
1605 && !SCALAR_INT_MODE_P (m)
1606 && !MEM_P (orig_src)
1607 && GET_CODE (orig_src) != CONCAT)
1609 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1610 if (imode == BLKmode)
1611 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1612 else
1613 src = gen_reg_rtx (imode);
1614 if (imode != BLKmode)
1615 src = gen_lowpart (GET_MODE (orig_src), src);
1616 emit_move_insn (src, orig_src);
1617 /* ...and back again. */
1618 if (imode != BLKmode)
1619 src = gen_lowpart (imode, src);
1620 emit_group_load_1 (tmps, dst, src, type, ssize);
1621 return;
1624 /* Check for a NULL entry, used to indicate that the parameter goes
1625 both on the stack and in registers. */
1626 if (XEXP (XVECEXP (dst, 0, 0), 0))
1627 start = 0;
1628 else
1629 start = 1;
1631 /* Process the pieces. */
1632 for (i = start; i < XVECLEN (dst, 0); i++)
1634 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1635 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1636 unsigned int bytelen = GET_MODE_SIZE (mode);
1637 int shift = 0;
1639 /* Handle trailing fragments that run over the size of the struct. */
1640 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1642 /* Arrange to shift the fragment to where it belongs.
1643 extract_bit_field loads to the lsb of the reg. */
1644 if (
1645 #ifdef BLOCK_REG_PADDING
1646 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1647 == (BYTES_BIG_ENDIAN ? upward : downward)
1648 #else
1649 BYTES_BIG_ENDIAN
1650 #endif
1652 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1653 bytelen = ssize - bytepos;
1654 gcc_assert (bytelen > 0);
1657 /* If we won't be loading directly from memory, protect the real source
1658 from strange tricks we might play; but make sure that the source can
1659 be loaded directly into the destination. */
1660 src = orig_src;
1661 if (!MEM_P (orig_src)
1662 && (!CONSTANT_P (orig_src)
1663 || (GET_MODE (orig_src) != mode
1664 && GET_MODE (orig_src) != VOIDmode)))
1666 if (GET_MODE (orig_src) == VOIDmode)
1667 src = gen_reg_rtx (mode);
1668 else
1669 src = gen_reg_rtx (GET_MODE (orig_src));
1671 emit_move_insn (src, orig_src);
1674 /* Optimize the access just a bit. */
1675 if (MEM_P (src)
1676 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1677 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1678 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1679 && bytelen == GET_MODE_SIZE (mode))
1681 tmps[i] = gen_reg_rtx (mode);
1682 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1684 else if (COMPLEX_MODE_P (mode)
1685 && GET_MODE (src) == mode
1686 && bytelen == GET_MODE_SIZE (mode))
1687 /* Let emit_move_complex do the bulk of the work. */
1688 tmps[i] = src;
1689 else if (GET_CODE (src) == CONCAT)
1691 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1692 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1694 if ((bytepos == 0 && bytelen == slen0)
1695 || (bytepos != 0 && bytepos + bytelen <= slen))
1697 /* The following assumes that the concatenated objects all
1698 have the same size. In this case, a simple calculation
1699 can be used to determine the object and the bit field
1700 to be extracted. */
1701 tmps[i] = XEXP (src, bytepos / slen0);
1702 if (! CONSTANT_P (tmps[i])
1703 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1704 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1705 (bytepos % slen0) * BITS_PER_UNIT,
1706 1, NULL_RTX, mode, mode);
1708 else
1710 rtx mem;
1712 gcc_assert (!bytepos);
1713 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1714 emit_move_insn (mem, src);
1715 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1716 0, 1, NULL_RTX, mode, mode);
1719 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1720 SIMD register, which is currently broken. While we get GCC
1721 to emit proper RTL for these cases, let's dump to memory. */
1722 else if (VECTOR_MODE_P (GET_MODE (dst))
1723 && REG_P (src))
1725 int slen = GET_MODE_SIZE (GET_MODE (src));
1726 rtx mem;
1728 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1729 emit_move_insn (mem, src);
1730 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1732 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1733 && XVECLEN (dst, 0) > 1)
1734 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1735 else if (CONSTANT_P (src)
1736 || (REG_P (src) && GET_MODE (src) == mode))
1737 tmps[i] = src;
1738 else
1739 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1740 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1741 mode, mode);
1743 if (shift)
1744 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1745 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1749 /* Emit code to move a block SRC of type TYPE to a block DST,
1750 where DST is non-consecutive registers represented by a PARALLEL.
1751 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1752 if not known. */
1754 void
1755 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1757 rtx *tmps;
1758 int i;
1760 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1761 emit_group_load_1 (tmps, dst, src, type, ssize);
1763 /* Copy the extracted pieces into the proper (probable) hard regs. */
1764 for (i = 0; i < XVECLEN (dst, 0); i++)
1766 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1767 if (d == NULL)
1768 continue;
1769 emit_move_insn (d, tmps[i]);
1773 /* Similar, but load SRC into new pseudos in a format that looks like
1774 PARALLEL. This can later be fed to emit_group_move to get things
1775 in the right place. */
1777 rtx
1778 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1780 rtvec vec;
1781 int i;
1783 vec = rtvec_alloc (XVECLEN (parallel, 0));
1784 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1786 /* Convert the vector to look just like the original PARALLEL, except
1787 with the computed values. */
1788 for (i = 0; i < XVECLEN (parallel, 0); i++)
1790 rtx e = XVECEXP (parallel, 0, i);
1791 rtx d = XEXP (e, 0);
1793 if (d)
1795 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1796 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1798 RTVEC_ELT (vec, i) = e;
1801 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1804 /* Emit code to move a block SRC to block DST, where SRC and DST are
1805 non-consecutive groups of registers, each represented by a PARALLEL. */
1807 void
1808 emit_group_move (rtx dst, rtx src)
1810 int i;
1812 gcc_assert (GET_CODE (src) == PARALLEL
1813 && GET_CODE (dst) == PARALLEL
1814 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1816 /* Skip first entry if NULL. */
1817 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1818 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1819 XEXP (XVECEXP (src, 0, i), 0));
1822 /* Move a group of registers represented by a PARALLEL into pseudos. */
1824 rtx
1825 emit_group_move_into_temps (rtx src)
1827 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1828 int i;
1830 for (i = 0; i < XVECLEN (src, 0); i++)
1832 rtx e = XVECEXP (src, 0, i);
1833 rtx d = XEXP (e, 0);
1835 if (d)
1836 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1837 RTVEC_ELT (vec, i) = e;
1840 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1843 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1844 where SRC is non-consecutive registers represented by a PARALLEL.
1845 SSIZE represents the total size of block ORIG_DST, or -1 if not
1846 known. */
1848 void
1849 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1851 rtx *tmps, dst;
1852 int start, i;
1853 enum machine_mode m = GET_MODE (orig_dst);
1855 gcc_assert (GET_CODE (src) == PARALLEL);
1857 if (!SCALAR_INT_MODE_P (m)
1858 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1860 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1861 if (imode == BLKmode)
1862 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1863 else
1864 dst = gen_reg_rtx (imode);
1865 emit_group_store (dst, src, type, ssize);
1866 if (imode != BLKmode)
1867 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1868 emit_move_insn (orig_dst, dst);
1869 return;
1872 /* Check for a NULL entry, used to indicate that the parameter goes
1873 both on the stack and in registers. */
1874 if (XEXP (XVECEXP (src, 0, 0), 0))
1875 start = 0;
1876 else
1877 start = 1;
1879 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1881 /* Copy the (probable) hard regs into pseudos. */
1882 for (i = start; i < XVECLEN (src, 0); i++)
1884 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1885 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1886 emit_move_insn (tmps[i], reg);
1889 /* If we won't be storing directly into memory, protect the real destination
1890 from strange tricks we might play. */
1891 dst = orig_dst;
1892 if (GET_CODE (dst) == PARALLEL)
1894 rtx temp;
1896 /* We can get a PARALLEL dst if there is a conditional expression in
1897 a return statement. In that case, the dst and src are the same,
1898 so no action is necessary. */
1899 if (rtx_equal_p (dst, src))
1900 return;
1902 /* It is unclear if we can ever reach here, but we may as well handle
1903 it. Allocate a temporary, and split this into a store/load to/from
1904 the temporary. */
1906 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1907 emit_group_store (temp, src, type, ssize);
1908 emit_group_load (dst, temp, type, ssize);
1909 return;
1911 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1913 dst = gen_reg_rtx (GET_MODE (orig_dst));
1914 /* Make life a bit easier for combine. */
1915 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1918 /* Process the pieces. */
1919 for (i = start; i < XVECLEN (src, 0); i++)
1921 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1922 enum machine_mode mode = GET_MODE (tmps[i]);
1923 unsigned int bytelen = GET_MODE_SIZE (mode);
1924 rtx dest = dst;
1926 /* Handle trailing fragments that run over the size of the struct. */
1927 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1929 /* store_bit_field always takes its value from the lsb.
1930 Move the fragment to the lsb if it's not already there. */
1931 if (
1932 #ifdef BLOCK_REG_PADDING
1933 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1934 == (BYTES_BIG_ENDIAN ? upward : downward)
1935 #else
1936 BYTES_BIG_ENDIAN
1937 #endif
1940 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1941 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1942 build_int_cst (NULL_TREE, shift),
1943 tmps[i], 0);
1945 bytelen = ssize - bytepos;
1948 if (GET_CODE (dst) == CONCAT)
1950 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1951 dest = XEXP (dst, 0);
1952 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1954 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1955 dest = XEXP (dst, 1);
1957 else
1959 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1960 dest = assign_stack_temp (GET_MODE (dest),
1961 GET_MODE_SIZE (GET_MODE (dest)), 0);
1962 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1963 tmps[i]);
1964 dst = dest;
1965 break;
1969 /* Optimize the access just a bit. */
1970 if (MEM_P (dest)
1971 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1972 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1973 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1974 && bytelen == GET_MODE_SIZE (mode))
1975 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1976 else
1977 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1978 mode, tmps[i]);
1981 /* Copy from the pseudo into the (probable) hard reg. */
1982 if (orig_dst != dst)
1983 emit_move_insn (orig_dst, dst);
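/* Illustrative sketch, not part of the original file: the converse
   operation -- spilling the registers described by a PARALLEL SRC into a
   fresh 16-byte stack temporary.  The size is invented for the example and
   is assumed to match the PARALLEL's layout.  */
#if 0
static rtx
example_group_store (rtx parallel_src, tree type)
{
  rtx mem = assign_stack_temp (BLKmode, 16, 0);
  emit_group_store (mem, parallel_src, type, 16);
  return mem;
}
#endif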
1986 /* Generate code to copy a BLKmode object of TYPE out of a
1987 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1988 is null, a stack temporary is created. TGTBLK is returned.
1990 The purpose of this routine is to handle functions that return
1991 BLKmode structures in registers. Some machines (the PA for example)
1992 want to return all small structures in registers regardless of the
1993 structure's alignment. */
1995 rtx
1996 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1998 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1999 rtx src = NULL, dst = NULL;
2000 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2001 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2003 if (tgtblk == 0)
2005 tgtblk = assign_temp (build_qualified_type (type,
2006 (TYPE_QUALS (type)
2007 | TYPE_QUAL_CONST)),
2008 0, 1, 1);
2009 preserve_temp_slots (tgtblk);
2012 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2013 into a new pseudo which is a full word. */
2015 if (GET_MODE (srcreg) != BLKmode
2016 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2017 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2019 /* If the structure doesn't take up a whole number of words, see whether
2020 SRCREG is padded on the left or on the right. If it's on the left,
2021 set PADDING_CORRECTION to the number of bits to skip.
2023 In most ABIs, the structure will be returned at the least significant
2024 end of the register, which translates to right padding on little-endian
2025 targets and left padding on big-endian targets. The opposite
2026 holds if the structure is returned at the most significant
2027 end of the register. */
2028 if (bytes % UNITS_PER_WORD != 0
2029 && (targetm.calls.return_in_msb (type)
2030 ? !BYTES_BIG_ENDIAN
2031 : BYTES_BIG_ENDIAN))
2032 padding_correction
2033 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2035 /* Copy the structure BITSIZE bits at a time.
2037 We could probably emit more efficient code for machines which do not use
2038 strict alignment, but it doesn't seem worth the effort at the current
2039 time. */
2040 for (bitpos = 0, xbitpos = padding_correction;
2041 bitpos < bytes * BITS_PER_UNIT;
2042 bitpos += bitsize, xbitpos += bitsize)
2044 /* We need a new source operand each time xbitpos is on a
2045 word boundary and when xbitpos == padding_correction
2046 (the first time through). */
2047 if (xbitpos % BITS_PER_WORD == 0
2048 || xbitpos == padding_correction)
2049 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2050 GET_MODE (srcreg));
2052 /* We need a new destination operand each time bitpos is on
2053 a word boundary. */
2054 if (bitpos % BITS_PER_WORD == 0)
2055 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2057 /* Use xbitpos for the source extraction (right justified) and
2058 bitpos for the destination store (left justified). */
2059 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2060 extract_bit_field (src, bitsize,
2061 xbitpos % BITS_PER_WORD, 1,
2062 NULL_RTX, word_mode, word_mode));
2065 return tgtblk;
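/* Illustrative sketch, not part of the original file: copying a BLKmode
   structure return value out of the return register into a stack
   temporary.  TYPE is assumed to be a small structure type and the
   register number is invented for the example; passing a null TGTBLK lets
   copy_blkmode_from_reg allocate the temporary itself.  */
#if 0
static rtx
example_copy_struct_return (tree type)
{
  rtx retreg = gen_rtx_REG (word_mode, 0);
  return copy_blkmode_from_reg (NULL_RTX, retreg, type);
}
#endif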
2068 /* Add a USE expression for REG to the (possibly empty) list pointed
2069 to by CALL_FUSAGE. REG must denote a hard register. */
2071 void
2072 use_reg (rtx *call_fusage, rtx reg)
2074 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2076 *call_fusage
2077 = gen_rtx_EXPR_LIST (VOIDmode,
2078 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2081 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2082 starting at REGNO. All of these registers must be hard registers. */
2084 void
2085 use_regs (rtx *call_fusage, int regno, int nregs)
2087 int i;
2089 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2091 for (i = 0; i < nregs; i++)
2092 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2095 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2096 PARALLEL REGS. This is for calls that pass values in multiple
2097 non-contiguous locations. The Irix 6 ABI has examples of this. */
2099 void
2100 use_group_regs (rtx *call_fusage, rtx regs)
2102 int i;
2104 for (i = 0; i < XVECLEN (regs, 0); i++)
2106 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2108 /* A NULL entry means the parameter goes both on the stack and in
2109 registers. This can also be a MEM for targets that pass values
2110 partially on the stack and partially in registers. */
2111 if (reg != 0 && REG_P (reg))
2112 use_reg (call_fusage, reg);
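/* Illustrative sketch, not part of the original file: building up a
   CALL_FUSAGE list with the helpers above.  The hard register numbers are
   invented for the example; a real caller attaches the resulting list to
   the CALL_INSN via CALL_INSN_FUNCTION_USAGE.  */
#if 0
static void
example_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  /* One value in hard register 0, two more in hard registers 4 and 5.  */
  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
  use_regs (&call_fusage, 4, 2);
}
#endif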
2117 /* Determine whether the LEN bytes generated by CONSTFUN can be
2118 stored to memory using several move instructions. CONSTFUNDATA is
2119 a pointer which will be passed as argument in every CONSTFUN call.
2120 ALIGN is maximum alignment we can assume. Return nonzero if a
2121 call to store_by_pieces should succeed. */
2123 int
2124 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2125 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2126 void *constfundata, unsigned int align)
2128 unsigned HOST_WIDE_INT l;
2129 unsigned int max_size;
2130 HOST_WIDE_INT offset = 0;
2131 enum machine_mode mode, tmode;
2132 enum insn_code icode;
2133 int reverse;
2134 rtx cst;
2136 if (len == 0)
2137 return 1;
2139 if (! STORE_BY_PIECES_P (len, align))
2140 return 0;
2142 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2143 if (align >= GET_MODE_ALIGNMENT (tmode))
2144 align = GET_MODE_ALIGNMENT (tmode);
2145 else
2147 enum machine_mode xmode;
2149 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2150 tmode != VOIDmode;
2151 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2152 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2153 || SLOW_UNALIGNED_ACCESS (tmode, align))
2154 break;
2156 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2159 /* We would first store what we can in the largest integer mode, then go to
2160 successively smaller modes. */
2162 for (reverse = 0;
2163 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2164 reverse++)
2166 l = len;
2167 mode = VOIDmode;
2168 max_size = STORE_MAX_PIECES + 1;
2169 while (max_size > 1)
2171 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2172 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2173 if (GET_MODE_SIZE (tmode) < max_size)
2174 mode = tmode;
2176 if (mode == VOIDmode)
2177 break;
2179 icode = mov_optab->handlers[(int) mode].insn_code;
2180 if (icode != CODE_FOR_nothing
2181 && align >= GET_MODE_ALIGNMENT (mode))
2183 unsigned int size = GET_MODE_SIZE (mode);
2185 while (l >= size)
2187 if (reverse)
2188 offset -= size;
2190 cst = (*constfun) (constfundata, offset, mode);
2191 if (!LEGITIMATE_CONSTANT_P (cst))
2192 return 0;
2194 if (!reverse)
2195 offset += size;
2197 l -= size;
2201 max_size = GET_MODE_SIZE (mode);
2204 /* The code above should have handled everything. */
2205 gcc_assert (!l);
2208 return 1;
2211 /* Generate several move instructions to store LEN bytes generated by
2212 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2213 pointer which will be passed as argument in every CONSTFUN call.
2214 ALIGN is maximum alignment we can assume.
2215 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2216 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2217 stpcpy. */
2219 rtx
2220 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2221 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2222 void *constfundata, unsigned int align, int endp)
2224 struct store_by_pieces data;
2226 if (len == 0)
2228 gcc_assert (endp != 2);
2229 return to;
2232 gcc_assert (STORE_BY_PIECES_P (len, align));
2233 data.constfun = constfun;
2234 data.constfundata = constfundata;
2235 data.len = len;
2236 data.to = to;
2237 store_by_pieces_1 (&data, align);
2238 if (endp)
2240 rtx to1;
2242 gcc_assert (!data.reverse);
2243 if (data.autinc_to)
2245 if (endp == 2)
2247 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2248 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2249 else
2250 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2251 -1));
2253 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2254 data.offset);
2256 else
2258 if (endp == 2)
2259 --data.offset;
2260 to1 = adjust_address (data.to, QImode, data.offset);
2262 return to1;
2264 else
2265 return data.to;
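/* Illustrative sketch, not part of the original file: the CONSTFUN
   protocol used by can_store_by_pieces and store_by_pieces.  The callback
   returns the constant to store at a given offset in a given mode; this
   one simply zeros the block, mirroring what clear_by_pieces does with
   clear_by_pieces_1 below.  */
#if 0
static rtx
example_zero_chunk (void *data ATTRIBUTE_UNUSED,
		    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		    enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
example_store_zeros (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  /* Only call store_by_pieces when the target agrees it is profitable.  */
  if (can_store_by_pieces (len, example_zero_chunk, NULL, align))
    store_by_pieces (to, len, example_zero_chunk, NULL, align, 0);
}
#endif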
2268 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2269 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2271 static void
2272 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2274 struct store_by_pieces data;
2276 if (len == 0)
2277 return;
2279 data.constfun = clear_by_pieces_1;
2280 data.constfundata = NULL;
2281 data.len = len;
2282 data.to = to;
2283 store_by_pieces_1 (&data, align);
2286 /* Callback routine for clear_by_pieces.
2287 Return const0_rtx unconditionally. */
2289 static rtx
2290 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2291 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2292 enum machine_mode mode ATTRIBUTE_UNUSED)
2294 return const0_rtx;
2297 /* Subroutine of clear_by_pieces and store_by_pieces.
2298 Generate several move instructions to store LEN bytes of block TO. (A MEM
2299 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2301 static void
2302 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2303 unsigned int align ATTRIBUTE_UNUSED)
2305 rtx to_addr = XEXP (data->to, 0);
2306 unsigned int max_size = STORE_MAX_PIECES + 1;
2307 enum machine_mode mode = VOIDmode, tmode;
2308 enum insn_code icode;
2310 data->offset = 0;
2311 data->to_addr = to_addr;
2312 data->autinc_to
2313 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2314 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2316 data->explicit_inc_to = 0;
2317 data->reverse
2318 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2319 if (data->reverse)
2320 data->offset = data->len;
2322 /* If storing requires more than two move insns,
2323 copy addresses to registers (to make displacements shorter)
2324 and use post-increment if available. */
2325 if (!data->autinc_to
2326 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2328 /* Determine the main mode we'll be using. */
2329 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2330 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2331 if (GET_MODE_SIZE (tmode) < max_size)
2332 mode = tmode;
2334 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2336 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2337 data->autinc_to = 1;
2338 data->explicit_inc_to = -1;
2341 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2342 && ! data->autinc_to)
2344 data->to_addr = copy_addr_to_reg (to_addr);
2345 data->autinc_to = 1;
2346 data->explicit_inc_to = 1;
2349 if (!data->autinc_to && CONSTANT_P (to_addr))
2350 data->to_addr = copy_addr_to_reg (to_addr);
2353 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2354 if (align >= GET_MODE_ALIGNMENT (tmode))
2355 align = GET_MODE_ALIGNMENT (tmode);
2356 else
2358 enum machine_mode xmode;
2360 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2361 tmode != VOIDmode;
2362 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2363 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2364 || SLOW_UNALIGNED_ACCESS (tmode, align))
2365 break;
2367 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2370 /* First store what we can in the largest integer mode, then go to
2371 successively smaller modes. */
2373 while (max_size > 1)
2375 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2376 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2377 if (GET_MODE_SIZE (tmode) < max_size)
2378 mode = tmode;
2380 if (mode == VOIDmode)
2381 break;
2383 icode = mov_optab->handlers[(int) mode].insn_code;
2384 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2385 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2387 max_size = GET_MODE_SIZE (mode);
2390 /* The code above should have handled everything. */
2391 gcc_assert (!data->len);
2394 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2395 with move instructions for mode MODE. GENFUN is the gen_... function
2396 to make a move insn for that mode. DATA has all the other info. */
2398 static void
2399 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2400 struct store_by_pieces *data)
2402 unsigned int size = GET_MODE_SIZE (mode);
2403 rtx to1, cst;
2405 while (data->len >= size)
2407 if (data->reverse)
2408 data->offset -= size;
2410 if (data->autinc_to)
2411 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2412 data->offset);
2413 else
2414 to1 = adjust_address (data->to, mode, data->offset);
2416 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2417 emit_insn (gen_add2_insn (data->to_addr,
2418 GEN_INT (-(HOST_WIDE_INT) size)));
2420 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2421 emit_insn ((*genfun) (to1, cst));
2423 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2424 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2426 if (! data->reverse)
2427 data->offset += size;
2429 data->len -= size;
2433 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2434 its length in bytes. */
2436 rtx
2437 clear_storage (rtx object, rtx size, enum block_op_methods method)
2439 enum machine_mode mode = GET_MODE (object);
2440 unsigned int align;
2442 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2444 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2445 just move a zero. Otherwise, do this a piece at a time. */
2446 if (mode != BLKmode
2447 && GET_CODE (size) == CONST_INT
2448 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2450 rtx zero = CONST0_RTX (mode);
2451 if (zero != NULL)
2453 emit_move_insn (object, zero);
2454 return NULL;
2457 if (COMPLEX_MODE_P (mode))
2459 zero = CONST0_RTX (GET_MODE_INNER (mode));
2460 if (zero != NULL)
2462 write_complex_part (object, zero, 0);
2463 write_complex_part (object, zero, 1);
2464 return NULL;
2469 if (size == const0_rtx)
2470 return NULL;
2472 align = MEM_ALIGN (object);
2474 if (GET_CODE (size) == CONST_INT
2475 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2476 clear_by_pieces (object, INTVAL (size), align);
2477 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2479 else
2480 return clear_storage_via_libcall (object, size,
2481 method == BLOCK_OP_TAILCALL);
2483 return NULL;
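/* Illustrative sketch, not part of the original file: zeroing a 32-byte
   BLKmode stack temporary with clear_storage.  The size is invented for
   the example.  */
#if 0
static void
example_clear_temp (void)
{
  rtx mem = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);
}
#endif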
2486 /* A subroutine of clear_storage. Expand a call to memset.
2487 Return the return value of memset, 0 otherwise. */
2489 static rtx
2490 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2492 tree call_expr, arg_list, fn, object_tree, size_tree;
2493 enum machine_mode size_mode;
2494 rtx retval;
2496 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2497 wrap those pseudos in trees and use them in the call below. */
2499 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2501 size_mode = TYPE_MODE (sizetype);
2502 size = convert_to_mode (size_mode, size, 1);
2503 size = copy_to_mode_reg (size_mode, size);
2505 /* It is incorrect to use the libcall calling conventions to call
2506 memset in this context. This could be a user call to memset and
2507 the user may wish to examine the return value from memset. For
2508 targets where libcalls and normal calls have different conventions
2509 for returning pointers, we could end up generating incorrect code. */
2511 object_tree = make_tree (ptr_type_node, object);
2512 size_tree = make_tree (sizetype, size);
2514 fn = clear_storage_libcall_fn (true);
2515 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2516 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2517 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2519 /* Now we have to build up the CALL_EXPR itself. */
2520 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2521 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2522 call_expr, arg_list, NULL_TREE);
2523 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2525 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2527 return retval;
2530 /* A subroutine of clear_storage_via_libcall. Create the tree node
2531 for the function we use for block clears. The first time FOR_CALL
2532 is true, we call assemble_external. */
2534 static GTY(()) tree block_clear_fn;
2536 void
2537 init_block_clear_fn (const char *asmspec)
2539 if (!block_clear_fn)
2541 tree fn, args;
2543 fn = get_identifier ("memset");
2544 args = build_function_type_list (ptr_type_node, ptr_type_node,
2545 integer_type_node, sizetype,
2546 NULL_TREE);
2548 fn = build_decl (FUNCTION_DECL, fn, args);
2549 DECL_EXTERNAL (fn) = 1;
2550 TREE_PUBLIC (fn) = 1;
2551 DECL_ARTIFICIAL (fn) = 1;
2552 TREE_NOTHROW (fn) = 1;
2554 block_clear_fn = fn;
2557 if (asmspec)
2558 set_user_assembler_name (block_clear_fn, asmspec);
2561 static tree
2562 clear_storage_libcall_fn (int for_call)
2564 static bool emitted_extern;
2566 if (!block_clear_fn)
2567 init_block_clear_fn (NULL);
2569 if (for_call && !emitted_extern)
2571 emitted_extern = true;
2572 make_decl_rtl (block_clear_fn);
2573 assemble_external (block_clear_fn);
2576 return block_clear_fn;
2579 /* Expand a setmem pattern; return true if successful. */
2581 bool
2582 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2584 /* Try the most limited insn first, because there's no point
2585 including more than one in the machine description unless
2586 the more limited one has some advantage. */
2588 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2589 enum machine_mode mode;
2591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2592 mode = GET_MODE_WIDER_MODE (mode))
2594 enum insn_code code = setmem_optab[(int) mode];
2595 insn_operand_predicate_fn pred;
2597 if (code != CODE_FOR_nothing
2598 /* We don't need MODE to be narrower than
2599 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2600 the mode mask, as it is returned by the macro, it will
2601 definitely be less than the actual mode mask. */
2602 && ((GET_CODE (size) == CONST_INT
2603 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2604 <= (GET_MODE_MASK (mode) >> 1)))
2605 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2606 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2607 || (*pred) (object, BLKmode))
2608 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2609 || (*pred) (opalign, VOIDmode)))
2611 rtx opsize, opchar;
2612 enum machine_mode char_mode;
2613 rtx last = get_last_insn ();
2614 rtx pat;
2616 opsize = convert_to_mode (mode, size, 1);
2617 pred = insn_data[(int) code].operand[1].predicate;
2618 if (pred != 0 && ! (*pred) (opsize, mode))
2619 opsize = copy_to_mode_reg (mode, opsize);
2621 opchar = val;
2622 char_mode = insn_data[(int) code].operand[2].mode;
2623 if (char_mode != VOIDmode)
2625 opchar = convert_to_mode (char_mode, opchar, 1);
2626 pred = insn_data[(int) code].operand[2].predicate;
2627 if (pred != 0 && ! (*pred) (opchar, char_mode))
2628 opchar = copy_to_mode_reg (char_mode, opchar);
2631 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2632 if (pat)
2634 emit_insn (pat);
2635 return true;
2637 else
2638 delete_insns_since (last);
2642 return false;
2646 /* Write to one of the components of the complex value CPLX. Write VAL to
2647 the real part if IMAG_P is false, and the imaginary part if it is true. */
2649 static void
2650 write_complex_part (rtx cplx, rtx val, bool imag_p)
2652 enum machine_mode cmode;
2653 enum machine_mode imode;
2654 unsigned ibitsize;
2656 if (GET_CODE (cplx) == CONCAT)
2658 emit_move_insn (XEXP (cplx, imag_p), val);
2659 return;
2662 cmode = GET_MODE (cplx);
2663 imode = GET_MODE_INNER (cmode);
2664 ibitsize = GET_MODE_BITSIZE (imode);
2666 /* For MEMs simplify_gen_subreg may generate an invalid new address
2667 because, e.g., the original address is considered mode-dependent
2668 by the target, which restricts simplify_subreg from invoking
2669 adjust_address_nv. Instead of preparing fallback support for an
2670 invalid address, we call adjust_address_nv directly. */
2671 if (MEM_P (cplx))
2673 emit_move_insn (adjust_address_nv (cplx, imode,
2674 imag_p ? GET_MODE_SIZE (imode) : 0),
2675 val);
2676 return;
2679 /* If the sub-object is at least word sized, then we know that subregging
2680 will work. This special case is important, since store_bit_field
2681 wants to operate on integer modes, and there's rarely an OImode to
2682 correspond to TCmode. */
2683 if (ibitsize >= BITS_PER_WORD
2684 /* For hard regs we have exact predicates. Assume we can split
2685 the original object if it spans an even number of hard regs.
2686 This special case is important for SCmode on 64-bit platforms
2687 where the natural size of floating-point regs is 32-bit. */
2688 || (REG_P (cplx)
2689 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2690 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2692 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2693 imag_p ? GET_MODE_SIZE (imode) : 0);
2694 if (part)
2696 emit_move_insn (part, val);
2697 return;
2699 else
2700 /* simplify_gen_subreg may fail for sub-word MEMs. */
2701 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2704 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2707 /* Extract one of the components of the complex value CPLX. Extract the
2708 real part if IMAG_P is false, and the imaginary part if it's true. */
2710 static rtx
2711 read_complex_part (rtx cplx, bool imag_p)
2713 enum machine_mode cmode, imode;
2714 unsigned ibitsize;
2716 if (GET_CODE (cplx) == CONCAT)
2717 return XEXP (cplx, imag_p);
2719 cmode = GET_MODE (cplx);
2720 imode = GET_MODE_INNER (cmode);
2721 ibitsize = GET_MODE_BITSIZE (imode);
2723 /* Special case reads from complex constants that got spilled to memory. */
2724 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2726 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2727 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2729 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2730 if (CONSTANT_CLASS_P (part))
2731 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2735 /* For MEMs simplify_gen_subreg may generate an invalid new address
2736 because, e.g., the original address is considered mode-dependent
2737 by the target, which restricts simplify_subreg from invoking
2738 adjust_address_nv. Instead of preparing fallback support for an
2739 invalid address, we call adjust_address_nv directly. */
2740 if (MEM_P (cplx))
2741 return adjust_address_nv (cplx, imode,
2742 imag_p ? GET_MODE_SIZE (imode) : 0);
2744 /* If the sub-object is at least word sized, then we know that subregging
2745 will work. This special case is important, since extract_bit_field
2746 wants to operate on integer modes, and there's rarely an OImode to
2747 correspond to TCmode. */
2748 if (ibitsize >= BITS_PER_WORD
2749 /* For hard regs we have exact predicates. Assume we can split
2750 the original object if it spans an even number of hard regs.
2751 This special case is important for SCmode on 64-bit platforms
2752 where the natural size of floating-point regs is 32-bit. */
2753 || (REG_P (cplx)
2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2757 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2758 imag_p ? GET_MODE_SIZE (imode) : 0);
2759 if (ret)
2760 return ret;
2761 else
2762 /* simplify_gen_subreg may fail for sub-word MEMs. */
2763 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2766 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2767 true, NULL_RTX, imode, imode);
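/* Illustrative sketch, not part of the original file: exchanging the real
   and imaginary parts of a complex value with the two helpers above.  The
   parts are copied into pseudos first so that neither store clobbers the
   other operand.  */
#if 0
static void
example_swap_complex_parts (rtx cplx)
{
  rtx re = copy_to_reg (read_complex_part (cplx, false));
  rtx im = copy_to_reg (read_complex_part (cplx, true));

  write_complex_part (cplx, im, false);
  write_complex_part (cplx, re, true);
}
#endif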
2770 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2771 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2772 represented in NEW_MODE. If FORCE is true, this will never happen, as
2773 we'll force-create a SUBREG if needed. */
2775 static rtx
2776 emit_move_change_mode (enum machine_mode new_mode,
2777 enum machine_mode old_mode, rtx x, bool force)
2779 rtx ret;
2781 if (reload_in_progress && MEM_P (x))
2783 /* We can't use gen_lowpart here because it may call change_address
2784 which is not appropriate if we were called when a reload was in
2785 progress. We don't have to worry about changing the address since
2786 the size in bytes is supposed to be the same. Copy the MEM to
2787 change the mode and move any substitutions from the old MEM to
2788 the new one. */
2790 ret = adjust_address_nv (x, new_mode, 0);
2791 copy_replacements (x, ret);
2793 else
2795 /* Note that we do want simplify_subreg's behavior of validating
2796 that the new mode is ok for a hard register. If we were to use
2797 simplify_gen_subreg, we would create the subreg, but would
2798 probably run into the target not being able to implement it. */
2799 /* Except, of course, when FORCE is true, when this is exactly what
2800 we want. Which is needed for CCmodes on some targets. */
2801 if (force)
2802 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2803 else
2804 ret = simplify_subreg (new_mode, x, old_mode, 0);
2807 return ret;
2810 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2811 an integer mode of the same size as MODE. Returns the instruction
2812 emitted, or NULL if such a move could not be generated. */
2814 static rtx
2815 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2817 enum machine_mode imode;
2818 enum insn_code code;
2820 /* There must exist a mode of the exact size we require. */
2821 imode = int_mode_for_mode (mode);
2822 if (imode == BLKmode)
2823 return NULL_RTX;
2825 /* The target must support moves in this mode. */
2826 code = mov_optab->handlers[imode].insn_code;
2827 if (code == CODE_FOR_nothing)
2828 return NULL_RTX;
2830 x = emit_move_change_mode (imode, mode, x, force);
2831 if (x == NULL_RTX)
2832 return NULL_RTX;
2833 y = emit_move_change_mode (imode, mode, y, force);
2834 if (y == NULL_RTX)
2835 return NULL_RTX;
2836 return emit_insn (GEN_FCN (code) (x, y));
2839 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2840 Return an equivalent MEM that does not use an auto-increment. */
2842 static rtx
2843 emit_move_resolve_push (enum machine_mode mode, rtx x)
2845 enum rtx_code code = GET_CODE (XEXP (x, 0));
2846 HOST_WIDE_INT adjust;
2847 rtx temp;
2849 adjust = GET_MODE_SIZE (mode);
2850 #ifdef PUSH_ROUNDING
2851 adjust = PUSH_ROUNDING (adjust);
2852 #endif
2853 if (code == PRE_DEC || code == POST_DEC)
2854 adjust = -adjust;
2855 else if (code == PRE_MODIFY || code == POST_MODIFY)
2857 rtx expr = XEXP (XEXP (x, 0), 1);
2858 HOST_WIDE_INT val;
2860 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2861 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2862 val = INTVAL (XEXP (expr, 1));
2863 if (GET_CODE (expr) == MINUS)
2864 val = -val;
2865 gcc_assert (adjust == val || adjust == -val);
2866 adjust = val;
2869 /* Do not use anti_adjust_stack, since we don't want to update
2870 stack_pointer_delta. */
2871 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2872 GEN_INT (adjust), stack_pointer_rtx,
2873 0, OPTAB_LIB_WIDEN);
2874 if (temp != stack_pointer_rtx)
2875 emit_move_insn (stack_pointer_rtx, temp);
2877 switch (code)
2879 case PRE_INC:
2880 case PRE_DEC:
2881 case PRE_MODIFY:
2882 temp = stack_pointer_rtx;
2883 break;
2884 case POST_INC:
2885 case POST_DEC:
2886 case POST_MODIFY:
2887 temp = plus_constant (stack_pointer_rtx, -adjust);
2888 break;
2889 default:
2890 gcc_unreachable ();
2893 return replace_equiv_address (x, temp);
2896 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2897 X is known to satisfy push_operand, and MODE is known to be complex.
2898 Returns the last instruction emitted. */
2900 static rtx
2901 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2903 enum machine_mode submode = GET_MODE_INNER (mode);
2904 bool imag_first;
2906 #ifdef PUSH_ROUNDING
2907 unsigned int submodesize = GET_MODE_SIZE (submode);
2909 /* In case we output to the stack, but the size is smaller than the
2910 machine can push exactly, we need to use move instructions. */
2911 if (PUSH_ROUNDING (submodesize) != submodesize)
2913 x = emit_move_resolve_push (mode, x);
2914 return emit_move_insn (x, y);
2916 #endif
2918 /* Note that the real part always precedes the imag part in memory
2919 regardless of machine's endianness. */
2920 switch (GET_CODE (XEXP (x, 0)))
2922 case PRE_DEC:
2923 case POST_DEC:
2924 imag_first = true;
2925 break;
2926 case PRE_INC:
2927 case POST_INC:
2928 imag_first = false;
2929 break;
2930 default:
2931 gcc_unreachable ();
2934 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2935 read_complex_part (y, imag_first));
2936 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2937 read_complex_part (y, !imag_first));
2940 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2941 MODE is known to be complex. Returns the last instruction emitted. */
2943 static rtx
2944 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2946 bool try_int;
2948 /* Need to take special care for pushes, to maintain proper ordering
2949 of the data, and possibly extra padding. */
2950 if (push_operand (x, mode))
2951 return emit_move_complex_push (mode, x, y);
2953 /* See if we can coerce the target into moving both values at once. */
2955 /* Move floating point as parts. */
2956 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2957 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2958 try_int = false;
2959 /* Not possible if the values are inherently not adjacent. */
2960 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2961 try_int = false;
2962 /* Is possible if both are registers (or subregs of registers). */
2963 else if (register_operand (x, mode) && register_operand (y, mode))
2964 try_int = true;
2965 /* If one of the operands is a memory, and alignment constraints
2966 are friendly enough, we may be able to do combined memory operations.
2967 We do not attempt this if Y is a constant because that combination is
2968 usually better with the by-parts thing below. */
2969 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2970 && (!STRICT_ALIGNMENT
2971 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2972 try_int = true;
2973 else
2974 try_int = false;
2976 if (try_int)
2978 rtx ret;
2980 /* For memory to memory moves, optimal behavior can be had with the
2981 existing block move logic. */
2982 if (MEM_P (x) && MEM_P (y))
2984 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2985 BLOCK_OP_NO_LIBCALL);
2986 return get_last_insn ();
2989 ret = emit_move_via_integer (mode, x, y, true);
2990 if (ret)
2991 return ret;
2994 /* Show the output dies here. This is necessary for SUBREGs
2995 of pseudos since we cannot track their lifetimes correctly;
2996 hard regs shouldn't appear here except as return values. */
2997 if (!reload_completed && !reload_in_progress
2998 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2999 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3001 write_complex_part (x, read_complex_part (y, false), false);
3002 write_complex_part (x, read_complex_part (y, true), true);
3003 return get_last_insn ();
3006 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3007 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3009 static rtx
3010 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3012 rtx ret;
3014 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3015 if (mode != CCmode)
3017 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3018 if (code != CODE_FOR_nothing)
3020 x = emit_move_change_mode (CCmode, mode, x, true);
3021 y = emit_move_change_mode (CCmode, mode, y, true);
3022 return emit_insn (GEN_FCN (code) (x, y));
3026 /* Otherwise, find the MODE_INT mode of the same width. */
3027 ret = emit_move_via_integer (mode, x, y, false);
3028 gcc_assert (ret != NULL);
3029 return ret;
3032 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3033 MODE is any multi-word or full-word mode that lacks a move_insn
3034 pattern. Note that you will get better code if you define such
3035 patterns, even if they must turn into multiple assembler instructions. */
3037 static rtx
3038 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3040 rtx last_insn = 0;
3041 rtx seq, inner;
3042 bool need_clobber;
3043 int i;
3045 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3047 /* If X is a push on the stack, do the push now and replace
3048 X with a reference to the stack pointer. */
3049 if (push_operand (x, mode))
3050 x = emit_move_resolve_push (mode, x);
3052 /* If we are in reload, see if either operand is a MEM whose address
3053 is scheduled for replacement. */
3054 if (reload_in_progress && MEM_P (x)
3055 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3056 x = replace_equiv_address_nv (x, inner);
3057 if (reload_in_progress && MEM_P (y)
3058 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3059 y = replace_equiv_address_nv (y, inner);
3061 start_sequence ();
3063 need_clobber = false;
3064 for (i = 0;
3065 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3066 i++)
3068 rtx xpart = operand_subword (x, i, 1, mode);
3069 rtx ypart = operand_subword (y, i, 1, mode);
3071 /* If we can't get a part of Y, put Y into memory if it is a
3072 constant. Otherwise, force it into a register. Then we must
3073 be able to get a part of Y. */
3074 if (ypart == 0 && CONSTANT_P (y))
3076 y = force_const_mem (mode, y);
3077 ypart = operand_subword (y, i, 1, mode);
3079 else if (ypart == 0)
3080 ypart = operand_subword_force (y, i, mode);
3082 gcc_assert (xpart && ypart);
3084 need_clobber |= (GET_CODE (xpart) == SUBREG);
3086 last_insn = emit_move_insn (xpart, ypart);
3089 seq = get_insns ();
3090 end_sequence ();
3092 /* Show the output dies here. This is necessary for SUBREGs
3093 of pseudos since we cannot track their lifetimes correctly;
3094 hard regs shouldn't appear here except as return values.
3095 We never want to emit such a clobber after reload. */
3096 if (x != y
3097 && ! (reload_in_progress || reload_completed)
3098 && need_clobber != 0)
3099 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3101 emit_insn (seq);
3103 return last_insn;
3106 /* Low level part of emit_move_insn.
3107 Called just like emit_move_insn, but assumes X and Y
3108 are basically valid. */
3110 rtx
3111 emit_move_insn_1 (rtx x, rtx y)
3113 enum machine_mode mode = GET_MODE (x);
3114 enum insn_code code;
3116 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3118 code = mov_optab->handlers[mode].insn_code;
3119 if (code != CODE_FOR_nothing)
3120 return emit_insn (GEN_FCN (code) (x, y));
3122 /* Expand complex moves by moving real part and imag part. */
3123 if (COMPLEX_MODE_P (mode))
3124 return emit_move_complex (mode, x, y);
3126 if (GET_MODE_CLASS (mode) == MODE_CC)
3127 return emit_move_ccmode (mode, x, y);
3129 /* Try using a move pattern for the corresponding integer mode. This is
3130 only safe when simplify_subreg can convert MODE constants into integer
3131 constants. At present, it can only do this reliably if the value
3132 fits within a HOST_WIDE_INT. */
3133 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3135 rtx ret = emit_move_via_integer (mode, x, y, false);
3136 if (ret)
3137 return ret;
3140 return emit_move_multi_word (mode, x, y);
3143 /* Generate code to copy Y into X.
3144 Both Y and X must have the same mode, except that
3145 Y can be a constant with VOIDmode.
3146 This mode cannot be BLKmode; use emit_block_move for that.
3148 Return the last instruction emitted. */
3150 rtx
3151 emit_move_insn (rtx x, rtx y)
3153 enum machine_mode mode = GET_MODE (x);
3154 rtx y_cst = NULL_RTX;
3155 rtx last_insn, set;
3157 gcc_assert (mode != BLKmode
3158 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3160 if (CONSTANT_P (y))
3162 if (optimize
3163 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3164 && (last_insn = compress_float_constant (x, y)))
3165 return last_insn;
3167 y_cst = y;
3169 if (!LEGITIMATE_CONSTANT_P (y))
3171 y = force_const_mem (mode, y);
3173 /* If the target's cannot_force_const_mem prevented the spill,
3174 assume that the target's move expanders will also take care
3175 of the non-legitimate constant. */
3176 if (!y)
3177 y = y_cst;
3181 /* If X or Y are memory references, verify that their addresses are valid
3182 for the machine. */
3183 if (MEM_P (x)
3184 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3185 && ! push_operand (x, GET_MODE (x)))
3186 || (flag_force_addr
3187 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3188 x = validize_mem (x);
3190 if (MEM_P (y)
3191 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3192 || (flag_force_addr
3193 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3194 y = validize_mem (y);
3196 gcc_assert (mode != BLKmode);
3198 last_insn = emit_move_insn_1 (x, y);
3200 if (y_cst && REG_P (x)
3201 && (set = single_set (last_insn)) != NULL_RTX
3202 && SET_DEST (set) == x
3203 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3204 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3206 return last_insn;
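/* Illustrative sketch, not part of the original file: the typical expander
   idiom for emit_move_insn -- copy a constant into a fresh pseudo and use
   the pseudo afterwards.  */
#if 0
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif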
3209 /* If Y is representable exactly in a narrower mode, and the target can
3210 perform the extension directly from constant or memory, then emit the
3211 move as an extension. */
3213 static rtx
3214 compress_float_constant (rtx x, rtx y)
3216 enum machine_mode dstmode = GET_MODE (x);
3217 enum machine_mode orig_srcmode = GET_MODE (y);
3218 enum machine_mode srcmode;
3219 REAL_VALUE_TYPE r;
3220 int oldcost, newcost;
3222 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3224 if (LEGITIMATE_CONSTANT_P (y))
3225 oldcost = rtx_cost (y, SET);
3226 else
3227 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3229 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3230 srcmode != orig_srcmode;
3231 srcmode = GET_MODE_WIDER_MODE (srcmode))
3233 enum insn_code ic;
3234 rtx trunc_y, last_insn;
3236 /* Skip if the target can't extend this way. */
3237 ic = can_extend_p (dstmode, srcmode, 0);
3238 if (ic == CODE_FOR_nothing)
3239 continue;
3241 /* Skip if the narrowed value isn't exact. */
3242 if (! exact_real_truncate (srcmode, &r))
3243 continue;
3245 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3247 if (LEGITIMATE_CONSTANT_P (trunc_y))
3249 /* Skip if the target needs extra instructions to perform
3250 the extension. */
3251 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3252 continue;
3253 /* This is valid, but may not be cheaper than the original. */
3254 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3255 if (oldcost < newcost)
3256 continue;
3258 else if (float_extend_from_mem[dstmode][srcmode])
3260 trunc_y = force_const_mem (srcmode, trunc_y);
3261 /* This is valid, but may not be cheaper than the original. */
3262 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3263 if (oldcost < newcost)
3264 continue;
3265 trunc_y = validize_mem (trunc_y);
3267 else
3268 continue;
3270 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3271 last_insn = get_last_insn ();
3273 if (REG_P (x))
3274 set_unique_reg_note (last_insn, REG_EQUAL, y);
3276 return last_insn;
3279 return NULL_RTX;
3282 /* Pushing data onto the stack. */
3284 /* Push a block of length SIZE (perhaps variable)
3285 and return an rtx to address the beginning of the block.
3286 The value may be virtual_outgoing_args_rtx.
3288 EXTRA is the number of bytes of padding to push in addition to SIZE.
3289 BELOW nonzero means this padding comes at low addresses;
3290 otherwise, the padding comes at high addresses. */
3292 rtx
3293 push_block (rtx size, int extra, int below)
3295 rtx temp;
3297 size = convert_modes (Pmode, ptr_mode, size, 1);
3298 if (CONSTANT_P (size))
3299 anti_adjust_stack (plus_constant (size, extra));
3300 else if (REG_P (size) && extra == 0)
3301 anti_adjust_stack (size);
3302 else
3304 temp = copy_to_mode_reg (Pmode, size);
3305 if (extra != 0)
3306 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3307 temp, 0, OPTAB_LIB_WIDEN);
3308 anti_adjust_stack (temp);
3311 #ifndef STACK_GROWS_DOWNWARD
3312 if (0)
3313 #else
3314 if (1)
3315 #endif
3317 temp = virtual_outgoing_args_rtx;
3318 if (extra != 0 && below)
3319 temp = plus_constant (temp, extra);
3321 else
3323 if (GET_CODE (size) == CONST_INT)
3324 temp = plus_constant (virtual_outgoing_args_rtx,
3325 -INTVAL (size) - (below ? 0 : extra));
3326 else if (extra != 0 && !below)
3327 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3328 negate_rtx (Pmode, plus_constant (size, extra)));
3329 else
3330 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3331 negate_rtx (Pmode, size));
3334 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
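/* Illustrative sketch, not part of the original file: reserving 64 bytes
   of outgoing stack space with push_block and wrapping the returned
   address in a BLKmode MEM.  The size is invented for the example.  */
#if 0
static rtx
example_push_block (void)
{
  rtx addr = push_block (GEN_INT (64), 0, 0);
  return gen_rtx_MEM (BLKmode, addr);
}
#endif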
3337 #ifdef PUSH_ROUNDING
3339 /* Emit single push insn. */
3341 static void
3342 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3344 rtx dest_addr;
3345 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3346 rtx dest;
3347 enum insn_code icode;
3348 insn_operand_predicate_fn pred;
3350 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3351 /* If there is push pattern, use it. Otherwise try old way of throwing
3352 MEM representing push operation to move expander. */
3353 icode = push_optab->handlers[(int) mode].insn_code;
3354 if (icode != CODE_FOR_nothing)
3356 if (((pred = insn_data[(int) icode].operand[0].predicate)
3357 && !((*pred) (x, mode))))
3358 x = force_reg (mode, x);
3359 emit_insn (GEN_FCN (icode) (x));
3360 return;
3362 if (GET_MODE_SIZE (mode) == rounded_size)
3363 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3364 /* If we are to pad downward, adjust the stack pointer first and
3365 then store X into the stack location using an offset. This is
3366 because emit_move_insn does not know how to pad; it does not have
3367 access to type. */
3368 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3370 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3371 HOST_WIDE_INT offset;
3373 emit_move_insn (stack_pointer_rtx,
3374 expand_binop (Pmode,
3375 #ifdef STACK_GROWS_DOWNWARD
3376 sub_optab,
3377 #else
3378 add_optab,
3379 #endif
3380 stack_pointer_rtx,
3381 GEN_INT (rounded_size),
3382 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3384 offset = (HOST_WIDE_INT) padding_size;
3385 #ifdef STACK_GROWS_DOWNWARD
3386 if (STACK_PUSH_CODE == POST_DEC)
3387 /* We have already decremented the stack pointer, so get the
3388 previous value. */
3389 offset += (HOST_WIDE_INT) rounded_size;
3390 #else
3391 if (STACK_PUSH_CODE == POST_INC)
3392 /* We have already incremented the stack pointer, so get the
3393 previous value. */
3394 offset -= (HOST_WIDE_INT) rounded_size;
3395 #endif
3396 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3398 else
3400 #ifdef STACK_GROWS_DOWNWARD
3401 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3402 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3403 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3404 #else
3405 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3406 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3407 GEN_INT (rounded_size));
3408 #endif
3409 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3412 dest = gen_rtx_MEM (mode, dest_addr);
3414 if (type != 0)
3416 set_mem_attributes (dest, type, 1);
3418 if (flag_optimize_sibling_calls)
3419 /* Function incoming arguments may overlap with sibling call
3420 outgoing arguments and we cannot allow reordering of reads
3421 from function arguments with stores to outgoing arguments
3422 of sibling calls. */
3423 set_mem_alias_set (dest, 0);
3425 emit_move_insn (dest, x);
3427 #endif
3429 /* Generate code to push X onto the stack, assuming it has mode MODE and
3430 type TYPE.
3431 MODE is redundant except when X is a CONST_INT (since they don't
3432 carry mode info).
3433 SIZE is an rtx for the size of data to be copied (in bytes),
3434 needed only if X is BLKmode.
3436 ALIGN (in bits) is maximum alignment we can assume.
3438 If PARTIAL and REG are both nonzero, then copy that many of the first
3439 bytes of X into registers starting with REG, and push the rest of X.
3440 The amount of space pushed is decreased by PARTIAL bytes.
3441 REG must be a hard register in this case.
3442 If REG is zero but PARTIAL is not, take all other actions for an
3443 argument partially in registers, but do not actually load any
3444 registers.
3446 EXTRA is the amount in bytes of extra space to leave next to this arg.
3447 This is ignored if an argument block has already been allocated.
3449 On a machine that lacks real push insns, ARGS_ADDR is the address of
3450 the bottom of the argument block for this call. We use indexing off there
3451 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3452 argument block has not been preallocated.
3454 ARGS_SO_FAR is the size of args previously pushed for this call.
3456 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3457 for arguments passed in registers. If nonzero, it will be the number
3458 of bytes required. */
3460 void
3461 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3462 unsigned int align, int partial, rtx reg, int extra,
3463 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3464 rtx alignment_pad)
3466 rtx xinner;
3467 enum direction stack_direction
3468 #ifdef STACK_GROWS_DOWNWARD
3469 = downward;
3470 #else
3471 = upward;
3472 #endif
3474 /* Decide where to pad the argument: `downward' for below,
3475 `upward' for above, or `none' for don't pad it.
3476 Default is below for small data on big-endian machines; else above. */
3477 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3479 /* Invert direction if stack is post-decrement.
3480 FIXME: why? */
3481 if (STACK_PUSH_CODE == POST_DEC)
3482 if (where_pad != none)
3483 where_pad = (where_pad == downward ? upward : downward);
3485 xinner = x;
3487 if (mode == BLKmode)
3489 /* Copy a block into the stack, entirely or partially. */
3491 rtx temp;
3492 int used;
3493 int offset;
3494 int skip;
3496 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3497 used = partial - offset;
3499 gcc_assert (size);
3501 /* USED is now the # of bytes we need not copy to the stack
3502 because registers will take care of them. */
3504 if (partial != 0)
3505 xinner = adjust_address (xinner, BLKmode, used);
3507 /* If the partial register-part of the arg counts in its stack size,
3508 skip the part of stack space corresponding to the registers.
3509 Otherwise, start copying to the beginning of the stack space,
3510 by setting SKIP to 0. */
3511 skip = (reg_parm_stack_space == 0) ? 0 : used;
3513 #ifdef PUSH_ROUNDING
3514 /* Do it with several push insns if that doesn't take lots of insns
3515 and if there is no difficulty with push insns that skip bytes
3516 on the stack for alignment purposes. */
3517 if (args_addr == 0
3518 && PUSH_ARGS
3519 && GET_CODE (size) == CONST_INT
3520 && skip == 0
3521 && MEM_ALIGN (xinner) >= align
3522 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3523 /* Here we avoid the case of a structure whose weak alignment
3524 forces many pushes of a small amount of data,
3525 and such small pushes do rounding that causes trouble. */
3526 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3527 || align >= BIGGEST_ALIGNMENT
3528 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3529 == (align / BITS_PER_UNIT)))
3530 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3532 /* Push padding now if padding above and stack grows down,
3533 or if padding below and stack grows up.
3534 But if space already allocated, this has already been done. */
3535 if (extra && args_addr == 0
3536 && where_pad != none && where_pad != stack_direction)
3537 anti_adjust_stack (GEN_INT (extra));
3539 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3541 else
3542 #endif /* PUSH_ROUNDING */
3544 rtx target;
3546 /* Otherwise make space on the stack and copy the data
3547 to the address of that space. */
3549 /* Deduct words put into registers from the size we must copy. */
3550 if (partial != 0)
3552 if (GET_CODE (size) == CONST_INT)
3553 size = GEN_INT (INTVAL (size) - used);
3554 else
3555 size = expand_binop (GET_MODE (size), sub_optab, size,
3556 GEN_INT (used), NULL_RTX, 0,
3557 OPTAB_LIB_WIDEN);
3560 /* Get the address of the stack space.
3561 In this case, we do not deal with EXTRA separately.
3562 A single stack adjust will do. */
3563 if (! args_addr)
3565 temp = push_block (size, extra, where_pad == downward);
3566 extra = 0;
3568 else if (GET_CODE (args_so_far) == CONST_INT)
3569 temp = memory_address (BLKmode,
3570 plus_constant (args_addr,
3571 skip + INTVAL (args_so_far)));
3572 else
3573 temp = memory_address (BLKmode,
3574 plus_constant (gen_rtx_PLUS (Pmode,
3575 args_addr,
3576 args_so_far),
3577 skip));
3579 if (!ACCUMULATE_OUTGOING_ARGS)
3581 /* If the source is referenced relative to the stack pointer,
3582 copy it to another register to stabilize it. We do not need
3583 to do this if we know that we won't be changing sp. */
3585 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3586 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3587 temp = copy_to_reg (temp);
3590 target = gen_rtx_MEM (BLKmode, temp);
3592 /* We do *not* set_mem_attributes here, because incoming arguments
3593 may overlap with sibling call outgoing arguments and we cannot
3594 allow reordering of reads from function arguments with stores
3595 to outgoing arguments of sibling calls. We do, however, want
3596 to record the alignment of the stack slot. */
3597 /* ALIGN may well be better aligned than TYPE, e.g. due to
3598 PARM_BOUNDARY. Assume the caller isn't lying. */
3599 set_mem_align (target, align);
3601 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3604 else if (partial > 0)
3606 /* Scalar partly in registers. */
3608 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3609 int i;
3610 int not_stack;
3611 /* # bytes of start of argument
3612 that we must make space for but need not store. */
3613 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3614 int args_offset = INTVAL (args_so_far);
3615 int skip;
3617 /* Push padding now if padding above and stack grows down,
3618 or if padding below and stack grows up.
3619 But if space already allocated, this has already been done. */
3620 if (extra && args_addr == 0
3621 && where_pad != none && where_pad != stack_direction)
3622 anti_adjust_stack (GEN_INT (extra));
3624 /* If we make space by pushing it, we might as well push
3625 the real data. Otherwise, we can leave OFFSET nonzero
3626 and leave the space uninitialized. */
3627 if (args_addr == 0)
3628 offset = 0;
3630 /* Now NOT_STACK gets the number of words that we don't need to
3631 allocate on the stack. Convert OFFSET to words too. */
3632 not_stack = (partial - offset) / UNITS_PER_WORD;
3633 offset /= UNITS_PER_WORD;
3635 /* If the partial register-part of the arg counts in its stack size,
3636 skip the part of stack space corresponding to the registers.
3637 Otherwise, start copying to the beginning of the stack space,
3638 by setting SKIP to 0. */
3639 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3641 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3642 x = validize_mem (force_const_mem (mode, x));
3644 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3645 SUBREGs of such registers are not allowed. */
3646 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3647 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3648 x = copy_to_reg (x);
3650 /* Loop over all the words allocated on the stack for this arg. */
3651 /* We can do it by words, because any scalar bigger than a word
3652 has a size that is a multiple of a word. */
3653 #ifndef PUSH_ARGS_REVERSED
3654 for (i = not_stack; i < size; i++)
3655 #else
3656 for (i = size - 1; i >= not_stack; i--)
3657 #endif
3658 if (i >= not_stack + offset)
3659 emit_push_insn (operand_subword_force (x, i, mode),
3660 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3661 0, args_addr,
3662 GEN_INT (args_offset + ((i - not_stack + skip)
3663 * UNITS_PER_WORD)),
3664 reg_parm_stack_space, alignment_pad);
3666 else
3668 rtx addr;
3669 rtx dest;
3671 /* Push padding now if padding above and stack grows down,
3672 or if padding below and stack grows up.
3673 But if space already allocated, this has already been done. */
3674 if (extra && args_addr == 0
3675 && where_pad != none && where_pad != stack_direction)
3676 anti_adjust_stack (GEN_INT (extra));
3678 #ifdef PUSH_ROUNDING
3679 if (args_addr == 0 && PUSH_ARGS)
3680 emit_single_push_insn (mode, x, type);
3681 else
3682 #endif
3684 if (GET_CODE (args_so_far) == CONST_INT)
3685 addr
3686 = memory_address (mode,
3687 plus_constant (args_addr,
3688 INTVAL (args_so_far)));
3689 else
3690 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3691 args_so_far));
3692 dest = gen_rtx_MEM (mode, addr);
3694 /* We do *not* set_mem_attributes here, because incoming arguments
3695 may overlap with sibling call outgoing arguments and we cannot
3696 allow reordering of reads from function arguments with stores
3697 to outgoing arguments of sibling calls. We do, however, want
3698 to record the alignment of the stack slot. */
3699 /* ALIGN may well be better aligned than TYPE, e.g. due to
3700 PARM_BOUNDARY. Assume the caller isn't lying. */
3701 set_mem_align (dest, align);
3703 emit_move_insn (dest, x);
3707 /* If part should go in registers, copy that part
3708 into the appropriate registers. Do this now, at the end,
3709 since mem-to-mem copies above may do function calls. */
3710 if (partial > 0 && reg != 0)
3712 /* Handle calls that pass values in multiple non-contiguous locations.
3713 The Irix 6 ABI has examples of this. */
3714 if (GET_CODE (reg) == PARALLEL)
3715 emit_group_load (reg, x, type, -1);
3716 else
3718 gcc_assert (partial % UNITS_PER_WORD == 0);
3719 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3723 if (extra && args_addr == 0 && where_pad == stack_direction)
3724 anti_adjust_stack (GEN_INT (extra));
3726 if (alignment_pad && args_addr == 0)
3727 anti_adjust_stack (alignment_pad);
3730 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3731 operations. */
3733 static rtx
3734 get_subtarget (rtx x)
3736 return (optimize
3737 || x == 0
3738 /* Only registers can be subtargets. */
3739 || !REG_P (x)
3740 /* Don't use hard regs to avoid extending their life. */
3741 || REGNO (x) < FIRST_PSEUDO_REGISTER
3742 ? 0 : x);
3745 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3746 FIELD is a bitfield. Returns true if the optimization was successful,
3747 and there's nothing else to do. */
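/* For example (purely illustrative): this catches compound assignments
   to bit-fields such as

        struct s { unsigned int flags : 7; } x;
        x.flags |= 0x10;

   where the update can be applied directly to the word holding the field
   instead of going through an extract/modify/insert sequence.  */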
3749 static bool
3750 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3751 unsigned HOST_WIDE_INT bitpos,
3752 enum machine_mode mode1, rtx str_rtx,
3753 tree to, tree src)
3755 enum machine_mode str_mode = GET_MODE (str_rtx);
3756 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3757 tree op0, op1;
3758 rtx value, result;
3759 optab binop;
3761 if (mode1 != VOIDmode
3762 || bitsize >= BITS_PER_WORD
3763 || str_bitsize > BITS_PER_WORD
3764 || TREE_SIDE_EFFECTS (to)
3765 || TREE_THIS_VOLATILE (to))
3766 return false;
3768 STRIP_NOPS (src);
3769 if (!BINARY_CLASS_P (src)
3770 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3771 return false;
3773 op0 = TREE_OPERAND (src, 0);
3774 op1 = TREE_OPERAND (src, 1);
3775 STRIP_NOPS (op0);
3777 if (!operand_equal_p (to, op0, 0))
3778 return false;
3780 if (MEM_P (str_rtx))
3782 unsigned HOST_WIDE_INT offset1;
3784 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3785 str_mode = word_mode;
3786 str_mode = get_best_mode (bitsize, bitpos,
3787 MEM_ALIGN (str_rtx), str_mode, 0);
3788 if (str_mode == VOIDmode)
3789 return false;
3790 str_bitsize = GET_MODE_BITSIZE (str_mode);
3792 offset1 = bitpos;
3793 bitpos %= str_bitsize;
3794 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3795 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3797 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3798 return false;
3800 /* If the bit field covers the whole REG/MEM, store_field
3801 will likely generate better code. */
3802 if (bitsize >= str_bitsize)
3803 return false;
3805 /* We can't handle fields split across multiple entities. */
3806 if (bitpos + bitsize > str_bitsize)
3807 return false;
3809 if (BYTES_BIG_ENDIAN)
3810 bitpos = str_bitsize - bitpos - bitsize;
3812 switch (TREE_CODE (src))
3814 case PLUS_EXPR:
3815 case MINUS_EXPR:
3816 /* For now, just optimize the case of the topmost bitfield
3817 where we don't need to do any masking and also
3818 1 bit bitfields where xor can be used.
3819 We might win by one instruction for the other bitfields
3820 too if insv/extv instructions aren't used, so that
3821 can be added later. */
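      /* Why these two cases are safe (an informal sketch): when the field
         occupies the most significant bits of the word, VALUE can simply be
         shifted into place and added; any carry out of the field falls off
         the top of the word, so no masking is needed.  For a 1 bit field,
         adding or subtracting a constant flips the bit exactly when the
         constant is odd, which is the same as XORing with (constant & 1).  */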
3822 if (bitpos + bitsize != str_bitsize
3823 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3824 break;
3826 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3827 value = convert_modes (str_mode,
3828 TYPE_MODE (TREE_TYPE (op1)), value,
3829 TYPE_UNSIGNED (TREE_TYPE (op1)));
3831 /* We may be accessing data outside the field, which means
3832 we can alias adjacent data. */
3833 if (MEM_P (str_rtx))
3835 str_rtx = shallow_copy_rtx (str_rtx);
3836 set_mem_alias_set (str_rtx, 0);
3837 set_mem_expr (str_rtx, 0);
3840 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3841 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3843 value = expand_and (str_mode, value, const1_rtx, NULL);
3844 binop = xor_optab;
3846 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3847 build_int_cst (NULL_TREE, bitpos),
3848 NULL_RTX, 1);
3849 result = expand_binop (str_mode, binop, str_rtx,
3850 value, str_rtx, 1, OPTAB_WIDEN);
3851 if (result != str_rtx)
3852 emit_move_insn (str_rtx, result);
3853 return true;
3855 case BIT_IOR_EXPR:
3856 case BIT_XOR_EXPR:
3857 if (TREE_CODE (op1) != INTEGER_CST)
3858 break;
3859 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3860 value = convert_modes (GET_MODE (str_rtx),
3861 TYPE_MODE (TREE_TYPE (op1)), value,
3862 TYPE_UNSIGNED (TREE_TYPE (op1)));
3864 /* We may be accessing data outside the field, which means
3865 we can alias adjacent data. */
3866 if (MEM_P (str_rtx))
3868 str_rtx = shallow_copy_rtx (str_rtx);
3869 set_mem_alias_set (str_rtx, 0);
3870 set_mem_expr (str_rtx, 0);
3873 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3874 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3876 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3877 - 1);
3878 value = expand_and (GET_MODE (str_rtx), value, mask,
3879 NULL_RTX);
3881 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3882 build_int_cst (NULL_TREE, bitpos),
3883 NULL_RTX, 1);
3884 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3885 value, str_rtx, 1, OPTAB_WIDEN);
3886 if (result != str_rtx)
3887 emit_move_insn (str_rtx, result);
3888 return true;
3890 default:
3891 break;
3894 return false;
3898 /* Expand an assignment that stores the value of FROM into TO. */
3900 void
3901 expand_assignment (tree to, tree from)
3903 rtx to_rtx = 0;
3904 rtx result;
3906 /* Don't crash if the lhs of the assignment was erroneous. */
3908 if (TREE_CODE (to) == ERROR_MARK)
3910 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3911 return;
3914 /* Assignment of a structure component needs special treatment
3915 if the structure component's rtx is not simply a MEM.
3916 Assignment of an array element at a constant index, and assignment of
3917 an array element in an unaligned packed structure field, has the same
3918 problem. */
3919 if (handled_component_p (to)
3920 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3922 enum machine_mode mode1;
3923 HOST_WIDE_INT bitsize, bitpos;
3924 tree offset;
3925 int unsignedp;
3926 int volatilep = 0;
3927 tree tem;
3929 push_temp_slots ();
3930 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3931 &unsignedp, &volatilep, true);
3933 /* If we are going to use store_bit_field and extract_bit_field,
3934 make sure to_rtx will be safe for multiple use. */
3936 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3938 if (offset != 0)
3940 rtx offset_rtx;
3942 if (!MEM_P (to_rtx))
3944 /* We can get constant negative offsets into arrays with broken
3945 user code. Translate this to a trap instead of ICEing. */
3946 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
3947 expand_builtin_trap ();
3948 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
3951 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3952 #ifdef POINTERS_EXTEND_UNSIGNED
3953 if (GET_MODE (offset_rtx) != Pmode)
3954 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3955 #else
3956 if (GET_MODE (offset_rtx) != ptr_mode)
3957 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3958 #endif
3960 /* A constant address in TO_RTX can have VOIDmode; we must not try
3961 to call force_reg for that case. Avoid that case. */
3962 if (MEM_P (to_rtx)
3963 && GET_MODE (to_rtx) == BLKmode
3964 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3965 && bitsize > 0
3966 && (bitpos % bitsize) == 0
3967 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3968 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3970 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3971 bitpos = 0;
3974 to_rtx = offset_address (to_rtx, offset_rtx,
3975 highest_pow2_factor_for_target (to,
3976 offset));
3979 /* Handle expand_expr of a complex value returning a CONCAT. */
3980 if (GET_CODE (to_rtx) == CONCAT)
3982 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3984 gcc_assert (bitpos == 0);
3985 result = store_expr (from, to_rtx, false);
3987 else
3989 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3990 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3993 else
3995 if (MEM_P (to_rtx))
3997 /* If the field is at offset zero, we could have been given the
3998 DECL_RTX of the parent struct. Don't munge it. */
3999 to_rtx = shallow_copy_rtx (to_rtx);
4001 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4003 /* Deal with volatile and readonly fields. The former is only
4004 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4005 if (volatilep)
4006 MEM_VOLATILE_P (to_rtx) = 1;
4007 if (component_uses_parent_alias_set (to))
4008 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4011 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4012 to_rtx, to, from))
4013 result = NULL;
4014 else
4015 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4016 TREE_TYPE (tem), get_alias_set (to));
4019 if (result)
4020 preserve_temp_slots (result);
4021 free_temp_slots ();
4022 pop_temp_slots ();
4023 return;
4026 /* If the rhs is a function call and its value is not an aggregate,
4027 call the function before we start to compute the lhs.
4028 This is needed for correct code for cases such as
4029 val = setjmp (buf) on machines where reference to val
4030 requires loading up part of an address in a separate insn.
4032 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4033 since it might be a promoted variable where the zero- or sign- extension
4034 needs to be done. Handling this in the normal way is safe because no
4035 computation is done before the call. */
4036 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4037 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4038 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4039 && REG_P (DECL_RTL (to))))
4041 rtx value;
4043 push_temp_slots ();
4044 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4045 if (to_rtx == 0)
4046 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4048 /* Handle calls that return values in multiple non-contiguous locations.
4049 The Irix 6 ABI has examples of this. */
4050 if (GET_CODE (to_rtx) == PARALLEL)
4051 emit_group_load (to_rtx, value, TREE_TYPE (from),
4052 int_size_in_bytes (TREE_TYPE (from)));
4053 else if (GET_MODE (to_rtx) == BLKmode)
4054 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4055 else
4057 if (POINTER_TYPE_P (TREE_TYPE (to)))
4058 value = convert_memory_address (GET_MODE (to_rtx), value);
4059 emit_move_insn (to_rtx, value);
4061 preserve_temp_slots (to_rtx);
4062 free_temp_slots ();
4063 pop_temp_slots ();
4064 return;
4067 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4068 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4070 if (to_rtx == 0)
4071 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4073 /* Don't move directly into a return register. */
4074 if (TREE_CODE (to) == RESULT_DECL
4075 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4077 rtx temp;
4079 push_temp_slots ();
4080 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4082 if (GET_CODE (to_rtx) == PARALLEL)
4083 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4084 int_size_in_bytes (TREE_TYPE (from)));
4085 else
4086 emit_move_insn (to_rtx, temp);
4088 preserve_temp_slots (to_rtx);
4089 free_temp_slots ();
4090 pop_temp_slots ();
4091 return;
4094 /* In case we are returning the contents of an object which overlaps
4095 the place the value is being stored, use a safe function when copying
4096 a value through a pointer into a structure value return block. */
4097 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4098 && current_function_returns_struct
4099 && !current_function_returns_pcc_struct)
4101 rtx from_rtx, size;
4103 push_temp_slots ();
4104 size = expr_size (from);
4105 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4107 emit_library_call (memmove_libfunc, LCT_NORMAL,
4108 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4109 XEXP (from_rtx, 0), Pmode,
4110 convert_to_mode (TYPE_MODE (sizetype),
4111 size, TYPE_UNSIGNED (sizetype)),
4112 TYPE_MODE (sizetype));
4114 preserve_temp_slots (to_rtx);
4115 free_temp_slots ();
4116 pop_temp_slots ();
4117 return;
4120 /* Compute FROM and store the value in the rtx we got. */
4122 push_temp_slots ();
4123 result = store_expr (from, to_rtx, 0);
4124 preserve_temp_slots (result);
4125 free_temp_slots ();
4126 pop_temp_slots ();
4127 return;
4130 /* Generate code for computing expression EXP,
4131 and storing the value into TARGET.
4133 If the mode is BLKmode then we may return TARGET itself.
4134 It turns out that in BLKmode it doesn't cause a problem,
4135 because C has no operators that could combine two different
4136 assignments into the same BLKmode object with different values
4137 with no sequence point. Will other languages need this to
4138 be more thorough?
4140 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4141 stack, and block moves may need to be treated specially. */
4143 rtx
4144 store_expr (tree exp, rtx target, int call_param_p)
4146 rtx temp;
4147 rtx alt_rtl = NULL_RTX;
4148 int dont_return_target = 0;
4150 if (VOID_TYPE_P (TREE_TYPE (exp)))
4152 /* C++ can generate ?: expressions with a throw expression in one
4153 branch and an rvalue in the other. Here, we resolve attempts to
4154 store the throw expression's nonexistent result. */
4155 gcc_assert (!call_param_p);
4156 expand_expr (exp, const0_rtx, VOIDmode, 0);
4157 return NULL_RTX;
4159 if (TREE_CODE (exp) == COMPOUND_EXPR)
4161 /* Perform first part of compound expression, then assign from second
4162 part. */
4163 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4164 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4165 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4167 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4169 /* For conditional expression, get safe form of the target. Then
4170 test the condition, doing the appropriate assignment on either
4171 side. This avoids the creation of unnecessary temporaries.
4172 For non-BLKmode, it is more efficient not to do this. */
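      /* E.g. (illustrative): for an assignment such as

           dst = flag ? a : b;

         where DST has BLKmode (say, a large struct), this expands to a test
         of FLAG followed by a direct store of either A or B into DST,
         instead of building the result in a temporary and copying it.  */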
4174 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4176 do_pending_stack_adjust ();
4177 NO_DEFER_POP;
4178 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4179 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4180 emit_jump_insn (gen_jump (lab2));
4181 emit_barrier ();
4182 emit_label (lab1);
4183 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4184 emit_label (lab2);
4185 OK_DEFER_POP;
4187 return NULL_RTX;
4189 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4190 /* If this is a scalar in a register that is stored in a wider mode
4191 than the declared mode, compute the result into its declared mode
4192 and then convert to the wider mode. Our value is the computed
4193 expression. */
4195 rtx inner_target = 0;
4197 /* We can do the conversion inside EXP, which will often result
4198 in some optimizations. Do the conversion in two steps: first
4199 change the signedness, if needed, then the extend. But don't
4200 do this if the type of EXP is a subtype of something else
4201 since then the conversion might involve more than just
4202 converting modes. */
4203 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4204 && TREE_TYPE (TREE_TYPE (exp)) == 0
4205 && (!lang_hooks.reduce_bit_field_operations
4206 || (GET_MODE_PRECISION (GET_MODE (target))
4207 == TYPE_PRECISION (TREE_TYPE (exp)))))
4209 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4210 != SUBREG_PROMOTED_UNSIGNED_P (target))
4211 exp = convert
4212 (lang_hooks.types.signed_or_unsigned_type
4213 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4215 exp = convert (lang_hooks.types.type_for_mode
4216 (GET_MODE (SUBREG_REG (target)),
4217 SUBREG_PROMOTED_UNSIGNED_P (target)),
4218 exp);
4220 inner_target = SUBREG_REG (target);
4223 temp = expand_expr (exp, inner_target, VOIDmode,
4224 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4226 /* If TEMP is a VOIDmode constant, use convert_modes to make
4227 sure that we properly convert it. */
4228 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4230 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4231 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4232 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4233 GET_MODE (target), temp,
4234 SUBREG_PROMOTED_UNSIGNED_P (target));
4237 convert_move (SUBREG_REG (target), temp,
4238 SUBREG_PROMOTED_UNSIGNED_P (target));
4240 return NULL_RTX;
4242 else
4244 temp = expand_expr_real (exp, target, GET_MODE (target),
4245 (call_param_p
4246 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4247 &alt_rtl);
4248 /* Return TARGET if it's a specified hardware register.
4249 If TARGET is a volatile mem ref, either return TARGET
4250 or return a reg copied *from* TARGET; ANSI requires this.
4252 Otherwise, if TEMP is not TARGET, return TEMP
4253 if it is constant (for efficiency),
4254 or if we really want the correct value. */
4255 if (!(target && REG_P (target)
4256 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4257 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4258 && ! rtx_equal_p (temp, target)
4259 && CONSTANT_P (temp))
4260 dont_return_target = 1;
4263 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4264 the same as that of TARGET, adjust the constant. This is needed, for
4265 example, in case it is a CONST_DOUBLE and we want only a word-sized
4266 value. */
4267 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4268 && TREE_CODE (exp) != ERROR_MARK
4269 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4270 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4271 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4273 /* If value was not generated in the target, store it there.
4274 Convert the value to TARGET's type first if necessary and emit the
4275 pending incrementations that have been queued when expanding EXP.
4276 Note that we cannot emit the whole queue blindly because this will
4277 effectively disable the POST_INC optimization later.
4279 If TEMP and TARGET compare equal according to rtx_equal_p, but
4280 one or both of them are volatile memory refs, we have to distinguish
4281 two cases:
4282 - expand_expr has used TARGET. In this case, we must not generate
4283 another copy. This can be detected by TARGET being equal according
4284 to == .
4285 - expand_expr has not used TARGET - that means that the source just
4286 happens to have the same RTX form. Since temp will have been created
4287 by expand_expr, it will compare unequal according to == .
4288 We must generate a copy in this case, to reach the correct number
4289 of volatile memory references. */
4291 if ((! rtx_equal_p (temp, target)
4292 || (temp != target && (side_effects_p (temp)
4293 || side_effects_p (target))))
4294 && TREE_CODE (exp) != ERROR_MARK
4295 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4296 but TARGET is not a valid memory reference, TEMP will differ
4297 from TARGET although it is really the same location. */
4298 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4299 /* If there's nothing to copy, don't bother. Don't call
4300 expr_size unless necessary, because some front-ends' (C++)
4301 expr_size hook must not be given objects that are not
4302 supposed to be bit-copied or bit-initialized. */
4303 && expr_size (exp) != const0_rtx)
4305 if (GET_MODE (temp) != GET_MODE (target)
4306 && GET_MODE (temp) != VOIDmode)
4308 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4309 if (dont_return_target)
4311 /* In this case, we will return TEMP,
4312 so make sure it has the proper mode.
4313 But don't forget to store the value into TARGET. */
4314 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4315 emit_move_insn (target, temp);
4317 else
4318 convert_move (target, temp, unsignedp);
4321 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4323 /* Handle copying a string constant into an array. The string
4324 constant may be shorter than the array. So copy just the string's
4325 actual length, and clear the rest. First get the size of the data
4326 type of the string, which is actually the size of the target. */
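         /* For instance (illustrative): for

              char buf[8] = "hi";

            the STRING_CST supplies 3 bytes (including the trailing NUL), so
            3 bytes are block-copied and the remaining 5 bytes of BUF are
            cleared below.  */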
4327 rtx size = expr_size (exp);
4329 if (GET_CODE (size) == CONST_INT
4330 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4331 emit_block_move (target, temp, size,
4332 (call_param_p
4333 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4334 else
4336 /* Compute the size of the data to copy from the string. */
4337 tree copy_size
4338 = size_binop (MIN_EXPR,
4339 make_tree (sizetype, size),
4340 size_int (TREE_STRING_LENGTH (exp)));
4341 rtx copy_size_rtx
4342 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4343 (call_param_p
4344 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4345 rtx label = 0;
4347 /* Copy that much. */
4348 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4349 TYPE_UNSIGNED (sizetype));
4350 emit_block_move (target, temp, copy_size_rtx,
4351 (call_param_p
4352 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4354 /* Figure out how much is left in TARGET that we have to clear.
4355 Do all calculations in ptr_mode. */
4356 if (GET_CODE (copy_size_rtx) == CONST_INT)
4358 size = plus_constant (size, -INTVAL (copy_size_rtx));
4359 target = adjust_address (target, BLKmode,
4360 INTVAL (copy_size_rtx));
4362 else
4364 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4365 copy_size_rtx, NULL_RTX, 0,
4366 OPTAB_LIB_WIDEN);
4368 #ifdef POINTERS_EXTEND_UNSIGNED
4369 if (GET_MODE (copy_size_rtx) != Pmode)
4370 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4371 TYPE_UNSIGNED (sizetype));
4372 #endif
4374 target = offset_address (target, copy_size_rtx,
4375 highest_pow2_factor (copy_size));
4376 label = gen_label_rtx ();
4377 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4378 GET_MODE (size), 0, label);
4381 if (size != const0_rtx)
4382 clear_storage (target, size, BLOCK_OP_NORMAL);
4384 if (label)
4385 emit_label (label);
4388 /* Handle calls that return values in multiple non-contiguous locations.
4389 The Irix 6 ABI has examples of this. */
4390 else if (GET_CODE (target) == PARALLEL)
4391 emit_group_load (target, temp, TREE_TYPE (exp),
4392 int_size_in_bytes (TREE_TYPE (exp)));
4393 else if (GET_MODE (temp) == BLKmode)
4394 emit_block_move (target, temp, expr_size (exp),
4395 (call_param_p
4396 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4397 else
4399 temp = force_operand (temp, target);
4400 if (temp != target)
4401 emit_move_insn (target, temp);
4405 return NULL_RTX;
4408 /* Examine CTOR to discover:
4409 * how many scalar fields are set to nonzero values,
4410 and place it in *P_NZ_ELTS;
4411 * how many scalar fields are set to non-constant values,
4412 and place it in *P_NC_ELTS; and
4413 * how many scalar fields in total are in CTOR,
4414 and place it in *P_ELT_COUNT.
4415 * if a type is a union, and the initializer from the constructor
4416 is not the largest element in the union, then set *p_must_clear. */
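/* For example (purely illustrative): for an initializer such as

        int a[4] = { 1, 0, n, 0 };

   where N is not a constant, *P_NZ_ELTS gets 2 (the 1 and N), *P_NC_ELTS
   gets 1 (just N), and *P_ELT_COUNT gets 4.  */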
4418 static void
4419 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4420 HOST_WIDE_INT *p_nc_elts,
4421 HOST_WIDE_INT *p_elt_count,
4422 bool *p_must_clear)
4424 unsigned HOST_WIDE_INT idx;
4425 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4426 tree value, purpose;
4428 nz_elts = 0;
4429 nc_elts = 0;
4430 elt_count = 0;
4432 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4434 HOST_WIDE_INT mult;
4436 mult = 1;
4437 if (TREE_CODE (purpose) == RANGE_EXPR)
4439 tree lo_index = TREE_OPERAND (purpose, 0);
4440 tree hi_index = TREE_OPERAND (purpose, 1);
4442 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4443 mult = (tree_low_cst (hi_index, 1)
4444 - tree_low_cst (lo_index, 1) + 1);
4447 switch (TREE_CODE (value))
4449 case CONSTRUCTOR:
4451 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4452 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4453 nz_elts += mult * nz;
4454 nc_elts += mult * nc;
4455 elt_count += mult * ic;
4457 break;
4459 case INTEGER_CST:
4460 case REAL_CST:
4461 if (!initializer_zerop (value))
4462 nz_elts += mult;
4463 elt_count += mult;
4464 break;
4466 case STRING_CST:
4467 nz_elts += mult * TREE_STRING_LENGTH (value);
4468 elt_count += mult * TREE_STRING_LENGTH (value);
4469 break;
4471 case COMPLEX_CST:
4472 if (!initializer_zerop (TREE_REALPART (value)))
4473 nz_elts += mult;
4474 if (!initializer_zerop (TREE_IMAGPART (value)))
4475 nz_elts += mult;
4476 elt_count += mult;
4477 break;
4479 case VECTOR_CST:
4481 tree v;
4482 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4484 if (!initializer_zerop (TREE_VALUE (v)))
4485 nz_elts += mult;
4486 elt_count += mult;
4489 break;
4491 default:
4492 nz_elts += mult;
4493 elt_count += mult;
4494 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4495 nc_elts += mult;
4496 break;
4500 if (!*p_must_clear
4501 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4502 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4504 tree init_sub_type;
4505 bool clear_this = true;
4507 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4509 /* We don't expect more than one element of the union to be
4510 initialized. Not sure what we should do otherwise... */
4511 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4512 == 1);
4514 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4515 CONSTRUCTOR_ELTS (ctor),
4516 0)->value);
4518 /* ??? We could look at each element of the union, and find the
4519 largest element. Which would avoid comparing the size of the
4520 initialized element against any tail padding in the union.
4521 Doesn't seem worth the effort... */
4522 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4523 TYPE_SIZE (init_sub_type)) == 1)
4525 /* And now we have to find out if the element itself is fully
4526 constructed. E.g. for union { struct { int a, b; } s; } u
4527 = { .s = { .a = 1 } }. */
4528 if (elt_count == count_type_elements (init_sub_type, false))
4529 clear_this = false;
4533 *p_must_clear = clear_this;
4536 *p_nz_elts += nz_elts;
4537 *p_nc_elts += nc_elts;
4538 *p_elt_count += elt_count;
4541 void
4542 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4543 HOST_WIDE_INT *p_nc_elts,
4544 HOST_WIDE_INT *p_elt_count,
4545 bool *p_must_clear)
4547 *p_nz_elts = 0;
4548 *p_nc_elts = 0;
4549 *p_elt_count = 0;
4550 *p_must_clear = false;
4551 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4552 p_must_clear);
4555 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4556 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4557 array member at the end of the structure. */
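/* A purely illustrative example: for

        struct { int a; double b[3]; _Complex float c; }

   this returns 1 + 3 + 2 = 6.  */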
4559 HOST_WIDE_INT
4560 count_type_elements (tree type, bool allow_flexarr)
4562 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4563 switch (TREE_CODE (type))
4565 case ARRAY_TYPE:
4567 tree telts = array_type_nelts (type);
4568 if (telts && host_integerp (telts, 1))
4570 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4571 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4572 if (n == 0)
4573 return 0;
4574 else if (max / n > m)
4575 return n * m;
4577 return -1;
4580 case RECORD_TYPE:
4582 HOST_WIDE_INT n = 0, t;
4583 tree f;
4585 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4586 if (TREE_CODE (f) == FIELD_DECL)
4588 t = count_type_elements (TREE_TYPE (f), false);
4589 if (t < 0)
4591 /* Check for structures with flexible array member. */
4592 tree tf = TREE_TYPE (f);
4593 if (allow_flexarr
4594 && TREE_CHAIN (f) == NULL
4595 && TREE_CODE (tf) == ARRAY_TYPE
4596 && TYPE_DOMAIN (tf)
4597 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4598 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4599 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4600 && int_size_in_bytes (type) >= 0)
4601 break;
4603 return -1;
4605 n += t;
4608 return n;
4611 case UNION_TYPE:
4612 case QUAL_UNION_TYPE:
4614 /* Ho hum. How in the world do we guess here? Clearly it isn't
4615 right to count the fields. Guess based on the number of words. */
4616 HOST_WIDE_INT n = int_size_in_bytes (type);
4617 if (n < 0)
4618 return -1;
4619 return n / UNITS_PER_WORD;
4622 case COMPLEX_TYPE:
4623 return 2;
4625 case VECTOR_TYPE:
4626 return TYPE_VECTOR_SUBPARTS (type);
4628 case INTEGER_TYPE:
4629 case REAL_TYPE:
4630 case ENUMERAL_TYPE:
4631 case BOOLEAN_TYPE:
4632 case CHAR_TYPE:
4633 case POINTER_TYPE:
4634 case OFFSET_TYPE:
4635 case REFERENCE_TYPE:
4636 return 1;
4638 case VOID_TYPE:
4639 case METHOD_TYPE:
4640 case FUNCTION_TYPE:
4641 case LANG_TYPE:
4642 default:
4643 gcc_unreachable ();
4647 /* Return 1 if EXP contains mostly (3/4) zeros. */
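/* The test below is strict: e.g. (illustrative) an int[8] initialized with
   two nonzero elements is not "mostly zeros" (2 < 8/4 fails), while one
   nonzero element out of eight is.  */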
4649 static int
4650 mostly_zeros_p (tree exp)
4652 if (TREE_CODE (exp) == CONSTRUCTOR)
4655 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4656 bool must_clear;
4658 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4659 if (must_clear)
4660 return 1;
4662 elts = count_type_elements (TREE_TYPE (exp), false);
4664 return nz_elts < elts / 4;
4667 return initializer_zerop (exp);
4670 /* Return 1 if EXP contains all zeros. */
4672 static int
4673 all_zeros_p (tree exp)
4675 if (TREE_CODE (exp) == CONSTRUCTOR)
4678 HOST_WIDE_INT nz_elts, nc_elts, count;
4679 bool must_clear;
4681 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4682 return nz_elts == 0;
4685 return initializer_zerop (exp);
4688 /* Helper function for store_constructor.
4689 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4690 TYPE is the type of the CONSTRUCTOR, not the element type.
4691 CLEARED is as for store_constructor.
4692 ALIAS_SET is the alias set to use for any stores.
4694 This provides a recursive shortcut back to store_constructor when it isn't
4695 necessary to go through store_field. This is so that we can pass through
4696 the cleared field to let store_constructor know that we may not have to
4697 clear a substructure if the outer structure has already been cleared. */
4699 static void
4700 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4701 HOST_WIDE_INT bitpos, enum machine_mode mode,
4702 tree exp, tree type, int cleared, int alias_set)
4704 if (TREE_CODE (exp) == CONSTRUCTOR
4705 /* We can only call store_constructor recursively if the size and
4706 bit position are on a byte boundary. */
4707 && bitpos % BITS_PER_UNIT == 0
4708 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4709 /* If we have a nonzero bitpos for a register target, then we just
4710 let store_field do the bitfield handling. This is unlikely to
4711 generate unnecessary clear instructions anyways. */
4712 && (bitpos == 0 || MEM_P (target)))
4714 if (MEM_P (target))
4715 target
4716 = adjust_address (target,
4717 GET_MODE (target) == BLKmode
4718 || 0 != (bitpos
4719 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4720 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4723 /* Update the alias set, if required. */
4724 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4725 && MEM_ALIAS_SET (target) != 0)
4727 target = copy_rtx (target);
4728 set_mem_alias_set (target, alias_set);
4731 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4733 else
4734 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4737 /* Store the value of constructor EXP into the rtx TARGET.
4738 TARGET is either a REG or a MEM; we know it cannot conflict, since
4739 safe_from_p has been called.
4740 CLEARED is true if TARGET is known to have been zero'd.
4741 SIZE is the number of bytes of TARGET we are allowed to modify: this
4742 may not be the same as the size of EXP if we are assigning to a field
4743 which has been packed to exclude padding bits. */
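/* Illustrative example: for

        struct s { int a, b, c; } x = { 1 };

   the constructor mentions fewer fields than the structure has, so (for a
   memory target) the whole of X is cleared first and then only the field
   A is stored.  */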
4745 static void
4746 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4748 tree type = TREE_TYPE (exp);
4749 #ifdef WORD_REGISTER_OPERATIONS
4750 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4751 #endif
4753 switch (TREE_CODE (type))
4755 case RECORD_TYPE:
4756 case UNION_TYPE:
4757 case QUAL_UNION_TYPE:
4759 unsigned HOST_WIDE_INT idx;
4760 tree field, value;
4762 /* If size is zero or the target is already cleared, do nothing. */
4763 if (size == 0 || cleared)
4764 cleared = 1;
4765 /* We either clear the aggregate or indicate the value is dead. */
4766 else if ((TREE_CODE (type) == UNION_TYPE
4767 || TREE_CODE (type) == QUAL_UNION_TYPE)
4768 && ! CONSTRUCTOR_ELTS (exp))
4769 /* If the constructor is empty, clear the union. */
4771 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4772 cleared = 1;
4775 /* If we are building a static constructor into a register,
4776 set the initial value as zero so we can fold the value into
4777 a constant. But if more than one register is involved,
4778 this probably loses. */
4779 else if (REG_P (target) && TREE_STATIC (exp)
4780 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4782 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4783 cleared = 1;
4786 /* If the constructor has fewer fields than the structure or
4787 if we are initializing the structure to mostly zeros, clear
4788 the whole structure first. Don't do this if TARGET is a
4789 register whose mode size isn't equal to SIZE since
4790 clear_storage can't handle this case. */
4791 else if (size > 0
4792 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4793 != fields_length (type))
4794 || mostly_zeros_p (exp))
4795 && (!REG_P (target)
4796 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4797 == size)))
4799 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4800 cleared = 1;
4803 if (! cleared)
4804 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4806 /* Store each element of the constructor into the
4807 corresponding field of TARGET. */
4808 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4810 enum machine_mode mode;
4811 HOST_WIDE_INT bitsize;
4812 HOST_WIDE_INT bitpos = 0;
4813 tree offset;
4814 rtx to_rtx = target;
4816 /* Just ignore missing fields. We cleared the whole
4817 structure, above, if any fields are missing. */
4818 if (field == 0)
4819 continue;
4821 if (cleared && initializer_zerop (value))
4822 continue;
4824 if (host_integerp (DECL_SIZE (field), 1))
4825 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4826 else
4827 bitsize = -1;
4829 mode = DECL_MODE (field);
4830 if (DECL_BIT_FIELD (field))
4831 mode = VOIDmode;
4833 offset = DECL_FIELD_OFFSET (field);
4834 if (host_integerp (offset, 0)
4835 && host_integerp (bit_position (field), 0))
4837 bitpos = int_bit_position (field);
4838 offset = 0;
4840 else
4841 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4843 if (offset)
4845 rtx offset_rtx;
4847 offset
4848 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4849 make_tree (TREE_TYPE (exp),
4850 target));
4852 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4853 gcc_assert (MEM_P (to_rtx));
4855 #ifdef POINTERS_EXTEND_UNSIGNED
4856 if (GET_MODE (offset_rtx) != Pmode)
4857 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4858 #else
4859 if (GET_MODE (offset_rtx) != ptr_mode)
4860 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4861 #endif
4863 to_rtx = offset_address (to_rtx, offset_rtx,
4864 highest_pow2_factor (offset));
4867 #ifdef WORD_REGISTER_OPERATIONS
4868 /* If this initializes a field that is smaller than a
4869 word, at the start of a word, try to widen it to a full
4870 word. This special case allows us to output C++ member
4871 function initializations in a form that the optimizers
4872 can understand. */
4873 if (REG_P (target)
4874 && bitsize < BITS_PER_WORD
4875 && bitpos % BITS_PER_WORD == 0
4876 && GET_MODE_CLASS (mode) == MODE_INT
4877 && TREE_CODE (value) == INTEGER_CST
4878 && exp_size >= 0
4879 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4881 tree type = TREE_TYPE (value);
4883 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4885 type = lang_hooks.types.type_for_size
4886 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4887 value = convert (type, value);
4890 if (BYTES_BIG_ENDIAN)
4891 value
4892 = fold_build2 (LSHIFT_EXPR, type, value,
4893 build_int_cst (NULL_TREE,
4894 BITS_PER_WORD - bitsize));
4895 bitsize = BITS_PER_WORD;
4896 mode = word_mode;
4898 #endif
4900 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4901 && DECL_NONADDRESSABLE_P (field))
4903 to_rtx = copy_rtx (to_rtx);
4904 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4907 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4908 value, type, cleared,
4909 get_alias_set (TREE_TYPE (field)));
4911 break;
4913 case ARRAY_TYPE:
4915 tree value, index;
4916 unsigned HOST_WIDE_INT i;
4917 int need_to_clear;
4918 tree domain;
4919 tree elttype = TREE_TYPE (type);
4920 int const_bounds_p;
4921 HOST_WIDE_INT minelt = 0;
4922 HOST_WIDE_INT maxelt = 0;
4924 domain = TYPE_DOMAIN (type);
4925 const_bounds_p = (TYPE_MIN_VALUE (domain)
4926 && TYPE_MAX_VALUE (domain)
4927 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4928 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4930 /* If we have constant bounds for the range of the type, get them. */
4931 if (const_bounds_p)
4933 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4934 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4937 /* If the constructor has fewer elements than the array, clear
4938 the whole array first. Similarly if this is a static
4939 constructor of a non-BLKmode object. */
4940 if (cleared)
4941 need_to_clear = 0;
4942 else if (REG_P (target) && TREE_STATIC (exp))
4943 need_to_clear = 1;
4944 else
4946 unsigned HOST_WIDE_INT idx;
4947 tree index, value;
4948 HOST_WIDE_INT count = 0, zero_count = 0;
4949 need_to_clear = ! const_bounds_p;
4951 /* This loop is a more accurate version of the loop in
4952 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4953 is also needed to check for missing elements. */
4954 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
4956 HOST_WIDE_INT this_node_count;
4958 if (need_to_clear)
4959 break;
4961 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4963 tree lo_index = TREE_OPERAND (index, 0);
4964 tree hi_index = TREE_OPERAND (index, 1);
4966 if (! host_integerp (lo_index, 1)
4967 || ! host_integerp (hi_index, 1))
4969 need_to_clear = 1;
4970 break;
4973 this_node_count = (tree_low_cst (hi_index, 1)
4974 - tree_low_cst (lo_index, 1) + 1);
4976 else
4977 this_node_count = 1;
4979 count += this_node_count;
4980 if (mostly_zeros_p (value))
4981 zero_count += this_node_count;
4984 /* Clear the entire array first if there are any missing
4985 elements, or if the incidence of zero elements is >=
4986 75%. */
4987 if (! need_to_clear
4988 && (count < maxelt - minelt + 1
4989 || 4 * zero_count >= 3 * count))
4990 need_to_clear = 1;
4993 if (need_to_clear && size > 0)
4995 if (REG_P (target))
4996 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4997 else
4998 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4999 cleared = 1;
5002 if (!cleared && REG_P (target))
5003 /* Inform later passes that the old value is dead. */
5004 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5006 /* Store each element of the constructor into the
5007 corresponding element of TARGET, determined by counting the
5008 elements. */
5009 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5011 enum machine_mode mode;
5012 HOST_WIDE_INT bitsize;
5013 HOST_WIDE_INT bitpos;
5014 int unsignedp;
5015 rtx xtarget = target;
5017 if (cleared && initializer_zerop (value))
5018 continue;
5020 unsignedp = TYPE_UNSIGNED (elttype);
5021 mode = TYPE_MODE (elttype);
5022 if (mode == BLKmode)
5023 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5024 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5025 : -1);
5026 else
5027 bitsize = GET_MODE_BITSIZE (mode);
5029 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5031 tree lo_index = TREE_OPERAND (index, 0);
5032 tree hi_index = TREE_OPERAND (index, 1);
5033 rtx index_r, pos_rtx;
5034 HOST_WIDE_INT lo, hi, count;
5035 tree position;
5037 /* If the range is constant and "small", unroll the loop. */
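              /* E.g. (illustrative): a GNU range initializer such as

                   int a[100] = { [3 ... 5] = 7 };

                 yields a RANGE_EXPR index; three ints are well under the
                 40-byte limit below, so the three stores are emitted
                 directly rather than as a runtime loop.  */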
5038 if (const_bounds_p
5039 && host_integerp (lo_index, 0)
5040 && host_integerp (hi_index, 0)
5041 && (lo = tree_low_cst (lo_index, 0),
5042 hi = tree_low_cst (hi_index, 0),
5043 count = hi - lo + 1,
5044 (!MEM_P (target)
5045 || count <= 2
5046 || (host_integerp (TYPE_SIZE (elttype), 1)
5047 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5048 <= 40 * 8)))))
5050 lo -= minelt; hi -= minelt;
5051 for (; lo <= hi; lo++)
5053 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5055 if (MEM_P (target)
5056 && !MEM_KEEP_ALIAS_SET_P (target)
5057 && TREE_CODE (type) == ARRAY_TYPE
5058 && TYPE_NONALIASED_COMPONENT (type))
5060 target = copy_rtx (target);
5061 MEM_KEEP_ALIAS_SET_P (target) = 1;
5064 store_constructor_field
5065 (target, bitsize, bitpos, mode, value, type, cleared,
5066 get_alias_set (elttype));
5069 else
5071 rtx loop_start = gen_label_rtx ();
5072 rtx loop_end = gen_label_rtx ();
5073 tree exit_cond;
5075 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5076 unsignedp = TYPE_UNSIGNED (domain);
5078 index = build_decl (VAR_DECL, NULL_TREE, domain);
5080 index_r
5081 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5082 &unsignedp, 0));
5083 SET_DECL_RTL (index, index_r);
5084 store_expr (lo_index, index_r, 0);
5086 /* Build the head of the loop. */
5087 do_pending_stack_adjust ();
5088 emit_label (loop_start);
5090 /* Assign value to element index. */
5091 position
5092 = convert (ssizetype,
5093 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5094 index, TYPE_MIN_VALUE (domain)));
5095 position = size_binop (MULT_EXPR, position,
5096 convert (ssizetype,
5097 TYPE_SIZE_UNIT (elttype)));
5099 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5100 xtarget = offset_address (target, pos_rtx,
5101 highest_pow2_factor (position));
5102 xtarget = adjust_address (xtarget, mode, 0);
5103 if (TREE_CODE (value) == CONSTRUCTOR)
5104 store_constructor (value, xtarget, cleared,
5105 bitsize / BITS_PER_UNIT);
5106 else
5107 store_expr (value, xtarget, 0);
5109 /* Generate a conditional jump to exit the loop. */
5110 exit_cond = build2 (LT_EXPR, integer_type_node,
5111 index, hi_index);
5112 jumpif (exit_cond, loop_end);
5114 /* Update the loop counter, and jump to the head of
5115 the loop. */
5116 expand_assignment (index,
5117 build2 (PLUS_EXPR, TREE_TYPE (index),
5118 index, integer_one_node));
5120 emit_jump (loop_start);
5122 /* Build the end of the loop. */
5123 emit_label (loop_end);
5126 else if ((index != 0 && ! host_integerp (index, 0))
5127 || ! host_integerp (TYPE_SIZE (elttype), 1))
5129 tree position;
5131 if (index == 0)
5132 index = ssize_int (1);
5134 if (minelt)
5135 index = fold_convert (ssizetype,
5136 fold_build2 (MINUS_EXPR,
5137 TREE_TYPE (index),
5138 index,
5139 TYPE_MIN_VALUE (domain)));
5141 position = size_binop (MULT_EXPR, index,
5142 convert (ssizetype,
5143 TYPE_SIZE_UNIT (elttype)));
5144 xtarget = offset_address (target,
5145 expand_expr (position, 0, VOIDmode, 0),
5146 highest_pow2_factor (position));
5147 xtarget = adjust_address (xtarget, mode, 0);
5148 store_expr (value, xtarget, 0);
5150 else
5152 if (index != 0)
5153 bitpos = ((tree_low_cst (index, 0) - minelt)
5154 * tree_low_cst (TYPE_SIZE (elttype), 1));
5155 else
5156 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5158 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5159 && TREE_CODE (type) == ARRAY_TYPE
5160 && TYPE_NONALIASED_COMPONENT (type))
5162 target = copy_rtx (target);
5163 MEM_KEEP_ALIAS_SET_P (target) = 1;
5165 store_constructor_field (target, bitsize, bitpos, mode, value,
5166 type, cleared, get_alias_set (elttype));
5169 break;
5172 case VECTOR_TYPE:
5174 unsigned HOST_WIDE_INT idx;
5175 constructor_elt *ce;
5176 int i;
5177 int need_to_clear;
5178 int icode = 0;
5179 tree elttype = TREE_TYPE (type);
5180 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5181 enum machine_mode eltmode = TYPE_MODE (elttype);
5182 HOST_WIDE_INT bitsize;
5183 HOST_WIDE_INT bitpos;
5184 rtvec vector = NULL;
5185 unsigned n_elts;
5187 gcc_assert (eltmode != BLKmode);
5189 n_elts = TYPE_VECTOR_SUBPARTS (type);
5190 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5192 enum machine_mode mode = GET_MODE (target);
5194 icode = (int) vec_init_optab->handlers[mode].insn_code;
5195 if (icode != CODE_FOR_nothing)
5197 unsigned int i;
5199 vector = rtvec_alloc (n_elts);
5200 for (i = 0; i < n_elts; i++)
5201 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5205 /* If the constructor has fewer elements than the vector,
5206 clear the whole array first. Similarly if this is a static
5207 constructor of a non-BLKmode object. */
5208 if (cleared)
5209 need_to_clear = 0;
5210 else if (REG_P (target) && TREE_STATIC (exp))
5211 need_to_clear = 1;
5212 else
5214 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5215 tree value;
5217 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5219 int n_elts_here = tree_low_cst
5220 (int_const_binop (TRUNC_DIV_EXPR,
5221 TYPE_SIZE (TREE_TYPE (value)),
5222 TYPE_SIZE (elttype), 0), 1);
5224 count += n_elts_here;
5225 if (mostly_zeros_p (value))
5226 zero_count += n_elts_here;
5229 /* Clear the entire vector first if there are any missing elements,
5230 or if the incidence of zero elements is >= 75%. */
5231 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5234 if (need_to_clear && size > 0 && !vector)
5236 if (REG_P (target))
5237 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5238 else
5239 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5240 cleared = 1;
5243 /* Inform later passes that the old value is dead. */
5244 if (!cleared && REG_P (target))
5245 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5247 /* Store each element of the constructor into the corresponding
5248 element of TARGET, determined by counting the elements. */
5249 for (idx = 0, i = 0;
5250 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5251 idx++, i += bitsize / elt_size)
5253 HOST_WIDE_INT eltpos;
5254 tree value = ce->value;
5256 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5257 if (cleared && initializer_zerop (value))
5258 continue;
5260 if (ce->index)
5261 eltpos = tree_low_cst (ce->index, 1);
5262 else
5263 eltpos = i;
5265 if (vector)
5267 /* Vector CONSTRUCTORs should only be built from smaller
5268 vectors in the case of BLKmode vectors. */
5269 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5270 RTVEC_ELT (vector, eltpos)
5271 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5273 else
5275 enum machine_mode value_mode =
5276 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5277 ? TYPE_MODE (TREE_TYPE (value))
5278 : eltmode;
5279 bitpos = eltpos * elt_size;
5280 store_constructor_field (target, bitsize, bitpos,
5281 value_mode, value, type,
5282 cleared, get_alias_set (elttype));
5286 if (vector)
5287 emit_insn (GEN_FCN (icode)
5288 (target,
5289 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5290 break;
5293 default:
5294 gcc_unreachable ();
5298 /* Store the value of EXP (an expression tree)
5299 into a subfield of TARGET which has mode MODE and occupies
5300 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5301 If MODE is VOIDmode, it means that we are storing into a bit-field.
5303 Always return const0_rtx unless we have something particular to
5304 return.
5306 TYPE is the type of the underlying object,
5308 ALIAS_SET is the alias set for the destination. This value will
5309 (in general) be different from that for TARGET, since TARGET is a
5310 reference to the containing structure. */
5312 static rtx
5313 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5314 enum machine_mode mode, tree exp, tree type, int alias_set)
5316 HOST_WIDE_INT width_mask = 0;
5318 if (TREE_CODE (exp) == ERROR_MARK)
5319 return const0_rtx;
5321 /* If we have nothing to store, do nothing unless the expression has
5322 side-effects. */
5323 if (bitsize == 0)
5324 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5325 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5326 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5328 /* If we are storing into an unaligned field of an aligned union that is
5329 in a register, we may have the mode of TARGET being an integer mode but
5330 MODE == BLKmode. In that case, get an aligned object whose size and
5331 alignment are the same as TARGET and store TARGET into it (we can avoid
5332 the store if the field being stored is the entire width of TARGET). Then
5333 call ourselves recursively to store the field into a BLKmode version of
5334 that object. Finally, load from the object into TARGET. This is not
5335 very efficient in general, but should only be slightly more expensive
5336 than the otherwise-required unaligned accesses. Perhaps this can be
5337 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5338 twice, once with emit_move_insn and once via store_field. */
5340 if (mode == BLKmode
5341 && (REG_P (target) || GET_CODE (target) == SUBREG))
5343 rtx object = assign_temp (type, 0, 1, 1);
5344 rtx blk_object = adjust_address (object, BLKmode, 0);
5346 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5347 emit_move_insn (object, target);
5349 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5351 emit_move_insn (target, object);
5353 /* We want to return the BLKmode version of the data. */
5354 return blk_object;
5357 if (GET_CODE (target) == CONCAT)
5359 /* We're storing into a struct containing a single __complex. */
5361 gcc_assert (!bitpos);
5362 return store_expr (exp, target, 0);
5365 /* If the structure is in a register or if the component
5366 is a bit field, we cannot use addressing to access it.
5367 Use bit-field techniques or SUBREG to store in it. */
5369 if (mode == VOIDmode
5370 || (mode != BLKmode && ! direct_store[(int) mode]
5371 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5372 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5373 || REG_P (target)
5374 || GET_CODE (target) == SUBREG
5375 /* If the field isn't aligned enough to store as an ordinary memref,
5376 store it as a bit field. */
5377 || (mode != BLKmode
5378 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5379 || bitpos % GET_MODE_ALIGNMENT (mode))
5380 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5381 || (bitpos % BITS_PER_UNIT != 0)))
5382 /* If the RHS and field are a constant size and the size of the
5383 RHS isn't the same size as the bitfield, we must use bitfield
5384 operations. */
5385 || (bitsize >= 0
5386 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5387 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5389 rtx temp;
5391 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5392 implies a mask operation. If the precision is the same size as
5393 the field we're storing into, that mask is redundant. This is
5394 particularly common with bit field assignments generated by the
5395 C front end. */
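      /* E.g. (illustrative): for

           struct s { unsigned int f : 5; } x;
           x.f = n;

         the front end converts N to the 5-bit field type, which would imply
         masking N down to 5 bits; since exactly 5 bits are stored anyway,
         the conversion can be dropped.  */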
5396 if (TREE_CODE (exp) == NOP_EXPR)
5398 tree type = TREE_TYPE (exp);
5399 if (INTEGRAL_TYPE_P (type)
5400 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5401 && bitsize == TYPE_PRECISION (type))
5403 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5404 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5405 exp = TREE_OPERAND (exp, 0);
5409 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5411 /* If BITSIZE is narrower than the size of the type of EXP
5412 we will be narrowing TEMP. Normally, what's wanted are the
5413 low-order bits. However, if EXP's type is a record and this is a
5414 big-endian machine, we want the upper BITSIZE bits. */
5415 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5416 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5417 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5418 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5419 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5420 - bitsize),
5421 NULL_RTX, 1);
5423 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5424 MODE. */
5425 if (mode != VOIDmode && mode != BLKmode
5426 && mode != TYPE_MODE (TREE_TYPE (exp)))
5427 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5429 /* If the modes of TARGET and TEMP are both BLKmode, both
5430 must be in memory and BITPOS must be aligned on a byte
5431 boundary. If so, we simply do a block copy. */
5432 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5434 gcc_assert (MEM_P (target) && MEM_P (temp)
5435 && !(bitpos % BITS_PER_UNIT));
5437 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5438 emit_block_move (target, temp,
5439 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5440 / BITS_PER_UNIT),
5441 BLOCK_OP_NORMAL);
5443 return const0_rtx;
5446 /* Store the value in the bitfield. */
5447 store_bit_field (target, bitsize, bitpos, mode, temp);
5449 return const0_rtx;
5451 else
5453 /* Now build a reference to just the desired component. */
5454 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5456 if (to_rtx == target)
5457 to_rtx = copy_rtx (to_rtx);
5459 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5460 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5461 set_mem_alias_set (to_rtx, alias_set);
5463 return store_expr (exp, to_rtx, 0);
5467 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5468 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5469 codes and find the ultimate containing object, which we return.
5471 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5472 bit position, and *PUNSIGNEDP to the signedness of the field.
5473 If the position of the field is variable, we store a tree
5474 giving the variable offset (in units) in *POFFSET.
5475 This offset is in addition to the bit position.
5476 If the position is not variable, we store 0 in *POFFSET.
5478 If any of the extraction expressions is volatile,
5479 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5481 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5482 is a mode that can be used to access the field. In that case, *PBITSIZE
5483 is redundant.
5485 If the field describes a variable-sized object, *PMODE is set to
5486 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5487 this case, but the address of the object can be found.
5489 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5490 look through nodes that serve as markers of a greater alignment than
5491 the one that can be deduced from the expression. These nodes make it
5492 possible for front-ends to prevent temporaries from being created by
5493 the middle-end on alignment considerations. For that purpose, the
5494 normal operating mode at high-level is to always pass FALSE so that
5495 the ultimate containing object is really returned; moreover, the
5496 associated predicate handled_component_p will always return TRUE
5497 on these nodes, thus indicating that they are essentially handled
5498 by get_inner_reference. TRUE should only be passed when the caller
5499 is scanning the expression in order to build another representation
5500 and specifically knows how to handle these nodes; as such, this is
5501 the normal operating mode in the RTL expanders. */
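/* For instance (illustrative): given an expression such as S.A[I].B, this
   walks down through the COMPONENT_REF and ARRAY_REF nodes and returns S;
   the byte offset of the access, including the index I scaled by the
   element size, comes back in *POFFSET, any remaining bit offset in
   *PBITPOS, and *PBITSIZE, *PMODE and *PUNSIGNEDP describe the field B
   itself.  */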
5503 tree
5504 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5505 HOST_WIDE_INT *pbitpos, tree *poffset,
5506 enum machine_mode *pmode, int *punsignedp,
5507 int *pvolatilep, bool keep_aligning)
5509 tree size_tree = 0;
5510 enum machine_mode mode = VOIDmode;
5511 tree offset = size_zero_node;
5512 tree bit_offset = bitsize_zero_node;
5513 tree tem;
5515 /* First get the mode, signedness, and size. We do this from just the
5516 outermost expression. */
5517 if (TREE_CODE (exp) == COMPONENT_REF)
5519 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5520 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5521 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5523 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5525 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5527 size_tree = TREE_OPERAND (exp, 1);
5528 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5530 else
5532 mode = TYPE_MODE (TREE_TYPE (exp));
5533 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5535 if (mode == BLKmode)
5536 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5537 else
5538 *pbitsize = GET_MODE_BITSIZE (mode);
5541 if (size_tree != 0)
5543 if (! host_integerp (size_tree, 1))
5544 mode = BLKmode, *pbitsize = -1;
5545 else
5546 *pbitsize = tree_low_cst (size_tree, 1);
5549 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5550 and find the ultimate containing object. */
5551 while (1)
5553 switch (TREE_CODE (exp))
5555 case BIT_FIELD_REF:
5556 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5557 TREE_OPERAND (exp, 2));
5558 break;
5560 case COMPONENT_REF:
5562 tree field = TREE_OPERAND (exp, 1);
5563 tree this_offset = component_ref_field_offset (exp);
5565 /* If this field hasn't been filled in yet, don't go past it.
5566 This should only happen when folding expressions made during
5567 type construction. */
5568 if (this_offset == 0)
5569 break;
5571 offset = size_binop (PLUS_EXPR, offset, this_offset);
5572 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5573 DECL_FIELD_BIT_OFFSET (field));
5575 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5577 break;
5579 case ARRAY_REF:
5580 case ARRAY_RANGE_REF:
5582 tree index = TREE_OPERAND (exp, 1);
5583 tree low_bound = array_ref_low_bound (exp);
5584 tree unit_size = array_ref_element_size (exp);
5586 /* We assume all arrays have sizes that are a multiple of a byte.
5587 First subtract the lower bound, if any, in the type of the
5588 index, then convert to sizetype and multiply by the size of
5589 the array element. */
5590 if (! integer_zerop (low_bound))
5591 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5592 index, low_bound);
5594 offset = size_binop (PLUS_EXPR, offset,
5595 size_binop (MULT_EXPR,
5596 convert (sizetype, index),
5597 unit_size));
5599 break;
5601 case REALPART_EXPR:
5602 break;
5604 case IMAGPART_EXPR:
5605 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5606 bitsize_int (*pbitsize));
5607 break;
5609 case VIEW_CONVERT_EXPR:
5610 if (keep_aligning && STRICT_ALIGNMENT
5611 && (TYPE_ALIGN (TREE_TYPE (exp))
5612 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5613 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5614 < BIGGEST_ALIGNMENT)
5615 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5616 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5617 goto done;
5618 break;
5620 default:
5621 goto done;
5624 /* If any reference in the chain is volatile, the effect is volatile. */
5625 if (TREE_THIS_VOLATILE (exp))
5626 *pvolatilep = 1;
5628 exp = TREE_OPERAND (exp, 0);
5630 done:
5632 /* If OFFSET is constant, see if we can return the whole thing as a
5633 constant bit position. Otherwise, split it up. */
5634 if (host_integerp (offset, 0)
5635 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5636 bitsize_unit_node))
5637 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5638 && host_integerp (tem, 0))
5639 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5640 else
5641 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5643 *pmode = mode;
5644 return exp;
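 /* Illustrative sketch (not part of the original source): a typical caller
    decomposes a reference roughly as follows; the local names here are
    hypothetical.

      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode1;
      int unsignedp, volatilep = 0;
      tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                       &mode1, &unsignedp, &volatilep, false);

    For the COMPONENT_REF `s.f' where F is a non-bit-field `int' member at
    byte 4 of S, BASE is the VAR_DECL for S, BITSIZE is 32, BITPOS is 32,
    OFFSET is 0 and MODE1 is SImode (assuming 8-bit units and 32-bit int).  */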
5647 /* Return a tree of sizetype representing the size, in bytes, of the element
5648 of EXP, an ARRAY_REF. */
5650 tree
5651 array_ref_element_size (tree exp)
5653 tree aligned_size = TREE_OPERAND (exp, 3);
5654 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5656 /* If a size was specified in the ARRAY_REF, it's the size measured
5657 in alignment units of the element type. So multiply by that value. */
5658 if (aligned_size)
5660 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5661 sizetype from another type of the same width and signedness. */
5662 if (TREE_TYPE (aligned_size) != sizetype)
5663 aligned_size = fold_convert (sizetype, aligned_size);
5664 return size_binop (MULT_EXPR, aligned_size,
5665 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5668 /* Otherwise, take the size from that of the element type. Substitute
5669 any PLACEHOLDER_EXPR that we have. */
5670 else
5671 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5674 /* Return a tree representing the lower bound of the array mentioned in
5675 EXP, an ARRAY_REF. */
5677 tree
5678 array_ref_low_bound (tree exp)
5680 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5682 /* If a lower bound is specified in EXP, use it. */
5683 if (TREE_OPERAND (exp, 2))
5684 return TREE_OPERAND (exp, 2);
5686 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5687 substituting for a PLACEHOLDER_EXPR as needed. */
5688 if (domain_type && TYPE_MIN_VALUE (domain_type))
5689 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5691 /* Otherwise, return a zero of the appropriate type. */
5692 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5695 /* Return a tree representing the upper bound of the array mentioned in
5696 EXP, an ARRAY_REF. */
5698 tree
5699 array_ref_up_bound (tree exp)
5701 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5703 /* If there is a domain type and it has an upper bound, use it, substituting
5704 for a PLACEHOLDER_EXPR as needed. */
5705 if (domain_type && TYPE_MAX_VALUE (domain_type))
5706 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5708 /* Otherwise fail. */
5709 return NULL_TREE;
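 /* Illustrative sketch (not part of the original source): for an ARRAY_REF
    `a[i]', the element's byte offset can be rebuilt from the helpers above
    the same way get_inner_reference does it:

      tree index = TREE_OPERAND (exp, 1);
      tree low = array_ref_low_bound (exp);
      tree size = array_ref_element_size (exp);
      tree off;

      if (! integer_zerop (low))
        index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low);
      off = size_binop (MULT_EXPR, convert (sizetype, index), size);

    OFF is then the byte offset of the element from the start of A.  */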
5712 /* Return a tree representing the offset, in bytes, of the field referenced
5713 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5715 tree
5716 component_ref_field_offset (tree exp)
5718 tree aligned_offset = TREE_OPERAND (exp, 2);
5719 tree field = TREE_OPERAND (exp, 1);
5721 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5722 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5723 value. */
5724 if (aligned_offset)
5726 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5727 sizetype from another type of the same width and signedness. */
5728 if (TREE_TYPE (aligned_offset) != sizetype)
5729 aligned_offset = fold_convert (sizetype, aligned_offset);
5730 return size_binop (MULT_EXPR, aligned_offset,
5731 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5734 /* Otherwise, take the offset from that of the field. Substitute
5735 any PLACEHOLDER_EXPR that we have. */
5736 else
5737 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
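 /* Illustrative sketch (not part of the original source): for a
    COMPONENT_REF EXP the complete position of the field splits into a
    variable byte part and a constant bit part:

      tree field = TREE_OPERAND (exp, 1);
      tree byte_off = component_ref_field_offset (exp);
      tree bit_off = DECL_FIELD_BIT_OFFSET (field);

    which is exactly the pair that get_inner_reference accumulates into
    *POFFSET and the bit position, respectively.  */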
5740 /* Return 1 if T is an expression that get_inner_reference handles. */
5742 int
5743 handled_component_p (tree t)
5745 switch (TREE_CODE (t))
5747 case BIT_FIELD_REF:
5748 case COMPONENT_REF:
5749 case ARRAY_REF:
5750 case ARRAY_RANGE_REF:
5751 case VIEW_CONVERT_EXPR:
5752 case REALPART_EXPR:
5753 case IMAGPART_EXPR:
5754 return 1;
5756 default:
5757 return 0;
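 /* Illustrative sketch (not part of the original source): a common idiom is
    to strip every handled component to reach the base object,

      tree base = t;
      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);

    which visits the same chain of nodes that the loop in
    get_inner_reference walks.  */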
5761 /* Given an rtx VALUE that may contain additions and multiplications, return
5762 an equivalent value that just refers to a register, memory, or constant.
5763 This is done by generating instructions to perform the arithmetic and
5764 returning a pseudo-register containing the value.
5766 The returned value may be a REG, SUBREG, MEM or constant. */
5768 rtx
5769 force_operand (rtx value, rtx target)
5771 rtx op1, op2;
5772 /* Use subtarget as the target for operand 0 of a binary operation. */
5773 rtx subtarget = get_subtarget (target);
5774 enum rtx_code code = GET_CODE (value);
5776 /* Check for subreg applied to an expression produced by loop optimizer. */
5777 if (code == SUBREG
5778 && !REG_P (SUBREG_REG (value))
5779 && !MEM_P (SUBREG_REG (value)))
5781 value = simplify_gen_subreg (GET_MODE (value),
5782 force_reg (GET_MODE (SUBREG_REG (value)),
5783 force_operand (SUBREG_REG (value),
5784 NULL_RTX)),
5785 GET_MODE (SUBREG_REG (value)),
5786 SUBREG_BYTE (value));
5787 code = GET_CODE (value);
5790 /* Check for a PIC address load. */
5791 if ((code == PLUS || code == MINUS)
5792 && XEXP (value, 0) == pic_offset_table_rtx
5793 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5794 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5795 || GET_CODE (XEXP (value, 1)) == CONST))
5797 if (!subtarget)
5798 subtarget = gen_reg_rtx (GET_MODE (value));
5799 emit_move_insn (subtarget, value);
5800 return subtarget;
5803 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5805 if (!target)
5806 target = gen_reg_rtx (GET_MODE (value));
5807 convert_move (target, force_operand (XEXP (value, 0), NULL),
5808 code == ZERO_EXTEND);
5809 return target;
5812 if (ARITHMETIC_P (value))
5814 op2 = XEXP (value, 1);
5815 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5816 subtarget = 0;
5817 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5819 code = PLUS;
5820 op2 = negate_rtx (GET_MODE (value), op2);
5823 /* Check for an addition with OP2 a constant integer and our first
5824 operand a PLUS of a virtual register and something else. In that
5825 case, we want to emit the sum of the virtual register and the
5826 constant first and then add the other value. This allows virtual
5827 register instantiation to simply modify the constant rather than
5828 creating another one around this addition. */
5829 if (code == PLUS && GET_CODE (op2) == CONST_INT
5830 && GET_CODE (XEXP (value, 0)) == PLUS
5831 && REG_P (XEXP (XEXP (value, 0), 0))
5832 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5833 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5835 rtx temp = expand_simple_binop (GET_MODE (value), code,
5836 XEXP (XEXP (value, 0), 0), op2,
5837 subtarget, 0, OPTAB_LIB_WIDEN);
5838 return expand_simple_binop (GET_MODE (value), code, temp,
5839 force_operand (XEXP (XEXP (value,
5840 0), 1), 0),
5841 target, 0, OPTAB_LIB_WIDEN);
5844 op1 = force_operand (XEXP (value, 0), subtarget);
5845 op2 = force_operand (op2, NULL_RTX);
5846 switch (code)
5848 case MULT:
5849 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5850 case DIV:
5851 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5852 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5853 target, 1, OPTAB_LIB_WIDEN);
5854 else
5855 return expand_divmod (0,
5856 FLOAT_MODE_P (GET_MODE (value))
5857 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5858 GET_MODE (value), op1, op2, target, 0);
5859 break;
5860 case MOD:
5861 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5862 target, 0);
5863 break;
5864 case UDIV:
5865 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5866 target, 1);
5867 break;
5868 case UMOD:
5869 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5870 target, 1);
5871 break;
5872 case ASHIFTRT:
5873 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5874 target, 0, OPTAB_LIB_WIDEN);
5875 break;
5876 default:
5877 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5878 target, 1, OPTAB_LIB_WIDEN);
5881 if (UNARY_P (value))
5883 int unsignedp = 0;
5885 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5886 switch (code)
5888 case ZERO_EXTEND: case UNSIGNED_FIX: case UNSIGNED_FLOAT:
5889 unsignedp = 1;
5890 /* fall through. */
5891 case TRUNCATE:
5892 case SIGN_EXTEND: case FIX: case FLOAT:
5893 return convert_to_mode (GET_MODE (value), op1, unsignedp);
5894 default:
5895 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5899 #ifdef INSN_SCHEDULING
5900 /* On machines that have insn scheduling, we want all memory references to be
5901 explicit, so we need to deal with such paradoxical SUBREGs. */
5902 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5903 && (GET_MODE_SIZE (GET_MODE (value))
5904 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5905 value
5906 = simplify_gen_subreg (GET_MODE (value),
5907 force_reg (GET_MODE (SUBREG_REG (value)),
5908 force_operand (SUBREG_REG (value),
5909 NULL_RTX)),
5910 GET_MODE (SUBREG_REG (value)),
5911 SUBREG_BYTE (value));
5912 #endif
5914 return value;
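 /* Illustrative sketch (not part of the original source): a caller can use
    force_operand to flatten address arithmetic; BASE_REG and DISP below are
    hypothetical.

      rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (disp));
      addr = force_operand (addr, NULL_RTX);

    After the call ADDR is typically a pseudo-register holding the sum,
    suitable for use as an instruction operand.  */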
5917 /* Subroutine of expand_expr: return nonzero iff there is no way that
5918 EXP can reference X, which is being modified. TOP_P is nonzero if this
5919 call is going to be used to determine whether we need a temporary
5920 for EXP, as opposed to a recursive call to this function.
5922 It is always safe for this routine to return zero since it merely
5923 searches for optimization opportunities. */
5925 static int
5926 safe_from_p (rtx x, tree exp, int top_p)
5928 rtx exp_rtl = 0;
5929 int i, nops;
5931 if (x == 0
5932 /* If EXP has varying size, we MUST use a target since we currently
5933 have no way of allocating temporaries of variable size
5934 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5935 So we assume here that something at a higher level has prevented a
5936 clash. This is somewhat bogus, but the best we can do. Only
5937 do this when X is BLKmode and when we are at the top level. */
5938 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5939 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5940 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5941 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5942 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5943 != INTEGER_CST)
5944 && GET_MODE (x) == BLKmode)
5945 /* If X is in the outgoing argument area, it is always safe. */
5946 || (MEM_P (x)
5947 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5948 || (GET_CODE (XEXP (x, 0)) == PLUS
5949 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5950 return 1;
5952 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5953 find the underlying pseudo. */
5954 if (GET_CODE (x) == SUBREG)
5956 x = SUBREG_REG (x);
5957 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5958 return 0;
5961 /* Now look at our tree code and possibly recurse. */
5962 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5964 case tcc_declaration:
5965 exp_rtl = DECL_RTL_IF_SET (exp);
5966 break;
5968 case tcc_constant:
5969 return 1;
5971 case tcc_exceptional:
5972 if (TREE_CODE (exp) == TREE_LIST)
5974 while (1)
5976 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5977 return 0;
5978 exp = TREE_CHAIN (exp);
5979 if (!exp)
5980 return 1;
5981 if (TREE_CODE (exp) != TREE_LIST)
5982 return safe_from_p (x, exp, 0);
5985 else if (TREE_CODE (exp) == ERROR_MARK)
5986 return 1; /* An already-visited SAVE_EXPR? */
5987 else
5988 return 0;
5990 case tcc_statement:
5991 /* The only case we look at here is the DECL_INITIAL inside a
5992 DECL_EXPR. */
5993 return (TREE_CODE (exp) != DECL_EXPR
5994 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5995 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5996 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5998 case tcc_binary:
5999 case tcc_comparison:
6000 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6001 return 0;
6002 /* Fall through. */
6004 case tcc_unary:
6005 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6007 case tcc_expression:
6008 case tcc_reference:
6009 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6010 the expression. If it is set, we conflict iff we are that rtx or
6011 both are in memory. Otherwise, we check all operands of the
6012 expression recursively. */
6014 switch (TREE_CODE (exp))
6016 case ADDR_EXPR:
6017 /* If the operand is static or we are static, we can't conflict.
6018 Likewise if we don't conflict with the operand at all. */
6019 if (staticp (TREE_OPERAND (exp, 0))
6020 || TREE_STATIC (exp)
6021 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6022 return 1;
6024 /* Otherwise, the only way this can conflict is if we are taking
6025 the address of a DECL whose address is part of X, which is
6026 very rare. */
6027 exp = TREE_OPERAND (exp, 0);
6028 if (DECL_P (exp))
6030 if (!DECL_RTL_SET_P (exp)
6031 || !MEM_P (DECL_RTL (exp)))
6032 return 0;
6033 else
6034 exp_rtl = XEXP (DECL_RTL (exp), 0);
6036 break;
6038 case MISALIGNED_INDIRECT_REF:
6039 case ALIGN_INDIRECT_REF:
6040 case INDIRECT_REF:
6041 if (MEM_P (x)
6042 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6043 get_alias_set (exp)))
6044 return 0;
6045 break;
6047 case CALL_EXPR:
6048 /* Assume that the call will clobber all hard registers and
6049 all of memory. */
6050 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6051 || MEM_P (x))
6052 return 0;
6053 break;
6055 case WITH_CLEANUP_EXPR:
6056 case CLEANUP_POINT_EXPR:
6057 /* Lowered by gimplify.c. */
6058 gcc_unreachable ();
6060 case SAVE_EXPR:
6061 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6063 default:
6064 break;
6067 /* If we have an rtx, we do not need to scan our operands. */
6068 if (exp_rtl)
6069 break;
6071 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6072 for (i = 0; i < nops; i++)
6073 if (TREE_OPERAND (exp, i) != 0
6074 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6075 return 0;
6077 /* If this is a language-specific tree code, it may require
6078 special handling. */
6079 if ((unsigned int) TREE_CODE (exp)
6080 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6081 && !lang_hooks.safe_from_p (x, exp))
6082 return 0;
6083 break;
6085 case tcc_type:
6086 /* Should never get a type here. */
6087 gcc_unreachable ();
6090 /* If we have an rtl, find any enclosed object. Then see if we conflict
6091 with it. */
6092 if (exp_rtl)
6094 if (GET_CODE (exp_rtl) == SUBREG)
6096 exp_rtl = SUBREG_REG (exp_rtl);
6097 if (REG_P (exp_rtl)
6098 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6099 return 0;
6102 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6103 are memory and they conflict. */
6104 return ! (rtx_equal_p (x, exp_rtl)
6105 || (MEM_P (x) && MEM_P (exp_rtl)
6106 && true_dependence (exp_rtl, VOIDmode, x,
6107 rtx_addr_varies_p)));
6110 /* If we reach here, it is safe. */
6111 return 1;
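 /* Illustrative sketch (not part of the original source): the usual calling
    pattern is to discard a proposed target that the other operand might
    reference, e.g.

      if (target != 0 && ! safe_from_p (target, exp1, 1))
        target = 0;

    as expand_operands does below before expanding EXP0 into TARGET.  */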
6115 /* Return the highest power of two that EXP is known to be a multiple of.
6116 This is used in updating alignment of MEMs in array references. */
6118 unsigned HOST_WIDE_INT
6119 highest_pow2_factor (tree exp)
6121 unsigned HOST_WIDE_INT c0, c1;
6123 switch (TREE_CODE (exp))
6125 case INTEGER_CST:
6126 /* We can find the lowest bit that's a one. If the low
6127 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6128 We need to handle this case since we can find it in a COND_EXPR,
6129 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6130 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6131 later ICE. */
6132 if (TREE_CONSTANT_OVERFLOW (exp))
6133 return BIGGEST_ALIGNMENT;
6134 else
6136 /* Note: tree_low_cst is intentionally not used here,
6137 we don't care about the upper bits. */
6138 c0 = TREE_INT_CST_LOW (exp);
6139 c0 &= -c0;
6140 return c0 ? c0 : BIGGEST_ALIGNMENT;
6142 break;
6144 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6145 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6146 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6147 return MIN (c0, c1);
6149 case MULT_EXPR:
6150 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6151 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6152 return c0 * c1;
6154 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6155 case CEIL_DIV_EXPR:
6156 if (integer_pow2p (TREE_OPERAND (exp, 1))
6157 && host_integerp (TREE_OPERAND (exp, 1), 1))
6159 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6160 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6161 return MAX (1, c0 / c1);
6163 break;
6165 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6166 case SAVE_EXPR:
6167 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6169 case COMPOUND_EXPR:
6170 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6172 case COND_EXPR:
6173 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6174 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6175 return MIN (c0, c1);
6177 default:
6178 break;
6181 return 1;
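 /* Illustrative worked example (not part of the original source): for an
    offset tree of the form `i * 12 + 4', the MULT_EXPR case gives
    1 * 4 = 4 (the lowest set bit of 12 is 4, and nothing is known about I),
    and the PLUS_EXPR case then gives MIN (4, 4) = 4, so the offset is known
    to be a multiple of 4.  */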
6184 /* Similar, except that the alignment requirements of TARGET are
6185 taken into account. Assume it is at least as aligned as its
6186 type, unless it is a COMPONENT_REF in which case the layout of
6187 the structure gives the alignment. */
6189 static unsigned HOST_WIDE_INT
6190 highest_pow2_factor_for_target (tree target, tree exp)
6192 unsigned HOST_WIDE_INT target_align, factor;
6194 factor = highest_pow2_factor (exp);
6195 if (TREE_CODE (target) == COMPONENT_REF)
6196 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6197 else
6198 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6199 return MAX (factor, target_align);
6202 /* Expands variable VAR. */
6204 void
6205 expand_var (tree var)
6207 if (DECL_EXTERNAL (var))
6208 return;
6210 if (TREE_STATIC (var))
6211 /* If this is an inlined copy of a static local variable,
6212 look up the original decl. */
6213 var = DECL_ORIGIN (var);
6215 if (TREE_STATIC (var)
6216 ? !TREE_ASM_WRITTEN (var)
6217 : !DECL_RTL_SET_P (var))
6219 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6220 /* Should be ignored. */;
6221 else if (lang_hooks.expand_decl (var))
6222 /* OK. */;
6223 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6224 expand_decl (var);
6225 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6226 rest_of_decl_compilation (var, 0, 0);
6227 else
6228 /* No expansion needed. */
6229 gcc_assert (TREE_CODE (var) == TYPE_DECL
6230 || TREE_CODE (var) == CONST_DECL
6231 || TREE_CODE (var) == FUNCTION_DECL
6232 || TREE_CODE (var) == LABEL_DECL);
6236 /* Subroutine of expand_expr. Expand the two operands of a binary
6237 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6238 The value may be stored in TARGET if TARGET is nonzero. The
6239 MODIFIER argument is as documented by expand_expr. */
6241 static void
6242 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6243 enum expand_modifier modifier)
6245 if (! safe_from_p (target, exp1, 1))
6246 target = 0;
6247 if (operand_equal_p (exp0, exp1, 0))
6249 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6250 *op1 = copy_rtx (*op0);
6252 else
6254 /* If we need to preserve evaluation order, copy exp0 into its own
6255 temporary variable so that it can't be clobbered by exp1. */
6256 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6257 exp0 = save_expr (exp0);
6258 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6259 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
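 /* Illustrative sketch (not part of the original source): binary cases in
    expand_expr_real_1 typically expand both operands with one call,

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, modifier);

    and then hand OP0 and OP1 to expand_binop or a similar routine.  */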
6264 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6265 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6267 static rtx
6268 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6269 enum expand_modifier modifier)
6271 rtx result, subtarget;
6272 tree inner, offset;
6273 HOST_WIDE_INT bitsize, bitpos;
6274 int volatilep, unsignedp;
6275 enum machine_mode mode1;
6277 /* If we are taking the address of a constant and are at the top level,
6278 we have to use output_constant_def since we can't call force_const_mem
6279 at top level. */
6280 /* ??? This should be considered a front-end bug. We should not be
6281 generating ADDR_EXPR of something that isn't an LVALUE. The only
6282 exception here is STRING_CST. */
6283 if (TREE_CODE (exp) == CONSTRUCTOR
6284 || CONSTANT_CLASS_P (exp))
6285 return XEXP (output_constant_def (exp, 0), 0);
6287 /* Everything must be something allowed by is_gimple_addressable. */
6288 switch (TREE_CODE (exp))
6290 case INDIRECT_REF:
6291 /* This case will happen via recursion for &a->b. */
6292 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6294 case CONST_DECL:
6295 /* Recurse and make the output_constant_def clause above handle this. */
6296 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6297 tmode, modifier);
6299 case REALPART_EXPR:
6300 /* The real part of the complex number is always first, therefore
6301 the address is the same as the address of the parent object. */
6302 offset = 0;
6303 bitpos = 0;
6304 inner = TREE_OPERAND (exp, 0);
6305 break;
6307 case IMAGPART_EXPR:
6308 /* The imaginary part of the complex number is always second.
6309 The expression is therefore always offset by the size of the
6310 scalar type. */
6311 offset = 0;
6312 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6313 inner = TREE_OPERAND (exp, 0);
6314 break;
6316 default:
6317 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6318 expand_expr, as that can have various side effects; LABEL_DECLs for
6319 example, may not have their DECL_RTL set yet. Assume language
6320 specific tree nodes can be expanded in some interesting way. */
6321 if (DECL_P (exp)
6322 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6324 result = expand_expr (exp, target, tmode,
6325 modifier == EXPAND_INITIALIZER
6326 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6328 /* If the DECL isn't in memory, then the DECL wasn't properly
6329 marked TREE_ADDRESSABLE, which will be either a front-end
6330 or a tree optimizer bug. */
6331 gcc_assert (MEM_P (result));
6332 result = XEXP (result, 0);
6334 /* ??? Is this needed anymore? */
6335 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6337 assemble_external (exp);
6338 TREE_USED (exp) = 1;
6341 if (modifier != EXPAND_INITIALIZER
6342 && modifier != EXPAND_CONST_ADDRESS)
6343 result = force_operand (result, target);
6344 return result;
6347 /* Pass FALSE as the last argument to get_inner_reference although
6348 we are expanding to RTL. The rationale is that we know how to
6349 handle "aligning nodes" here: we can just bypass them because
6350 they won't change the final object whose address will be returned
6351 (they actually exist only for that purpose). */
6352 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6353 &mode1, &unsignedp, &volatilep, false);
6354 break;
6357 /* We must have made progress. */
6358 gcc_assert (inner != exp);
6360 subtarget = offset || bitpos ? NULL_RTX : target;
6361 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6363 if (offset)
6365 rtx tmp;
6367 if (modifier != EXPAND_NORMAL)
6368 result = force_operand (result, NULL);
6369 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6371 result = convert_memory_address (tmode, result);
6372 tmp = convert_memory_address (tmode, tmp);
6374 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6375 result = gen_rtx_PLUS (tmode, result, tmp);
6376 else
6378 subtarget = bitpos ? NULL_RTX : target;
6379 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6380 1, OPTAB_LIB_WIDEN);
6384 if (bitpos)
6386 /* Someone beforehand should have rejected taking the address
6387 of such an object. */
6388 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6390 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6391 if (modifier < EXPAND_SUM)
6392 result = force_operand (result, target);
6395 return result;
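 /* Illustrative sketch (not part of the original source): for `&s.f' where
    F lives at constant byte offset 8, the recursion above first produces
    the address of S, and the bitpos code then emits

      result = plus_constant (result, 8);

    whereas a variable offset goes through the explicit PLUS emitted in the
    offset code instead.  */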
6398 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6399 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6401 static rtx
6402 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6403 enum expand_modifier modifier)
6405 enum machine_mode rmode;
6406 rtx result;
6408 /* Target mode of VOIDmode says "whatever's natural". */
6409 if (tmode == VOIDmode)
6410 tmode = TYPE_MODE (TREE_TYPE (exp));
6412 /* We can get called with some Weird Things if the user does silliness
6413 like "(short) &a". In that case, convert_memory_address won't do
6414 the right thing, so ignore the given target mode. */
6415 if (tmode != Pmode && tmode != ptr_mode)
6416 tmode = Pmode;
6418 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6419 tmode, modifier);
6421 /* Despite expand_expr's claims concerning ignoring TMODE when not
6422 strictly convenient, stuff breaks if we don't honor it. Note
6423 that combined with the above, we only do this for pointer modes. */
6424 rmode = GET_MODE (result);
6425 if (rmode == VOIDmode)
6426 rmode = tmode;
6427 if (rmode != tmode)
6428 result = convert_memory_address (tmode, result);
6430 return result;
6434 /* expand_expr: generate code for computing expression EXP.
6435 An rtx for the computed value is returned. The value is never null.
6436 In the case of a void EXP, const0_rtx is returned.
6438 The value may be stored in TARGET if TARGET is nonzero.
6439 TARGET is just a suggestion; callers must assume that
6440 the rtx returned may not be the same as TARGET.
6442 If TARGET is CONST0_RTX, it means that the value will be ignored.
6444 If TMODE is not VOIDmode, it suggests generating the
6445 result in mode TMODE. But this is done only when convenient.
6446 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6447 TMODE is just a suggestion; callers must assume that
6448 the rtx returned may not have mode TMODE.
6450 Note that TARGET may have neither TMODE nor MODE. In that case, it
6451 probably will not be used.
6453 If MODIFIER is EXPAND_SUM then when EXP is an addition
6454 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6455 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6456 products as above, or REG or MEM, or constant.
6457 Ordinarily in such cases we would output mul or add instructions
6458 and then return a pseudo reg containing the sum.
6460 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6461 it also marks a label as absolutely required (it can't be dead).
6462 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6463 This is used for outputting expressions used in initializers.
6465 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6466 with a constant address even if that address is not normally legitimate.
6467 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6469 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6470 a call parameter. Such targets require special care as we haven't yet
6471 marked TARGET so that it's safe from being trashed by libcalls. We
6472 don't want to use TARGET for anything but the final result;
6473 Intermediate values must go elsewhere. Additionally, calls to
6474 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6476 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6477 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6478 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6479 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6480 recursively. */
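 /* Illustrative sketch (not part of the original source): the most common
    entry into this machinery is the plain form

      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

    which lets the expression choose its own target and natural mode;
    EXPAND_SUM, EXPAND_INITIALIZER and the other modifiers described above
    select the special behaviors.  */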
6482 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6483 enum expand_modifier, rtx *);
6485 rtx
6486 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6487 enum expand_modifier modifier, rtx *alt_rtl)
6489 int rn = -1;
6490 rtx ret, last = NULL;
6492 /* Handle ERROR_MARK before anybody tries to access its type. */
6493 if (TREE_CODE (exp) == ERROR_MARK
6494 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6496 ret = CONST0_RTX (tmode);
6497 return ret ? ret : const0_rtx;
6500 if (flag_non_call_exceptions)
6502 rn = lookup_stmt_eh_region (exp);
6503 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6504 if (rn >= 0)
6505 last = get_last_insn ();
6508 /* If this is an expression of some kind and it has an associated line
6509 number, then emit the line number before expanding the expression.
6511 We need to save and restore the file and line information so that
6512 errors discovered during expansion are emitted with the right
6513 information. It would be better if the diagnostic routines
6514 used the file/line information embedded in the tree nodes rather
6515 than globals. */
6516 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6518 location_t saved_location = input_location;
6519 input_location = EXPR_LOCATION (exp);
6520 emit_line_note (input_location);
6522 /* Record where the insns produced belong. */
6523 record_block_change (TREE_BLOCK (exp));
6525 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6527 input_location = saved_location;
6529 else
6531 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6534 /* If using non-call exceptions, mark all insns that may trap.
6535 expand_call() will mark CALL_INSNs before we get to this code,
6536 but it doesn't handle libcalls, and these may trap. */
6537 if (rn >= 0)
6539 rtx insn;
6540 for (insn = next_real_insn (last); insn;
6541 insn = next_real_insn (insn))
6543 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6544 /* If we want exceptions for non-call insns, any
6545 may_trap_p instruction may throw. */
6546 && GET_CODE (PATTERN (insn)) != CLOBBER
6547 && GET_CODE (PATTERN (insn)) != USE
6548 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6550 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6551 REG_NOTES (insn));
6556 return ret;
6559 static rtx
6560 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6561 enum expand_modifier modifier, rtx *alt_rtl)
6563 rtx op0, op1, temp;
6564 tree type = TREE_TYPE (exp);
6565 int unsignedp;
6566 enum machine_mode mode;
6567 enum tree_code code = TREE_CODE (exp);
6568 optab this_optab;
6569 rtx subtarget, original_target;
6570 int ignore;
6571 tree context, subexp0, subexp1;
6572 bool reduce_bit_field = false;
6573 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6574 ? reduce_to_bit_field_precision ((expr), \
6575 target, \
6576 type) \
6577 : (expr))
6579 mode = TYPE_MODE (type);
6580 unsignedp = TYPE_UNSIGNED (type);
6581 if (lang_hooks.reduce_bit_field_operations
6582 && TREE_CODE (type) == INTEGER_TYPE
6583 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6585 /* An operation in what may be a bit-field type needs the
6586 result to be reduced to the precision of the bit-field type,
6587 which is narrower than that of the type's mode. */
6588 reduce_bit_field = true;
6589 if (modifier == EXPAND_STACK_PARM)
6590 target = 0;
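 /* Illustrative example (not part of the original source): for a 9-bit
    bit-field type whose mode is HImode (16 bits), GET_MODE_PRECISION (mode)
    exceeds TYPE_PRECISION (type), so REDUCE_BIT_FIELD narrows arithmetic
    results back to 9 bits before they are returned.  */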
6593 /* Use subtarget as the target for operand 0 of a binary operation. */
6594 subtarget = get_subtarget (target);
6595 original_target = target;
6596 ignore = (target == const0_rtx
6597 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6598 || code == CONVERT_EXPR || code == COND_EXPR
6599 || code == VIEW_CONVERT_EXPR)
6600 && TREE_CODE (type) == VOID_TYPE));
6602 /* If we are going to ignore this result, we need only do something
6603 if there is a side-effect somewhere in the expression. If there
6604 is, short-circuit the most common cases here. Note that we must
6605 not call expand_expr with anything but const0_rtx in case this
6606 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6608 if (ignore)
6610 if (! TREE_SIDE_EFFECTS (exp))
6611 return const0_rtx;
6613 /* Ensure we reference a volatile object even if value is ignored, but
6614 don't do this if all we are doing is taking its address. */
6615 if (TREE_THIS_VOLATILE (exp)
6616 && TREE_CODE (exp) != FUNCTION_DECL
6617 && mode != VOIDmode && mode != BLKmode
6618 && modifier != EXPAND_CONST_ADDRESS)
6620 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6621 if (MEM_P (temp))
6622 temp = copy_to_reg (temp);
6623 return const0_rtx;
6626 if (TREE_CODE_CLASS (code) == tcc_unary
6627 || code == COMPONENT_REF || code == INDIRECT_REF)
6628 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6629 modifier);
6631 else if (TREE_CODE_CLASS (code) == tcc_binary
6632 || TREE_CODE_CLASS (code) == tcc_comparison
6633 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6635 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6636 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6637 return const0_rtx;
6639 else if (code == BIT_FIELD_REF)
6641 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6642 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6643 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6644 return const0_rtx;
6647 target = 0;
6651 switch (code)
6653 case LABEL_DECL:
6655 tree function = decl_function_context (exp);
6657 temp = label_rtx (exp);
6658 temp = gen_rtx_LABEL_REF (Pmode, temp);
6660 if (function != current_function_decl
6661 && function != 0)
6662 LABEL_REF_NONLOCAL_P (temp) = 1;
6664 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6665 return temp;
6668 case SSA_NAME:
6669 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6670 NULL);
6672 case PARM_DECL:
6673 case VAR_DECL:
6674 /* If a static var's type was incomplete when the decl was written,
6675 but the type is complete now, lay out the decl now. */
6676 if (DECL_SIZE (exp) == 0
6677 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6678 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6679 layout_decl (exp, 0);
6681 /* ... fall through ... */
6683 case FUNCTION_DECL:
6684 case RESULT_DECL:
6685 gcc_assert (DECL_RTL (exp));
6687 /* Ensure the variable is marked as used even if it doesn't go through
6688 a parser. If it hasn't been used yet, write out an external
6689 definition. */
6690 if (! TREE_USED (exp))
6692 assemble_external (exp);
6693 TREE_USED (exp) = 1;
6696 /* Show we haven't gotten RTL for this yet. */
6697 temp = 0;
6699 /* Variables inherited from containing functions should have
6700 been lowered by this point. */
6701 context = decl_function_context (exp);
6702 gcc_assert (!context
6703 || context == current_function_decl
6704 || TREE_STATIC (exp)
6705 /* ??? C++ creates functions that are not TREE_STATIC. */
6706 || TREE_CODE (exp) == FUNCTION_DECL);
6708 /* This is the case of an array whose size is to be determined
6709 from its initializer, while the initializer is still being parsed.
6710 See expand_decl. */
6712 if (MEM_P (DECL_RTL (exp))
6713 && REG_P (XEXP (DECL_RTL (exp), 0)))
6714 temp = validize_mem (DECL_RTL (exp));
6716 /* If DECL_RTL is memory, we are in the normal case and either
6717 the address is not valid or it is not a register and -fforce-addr
6718 is specified, get the address into a register. */
6720 else if (MEM_P (DECL_RTL (exp))
6721 && modifier != EXPAND_CONST_ADDRESS
6722 && modifier != EXPAND_SUM
6723 && modifier != EXPAND_INITIALIZER
6724 && (! memory_address_p (DECL_MODE (exp),
6725 XEXP (DECL_RTL (exp), 0))
6726 || (flag_force_addr
6727 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6729 if (alt_rtl)
6730 *alt_rtl = DECL_RTL (exp);
6731 temp = replace_equiv_address (DECL_RTL (exp),
6732 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6735 /* If we got something, return it. But first, set the alignment
6736 if the address is a register. */
6737 if (temp != 0)
6739 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6740 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6742 return temp;
6745 /* If the mode of DECL_RTL does not match that of the decl, it
6746 must be a promoted value. We return a SUBREG of the wanted mode,
6747 but mark it so that we know that it was already extended. */
6749 if (REG_P (DECL_RTL (exp))
6750 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6752 enum machine_mode pmode;
6754 /* Get the signedness used for this variable. Ensure we get the
6755 same mode we got when the variable was declared. */
6756 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6757 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6758 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6760 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6761 SUBREG_PROMOTED_VAR_P (temp) = 1;
6762 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6763 return temp;
6766 return DECL_RTL (exp);
6768 case INTEGER_CST:
6769 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6770 TREE_INT_CST_HIGH (exp), mode);
6772 /* ??? If overflow is set, fold will have done an incomplete job,
6773 which can result in (plus xx (const_int 0)), which can get
6774 simplified by validate_replace_rtx during virtual register
6775 instantiation, which can result in unrecognizable insns.
6776 Avoid this by forcing all overflows into registers. */
6777 if (TREE_CONSTANT_OVERFLOW (exp)
6778 && modifier != EXPAND_INITIALIZER)
6779 temp = force_reg (mode, temp);
6781 return temp;
6783 case VECTOR_CST:
6784 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6785 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6786 return const_vector_from_tree (exp);
6787 else
6788 return expand_expr (build_constructor_from_list
6789 (TREE_TYPE (exp),
6790 TREE_VECTOR_CST_ELTS (exp)),
6791 ignore ? const0_rtx : target, tmode, modifier);
6793 case CONST_DECL:
6794 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6796 case REAL_CST:
6797 /* If optimized, generate immediate CONST_DOUBLE
6798 which will be turned into memory by reload if necessary.
6800 We used to force a register so that loop.c could see it. But
6801 this does not allow gen_* patterns to perform optimizations with
6802 the constants. It also produces two insns in cases like "x = 1.0;".
6803 On most machines, floating-point constants are not permitted in
6804 many insns, so we'd end up copying it to a register in any case.
6806 Now, we do the copying in expand_binop, if appropriate. */
6807 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6808 TYPE_MODE (TREE_TYPE (exp)));
6810 case COMPLEX_CST:
6811 /* Handle evaluating a complex constant in a CONCAT target. */
6812 if (original_target && GET_CODE (original_target) == CONCAT)
6814 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6815 rtx rtarg, itarg;
6817 rtarg = XEXP (original_target, 0);
6818 itarg = XEXP (original_target, 1);
6820 /* Move the real and imaginary parts separately. */
6821 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6822 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6824 if (op0 != rtarg)
6825 emit_move_insn (rtarg, op0);
6826 if (op1 != itarg)
6827 emit_move_insn (itarg, op1);
6829 return original_target;
6832 /* ... fall through ... */
6834 case STRING_CST:
6835 temp = output_constant_def (exp, 1);
6837 /* temp contains a constant address.
6838 On RISC machines where a constant address isn't valid,
6839 make some insns to get that address into a register. */
6840 if (modifier != EXPAND_CONST_ADDRESS
6841 && modifier != EXPAND_INITIALIZER
6842 && modifier != EXPAND_SUM
6843 && (! memory_address_p (mode, XEXP (temp, 0))
6844 || flag_force_addr))
6845 return replace_equiv_address (temp,
6846 copy_rtx (XEXP (temp, 0)));
6847 return temp;
6849 case SAVE_EXPR:
6851 tree val = TREE_OPERAND (exp, 0);
6852 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6854 if (!SAVE_EXPR_RESOLVED_P (exp))
6856 /* We can indeed still hit this case, typically via builtin
6857 expanders calling save_expr immediately before expanding
6858 something. Assume this means that we only have to deal
6859 with non-BLKmode values. */
6860 gcc_assert (GET_MODE (ret) != BLKmode);
6862 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6863 DECL_ARTIFICIAL (val) = 1;
6864 DECL_IGNORED_P (val) = 1;
6865 TREE_OPERAND (exp, 0) = val;
6866 SAVE_EXPR_RESOLVED_P (exp) = 1;
6868 if (!CONSTANT_P (ret))
6869 ret = copy_to_reg (ret);
6870 SET_DECL_RTL (val, ret);
6873 return ret;
6876 case GOTO_EXPR:
6877 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6878 expand_goto (TREE_OPERAND (exp, 0));
6879 else
6880 expand_computed_goto (TREE_OPERAND (exp, 0));
6881 return const0_rtx;
6883 case CONSTRUCTOR:
6884 /* If we don't need the result, just ensure we evaluate any
6885 subexpressions. */
6886 if (ignore)
6888 unsigned HOST_WIDE_INT idx;
6889 tree value;
6891 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6892 expand_expr (value, const0_rtx, VOIDmode, 0);
6894 return const0_rtx;
6897 /* Try to avoid creating a temporary at all. This is possible
6898 if all of the initializer is zero.
6899 FIXME: try to handle all [0..255] initializers we can handle
6900 with memset. */
6901 else if (TREE_STATIC (exp)
6902 && !TREE_ADDRESSABLE (exp)
6903 && target != 0 && mode == BLKmode
6904 && all_zeros_p (exp))
6906 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6907 return target;
6910 /* All elts simple constants => refer to a constant in memory. But
6911 if this is a non-BLKmode mode, let it store a field at a time
6912 since that should make a CONST_INT or CONST_DOUBLE when we
6913 fold. Likewise, if we have a target we can use, it is best to
6914 store directly into the target unless the type is large enough
6915 that memcpy will be used. If we are making an initializer and
6916 all operands are constant, put it in memory as well.
6918 FIXME: Avoid trying to fill vector constructors piece-meal.
6919 Output them with output_constant_def below unless we're sure
6920 they're zeros. This should go away when vector initializers
6921 are treated like VECTOR_CST instead of arrays.  */
6923 else if ((TREE_STATIC (exp)
6924 && ((mode == BLKmode
6925 && ! (target != 0 && safe_from_p (target, exp, 1)))
6926 || TREE_ADDRESSABLE (exp)
6927 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6928 && (! MOVE_BY_PIECES_P
6929 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6930 TYPE_ALIGN (type)))
6931 && ! mostly_zeros_p (exp))))
6932 || ((modifier == EXPAND_INITIALIZER
6933 || modifier == EXPAND_CONST_ADDRESS)
6934 && TREE_CONSTANT (exp)))
6936 rtx constructor = output_constant_def (exp, 1);
6938 if (modifier != EXPAND_CONST_ADDRESS
6939 && modifier != EXPAND_INITIALIZER
6940 && modifier != EXPAND_SUM)
6941 constructor = validize_mem (constructor);
6943 return constructor;
6945 else
6947 /* Handle calls that pass values in multiple non-contiguous
6948 locations. The Irix 6 ABI has examples of this. */
6949 if (target == 0 || ! safe_from_p (target, exp, 1)
6950 || GET_CODE (target) == PARALLEL
6951 || modifier == EXPAND_STACK_PARM)
6952 target
6953 = assign_temp (build_qualified_type (type,
6954 (TYPE_QUALS (type)
6955 | (TREE_READONLY (exp)
6956 * TYPE_QUAL_CONST))),
6957 0, TREE_ADDRESSABLE (exp), 1);
6959 store_constructor (exp, target, 0, int_expr_size (exp));
6960 return target;
6963 case MISALIGNED_INDIRECT_REF:
6964 case ALIGN_INDIRECT_REF:
6965 case INDIRECT_REF:
6967 tree exp1 = TREE_OPERAND (exp, 0);
6969 if (modifier != EXPAND_WRITE)
6971 tree t;
6973 t = fold_read_from_constant_string (exp);
6974 if (t)
6975 return expand_expr (t, target, tmode, modifier);
6978 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6979 op0 = memory_address (mode, op0);
6981 if (code == ALIGN_INDIRECT_REF)
6983 int align = TYPE_ALIGN_UNIT (type);
6984 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6985 op0 = memory_address (mode, op0);
6988 temp = gen_rtx_MEM (mode, op0);
6990 set_mem_attributes (temp, exp, 0);
6992 /* Resolve the misalignment now, so that we don't have to remember
6993 to resolve it later. Of course, this only works for reads. */
6994 /* ??? When we get around to supporting writes, we'll have to handle
6995 this in store_expr directly. The vectorizer isn't generating
6996 those yet, however. */
6997 if (code == MISALIGNED_INDIRECT_REF)
6999 int icode;
7000 rtx reg, insn;
7002 gcc_assert (modifier == EXPAND_NORMAL
7003 || modifier == EXPAND_STACK_PARM);
7005 /* The vectorizer should have already checked the mode. */
7006 icode = movmisalign_optab->handlers[mode].insn_code;
7007 gcc_assert (icode != CODE_FOR_nothing);
7009 /* We've already validated the memory, and we're creating a
7010 new pseudo destination. The predicates really can't fail. */
7011 reg = gen_reg_rtx (mode);
7013 /* Nor can the insn generator. */
7014 insn = GEN_FCN (icode) (reg, temp);
7015 emit_insn (insn);
7017 return reg;
7020 return temp;
7023 case TARGET_MEM_REF:
7025 struct mem_address addr;
7027 get_address_description (exp, &addr);
7028 op0 = addr_for_mem_ref (&addr, true);
7029 op0 = memory_address (mode, op0);
7030 temp = gen_rtx_MEM (mode, op0);
7031 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7033 return temp;
7035 case ARRAY_REF:
7038 tree array = TREE_OPERAND (exp, 0);
7039 tree index = TREE_OPERAND (exp, 1);
7041 /* Fold an expression like: "foo"[2].
7042 This is not done in fold so it won't happen inside &.
7043 Don't fold if this is for wide characters since it's too
7044 difficult to do correctly and this is a very rare case. */
7046 if (modifier != EXPAND_CONST_ADDRESS
7047 && modifier != EXPAND_INITIALIZER
7048 && modifier != EXPAND_MEMORY)
7050 tree t = fold_read_from_constant_string (exp);
7052 if (t)
7053 return expand_expr (t, target, tmode, modifier);
7056 /* If this is a constant index into a constant array,
7057 just get the value from the array. Handle both the cases when
7058 we have an explicit constructor and when our operand is a variable
7059 that was declared const. */
7061 if (modifier != EXPAND_CONST_ADDRESS
7062 && modifier != EXPAND_INITIALIZER
7063 && modifier != EXPAND_MEMORY
7064 && TREE_CODE (array) == CONSTRUCTOR
7065 && ! TREE_SIDE_EFFECTS (array)
7066 && TREE_CODE (index) == INTEGER_CST)
7068 unsigned HOST_WIDE_INT ix;
7069 tree field, value;
7071 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7072 field, value)
7073 if (tree_int_cst_equal (field, index))
7075 if (!TREE_SIDE_EFFECTS (value))
7076 return expand_expr (fold (value), target, tmode, modifier);
7077 break;
7081 else if (optimize >= 1
7082 && modifier != EXPAND_CONST_ADDRESS
7083 && modifier != EXPAND_INITIALIZER
7084 && modifier != EXPAND_MEMORY
7085 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7086 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7087 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7088 && targetm.binds_local_p (array))
7090 if (TREE_CODE (index) == INTEGER_CST)
7092 tree init = DECL_INITIAL (array);
7094 if (TREE_CODE (init) == CONSTRUCTOR)
7096 unsigned HOST_WIDE_INT ix;
7097 tree field, value;
7099 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7100 field, value)
7101 if (tree_int_cst_equal (field, index))
7103 if (!TREE_SIDE_EFFECTS (value))
7104 return expand_expr (fold (value), target, tmode,
7105 modifier);
7106 break;
7109 else if (TREE_CODE (init) == STRING_CST
7110 && 0 > compare_tree_int (index,
7111 TREE_STRING_LENGTH (init)))
7113 tree type = TREE_TYPE (TREE_TYPE (init));
7114 enum machine_mode mode = TYPE_MODE (type);
7116 if (GET_MODE_CLASS (mode) == MODE_INT
7117 && GET_MODE_SIZE (mode) == 1)
7118 return gen_int_mode (TREE_STRING_POINTER (init)
7119 [TREE_INT_CST_LOW (index)], mode);
7124 goto normal_inner_ref;
7126 case COMPONENT_REF:
7127 /* If the operand is a CONSTRUCTOR, we can just extract the
7128 appropriate field if it is present. */
7129 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7131 unsigned HOST_WIDE_INT idx;
7132 tree field, value;
7134 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7135 idx, field, value)
7136 if (field == TREE_OPERAND (exp, 1)
7137 /* We can normally use the value of the field in the
7138 CONSTRUCTOR. However, if this is a bitfield in
7139 an integral mode that we can fit in a HOST_WIDE_INT,
7140 we must mask only the number of bits in the bitfield,
7141 since this is done implicitly by the constructor. If
7142 the bitfield does not meet either of those conditions,
7143 we can't do this optimization. */
7144 && (! DECL_BIT_FIELD (field)
7145 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7146 && (GET_MODE_BITSIZE (DECL_MODE (field))
7147 <= HOST_BITS_PER_WIDE_INT))))
7149 if (DECL_BIT_FIELD (field)
7150 && modifier == EXPAND_STACK_PARM)
7151 target = 0;
7152 op0 = expand_expr (value, target, tmode, modifier);
7153 if (DECL_BIT_FIELD (field))
7155 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7156 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7158 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7160 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7161 op0 = expand_and (imode, op0, op1, target);
7163 else
7165 tree count
7166 = build_int_cst (NULL_TREE,
7167 GET_MODE_BITSIZE (imode) - bitsize);
7169 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7170 target, 0);
7171 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7172 target, 0);
7176 return op0;
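 /* Illustrative worked example (not part of the original source): for a
    signed 8-bit bit-field whose value is held in SImode (32 bits), COUNT
    above is 32 - 8 = 24, so the two expand_shift calls compute
    op0 = (op0 << 24) >> 24 with an arithmetic right shift, which
    sign-extends the low 8 bits; the unsigned branch instead masks with
    (1 << 8) - 1 = 0xff.  */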
7179 goto normal_inner_ref;
7181 case BIT_FIELD_REF:
7182 case ARRAY_RANGE_REF:
7183 normal_inner_ref:
7185 enum machine_mode mode1;
7186 HOST_WIDE_INT bitsize, bitpos;
7187 tree offset;
7188 int volatilep = 0;
7189 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7190 &mode1, &unsignedp, &volatilep, true);
7191 rtx orig_op0;
7193 /* If we got back the original object, something is wrong. Perhaps
7194 we are evaluating an expression too early. In any event, don't
7195 infinitely recurse. */
7196 gcc_assert (tem != exp);
7198 /* If TEM's type is a union of variable size, pass TARGET to the inner
7199 computation, since it will need a temporary and TARGET is known
7200 to be safe to use for it. This occurs in unchecked conversion in Ada. */
7202 orig_op0 = op0
7203 = expand_expr (tem,
7204 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7205 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7206 != INTEGER_CST)
7207 && modifier != EXPAND_STACK_PARM
7208 ? target : NULL_RTX),
7209 VOIDmode,
7210 (modifier == EXPAND_INITIALIZER
7211 || modifier == EXPAND_CONST_ADDRESS
7212 || modifier == EXPAND_STACK_PARM)
7213 ? modifier : EXPAND_NORMAL);
7215 /* If this is a constant, put it into a register if it is a legitimate
7216 constant, OFFSET is 0, and we won't try to extract outside the
7217 register (in case we were passed a partially uninitialized object
7218 or a view_conversion to a larger size). Force the constant to
7219 memory otherwise. */
7220 if (CONSTANT_P (op0))
7222 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7223 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7224 && offset == 0
7225 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7226 op0 = force_reg (mode, op0);
7227 else
7228 op0 = validize_mem (force_const_mem (mode, op0));
7231 /* Otherwise, if this object is not in memory and we either have an
7232 offset, a BLKmode result, or a reference outside the object, put it
7233 there. Such cases can occur in Ada if we have unchecked conversion
7234 of an expression from a scalar type to an array or record type or
7235 for an ARRAY_RANGE_REF whose type is BLKmode. */
7236 else if (!MEM_P (op0)
7237 && (offset != 0
7238 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7239 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7241 tree nt = build_qualified_type (TREE_TYPE (tem),
7242 (TYPE_QUALS (TREE_TYPE (tem))
7243 | TYPE_QUAL_CONST));
7244 rtx memloc = assign_temp (nt, 1, 1, 1);
7246 emit_move_insn (memloc, op0);
7247 op0 = memloc;
7250 if (offset != 0)
7252 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7253 EXPAND_SUM);
7255 gcc_assert (MEM_P (op0));
7257 #ifdef POINTERS_EXTEND_UNSIGNED
7258 if (GET_MODE (offset_rtx) != Pmode)
7259 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7260 #else
7261 if (GET_MODE (offset_rtx) != ptr_mode)
7262 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7263 #endif
7265 if (GET_MODE (op0) == BLKmode
7266 /* A constant address in OP0 can have VOIDmode; we must
7267 not try to call force_reg in that case. */
7268 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7269 && bitsize != 0
7270 && (bitpos % bitsize) == 0
7271 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7272 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7274 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7275 bitpos = 0;
7278 op0 = offset_address (op0, offset_rtx,
7279 highest_pow2_factor (offset));
7282 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7283 record its alignment as BIGGEST_ALIGNMENT. */
7284 if (MEM_P (op0) && bitpos == 0 && offset != 0
7285 && is_aligning_offset (offset, tem))
7286 set_mem_align (op0, BIGGEST_ALIGNMENT);
7288 /* Don't forget about volatility even if this is a bitfield. */
7289 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7291 if (op0 == orig_op0)
7292 op0 = copy_rtx (op0);
7294 MEM_VOLATILE_P (op0) = 1;
7297 /* The following code doesn't handle CONCAT.
7298 Assume only bitpos == 0 can be used for CONCAT, due to
7299 one-element arrays having the same mode as their element. */
7300 if (GET_CODE (op0) == CONCAT)
7302 gcc_assert (bitpos == 0
7303 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7304 return op0;
7307 /* In cases where an aligned union has an unaligned object
7308 as a field, we might be extracting a BLKmode value from
7309 an integer-mode (e.g., SImode) object. Handle this case
7310 by doing the extract into an object as wide as the field
7311 (which we know to be the width of a basic mode), then
7312 storing into memory, and changing the mode to BLKmode. */
7313 if (mode1 == VOIDmode
7314 || REG_P (op0) || GET_CODE (op0) == SUBREG
7315 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7316 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7317 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7318 && modifier != EXPAND_CONST_ADDRESS
7319 && modifier != EXPAND_INITIALIZER)
7320 /* If the field isn't aligned enough to fetch as a memref,
7321 fetch it as a bit field. */
7322 || (mode1 != BLKmode
7323 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7324 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7325 || (MEM_P (op0)
7326 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7327 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7328 && ((modifier == EXPAND_CONST_ADDRESS
7329 || modifier == EXPAND_INITIALIZER)
7330 ? STRICT_ALIGNMENT
7331 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7332 || (bitpos % BITS_PER_UNIT != 0)))
7333 /* If the type and the field are a constant size and the
7334 size of the type isn't the same size as the bitfield,
7335 we must use bitfield operations. */
7336 || (bitsize >= 0
7337 && TYPE_SIZE (TREE_TYPE (exp))
7338 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7339 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7340 bitsize)))
7342 enum machine_mode ext_mode = mode;
7344 if (ext_mode == BLKmode
7345 && ! (target != 0 && MEM_P (op0)
7346 && MEM_P (target)
7347 && bitpos % BITS_PER_UNIT == 0))
7348 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7350 if (ext_mode == BLKmode)
7352 if (target == 0)
7353 target = assign_temp (type, 0, 1, 1);
7355 if (bitsize == 0)
7356 return target;
7358 /* In this case, BITPOS must start at a byte boundary and
7359 TARGET, if specified, must be a MEM. */
7360 gcc_assert (MEM_P (op0)
7361 && (!target || MEM_P (target))
7362 && !(bitpos % BITS_PER_UNIT));
7364 emit_block_move (target,
7365 adjust_address (op0, VOIDmode,
7366 bitpos / BITS_PER_UNIT),
7367 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7368 / BITS_PER_UNIT),
7369 (modifier == EXPAND_STACK_PARM
7370 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7372 return target;
7375 op0 = validize_mem (op0);
7377 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7378 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7380 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7381 (modifier == EXPAND_STACK_PARM
7382 ? NULL_RTX : target),
7383 ext_mode, ext_mode);
7385 /* If the result is a record type and BITSIZE is narrower than
7386 the mode of OP0, an integral mode, and this is a big endian
7387 machine, we must put the field into the high-order bits. */
7388 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7389 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7390 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7391 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7392 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7393 - bitsize),
7394 op0, 1);
7396 /* If the result type is BLKmode, store the data into a temporary
7397 of the appropriate type, but with the mode corresponding to the
7398 mode for the data we have (op0's mode). It's tempting to make
7399 this a constant type, since we know it's only being stored once,
7400 but that can cause problems if we are taking the address of this
7401 COMPONENT_REF because the MEM of any reference via that address
7402 will have flags corresponding to the type, which will not
7403 necessarily be constant. */
7404 if (mode == BLKmode)
7406 rtx new
7407 = assign_stack_temp_for_type
7408 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7410 emit_move_insn (new, op0);
7411 op0 = copy_rtx (new);
7412 PUT_MODE (op0, BLKmode);
7413 set_mem_attributes (op0, exp, 1);
7416 return op0;
7419 /* If the result is BLKmode, use that to access the object
7420 now as well. */
7421 if (mode == BLKmode)
7422 mode1 = BLKmode;
7424 /* Get a reference to just this component. */
7425 if (modifier == EXPAND_CONST_ADDRESS
7426 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7427 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7428 else
7429 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7431 if (op0 == orig_op0)
7432 op0 = copy_rtx (op0);
7434 set_mem_attributes (op0, exp, 0);
7435 if (REG_P (XEXP (op0, 0)))
7436 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7438 MEM_VOLATILE_P (op0) |= volatilep;
7439 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7440 || modifier == EXPAND_CONST_ADDRESS
7441 || modifier == EXPAND_INITIALIZER)
7442 return op0;
7443 else if (target == 0)
7444 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7446 convert_move (target, op0, unsignedp);
7447 return target;
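/* An OBJ_TYPE_REF only carries the static type information used when
   devirtualizing a virtual call; for expansion purposes the wrapped
   expression is all that matters, so just expand that.  */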
7450 case OBJ_TYPE_REF:
7451 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7453 case CALL_EXPR:
7454 /* Check for a built-in function. */
7455 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7456 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7457 == FUNCTION_DECL)
7458 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7460 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7461 == BUILT_IN_FRONTEND)
7462 return lang_hooks.expand_expr (exp, original_target,
7463 tmode, modifier,
7464 alt_rtl);
7465 else
7466 return expand_builtin (exp, target, subtarget, tmode, ignore);
7469 return expand_call (exp, target, ignore);
7471 case NON_LVALUE_EXPR:
7472 case NOP_EXPR:
7473 case CONVERT_EXPR:
7474 if (TREE_OPERAND (exp, 0) == error_mark_node)
7475 return const0_rtx;
7477 if (TREE_CODE (type) == UNION_TYPE)
7479 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7481 /* If both input and output are BLKmode, this conversion isn't doing
7482 anything except possibly changing memory attribute. */
7483 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7485 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7486 modifier);
7488 result = copy_rtx (result);
7489 set_mem_attributes (result, exp, 0);
7490 return result;
7493 if (target == 0)
7495 if (TYPE_MODE (type) != BLKmode)
7496 target = gen_reg_rtx (TYPE_MODE (type));
7497 else
7498 target = assign_temp (type, 0, 1, 1);
7501 if (MEM_P (target))
7502 /* Store data into beginning of memory target. */
7503 store_expr (TREE_OPERAND (exp, 0),
7504 adjust_address (target, TYPE_MODE (valtype), 0),
7505 modifier == EXPAND_STACK_PARM);
7507 else
7509 gcc_assert (REG_P (target));
7511 /* Store this field into a union of the proper type. */
7512 store_field (target,
7513 MIN ((int_size_in_bytes (TREE_TYPE
7514 (TREE_OPERAND (exp, 0)))
7515 * BITS_PER_UNIT),
7516 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7517 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7518 type, 0);
7521 /* Return the entire union. */
7522 return target;
7525 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7527 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7528 modifier);
7530 /* If the signedness of the conversion differs and OP0 is
7531 a promoted SUBREG, clear that indication since we now
7532 have to do the proper extension. */
7533 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7534 && GET_CODE (op0) == SUBREG)
7535 SUBREG_PROMOTED_VAR_P (op0) = 0;
7537 return REDUCE_BIT_FIELD (op0);
7540 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7541 if (GET_MODE (op0) == mode)
7544 /* If OP0 is a constant, just convert it into the proper mode. */
7545 else if (CONSTANT_P (op0))
7547 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7548 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7550 if (modifier == EXPAND_INITIALIZER)
7551 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7552 subreg_lowpart_offset (mode,
7553 inner_mode));
7554 else
7555 op0 = convert_modes (mode, inner_mode, op0,
7556 TYPE_UNSIGNED (inner_type));
7559 else if (modifier == EXPAND_INITIALIZER)
7560 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7562 else if (target == 0)
7563 op0 = convert_to_mode (mode, op0,
7564 TYPE_UNSIGNED (TREE_TYPE
7565 (TREE_OPERAND (exp, 0))));
7566 else
7568 convert_move (target, op0,
7569 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7570 op0 = target;
7573 return REDUCE_BIT_FIELD (op0);
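/* A VIEW_CONVERT_EXPR reinterprets the bits of its operand as a value
   of the result type without changing them, so the work below is purely
   a matter of getting the operand into an object of the right mode.  */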
7575 case VIEW_CONVERT_EXPR:
7576 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7578 /* If the input and output modes are both the same, we are done. */
7579 if (TYPE_MODE (type) == GET_MODE (op0))
7581 /* If neither mode is BLKmode, and both modes are the same size
7582 then we can use gen_lowpart. */
7583 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7584 && GET_MODE_SIZE (TYPE_MODE (type))
7585 == GET_MODE_SIZE (GET_MODE (op0)))
7587 if (GET_CODE (op0) == SUBREG)
7588 op0 = force_reg (GET_MODE (op0), op0);
7589 op0 = gen_lowpart (TYPE_MODE (type), op0);
7591 /* If both modes are integral, then we can convert from one to the
7592 other. */
7593 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7594 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7595 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7596 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7597 /* As a last resort, spill op0 to memory, and reload it in a
7598 different mode. */
7599 else if (!MEM_P (op0))
7601 /* If the operand is not a MEM, force it into memory. Since we
7602 are going to be changing the mode of the MEM, don't call
7603 force_const_mem for constants because we don't allow pool
7604 constants to change mode. */
7605 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7607 gcc_assert (!TREE_ADDRESSABLE (exp));
7609 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7610 target
7611 = assign_stack_temp_for_type
7612 (TYPE_MODE (inner_type),
7613 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7615 emit_move_insn (target, op0);
7616 op0 = target;
7619 /* At this point, OP0 is in the correct mode. If the output type is such
7620 that the operand is known to be aligned, indicate that it is.
7621 Otherwise, we need only be concerned about alignment for non-BLKmode
7622 results. */
7623 if (MEM_P (op0))
7625 op0 = copy_rtx (op0);
7627 if (TYPE_ALIGN_OK (type))
7628 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7629 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7630 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7632 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7633 HOST_WIDE_INT temp_size
7634 = MAX (int_size_in_bytes (inner_type),
7635 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7636 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7637 temp_size, 0, type);
7638 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7640 gcc_assert (!TREE_ADDRESSABLE (exp));
7642 if (GET_MODE (op0) == BLKmode)
7643 emit_block_move (new_with_op0_mode, op0,
7644 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7645 (modifier == EXPAND_STACK_PARM
7646 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7647 else
7648 emit_move_insn (new_with_op0_mode, op0);
7650 op0 = new;
7653 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7656 return op0;
7658 case PLUS_EXPR:
7659 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7660 something else, make sure we add the register to the constant and
7661 then to the other thing. This case can occur during strength
7662 reduction and doing it this way will produce better code if the
7663 frame pointer or argument pointer is eliminated.
7665 fold-const.c will ensure that the constant is always in the inner
7666 PLUS_EXPR, so the only case we need to do anything about is if
7667 sp, ap, or fp is our second argument, in which case we must swap
7668 the innermost first argument and our second argument. */
7670 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7671 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7672 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7673 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7674 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7675 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7677 tree t = TREE_OPERAND (exp, 1);
7679 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7680 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7683 /* If the result is to be ptr_mode and we are adding an integer to
7684 something, we might be forming a constant. So try to use
7685 plus_constant. If it produces a sum and we can't accept it,
7686 use force_operand. This allows P = &ARR[const] to generate
7687 efficient code on machines where a SYMBOL_REF is not a valid
7688 address.
7690 If this is an EXPAND_SUM call, always return the sum. */
7691 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7692 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7694 if (modifier == EXPAND_STACK_PARM)
7695 target = 0;
7696 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7697 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7698 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7700 rtx constant_part;
7702 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7703 EXPAND_SUM);
7704 /* Use immed_double_const to ensure that the constant is
7705 truncated according to the mode of OP1, then sign extended
7706 to a HOST_WIDE_INT. Using the constant directly can result
7707 in non-canonical RTL in a 64x32 cross compile. */
7708 constant_part
7709 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7710 (HOST_WIDE_INT) 0,
7711 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7712 op1 = plus_constant (op1, INTVAL (constant_part));
7713 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7714 op1 = force_operand (op1, target);
7715 return REDUCE_BIT_FIELD (op1);
7718 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7719 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7720 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7722 rtx constant_part;
7724 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7725 (modifier == EXPAND_INITIALIZER
7726 ? EXPAND_INITIALIZER : EXPAND_SUM));
7727 if (! CONSTANT_P (op0))
7729 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7730 VOIDmode, modifier);
7731 /* Return a PLUS if modifier says it's OK. */
7732 if (modifier == EXPAND_SUM
7733 || modifier == EXPAND_INITIALIZER)
7734 return simplify_gen_binary (PLUS, mode, op0, op1);
7735 goto binop2;
7737 /* Use immed_double_const to ensure that the constant is
7738 truncated according to the mode of OP1, then sign extended
7739 to a HOST_WIDE_INT. Using the constant directly can result
7740 in non-canonical RTL in a 64x32 cross compile. */
7741 constant_part
7742 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7743 (HOST_WIDE_INT) 0,
7744 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7745 op0 = plus_constant (op0, INTVAL (constant_part));
7746 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7747 op0 = force_operand (op0, target);
7748 return REDUCE_BIT_FIELD (op0);
7752 /* No sense saving up arithmetic to be done
7753 if it's all in the wrong mode to form part of an address.
7754 And force_operand won't know whether to sign-extend or
7755 zero-extend. */
7756 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7757 || mode != ptr_mode)
7759 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7760 subtarget, &op0, &op1, 0);
7761 if (op0 == const0_rtx)
7762 return op1;
7763 if (op1 == const0_rtx)
7764 return op0;
7765 goto binop2;
7768 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7769 subtarget, &op0, &op1, modifier);
7770 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7772 case MINUS_EXPR:
7773 /* For initializers, we are allowed to return a MINUS of two
7774 symbolic constants. Here we handle all cases when both operands
7775 are constant. */
7776 /* Handle difference of two symbolic constants,
7777 for the sake of an initializer. */
7778 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7779 && really_constant_p (TREE_OPERAND (exp, 0))
7780 && really_constant_p (TREE_OPERAND (exp, 1)))
7782 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7783 NULL_RTX, &op0, &op1, modifier);
7785 /* If the last operand is a CONST_INT, use plus_constant of
7786 the negated constant. Else make the MINUS. */
7787 if (GET_CODE (op1) == CONST_INT)
7788 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7789 else
7790 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7793 /* No sense saving up arithmetic to be done
7794 if it's all in the wrong mode to form part of an address.
7795 And force_operand won't know whether to sign-extend or
7796 zero-extend. */
7797 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7798 || mode != ptr_mode)
7799 goto binop;
7801 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7802 subtarget, &op0, &op1, modifier);
7804 /* Convert A - const to A + (-const). */
7805 if (GET_CODE (op1) == CONST_INT)
7807 op1 = negate_rtx (mode, op1);
7808 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7811 goto binop2;
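/* SAT_MINUS_EXPR denotes a saturating subtraction (a tree code specific
   to this branch); it needs no special handling here and is expanded
   like any other binary operator through the optab returned by
   optab_for_tree_code.  */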
7813 case SAT_MINUS_EXPR:
7814 goto binop;
7816 case MULT_EXPR:
7817 /* If first operand is constant, swap them.
7818 Thus the following special case checks need only
7819 check the second operand. */
7820 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7822 tree t1 = TREE_OPERAND (exp, 0);
7823 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7824 TREE_OPERAND (exp, 1) = t1;
7827 /* Attempt to return something suitable for generating an
7828 indexed address, for machines that support that. */
7830 if (modifier == EXPAND_SUM && mode == ptr_mode
7831 && host_integerp (TREE_OPERAND (exp, 1), 0))
7833 tree exp1 = TREE_OPERAND (exp, 1);
7835 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7836 EXPAND_SUM);
7838 if (!REG_P (op0))
7839 op0 = force_operand (op0, NULL_RTX);
7840 if (!REG_P (op0))
7841 op0 = copy_to_mode_reg (mode, op0);
7843 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7844 gen_int_mode (tree_low_cst (exp1, 0),
7845 TYPE_MODE (TREE_TYPE (exp1)))));
7848 if (modifier == EXPAND_STACK_PARM)
7849 target = 0;
7851 /* Check for multiplying things that have been extended
7852 from a narrower type. If this machine supports multiplying
7853 in that narrower type with a result in the desired type,
7854 do it that way, and avoid the explicit type-conversion. */
7856 subexp0 = TREE_OPERAND (exp, 0);
7857 subexp1 = TREE_OPERAND (exp, 1);
7858 /* First, check if we have a multiplication of one signed and one
7859 unsigned operand. */
7860 if (TREE_CODE (subexp0) == NOP_EXPR
7861 && TREE_CODE (subexp1) == NOP_EXPR
7862 && TREE_CODE (type) == INTEGER_TYPE
7863 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7864 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7865 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7866 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
7867 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7868 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
7870 enum machine_mode innermode
7871 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
7872 this_optab = usmul_widen_optab;
7873 if (mode == GET_MODE_WIDER_MODE (innermode))
7875 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7877 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
7878 expand_operands (TREE_OPERAND (subexp0, 0),
7879 TREE_OPERAND (subexp1, 0),
7880 NULL_RTX, &op0, &op1, 0);
7881 else
7882 expand_operands (TREE_OPERAND (subexp0, 0),
7883 TREE_OPERAND (subexp1, 0),
7884 NULL_RTX, &op1, &op0, 0);
7886 goto binop3;
7890 /* Check for a multiplication with matching signedness. */
7891 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7892 && TREE_CODE (type) == INTEGER_TYPE
7893 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7894 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7895 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7896 && int_fits_type_p (TREE_OPERAND (exp, 1),
7897 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7898 /* Don't use a widening multiply if a shift will do. */
7899 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7900 > HOST_BITS_PER_WIDE_INT)
7901 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7903 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7904 && (TYPE_PRECISION (TREE_TYPE
7905 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7906 == TYPE_PRECISION (TREE_TYPE
7907 (TREE_OPERAND
7908 (TREE_OPERAND (exp, 0), 0))))
7909 /* If both operands are extended, they must either both
7910 be zero-extended or both be sign-extended. */
7911 && (TYPE_UNSIGNED (TREE_TYPE
7912 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7913 == TYPE_UNSIGNED (TREE_TYPE
7914 (TREE_OPERAND
7915 (TREE_OPERAND (exp, 0), 0)))))))
7917 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7918 enum machine_mode innermode = TYPE_MODE (op0type);
7919 bool zextend_p = TYPE_UNSIGNED (op0type);
7920 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7921 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7923 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7925 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7927 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7928 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7929 TREE_OPERAND (exp, 1),
7930 NULL_RTX, &op0, &op1, 0);
7931 else
7932 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7933 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7934 NULL_RTX, &op0, &op1, 0);
7935 goto binop3;
7937 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7938 && innermode == word_mode)
7940 rtx htem, hipart;
7941 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7942 NULL_RTX, VOIDmode, 0);
7943 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7944 op1 = convert_modes (innermode, mode,
7945 expand_expr (TREE_OPERAND (exp, 1),
7946 NULL_RTX, VOIDmode, 0),
7947 unsignedp);
7948 else
7949 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7950 NULL_RTX, VOIDmode, 0);
7951 temp = expand_binop (mode, other_optab, op0, op1, target,
7952 unsignedp, OPTAB_LIB_WIDEN);
7953 hipart = gen_highpart (innermode, temp);
7954 htem = expand_mult_highpart_adjust (innermode, hipart,
7955 op0, op1, hipart,
7956 zextend_p);
7957 if (htem != hipart)
7958 emit_move_insn (hipart, htem);
7959 return REDUCE_BIT_FIELD (temp);
7963 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7964 subtarget, &op0, &op1, 0);
7965 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
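/* The division codes below all go through expand_divmod; its first
   argument selects whether the quotient (0) or the remainder (1) is
   wanted, so the same routine also serves the modulus cases further
   down.  */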
7967 case TRUNC_DIV_EXPR:
7968 case FLOOR_DIV_EXPR:
7969 case CEIL_DIV_EXPR:
7970 case ROUND_DIV_EXPR:
7971 case EXACT_DIV_EXPR:
7972 if (modifier == EXPAND_STACK_PARM)
7973 target = 0;
7974 /* Possible optimization: compute the dividend with EXPAND_SUM
7975 then if the divisor is constant we can optimize the case
7976 where some terms of the dividend have coeffs divisible by it. */
7977 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7978 subtarget, &op0, &op1, 0);
7979 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7981 case RDIV_EXPR:
7982 goto binop;
7984 case TRUNC_MOD_EXPR:
7985 case FLOOR_MOD_EXPR:
7986 case CEIL_MOD_EXPR:
7987 case ROUND_MOD_EXPR:
7988 if (modifier == EXPAND_STACK_PARM)
7989 target = 0;
7990 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7991 subtarget, &op0, &op1, 0);
7992 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7994 case FIX_ROUND_EXPR:
7995 case FIX_FLOOR_EXPR:
7996 case FIX_CEIL_EXPR:
7997 gcc_unreachable (); /* Not used for C. */
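/* FIX_TRUNC_EXPR converts a floating-point operand to an integer,
   truncating toward zero; expand_fix emits the appropriate instruction
   or library call for the target.  */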
7999 case FIX_TRUNC_EXPR:
8000 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8001 if (target == 0 || modifier == EXPAND_STACK_PARM)
8002 target = gen_reg_rtx (mode);
8003 expand_fix (target, op0, unsignedp);
8004 return target;
8006 case FLOAT_EXPR:
8007 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8008 if (target == 0 || modifier == EXPAND_STACK_PARM)
8009 target = gen_reg_rtx (mode);
8010 /* expand_float can't figure out what to do if FROM has VOIDmode.
8011 So give it the correct mode. With -O, cse will optimize this. */
8012 if (GET_MODE (op0) == VOIDmode)
8013 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8014 op0);
8015 expand_float (target, op0,
8016 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8017 return target;
8019 case NEGATE_EXPR:
8020 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8021 if (modifier == EXPAND_STACK_PARM)
8022 target = 0;
8023 temp = expand_unop (mode,
8024 optab_for_tree_code (NEGATE_EXPR, type),
8025 op0, target, 0);
8026 gcc_assert (temp);
8027 return REDUCE_BIT_FIELD (temp);
8029 case ABS_EXPR:
8030 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8031 if (modifier == EXPAND_STACK_PARM)
8032 target = 0;
8034 /* ABS_EXPR is not valid for complex arguments. */
8035 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8036 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8038 /* Unsigned abs is simply the operand. Testing here means we don't
8039 risk generating incorrect code below. */
8040 if (TYPE_UNSIGNED (type))
8041 return op0;
8043 return expand_abs (mode, op0, target, unsignedp,
8044 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8046 case MAX_EXPR:
8047 case MIN_EXPR:
8048 target = original_target;
8049 if (target == 0
8050 || modifier == EXPAND_STACK_PARM
8051 || (MEM_P (target) && MEM_VOLATILE_P (target))
8052 || GET_MODE (target) != mode
8053 || (REG_P (target)
8054 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8055 target = gen_reg_rtx (mode);
8056 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8057 target, &op0, &op1, 0);
8059 /* First try to do it with a special MIN or MAX instruction.
8060 If that does not win, use a conditional jump to select the proper
8061 value. */
8062 this_optab = optab_for_tree_code (code, type);
8063 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8064 OPTAB_WIDEN);
8065 if (temp != 0)
8066 return temp;
8068 /* At this point, a MEM target is no longer useful; we will get better
8069 code without it. */
8071 if (! REG_P (target))
8072 target = gen_reg_rtx (mode);
8074 /* If op1 was placed in target, swap op0 and op1. */
8075 if (target != op0 && target == op1)
8077 temp = op0;
8078 op0 = op1;
8079 op1 = temp;
8082 /* We generate better code and avoid problems with op1 mentioning
8083 target by forcing op1 into a pseudo if it isn't a constant. */
8084 if (! CONSTANT_P (op1))
8085 op1 = force_reg (mode, op1);
8088 enum rtx_code comparison_code;
8089 rtx cmpop1 = op1;
8091 if (code == MAX_EXPR)
8092 comparison_code = unsignedp ? GEU : GE;
8093 else
8094 comparison_code = unsignedp ? LEU : LE;
8096 /* Canonicalize to comparisons against 0. */
8097 if (op1 == const1_rtx)
8099 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8100 or (a != 0 ? a : 1) for unsigned.
8101 For MIN we are safe converting (a <= 1 ? a : 1)
8102 into (a <= 0 ? a : 1) */
8103 cmpop1 = const0_rtx;
8104 if (code == MAX_EXPR)
8105 comparison_code = unsignedp ? NE : GT;
8107 if (op1 == constm1_rtx && !unsignedp)
8109 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8110 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8111 cmpop1 = const0_rtx;
8112 if (code == MIN_EXPR)
8113 comparison_code = LT;
8115 #ifdef HAVE_conditional_move
8116 /* Use a conditional move if possible. */
8117 if (can_conditionally_move_p (mode))
8119 rtx insn;
8121 /* ??? Same problem as in expmed.c: emit_conditional_move
8122 forces a stack adjustment via compare_from_rtx, and we
8123 lose the stack adjustment if the sequence we are about
8124 to create is discarded. */
8125 do_pending_stack_adjust ();
8127 start_sequence ();
8129 /* Try to emit the conditional move. */
8130 insn = emit_conditional_move (target, comparison_code,
8131 op0, cmpop1, mode,
8132 op0, op1, mode,
8133 unsignedp);
8135 /* If we could do the conditional move, emit the sequence,
8136 and return. */
8137 if (insn)
8139 rtx seq = get_insns ();
8140 end_sequence ();
8141 emit_insn (seq);
8142 return target;
8145 /* Otherwise discard the sequence and fall back to code with
8146 branches. */
8147 end_sequence ();
8149 #endif
8150 if (target != op0)
8151 emit_move_insn (target, op0);
8153 temp = gen_label_rtx ();
8155 /* If this mode is an integer too wide to compare properly,
8156 compare word by word. Rely on cse to optimize constant cases. */
8157 if (GET_MODE_CLASS (mode) == MODE_INT
8158 && ! can_compare_p (GE, mode, ccp_jump))
8160 if (code == MAX_EXPR)
8161 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8162 NULL_RTX, temp);
8163 else
8164 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8165 NULL_RTX, temp);
8167 else
8169 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8170 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8173 emit_move_insn (target, op1);
8174 emit_label (temp);
8175 return target;
8177 case BIT_NOT_EXPR:
8178 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8179 if (modifier == EXPAND_STACK_PARM)
8180 target = 0;
8181 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8182 gcc_assert (temp);
8183 return temp;
8185 /* ??? Can optimize bitwise operations with one arg constant.
8186 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8187 and (a bitwise1 b) bitwise2 b (etc)
8188 but that is probably not worth while. */
8190 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8191 boolean values when we want in all cases to compute both of them. In
8192 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8193 as actual zero-or-1 values and then bitwise anding. In cases where
8194 there cannot be any side effects, better code would be made by
8195 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8196 how to recognize those cases. */
8198 case TRUTH_AND_EXPR:
8199 code = BIT_AND_EXPR;
8200 case BIT_AND_EXPR:
8201 goto binop;
8203 case TRUTH_OR_EXPR:
8204 code = BIT_IOR_EXPR;
8205 case BIT_IOR_EXPR:
8206 goto binop;
8208 case TRUTH_XOR_EXPR:
8209 code = BIT_XOR_EXPR;
8210 case BIT_XOR_EXPR:
8211 goto binop;
8213 case LSHIFT_EXPR:
8214 case RSHIFT_EXPR:
8215 case LROTATE_EXPR:
8216 case RROTATE_EXPR:
8217 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8218 subtarget = 0;
8219 if (modifier == EXPAND_STACK_PARM)
8220 target = 0;
8221 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8222 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8223 unsignedp);
8225 /* Could determine the answer when only additive constants differ. Also,
8226 the addition of one can be handled by changing the condition. */
8227 case LT_EXPR:
8228 case LE_EXPR:
8229 case GT_EXPR:
8230 case GE_EXPR:
8231 case EQ_EXPR:
8232 case NE_EXPR:
8233 case UNORDERED_EXPR:
8234 case ORDERED_EXPR:
8235 case UNLT_EXPR:
8236 case UNLE_EXPR:
8237 case UNGT_EXPR:
8238 case UNGE_EXPR:
8239 case UNEQ_EXPR:
8240 case LTGT_EXPR:
8241 temp = do_store_flag (exp,
8242 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8243 tmode != VOIDmode ? tmode : mode, 0);
8244 if (temp != 0)
8245 return temp;
8247 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8248 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8249 && original_target
8250 && REG_P (original_target)
8251 && (GET_MODE (original_target)
8252 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8254 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8255 VOIDmode, 0);
8257 /* If temp is constant, we can just compute the result. */
8258 if (GET_CODE (temp) == CONST_INT)
8260 if (INTVAL (temp) != 0)
8261 emit_move_insn (target, const1_rtx);
8262 else
8263 emit_move_insn (target, const0_rtx);
8265 return target;
8268 if (temp != original_target)
8270 enum machine_mode mode1 = GET_MODE (temp);
8271 if (mode1 == VOIDmode)
8272 mode1 = tmode != VOIDmode ? tmode : mode;
8274 temp = copy_to_mode_reg (mode1, temp);
8277 op1 = gen_label_rtx ();
8278 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8279 GET_MODE (temp), unsignedp, op1);
8280 emit_move_insn (temp, const1_rtx);
8281 emit_label (op1);
8282 return temp;
8285 /* If no set-flag instruction, must generate a conditional store
8286 into a temporary variable. Drop through and handle this
8287 like && and ||. */
8289 if (! ignore
8290 && (target == 0
8291 || modifier == EXPAND_STACK_PARM
8292 || ! safe_from_p (target, exp, 1)
8293 /* Make sure we don't have a hard reg (such as function's return
8294 value) live across basic blocks, if not optimizing. */
8295 || (!optimize && REG_P (target)
8296 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8297 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8299 if (target)
8300 emit_move_insn (target, const0_rtx);
8302 op1 = gen_label_rtx ();
8303 jumpifnot (exp, op1);
8305 if (target)
8306 emit_move_insn (target, const1_rtx);
8308 emit_label (op1);
8309 return ignore ? const0_rtx : target;
8311 case TRUTH_NOT_EXPR:
8312 if (modifier == EXPAND_STACK_PARM)
8313 target = 0;
8314 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8315 /* The parser is careful to generate TRUTH_NOT_EXPR
8316 only with operands that are always zero or one. */
8317 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8318 target, 1, OPTAB_LIB_WIDEN);
8319 gcc_assert (temp);
8320 return temp;
8322 case STATEMENT_LIST:
8324 tree_stmt_iterator iter;
8326 gcc_assert (ignore);
8328 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8329 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8331 return const0_rtx;
8333 case COND_EXPR:
8334 /* A COND_EXPR with its type being VOID_TYPE represents a
8335 conditional jump and is handled in
8336 expand_gimple_cond_expr. */
8337 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8339 /* Note that COND_EXPRs whose type is a structure or union
8340 are required to be constructed to contain assignments of
8341 a temporary variable, so that we can evaluate them here
8342 for side effect only. If type is void, we must do likewise. */
8344 gcc_assert (!TREE_ADDRESSABLE (type)
8345 && !ignore
8346 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8347 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8349 /* If we are not to produce a result, we have no target. Otherwise,
8350 if a target was specified use it; it will not be used as an
8351 intermediate target unless it is safe. If no target, use a
8352 temporary. */
8354 if (modifier != EXPAND_STACK_PARM
8355 && original_target
8356 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8357 && GET_MODE (original_target) == mode
8358 #ifdef HAVE_conditional_move
8359 && (! can_conditionally_move_p (mode)
8360 || REG_P (original_target))
8361 #endif
8362 && !MEM_P (original_target))
8363 temp = original_target;
8364 else
8365 temp = assign_temp (type, 0, 0, 1);
8367 do_pending_stack_adjust ();
8368 NO_DEFER_POP;
8369 op0 = gen_label_rtx ();
8370 op1 = gen_label_rtx ();
8371 jumpifnot (TREE_OPERAND (exp, 0), op0);
8372 store_expr (TREE_OPERAND (exp, 1), temp,
8373 modifier == EXPAND_STACK_PARM);
8375 emit_jump_insn (gen_jump (op1));
8376 emit_barrier ();
8377 emit_label (op0);
8378 store_expr (TREE_OPERAND (exp, 2), temp,
8379 modifier == EXPAND_STACK_PARM);
8381 emit_label (op1);
8382 OK_DEFER_POP;
8383 return temp;
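/* A VEC_COND_EXPR selects, element by element, between its second and
   third operands under control of a vector comparison.  The expansion
   is delegated to expand_vec_cond_expr, which typically maps it onto
   the target's vcond pattern when one exists.  */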
8385 case VEC_COND_EXPR:
8386 target = expand_vec_cond_expr (exp, target);
8387 return target;
8389 case MODIFY_EXPR:
8391 tree lhs = TREE_OPERAND (exp, 0);
8392 tree rhs = TREE_OPERAND (exp, 1);
8394 gcc_assert (ignore);
8396 /* Check for |= or &= of a bitfield of size one into another bitfield
8397 of size 1. In this case, (unless we need the result of the
8398 assignment) we can do this more efficiently with a
8399 test followed by an assignment, if necessary.
8401 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8402 things change so we do, this code should be enhanced to
8403 support it. */
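/* For example, for `a.x |= b.y' where both X and Y are one-bit fields,
   this tests b.y, skips the store when it is zero and otherwise stores
   1 into a.x, avoiding a read-modify-write of a.x.  */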
8404 if (TREE_CODE (lhs) == COMPONENT_REF
8405 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8406 || TREE_CODE (rhs) == BIT_AND_EXPR)
8407 && TREE_OPERAND (rhs, 0) == lhs
8408 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8409 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8410 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8412 rtx label = gen_label_rtx ();
8414 do_jump (TREE_OPERAND (rhs, 1),
8415 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8416 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8417 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8418 (TREE_CODE (rhs) == BIT_IOR_EXPR
8419 ? integer_one_node
8420 : integer_zero_node)));
8421 do_pending_stack_adjust ();
8422 emit_label (label);
8423 return const0_rtx;
8426 expand_assignment (lhs, rhs);
8428 return const0_rtx;
8431 case RETURN_EXPR:
8432 if (!TREE_OPERAND (exp, 0))
8433 expand_null_return ();
8434 else
8435 expand_return (TREE_OPERAND (exp, 0));
8436 return const0_rtx;
8438 case ADDR_EXPR:
8439 return expand_expr_addr_expr (exp, target, tmode, modifier);
8441 case COMPLEX_EXPR:
8442 /* Get the rtx code of the operands. */
8443 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8444 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8446 if (!target)
8447 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8449 /* Move the real (op0) and imaginary (op1) parts to their location. */
8450 write_complex_part (target, op0, false);
8451 write_complex_part (target, op1, true);
8453 return target;
8455 case REALPART_EXPR:
8456 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8457 return read_complex_part (op0, false);
8459 case IMAGPART_EXPR:
8460 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8461 return read_complex_part (op0, true);
8463 case RESX_EXPR:
8464 expand_resx_expr (exp);
8465 return const0_rtx;
8467 case TRY_CATCH_EXPR:
8468 case CATCH_EXPR:
8469 case EH_FILTER_EXPR:
8470 case TRY_FINALLY_EXPR:
8471 /* Lowered by tree-eh.c. */
8472 gcc_unreachable ();
8474 case WITH_CLEANUP_EXPR:
8475 case CLEANUP_POINT_EXPR:
8476 case TARGET_EXPR:
8477 case CASE_LABEL_EXPR:
8478 case VA_ARG_EXPR:
8479 case BIND_EXPR:
8480 case INIT_EXPR:
8481 case CONJ_EXPR:
8482 case COMPOUND_EXPR:
8483 case PREINCREMENT_EXPR:
8484 case PREDECREMENT_EXPR:
8485 case POSTINCREMENT_EXPR:
8486 case POSTDECREMENT_EXPR:
8487 case LOOP_EXPR:
8488 case EXIT_EXPR:
8489 case TRUTH_ANDIF_EXPR:
8490 case TRUTH_ORIF_EXPR:
8491 /* Lowered by gimplify.c. */
8492 gcc_unreachable ();
8494 case EXC_PTR_EXPR:
8495 return get_exception_pointer (cfun);
8497 case FILTER_EXPR:
8498 return get_exception_filter (cfun);
8500 case FDESC_EXPR:
8501 /* Function descriptors are not valid except for as
8502 initialization constants, and should not be expanded. */
8503 gcc_unreachable ();
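/* SWITCH_EXPR, LABEL_EXPR and ASM_EXPR are statements: they are
   expanded purely for their side effects and yield const0_rtx.  */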
8505 case SWITCH_EXPR:
8506 expand_case (exp);
8507 return const0_rtx;
8509 case LABEL_EXPR:
8510 expand_label (TREE_OPERAND (exp, 0));
8511 return const0_rtx;
8513 case ASM_EXPR:
8514 expand_asm_expr (exp);
8515 return const0_rtx;
8517 case WITH_SIZE_EXPR:
8518 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8519 have pulled out the size to use in whatever context it needed. */
8520 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8521 modifier, alt_rtl);
8523 case REALIGN_LOAD_EXPR:
8525 tree oprnd0 = TREE_OPERAND (exp, 0);
8526 tree oprnd1 = TREE_OPERAND (exp, 1);
8527 tree oprnd2 = TREE_OPERAND (exp, 2);
8528 rtx op2;
8530 this_optab = optab_for_tree_code (code, type);
8531 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8532 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8533 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8534 target, unsignedp);
8535 gcc_assert (temp);
8536 return temp;
8539 case WIDEN_SUM_EXPR:
8541 tree oprnd0 = TREE_OPERAND (exp, 0);
8542 tree oprnd1 = TREE_OPERAND (exp, 1);
8544 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8545 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8546 target, unsignedp);
8547 return target;
8550 case MULT_HI_EXPR:
8552 goto binop;
8555 case SAD_EXPR:
8556 case DOT_PROD_EXPR:
8558 tree oprnd0 = TREE_OPERAND (exp, 0);
8559 tree oprnd1 = TREE_OPERAND (exp, 1);
8560 tree oprnd2 = TREE_OPERAND (exp, 2);
8561 rtx op2;
8563 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8564 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8565 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8566 target, unsignedp);
8567 return target;
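/* The reduction codes compute the maximum, minimum or sum over all
   elements of the vector operand.  They are unary at the tree level,
   so they go through expand_unop with the optab chosen by
   optab_for_tree_code.  */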
8570 case REDUC_MAX_EXPR:
8571 case REDUC_MIN_EXPR:
8572 case REDUC_PLUS_EXPR:
8574 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8575 this_optab = optab_for_tree_code (code, type);
8576 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8577 gcc_assert (temp);
8578 return temp;
8581 case VEC_EXTRACT_EVEN_EXPR:
8582 case VEC_EXTRACT_ODD_EXPR:
8584 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8585 NULL_RTX, &op0, &op1, 0);
8586 this_optab = optab_for_tree_code (code, type);
8587 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8588 OPTAB_WIDEN);
8589 gcc_assert (temp);
8590 return temp;
8593 case VEC_INTERLEAVE_HIGH_EXPR:
8594 case VEC_INTERLEAVE_LOW_EXPR:
8596 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8597 NULL_RTX, &op0, &op1, 0);
8598 this_optab = optab_for_tree_code (code, type);
8599 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8600 OPTAB_WIDEN);
8601 gcc_assert (temp);
8602 return temp;
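/* VEC_LSHIFT_EXPR and VEC_RSHIFT_EXPR shift the vector operand as a
   whole rather than element-wise, and have a dedicated expander.  */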
8605 case VEC_LSHIFT_EXPR:
8606 case VEC_RSHIFT_EXPR:
8608 target = expand_vec_shift_expr (exp, target);
8609 return target;
8612 case VEC_UNPACK_HI_EXPR:
8613 case VEC_UNPACK_LO_EXPR:
8615 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8616 this_optab = optab_for_tree_code (code, type);
8617 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
8618 target, unsignedp);
8619 gcc_assert (temp);
8620 return temp;
8623 case VEC_WIDEN_MULT_HI_EXPR:
8624 case VEC_WIDEN_MULT_LO_EXPR:
8626 tree oprnd0 = TREE_OPERAND (exp, 0);
8627 tree oprnd1 = TREE_OPERAND (exp, 1);
8629 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8630 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
8631 target, unsignedp);
8632 gcc_assert (target);
8633 return target;
8636 case VEC_PACK_MOD_EXPR:
8637 case VEC_PACK_SAT_EXPR:
8639 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8640 goto binop;
8643 default:
8644 return lang_hooks.expand_expr (exp, original_target, tmode,
8645 modifier, alt_rtl);
8648 /* Here to do an ordinary binary operator. */
8649 binop:
8650 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8651 subtarget, &op0, &op1, 0);
8652 binop2:
8653 this_optab = optab_for_tree_code (code, type);
8654 binop3:
8655 if (modifier == EXPAND_STACK_PARM)
8656 target = 0;
8657 temp = expand_binop (mode, this_optab, op0, op1, target,
8658 unsignedp, OPTAB_LIB_WIDEN);
8659 gcc_assert (temp);
8660 return REDUCE_BIT_FIELD (temp);
8662 #undef REDUCE_BIT_FIELD
8664 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8665 signedness of TYPE), possibly returning the result in TARGET. */
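/* For example, a signed value of precision 5 held in SImode is reduced
   by shifting left 27 bits and then arithmetically right 27 bits; an
   unsigned value is simply masked with (1 << 5) - 1.  */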
8666 static rtx
8667 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8669 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8670 if (target && GET_MODE (target) != GET_MODE (exp))
8671 target = 0;
8672 if (TYPE_UNSIGNED (type))
8674 rtx mask;
8675 if (prec < HOST_BITS_PER_WIDE_INT)
8676 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8677 GET_MODE (exp));
8678 else
8679 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8680 ((unsigned HOST_WIDE_INT) 1
8681 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8682 GET_MODE (exp));
8683 return expand_and (GET_MODE (exp), exp, mask, target);
8685 else
8687 tree count = build_int_cst (NULL_TREE,
8688 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8689 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8690 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8694 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8695 when applied to the address of EXP produces an address known to be
8696 aligned more than BIGGEST_ALIGNMENT. */
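/* Such an offset typically has the form (-(sizetype) &EXP) & (ALIGN - 1)
   where ALIGN is a power of two greater than BIGGEST_ALIGNMENT; adding
   it to the address of EXP rounds that address up to an ALIGN
   boundary.  */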
8698 static int
8699 is_aligning_offset (tree offset, tree exp)
8701 /* Strip off any conversions. */
8702 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8703 || TREE_CODE (offset) == NOP_EXPR
8704 || TREE_CODE (offset) == CONVERT_EXPR)
8705 offset = TREE_OPERAND (offset, 0);
8707 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8708 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8709 if (TREE_CODE (offset) != BIT_AND_EXPR
8710 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8711 || compare_tree_int (TREE_OPERAND (offset, 1),
8712 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8713 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8714 return 0;
8716 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8717 It must be NEGATE_EXPR. Then strip any more conversions. */
8718 offset = TREE_OPERAND (offset, 0);
8719 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8720 || TREE_CODE (offset) == NOP_EXPR
8721 || TREE_CODE (offset) == CONVERT_EXPR)
8722 offset = TREE_OPERAND (offset, 0);
8724 if (TREE_CODE (offset) != NEGATE_EXPR)
8725 return 0;
8727 offset = TREE_OPERAND (offset, 0);
8728 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8729 || TREE_CODE (offset) == NOP_EXPR
8730 || TREE_CODE (offset) == CONVERT_EXPR)
8731 offset = TREE_OPERAND (offset, 0);
8733 /* This must now be the address of EXP. */
8734 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8737 /* Return the tree node if an ARG corresponds to a string constant or zero
8738 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8739 in bytes within the string that ARG is accessing. The type of the
8740 offset will be `sizetype'. */
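/* For example, for ARG == &"hello"[2] this returns the STRING_CST
   "hello" and sets *PTR_OFFSET to 2.  A VAR_DECL whose initializer is a
   string literal is handled the same way, returning its DECL_INITIAL.  */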
8742 tree
8743 string_constant (tree arg, tree *ptr_offset)
8745 tree array, offset;
8746 STRIP_NOPS (arg);
8748 if (TREE_CODE (arg) == ADDR_EXPR)
8750 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8752 *ptr_offset = size_zero_node;
8753 return TREE_OPERAND (arg, 0);
8755 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8757 array = TREE_OPERAND (arg, 0);
8758 offset = size_zero_node;
8760 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8762 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8763 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8764 if (TREE_CODE (array) != STRING_CST
8765 && TREE_CODE (array) != VAR_DECL)
8766 return 0;
8768 else
8769 return 0;
8771 else if (TREE_CODE (arg) == PLUS_EXPR)
8773 tree arg0 = TREE_OPERAND (arg, 0);
8774 tree arg1 = TREE_OPERAND (arg, 1);
8776 STRIP_NOPS (arg0);
8777 STRIP_NOPS (arg1);
8779 if (TREE_CODE (arg0) == ADDR_EXPR
8780 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8781 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8783 array = TREE_OPERAND (arg0, 0);
8784 offset = arg1;
8786 else if (TREE_CODE (arg1) == ADDR_EXPR
8787 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8788 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8790 array = TREE_OPERAND (arg1, 0);
8791 offset = arg0;
8793 else
8794 return 0;
8796 else
8797 return 0;
8799 if (TREE_CODE (array) == STRING_CST)
8801 *ptr_offset = convert (sizetype, offset);
8802 return array;
8804 else if (TREE_CODE (array) == VAR_DECL)
8806 int length;
8808 /* Variables initialized to string literals can be handled too. */
8809 if (DECL_INITIAL (array) == NULL_TREE
8810 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8811 return 0;
8813 /* Only usable if the variable is read-only, non-volatile and binds locally. */
8814 if (! TREE_READONLY (array)
8815 || TREE_SIDE_EFFECTS (array)
8816 || ! targetm.binds_local_p (array))
8817 return 0;
8819 /* Avoid const char foo[4] = "abcde"; */
8820 if (DECL_SIZE_UNIT (array) == NULL_TREE
8821 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8822 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8823 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8824 return 0;
8826 /* If the variable is bigger than the string literal, OFFSET must be constant
8827 and within the bounds of the string literal. */
8828 offset = convert (sizetype, offset);
8829 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8830 && (! host_integerp (offset, 1)
8831 || compare_tree_int (offset, length) >= 0))
8832 return 0;
8834 *ptr_offset = offset;
8835 return DECL_INITIAL (array);
8838 return 0;
8841 /* Generate code to calculate EXP using a store-flag instruction
8842 and return an rtx for the result. EXP is either a comparison
8843 or a TRUTH_NOT_EXPR whose operand is a comparison.
8845 If TARGET is nonzero, store the result there if convenient.
8847 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8848 cheap.
8850 Return zero if there is no suitable set-flag instruction
8851 available on this machine.
8853 Once expand_expr has been called on the arguments of the comparison,
8854 we are committed to doing the store flag, since it is not safe to
8855 re-evaluate the expression. We emit the store-flag insn by calling
8856 emit_store_flag, but only expand the arguments if we have a reason
8857 to believe that emit_store_flag will be successful. If we think that
8858 it will, but it isn't, we have to simulate the store-flag with a
8859 set/jump/set sequence. */
8861 static rtx
8862 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8864 enum rtx_code code;
8865 tree arg0, arg1, type;
8866 tree tem;
8867 enum machine_mode operand_mode;
8868 int invert = 0;
8869 int unsignedp;
8870 rtx op0, op1;
8871 enum insn_code icode;
8872 rtx subtarget = target;
8873 rtx result, label;
8875 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8876 result at the end. We can't simply invert the test since it would
8877 have already been inverted if it were valid. This case occurs for
8878 some floating-point comparisons. */
8880 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8881 invert = 1, exp = TREE_OPERAND (exp, 0);
8883 arg0 = TREE_OPERAND (exp, 0);
8884 arg1 = TREE_OPERAND (exp, 1);
8886 /* Don't crash if the comparison was erroneous. */
8887 if (arg0 == error_mark_node || arg1 == error_mark_node)
8888 return const0_rtx;
8890 type = TREE_TYPE (arg0);
8891 operand_mode = TYPE_MODE (type);
8892 unsignedp = TYPE_UNSIGNED (type);
8894 /* We won't bother with BLKmode store-flag operations because it would mean
8895 passing a lot of information to emit_store_flag. */
8896 if (operand_mode == BLKmode)
8897 return 0;
8899 /* We won't bother with store-flag operations involving function pointers
8900 when function pointers must be canonicalized before comparisons. */
8901 #ifdef HAVE_canonicalize_funcptr_for_compare
8902 if (HAVE_canonicalize_funcptr_for_compare
8903 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8904 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8905 == FUNCTION_TYPE))
8906 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8907 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8908 == FUNCTION_TYPE))))
8909 return 0;
8910 #endif
8912 STRIP_NOPS (arg0);
8913 STRIP_NOPS (arg1);
8915 /* Get the rtx comparison code to use. We know that EXP is a comparison
8916 operation of some type. Some comparisons against 1 and -1 can be
8917 converted to comparisons with zero. Do so here so that the tests
8918 below will be aware that we have a comparison with zero. These
8919 tests will not catch constants in the first operand, but constants
8920 are rarely passed as the first operand. */
8922 switch (TREE_CODE (exp))
8924 case EQ_EXPR:
8925 code = EQ;
8926 break;
8927 case NE_EXPR:
8928 code = NE;
8929 break;
8930 case LT_EXPR:
8931 if (integer_onep (arg1))
8932 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8933 else
8934 code = unsignedp ? LTU : LT;
8935 break;
8936 case LE_EXPR:
8937 if (! unsignedp && integer_all_onesp (arg1))
8938 arg1 = integer_zero_node, code = LT;
8939 else
8940 code = unsignedp ? LEU : LE;
8941 break;
8942 case GT_EXPR:
8943 if (! unsignedp && integer_all_onesp (arg1))
8944 arg1 = integer_zero_node, code = GE;
8945 else
8946 code = unsignedp ? GTU : GT;
8947 break;
8948 case GE_EXPR:
8949 if (integer_onep (arg1))
8950 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8951 else
8952 code = unsignedp ? GEU : GE;
8953 break;
8955 case UNORDERED_EXPR:
8956 code = UNORDERED;
8957 break;
8958 case ORDERED_EXPR:
8959 code = ORDERED;
8960 break;
8961 case UNLT_EXPR:
8962 code = UNLT;
8963 break;
8964 case UNLE_EXPR:
8965 code = UNLE;
8966 break;
8967 case UNGT_EXPR:
8968 code = UNGT;
8969 break;
8970 case UNGE_EXPR:
8971 code = UNGE;
8972 break;
8973 case UNEQ_EXPR:
8974 code = UNEQ;
8975 break;
8976 case LTGT_EXPR:
8977 code = LTGT;
8978 break;
8980 default:
8981 gcc_unreachable ();
8984 /* Put a constant second. */
8985 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8987 tem = arg0; arg0 = arg1; arg1 = tem;
8988 code = swap_condition (code);
8991 /* If this is an equality or inequality test of a single bit, we can
8992 do this by shifting the bit being tested to the low-order bit and
8993 masking the result with the constant 1. If the condition was EQ,
8994 we xor it with 1. This does not require an scc insn and is faster
8995 than an scc insn even if we have it.
8997 The code to make this transformation was moved into fold_single_bit_test,
8998 so we just call into the folder and expand its result. */
9000 if ((code == NE || code == EQ)
9001 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9002 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9004 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9005 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9006 arg0, arg1, type),
9007 target, VOIDmode, EXPAND_NORMAL);
9010 /* Now see if we are likely to be able to do this. Return if not. */
9011 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9012 return 0;
9014 icode = setcc_gen_code[(int) code];
9015 if (icode == CODE_FOR_nothing
9016 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9018 /* We can only do this if it is one of the special cases that
9019 can be handled without an scc insn. */
9020 if ((code == LT && integer_zerop (arg1))
9021 || (! only_cheap && code == GE && integer_zerop (arg1)))
9023 else if (! only_cheap && (code == NE || code == EQ)
9024 && TREE_CODE (type) != REAL_TYPE
9025 && ((abs_optab->handlers[(int) operand_mode].insn_code
9026 != CODE_FOR_nothing)
9027 || (ffs_optab->handlers[(int) operand_mode].insn_code
9028 != CODE_FOR_nothing)))
9030 else
9031 return 0;
9034 if (! get_subtarget (target)
9035 || GET_MODE (subtarget) != operand_mode)
9036 subtarget = 0;
9038 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9040 if (target == 0)
9041 target = gen_reg_rtx (mode);
9043 result = emit_store_flag (target, code, op0, op1,
9044 operand_mode, unsignedp, 1);
9046 if (result)
9048 if (invert)
9049 result = expand_binop (mode, xor_optab, result, const1_rtx,
9050 result, 0, OPTAB_LIB_WIDEN);
9051 return result;
9054 /* If this failed, we have to do this with set/compare/jump/set code. */
9055 if (!REG_P (target)
9056 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9057 target = gen_reg_rtx (GET_MODE (target));
9059 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9060 result = compare_from_rtx (op0, op1, code, unsignedp,
9061 operand_mode, NULL_RTX);
9062 if (GET_CODE (result) == CONST_INT)
9063 return (((result == const0_rtx && ! invert)
9064 || (result != const0_rtx && invert))
9065 ? const0_rtx : const1_rtx);
9067 /* The code of RESULT may not match CODE if compare_from_rtx
9068 decided to swap its operands and reverse the original code.
9070 We know that compare_from_rtx returns either a CONST_INT or
9071 a new comparison code, so it is safe to just extract the
9072 code from RESULT. */
9073 code = GET_CODE (result);
9075 label = gen_label_rtx ();
9076 gcc_assert (bcc_gen_fctn[(int) code]);
9078 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9079 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9080 emit_label (label);
9082 return target;
9086 /* Stubs in case we haven't got a casesi insn. */
9087 #ifndef HAVE_casesi
9088 # define HAVE_casesi 0
9089 # define gen_casesi(a, b, c, d, e) (0)
9090 # define CODE_FOR_casesi CODE_FOR_nothing
9091 #endif
9093 /* If the machine does not have a case insn that compares the bounds,
9094 this means extra overhead for dispatch tables, which raises the
9095 threshold for using them. */
9096 #ifndef CASE_VALUES_THRESHOLD
9097 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9098 #endif /* CASE_VALUES_THRESHOLD */
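/* Return the smallest number of different case values for which it is
   best to use a jump table instead of a tree of conditional branches.  */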
9100 unsigned int
9101 case_values_threshold (void)
9103 return CASE_VALUES_THRESHOLD;
9106 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9107 0 otherwise (i.e. if there is no casesi instruction). */
9108 int
9109 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9110 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9112 enum machine_mode index_mode = SImode;
9113 int index_bits = GET_MODE_BITSIZE (index_mode);
9114 rtx op1, op2, index;
9115 enum machine_mode op_mode;
9117 if (! HAVE_casesi)
9118 return 0;
9120 /* Convert the index to SImode. */
9121 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9123 enum machine_mode omode = TYPE_MODE (index_type);
9124 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9126 /* We must handle the endpoints in the original mode. */
9127 index_expr = build2 (MINUS_EXPR, index_type,
9128 index_expr, minval);
9129 minval = integer_zero_node;
9130 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9131 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9132 omode, 1, default_label);
9133 /* Now we can safely truncate. */
9134 index = convert_to_mode (index_mode, index, 0);
9136 else
9138 if (TYPE_MODE (index_type) != index_mode)
9140 index_expr = convert (lang_hooks.types.type_for_size
9141 (index_bits, 0), index_expr);
9142 index_type = TREE_TYPE (index_expr);
9145 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9148 do_pending_stack_adjust ();
9150 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9151 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9152 (index, op_mode))
9153 index = copy_to_mode_reg (op_mode, index);
9155 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9157 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9158 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9159 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9160 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9161 (op1, op_mode))
9162 op1 = copy_to_mode_reg (op_mode, op1);
9164 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9166 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9167 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9168 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9169 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9170 (op2, op_mode))
9171 op2 = copy_to_mode_reg (op_mode, op2);
9173 emit_jump_insn (gen_casesi (index, op1, op2,
9174 table_label, default_label));
9175 return 1;
9176 }
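/* Plain-C sketch of the wide-index path above (hypothetical helper, for
   illustration only): the subtraction of MINVAL and the unsigned range
   check are done in the wide mode first, so the later truncation to
   SImode cannot change which case is selected.  */

static unsigned int
example_narrow_wide_index (unsigned long long index, unsigned long long minval,
                           unsigned long long range, int *out_of_range)
{
  unsigned long long rel = index - minval;  /* endpoints handled wide        */
  *out_of_range = range < rel;              /* same test as the LTU jump     */
  return (unsigned int) rel;                /* now the truncation is safe    */
}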
9178 /* Attempt to generate a tablejump instruction; same concept. */
9179 #ifndef HAVE_tablejump
9180 #define HAVE_tablejump 0
9181 #define gen_tablejump(x, y) (0)
9182 #endif
9184 /* Subroutine of the next function.
9186 INDEX is the value being switched on, with the lowest value
9187 in the table already subtracted.
9188 MODE is its expected mode (needed if INDEX is constant).
9189 RANGE is the length of the jump table.
9190 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9192 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9193 index value is out of range. */
9195 static void
9196 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9197 rtx default_label)
9199 rtx temp, vector;
9201 if (INTVAL (range) > cfun->max_jumptable_ents)
9202 cfun->max_jumptable_ents = INTVAL (range);
9204 /* Do an unsigned comparison (in the proper mode) between the index
9205 expression and the value which represents the length of the range.
9206 Since we just finished subtracting the lower bound of the range
9207 from the index expression, this comparison allows us to simultaneously
9208 check that the original index expression value is both greater than
9209 or equal to the minimum value of the range and less than or equal to
9210 the maximum value of the range. */
9212 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9213 default_label);
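/* Worked example (illustrative numbers only): if the case labels span
   10 .. 15, then RANGE is 5 and 10 has already been subtracted from INDEX.
   An original value of 12 becomes 2, which is not GTU 5, so it falls
   through to the table; an original value of 7 wraps around to a huge
   unsigned number, which is GTU 5, so it jumps to DEFAULT_LABEL.  One
   unsigned comparison therefore performs both bounds checks.  */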
9215 /* If index is in range, it must fit in Pmode.
9216 Convert to Pmode so we can index with it. */
9217 if (mode != Pmode)
9218 index = convert_to_mode (Pmode, index, 1);
9220 /* Don't let a MEM slip through, because then INDEX that comes
9221 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9222 and break_out_memory_refs will go to work on it and mess it up. */
9223 #ifdef PIC_CASE_VECTOR_ADDRESS
9224 if (flag_pic && !REG_P (index))
9225 index = copy_to_mode_reg (Pmode, index);
9226 #endif
9228 /* If flag_force_addr were to affect this address
9229 it could interfere with the tricky assumptions made
9230 about addresses that contain label-refs,
9231 which may be valid only very near the tablejump itself. */
9232 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9233 GET_MODE_SIZE, because that gives the size of each dispatch-table entry.
9234 The other uses should all be Pmode, because they are addresses. This
9235 code could fail if addresses and table entries are not the same size. */
9236 index = gen_rtx_PLUS (Pmode,
9237 gen_rtx_MULT (Pmode, index,
9238 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9239 gen_rtx_LABEL_REF (Pmode, table_label));
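/* In other words, the entry for INDEX is fetched from the address
   TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE), before any
   PIC adjustment made just below.  */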
9240 #ifdef PIC_CASE_VECTOR_ADDRESS
9241 if (flag_pic)
9242 index = PIC_CASE_VECTOR_ADDRESS (index);
9243 else
9244 #endif
9245 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9246 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9247 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9248 convert_move (temp, vector, 0);
9250 emit_jump_insn (gen_tablejump (temp, table_label));
9252 /* If we are generating PIC code or if the table is PC-relative, the
9253 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9254 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9255 emit_barrier ();
9256 }
9258 int
9259 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9260 rtx table_label, rtx default_label)
9262 rtx index;
9264 if (! HAVE_tablejump)
9265 return 0;
9267 index_expr = fold_build2 (MINUS_EXPR, index_type,
9268 convert (index_type, index_expr),
9269 convert (index_type, minval));
9270 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9271 do_pending_stack_adjust ();
9273 do_tablejump (index, TYPE_MODE (index_type),
9274 convert_modes (TYPE_MODE (index_type),
9275 TYPE_MODE (TREE_TYPE (range)),
9276 expand_expr (range, NULL_RTX,
9277 VOIDmode, 0),
9278 TYPE_UNSIGNED (TREE_TYPE (range))),
9279 table_label, default_label);
9280 return 1;
9281 }
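/* Plain-C sketch of the whole tablejump scheme implemented above
   (hypothetical helper using function pointers in place of code labels;
   the real code indexes a label vector and jumps through the entry).  */

static void
example_table_dispatch (unsigned int index, unsigned int minval,
                        unsigned int range, void (*table[]) (void),
                        void (*default_handler) (void))
{
  unsigned int rel = index - minval;    /* lowest value already subtracted  */
  if (rel > range)                      /* single unsigned bounds check     */
    default_handler ();
  else
    table[rel] ();                      /* "tablejump" through the entry    */
}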
9283 /* Nonzero if the mode is a valid vector mode for this architecture.
9284 This returns nonzero even if there is no hardware support for the
9285 vector mode, but we can emulate with narrower modes. */
9287 int
9288 vector_mode_valid_p (enum machine_mode mode)
9290 enum mode_class class = GET_MODE_CLASS (mode);
9291 enum machine_mode innermode;
9293 /* Reject anything that is not an integer or floating-point vector mode. */
9294 if (class != MODE_VECTOR_INT
9295 && class != MODE_VECTOR_FLOAT)
9296 return 0;
9298 /* Hardware support. Woo hoo! */
9299 if (targetm.vector_mode_supported_p (mode))
9300 return 1;
9302 innermode = GET_MODE_INNER (mode);
9304 /* We should probably also return 1 if V4DI is requested and DI is not
9305 supported but V2DI is; however, that case is very unlikely. */
9307 /* If we have support for the inner mode, we can safely emulate it.
9308 We may not have V2DI, but we can emulate it with a pair of DIs. */
9309 return targetm.scalar_mode_supported_p (innermode);
9310 }
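/* Plain-C sketch of the emulation this relies on (hypothetical helper,
   assuming a 64-bit "long long" stands in for DImode): an operation on a
   vector mode with no hardware support, say V2DI, can be expanded as a
   pair of operations in the supported inner mode.  */

static void
example_emulated_v2di_add (long long dest[2], const long long a[2],
                           const long long b[2])
{
  dest[0] = a[0] + b[0];        /* first DImode element  */
  dest[1] = a[1] + b[1];        /* second DImode element */
}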
9312 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9313 static rtx
9314 const_vector_from_tree (tree exp)
9316 rtvec v;
9317 int units, i;
9318 tree link, elt;
9319 enum machine_mode inner, mode;
9321 mode = TYPE_MODE (TREE_TYPE (exp));
9323 if (initializer_zerop (exp))
9324 return CONST0_RTX (mode);
9326 units = GET_MODE_NUNITS (mode);
9327 inner = GET_MODE_INNER (mode);
9329 v = rtvec_alloc (units);
9331 link = TREE_VECTOR_CST_ELTS (exp);
9332 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9334 elt = TREE_VALUE (link);
9336 if (TREE_CODE (elt) == REAL_CST)
9337 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9338 inner);
9339 else
9340 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9341 TREE_INT_CST_HIGH (elt),
9342 inner);
9345 /* Initialize remaining elements to 0. */
9346 for (; i < units; ++i)
9347 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9349 return gen_rtx_CONST_VECTOR (mode, v);
9350 }
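/* Plain-C sketch of the padding behaviour above (hypothetical helper):
   if the VECTOR_CST chain lists fewer elements than the mode has units,
   the trailing slots of the CONST_VECTOR are filled with zero.  */

static void
example_pad_constant_vector (int slots[], int units, const int elts[],
                             int n_elts)
{
  int i;
  for (i = 0; i < n_elts && i < units; ++i)
    slots[i] = elts[i];         /* elements taken from the constant's chain */
  for (; i < units; ++i)
    slots[i] = 0;               /* remaining elements default to zero       */
}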
9351 #include "gt-expr.h"