1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
 60    They should be processed from last to first if the stack and args
 61    grow in opposite directions, but only if we have push insns.  */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
 92 struct move_by_pieces
 93 {
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
 104   int reverse;
 105 };
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
 110 struct store_by_pieces
 111 {
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
 120   int reverse;
 121 };
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, alias_set_type);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, alias_set_type, bool);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
149 static int is_aligning_offset (const_tree, const_tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* Record for each mode whether we can float-extend from memory. */
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) MOVE_RATIO)
195 #endif
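
/* Illustrative sketch (a hypothetical helper, not used elsewhere in this
   file): how a caller would typically consult MOVE_BY_PIECES_P to choose
   between an inline piecewise copy and the general block-move machinery.
   The real decision of this kind is made in emit_block_move_hints below.  */
static void
example_choose_block_copy (rtx x, rtx y, rtx size, unsigned int align)
{
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P ((unsigned HOST_WIDE_INT) INTVAL (size), align))
    /* Cheap enough in move insns: expand the copy inline, piece by piece.  */
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else
    /* Otherwise let a movmem pattern or a memcpy libcall handle it.  */
    emit_block_move (x, y, size, BLOCK_OP_NORMAL);
}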
197 /* This array records the insn_code of insns to perform block moves. */
198 enum insn_code movmem_optab[NUM_MACHINE_MODES];
200 /* This array records the insn_code of insns to perform block sets. */
201 enum insn_code setmem_optab[NUM_MACHINE_MODES];
203 /* These arrays record the insn_code of three different kinds of insns
204 to perform block compares. */
205 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209 /* Synchronization primitives. */
210 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
211 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
230 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
233 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
235 #ifndef SLOW_UNALIGNED_ACCESS
236 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
237 #endif
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab. */
242 void
243 init_expr_once (void)
245 rtx insn, pat;
246 enum machine_mode mode;
247 int num_clobbers;
248 rtx mem, mem1;
249 rtx reg;
251 /* Try indexing by frame ptr and try by stack ptr.
252 It is known that on the Convex the stack ptr isn't a valid index.
253 With luck, one or the other is valid on any machine. */
254 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
255 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257 /* A scratch register we can modify in-place below to avoid
258 useless RTL allocations. */
259 reg = gen_rtx_REG (VOIDmode, -1);
261 insn = rtx_alloc (INSN);
262 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
263 PATTERN (insn) = pat;
265 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
266 mode = (enum machine_mode) ((int) mode + 1))
268 int regno;
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
273 PUT_MODE (reg, mode);
275 /* See if there is some register that can be used in this mode and
276 directly loaded or stored from memory. */
278 if (mode != VOIDmode && mode != BLKmode)
279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
280 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
281 regno++)
283 if (! HARD_REGNO_MODE_OK (regno, mode))
284 continue;
286 SET_REGNO (reg, regno);
288 SET_SRC (pat) = mem;
289 SET_DEST (pat) = reg;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_load[(int) mode] = 1;
293 SET_SRC (pat) = mem1;
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
298 SET_SRC (pat) = reg;
299 SET_DEST (pat) = mem;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_store[(int) mode] = 1;
303 SET_SRC (pat) = reg;
304 SET_DEST (pat) = mem1;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
310 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
313 mode = GET_MODE_WIDER_MODE (mode))
315 enum machine_mode srcmode;
316 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
317 srcmode = GET_MODE_WIDER_MODE (srcmode))
319 enum insn_code ic;
321 ic = can_extend_p (mode, srcmode, 0);
322 if (ic == CODE_FOR_nothing)
323 continue;
325 PUT_MODE (mem, srcmode);
327 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
328 float_extend_from_mem[mode][srcmode] = true;
333 /* This is run at the start of compiling a function. */
335 void
336 init_expr (void)
338 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 /* Copy data from FROM to TO, where the machine modes are not the same.
342 Both modes may be integer, or both may be floating.
343 UNSIGNEDP should be nonzero if FROM is an unsigned type.
344 This causes zero-extension instead of sign-extension. */
346 void
347 convert_move (rtx to, rtx from, int unsignedp)
349 enum machine_mode to_mode = GET_MODE (to);
350 enum machine_mode from_mode = GET_MODE (from);
351 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
352 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
353 enum insn_code code;
354 rtx libcall;
356 /* rtx code for making an equivalent value. */
357 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
358 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361 gcc_assert (to_real == from_real);
362 gcc_assert (to_mode != BLKmode);
363 gcc_assert (from_mode != BLKmode);
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
385 emit_move_insn (to, from);
386 return;
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
393 if (VECTOR_MODE_P (to_mode))
394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
395 else
396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
398 emit_move_insn (to, from);
399 return;
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
409 if (to_real)
411 rtx value, insns;
412 convert_optab tab;
414 gcc_assert ((GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode))
416 || (DECIMAL_FLOAT_MODE_P (from_mode)
417 != DECIMAL_FLOAT_MODE_P (to_mode)));
419 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 /* Conversion between decimal float and binary float, same size. */
421 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
423 tab = sext_optab;
424 else
425 tab = trunc_optab;
427 /* Try converting directly if the insn is supported. */
429 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
430 if (code != CODE_FOR_nothing)
432 emit_unop_insn (code, to, from,
433 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
434 return;
437 /* Otherwise use a libcall. */
438 libcall = convert_optab_handler (tab, to_mode, from_mode)->libfunc;
440 /* Is this conversion implemented yet? */
441 gcc_assert (libcall);
443 start_sequence ();
444 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
445 1, from, from_mode);
446 insns = get_insns ();
447 end_sequence ();
448 emit_libcall_block (insns, to, value,
449 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
450 from)
451 : gen_rtx_FLOAT_EXTEND (to_mode, from));
452 return;
455 /* Handle pointer conversion. */ /* SPEE 900220. */
456 /* Targets are expected to provide conversion insns between PxImode and
457 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
458 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
460 enum machine_mode full_mode
461 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
463 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
464 != CODE_FOR_nothing);
466 if (full_mode != from_mode)
467 from = convert_to_mode (full_mode, from, unsignedp);
468 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
469 to, from, UNKNOWN);
470 return;
472 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
474 rtx new_from;
475 enum machine_mode full_mode
476 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
478 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
479 != CODE_FOR_nothing);
481 if (to_mode == full_mode)
483 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
484 to, from, UNKNOWN);
485 return;
488 new_from = gen_reg_rtx (full_mode);
489 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
490 new_from, from, UNKNOWN);
492 /* else proceed to integer conversions below. */
493 from_mode = full_mode;
494 from = new_from;
497 /* Now both modes are integers. */
499 /* Handle expanding beyond a word. */
500 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
503 rtx insns;
504 rtx lowpart;
505 rtx fill_value;
506 rtx lowfrom;
507 int i;
508 enum machine_mode lowpart_mode;
509 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
511 /* Try converting directly if the insn is supported. */
512 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
513 != CODE_FOR_nothing)
515 /* If FROM is a SUBREG, put it into a register. Do this
516 so that we always generate the same set of insns for
517 better cse'ing; if an intermediate assignment occurred,
518 we won't be doing the operation directly on the SUBREG. */
519 if (optimize > 0 && GET_CODE (from) == SUBREG)
520 from = force_reg (from_mode, from);
521 emit_unop_insn (code, to, from, equiv_code);
522 return;
524 /* Next, try converting via full word. */
525 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 != CODE_FOR_nothing))
529 if (REG_P (to))
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
535 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 emit_unop_insn (code, to,
537 gen_lowpart (word_mode, to), equiv_code);
538 return;
541 /* No special multiword conversion insn; do it by hand. */
542 start_sequence ();
544 /* Since we will turn this into a no conflict block, we must ensure
545 that the source does not overlap the target. */
547 if (reg_overlap_mentioned_p (to, from))
548 from = force_reg (from_mode, from);
550 /* Get a copy of FROM widened to a word, if necessary. */
551 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 lowpart_mode = word_mode;
553 else
554 lowpart_mode = from_mode;
556 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
558 lowpart = gen_lowpart (lowpart_mode, to);
559 emit_move_insn (lowpart, lowfrom);
561 /* Compute the value to put in each remaining word. */
562 if (unsignedp)
563 fill_value = const0_rtx;
564 else
566 #ifdef HAVE_slt
567 if (HAVE_slt
568 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
569 && STORE_FLAG_VALUE == -1)
571 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
572 lowpart_mode, 0);
573 fill_value = gen_reg_rtx (word_mode);
574 emit_insn (gen_slt (fill_value));
576 else
577 #endif
579 fill_value
580 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
582 NULL_RTX, 0);
583 fill_value = convert_to_mode (word_mode, fill_value, 1);
587 /* Fill the remaining words. */
588 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
590 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 rtx subword = operand_subword (to, index, 1, to_mode);
593 gcc_assert (subword);
595 if (fill_value != subword)
596 emit_move_insn (subword, fill_value);
599 insns = get_insns ();
600 end_sequence ();
602 emit_no_conflict_block (insns, to, from, NULL_RTX,
603 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
604 return;
607 /* Truncating multi-word to a word or less. */
608 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
611 if (!((MEM_P (from)
612 && ! MEM_VOLATILE_P (from)
613 && direct_load[(int) to_mode]
614 && ! mode_dependent_address_p (XEXP (from, 0)))
615 || REG_P (from)
616 || GET_CODE (from) == SUBREG))
617 from = force_reg (from_mode, from);
618 convert_move (to, gen_lowpart (word_mode, from), 0);
619 return;
622 /* Now follow all the conversions between integers
623 no more than a word long. */
625 /* For truncation, usually we can just refer to FROM in a narrower mode. */
626 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (from_mode)))
630 if (!((MEM_P (from)
631 && ! MEM_VOLATILE_P (from)
632 && direct_load[(int) to_mode]
633 && ! mode_dependent_address_p (XEXP (from, 0)))
634 || REG_P (from)
635 || GET_CODE (from) == SUBREG))
636 from = force_reg (from_mode, from);
637 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
638 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 from = copy_to_reg (from);
640 emit_move_insn (to, gen_lowpart (to_mode, from));
641 return;
644 /* Handle extension. */
645 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
647 /* Convert directly if that works. */
648 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
649 != CODE_FOR_nothing)
651 emit_unop_insn (code, to, from, equiv_code);
652 return;
654 else
656 enum machine_mode intermediate;
657 rtx tmp;
658 tree shift_amount;
660 /* Search for a mode to convert via. */
661 for (intermediate = from_mode; intermediate != VOIDmode;
662 intermediate = GET_MODE_WIDER_MODE (intermediate))
663 if (((can_extend_p (to_mode, intermediate, unsignedp)
664 != CODE_FOR_nothing)
665 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
666 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 GET_MODE_BITSIZE (intermediate))))
668 && (can_extend_p (intermediate, from_mode, unsignedp)
669 != CODE_FOR_nothing))
671 convert_move (to, convert_to_mode (intermediate, from,
672 unsignedp), unsignedp);
673 return;
676 /* No suitable intermediate mode.
677 Generate what we need with shifts. */
678 shift_amount = build_int_cst (NULL_TREE,
679 GET_MODE_BITSIZE (to_mode)
680 - GET_MODE_BITSIZE (from_mode));
681 from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
683 to, unsignedp);
684 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
685 to, unsignedp);
686 if (tmp != to)
687 emit_move_insn (to, tmp);
688 return;
692 /* Support special truncate insns for certain modes. */
693 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
695 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
696 to, from, UNKNOWN);
697 return;
700 /* Handle truncation of volatile memrefs, and so on;
701 the things that couldn't be truncated directly,
702 and for which there was no special instruction.
704 ??? Code above formerly short-circuited this, for most integer
705 mode pairs, with a force_reg in from_mode followed by a recursive
706 call to this routine. Appears always to have been wrong. */
707 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
709 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710 emit_move_insn (to, temp);
711 return;
714 /* Mode combination is not recognized. */
715 gcc_unreachable ();
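
/* Illustrative sketch (hypothetical helper): the typical way callers use
   convert_move, widening an existing SImode value into a fresh DImode
   pseudo.  UNSIGNEDP of zero requests sign extension, nonzero requests
   zero extension.  */
static rtx
example_widen_to_dimode (rtx src_si, int unsignedp)
{
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src_si, unsignedp);
  return dst;
}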
718 /* Return an rtx for a value that would result
719 from converting X to mode MODE.
720 Both X and MODE may be floating, or both integer.
721 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
723 or by copying to a new temporary with conversion. */
 725 rtx
 726 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
728 return convert_modes (mode, VOIDmode, x, unsignedp);
731 /* Return an rtx for a value that would result
732 from converting X from mode OLDMODE to mode MODE.
733 Both modes may be floating, or both integer.
734 UNSIGNEDP is nonzero if X is an unsigned value.
736 This can be done by referring to a part of X in place
737 or by copying to a new temporary with conversion.
739 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
 741 rtx
 742 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
744 rtx temp;
746 /* If FROM is a SUBREG that indicates that we have already done at least
747 the required extension, strip it. */
749 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752 x = gen_lowpart (mode, x);
754 if (GET_MODE (x) != VOIDmode)
755 oldmode = GET_MODE (x);
757 if (mode == oldmode)
758 return x;
760 /* There is one case that we must handle specially: If we are converting
761 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
762 we are to interpret the constant as unsigned, gen_lowpart will do
 763    the wrong thing if the constant appears negative.  What we want to do is
764 make the high-order word of the constant zero, not all ones. */
766 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
767 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
768 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
770 HOST_WIDE_INT val = INTVAL (x);
772 if (oldmode != VOIDmode
773 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
775 int width = GET_MODE_BITSIZE (oldmode);
777 /* We need to zero extend VAL. */
778 val &= ((HOST_WIDE_INT) 1 << width) - 1;
781 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
789 if ((GET_CODE (x) == CONST_INT
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
791 || (GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_CLASS (oldmode) == MODE_INT
793 && (GET_CODE (x) == CONST_DOUBLE
794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
796 && direct_load[(int) mode])
797 || (REG_P (x)
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
803 /* ?? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 if (! unsignedp
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
819 return gen_int_mode (val, mode);
822 return gen_lowpart (mode, x);
 825   /* Converting from an integer constant into MODE is always equivalent to a
 826      subreg operation.  */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
830 return simplify_gen_subreg (mode, x, oldmode, 0);
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
835 return temp;
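
/* Illustrative sketch (hypothetical helper): for constants, convert_modes
   can fold the conversion instead of emitting insns; narrowing the SImode
   constant 0x1234 to QImode should simply yield (const_int 0x34).  */
static rtx
example_narrow_constant (void)
{
  return convert_modes (QImode, SImode, GEN_INT (0x1234), 1);
}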
838 /* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
843 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
845 /* Determine whether the LEN bytes can be moved by using several move
846 instructions. Return nonzero if a call to move_by_pieces should
847 succeed. */
 849 int
 850 can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
853 return MOVE_BY_PIECES_P (len, align);
856 /* Generate several move instructions to copy LEN bytes from block FROM to
857 block TO. (These are MEM rtx's with BLKmode).
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
862 ALIGN is maximum stack alignment we can assume.
864 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
 865    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
866 stpcpy. */
 868 rtx
 869 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
872 struct move_by_pieces data;
873 rtx to_addr, from_addr = XEXP (from, 0);
874 unsigned int max_size = MOVE_MAX_PIECES + 1;
875 enum machine_mode mode = VOIDmode, tmode;
876 enum insn_code icode;
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
880 data.offset = 0;
881 data.from_addr = from_addr;
882 if (to)
884 to_addr = XEXP (to, 0);
885 data.to = to;
886 data.autinc_to
887 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889 data.reverse
890 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
892 else
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897 #ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899 #else
900 data.reverse = 0;
901 #endif
903 data.to_addr = to_addr;
904 data.from = from;
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
912 if (data.reverse) data.offset = len;
913 data.len = len;
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
919 && move_by_pieces_ninsns (len, align, max_size) > 2)
921 /* Find the mode of the largest move... */
922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 if (GET_MODE_SIZE (tmode) < max_size)
925 mode = tmode;
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
929 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 data.autinc_from = 1;
931 data.explicit_inc_from = -1;
933 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
935 data.from_addr = copy_addr_to_reg (from_addr);
936 data.autinc_from = 1;
937 data.explicit_inc_from = 1;
939 if (!data.autinc_from && CONSTANT_P (from_addr))
940 data.from_addr = copy_addr_to_reg (from_addr);
941 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
943 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
944 data.autinc_to = 1;
945 data.explicit_inc_to = -1;
947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
949 data.to_addr = copy_addr_to_reg (to_addr);
950 data.autinc_to = 1;
951 data.explicit_inc_to = 1;
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_addr_to_reg (to_addr);
957 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958 if (align >= GET_MODE_ALIGNMENT (tmode))
959 align = GET_MODE_ALIGNMENT (tmode);
960 else
962 enum machine_mode xmode;
964 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
965 tmode != VOIDmode;
966 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 || SLOW_UNALIGNED_ACCESS (tmode, align))
969 break;
971 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
977 while (max_size > 1)
979 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 if (GET_MODE_SIZE (tmode) < max_size)
982 mode = tmode;
984 if (mode == VOIDmode)
985 break;
987 icode = optab_handler (mov_optab, mode)->insn_code;
988 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
989 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
991 max_size = GET_MODE_SIZE (mode);
994 /* The code above should have handled everything. */
995 gcc_assert (!data.len);
997 if (endp)
999 rtx to1;
1001 gcc_assert (!data.reverse);
1002 if (data.autinc_to)
1004 if (endp == 2)
1006 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1008 else
1009 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1010 -1));
1012 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1013 data.offset);
1015 else
1017 if (endp == 2)
1018 --data.offset;
1019 to1 = adjust_address (data.to, QImode, data.offset);
1021 return to1;
1023 else
1024 return data.to;
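
/* Illustrative sketch (hypothetical helper): a fixed-size copy expanded by
   pieces where the caller also wants the address just past the last byte
   written, as a __builtin_mempcpy expansion would (ENDP == 1).  TO and
   FROM are BLKmode MEMs.  */
static rtx
example_mempcpy_by_pieces (rtx to, rtx from, unsigned int align)
{
  return move_by_pieces (to, from, 16, align, 1);
}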
1027 /* Return number of insns required to move L bytes by pieces.
1028 ALIGN (in bits) is maximum alignment we can assume. */
1030 static unsigned HOST_WIDE_INT
1031 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 unsigned int max_size)
1034 unsigned HOST_WIDE_INT n_insns = 0;
1035 enum machine_mode tmode;
1037 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038 if (align >= GET_MODE_ALIGNMENT (tmode))
1039 align = GET_MODE_ALIGNMENT (tmode);
1040 else
1042 enum machine_mode tmode, xmode;
1044 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1045 tmode != VOIDmode;
1046 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 || SLOW_UNALIGNED_ACCESS (tmode, align))
1049 break;
1051 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1054 while (max_size > 1)
1056 enum machine_mode mode = VOIDmode;
1057 enum insn_code icode;
1059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) < max_size)
1062 mode = tmode;
1064 if (mode == VOIDmode)
1065 break;
1067 icode = optab_handler (mov_optab, mode)->insn_code;
1068 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1069 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1071 max_size = GET_MODE_SIZE (mode);
1074 gcc_assert (!l);
1075 return n_insns;
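
/* Worked example (illustrative): on a target with 32-bit words, aligned
   operands and MOVE_MAX_PIECES == 4, a 7-byte copy decomposes greedily into
   one SImode move (4 bytes), one HImode move (2 bytes) and one QImode move
   (1 byte), so move_by_pieces_ninsns returns 3.  */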
1078 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1079 with move instructions for mode MODE. GENFUN is the gen_... function
1080 to make a move insn for that mode. DATA has all the other info. */
1082 static void
1083 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 struct move_by_pieces *data)
1086 unsigned int size = GET_MODE_SIZE (mode);
1087 rtx to1 = NULL_RTX, from1;
1089 while (data->len >= size)
1091 if (data->reverse)
1092 data->offset -= size;
1094 if (data->to)
1096 if (data->autinc_to)
1097 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1098 data->offset);
1099 else
1100 to1 = adjust_address (data->to, mode, data->offset);
1103 if (data->autinc_from)
1104 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1105 data->offset);
1106 else
1107 from1 = adjust_address (data->from, mode, data->offset);
1109 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 GEN_INT (-(HOST_WIDE_INT)size)));
1112 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1113 emit_insn (gen_add2_insn (data->from_addr,
1114 GEN_INT (-(HOST_WIDE_INT)size)));
1116 if (data->to)
1117 emit_insn ((*genfun) (to1, from1));
1118 else
1120 #ifdef PUSH_ROUNDING
1121 emit_single_push_insn (mode, from1, NULL);
1122 #else
1123 gcc_unreachable ();
1124 #endif
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1128 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1129 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1130 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1132 if (! data->reverse)
1133 data->offset += size;
1135 data->len -= size;
1139 /* Emit code to move a block Y to a block X. This may be done with
1140 string-move instructions, with multiple scalar move instructions,
1141 or with a library call.
1143 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1144 SIZE is an rtx that says how long they are.
1145 ALIGN is the maximum alignment we can assume they have.
1146 METHOD describes what kind of copy this is, and what mechanisms may be used.
1148 Return the address of the new block, if memcpy is called and returns it,
1149 0 otherwise. */
 1151 rtx
 1152 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1153 unsigned int expected_align, HOST_WIDE_INT expected_size)
1155 bool may_use_call;
1156 rtx retval = 0;
1157 unsigned int align;
1159 switch (method)
1161 case BLOCK_OP_NORMAL:
1162 case BLOCK_OP_TAILCALL:
1163 may_use_call = true;
1164 break;
1166 case BLOCK_OP_CALL_PARM:
1167 may_use_call = block_move_libcall_safe_for_call_parm ();
1169 /* Make inhibit_defer_pop nonzero around the library call
1170 to force it to pop the arguments right away. */
1171 NO_DEFER_POP;
1172 break;
1174 case BLOCK_OP_NO_LIBCALL:
1175 may_use_call = false;
1176 break;
1178 default:
1179 gcc_unreachable ();
1182 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1184 gcc_assert (MEM_P (x));
1185 gcc_assert (MEM_P (y));
1186 gcc_assert (size);
1188 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1189 block copy is more efficient for other large modes, e.g. DCmode. */
1190 x = adjust_address (x, BLKmode, 0);
1191 y = adjust_address (y, BLKmode, 0);
1193 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1194 can be incorrect is coming from __builtin_memcpy. */
1195 if (GET_CODE (size) == CONST_INT)
1197 if (INTVAL (size) == 0)
1198 return 0;
1200 x = shallow_copy_rtx (x);
1201 y = shallow_copy_rtx (y);
1202 set_mem_size (x, size);
1203 set_mem_size (y, size);
1206 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1207 move_by_pieces (x, y, INTVAL (size), align, 0);
1208 else if (emit_block_move_via_movmem (x, y, size, align,
1209 expected_align, expected_size))
1211 else if (may_use_call)
1212 retval = emit_block_move_via_libcall (x, y, size,
1213 method == BLOCK_OP_TAILCALL);
1214 else
1215 emit_block_move_via_loop (x, y, size, align);
1217 if (method == BLOCK_OP_CALL_PARM)
1218 OK_DEFER_POP;
1220 return retval;
 1223 rtx
 1224 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1226 return emit_block_move_hints (x, y, size, method, 0, -1);
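
/* Illustrative sketch (hypothetical helper): a typical aggregate copy.
   DST and SRC are BLKmode MEMs.  BLOCK_OP_NORMAL allows any strategy;
   BLOCK_OP_NO_LIBCALL would forbid the memcpy fallback, e.g. when
   expanding memcpy itself.  */
static void
example_copy_aggregate (rtx dst, rtx src, HOST_WIDE_INT nbytes)
{
  emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}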
1229 /* A subroutine of emit_block_move. Returns true if calling the
1230 block move libcall will not clobber any parameters which may have
1231 already been placed on the stack. */
1233 static bool
1234 block_move_libcall_safe_for_call_parm (void)
1236 /* If arguments are pushed on the stack, then they're safe. */
1237 if (PUSH_ARGS)
1238 return true;
1240 /* If registers go on the stack anyway, any argument is sure to clobber
1241 an outgoing argument. */
1242 #if defined (REG_PARM_STACK_SPACE)
1243 if (OUTGOING_REG_PARM_STACK_SPACE)
1245 tree fn;
1246 fn = emit_block_move_libcall_fn (false);
1247 if (REG_PARM_STACK_SPACE (fn) != 0)
1248 return false;
1250 #endif
1252 /* If any argument goes in memory, then it might clobber an outgoing
1253 argument. */
1255 CUMULATIVE_ARGS args_so_far;
1256 tree fn, arg;
1258 fn = emit_block_move_libcall_fn (false);
1259 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1261 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1262 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1264 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1265 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1266 if (!tmp || !REG_P (tmp))
1267 return false;
1268 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1269 return false;
1270 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1273 return true;
1276 /* A subroutine of emit_block_move. Expand a movmem pattern;
1277 return true if successful. */
1279 static bool
1280 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1281 unsigned int expected_align, HOST_WIDE_INT expected_size)
1283 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1284 int save_volatile_ok = volatile_ok;
1285 enum machine_mode mode;
1287 if (expected_align < align)
1288 expected_align = align;
1290 /* Since this is a move insn, we don't care about volatility. */
1291 volatile_ok = 1;
1293 /* Try the most limited insn first, because there's no point
1294 including more than one in the machine description unless
1295 the more limited one has some advantage. */
1297 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1298 mode = GET_MODE_WIDER_MODE (mode))
1300 enum insn_code code = movmem_optab[(int) mode];
1301 insn_operand_predicate_fn pred;
1303 if (code != CODE_FOR_nothing
1304 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1305 here because if SIZE is less than the mode mask, as it is
1306 returned by the macro, it will definitely be less than the
1307 actual mode mask. */
1308 && ((GET_CODE (size) == CONST_INT
1309 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1310 <= (GET_MODE_MASK (mode) >> 1)))
1311 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1312 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1313 || (*pred) (x, BLKmode))
1314 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1315 || (*pred) (y, BLKmode))
1316 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1317 || (*pred) (opalign, VOIDmode)))
1319 rtx op2;
1320 rtx last = get_last_insn ();
1321 rtx pat;
1323 op2 = convert_to_mode (mode, size, 1);
1324 pred = insn_data[(int) code].operand[2].predicate;
1325 if (pred != 0 && ! (*pred) (op2, mode))
1326 op2 = copy_to_mode_reg (mode, op2);
1328 /* ??? When called via emit_block_move_for_call, it'd be
1329 nice if there were some way to inform the backend, so
1330 that it doesn't fail the expansion because it thinks
1331 emitting the libcall would be more efficient. */
1333 if (insn_data[(int) code].n_operands == 4)
1334 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1335 else
1336 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1337 GEN_INT (expected_align),
1338 GEN_INT (expected_size));
1339 if (pat)
1341 emit_insn (pat);
1342 volatile_ok = save_volatile_ok;
1343 return true;
1345 else
1346 delete_insns_since (last);
1350 volatile_ok = save_volatile_ok;
1351 return false;
1354 /* A subroutine of emit_block_move. Expand a call to memcpy.
1355 Return the return value from memcpy, 0 otherwise. */
 1357 rtx
 1358 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1360 rtx dst_addr, src_addr;
1361 tree call_expr, fn, src_tree, dst_tree, size_tree;
1362 enum machine_mode size_mode;
1363 rtx retval;
1365 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1366 pseudos. We can then place those new pseudos into a VAR_DECL and
1367 use them later. */
1369 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1370 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1372 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1373 src_addr = convert_memory_address (ptr_mode, src_addr);
1375 dst_tree = make_tree (ptr_type_node, dst_addr);
1376 src_tree = make_tree (ptr_type_node, src_addr);
1378 size_mode = TYPE_MODE (sizetype);
1380 size = convert_to_mode (size_mode, size, 1);
1381 size = copy_to_mode_reg (size_mode, size);
1383 /* It is incorrect to use the libcall calling conventions to call
1384 memcpy in this context. This could be a user call to memcpy and
1385 the user may wish to examine the return value from memcpy. For
1386 targets where libcalls and normal calls have different conventions
1387 for returning pointers, we could end up generating incorrect code. */
1389 size_tree = make_tree (sizetype, size);
1391 fn = emit_block_move_libcall_fn (true);
1392 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1393 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1395 retval = expand_normal (call_expr);
1397 return retval;
1400 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1401 for the function we use for block copies. The first time FOR_CALL
1402 is true, we call assemble_external. */
1404 static GTY(()) tree block_move_fn;
1406 void
1407 init_block_move_fn (const char *asmspec)
1409 if (!block_move_fn)
1411 tree args, fn;
1413 fn = get_identifier ("memcpy");
1414 args = build_function_type_list (ptr_type_node, ptr_type_node,
1415 const_ptr_type_node, sizetype,
1416 NULL_TREE);
1418 fn = build_decl (FUNCTION_DECL, fn, args);
1419 DECL_EXTERNAL (fn) = 1;
1420 TREE_PUBLIC (fn) = 1;
1421 DECL_ARTIFICIAL (fn) = 1;
1422 TREE_NOTHROW (fn) = 1;
1423 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1424 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1426 block_move_fn = fn;
1429 if (asmspec)
1430 set_user_assembler_name (block_move_fn, asmspec);
1433 static tree
1434 emit_block_move_libcall_fn (int for_call)
1436 static bool emitted_extern;
1438 if (!block_move_fn)
1439 init_block_move_fn (NULL);
1441 if (for_call && !emitted_extern)
1443 emitted_extern = true;
1444 make_decl_rtl (block_move_fn);
1445 assemble_external (block_move_fn);
1448 return block_move_fn;
1451 /* A subroutine of emit_block_move. Copy the data via an explicit
1452 loop. This is used only when libcalls are forbidden. */
1453 /* ??? It'd be nice to copy in hunks larger than QImode. */
1455 static void
1456 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1457 unsigned int align ATTRIBUTE_UNUSED)
1459 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1460 enum machine_mode iter_mode;
1462 iter_mode = GET_MODE (size);
1463 if (iter_mode == VOIDmode)
1464 iter_mode = word_mode;
1466 top_label = gen_label_rtx ();
1467 cmp_label = gen_label_rtx ();
1468 iter = gen_reg_rtx (iter_mode);
1470 emit_move_insn (iter, const0_rtx);
1472 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1473 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1474 do_pending_stack_adjust ();
1476 emit_jump (cmp_label);
1477 emit_label (top_label);
1479 tmp = convert_modes (Pmode, iter_mode, iter, true);
1480 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1481 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1482 x = change_address (x, QImode, x_addr);
1483 y = change_address (y, QImode, y_addr);
1485 emit_move_insn (x, y);
1487 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1488 true, OPTAB_LIB_WIDEN);
1489 if (tmp != iter)
1490 emit_move_insn (iter, tmp);
1492 emit_label (cmp_label);
1494 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1495 true, top_label);
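
/* Illustrative sketch: the RTL emitted above behaves like the following
   byte-at-a-time C loop (QImode per iteration), which is why the comment
   above wishes for larger hunks:

     for (i = 0; i < size; i++)
       ((char *) x)[i] = ((char *) y)[i];
*/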
1498 /* Copy all or part of a value X into registers starting at REGNO.
1499 The number of registers to be filled is NREGS. */
1501 void
1502 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1504 int i;
1505 #ifdef HAVE_load_multiple
1506 rtx pat;
1507 rtx last;
1508 #endif
1510 if (nregs == 0)
1511 return;
1513 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1514 x = validize_mem (force_const_mem (mode, x));
1516 /* See if the machine can do this with a load multiple insn. */
1517 #ifdef HAVE_load_multiple
1518 if (HAVE_load_multiple)
1520 last = get_last_insn ();
1521 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1522 GEN_INT (nregs));
1523 if (pat)
1525 emit_insn (pat);
1526 return;
1528 else
1529 delete_insns_since (last);
1531 #endif
1533 for (i = 0; i < nregs; i++)
1534 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1535 operand_subword_force (x, i, mode));
1538 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1539 The number of registers to be filled is NREGS. */
1541 void
1542 move_block_from_reg (int regno, rtx x, int nregs)
1544 int i;
1546 if (nregs == 0)
1547 return;
1549 /* See if the machine can do this with a store multiple insn. */
1550 #ifdef HAVE_store_multiple
1551 if (HAVE_store_multiple)
1553 rtx last = get_last_insn ();
1554 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1555 GEN_INT (nregs));
1556 if (pat)
1558 emit_insn (pat);
1559 return;
1561 else
1562 delete_insns_since (last);
1564 #endif
1566 for (i = 0; i < nregs; i++)
1568 rtx tem = operand_subword (x, i, 1, BLKmode);
1570 gcc_assert (tem);
1572 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1576 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1577 ORIG, where ORIG is a non-consecutive group of registers represented by
1578 a PARALLEL. The clone is identical to the original except in that the
1579 original set of registers is replaced by a new set of pseudo registers.
1580 The new set has the same modes as the original set. */
 1582 rtx
 1583 gen_group_rtx (rtx orig)
1585 int i, length;
1586 rtx *tmps;
1588 gcc_assert (GET_CODE (orig) == PARALLEL);
1590 length = XVECLEN (orig, 0);
1591 tmps = alloca (sizeof (rtx) * length);
1593 /* Skip a NULL entry in first slot. */
1594 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1596 if (i)
1597 tmps[0] = 0;
1599 for (; i < length; i++)
1601 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1602 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1604 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1607 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
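
/* Illustrative sketch: a non-consecutive two-register group is a PARALLEL
   of (register, byte offset) pairs, for example

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   gen_group_rtx above returns the same shape with each hard register
   replaced by a fresh pseudo of the same mode.  */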
1610 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1611 except that values are placed in TMPS[i], and must later be moved
1612 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1614 static void
1615 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1617 rtx src;
1618 int start, i;
1619 enum machine_mode m = GET_MODE (orig_src);
1621 gcc_assert (GET_CODE (dst) == PARALLEL);
1623 if (m != VOIDmode
1624 && !SCALAR_INT_MODE_P (m)
1625 && !MEM_P (orig_src)
1626 && GET_CODE (orig_src) != CONCAT)
1628 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1629 if (imode == BLKmode)
1630 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1631 else
1632 src = gen_reg_rtx (imode);
1633 if (imode != BLKmode)
1634 src = gen_lowpart (GET_MODE (orig_src), src);
1635 emit_move_insn (src, orig_src);
1636 /* ...and back again. */
1637 if (imode != BLKmode)
1638 src = gen_lowpart (imode, src);
1639 emit_group_load_1 (tmps, dst, src, type, ssize);
1640 return;
1643 /* Check for a NULL entry, used to indicate that the parameter goes
1644 both on the stack and in registers. */
1645 if (XEXP (XVECEXP (dst, 0, 0), 0))
1646 start = 0;
1647 else
1648 start = 1;
1650 /* Process the pieces. */
1651 for (i = start; i < XVECLEN (dst, 0); i++)
1653 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1654 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1655 unsigned int bytelen = GET_MODE_SIZE (mode);
1656 int shift = 0;
1658 /* Handle trailing fragments that run over the size of the struct. */
1659 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1661 /* Arrange to shift the fragment to where it belongs.
1662 extract_bit_field loads to the lsb of the reg. */
1663 if (
1664 #ifdef BLOCK_REG_PADDING
1665 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1666 == (BYTES_BIG_ENDIAN ? upward : downward)
1667 #else
1668 BYTES_BIG_ENDIAN
1669 #endif
1671 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1672 bytelen = ssize - bytepos;
1673 gcc_assert (bytelen > 0);
1676 /* If we won't be loading directly from memory, protect the real source
1677 from strange tricks we might play; but make sure that the source can
1678 be loaded directly into the destination. */
1679 src = orig_src;
1680 if (!MEM_P (orig_src)
1681 && (!CONSTANT_P (orig_src)
1682 || (GET_MODE (orig_src) != mode
1683 && GET_MODE (orig_src) != VOIDmode)))
1685 if (GET_MODE (orig_src) == VOIDmode)
1686 src = gen_reg_rtx (mode);
1687 else
1688 src = gen_reg_rtx (GET_MODE (orig_src));
1690 emit_move_insn (src, orig_src);
1693 /* Optimize the access just a bit. */
1694 if (MEM_P (src)
1695 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1696 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1697 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1698 && bytelen == GET_MODE_SIZE (mode))
1700 tmps[i] = gen_reg_rtx (mode);
1701 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1703 else if (COMPLEX_MODE_P (mode)
1704 && GET_MODE (src) == mode
1705 && bytelen == GET_MODE_SIZE (mode))
1706 /* Let emit_move_complex do the bulk of the work. */
1707 tmps[i] = src;
1708 else if (GET_CODE (src) == CONCAT)
1710 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1711 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1713 if ((bytepos == 0 && bytelen == slen0)
1714 || (bytepos != 0 && bytepos + bytelen <= slen))
1716 /* The following assumes that the concatenated objects all
1717 have the same size. In this case, a simple calculation
1718 can be used to determine the object and the bit field
1719 to be extracted. */
1720 tmps[i] = XEXP (src, bytepos / slen0);
1721 if (! CONSTANT_P (tmps[i])
1722 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1723 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1724 (bytepos % slen0) * BITS_PER_UNIT,
1725 1, NULL_RTX, mode, mode);
1727 else
1729 rtx mem;
1731 gcc_assert (!bytepos);
1732 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1733 emit_move_insn (mem, src);
1734 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1735 0, 1, NULL_RTX, mode, mode);
1738 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1739 SIMD register, which is currently broken. While we get GCC
1740 to emit proper RTL for these cases, let's dump to memory. */
1741 else if (VECTOR_MODE_P (GET_MODE (dst))
1742 && REG_P (src))
1744 int slen = GET_MODE_SIZE (GET_MODE (src));
1745 rtx mem;
1747 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1748 emit_move_insn (mem, src);
1749 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1751 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1752 && XVECLEN (dst, 0) > 1)
1753 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1754 else if (CONSTANT_P (src)
1755 || (REG_P (src) && GET_MODE (src) == mode))
1756 tmps[i] = src;
1757 else
1758 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1759 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1760 mode, mode);
1762 if (shift)
1763 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1764 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1768 /* Emit code to move a block SRC of type TYPE to a block DST,
1769 where DST is non-consecutive registers represented by a PARALLEL.
1770 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1771 if not known. */
1773 void
1774 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1776 rtx *tmps;
1777 int i;
1779 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1780 emit_group_load_1 (tmps, dst, src, type, ssize);
1782 /* Copy the extracted pieces into the proper (probable) hard regs. */
1783 for (i = 0; i < XVECLEN (dst, 0); i++)
1785 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1786 if (d == NULL)
1787 continue;
1788 emit_move_insn (d, tmps[i]);
1792 /* Similar, but load SRC into new pseudos in a format that looks like
1793 PARALLEL. This can later be fed to emit_group_move to get things
1794 in the right place. */
 1796 rtx
 1797 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1799 rtvec vec;
1800 int i;
1802 vec = rtvec_alloc (XVECLEN (parallel, 0));
1803 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1805 /* Convert the vector to look just like the original PARALLEL, except
1806 with the computed values. */
1807 for (i = 0; i < XVECLEN (parallel, 0); i++)
1809 rtx e = XVECEXP (parallel, 0, i);
1810 rtx d = XEXP (e, 0);
1812 if (d)
1814 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1815 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1817 RTVEC_ELT (vec, i) = e;
1820 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1823 /* Emit code to move a block SRC to block DST, where SRC and DST are
1824 non-consecutive groups of registers, each represented by a PARALLEL. */
1826 void
1827 emit_group_move (rtx dst, rtx src)
1829 int i;
1831 gcc_assert (GET_CODE (src) == PARALLEL
1832 && GET_CODE (dst) == PARALLEL
1833 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1835 /* Skip first entry if NULL. */
1836 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1837 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1838 XEXP (XVECEXP (src, 0, i), 0));
1841 /* Move a group of registers represented by a PARALLEL into pseudos. */
 1843 rtx
 1844 emit_group_move_into_temps (rtx src)
1846 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1847 int i;
1849 for (i = 0; i < XVECLEN (src, 0); i++)
1851 rtx e = XVECEXP (src, 0, i);
1852 rtx d = XEXP (e, 0);
1854 if (d)
1855 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1856 RTVEC_ELT (vec, i) = e;
1859 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1862 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1863 where SRC is non-consecutive registers represented by a PARALLEL.
1864 SSIZE represents the total size of block ORIG_DST, or -1 if not
1865 known. */
1867 void
1868 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1870 rtx *tmps, dst;
1871 int start, finish, i;
1872 enum machine_mode m = GET_MODE (orig_dst);
1874 gcc_assert (GET_CODE (src) == PARALLEL);
1876 if (!SCALAR_INT_MODE_P (m)
1877 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1879 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1880 if (imode == BLKmode)
1881 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1882 else
1883 dst = gen_reg_rtx (imode);
1884 emit_group_store (dst, src, type, ssize);
1885 if (imode != BLKmode)
1886 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1887 emit_move_insn (orig_dst, dst);
1888 return;
1891 /* Check for a NULL entry, used to indicate that the parameter goes
1892 both on the stack and in registers. */
1893 if (XEXP (XVECEXP (src, 0, 0), 0))
1894 start = 0;
1895 else
1896 start = 1;
1897 finish = XVECLEN (src, 0);
1899 tmps = alloca (sizeof (rtx) * finish);
1901 /* Copy the (probable) hard regs into pseudos. */
1902 for (i = start; i < finish; i++)
1904 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1905 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1907 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1908 emit_move_insn (tmps[i], reg);
1910 else
1911 tmps[i] = reg;
1914 /* If we won't be storing directly into memory, protect the real destination
1915 from strange tricks we might play. */
1916 dst = orig_dst;
1917 if (GET_CODE (dst) == PARALLEL)
1919 rtx temp;
1921 /* We can get a PARALLEL dst if there is a conditional expression in
1922 a return statement. In that case, the dst and src are the same,
1923 so no action is necessary. */
1924 if (rtx_equal_p (dst, src))
1925 return;
1927 /* It is unclear if we can ever reach here, but we may as well handle
1928 it. Allocate a temporary, and split this into a store/load to/from
1929 the temporary. */
1931 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1932 emit_group_store (temp, src, type, ssize);
1933 emit_group_load (dst, temp, type, ssize);
1934 return;
1936 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1938 enum machine_mode outer = GET_MODE (dst);
1939 enum machine_mode inner;
1940 HOST_WIDE_INT bytepos;
1941 bool done = false;
1942 rtx temp;
1944 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1945 dst = gen_reg_rtx (outer);
1947 /* Make life a bit easier for combine. */
1948 /* If the first element of the vector is the low part
1949 of the destination mode, use a paradoxical subreg to
1950 initialize the destination. */
1951 if (start < finish)
1953 inner = GET_MODE (tmps[start]);
1954 bytepos = subreg_lowpart_offset (inner, outer);
1955 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1957 temp = simplify_gen_subreg (outer, tmps[start],
1958 inner, 0);
1959 if (temp)
1961 emit_move_insn (dst, temp);
1962 done = true;
1963 start++;
1968 /* If the first element wasn't the low part, try the last. */
1969 if (!done
1970 && start < finish - 1)
1972 inner = GET_MODE (tmps[finish - 1]);
1973 bytepos = subreg_lowpart_offset (inner, outer);
1974 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1976 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1977 inner, 0);
1978 if (temp)
1980 emit_move_insn (dst, temp);
1981 done = true;
1982 finish--;
1987 /* Otherwise, simply initialize the result to zero. */
1988 if (!done)
1989 emit_move_insn (dst, CONST0_RTX (outer));
1992 /* Process the pieces. */
1993 for (i = start; i < finish; i++)
1995 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1996 enum machine_mode mode = GET_MODE (tmps[i]);
1997 unsigned int bytelen = GET_MODE_SIZE (mode);
1998 rtx dest = dst;
2000 /* Handle trailing fragments that run over the size of the struct. */
2001 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2003 /* store_bit_field always takes its value from the lsb.
2004 Move the fragment to the lsb if it's not already there. */
2005 if (
2006 #ifdef BLOCK_REG_PADDING
2007 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2008 == (BYTES_BIG_ENDIAN ? upward : downward)
2009 #else
2010 BYTES_BIG_ENDIAN
2011 #endif
2014 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2015 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2016 build_int_cst (NULL_TREE, shift),
2017 tmps[i], 0);
2019 bytelen = ssize - bytepos;
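/* Worked example with hypothetical numbers: for an SImode piece
   (bytelen == 4) of a 6-byte structure (ssize == 6) starting at byte 4
   (bytepos == 4), only ssize - bytepos == 2 bytes are real data.  When
   the data lives at the most significant end of the register, the shift
   above is (4 - 2) * 8 == 16 bits to the right, bringing the valid bytes
   down to the lsb before store_bit_field stores the remaining 2 bytes.  */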
2022 if (GET_CODE (dst) == CONCAT)
2024 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2025 dest = XEXP (dst, 0);
2026 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2028 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2029 dest = XEXP (dst, 1);
2031 else
2033 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2034 dest = assign_stack_temp (GET_MODE (dest),
2035 GET_MODE_SIZE (GET_MODE (dest)), 0);
2036 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2037 tmps[i]);
2038 dst = dest;
2039 break;
2043 /* Optimize the access just a bit. */
2044 if (MEM_P (dest)
2045 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2046 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2047 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2048 && bytelen == GET_MODE_SIZE (mode))
2049 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2050 else
2051 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2052 mode, tmps[i]);
2055 /* Copy from the pseudo into the (probable) hard reg. */
2056 if (orig_dst != dst)
2057 emit_move_insn (orig_dst, dst);
2060 /* Generate code to copy a BLKmode object of TYPE out of a
2061 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2062 is null, a stack temporary is created. TGTBLK is returned.
2064 The purpose of this routine is to handle functions that return
2065 BLKmode structures in registers. Some machines (the PA for example)
2066 want to return all small structures in registers regardless of the
2067 structure's alignment. */
2070 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2072 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2073 rtx src = NULL, dst = NULL;
2074 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2075 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2077 if (tgtblk == 0)
2079 tgtblk = assign_temp (build_qualified_type (type,
2080 (TYPE_QUALS (type)
2081 | TYPE_QUAL_CONST)),
2082 0, 1, 1);
2083 preserve_temp_slots (tgtblk);
2086 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2087 into a new pseudo which is a full word. */
2089 if (GET_MODE (srcreg) != BLKmode
2090 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2091 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2093 /* If the structure doesn't take up a whole number of words, see whether
2094 SRCREG is padded on the left or on the right. If it's on the left,
2095 set PADDING_CORRECTION to the number of bits to skip.
2097 In most ABIs, the structure will be returned at the least significant end of
2098 the register, which translates to right padding on little-endian
2099 targets and left padding on big-endian targets. The opposite
2100 holds if the structure is returned at the most significant
2101 end of the register. */
2102 if (bytes % UNITS_PER_WORD != 0
2103 && (targetm.calls.return_in_msb (type)
2104 ? !BYTES_BIG_ENDIAN
2105 : BYTES_BIG_ENDIAN))
2106 padding_correction
2107 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
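/* Worked example with hypothetical target parameters: on a 32-bit
   big-endian target (UNITS_PER_WORD == 4, BITS_PER_WORD == 32) a 5-byte
   structure has bytes % UNITS_PER_WORD == 1, so PADDING_CORRECTION is
   32 - 1 * 8 == 24 and the copy loop below starts reading the first
   source word 24 bits in, past the left padding.  */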
2109 /* Copy the structure BITSIZE bits at a time.
2111 We could probably emit more efficient code for machines which do not use
2112 strict alignment, but it doesn't seem worth the effort at the current
2113 time. */
2114 for (bitpos = 0, xbitpos = padding_correction;
2115 bitpos < bytes * BITS_PER_UNIT;
2116 bitpos += bitsize, xbitpos += bitsize)
2118 /* We need a new source operand each time xbitpos is on a
2119 word boundary and when xbitpos == padding_correction
2120 (the first time through). */
2121 if (xbitpos % BITS_PER_WORD == 0
2122 || xbitpos == padding_correction)
2123 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2124 GET_MODE (srcreg));
2126 /* We need a new destination operand each time bitpos is on
2127 a word boundary. */
2128 if (bitpos % BITS_PER_WORD == 0)
2129 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2131 /* Use xbitpos for the source extraction (right justified) and
2132 bitpos for the destination store (left justified). */
2133 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2134 extract_bit_field (src, bitsize,
2135 xbitpos % BITS_PER_WORD, 1,
2136 NULL_RTX, word_mode, word_mode));
2139 return tgtblk;
2142 /* Add a USE expression for REG to the (possibly empty) list pointed
2143 to by CALL_FUSAGE. REG must denote a hard register. */
2145 void
2146 use_reg (rtx *call_fusage, rtx reg)
2148 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2150 *call_fusage
2151 = gen_rtx_EXPR_LIST (VOIDmode,
2152 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2155 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2156 starting at REGNO. All of these registers must be hard registers. */
2158 void
2159 use_regs (rtx *call_fusage, int regno, int nregs)
2161 int i;
2163 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2165 for (i = 0; i < nregs; i++)
2166 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2169 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2170 PARALLEL REGS. This is for calls that pass values in multiple
2171 non-contiguous locations. The Irix 6 ABI has examples of this. */
2173 void
2174 use_group_regs (rtx *call_fusage, rtx regs)
2176 int i;
2178 for (i = 0; i < XVECLEN (regs, 0); i++)
2180 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2182 /* A NULL entry means the parameter goes both on the stack and in
2183 registers. This can also be a MEM for targets that pass values
2184 partially on the stack and partially in registers. */
2185 if (reg != 0 && REG_P (reg))
2186 use_reg (call_fusage, reg);
2191 /* Determine whether the LEN bytes generated by CONSTFUN can be
2192 stored to memory using several move instructions. CONSTFUNDATA is
2193 a pointer which will be passed as argument in every CONSTFUN call.
2194 ALIGN is maximum alignment we can assume. Return nonzero if a
2195 call to store_by_pieces should succeed. */
2198 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2199 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2200 void *constfundata, unsigned int align)
2202 unsigned HOST_WIDE_INT l;
2203 unsigned int max_size;
2204 HOST_WIDE_INT offset = 0;
2205 enum machine_mode mode, tmode;
2206 enum insn_code icode;
2207 int reverse;
2208 rtx cst;
2210 if (len == 0)
2211 return 1;
2213 if (! STORE_BY_PIECES_P (len, align))
2214 return 0;
2216 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2217 if (align >= GET_MODE_ALIGNMENT (tmode))
2218 align = GET_MODE_ALIGNMENT (tmode);
2219 else
2221 enum machine_mode xmode;
2223 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2224 tmode != VOIDmode;
2225 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2226 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2227 || SLOW_UNALIGNED_ACCESS (tmode, align))
2228 break;
2230 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2233 /* We would first store what we can in the largest integer mode, then go to
2234 successively smaller modes. */
2236 for (reverse = 0;
2237 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2238 reverse++)
2240 l = len;
2241 mode = VOIDmode;
2242 max_size = STORE_MAX_PIECES + 1;
2243 while (max_size > 1)
2245 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2246 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2247 if (GET_MODE_SIZE (tmode) < max_size)
2248 mode = tmode;
2250 if (mode == VOIDmode)
2251 break;
2253 icode = optab_handler (mov_optab, mode)->insn_code;
2254 if (icode != CODE_FOR_nothing
2255 && align >= GET_MODE_ALIGNMENT (mode))
2257 unsigned int size = GET_MODE_SIZE (mode);
2259 while (l >= size)
2261 if (reverse)
2262 offset -= size;
2264 cst = (*constfun) (constfundata, offset, mode);
2265 if (!LEGITIMATE_CONSTANT_P (cst))
2266 return 0;
2268 if (!reverse)
2269 offset += size;
2271 l -= size;
2275 max_size = GET_MODE_SIZE (mode);
2278 /* The code above should have handled everything. */
2279 gcc_assert (!l);
2282 return 1;
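/* Illustrative usage sketch; the caller and callback names below are
   assumptions modeled on the string-builtin expanders, not definitions
   from this file.  A caller typically probes first and only then commits
   to the piecewise expansion:

     if (can_store_by_pieces (len, read_str_fn, str_data, dest_align))
       dest = store_by_pieces (dest, len, read_str_fn, str_data,
                               dest_align, 0);

   where read_str_fn returns an rtx holding the constant bytes at a given
   offset in the requested mode, exactly as CONSTFUN is described above.  */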
2285 /* Generate several move instructions to store LEN bytes generated by
2286 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2287 pointer which will be passed as argument in every CONSTFUN call.
2288 ALIGN is maximum alignment we can assume.
2289 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2290 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2291 stpcpy. */
2294 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2295 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2296 void *constfundata, unsigned int align, int endp)
2298 struct store_by_pieces data;
2300 if (len == 0)
2302 gcc_assert (endp != 2);
2303 return to;
2306 gcc_assert (STORE_BY_PIECES_P (len, align));
2307 data.constfun = constfun;
2308 data.constfundata = constfundata;
2309 data.len = len;
2310 data.to = to;
2311 store_by_pieces_1 (&data, align);
2312 if (endp)
2314 rtx to1;
2316 gcc_assert (!data.reverse);
2317 if (data.autinc_to)
2319 if (endp == 2)
2321 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2322 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2323 else
2324 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2325 -1));
2327 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2328 data.offset);
2330 else
2332 if (endp == 2)
2333 --data.offset;
2334 to1 = adjust_address (data.to, QImode, data.offset);
2336 return to1;
2338 else
2339 return data.to;
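/* Illustrative example of the ENDP convention (hypothetical call): storing
   the six bytes of "hello" (five characters plus the terminating NUL) with
   ENDP == 2, as an stpcpy-style expander would, returns the address of the
   NUL (TO plus 5); with ENDP == 1 the mempcpy-style result TO plus 6 is
   returned, and with ENDP == 0 the original TO is returned.  */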
2342 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2343 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2345 static void
2346 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2348 struct store_by_pieces data;
2350 if (len == 0)
2351 return;
2353 data.constfun = clear_by_pieces_1;
2354 data.constfundata = NULL;
2355 data.len = len;
2356 data.to = to;
2357 store_by_pieces_1 (&data, align);
2360 /* Callback routine for clear_by_pieces.
2361 Return const0_rtx unconditionally. */
2363 static rtx
2364 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2365 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2366 enum machine_mode mode ATTRIBUTE_UNUSED)
2368 return const0_rtx;
2371 /* Subroutine of clear_by_pieces and store_by_pieces.
2372 Generate several move instructions to store LEN bytes of block TO. (A MEM
2373 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2375 static void
2376 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2377 unsigned int align ATTRIBUTE_UNUSED)
2379 rtx to_addr = XEXP (data->to, 0);
2380 unsigned int max_size = STORE_MAX_PIECES + 1;
2381 enum machine_mode mode = VOIDmode, tmode;
2382 enum insn_code icode;
2384 data->offset = 0;
2385 data->to_addr = to_addr;
2386 data->autinc_to
2387 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2388 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2390 data->explicit_inc_to = 0;
2391 data->reverse
2392 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2393 if (data->reverse)
2394 data->offset = data->len;
2396 /* If storing requires more than two move insns,
2397 copy addresses to registers (to make displacements shorter)
2398 and use post-increment if available. */
2399 if (!data->autinc_to
2400 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2402 /* Determine the main mode we'll be using. */
2403 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2404 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2405 if (GET_MODE_SIZE (tmode) < max_size)
2406 mode = tmode;
2408 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2410 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2411 data->autinc_to = 1;
2412 data->explicit_inc_to = -1;
2415 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2416 && ! data->autinc_to)
2418 data->to_addr = copy_addr_to_reg (to_addr);
2419 data->autinc_to = 1;
2420 data->explicit_inc_to = 1;
2423 if ( !data->autinc_to && CONSTANT_P (to_addr))
2424 data->to_addr = copy_addr_to_reg (to_addr);
2427 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2428 if (align >= GET_MODE_ALIGNMENT (tmode))
2429 align = GET_MODE_ALIGNMENT (tmode);
2430 else
2432 enum machine_mode xmode;
2434 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2435 tmode != VOIDmode;
2436 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2437 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2438 || SLOW_UNALIGNED_ACCESS (tmode, align))
2439 break;
2441 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2444 /* First store what we can in the largest integer mode, then go to
2445 successively smaller modes. */
2447 while (max_size > 1)
2449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2451 if (GET_MODE_SIZE (tmode) < max_size)
2452 mode = tmode;
2454 if (mode == VOIDmode)
2455 break;
2457 icode = optab_handler (mov_optab, mode)->insn_code;
2458 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2459 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2461 max_size = GET_MODE_SIZE (mode);
2464 /* The code above should have handled everything. */
2465 gcc_assert (!data->len);
2468 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2469 with move instructions for mode MODE. GENFUN is the gen_... function
2470 to make a move insn for that mode. DATA has all the other info. */
2472 static void
2473 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2474 struct store_by_pieces *data)
2476 unsigned int size = GET_MODE_SIZE (mode);
2477 rtx to1, cst;
2479 while (data->len >= size)
2481 if (data->reverse)
2482 data->offset -= size;
2484 if (data->autinc_to)
2485 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2486 data->offset);
2487 else
2488 to1 = adjust_address (data->to, mode, data->offset);
2490 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2491 emit_insn (gen_add2_insn (data->to_addr,
2492 GEN_INT (-(HOST_WIDE_INT) size)));
2494 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2495 emit_insn ((*genfun) (to1, cst));
2497 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2498 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2500 if (! data->reverse)
2501 data->offset += size;
2503 data->len -= size;
2507 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2508 its length in bytes. */
2511 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2512 unsigned int expected_align, HOST_WIDE_INT expected_size)
2514 enum machine_mode mode = GET_MODE (object);
2515 unsigned int align;
2517 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2519 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2520 just move a zero. Otherwise, do this a piece at a time. */
2521 if (mode != BLKmode
2522 && GET_CODE (size) == CONST_INT
2523 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2525 rtx zero = CONST0_RTX (mode);
2526 if (zero != NULL)
2528 emit_move_insn (object, zero);
2529 return NULL;
2532 if (COMPLEX_MODE_P (mode))
2534 zero = CONST0_RTX (GET_MODE_INNER (mode));
2535 if (zero != NULL)
2537 write_complex_part (object, zero, 0);
2538 write_complex_part (object, zero, 1);
2539 return NULL;
2544 if (size == const0_rtx)
2545 return NULL;
2547 align = MEM_ALIGN (object);
2549 if (GET_CODE (size) == CONST_INT
2550 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2551 clear_by_pieces (object, INTVAL (size), align);
2552 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2553 expected_align, expected_size))
2555 else
2556 return set_storage_via_libcall (object, size, const0_rtx,
2557 method == BLOCK_OP_TAILCALL);
2559 return NULL;
2563 clear_storage (rtx object, rtx size, enum block_op_methods method)
2565 return clear_storage_hints (object, size, method, 0, -1);
2569 /* A subroutine of clear_storage. Expand a call to memset.
2570 Return the return value of memset, 0 otherwise. */
2573 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2575 tree call_expr, fn, object_tree, size_tree, val_tree;
2576 enum machine_mode size_mode;
2577 rtx retval;
2579 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2580 place those new pseudos into a VAR_DECL and use them later. */
2582 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2584 size_mode = TYPE_MODE (sizetype);
2585 size = convert_to_mode (size_mode, size, 1);
2586 size = copy_to_mode_reg (size_mode, size);
2588 /* It is incorrect to use the libcall calling conventions to call
2589 memset in this context. This could be a user call to memset and
2590 the user may wish to examine the return value from memset. For
2591 targets where libcalls and normal calls have different conventions
2592 for returning pointers, we could end up generating incorrect code. */
2594 object_tree = make_tree (ptr_type_node, object);
2595 if (GET_CODE (val) != CONST_INT)
2596 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2597 size_tree = make_tree (sizetype, size);
2598 val_tree = make_tree (integer_type_node, val);
2600 fn = clear_storage_libcall_fn (true);
2601 call_expr = build_call_expr (fn, 3,
2602 object_tree, val_tree, size_tree);
2603 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2605 retval = expand_normal (call_expr);
2607 return retval;
2610 /* A subroutine of set_storage_via_libcall. Create the tree node
2611 for the function we use for block clears. The first time FOR_CALL
2612 is true, we call assemble_external. */
2614 static GTY(()) tree block_clear_fn;
2616 void
2617 init_block_clear_fn (const char *asmspec)
2619 if (!block_clear_fn)
2621 tree fn, args;
2623 fn = get_identifier ("memset");
2624 args = build_function_type_list (ptr_type_node, ptr_type_node,
2625 integer_type_node, sizetype,
2626 NULL_TREE);
2628 fn = build_decl (FUNCTION_DECL, fn, args);
2629 DECL_EXTERNAL (fn) = 1;
2630 TREE_PUBLIC (fn) = 1;
2631 DECL_ARTIFICIAL (fn) = 1;
2632 TREE_NOTHROW (fn) = 1;
2633 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2634 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2636 block_clear_fn = fn;
2639 if (asmspec)
2640 set_user_assembler_name (block_clear_fn, asmspec);
2643 static tree
2644 clear_storage_libcall_fn (int for_call)
2646 static bool emitted_extern;
2648 if (!block_clear_fn)
2649 init_block_clear_fn (NULL);
2651 if (for_call && !emitted_extern)
2653 emitted_extern = true;
2654 make_decl_rtl (block_clear_fn);
2655 assemble_external (block_clear_fn);
2658 return block_clear_fn;
2661 /* Expand a setmem pattern; return true if successful. */
2663 bool
2664 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2665 unsigned int expected_align, HOST_WIDE_INT expected_size)
2667 /* Try the most limited insn first, because there's no point
2668 including more than one in the machine description unless
2669 the more limited one has some advantage. */
2671 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2672 enum machine_mode mode;
2674 if (expected_align < align)
2675 expected_align = align;
2677 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2678 mode = GET_MODE_WIDER_MODE (mode))
2680 enum insn_code code = setmem_optab[(int) mode];
2681 insn_operand_predicate_fn pred;
2683 if (code != CODE_FOR_nothing
2684 /* We don't need MODE to be narrower than
2685 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2686 the mode mask, as it is returned by the macro, it will
2687 definitely be less than the actual mode mask. */
2688 && ((GET_CODE (size) == CONST_INT
2689 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2690 <= (GET_MODE_MASK (mode) >> 1)))
2691 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2692 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2693 || (*pred) (object, BLKmode))
2694 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2695 || (*pred) (opalign, VOIDmode)))
2697 rtx opsize, opchar;
2698 enum machine_mode char_mode;
2699 rtx last = get_last_insn ();
2700 rtx pat;
2702 opsize = convert_to_mode (mode, size, 1);
2703 pred = insn_data[(int) code].operand[1].predicate;
2704 if (pred != 0 && ! (*pred) (opsize, mode))
2705 opsize = copy_to_mode_reg (mode, opsize);
2707 opchar = val;
2708 char_mode = insn_data[(int) code].operand[2].mode;
2709 if (char_mode != VOIDmode)
2711 opchar = convert_to_mode (char_mode, opchar, 1);
2712 pred = insn_data[(int) code].operand[2].predicate;
2713 if (pred != 0 && ! (*pred) (opchar, char_mode))
2714 opchar = copy_to_mode_reg (char_mode, opchar);
2717 if (insn_data[(int) code].n_operands == 4)
2718 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2719 else
2720 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2721 GEN_INT (expected_align),
2722 GEN_INT (expected_size));
2723 if (pat)
2725 emit_insn (pat);
2726 return true;
2728 else
2729 delete_insns_since (last);
2733 return false;
2737 /* Write to one of the components of the complex value CPLX. Write VAL to
2738 the real part if IMAG_P is false, and the imaginary part if it's true. */
2740 static void
2741 write_complex_part (rtx cplx, rtx val, bool imag_p)
2743 enum machine_mode cmode;
2744 enum machine_mode imode;
2745 unsigned ibitsize;
2747 if (GET_CODE (cplx) == CONCAT)
2749 emit_move_insn (XEXP (cplx, imag_p), val);
2750 return;
2753 cmode = GET_MODE (cplx);
2754 imode = GET_MODE_INNER (cmode);
2755 ibitsize = GET_MODE_BITSIZE (imode);
2757 /* For MEMs simplify_gen_subreg may generate an invalid new address
2758 because, e.g., the original address is considered mode-dependent
2759 by the target, which restricts simplify_subreg from invoking
2760 adjust_address_nv. Instead of preparing fallback support for an
2761 invalid address, we call adjust_address_nv directly. */
2762 if (MEM_P (cplx))
2764 emit_move_insn (adjust_address_nv (cplx, imode,
2765 imag_p ? GET_MODE_SIZE (imode) : 0),
2766 val);
2767 return;
2770 /* If the sub-object is at least word sized, then we know that subregging
2771 will work. This special case is important, since store_bit_field
2772 wants to operate on integer modes, and there's rarely an OImode to
2773 correspond to TCmode. */
2774 if (ibitsize >= BITS_PER_WORD
2775 /* For hard regs we have exact predicates. Assume we can split
2776 the original object if it spans an even number of hard regs.
2777 This special case is important for SCmode on 64-bit platforms
2778 where the natural size of floating-point regs is 32-bit. */
2779 || (REG_P (cplx)
2780 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2781 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2783 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2784 imag_p ? GET_MODE_SIZE (imode) : 0);
2785 if (part)
2787 emit_move_insn (part, val);
2788 return;
2790 else
2791 /* simplify_gen_subreg may fail for sub-word MEMs. */
2792 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2795 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2798 /* Extract one of the components of the complex value CPLX. Extract the
2799 real part if IMAG_P is false, and the imaginary part if it's true. */
2801 static rtx
2802 read_complex_part (rtx cplx, bool imag_p)
2804 enum machine_mode cmode, imode;
2805 unsigned ibitsize;
2807 if (GET_CODE (cplx) == CONCAT)
2808 return XEXP (cplx, imag_p);
2810 cmode = GET_MODE (cplx);
2811 imode = GET_MODE_INNER (cmode);
2812 ibitsize = GET_MODE_BITSIZE (imode);
2814 /* Special case reads from complex constants that got spilled to memory. */
2815 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2817 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2818 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2820 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2821 if (CONSTANT_CLASS_P (part))
2822 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2826 /* For MEMs simplify_gen_subreg may generate an invalid new address
2827 because, e.g., the original address is considered mode-dependent
2828 by the target, which restricts simplify_subreg from invoking
2829 adjust_address_nv. Instead of preparing fallback support for an
2830 invalid address, we call adjust_address_nv directly. */
2831 if (MEM_P (cplx))
2832 return adjust_address_nv (cplx, imode,
2833 imag_p ? GET_MODE_SIZE (imode) : 0);
2835 /* If the sub-object is at least word sized, then we know that subregging
2836 will work. This special case is important, since extract_bit_field
2837 wants to operate on integer modes, and there's rarely an OImode to
2838 correspond to TCmode. */
2839 if (ibitsize >= BITS_PER_WORD
2840 /* For hard regs we have exact predicates. Assume we can split
2841 the original object if it spans an even number of hard regs.
2842 This special case is important for SCmode on 64-bit platforms
2843 where the natural size of floating-point regs is 32-bit. */
2844 || (REG_P (cplx)
2845 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2846 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2848 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2849 imag_p ? GET_MODE_SIZE (imode) : 0);
2850 if (ret)
2851 return ret;
2852 else
2853 /* simplify_gen_subreg may fail for sub-word MEMs. */
2854 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2857 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2858 true, NULL_RTX, imode, imode);
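/* Background note: a complex value that is not held in a single register
   or MEM is commonly represented as (concat real imag), so in the CONCAT
   case above the real part is simply XEXP (cplx, 0) and the imaginary
   part XEXP (cplx, 1); write_complex_part relies on the same layout when
   it stores through XEXP (cplx, imag_p).  */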
2861 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2862 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2863 represented in NEW_MODE. If FORCE is true, this will never happen, as
2864 we'll force-create a SUBREG if needed. */
2866 static rtx
2867 emit_move_change_mode (enum machine_mode new_mode,
2868 enum machine_mode old_mode, rtx x, bool force)
2870 rtx ret;
2872 if (push_operand (x, GET_MODE (x)))
2874 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2875 MEM_COPY_ATTRIBUTES (ret, x);
2877 else if (MEM_P (x))
2879 /* We don't have to worry about changing the address since the
2880 size in bytes is supposed to be the same. */
2881 if (reload_in_progress)
2883 /* Copy the MEM to change the mode and move any
2884 substitutions from the old MEM to the new one. */
2885 ret = adjust_address_nv (x, new_mode, 0);
2886 copy_replacements (x, ret);
2888 else
2889 ret = adjust_address (x, new_mode, 0);
2891 else
2893 /* Note that we do want simplify_subreg's behavior of validating
2894 that the new mode is ok for a hard register. If we were to use
2895 simplify_gen_subreg, we would create the subreg, but would
2896 probably run into the target not being able to implement it. */
2897 /* Except, of course, when FORCE is true, when this is exactly what
2898 we want. Which is needed for CCmodes on some targets. */
2899 if (force)
2900 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2901 else
2902 ret = simplify_subreg (new_mode, x, old_mode, 0);
2905 return ret;
2908 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2909 an integer mode of the same size as MODE. Returns the instruction
2910 emitted, or NULL if such a move could not be generated. */
2912 static rtx
2913 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2915 enum machine_mode imode;
2916 enum insn_code code;
2918 /* There must exist a mode of the exact size we require. */
2919 imode = int_mode_for_mode (mode);
2920 if (imode == BLKmode)
2921 return NULL_RTX;
2923 /* The target must support moves in this mode. */
2924 code = optab_handler (mov_optab, imode)->insn_code;
2925 if (code == CODE_FOR_nothing)
2926 return NULL_RTX;
2928 x = emit_move_change_mode (imode, mode, x, force);
2929 if (x == NULL_RTX)
2930 return NULL_RTX;
2931 y = emit_move_change_mode (imode, mode, y, force);
2932 if (y == NULL_RTX)
2933 return NULL_RTX;
2934 return emit_insn (GEN_FCN (code) (x, y));
2937 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2938 Return an equivalent MEM that does not use an auto-increment. */
2940 static rtx
2941 emit_move_resolve_push (enum machine_mode mode, rtx x)
2943 enum rtx_code code = GET_CODE (XEXP (x, 0));
2944 HOST_WIDE_INT adjust;
2945 rtx temp;
2947 adjust = GET_MODE_SIZE (mode);
2948 #ifdef PUSH_ROUNDING
2949 adjust = PUSH_ROUNDING (adjust);
2950 #endif
2951 if (code == PRE_DEC || code == POST_DEC)
2952 adjust = -adjust;
2953 else if (code == PRE_MODIFY || code == POST_MODIFY)
2955 rtx expr = XEXP (XEXP (x, 0), 1);
2956 HOST_WIDE_INT val;
2958 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2959 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2960 val = INTVAL (XEXP (expr, 1));
2961 if (GET_CODE (expr) == MINUS)
2962 val = -val;
2963 gcc_assert (adjust == val || adjust == -val);
2964 adjust = val;
2967 /* Do not use anti_adjust_stack, since we don't want to update
2968 stack_pointer_delta. */
2969 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2970 GEN_INT (adjust), stack_pointer_rtx,
2971 0, OPTAB_LIB_WIDEN);
2972 if (temp != stack_pointer_rtx)
2973 emit_move_insn (stack_pointer_rtx, temp);
2975 switch (code)
2977 case PRE_INC:
2978 case PRE_DEC:
2979 case PRE_MODIFY:
2980 temp = stack_pointer_rtx;
2981 break;
2982 case POST_INC:
2983 case POST_DEC:
2984 case POST_MODIFY:
2985 temp = plus_constant (stack_pointer_rtx, -adjust);
2986 break;
2987 default:
2988 gcc_unreachable ();
2991 return replace_equiv_address (x, temp);
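/* Illustrative example (hypothetical 32-bit target): given
   x == (mem:SI (pre_dec:SI (reg sp))) and PUSH_ROUNDING (4) == 4, the
   code above emits an explicit sp = sp - 4 and returns
   (mem:SI (reg sp)), i.e. the same slot addressed without the
   auto-modification, which the complex and multi-word movers can then
   fill piecemeal.  */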
2994 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2995 X is known to satisfy push_operand, and MODE is known to be complex.
2996 Returns the last instruction emitted. */
2999 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3001 enum machine_mode submode = GET_MODE_INNER (mode);
3002 bool imag_first;
3004 #ifdef PUSH_ROUNDING
3005 unsigned int submodesize = GET_MODE_SIZE (submode);
3007 /* In case we output to the stack, but the size is smaller than the
3008 machine can push exactly, we need to use move instructions. */
3009 if (PUSH_ROUNDING (submodesize) != submodesize)
3011 x = emit_move_resolve_push (mode, x);
3012 return emit_move_insn (x, y);
3014 #endif
3016 /* Note that the real part always precedes the imag part in memory
3017 regardless of machine's endianness. */
3018 switch (GET_CODE (XEXP (x, 0)))
3020 case PRE_DEC:
3021 case POST_DEC:
3022 imag_first = true;
3023 break;
3024 case PRE_INC:
3025 case POST_INC:
3026 imag_first = false;
3027 break;
3028 default:
3029 gcc_unreachable ();
3032 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3033 read_complex_part (y, imag_first));
3034 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3035 read_complex_part (y, !imag_first));
3038 /* A subroutine of emit_move_complex. Perform the move from Y to X
3039 via two moves of the parts. Returns the last instruction emitted. */
3042 emit_move_complex_parts (rtx x, rtx y)
3044 /* Show the output dies here. This is necessary for SUBREGs
3045 of pseudos since we cannot track their lifetimes correctly;
3046 hard regs shouldn't appear here except as return values. */
3047 if (!reload_completed && !reload_in_progress
3048 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3049 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3051 write_complex_part (x, read_complex_part (y, false), false);
3052 write_complex_part (x, read_complex_part (y, true), true);
3054 return get_last_insn ();
3057 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3058 MODE is known to be complex. Returns the last instruction emitted. */
3060 static rtx
3061 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3063 bool try_int;
3065 /* Need to take special care for pushes, to maintain proper ordering
3066 of the data, and possibly extra padding. */
3067 if (push_operand (x, mode))
3068 return emit_move_complex_push (mode, x, y);
3070 /* See if we can coerce the target into moving both values at once. */
3072 /* Move floating point as parts. */
3073 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3074 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3075 try_int = false;
3076 /* Not possible if the values are inherently not adjacent. */
3077 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3078 try_int = false;
3079 /* Is possible if both are registers (or subregs of registers). */
3080 else if (register_operand (x, mode) && register_operand (y, mode))
3081 try_int = true;
3082 /* If one of the operands is a memory, and alignment constraints
3083 are friendly enough, we may be able to do combined memory operations.
3084 We do not attempt this if Y is a constant because that combination is
3085 usually better with the by-parts thing below. */
3086 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3087 && (!STRICT_ALIGNMENT
3088 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3089 try_int = true;
3090 else
3091 try_int = false;
3093 if (try_int)
3095 rtx ret;
3097 /* For memory to memory moves, optimal behavior can be had with the
3098 existing block move logic. */
3099 if (MEM_P (x) && MEM_P (y))
3101 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3102 BLOCK_OP_NO_LIBCALL);
3103 return get_last_insn ();
3106 ret = emit_move_via_integer (mode, x, y, true);
3107 if (ret)
3108 return ret;
3111 return emit_move_complex_parts (x, y);
3114 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3115 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3117 static rtx
3118 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3120 rtx ret;
3122 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3123 if (mode != CCmode)
3125 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3126 if (code != CODE_FOR_nothing)
3128 x = emit_move_change_mode (CCmode, mode, x, true);
3129 y = emit_move_change_mode (CCmode, mode, y, true);
3130 return emit_insn (GEN_FCN (code) (x, y));
3134 /* Otherwise, find the MODE_INT mode of the same width. */
3135 ret = emit_move_via_integer (mode, x, y, false);
3136 gcc_assert (ret != NULL);
3137 return ret;
3140 /* Return true if word I of OP lies entirely in the
3141 undefined bits of a paradoxical subreg. */
3143 static bool
3144 undefined_operand_subword_p (const_rtx op, int i)
3146 enum machine_mode innermode, innermostmode;
3147 int offset;
3148 if (GET_CODE (op) != SUBREG)
3149 return false;
3150 innermode = GET_MODE (op);
3151 innermostmode = GET_MODE (SUBREG_REG (op));
3152 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3153 /* The SUBREG_BYTE represents offset, as if the value were stored in
3154 memory, except for a paradoxical subreg where we define
3155 SUBREG_BYTE to be 0; undo this exception as in
3156 simplify_subreg. */
3157 if (SUBREG_BYTE (op) == 0
3158 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3160 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3161 if (WORDS_BIG_ENDIAN)
3162 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3163 if (BYTES_BIG_ENDIAN)
3164 offset += difference % UNITS_PER_WORD;
3166 if (offset >= GET_MODE_SIZE (innermostmode)
3167 || offset <= -GET_MODE_SIZE (word_mode))
3168 return true;
3169 return false;
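/* Illustrative example (hypothetical little-endian 64-bit target with
   UNITS_PER_WORD == 8): for the paradoxical subreg
   (subreg:TI (reg:DI 100) 0), word 0 is the DImode register itself while
   word 1 lies entirely beyond GET_MODE_SIZE (DImode), so this function
   returns true for i == 1 and emit_move_multi_word skips that word's
   move.  */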
3172 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3173 MODE is any multi-word or full-word mode that lacks a move_insn
3174 pattern. Note that you will get better code if you define such
3175 patterns, even if they must turn into multiple assembler instructions. */
3177 static rtx
3178 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3180 rtx last_insn = 0;
3181 rtx seq, inner;
3182 bool need_clobber;
3183 int i;
3185 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3187 /* If X is a push on the stack, do the push now and replace
3188 X with a reference to the stack pointer. */
3189 if (push_operand (x, mode))
3190 x = emit_move_resolve_push (mode, x);
3192 /* If we are in reload, see if either operand is a MEM whose address
3193 is scheduled for replacement. */
3194 if (reload_in_progress && MEM_P (x)
3195 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3196 x = replace_equiv_address_nv (x, inner);
3197 if (reload_in_progress && MEM_P (y)
3198 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3199 y = replace_equiv_address_nv (y, inner);
3201 start_sequence ();
3203 need_clobber = false;
3204 for (i = 0;
3205 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3206 i++)
3208 rtx xpart = operand_subword (x, i, 1, mode);
3209 rtx ypart;
3211 /* Do not generate code for a move if it would come entirely
3212 from the undefined bits of a paradoxical subreg. */
3213 if (undefined_operand_subword_p (y, i))
3214 continue;
3216 ypart = operand_subword (y, i, 1, mode);
3218 /* If we can't get a part of Y, put Y into memory if it is a
3219 constant. Otherwise, force it into a register. Then we must
3220 be able to get a part of Y. */
3221 if (ypart == 0 && CONSTANT_P (y))
3223 y = use_anchored_address (force_const_mem (mode, y));
3224 ypart = operand_subword (y, i, 1, mode);
3226 else if (ypart == 0)
3227 ypart = operand_subword_force (y, i, mode);
3229 gcc_assert (xpart && ypart);
3231 need_clobber |= (GET_CODE (xpart) == SUBREG);
3233 last_insn = emit_move_insn (xpart, ypart);
3236 seq = get_insns ();
3237 end_sequence ();
3239 /* Show the output dies here. This is necessary for SUBREGs
3240 of pseudos since we cannot track their lifetimes correctly;
3241 hard regs shouldn't appear here except as return values.
3242 We never want to emit such a clobber after reload. */
3243 if (x != y
3244 && ! (reload_in_progress || reload_completed)
3245 && need_clobber != 0)
3246 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3248 emit_insn (seq);
3250 return last_insn;
3253 /* Low level part of emit_move_insn.
3254 Called just like emit_move_insn, but assumes X and Y
3255 are basically valid. */
3258 emit_move_insn_1 (rtx x, rtx y)
3260 enum machine_mode mode = GET_MODE (x);
3261 enum insn_code code;
3263 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3265 code = optab_handler (mov_optab, mode)->insn_code;
3266 if (code != CODE_FOR_nothing)
3267 return emit_insn (GEN_FCN (code) (x, y));
3269 /* Expand complex moves by moving real part and imag part. */
3270 if (COMPLEX_MODE_P (mode))
3271 return emit_move_complex (mode, x, y);
3273 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3275 rtx result = emit_move_via_integer (mode, x, y, true);
3277 /* If we can't find an integer mode, use multi words. */
3278 if (result)
3279 return result;
3280 else
3281 return emit_move_multi_word (mode, x, y);
3284 if (GET_MODE_CLASS (mode) == MODE_CC)
3285 return emit_move_ccmode (mode, x, y);
3287 /* Try using a move pattern for the corresponding integer mode. This is
3288 only safe when simplify_subreg can convert MODE constants into integer
3289 constants. At present, it can only do this reliably if the value
3290 fits within a HOST_WIDE_INT. */
3291 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3293 rtx ret = emit_move_via_integer (mode, x, y, false);
3294 if (ret)
3295 return ret;
3298 return emit_move_multi_word (mode, x, y);
3301 /* Generate code to copy Y into X.
3302 Both Y and X must have the same mode, except that
3303 Y can be a constant with VOIDmode.
3304 This mode cannot be BLKmode; use emit_block_move for that.
3306 Return the last instruction emitted. */
3309 emit_move_insn (rtx x, rtx y)
3311 enum machine_mode mode = GET_MODE (x);
3312 rtx y_cst = NULL_RTX;
3313 rtx last_insn, set;
3315 gcc_assert (mode != BLKmode
3316 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3318 if (CONSTANT_P (y))
3320 if (optimize
3321 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3322 && (last_insn = compress_float_constant (x, y)))
3323 return last_insn;
3325 y_cst = y;
3327 if (!LEGITIMATE_CONSTANT_P (y))
3329 y = force_const_mem (mode, y);
3331 /* If the target's cannot_force_const_mem prevented the spill,
3332 assume that the target's move expanders will also take care
3333 of the non-legitimate constant. */
3334 if (!y)
3335 y = y_cst;
3336 else
3337 y = use_anchored_address (y);
3341 /* If X or Y are memory references, verify that their addresses are valid
3342 for the machine. */
3343 if (MEM_P (x)
3344 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3345 && ! push_operand (x, GET_MODE (x)))
3346 || (flag_force_addr
3347 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3348 x = validize_mem (x);
3350 if (MEM_P (y)
3351 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3352 || (flag_force_addr
3353 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3354 y = validize_mem (y);
3356 gcc_assert (mode != BLKmode);
3358 last_insn = emit_move_insn_1 (x, y);
3360 if (y_cst && REG_P (x)
3361 && (set = single_set (last_insn)) != NULL_RTX
3362 && SET_DEST (set) == x
3363 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3364 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3366 return last_insn;
3369 /* If Y is representable exactly in a narrower mode, and the target can
3370 perform the extension directly from constant or memory, then emit the
3371 move as an extension. */
3373 static rtx
3374 compress_float_constant (rtx x, rtx y)
3376 enum machine_mode dstmode = GET_MODE (x);
3377 enum machine_mode orig_srcmode = GET_MODE (y);
3378 enum machine_mode srcmode;
3379 REAL_VALUE_TYPE r;
3380 int oldcost, newcost;
3382 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3384 if (LEGITIMATE_CONSTANT_P (y))
3385 oldcost = rtx_cost (y, SET);
3386 else
3387 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3389 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3390 srcmode != orig_srcmode;
3391 srcmode = GET_MODE_WIDER_MODE (srcmode))
3393 enum insn_code ic;
3394 rtx trunc_y, last_insn;
3396 /* Skip if the target can't extend this way. */
3397 ic = can_extend_p (dstmode, srcmode, 0);
3398 if (ic == CODE_FOR_nothing)
3399 continue;
3401 /* Skip if the narrowed value isn't exact. */
3402 if (! exact_real_truncate (srcmode, &r))
3403 continue;
3405 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3407 if (LEGITIMATE_CONSTANT_P (trunc_y))
3409 /* Skip if the target needs extra instructions to perform
3410 the extension. */
3411 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3412 continue;
3413 /* This is valid, but may not be cheaper than the original. */
3414 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3415 if (oldcost < newcost)
3416 continue;
3418 else if (float_extend_from_mem[dstmode][srcmode])
3420 trunc_y = force_const_mem (srcmode, trunc_y);
3421 /* This is valid, but may not be cheaper than the original. */
3422 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3423 if (oldcost < newcost)
3424 continue;
3425 trunc_y = validize_mem (trunc_y);
3427 else
3428 continue;
3430 /* For CSE's benefit, force the compressed constant pool entry
3431 into a new pseudo. This constant may be used in different modes,
3432 and if not, combine will put things back together for us. */
3433 trunc_y = force_reg (srcmode, trunc_y);
3434 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3435 last_insn = get_last_insn ();
3437 if (REG_P (x))
3438 set_unique_reg_note (last_insn, REG_EQUAL, y);
3440 return last_insn;
3443 return NULL_RTX;
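/* Worked example (hypothetical costs): moving the DFmode constant 1.5
   into a register.  1.5 truncates exactly to SFmode, so if the target
   has a way to extend SFmode to DFmode and the narrower constant (or its
   constant pool entry) is cheaper than the original, the move is emitted
   as (float_extend:DF ...) of the SFmode value instead of loading the
   wider constant directly.  */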
3446 /* Pushing data onto the stack. */
3448 /* Push a block of length SIZE (perhaps variable)
3449 and return an rtx to address the beginning of the block.
3450 The value may be virtual_outgoing_args_rtx.
3452 EXTRA is the number of bytes of padding to push in addition to SIZE.
3453 BELOW nonzero means this padding comes at low addresses;
3454 otherwise, the padding comes at high addresses. */
3457 push_block (rtx size, int extra, int below)
3459 rtx temp;
3461 size = convert_modes (Pmode, ptr_mode, size, 1);
3462 if (CONSTANT_P (size))
3463 anti_adjust_stack (plus_constant (size, extra));
3464 else if (REG_P (size) && extra == 0)
3465 anti_adjust_stack (size);
3466 else
3468 temp = copy_to_mode_reg (Pmode, size);
3469 if (extra != 0)
3470 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3471 temp, 0, OPTAB_LIB_WIDEN);
3472 anti_adjust_stack (temp);
3475 #ifndef STACK_GROWS_DOWNWARD
3476 if (0)
3477 #else
3478 if (1)
3479 #endif
3481 temp = virtual_outgoing_args_rtx;
3482 if (extra != 0 && below)
3483 temp = plus_constant (temp, extra);
3485 else
3487 if (GET_CODE (size) == CONST_INT)
3488 temp = plus_constant (virtual_outgoing_args_rtx,
3489 -INTVAL (size) - (below ? 0 : extra));
3490 else if (extra != 0 && !below)
3491 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3492 negate_rtx (Pmode, plus_constant (size, extra)));
3493 else
3494 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3495 negate_rtx (Pmode, size));
3498 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3501 #ifdef PUSH_ROUNDING
3503 /* Emit single push insn. */
3505 static void
3506 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3508 rtx dest_addr;
3509 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3510 rtx dest;
3511 enum insn_code icode;
3512 insn_operand_predicate_fn pred;
3514 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3515 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3516 a MEM representing the push operation to the move expander. */
3517 icode = optab_handler (push_optab, mode)->insn_code;
3518 if (icode != CODE_FOR_nothing)
3520 if (((pred = insn_data[(int) icode].operand[0].predicate)
3521 && !((*pred) (x, mode))))
3522 x = force_reg (mode, x);
3523 emit_insn (GEN_FCN (icode) (x));
3524 return;
3526 if (GET_MODE_SIZE (mode) == rounded_size)
3527 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3528 /* If we are to pad downward, adjust the stack pointer first and
3529 then store X into the stack location using an offset. This is
3530 because emit_move_insn does not know how to pad; it does not have
3531 access to type. */
3532 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3534 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3535 HOST_WIDE_INT offset;
3537 emit_move_insn (stack_pointer_rtx,
3538 expand_binop (Pmode,
3539 #ifdef STACK_GROWS_DOWNWARD
3540 sub_optab,
3541 #else
3542 add_optab,
3543 #endif
3544 stack_pointer_rtx,
3545 GEN_INT (rounded_size),
3546 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3548 offset = (HOST_WIDE_INT) padding_size;
3549 #ifdef STACK_GROWS_DOWNWARD
3550 if (STACK_PUSH_CODE == POST_DEC)
3551 /* We have already decremented the stack pointer, so get the
3552 previous value. */
3553 offset += (HOST_WIDE_INT) rounded_size;
3554 #else
3555 if (STACK_PUSH_CODE == POST_INC)
3556 /* We have already incremented the stack pointer, so get the
3557 previous value. */
3558 offset -= (HOST_WIDE_INT) rounded_size;
3559 #endif
3560 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3562 else
3564 #ifdef STACK_GROWS_DOWNWARD
3565 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3566 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3567 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3568 #else
3569 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3570 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3571 GEN_INT (rounded_size));
3572 #endif
3573 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3576 dest = gen_rtx_MEM (mode, dest_addr);
3578 if (type != 0)
3580 set_mem_attributes (dest, type, 1);
3582 if (flag_optimize_sibling_calls)
3583 /* Function incoming arguments may overlap with sibling call
3584 outgoing arguments and we cannot allow reordering of reads
3585 from function arguments with stores to outgoing arguments
3586 of sibling calls. */
3587 set_mem_alias_set (dest, 0);
3589 emit_move_insn (dest, x);
3591 #endif
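/* Worked example for the downward-padding branch above (hypothetical
   target parameters): pushing an HImode argument whose slot rounds up to
   4 bytes (PUSH_ROUNDING (2) == 4) on a STACK_GROWS_DOWNWARD target with
   STACK_PUSH_CODE == PRE_DEC gives padding_size == 2; the stack pointer
   is first lowered by 4 and the 2-byte value is then stored at sp + 2,
   leaving the pad bytes at the lower addresses as `downward' padding
   requires.  */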
3593 /* Generate code to push X onto the stack, assuming it has mode MODE and
3594 type TYPE.
3595 MODE is redundant except when X is a CONST_INT (since they don't
3596 carry mode info).
3597 SIZE is an rtx for the size of data to be copied (in bytes),
3598 needed only if X is BLKmode.
3600 ALIGN (in bits) is maximum alignment we can assume.
3602 If PARTIAL and REG are both nonzero, then copy that many of the first
3603 bytes of X into registers starting with REG, and push the rest of X.
3604 The amount of space pushed is decreased by PARTIAL bytes.
3605 REG must be a hard register in this case.
3606 If REG is zero but PARTIAL is not, take all other actions for an
3607 argument partially in registers, but do not actually load any
3608 registers.
3610 EXTRA is the amount in bytes of extra space to leave next to this arg.
3611 This is ignored if an argument block has already been allocated.
3613 On a machine that lacks real push insns, ARGS_ADDR is the address of
3614 the bottom of the argument block for this call. We use indexing off there
3615 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3616 argument block has not been preallocated.
3618 ARGS_SO_FAR is the size of args previously pushed for this call.
3620 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3621 for arguments passed in registers. If nonzero, it will be the number
3622 of bytes required. */
3624 void
3625 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3626 unsigned int align, int partial, rtx reg, int extra,
3627 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3628 rtx alignment_pad)
3630 rtx xinner;
3631 enum direction stack_direction
3632 #ifdef STACK_GROWS_DOWNWARD
3633 = downward;
3634 #else
3635 = upward;
3636 #endif
3638 /* Decide where to pad the argument: `downward' for below,
3639 `upward' for above, or `none' for don't pad it.
3640 Default is below for small data on big-endian machines; else above. */
3641 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3643 /* Invert direction if stack is post-decrement.
3644 FIXME: why? */
3645 if (STACK_PUSH_CODE == POST_DEC)
3646 if (where_pad != none)
3647 where_pad = (where_pad == downward ? upward : downward);
3649 xinner = x;
3651 if (mode == BLKmode
3652 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3654 /* Copy a block into the stack, entirely or partially. */
3656 rtx temp;
3657 int used;
3658 int offset;
3659 int skip;
3661 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3662 used = partial - offset;
3664 if (mode != BLKmode)
3666 /* A value is to be stored in an insufficiently aligned
3667 stack slot; copy via a suitably aligned slot if
3668 necessary. */
3669 size = GEN_INT (GET_MODE_SIZE (mode));
3670 if (!MEM_P (xinner))
3672 temp = assign_temp (type, 0, 1, 1);
3673 emit_move_insn (temp, xinner);
3674 xinner = temp;
3678 gcc_assert (size);
3680 /* USED is now the # of bytes we need not copy to the stack
3681 because registers will take care of them. */
3683 if (partial != 0)
3684 xinner = adjust_address (xinner, BLKmode, used);
3686 /* If the partial register-part of the arg counts in its stack size,
3687 skip the part of stack space corresponding to the registers.
3688 Otherwise, start copying to the beginning of the stack space,
3689 by setting SKIP to 0. */
3690 skip = (reg_parm_stack_space == 0) ? 0 : used;
3692 #ifdef PUSH_ROUNDING
3693 /* Do it with several push insns if that doesn't take lots of insns
3694 and if there is no difficulty with push insns that skip bytes
3695 on the stack for alignment purposes. */
3696 if (args_addr == 0
3697 && PUSH_ARGS
3698 && GET_CODE (size) == CONST_INT
3699 && skip == 0
3700 && MEM_ALIGN (xinner) >= align
3701 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3702 /* Here we avoid the case of a structure whose weak alignment
3703 forces many pushes of a small amount of data,
3704 and such small pushes do rounding that causes trouble. */
3705 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3706 || align >= BIGGEST_ALIGNMENT
3707 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3708 == (align / BITS_PER_UNIT)))
3709 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3711 /* Push padding now if padding above and stack grows down,
3712 or if padding below and stack grows up.
3713 But if space already allocated, this has already been done. */
3714 if (extra && args_addr == 0
3715 && where_pad != none && where_pad != stack_direction)
3716 anti_adjust_stack (GEN_INT (extra));
3718 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3720 else
3721 #endif /* PUSH_ROUNDING */
3723 rtx target;
3725 /* Otherwise make space on the stack and copy the data
3726 to the address of that space. */
3728 /* Deduct words put into registers from the size we must copy. */
3729 if (partial != 0)
3731 if (GET_CODE (size) == CONST_INT)
3732 size = GEN_INT (INTVAL (size) - used);
3733 else
3734 size = expand_binop (GET_MODE (size), sub_optab, size,
3735 GEN_INT (used), NULL_RTX, 0,
3736 OPTAB_LIB_WIDEN);
3739 /* Get the address of the stack space.
3740 In this case, we do not deal with EXTRA separately.
3741 A single stack adjust will do. */
3742 if (! args_addr)
3744 temp = push_block (size, extra, where_pad == downward);
3745 extra = 0;
3747 else if (GET_CODE (args_so_far) == CONST_INT)
3748 temp = memory_address (BLKmode,
3749 plus_constant (args_addr,
3750 skip + INTVAL (args_so_far)));
3751 else
3752 temp = memory_address (BLKmode,
3753 plus_constant (gen_rtx_PLUS (Pmode,
3754 args_addr,
3755 args_so_far),
3756 skip));
3758 if (!ACCUMULATE_OUTGOING_ARGS)
3760 /* If the source is referenced relative to the stack pointer,
3761 copy it to another register to stabilize it. We do not need
3762 to do this if we know that we won't be changing sp. */
3764 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3765 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3766 temp = copy_to_reg (temp);
3769 target = gen_rtx_MEM (BLKmode, temp);
3771 /* We do *not* set_mem_attributes here, because incoming arguments
3772 may overlap with sibling call outgoing arguments and we cannot
3773 allow reordering of reads from function arguments with stores
3774 to outgoing arguments of sibling calls. We do, however, want
3775 to record the alignment of the stack slot. */
3776 /* ALIGN may well be better aligned than TYPE, e.g. due to
3777 PARM_BOUNDARY. Assume the caller isn't lying. */
3778 set_mem_align (target, align);
3780 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3783 else if (partial > 0)
3785 /* Scalar partly in registers. */
3787 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3788 int i;
3789 int not_stack;
3790 /* # bytes of start of argument
3791 that we must make space for but need not store. */
3792 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3793 int args_offset = INTVAL (args_so_far);
3794 int skip;
3796 /* Push padding now if padding above and stack grows down,
3797 or if padding below and stack grows up.
3798 But if space already allocated, this has already been done. */
3799 if (extra && args_addr == 0
3800 && where_pad != none && where_pad != stack_direction)
3801 anti_adjust_stack (GEN_INT (extra));
3803 /* If we make space by pushing it, we might as well push
3804 the real data. Otherwise, we can leave OFFSET nonzero
3805 and leave the space uninitialized. */
3806 if (args_addr == 0)
3807 offset = 0;
3809 /* Now NOT_STACK gets the number of words that we don't need to
3810 allocate on the stack. Convert OFFSET to words too. */
3811 not_stack = (partial - offset) / UNITS_PER_WORD;
3812 offset /= UNITS_PER_WORD;
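/* For instance, with 4-byte words, a 4-byte PARM_BOUNDARY and
   PARTIAL == 8 (assumed values), OFFSET ends up 0 and NOT_STACK
   ends up 2: the first two words are already in registers.  */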
3814 /* If the partial register-part of the arg counts in its stack size,
3815 skip the part of stack space corresponding to the registers.
3816 Otherwise, start copying to the beginning of the stack space,
3817 by setting SKIP to 0. */
3818 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3820 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3821 x = validize_mem (force_const_mem (mode, x));
3823 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3824 SUBREGs of such registers are not allowed. */
3825 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3826 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3827 x = copy_to_reg (x);
3829 /* Loop over all the words allocated on the stack for this arg. */
3830 /* We can do it by words, because any scalar bigger than a word
3831 has a size that is a multiple of a word. */
3832 #ifndef PUSH_ARGS_REVERSED
3833 for (i = not_stack; i < size; i++)
3834 #else
3835 for (i = size - 1; i >= not_stack; i--)
3836 #endif
3837 if (i >= not_stack + offset)
3838 emit_push_insn (operand_subword_force (x, i, mode),
3839 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3840 0, args_addr,
3841 GEN_INT (args_offset + ((i - not_stack + skip)
3842 * UNITS_PER_WORD)),
3843 reg_parm_stack_space, alignment_pad);
3845 else
3847 rtx addr;
3848 rtx dest;
3850 /* Push padding now if padding above and stack grows down,
3851 or if padding below and stack grows up.
3852 But if space already allocated, this has already been done. */
3853 if (extra && args_addr == 0
3854 && where_pad != none && where_pad != stack_direction)
3855 anti_adjust_stack (GEN_INT (extra));
3857 #ifdef PUSH_ROUNDING
3858 if (args_addr == 0 && PUSH_ARGS)
3859 emit_single_push_insn (mode, x, type);
3860 else
3861 #endif
3863 if (GET_CODE (args_so_far) == CONST_INT)
3864 addr
3865 = memory_address (mode,
3866 plus_constant (args_addr,
3867 INTVAL (args_so_far)));
3868 else
3869 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3870 args_so_far));
3871 dest = gen_rtx_MEM (mode, addr);
3873 /* We do *not* set_mem_attributes here, because incoming arguments
3874 may overlap with sibling call outgoing arguments and we cannot
3875 allow reordering of reads from function arguments with stores
3876 to outgoing arguments of sibling calls. We do, however, want
3877 to record the alignment of the stack slot. */
3878 /* ALIGN may well be better aligned than TYPE, e.g. due to
3879 PARM_BOUNDARY. Assume the caller isn't lying. */
3880 set_mem_align (dest, align);
3882 emit_move_insn (dest, x);
3886 /* If part should go in registers, copy that part
3887 into the appropriate registers. Do this now, at the end,
3888 since mem-to-mem copies above may do function calls. */
3889 if (partial > 0 && reg != 0)
3891 /* Handle calls that pass values in multiple non-contiguous locations.
3892 The Irix 6 ABI has examples of this. */
3893 if (GET_CODE (reg) == PARALLEL)
3894 emit_group_load (reg, x, type, -1);
3895 else
3897 gcc_assert (partial % UNITS_PER_WORD == 0);
3898 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3902 if (extra && args_addr == 0 && where_pad == stack_direction)
3903 anti_adjust_stack (GEN_INT (extra));
3905 if (alignment_pad && args_addr == 0)
3906 anti_adjust_stack (alignment_pad);
3909 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3910 operations. */
3912 static rtx
3913 get_subtarget (rtx x)
3915 return (optimize
3916 || x == 0
3917 /* Only registers can be subtargets. */
3918 || !REG_P (x)
3919 /* Don't use hard regs to avoid extending their life. */
3920 || REGNO (x) < FIRST_PSEUDO_REGISTER
3921 ? 0 : x);
3924 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3925 FIELD is a bitfield. Returns true if the optimization was successful,
3926 and there's nothing else to do. */
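/* As an illustration (not code from this file), the kind of source this
   helps with is

     struct s { unsigned f : 3; } x;
     ...
     x.f |= 5;

   where the |= can be applied directly to the word containing the
   bit-field, with the constant masked and shifted into place, instead of
   extracting, modifying and re-inserting the field.  */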
3928 static bool
3929 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3930 unsigned HOST_WIDE_INT bitpos,
3931 enum machine_mode mode1, rtx str_rtx,
3932 tree to, tree src)
3934 enum machine_mode str_mode = GET_MODE (str_rtx);
3935 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3936 tree op0, op1;
3937 rtx value, result;
3938 optab binop;
3940 if (mode1 != VOIDmode
3941 || bitsize >= BITS_PER_WORD
3942 || str_bitsize > BITS_PER_WORD
3943 || TREE_SIDE_EFFECTS (to)
3944 || TREE_THIS_VOLATILE (to))
3945 return false;
3947 STRIP_NOPS (src);
3948 if (!BINARY_CLASS_P (src)
3949 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3950 return false;
3952 op0 = TREE_OPERAND (src, 0);
3953 op1 = TREE_OPERAND (src, 1);
3954 STRIP_NOPS (op0);
3956 if (!operand_equal_p (to, op0, 0))
3957 return false;
3959 if (MEM_P (str_rtx))
3961 unsigned HOST_WIDE_INT offset1;
3963 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3964 str_mode = word_mode;
3965 str_mode = get_best_mode (bitsize, bitpos,
3966 MEM_ALIGN (str_rtx), str_mode, 0);
3967 if (str_mode == VOIDmode)
3968 return false;
3969 str_bitsize = GET_MODE_BITSIZE (str_mode);
3971 offset1 = bitpos;
3972 bitpos %= str_bitsize;
3973 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3974 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3976 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3977 return false;
3979 /* If the bit field covers the whole REG/MEM, store_field
3980 will likely generate better code. */
3981 if (bitsize >= str_bitsize)
3982 return false;
3984 /* We can't handle fields split across multiple entities. */
3985 if (bitpos + bitsize > str_bitsize)
3986 return false;
3988 if (BYTES_BIG_ENDIAN)
3989 bitpos = str_bitsize - bitpos - bitsize;
3991 switch (TREE_CODE (src))
3993 case PLUS_EXPR:
3994 case MINUS_EXPR:
3995 /* For now, just optimize the case of the topmost bitfield,
3996 where we don't need to do any masking, and also
3997 1-bit bitfields where xor can be used.
3998 We might win by one instruction for the other bitfields
3999 too if insv/extv instructions aren't used, so that
4000 can be added later. */
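/* For example, when BITSIZE is 1, adding a constant to the field either
   flips the bit or leaves it alone, so the addition is turned below into
   an xor with the masked, shifted constant.  */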
4001 if (bitpos + bitsize != str_bitsize
4002 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4003 break;
4005 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4006 value = convert_modes (str_mode,
4007 TYPE_MODE (TREE_TYPE (op1)), value,
4008 TYPE_UNSIGNED (TREE_TYPE (op1)));
4010 /* We may be accessing data outside the field, which means
4011 we can alias adjacent data. */
4012 if (MEM_P (str_rtx))
4014 str_rtx = shallow_copy_rtx (str_rtx);
4015 set_mem_alias_set (str_rtx, 0);
4016 set_mem_expr (str_rtx, 0);
4019 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4020 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4022 value = expand_and (str_mode, value, const1_rtx, NULL);
4023 binop = xor_optab;
4025 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4026 build_int_cst (NULL_TREE, bitpos),
4027 NULL_RTX, 1);
4028 result = expand_binop (str_mode, binop, str_rtx,
4029 value, str_rtx, 1, OPTAB_WIDEN);
4030 if (result != str_rtx)
4031 emit_move_insn (str_rtx, result);
4032 return true;
4034 case BIT_IOR_EXPR:
4035 case BIT_XOR_EXPR:
4036 if (TREE_CODE (op1) != INTEGER_CST)
4037 break;
4038 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4039 value = convert_modes (GET_MODE (str_rtx),
4040 TYPE_MODE (TREE_TYPE (op1)), value,
4041 TYPE_UNSIGNED (TREE_TYPE (op1)));
4043 /* We may be accessing data outside the field, which means
4044 we can alias adjacent data. */
4045 if (MEM_P (str_rtx))
4047 str_rtx = shallow_copy_rtx (str_rtx);
4048 set_mem_alias_set (str_rtx, 0);
4049 set_mem_expr (str_rtx, 0);
4052 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4053 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4055 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4056 - 1);
4057 value = expand_and (GET_MODE (str_rtx), value, mask,
4058 NULL_RTX);
4060 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4061 build_int_cst (NULL_TREE, bitpos),
4062 NULL_RTX, 1);
4063 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4064 value, str_rtx, 1, OPTAB_WIDEN);
4065 if (result != str_rtx)
4066 emit_move_insn (str_rtx, result);
4067 return true;
4069 default:
4070 break;
4073 return false;
4077 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4078 is true, try generating a nontemporal store. */
4080 void
4081 expand_assignment (tree to, tree from, bool nontemporal)
4083 rtx to_rtx = 0;
4084 rtx result;
4086 /* Don't crash if the lhs of the assignment was erroneous. */
4087 if (TREE_CODE (to) == ERROR_MARK)
4089 result = expand_normal (from);
4090 return;
4093 /* Optimize away no-op moves without side-effects. */
4094 if (operand_equal_p (to, from, 0))
4095 return;
4097 /* Assignment of a structure component needs special treatment
4098 if the structure component's rtx is not simply a MEM.
4099 Assignment of an array element at a constant index, and assignment of
4100 an array element in an unaligned packed structure field, have the same
4101 problem. */
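/* Illustrative examples with assumed declarations: stores such as
   "x.b = v" where x is a register-allocated struct, or "p->a[3] = v"
   where the array sits in a packed field, take this path so that the
   containing object, bit offset and width are computed first.  */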
4102 if (handled_component_p (to)
4103 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4105 enum machine_mode mode1;
4106 HOST_WIDE_INT bitsize, bitpos;
4107 tree offset;
4108 int unsignedp;
4109 int volatilep = 0;
4110 tree tem;
4112 push_temp_slots ();
4113 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4114 &unsignedp, &volatilep, true);
4116 /* If we are going to use store_bit_field and extract_bit_field,
4117 make sure to_rtx will be safe for multiple use. */
4119 to_rtx = expand_normal (tem);
4121 if (offset != 0)
4123 rtx offset_rtx;
4125 if (!MEM_P (to_rtx))
4127 /* We can get constant negative offsets into arrays with broken
4128 user code. Translate this to a trap instead of ICEing. */
4129 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4130 expand_builtin_trap ();
4131 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4134 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4135 #ifdef POINTERS_EXTEND_UNSIGNED
4136 if (GET_MODE (offset_rtx) != Pmode)
4137 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4138 #else
4139 if (GET_MODE (offset_rtx) != ptr_mode)
4140 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4141 #endif
4143 /* A constant address in TO_RTX can have VOIDmode; we must not try
4144 to call force_reg for that case. Avoid that case. */
4145 if (MEM_P (to_rtx)
4146 && GET_MODE (to_rtx) == BLKmode
4147 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4148 && bitsize > 0
4149 && (bitpos % bitsize) == 0
4150 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4151 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4153 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4154 bitpos = 0;
4157 to_rtx = offset_address (to_rtx, offset_rtx,
4158 highest_pow2_factor_for_target (to,
4159 offset));
4162 /* Handle expand_expr of a complex value returning a CONCAT. */
4163 if (GET_CODE (to_rtx) == CONCAT)
4165 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4167 gcc_assert (bitpos == 0);
4168 result = store_expr (from, to_rtx, false, nontemporal);
4170 else
4172 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4173 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4174 nontemporal);
4177 else
4179 if (MEM_P (to_rtx))
4181 /* If the field is at offset zero, we could have been given the
4182 DECL_RTX of the parent struct. Don't munge it. */
4183 to_rtx = shallow_copy_rtx (to_rtx);
4185 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4187 /* Deal with volatile and readonly fields. The former is only
4188 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4189 if (volatilep)
4190 MEM_VOLATILE_P (to_rtx) = 1;
4191 if (component_uses_parent_alias_set (to))
4192 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4195 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4196 to_rtx, to, from))
4197 result = NULL;
4198 else
4199 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4200 TREE_TYPE (tem), get_alias_set (to),
4201 nontemporal);
4204 if (result)
4205 preserve_temp_slots (result);
4206 free_temp_slots ();
4207 pop_temp_slots ();
4208 return;
4211 /* If the rhs is a function call and its value is not an aggregate,
4212 call the function before we start to compute the lhs.
4213 This is needed for correct code for cases such as
4214 val = setjmp (buf) on machines where reference to val
4215 requires loading up part of an address in a separate insn.
4217 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4218 since it might be a promoted variable where the zero- or sign-extension
4219 needs to be done. Handling this in the normal way is safe because no
4220 computation is done before the call. */
4221 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4222 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4223 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4224 && REG_P (DECL_RTL (to))))
4226 rtx value;
4228 push_temp_slots ();
4229 value = expand_normal (from);
4230 if (to_rtx == 0)
4231 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4233 /* Handle calls that return values in multiple non-contiguous locations.
4234 The Irix 6 ABI has examples of this. */
4235 if (GET_CODE (to_rtx) == PARALLEL)
4236 emit_group_load (to_rtx, value, TREE_TYPE (from),
4237 int_size_in_bytes (TREE_TYPE (from)));
4238 else if (GET_MODE (to_rtx) == BLKmode)
4239 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4240 else
4242 if (POINTER_TYPE_P (TREE_TYPE (to)))
4243 value = convert_memory_address (GET_MODE (to_rtx), value);
4244 emit_move_insn (to_rtx, value);
4246 preserve_temp_slots (to_rtx);
4247 free_temp_slots ();
4248 pop_temp_slots ();
4249 return;
4252 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4253 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4255 if (to_rtx == 0)
4256 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4258 /* Don't move directly into a return register. */
4259 if (TREE_CODE (to) == RESULT_DECL
4260 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4262 rtx temp;
4264 push_temp_slots ();
4265 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4267 if (GET_CODE (to_rtx) == PARALLEL)
4268 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4269 int_size_in_bytes (TREE_TYPE (from)));
4270 else
4271 emit_move_insn (to_rtx, temp);
4273 preserve_temp_slots (to_rtx);
4274 free_temp_slots ();
4275 pop_temp_slots ();
4276 return;
4279 /* In case we are returning the contents of an object which overlaps
4280 the place the value is being stored, use a safe function when copying
4281 a value through a pointer into a structure value return block. */
4282 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4283 && current_function_returns_struct
4284 && !current_function_returns_pcc_struct)
4286 rtx from_rtx, size;
4288 push_temp_slots ();
4289 size = expr_size (from);
4290 from_rtx = expand_normal (from);
4292 emit_library_call (memmove_libfunc, LCT_NORMAL,
4293 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4294 XEXP (from_rtx, 0), Pmode,
4295 convert_to_mode (TYPE_MODE (sizetype),
4296 size, TYPE_UNSIGNED (sizetype)),
4297 TYPE_MODE (sizetype));
4299 preserve_temp_slots (to_rtx);
4300 free_temp_slots ();
4301 pop_temp_slots ();
4302 return;
4305 /* Compute FROM and store the value in the rtx we got. */
4307 push_temp_slots ();
4308 result = store_expr (from, to_rtx, 0, nontemporal);
4309 preserve_temp_slots (result);
4310 free_temp_slots ();
4311 pop_temp_slots ();
4312 return;
4315 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
4316 succeeded, false otherwise. */
4318 static bool
4319 emit_storent_insn (rtx to, rtx from)
4321 enum machine_mode mode = GET_MODE (to), imode;
4322 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4323 rtx pattern;
4325 if (code == CODE_FOR_nothing)
4326 return false;
4328 imode = insn_data[code].operand[0].mode;
4329 if (!insn_data[code].operand[0].predicate (to, imode))
4330 return false;
4332 imode = insn_data[code].operand[1].mode;
4333 if (!insn_data[code].operand[1].predicate (from, imode))
4335 from = copy_to_mode_reg (imode, from);
4336 if (!insn_data[code].operand[1].predicate (from, imode))
4337 return false;
4340 pattern = GEN_FCN (code) (to, from);
4341 if (pattern == NULL_RTX)
4342 return false;
4344 emit_insn (pattern);
4345 return true;
4348 /* Generate code for computing expression EXP,
4349 and storing the value into TARGET.
4351 If the mode is BLKmode then we may return TARGET itself.
4352 It turns out that in BLKmode it doesn't cause a problem,
4353 because C has no operators that could combine two different
4354 assignments into the same BLKmode object with different values
4355 with no sequence point. Will other languages need this to
4356 be more thorough?
4358 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4359 stack, and block moves may need to be treated specially.
4361 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4363 rtx
4364 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4366 rtx temp;
4367 rtx alt_rtl = NULL_RTX;
4368 int dont_return_target = 0;
4370 if (VOID_TYPE_P (TREE_TYPE (exp)))
4372 /* C++ can generate ?: expressions with a throw expression in one
4373 branch and an rvalue in the other. Here, we resolve attempts to
4374 store the throw expression's nonexistent result. */
4375 gcc_assert (!call_param_p);
4376 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4377 return NULL_RTX;
4379 if (TREE_CODE (exp) == COMPOUND_EXPR)
4381 /* Perform first part of compound expression, then assign from second
4382 part. */
4383 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4384 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4385 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4386 nontemporal);
4388 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4390 /* For a conditional expression, get a safe form of the target. Then
4391 test the condition, doing the appropriate assignment on either
4392 side. This avoids the creation of unnecessary temporaries.
4393 For non-BLKmode, it is more efficient not to do this. */
4395 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4397 do_pending_stack_adjust ();
4398 NO_DEFER_POP;
4399 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4400 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4401 nontemporal);
4402 emit_jump_insn (gen_jump (lab2));
4403 emit_barrier ();
4404 emit_label (lab1);
4405 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4406 nontemporal);
4407 emit_label (lab2);
4408 OK_DEFER_POP;
4410 return NULL_RTX;
4412 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4413 /* If this is a scalar in a register that is stored in a wider mode
4414 than the declared mode, compute the result into its declared mode
4415 and then convert to the wider mode. Our value is the computed
4416 expression. */
4418 rtx inner_target = 0;
4420 /* We can do the conversion inside EXP, which will often result
4421 in some optimizations. Do the conversion in two steps: first
4422 change the signedness, if needed, then the extend. But don't
4423 do this if the type of EXP is a subtype of something else
4424 since then the conversion might involve more than just
4425 converting modes. */
4426 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4427 && TREE_TYPE (TREE_TYPE (exp)) == 0
4428 && (!lang_hooks.reduce_bit_field_operations
4429 || (GET_MODE_PRECISION (GET_MODE (target))
4430 == TYPE_PRECISION (TREE_TYPE (exp)))))
4432 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4433 != SUBREG_PROMOTED_UNSIGNED_P (target))
4435 /* Some types, e.g. Fortran's logical*4, won't have a signed
4436 version, so use the mode instead. */
4437 tree ntype
4438 = (signed_or_unsigned_type_for
4439 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4440 if (ntype == NULL)
4441 ntype = lang_hooks.types.type_for_mode
4442 (TYPE_MODE (TREE_TYPE (exp)),
4443 SUBREG_PROMOTED_UNSIGNED_P (target));
4445 exp = fold_convert (ntype, exp);
4448 exp = fold_convert (lang_hooks.types.type_for_mode
4449 (GET_MODE (SUBREG_REG (target)),
4450 SUBREG_PROMOTED_UNSIGNED_P (target)),
4451 exp);
4453 inner_target = SUBREG_REG (target);
4456 temp = expand_expr (exp, inner_target, VOIDmode,
4457 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4459 /* If TEMP is a VOIDmode constant, use convert_modes to make
4460 sure that we properly convert it. */
4461 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4463 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4464 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4465 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4466 GET_MODE (target), temp,
4467 SUBREG_PROMOTED_UNSIGNED_P (target));
4470 convert_move (SUBREG_REG (target), temp,
4471 SUBREG_PROMOTED_UNSIGNED_P (target));
4473 return NULL_RTX;
4475 else if (TREE_CODE (exp) == STRING_CST
4476 && !nontemporal && !call_param_p
4477 && TREE_STRING_LENGTH (exp) > 0
4478 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4480 /* Optimize initialization of an array with a STRING_CST. */
4481 HOST_WIDE_INT exp_len, str_copy_len;
4482 rtx dest_mem;
4484 exp_len = int_expr_size (exp);
4485 if (exp_len <= 0)
4486 goto normal_expr;
4488 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4489 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4490 goto normal_expr;
4492 str_copy_len = TREE_STRING_LENGTH (exp);
4493 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4495 str_copy_len += STORE_MAX_PIECES - 1;
4496 str_copy_len &= ~(STORE_MAX_PIECES - 1);
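/* E.g. if STORE_MAX_PIECES is 8 (an assumed value), a string constant of
   length 13 is rounded up here to a 16-byte copy; the result is then
   capped to the destination size just below.  */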
4498 str_copy_len = MIN (str_copy_len, exp_len);
4499 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4500 (void *) TREE_STRING_POINTER (exp),
4501 MEM_ALIGN (target)))
4502 goto normal_expr;
4504 dest_mem = target;
4506 dest_mem = store_by_pieces (dest_mem,
4507 str_copy_len, builtin_strncpy_read_str,
4508 (void *) TREE_STRING_POINTER (exp),
4509 MEM_ALIGN (target),
4510 exp_len > str_copy_len ? 1 : 0);
4511 if (exp_len > str_copy_len)
4512 clear_storage (dest_mem, GEN_INT (exp_len - str_copy_len),
4513 BLOCK_OP_NORMAL);
4514 return NULL_RTX;
4516 else
4518 rtx tmp_target;
4520 normal_expr:
4521 /* If we want to use a nontemporal store, force the value to
4522 register first. */
4523 tmp_target = nontemporal ? NULL_RTX : target;
4524 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4525 (call_param_p
4526 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4527 &alt_rtl);
4528 /* Return TARGET if it's a specified hardware register.
4529 If TARGET is a volatile mem ref, either return TARGET
4530 or return a reg copied *from* TARGET; ANSI requires this.
4532 Otherwise, if TEMP is not TARGET, return TEMP
4533 if it is constant (for efficiency),
4534 or if we really want the correct value. */
4535 if (!(target && REG_P (target)
4536 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4537 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4538 && ! rtx_equal_p (temp, target)
4539 && CONSTANT_P (temp))
4540 dont_return_target = 1;
4543 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4544 the same as that of TARGET, adjust the constant. This is needed, for
4545 example, in case it is a CONST_DOUBLE and we want only a word-sized
4546 value. */
4547 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4548 && TREE_CODE (exp) != ERROR_MARK
4549 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4550 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4551 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4553 /* If value was not generated in the target, store it there.
4554 Convert the value to TARGET's type first if necessary and emit the
4555 pending incrementations that have been queued when expanding EXP.
4556 Note that we cannot emit the whole queue blindly because this will
4557 effectively disable the POST_INC optimization later.
4559 If TEMP and TARGET compare equal according to rtx_equal_p, but
4560 one or both of them are volatile memory refs, we have to distinguish
4561 two cases:
4562 - expand_expr has used TARGET. In this case, we must not generate
4563 another copy. This can be detected by TARGET being equal according
4564 to == .
4565 - expand_expr has not used TARGET - that means that the source just
4566 happens to have the same RTX form. Since temp will have been created
4567 by expand_expr, it will compare unequal according to == .
4568 We must generate a copy in this case, to reach the correct number
4569 of volatile memory references. */
4571 if ((! rtx_equal_p (temp, target)
4572 || (temp != target && (side_effects_p (temp)
4573 || side_effects_p (target))))
4574 && TREE_CODE (exp) != ERROR_MARK
4575 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4576 but TARGET is not a valid memory reference, TEMP will differ
4577 from TARGET although it is really the same location. */
4578 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4579 /* If there's nothing to copy, don't bother. Don't call
4580 expr_size unless necessary, because some front-ends' (C++)
4581 expr_size hook must not be given objects that are not
4582 supposed to be bit-copied or bit-initialized. */
4583 && expr_size (exp) != const0_rtx)
4585 if (GET_MODE (temp) != GET_MODE (target)
4586 && GET_MODE (temp) != VOIDmode)
4588 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4589 if (dont_return_target)
4591 /* In this case, we will return TEMP,
4592 so make sure it has the proper mode.
4593 But don't forget to store the value into TARGET. */
4594 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4595 emit_move_insn (target, temp);
4597 else if (GET_MODE (target) == BLKmode)
4598 emit_block_move (target, temp, expr_size (exp),
4599 (call_param_p
4600 ? BLOCK_OP_CALL_PARM
4601 : BLOCK_OP_NORMAL));
4602 else
4603 convert_move (target, temp, unsignedp);
4606 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4608 /* Handle copying a string constant into an array. The string
4609 constant may be shorter than the array. So copy just the string's
4610 actual length, and clear the rest. First get the size of the data
4611 type of the string, which is actually the size of the target. */
4612 rtx size = expr_size (exp);
4614 if (GET_CODE (size) == CONST_INT
4615 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4616 emit_block_move (target, temp, size,
4617 (call_param_p
4618 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4619 else
4621 /* Compute the size of the data to copy from the string. */
4622 tree copy_size
4623 = size_binop (MIN_EXPR,
4624 make_tree (sizetype, size),
4625 size_int (TREE_STRING_LENGTH (exp)));
4626 rtx copy_size_rtx
4627 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4628 (call_param_p
4629 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4630 rtx label = 0;
4632 /* Copy that much. */
4633 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4634 TYPE_UNSIGNED (sizetype));
4635 emit_block_move (target, temp, copy_size_rtx,
4636 (call_param_p
4637 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4639 /* Figure out how much is left in TARGET that we have to clear.
4640 Do all calculations in ptr_mode. */
4641 if (GET_CODE (copy_size_rtx) == CONST_INT)
4643 size = plus_constant (size, -INTVAL (copy_size_rtx));
4644 target = adjust_address (target, BLKmode,
4645 INTVAL (copy_size_rtx));
4647 else
4649 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4650 copy_size_rtx, NULL_RTX, 0,
4651 OPTAB_LIB_WIDEN);
4653 #ifdef POINTERS_EXTEND_UNSIGNED
4654 if (GET_MODE (copy_size_rtx) != Pmode)
4655 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4656 TYPE_UNSIGNED (sizetype));
4657 #endif
4659 target = offset_address (target, copy_size_rtx,
4660 highest_pow2_factor (copy_size));
4661 label = gen_label_rtx ();
4662 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4663 GET_MODE (size), 0, label);
4666 if (size != const0_rtx)
4667 clear_storage (target, size, BLOCK_OP_NORMAL);
4669 if (label)
4670 emit_label (label);
4673 /* Handle calls that return values in multiple non-contiguous locations.
4674 The Irix 6 ABI has examples of this. */
4675 else if (GET_CODE (target) == PARALLEL)
4676 emit_group_load (target, temp, TREE_TYPE (exp),
4677 int_size_in_bytes (TREE_TYPE (exp)));
4678 else if (GET_MODE (temp) == BLKmode)
4679 emit_block_move (target, temp, expr_size (exp),
4680 (call_param_p
4681 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4682 else if (nontemporal
4683 && emit_storent_insn (target, temp))
4684 /* If we managed to emit a nontemporal store, there is nothing else to
4685 do. */
4687 else
4689 temp = force_operand (temp, target);
4690 if (temp != target)
4691 emit_move_insn (target, temp);
4695 return NULL_RTX;
4698 /* Helper for categorize_ctor_elements. Identical interface. */
4700 static bool
4701 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4702 HOST_WIDE_INT *p_elt_count,
4703 bool *p_must_clear)
4705 unsigned HOST_WIDE_INT idx;
4706 HOST_WIDE_INT nz_elts, elt_count;
4707 tree value, purpose;
4709 /* Whether CTOR is a valid constant initializer, in accordance with what
4710 initializer_constant_valid_p does. If inferred from the constructor
4711 elements, true until proven otherwise. */
4712 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4713 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4715 nz_elts = 0;
4716 elt_count = 0;
4718 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4720 HOST_WIDE_INT mult;
4722 mult = 1;
4723 if (TREE_CODE (purpose) == RANGE_EXPR)
4725 tree lo_index = TREE_OPERAND (purpose, 0);
4726 tree hi_index = TREE_OPERAND (purpose, 1);
4728 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4729 mult = (tree_low_cst (hi_index, 1)
4730 - tree_low_cst (lo_index, 1) + 1);
4733 switch (TREE_CODE (value))
4735 case CONSTRUCTOR:
4737 HOST_WIDE_INT nz = 0, ic = 0;
4739 bool const_elt_p
4740 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4742 nz_elts += mult * nz;
4743 elt_count += mult * ic;
4745 if (const_from_elts_p && const_p)
4746 const_p = const_elt_p;
4748 break;
4750 case INTEGER_CST:
4751 case REAL_CST:
4752 if (!initializer_zerop (value))
4753 nz_elts += mult;
4754 elt_count += mult;
4755 break;
4757 case STRING_CST:
4758 nz_elts += mult * TREE_STRING_LENGTH (value);
4759 elt_count += mult * TREE_STRING_LENGTH (value);
4760 break;
4762 case COMPLEX_CST:
4763 if (!initializer_zerop (TREE_REALPART (value)))
4764 nz_elts += mult;
4765 if (!initializer_zerop (TREE_IMAGPART (value)))
4766 nz_elts += mult;
4767 elt_count += mult;
4768 break;
4770 case VECTOR_CST:
4772 tree v;
4773 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4775 if (!initializer_zerop (TREE_VALUE (v)))
4776 nz_elts += mult;
4777 elt_count += mult;
4780 break;
4782 default:
4783 nz_elts += mult;
4784 elt_count += mult;
4786 if (const_from_elts_p && const_p)
4787 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4788 != NULL_TREE;
4789 break;
4793 if (!*p_must_clear
4794 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4795 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4797 tree init_sub_type;
4798 bool clear_this = true;
4800 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4802 /* We don't expect more than one element of the union to be
4803 initialized. Not sure what we should do otherwise... */
4804 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4805 == 1);
4807 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4808 CONSTRUCTOR_ELTS (ctor),
4809 0)->value);
4811 /* ??? We could look at each element of the union, and find the
4812 largest element. Which would avoid comparing the size of the
4813 initialized element against any tail padding in the union.
4814 Doesn't seem worth the effort... */
4815 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4816 TYPE_SIZE (init_sub_type)) == 1)
4818 /* And now we have to find out if the element itself is fully
4819 constructed. E.g. for union { struct { int a, b; } s; } u
4820 = { .s = { .a = 1 } }. */
4821 if (elt_count == count_type_elements (init_sub_type, false))
4822 clear_this = false;
4826 *p_must_clear = clear_this;
4829 *p_nz_elts += nz_elts;
4830 *p_elt_count += elt_count;
4832 return const_p;
4835 /* Examine CTOR to discover:
4836 * how many scalar fields are set to nonzero values,
4837 and place it in *P_NZ_ELTS;
4838 * how many scalar fields in total are in CTOR,
4839 and place it in *P_ELT_COUNT.
4840 * if a type is a union, and the initializer from the constructor
4841 is not the largest element in the union, then set *p_must_clear.
4843 Return whether or not CTOR is a valid static constant initializer, the same
4844 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4846 bool
4847 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4848 HOST_WIDE_INT *p_elt_count,
4849 bool *p_must_clear)
4851 *p_nz_elts = 0;
4852 *p_elt_count = 0;
4853 *p_must_clear = false;
4855 return
4856 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4859 /* Count the number of scalars in TYPE. Return -1 on overflow or
4860 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4861 flexible array member at the end of the structure. */
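/* For instance, a hypothetical struct { int a; double b[4]; } counts as
   1 + 4 == 5 scalars, while a variable-length array type yields -1.  */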
4863 HOST_WIDE_INT
4864 count_type_elements (const_tree type, bool allow_flexarr)
4866 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4867 switch (TREE_CODE (type))
4869 case ARRAY_TYPE:
4871 tree telts = array_type_nelts (type);
4872 if (telts && host_integerp (telts, 1))
4874 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4875 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4876 if (n == 0)
4877 return 0;
4878 else if (max / n > m)
4879 return n * m;
4881 return -1;
4884 case RECORD_TYPE:
4886 HOST_WIDE_INT n = 0, t;
4887 tree f;
4889 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4890 if (TREE_CODE (f) == FIELD_DECL)
4892 t = count_type_elements (TREE_TYPE (f), false);
4893 if (t < 0)
4895 /* Check for structures with flexible array member. */
4896 tree tf = TREE_TYPE (f);
4897 if (allow_flexarr
4898 && TREE_CHAIN (f) == NULL
4899 && TREE_CODE (tf) == ARRAY_TYPE
4900 && TYPE_DOMAIN (tf)
4901 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4902 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4903 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4904 && int_size_in_bytes (type) >= 0)
4905 break;
4907 return -1;
4909 n += t;
4912 return n;
4915 case UNION_TYPE:
4916 case QUAL_UNION_TYPE:
4918 /* Ho hum. How in the world do we guess here? Clearly it isn't
4919 right to count the fields. Guess based on the number of words. */
4920 HOST_WIDE_INT n = int_size_in_bytes (type);
4921 if (n < 0)
4922 return -1;
4923 return n / UNITS_PER_WORD;
4926 case COMPLEX_TYPE:
4927 return 2;
4929 case VECTOR_TYPE:
4930 return TYPE_VECTOR_SUBPARTS (type);
4932 case INTEGER_TYPE:
4933 case REAL_TYPE:
4934 case ENUMERAL_TYPE:
4935 case BOOLEAN_TYPE:
4936 case POINTER_TYPE:
4937 case OFFSET_TYPE:
4938 case REFERENCE_TYPE:
4939 return 1;
4941 case VOID_TYPE:
4942 case METHOD_TYPE:
4943 case FUNCTION_TYPE:
4944 case LANG_TYPE:
4945 default:
4946 gcc_unreachable ();
4950 /* Return 1 if EXP contains mostly (3/4) zeros. */
4952 static int
4953 mostly_zeros_p (const_tree exp)
4955 if (TREE_CODE (exp) == CONSTRUCTOR)
4958 HOST_WIDE_INT nz_elts, count, elts;
4959 bool must_clear;
4961 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4962 if (must_clear)
4963 return 1;
4965 elts = count_type_elements (TREE_TYPE (exp), false);
4967 return nz_elts < elts / 4;
4970 return initializer_zerop (exp);
4973 /* Return 1 if EXP contains all zeros. */
4975 static int
4976 all_zeros_p (const_tree exp)
4978 if (TREE_CODE (exp) == CONSTRUCTOR)
4981 HOST_WIDE_INT nz_elts, count;
4982 bool must_clear;
4984 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4985 return nz_elts == 0;
4988 return initializer_zerop (exp);
4991 /* Helper function for store_constructor.
4992 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4993 TYPE is the type of the CONSTRUCTOR, not the element type.
4994 CLEARED is as for store_constructor.
4995 ALIAS_SET is the alias set to use for any stores.
4997 This provides a recursive shortcut back to store_constructor when it isn't
4998 necessary to go through store_field. This is so that we can pass through
4999 the cleared field to let store_constructor know that we may not have to
5000 clear a substructure if the outer structure has already been cleared. */
5002 static void
5003 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5004 HOST_WIDE_INT bitpos, enum machine_mode mode,
5005 tree exp, tree type, int cleared,
5006 alias_set_type alias_set)
5008 if (TREE_CODE (exp) == CONSTRUCTOR
5009 /* We can only call store_constructor recursively if the size and
5010 bit position are on a byte boundary. */
5011 && bitpos % BITS_PER_UNIT == 0
5012 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5013 /* If we have a nonzero bitpos for a register target, then we just
5014 let store_field do the bitfield handling. This is unlikely to
5015 generate unnecessary clear instructions anyways. */
5016 && (bitpos == 0 || MEM_P (target)))
5018 if (MEM_P (target))
5019 target
5020 = adjust_address (target,
5021 GET_MODE (target) == BLKmode
5022 || 0 != (bitpos
5023 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5024 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5027 /* Update the alias set, if required. */
5028 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5029 && MEM_ALIAS_SET (target) != 0)
5031 target = copy_rtx (target);
5032 set_mem_alias_set (target, alias_set);
5035 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5037 else
5038 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5041 /* Store the value of constructor EXP into the rtx TARGET.
5042 TARGET is either a REG or a MEM; we know it cannot conflict, since
5043 safe_from_p has been called.
5044 CLEARED is true if TARGET is known to have been zero'd.
5045 SIZE is the number of bytes of TARGET we are allowed to modify: this
5046 may not be the same as the size of EXP if we are assigning to a field
5047 which has been packed to exclude padding bits. */
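/* As an illustration (assumed source): for

     struct s { int a, b, c, d; } x = { 1 };

   the CONSTRUCTOR supplies fewer elements than the struct has fields, so
   the RECORD_TYPE case below clears the whole of TARGET first (assuming
   TARGET is memory of nonzero size) and then stores only the explicitly
   initialized field.  */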
5049 static void
5050 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5052 tree type = TREE_TYPE (exp);
5053 #ifdef WORD_REGISTER_OPERATIONS
5054 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5055 #endif
5057 switch (TREE_CODE (type))
5059 case RECORD_TYPE:
5060 case UNION_TYPE:
5061 case QUAL_UNION_TYPE:
5063 unsigned HOST_WIDE_INT idx;
5064 tree field, value;
5066 /* If size is zero or the target is already cleared, do nothing. */
5067 if (size == 0 || cleared)
5068 cleared = 1;
5069 /* We either clear the aggregate or indicate the value is dead. */
5070 else if ((TREE_CODE (type) == UNION_TYPE
5071 || TREE_CODE (type) == QUAL_UNION_TYPE)
5072 && ! CONSTRUCTOR_ELTS (exp))
5073 /* If the constructor is empty, clear the union. */
5075 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5076 cleared = 1;
5079 /* If we are building a static constructor into a register,
5080 set the initial value as zero so we can fold the value into
5081 a constant. But if more than one register is involved,
5082 this probably loses. */
5083 else if (REG_P (target) && TREE_STATIC (exp)
5084 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5086 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5087 cleared = 1;
5090 /* If the constructor has fewer fields than the structure or
5091 if we are initializing the structure to mostly zeros, clear
5092 the whole structure first. Don't do this if TARGET is a
5093 register whose mode size isn't equal to SIZE since
5094 clear_storage can't handle this case. */
5095 else if (size > 0
5096 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5097 != fields_length (type))
5098 || mostly_zeros_p (exp))
5099 && (!REG_P (target)
5100 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5101 == size)))
5103 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5104 cleared = 1;
5107 if (REG_P (target) && !cleared)
5108 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5110 /* Store each element of the constructor into the
5111 corresponding field of TARGET. */
5112 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5114 enum machine_mode mode;
5115 HOST_WIDE_INT bitsize;
5116 HOST_WIDE_INT bitpos = 0;
5117 tree offset;
5118 rtx to_rtx = target;
5120 /* Just ignore missing fields. We cleared the whole
5121 structure, above, if any fields are missing. */
5122 if (field == 0)
5123 continue;
5125 if (cleared && initializer_zerop (value))
5126 continue;
5128 if (host_integerp (DECL_SIZE (field), 1))
5129 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5130 else
5131 bitsize = -1;
5133 mode = DECL_MODE (field);
5134 if (DECL_BIT_FIELD (field))
5135 mode = VOIDmode;
5137 offset = DECL_FIELD_OFFSET (field);
5138 if (host_integerp (offset, 0)
5139 && host_integerp (bit_position (field), 0))
5141 bitpos = int_bit_position (field);
5142 offset = 0;
5144 else
5145 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5147 if (offset)
5149 rtx offset_rtx;
5151 offset
5152 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5153 make_tree (TREE_TYPE (exp),
5154 target));
5156 offset_rtx = expand_normal (offset);
5157 gcc_assert (MEM_P (to_rtx));
5159 #ifdef POINTERS_EXTEND_UNSIGNED
5160 if (GET_MODE (offset_rtx) != Pmode)
5161 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5162 #else
5163 if (GET_MODE (offset_rtx) != ptr_mode)
5164 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5165 #endif
5167 to_rtx = offset_address (to_rtx, offset_rtx,
5168 highest_pow2_factor (offset));
5171 #ifdef WORD_REGISTER_OPERATIONS
5172 /* If this initializes a field that is smaller than a
5173 word, at the start of a word, try to widen it to a full
5174 word. This special case allows us to output C++ member
5175 function initializations in a form that the optimizers
5176 can understand. */
5177 if (REG_P (target)
5178 && bitsize < BITS_PER_WORD
5179 && bitpos % BITS_PER_WORD == 0
5180 && GET_MODE_CLASS (mode) == MODE_INT
5181 && TREE_CODE (value) == INTEGER_CST
5182 && exp_size >= 0
5183 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5185 tree type = TREE_TYPE (value);
5187 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5189 type = lang_hooks.types.type_for_size
5190 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5191 value = fold_convert (type, value);
5194 if (BYTES_BIG_ENDIAN)
5195 value
5196 = fold_build2 (LSHIFT_EXPR, type, value,
5197 build_int_cst (type,
5198 BITS_PER_WORD - bitsize));
5199 bitsize = BITS_PER_WORD;
5200 mode = word_mode;
5202 #endif
5204 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5205 && DECL_NONADDRESSABLE_P (field))
5207 to_rtx = copy_rtx (to_rtx);
5208 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5211 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5212 value, type, cleared,
5213 get_alias_set (TREE_TYPE (field)));
5215 break;
5217 case ARRAY_TYPE:
5219 tree value, index;
5220 unsigned HOST_WIDE_INT i;
5221 int need_to_clear;
5222 tree domain;
5223 tree elttype = TREE_TYPE (type);
5224 int const_bounds_p;
5225 HOST_WIDE_INT minelt = 0;
5226 HOST_WIDE_INT maxelt = 0;
5228 domain = TYPE_DOMAIN (type);
5229 const_bounds_p = (TYPE_MIN_VALUE (domain)
5230 && TYPE_MAX_VALUE (domain)
5231 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5232 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5234 /* If we have constant bounds for the range of the type, get them. */
5235 if (const_bounds_p)
5237 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5238 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5241 /* If the constructor has fewer elements than the array, clear
5242 the whole array first. Similarly if this is a static
5243 constructor of a non-BLKmode object. */
5244 if (cleared)
5245 need_to_clear = 0;
5246 else if (REG_P (target) && TREE_STATIC (exp))
5247 need_to_clear = 1;
5248 else
5250 unsigned HOST_WIDE_INT idx;
5251 tree index, value;
5252 HOST_WIDE_INT count = 0, zero_count = 0;
5253 need_to_clear = ! const_bounds_p;
5255 /* This loop is a more accurate version of the loop in
5256 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5257 is also needed to check for missing elements. */
5258 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5260 HOST_WIDE_INT this_node_count;
5262 if (need_to_clear)
5263 break;
5265 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5267 tree lo_index = TREE_OPERAND (index, 0);
5268 tree hi_index = TREE_OPERAND (index, 1);
5270 if (! host_integerp (lo_index, 1)
5271 || ! host_integerp (hi_index, 1))
5273 need_to_clear = 1;
5274 break;
5277 this_node_count = (tree_low_cst (hi_index, 1)
5278 - tree_low_cst (lo_index, 1) + 1);
5280 else
5281 this_node_count = 1;
5283 count += this_node_count;
5284 if (mostly_zeros_p (value))
5285 zero_count += this_node_count;
5288 /* Clear the entire array first if there are any missing
5289 elements, or if the incidence of zero elements is >=
5290 75%. */
5291 if (! need_to_clear
5292 && (count < maxelt - minelt + 1
5293 || 4 * zero_count >= 3 * count))
5294 need_to_clear = 1;
5297 if (need_to_clear && size > 0)
5299 if (REG_P (target))
5300 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5301 else
5302 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5303 cleared = 1;
5306 if (!cleared && REG_P (target))
5307 /* Inform later passes that the old value is dead. */
5308 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5310 /* Store each element of the constructor into the
5311 corresponding element of TARGET, determined by counting the
5312 elements. */
5313 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5315 enum machine_mode mode;
5316 HOST_WIDE_INT bitsize;
5317 HOST_WIDE_INT bitpos;
5318 int unsignedp;
5319 rtx xtarget = target;
5321 if (cleared && initializer_zerop (value))
5322 continue;
5324 unsignedp = TYPE_UNSIGNED (elttype);
5325 mode = TYPE_MODE (elttype);
5326 if (mode == BLKmode)
5327 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5328 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5329 : -1);
5330 else
5331 bitsize = GET_MODE_BITSIZE (mode);
5333 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5335 tree lo_index = TREE_OPERAND (index, 0);
5336 tree hi_index = TREE_OPERAND (index, 1);
5337 rtx index_r, pos_rtx;
5338 HOST_WIDE_INT lo, hi, count;
5339 tree position;
5341 /* If the range is constant and "small", unroll the loop. */
5342 if (const_bounds_p
5343 && host_integerp (lo_index, 0)
5344 && host_integerp (hi_index, 0)
5345 && (lo = tree_low_cst (lo_index, 0),
5346 hi = tree_low_cst (hi_index, 0),
5347 count = hi - lo + 1,
5348 (!MEM_P (target)
5349 || count <= 2
5350 || (host_integerp (TYPE_SIZE (elttype), 1)
5351 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5352 <= 40 * 8)))))
5354 lo -= minelt; hi -= minelt;
5355 for (; lo <= hi; lo++)
5357 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5359 if (MEM_P (target)
5360 && !MEM_KEEP_ALIAS_SET_P (target)
5361 && TREE_CODE (type) == ARRAY_TYPE
5362 && TYPE_NONALIASED_COMPONENT (type))
5364 target = copy_rtx (target);
5365 MEM_KEEP_ALIAS_SET_P (target) = 1;
5368 store_constructor_field
5369 (target, bitsize, bitpos, mode, value, type, cleared,
5370 get_alias_set (elttype));
5373 else
5375 rtx loop_start = gen_label_rtx ();
5376 rtx loop_end = gen_label_rtx ();
5377 tree exit_cond;
5379 expand_normal (hi_index);
5380 unsignedp = TYPE_UNSIGNED (domain);
5382 index = build_decl (VAR_DECL, NULL_TREE, domain);
5384 index_r
5385 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5386 &unsignedp, 0));
5387 SET_DECL_RTL (index, index_r);
5388 store_expr (lo_index, index_r, 0, false);
5390 /* Build the head of the loop. */
5391 do_pending_stack_adjust ();
5392 emit_label (loop_start);
5394 /* Assign value to element index. */
5395 position =
5396 fold_convert (ssizetype,
5397 fold_build2 (MINUS_EXPR,
5398 TREE_TYPE (index),
5399 index,
5400 TYPE_MIN_VALUE (domain)));
5402 position =
5403 size_binop (MULT_EXPR, position,
5404 fold_convert (ssizetype,
5405 TYPE_SIZE_UNIT (elttype)));
5407 pos_rtx = expand_normal (position);
5408 xtarget = offset_address (target, pos_rtx,
5409 highest_pow2_factor (position));
5410 xtarget = adjust_address (xtarget, mode, 0);
5411 if (TREE_CODE (value) == CONSTRUCTOR)
5412 store_constructor (value, xtarget, cleared,
5413 bitsize / BITS_PER_UNIT);
5414 else
5415 store_expr (value, xtarget, 0, false);
5417 /* Generate a conditional jump to exit the loop. */
5418 exit_cond = build2 (LT_EXPR, integer_type_node,
5419 index, hi_index);
5420 jumpif (exit_cond, loop_end);
5422 /* Update the loop counter, and jump to the head of
5423 the loop. */
5424 expand_assignment (index,
5425 build2 (PLUS_EXPR, TREE_TYPE (index),
5426 index, integer_one_node),
5427 false);
5429 emit_jump (loop_start);
5431 /* Build the end of the loop. */
5432 emit_label (loop_end);
5435 else if ((index != 0 && ! host_integerp (index, 0))
5436 || ! host_integerp (TYPE_SIZE (elttype), 1))
5438 tree position;
5440 if (index == 0)
5441 index = ssize_int (1);
5443 if (minelt)
5444 index = fold_convert (ssizetype,
5445 fold_build2 (MINUS_EXPR,
5446 TREE_TYPE (index),
5447 index,
5448 TYPE_MIN_VALUE (domain)));
5450 position =
5451 size_binop (MULT_EXPR, index,
5452 fold_convert (ssizetype,
5453 TYPE_SIZE_UNIT (elttype)));
5454 xtarget = offset_address (target,
5455 expand_normal (position),
5456 highest_pow2_factor (position));
5457 xtarget = adjust_address (xtarget, mode, 0);
5458 store_expr (value, xtarget, 0, false);
5460 else
5462 if (index != 0)
5463 bitpos = ((tree_low_cst (index, 0) - minelt)
5464 * tree_low_cst (TYPE_SIZE (elttype), 1));
5465 else
5466 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5468 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5469 && TREE_CODE (type) == ARRAY_TYPE
5470 && TYPE_NONALIASED_COMPONENT (type))
5472 target = copy_rtx (target);
5473 MEM_KEEP_ALIAS_SET_P (target) = 1;
5475 store_constructor_field (target, bitsize, bitpos, mode, value,
5476 type, cleared, get_alias_set (elttype));
5479 break;
5482 case VECTOR_TYPE:
5484 unsigned HOST_WIDE_INT idx;
5485 constructor_elt *ce;
5486 int i;
5487 int need_to_clear;
5488 int icode = 0;
5489 tree elttype = TREE_TYPE (type);
5490 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5491 enum machine_mode eltmode = TYPE_MODE (elttype);
5492 HOST_WIDE_INT bitsize;
5493 HOST_WIDE_INT bitpos;
5494 rtvec vector = NULL;
5495 unsigned n_elts;
5497 gcc_assert (eltmode != BLKmode);
5499 n_elts = TYPE_VECTOR_SUBPARTS (type);
5500 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5502 enum machine_mode mode = GET_MODE (target);
5504 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5505 if (icode != CODE_FOR_nothing)
5507 unsigned int i;
5509 vector = rtvec_alloc (n_elts);
5510 for (i = 0; i < n_elts; i++)
5511 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5515 /* If the constructor has fewer elements than the vector,
5516 clear the whole vector first. Similarly if this is a static
5517 constructor of a non-BLKmode object. */
5518 if (cleared)
5519 need_to_clear = 0;
5520 else if (REG_P (target) && TREE_STATIC (exp))
5521 need_to_clear = 1;
5522 else
5524 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5525 tree value;
5527 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5529 int n_elts_here = tree_low_cst
5530 (int_const_binop (TRUNC_DIV_EXPR,
5531 TYPE_SIZE (TREE_TYPE (value)),
5532 TYPE_SIZE (elttype), 0), 1);
5534 count += n_elts_here;
5535 if (mostly_zeros_p (value))
5536 zero_count += n_elts_here;
5539 /* Clear the entire vector first if there are any missing elements,
5540 or if the incidence of zero elements is >= 75%. */
5541 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5544 if (need_to_clear && size > 0 && !vector)
5546 if (REG_P (target))
5547 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5548 else
5549 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5550 cleared = 1;
5553 /* Inform later passes that the old value is dead. */
5554 if (!cleared && !vector && REG_P (target))
5555 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5557 /* Store each element of the constructor into the corresponding
5558 element of TARGET, determined by counting the elements. */
5559 for (idx = 0, i = 0;
5560 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5561 idx++, i += bitsize / elt_size)
5563 HOST_WIDE_INT eltpos;
5564 tree value = ce->value;
5566 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5567 if (cleared && initializer_zerop (value))
5568 continue;
5570 if (ce->index)
5571 eltpos = tree_low_cst (ce->index, 1);
5572 else
5573 eltpos = i;
5575 if (vector)
5577 /* Vector CONSTRUCTORs should only be built from smaller
5578 vectors in the case of BLKmode vectors. */
5579 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5580 RTVEC_ELT (vector, eltpos)
5581 = expand_normal (value);
5583 else
5585 enum machine_mode value_mode =
5586 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5587 ? TYPE_MODE (TREE_TYPE (value))
5588 : eltmode;
5589 bitpos = eltpos * elt_size;
5590 store_constructor_field (target, bitsize, bitpos,
5591 value_mode, value, type,
5592 cleared, get_alias_set (elttype));
5596 if (vector)
5597 emit_insn (GEN_FCN (icode)
5598 (target,
5599 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5600 break;
5603 default:
5604 gcc_unreachable ();
5608 /* Store the value of EXP (an expression tree)
5609 into a subfield of TARGET which has mode MODE and occupies
5610 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5611 If MODE is VOIDmode, it means that we are storing into a bit-field.
5613 Always return const0_rtx unless we have something particular to
5614 return.
5616 TYPE is the type of the underlying object,
5618 ALIAS_SET is the alias set for the destination. This value will
5619 (in general) be different from that for TARGET, since TARGET is a
5620 reference to the containing structure.
5622 If NONTEMPORAL is true, try generating a nontemporal store. */
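/* Illustrative example of the interface (hypothetical values): storing a
   value into a 3-bit field that begins 5 bits into TARGET is a call with
   BITSIZE == 3, BITPOS == 5 and MODE == VOIDmode; the value is then
   inserted with store_bit_field below.  */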
5624 static rtx
5625 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5626 enum machine_mode mode, tree exp, tree type,
5627 alias_set_type alias_set, bool nontemporal)
5629 HOST_WIDE_INT width_mask = 0;
5631 if (TREE_CODE (exp) == ERROR_MARK)
5632 return const0_rtx;
5634 /* If we have nothing to store, do nothing unless the expression has
5635 side-effects. */
5636 if (bitsize == 0)
5637 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5638 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5639 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5641 /* If we are storing into an unaligned field of an aligned union that is
5642 in a register, we may have the mode of TARGET being an integer mode but
5643 MODE == BLKmode. In that case, get an aligned object whose size and
5644 alignment are the same as TARGET and store TARGET into it (we can avoid
5645 the store if the field being stored is the entire width of TARGET). Then
5646 call ourselves recursively to store the field into a BLKmode version of
5647 that object. Finally, load from the object into TARGET. This is not
5648 very efficient in general, but should only be slightly more expensive
5649 than the otherwise-required unaligned accesses. Perhaps this can be
5650 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5651 twice, once with emit_move_insn and once via store_field. */
5653 if (mode == BLKmode
5654 && (REG_P (target) || GET_CODE (target) == SUBREG))
5656 rtx object = assign_temp (type, 0, 1, 1);
5657 rtx blk_object = adjust_address (object, BLKmode, 0);
5659 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5660 emit_move_insn (object, target);
5662 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5663 nontemporal);
5665 emit_move_insn (target, object);
5667 /* We want to return the BLKmode version of the data. */
5668 return blk_object;
5671 if (GET_CODE (target) == CONCAT)
5673 /* We're storing into a struct containing a single __complex. */
5675 gcc_assert (!bitpos);
5676 return store_expr (exp, target, 0, nontemporal);
5679 /* If the structure is in a register or if the component
5680 is a bit field, we cannot use addressing to access it.
5681 Use bit-field techniques or SUBREG to store in it. */
5683 if (mode == VOIDmode
5684 || (mode != BLKmode && ! direct_store[(int) mode]
5685 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5686 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5687 || REG_P (target)
5688 || GET_CODE (target) == SUBREG
5689 /* If the field isn't aligned enough to store as an ordinary memref,
5690 store it as a bit field. */
5691 || (mode != BLKmode
5692 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5693 || bitpos % GET_MODE_ALIGNMENT (mode))
5694 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5695 || (bitpos % BITS_PER_UNIT != 0)))
5696 /* If the RHS and field are a constant size and the size of the
5697 RHS isn't the same size as the bitfield, we must use bitfield
5698 operations. */
5699 || (bitsize >= 0
5700 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5701 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5703 rtx temp;
5705 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5706 implies a mask operation. If the precision is the same size as
5707 the field we're storing into, that mask is redundant. This is
5708 particularly common with bit field assignments generated by the
5709 C front end. */
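/* Example (hypothetical): for `s.f = x;' where `f' is a 3-bit field, the
   front end wraps `x' in a conversion to the 3-bit type; since only
   BITSIZE == 3 bits are stored anyway, the masking implied by that
   conversion is redundant and we expand the unconverted `x' instead.  */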
5710 if (TREE_CODE (exp) == NOP_EXPR)
5712 tree type = TREE_TYPE (exp);
5713 if (INTEGRAL_TYPE_P (type)
5714 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5715 && bitsize == TYPE_PRECISION (type))
5717 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5718 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5719 exp = TREE_OPERAND (exp, 0);
5723 temp = expand_normal (exp);
5725 /* If BITSIZE is narrower than the size of the type of EXP
5726 we will be narrowing TEMP. Normally, what's wanted are the
5727 low-order bits. However, if EXP's type is a record and this is a
5728 big-endian machine, we want the upper BITSIZE bits. */
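/* For instance (hypothetical case): narrowing a record value held in a
   32-bit integer mode down to a 24-bit field on a big-endian target first
   shifts TEMP right by 32 - 24 == 8 bits, so that the 24 bits we keep are
   the upper ones.  */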
5729 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5730 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5731 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5732 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5733 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5734 - bitsize),
5735 NULL_RTX, 1);
5737 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5738 MODE. */
5739 if (mode != VOIDmode && mode != BLKmode
5740 && mode != TYPE_MODE (TREE_TYPE (exp)))
5741 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5743 /* If the modes of TARGET and TEMP are both BLKmode, both
5744 must be in memory and BITPOS must be aligned on a byte
5745 boundary. If so, we simply do a block copy. */
5746 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5748 gcc_assert (MEM_P (target) && MEM_P (temp)
5749 && !(bitpos % BITS_PER_UNIT));
5751 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5752 emit_block_move (target, temp,
5753 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5754 / BITS_PER_UNIT),
5755 BLOCK_OP_NORMAL);
5757 return const0_rtx;
5760 /* Store the value in the bitfield. */
5761 store_bit_field (target, bitsize, bitpos, mode, temp);
5763 return const0_rtx;
5765 else
5767 /* Now build a reference to just the desired component. */
5768 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5770 if (to_rtx == target)
5771 to_rtx = copy_rtx (to_rtx);
5773 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5774 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5775 set_mem_alias_set (to_rtx, alias_set);
5777 return store_expr (exp, to_rtx, 0, nontemporal);
5781 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5782 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5783 codes and find the ultimate containing object, which we return.
5785 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5786 bit position, and *PUNSIGNEDP to the signedness of the field.
5787 If the position of the field is variable, we store a tree
5788 giving the variable offset (in units) in *POFFSET.
5789 This offset is in addition to the bit position.
5790 If the position is not variable, we store 0 in *POFFSET.
5792 If any of the extraction expressions is volatile,
5793 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5795 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5796 is a mode that can be used to access the field. In that case, *PBITSIZE
5797 is redundant.
5799 If the field describes a variable-sized object, *PMODE is set to
5800 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5801 this case, but the address of the object can be found.
5803 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5804 look through nodes that serve as markers of a greater alignment than
5805 the one that can be deduced from the expression. These nodes make it
5806 possible for front-ends to prevent temporaries from being created by
5807 the middle-end on alignment considerations. For that purpose, the
5808 normal operating mode at high-level is to always pass FALSE so that
5809 the ultimate containing object is really returned; moreover, the
5810 associated predicate handled_component_p will always return TRUE
5811 on these nodes, thus indicating that they are essentially handled
5812 by get_inner_reference. TRUE should only be passed when the caller
5813 is scanning the expression in order to build another representation
5814 and specifically knows how to handle these nodes; as such, this is
5815 the normal operating mode in the RTL expanders. */
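/* A worked example (hypothetical layout): for the reference `s.f', where
   `f' is a 3-bit bit-field placed 17 bits from the start of `s', this
   returns `s' with *PBITSIZE == 3, *PBITPOS == 17, *POFFSET == 0 and
   *PMODE == VOIDmode.  For `a[i].x' with a variable index, *POFFSET
   instead holds a byte-offset tree (roughly `i * sizeof (a[0])') and
   *PBITPOS holds the remaining bit offset of `x' within an element.  */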
5817 tree
5818 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5819 HOST_WIDE_INT *pbitpos, tree *poffset,
5820 enum machine_mode *pmode, int *punsignedp,
5821 int *pvolatilep, bool keep_aligning)
5823 tree size_tree = 0;
5824 enum machine_mode mode = VOIDmode;
5825 tree offset = size_zero_node;
5826 tree bit_offset = bitsize_zero_node;
5828 /* First get the mode, signedness, and size. We do this from just the
5829 outermost expression. */
5830 if (TREE_CODE (exp) == COMPONENT_REF)
5832 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5833 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5834 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5836 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5838 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5840 size_tree = TREE_OPERAND (exp, 1);
5841 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5843 /* For vector types, with the correct size of access, use the mode of
5844 the inner type. */
5845 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5846 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5847 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5848 mode = TYPE_MODE (TREE_TYPE (exp));
5850 else
5852 mode = TYPE_MODE (TREE_TYPE (exp));
5853 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5855 if (mode == BLKmode)
5856 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5857 else
5858 *pbitsize = GET_MODE_BITSIZE (mode);
5861 if (size_tree != 0)
5863 if (! host_integerp (size_tree, 1))
5864 mode = BLKmode, *pbitsize = -1;
5865 else
5866 *pbitsize = tree_low_cst (size_tree, 1);
5869 *pmode = mode;
5871 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5872 and find the ultimate containing object. */
5873 while (1)
5875 switch (TREE_CODE (exp))
5877 case BIT_FIELD_REF:
5878 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5879 TREE_OPERAND (exp, 2));
5880 break;
5882 case COMPONENT_REF:
5884 tree field = TREE_OPERAND (exp, 1);
5885 tree this_offset = component_ref_field_offset (exp);
5887 /* If this field hasn't been filled in yet, don't go past it.
5888 This should only happen when folding expressions made during
5889 type construction. */
5890 if (this_offset == 0)
5891 break;
5893 offset = size_binop (PLUS_EXPR, offset, this_offset);
5894 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5895 DECL_FIELD_BIT_OFFSET (field));
5897 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5899 break;
5901 case ARRAY_REF:
5902 case ARRAY_RANGE_REF:
5904 tree index = TREE_OPERAND (exp, 1);
5905 tree low_bound = array_ref_low_bound (exp);
5906 tree unit_size = array_ref_element_size (exp);
5908 /* We assume all arrays have sizes that are a multiple of a byte.
5909 First subtract the lower bound, if any, in the type of the
5910 index, then convert to sizetype and multiply by the size of
5911 the array element. */
5912 if (! integer_zerop (low_bound))
5913 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5914 index, low_bound);
5916 offset = size_binop (PLUS_EXPR, offset,
5917 size_binop (MULT_EXPR,
5918 fold_convert (sizetype, index),
5919 unit_size));
5921 break;
5923 case REALPART_EXPR:
5924 break;
5926 case IMAGPART_EXPR:
5927 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5928 bitsize_int (*pbitsize));
5929 break;
5931 case VIEW_CONVERT_EXPR:
5932 if (keep_aligning && STRICT_ALIGNMENT
5933 && (TYPE_ALIGN (TREE_TYPE (exp))
5934 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5935 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5936 < BIGGEST_ALIGNMENT)
5937 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5938 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5939 goto done;
5940 break;
5942 default:
5943 goto done;
5946 /* If any reference in the chain is volatile, the effect is volatile. */
5947 if (TREE_THIS_VOLATILE (exp))
5948 *pvolatilep = 1;
5950 exp = TREE_OPERAND (exp, 0);
5952 done:
5954 /* If OFFSET is constant, see if we can return the whole thing as a
5955 constant bit position. Make sure to handle overflow during
5956 this conversion. */
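/* E.g. with a constant OFFSET of 5 bytes and a BIT_OFFSET of 3, the
   combined bit position is 5 * BITS_PER_UNIT + 3, i.e. 43 on a target
   with 8-bit units.  */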
5957 if (host_integerp (offset, 0))
5959 double_int tem = double_int_mul (tree_to_double_int (offset),
5960 uhwi_to_double_int (BITS_PER_UNIT));
5961 tem = double_int_add (tem, tree_to_double_int (bit_offset));
5962 if (double_int_fits_in_shwi_p (tem))
5964 *pbitpos = double_int_to_shwi (tem);
5965 *poffset = NULL_TREE;
5966 return exp;
5970 /* Otherwise, split it up. */
5971 *pbitpos = tree_low_cst (bit_offset, 0);
5972 *poffset = offset;
5974 return exp;
5977 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5978 look for whether EXP or any nested component-refs within EXP is marked
5979 as PACKED. */
5981 bool
5982 contains_packed_reference (const_tree exp)
5984 bool packed_p = false;
5986 while (1)
5988 switch (TREE_CODE (exp))
5990 case COMPONENT_REF:
5992 tree field = TREE_OPERAND (exp, 1);
5993 packed_p = DECL_PACKED (field)
5994 || TYPE_PACKED (TREE_TYPE (field))
5995 || TYPE_PACKED (TREE_TYPE (exp));
5996 if (packed_p)
5997 goto done;
5999 break;
6001 case BIT_FIELD_REF:
6002 case ARRAY_REF:
6003 case ARRAY_RANGE_REF:
6004 case REALPART_EXPR:
6005 case IMAGPART_EXPR:
6006 case VIEW_CONVERT_EXPR:
6007 break;
6009 default:
6010 goto done;
6012 exp = TREE_OPERAND (exp, 0);
6014 done:
6015 return packed_p;
6018 /* Return a tree of sizetype representing the size, in bytes, of the element
6019 of EXP, an ARRAY_REF. */
6021 tree
6022 array_ref_element_size (tree exp)
6024 tree aligned_size = TREE_OPERAND (exp, 3);
6025 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6027 /* If a size was specified in the ARRAY_REF, it's the size measured
6028 in alignment units of the element type. So multiply by that value. */
6029 if (aligned_size)
6031 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6032 sizetype from another type of the same width and signedness. */
6033 if (TREE_TYPE (aligned_size) != sizetype)
6034 aligned_size = fold_convert (sizetype, aligned_size);
6035 return size_binop (MULT_EXPR, aligned_size,
6036 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6039 /* Otherwise, take the size from that of the element type. Substitute
6040 any PLACEHOLDER_EXPR that we have. */
6041 else
6042 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6045 /* Return a tree representing the lower bound of the array mentioned in
6046 EXP, an ARRAY_REF. */
6048 tree
6049 array_ref_low_bound (tree exp)
6051 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6053 /* If a lower bound is specified in EXP, use it. */
6054 if (TREE_OPERAND (exp, 2))
6055 return TREE_OPERAND (exp, 2);
6057 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6058 substituting for a PLACEHOLDER_EXPR as needed. */
6059 if (domain_type && TYPE_MIN_VALUE (domain_type))
6060 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6062 /* Otherwise, return a zero of the appropriate type. */
6063 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6066 /* Return a tree representing the upper bound of the array mentioned in
6067 EXP, an ARRAY_REF. */
6069 tree
6070 array_ref_up_bound (tree exp)
6072 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6074 /* If there is a domain type and it has an upper bound, use it, substituting
6075 for a PLACEHOLDER_EXPR as needed. */
6076 if (domain_type && TYPE_MAX_VALUE (domain_type))
6077 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6079 /* Otherwise fail. */
6080 return NULL_TREE;
6083 /* Return a tree representing the offset, in bytes, of the field referenced
6084 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6086 tree
6087 component_ref_field_offset (tree exp)
6089 tree aligned_offset = TREE_OPERAND (exp, 2);
6090 tree field = TREE_OPERAND (exp, 1);
6092 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6093 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6094 value. */
6095 if (aligned_offset)
6097 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6098 sizetype from another type of the same width and signedness. */
6099 if (TREE_TYPE (aligned_offset) != sizetype)
6100 aligned_offset = fold_convert (sizetype, aligned_offset);
6101 return size_binop (MULT_EXPR, aligned_offset,
6102 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6105 /* Otherwise, take the offset from that of the field. Substitute
6106 any PLACEHOLDER_EXPR that we have. */
6107 else
6108 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6111 /* Return 1 if T is an expression that get_inner_reference handles. */
6114 handled_component_p (const_tree t)
6116 switch (TREE_CODE (t))
6118 case BIT_FIELD_REF:
6119 case COMPONENT_REF:
6120 case ARRAY_REF:
6121 case ARRAY_RANGE_REF:
6122 case VIEW_CONVERT_EXPR:
6123 case REALPART_EXPR:
6124 case IMAGPART_EXPR:
6125 return 1;
6127 default:
6128 return 0;
6132 /* Given an rtx VALUE that may contain additions and multiplications, return
6133 an equivalent value that just refers to a register, memory, or constant.
6134 This is done by generating instructions to perform the arithmetic and
6135 returning a pseudo-register containing the value.
6137 The returned value may be a REG, SUBREG, MEM or constant. */
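/* For example (hypothetical RTL): given (plus (reg 60) (mult (reg 61)
   (const_int 4))), this emits the multiply and the add and returns a
   pseudo register holding the sum; a bare (reg 60) or (const_int 4) is
   returned unchanged.  */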
6140 force_operand (rtx value, rtx target)
6142 rtx op1, op2;
6143 /* Use subtarget as the target for operand 0 of a binary operation. */
6144 rtx subtarget = get_subtarget (target);
6145 enum rtx_code code = GET_CODE (value);
6147 /* Check for subreg applied to an expression produced by the loop optimizer. */
6148 if (code == SUBREG
6149 && !REG_P (SUBREG_REG (value))
6150 && !MEM_P (SUBREG_REG (value)))
6152 value
6153 = simplify_gen_subreg (GET_MODE (value),
6154 force_reg (GET_MODE (SUBREG_REG (value)),
6155 force_operand (SUBREG_REG (value),
6156 NULL_RTX)),
6157 GET_MODE (SUBREG_REG (value)),
6158 SUBREG_BYTE (value));
6159 code = GET_CODE (value);
6162 /* Check for a PIC address load. */
6163 if ((code == PLUS || code == MINUS)
6164 && XEXP (value, 0) == pic_offset_table_rtx
6165 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6166 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6167 || GET_CODE (XEXP (value, 1)) == CONST))
6169 if (!subtarget)
6170 subtarget = gen_reg_rtx (GET_MODE (value));
6171 emit_move_insn (subtarget, value);
6172 return subtarget;
6175 if (ARITHMETIC_P (value))
6177 op2 = XEXP (value, 1);
6178 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6179 subtarget = 0;
6180 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6182 code = PLUS;
6183 op2 = negate_rtx (GET_MODE (value), op2);
6186 /* Check for an addition with OP2 a constant integer and our first
6187 operand a PLUS of a virtual register and something else. In that
6188 case, we want to emit the sum of the virtual register and the
6189 constant first and then add the other value. This allows virtual
6190 register instantiation to simply modify the constant rather than
6191 creating another one around this addition. */
6192 if (code == PLUS && GET_CODE (op2) == CONST_INT
6193 && GET_CODE (XEXP (value, 0)) == PLUS
6194 && REG_P (XEXP (XEXP (value, 0), 0))
6195 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6196 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6198 rtx temp = expand_simple_binop (GET_MODE (value), code,
6199 XEXP (XEXP (value, 0), 0), op2,
6200 subtarget, 0, OPTAB_LIB_WIDEN);
6201 return expand_simple_binop (GET_MODE (value), code, temp,
6202 force_operand (XEXP (XEXP (value,
6203 0), 1), 0),
6204 target, 0, OPTAB_LIB_WIDEN);
6207 op1 = force_operand (XEXP (value, 0), subtarget);
6208 op2 = force_operand (op2, NULL_RTX);
6209 switch (code)
6211 case MULT:
6212 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6213 case DIV:
6214 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6215 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6216 target, 1, OPTAB_LIB_WIDEN);
6217 else
6218 return expand_divmod (0,
6219 FLOAT_MODE_P (GET_MODE (value))
6220 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6221 GET_MODE (value), op1, op2, target, 0);
6222 case MOD:
6223 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6224 target, 0);
6225 case UDIV:
6226 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6227 target, 1);
6228 case UMOD:
6229 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6230 target, 1);
6231 case ASHIFTRT:
6232 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6233 target, 0, OPTAB_LIB_WIDEN);
6234 default:
6235 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6236 target, 1, OPTAB_LIB_WIDEN);
6239 if (UNARY_P (value))
6241 if (!target)
6242 target = gen_reg_rtx (GET_MODE (value));
6243 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6244 switch (code)
6246 case ZERO_EXTEND:
6247 case SIGN_EXTEND:
6248 case TRUNCATE:
6249 case FLOAT_EXTEND:
6250 case FLOAT_TRUNCATE:
6251 convert_move (target, op1, code == ZERO_EXTEND);
6252 return target;
6254 case FIX:
6255 case UNSIGNED_FIX:
6256 expand_fix (target, op1, code == UNSIGNED_FIX);
6257 return target;
6259 case FLOAT:
6260 case UNSIGNED_FLOAT:
6261 expand_float (target, op1, code == UNSIGNED_FLOAT);
6262 return target;
6264 default:
6265 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6269 #ifdef INSN_SCHEDULING
6270 /* On machines that have insn scheduling, we want all memory references to be
6271 explicit, so we need to deal with such paradoxical SUBREGs. */
6272 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6273 && (GET_MODE_SIZE (GET_MODE (value))
6274 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6275 value
6276 = simplify_gen_subreg (GET_MODE (value),
6277 force_reg (GET_MODE (SUBREG_REG (value)),
6278 force_operand (SUBREG_REG (value),
6279 NULL_RTX)),
6280 GET_MODE (SUBREG_REG (value)),
6281 SUBREG_BYTE (value));
6282 #endif
6284 return value;
6287 /* Subroutine of expand_expr: return nonzero iff there is no way that
6288 EXP can reference X, which is being modified. TOP_P is nonzero if this
6289 call is going to be used to determine whether we need a temporary
6290 for EXP, as opposed to a recursive call to this function.
6292 It is always safe for this routine to return zero since it merely
6293 searches for optimization opportunities. */
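/* For instance, if X is a pseudo register and EXP is a VAR_DECL whose
   DECL_RTL is that very register, we return 0; if EXP is an INTEGER_CST,
   it cannot reference X and we return 1.  */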
6296 safe_from_p (const_rtx x, tree exp, int top_p)
6298 rtx exp_rtl = 0;
6299 int i, nops;
6301 if (x == 0
6302 /* If EXP has varying size, we MUST use a target since we currently
6303 have no way of allocating temporaries of variable size
6304 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6305 So we assume here that something at a higher level has prevented a
6306 clash. This is somewhat bogus, but the best we can do. Only
6307 do this when X is BLKmode and when we are at the top level. */
6308 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6309 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6310 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6311 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6312 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6313 != INTEGER_CST)
6314 && GET_MODE (x) == BLKmode)
6315 /* If X is in the outgoing argument area, it is always safe. */
6316 || (MEM_P (x)
6317 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6318 || (GET_CODE (XEXP (x, 0)) == PLUS
6319 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6320 return 1;
6322 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6323 find the underlying pseudo. */
6324 if (GET_CODE (x) == SUBREG)
6326 x = SUBREG_REG (x);
6327 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6328 return 0;
6331 /* Now look at our tree code and possibly recurse. */
6332 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6334 case tcc_declaration:
6335 exp_rtl = DECL_RTL_IF_SET (exp);
6336 break;
6338 case tcc_constant:
6339 return 1;
6341 case tcc_exceptional:
6342 if (TREE_CODE (exp) == TREE_LIST)
6344 while (1)
6346 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6347 return 0;
6348 exp = TREE_CHAIN (exp);
6349 if (!exp)
6350 return 1;
6351 if (TREE_CODE (exp) != TREE_LIST)
6352 return safe_from_p (x, exp, 0);
6355 else if (TREE_CODE (exp) == CONSTRUCTOR)
6357 constructor_elt *ce;
6358 unsigned HOST_WIDE_INT idx;
6360 for (idx = 0;
6361 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6362 idx++)
6363 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6364 || !safe_from_p (x, ce->value, 0))
6365 return 0;
6366 return 1;
6368 else if (TREE_CODE (exp) == ERROR_MARK)
6369 return 1; /* An already-visited SAVE_EXPR? */
6370 else
6371 return 0;
6373 case tcc_statement:
6374 /* The only case we look at here is the DECL_INITIAL inside a
6375 DECL_EXPR. */
6376 return (TREE_CODE (exp) != DECL_EXPR
6377 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6378 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6379 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6381 case tcc_binary:
6382 case tcc_comparison:
6383 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6384 return 0;
6385 /* Fall through. */
6387 case tcc_unary:
6388 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6390 case tcc_expression:
6391 case tcc_reference:
6392 case tcc_vl_exp:
6393 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6394 the expression. If it is set, we conflict iff we are that rtx or
6395 both are in memory. Otherwise, we check all operands of the
6396 expression recursively. */
6398 switch (TREE_CODE (exp))
6400 case ADDR_EXPR:
6401 /* If the operand is static or we are static, we can't conflict.
6402 Likewise if we don't conflict with the operand at all. */
6403 if (staticp (TREE_OPERAND (exp, 0))
6404 || TREE_STATIC (exp)
6405 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6406 return 1;
6408 /* Otherwise, the only way this can conflict is if we are taking
6409 the address of a DECL whose address is part of X, which is
6410 very rare. */
6411 exp = TREE_OPERAND (exp, 0);
6412 if (DECL_P (exp))
6414 if (!DECL_RTL_SET_P (exp)
6415 || !MEM_P (DECL_RTL (exp)))
6416 return 0;
6417 else
6418 exp_rtl = XEXP (DECL_RTL (exp), 0);
6420 break;
6422 case MISALIGNED_INDIRECT_REF:
6423 case ALIGN_INDIRECT_REF:
6424 case INDIRECT_REF:
6425 if (MEM_P (x)
6426 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6427 get_alias_set (exp)))
6428 return 0;
6429 break;
6431 case CALL_EXPR:
6432 /* Assume that the call will clobber all hard registers and
6433 all of memory. */
6434 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6435 || MEM_P (x))
6436 return 0;
6437 break;
6439 case WITH_CLEANUP_EXPR:
6440 case CLEANUP_POINT_EXPR:
6441 /* Lowered by gimplify.c. */
6442 gcc_unreachable ();
6444 case SAVE_EXPR:
6445 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6447 default:
6448 break;
6451 /* If we have an rtx, we do not need to scan our operands. */
6452 if (exp_rtl)
6453 break;
6455 nops = TREE_OPERAND_LENGTH (exp);
6456 for (i = 0; i < nops; i++)
6457 if (TREE_OPERAND (exp, i) != 0
6458 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6459 return 0;
6461 break;
6463 case tcc_type:
6464 /* Should never get a type here. */
6465 gcc_unreachable ();
6467 case tcc_gimple_stmt:
6468 gcc_unreachable ();
6471 /* If we have an rtl, find any enclosed object. Then see if we conflict
6472 with it. */
6473 if (exp_rtl)
6475 if (GET_CODE (exp_rtl) == SUBREG)
6477 exp_rtl = SUBREG_REG (exp_rtl);
6478 if (REG_P (exp_rtl)
6479 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6480 return 0;
6483 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6484 are memory and they conflict. */
6485 return ! (rtx_equal_p (x, exp_rtl)
6486 || (MEM_P (x) && MEM_P (exp_rtl)
6487 && true_dependence (exp_rtl, VOIDmode, x,
6488 rtx_addr_varies_p)));
6491 /* If we reach here, it is safe. */
6492 return 1;
6496 /* Return the highest power of two that EXP is known to be a multiple of.
6497 This is used in updating alignment of MEMs in array references. */
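/* For example, for the expression `i * 12' the result is 4 (the largest
   power of two dividing 12; the unknown `i' contributes a factor of 1),
   and for `i * 12 + 8' it is MIN (4, 8) == 4.  */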
6499 unsigned HOST_WIDE_INT
6500 highest_pow2_factor (const_tree exp)
6502 unsigned HOST_WIDE_INT c0, c1;
6504 switch (TREE_CODE (exp))
6506 case INTEGER_CST:
6507 /* We can find the lowest bit that's a one. If the low
6508 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6509 We need to handle this case since we can find it in a COND_EXPR,
6510 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6511 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6512 later ICE. */
6513 if (TREE_OVERFLOW (exp))
6514 return BIGGEST_ALIGNMENT;
6515 else
6517 /* Note: tree_low_cst is intentionally not used here;
6518 we don't care about the upper bits. */
6519 c0 = TREE_INT_CST_LOW (exp);
6520 c0 &= -c0;
6521 return c0 ? c0 : BIGGEST_ALIGNMENT;
6523 break;
6525 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6526 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6527 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6528 return MIN (c0, c1);
6530 case MULT_EXPR:
6531 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6532 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6533 return c0 * c1;
6535 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6536 case CEIL_DIV_EXPR:
6537 if (integer_pow2p (TREE_OPERAND (exp, 1))
6538 && host_integerp (TREE_OPERAND (exp, 1), 1))
6540 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6541 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6542 return MAX (1, c0 / c1);
6544 break;
6546 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6547 case SAVE_EXPR:
6548 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6550 case COMPOUND_EXPR:
6551 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6553 case COND_EXPR:
6554 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6555 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6556 return MIN (c0, c1);
6558 default:
6559 break;
6562 return 1;
6565 /* Similar, except that the alignment requirements of TARGET are
6566 taken into account. Assume it is at least as aligned as its
6567 type, unless it is a COMPONENT_REF in which case the layout of
6568 the structure gives the alignment. */
6570 static unsigned HOST_WIDE_INT
6571 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6573 unsigned HOST_WIDE_INT target_align, factor;
6575 factor = highest_pow2_factor (exp);
6576 if (TREE_CODE (target) == COMPONENT_REF)
6577 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6578 else
6579 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6580 return MAX (factor, target_align);
6583 /* Return &VAR expression for emulated thread local VAR. */
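/* E.g. for `__thread int x', the address `&x' becomes, roughly,
   (int *) __emutls_get_address (&_emutls.x).  */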
6585 static tree
6586 emutls_var_address (tree var)
6588 tree emuvar = emutls_decl (var);
6589 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6590 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6591 tree arglist = build_tree_list (NULL_TREE, arg);
6592 tree call = build_function_call_expr (fn, arglist);
6593 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6596 /* Expands variable VAR. */
6598 void
6599 expand_var (tree var)
6601 if (DECL_EXTERNAL (var))
6602 return;
6604 if (TREE_STATIC (var))
6605 /* If this is an inlined copy of a static local variable,
6606 look up the original decl. */
6607 var = DECL_ORIGIN (var);
6609 if (TREE_STATIC (var)
6610 ? !TREE_ASM_WRITTEN (var)
6611 : !DECL_RTL_SET_P (var))
6613 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6614 /* Should be ignored. */;
6615 else if (lang_hooks.expand_decl (var))
6616 /* OK. */;
6617 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6618 expand_decl (var);
6619 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6620 rest_of_decl_compilation (var, 0, 0);
6621 else
6622 /* No expansion needed. */
6623 gcc_assert (TREE_CODE (var) == TYPE_DECL
6624 || TREE_CODE (var) == CONST_DECL
6625 || TREE_CODE (var) == FUNCTION_DECL
6626 || TREE_CODE (var) == LABEL_DECL);
6630 /* Subroutine of expand_expr. Expand the two operands of a binary
6631 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6632 The value may be stored in TARGET if TARGET is nonzero. The
6633 MODIFIER argument is as documented by expand_expr. */
6635 static void
6636 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6637 enum expand_modifier modifier)
6639 if (! safe_from_p (target, exp1, 1))
6640 target = 0;
6641 if (operand_equal_p (exp0, exp1, 0))
6643 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6644 *op1 = copy_rtx (*op0);
6646 else
6648 /* If we need to preserve evaluation order, copy exp0 into its own
6649 temporary variable so that it can't be clobbered by exp1. */
6650 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6651 exp0 = save_expr (exp0);
6652 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6653 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6658 /* Return a MEM that contains constant EXP. DEFER is as for
6659 output_constant_def and MODIFIER is as for expand_expr. */
6661 static rtx
6662 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6664 rtx mem;
6666 mem = output_constant_def (exp, defer);
6667 if (modifier != EXPAND_INITIALIZER)
6668 mem = use_anchored_address (mem);
6669 return mem;
6672 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6673 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6675 static rtx
6676 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6677 enum expand_modifier modifier)
6679 rtx result, subtarget;
6680 tree inner, offset;
6681 HOST_WIDE_INT bitsize, bitpos;
6682 int volatilep, unsignedp;
6683 enum machine_mode mode1;
6685 /* If we are taking the address of a constant and are at the top level,
6686 we have to use output_constant_def since we can't call force_const_mem
6687 at top level. */
6688 /* ??? This should be considered a front-end bug. We should not be
6689 generating ADDR_EXPR of something that isn't an LVALUE. The only
6690 exception here is STRING_CST. */
6691 if (TREE_CODE (exp) == CONSTRUCTOR
6692 || CONSTANT_CLASS_P (exp))
6693 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6695 /* Everything must be something allowed by is_gimple_addressable. */
6696 switch (TREE_CODE (exp))
6698 case INDIRECT_REF:
6699 /* This case will happen via recursion for &a->b. */
6700 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6702 case CONST_DECL:
6703 /* Recurse and make the output_constant_def clause above handle this. */
6704 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6705 tmode, modifier);
6707 case REALPART_EXPR:
6708 /* The real part of the complex number is always first, therefore
6709 the address is the same as the address of the parent object. */
6710 offset = 0;
6711 bitpos = 0;
6712 inner = TREE_OPERAND (exp, 0);
6713 break;
6715 case IMAGPART_EXPR:
6716 /* The imaginary part of the complex number is always second.
6717 The expression is therefore always offset by the size of the
6718 scalar type. */
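/* E.g. for _Complex double this is GET_MODE_BITSIZE (DFmode), i.e. 64 on
   targets where double is 64 bits wide.  */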
6719 offset = 0;
6720 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6721 inner = TREE_OPERAND (exp, 0);
6722 break;
6724 case VAR_DECL:
6725 /* TLS emulation hook - replace __thread VAR's &VAR with
6726 __emutls_get_address (&_emutls.VAR). */
6727 if (! targetm.have_tls
6728 && TREE_CODE (exp) == VAR_DECL
6729 && DECL_THREAD_LOCAL_P (exp))
6731 exp = emutls_var_address (exp);
6732 return expand_expr (exp, target, tmode, modifier);
6734 /* Fall through. */
6736 default:
6737 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6738 expand_expr, as that can have various side effects; LABEL_DECLs for
6739 example, may not have their DECL_RTL set yet. Assume language
6740 specific tree nodes can be expanded in some interesting way. */
6741 if (DECL_P (exp)
6742 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6744 result = expand_expr (exp, target, tmode,
6745 modifier == EXPAND_INITIALIZER
6746 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6748 /* If the DECL isn't in memory, then the DECL wasn't properly
6749 marked TREE_ADDRESSABLE, which will be either a front-end
6750 or a tree optimizer bug. */
6751 gcc_assert (MEM_P (result));
6752 result = XEXP (result, 0);
6754 /* ??? Is this needed anymore? */
6755 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6757 assemble_external (exp);
6758 TREE_USED (exp) = 1;
6761 if (modifier != EXPAND_INITIALIZER
6762 && modifier != EXPAND_CONST_ADDRESS)
6763 result = force_operand (result, target);
6764 return result;
6767 /* Pass FALSE as the last argument to get_inner_reference although
6768 we are expanding to RTL. The rationale is that we know how to
6769 handle "aligning nodes" here: we can just bypass them because
6770 they won't change the final object whose address will be returned
6771 (they actually exist only for that purpose). */
6772 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6773 &mode1, &unsignedp, &volatilep, false);
6774 break;
6777 /* We must have made progress. */
6778 gcc_assert (inner != exp);
6780 subtarget = offset || bitpos ? NULL_RTX : target;
6781 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6783 if (offset)
6785 rtx tmp;
6787 if (modifier != EXPAND_NORMAL)
6788 result = force_operand (result, NULL);
6789 tmp = expand_expr (offset, NULL_RTX, tmode,
6790 modifier == EXPAND_INITIALIZER
6791 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6793 result = convert_memory_address (tmode, result);
6794 tmp = convert_memory_address (tmode, tmp);
6796 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6797 result = gen_rtx_PLUS (tmode, result, tmp);
6798 else
6800 subtarget = bitpos ? NULL_RTX : target;
6801 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6802 1, OPTAB_LIB_WIDEN);
6806 if (bitpos)
6808 /* Someone beforehand should have rejected taking the address
6809 of such an object. */
6810 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6812 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6813 if (modifier < EXPAND_SUM)
6814 result = force_operand (result, target);
6817 return result;
6820 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6821 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6823 static rtx
6824 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6825 enum expand_modifier modifier)
6827 enum machine_mode rmode;
6828 rtx result;
6830 /* Target mode of VOIDmode says "whatever's natural". */
6831 if (tmode == VOIDmode)
6832 tmode = TYPE_MODE (TREE_TYPE (exp));
6834 /* We can get called with some Weird Things if the user does silliness
6835 like "(short) &a". In that case, convert_memory_address won't do
6836 the right thing, so ignore the given target mode. */
6837 if (tmode != Pmode && tmode != ptr_mode)
6838 tmode = Pmode;
6840 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6841 tmode, modifier);
6843 /* Despite expand_expr's claims concerning ignoring TMODE when not
6844 strictly convenient, stuff breaks if we don't honor it. Note
6845 that combined with the above, we only do this for pointer modes. */
6846 rmode = GET_MODE (result);
6847 if (rmode == VOIDmode)
6848 rmode = tmode;
6849 if (rmode != tmode)
6850 result = convert_memory_address (tmode, result);
6852 return result;
6856 /* expand_expr: generate code for computing expression EXP.
6857 An rtx for the computed value is returned. The value is never null.
6858 In the case of a void EXP, const0_rtx is returned.
6860 The value may be stored in TARGET if TARGET is nonzero.
6861 TARGET is just a suggestion; callers must assume that
6862 the rtx returned may not be the same as TARGET.
6864 If TARGET is CONST0_RTX, it means that the value will be ignored.
6866 If TMODE is not VOIDmode, it suggests generating the
6867 result in mode TMODE. But this is done only when convenient.
6868 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6869 TMODE is just a suggestion; callers must assume that
6870 the rtx returned may not have mode TMODE.
6872 Note that TARGET may have neither TMODE nor MODE. In that case, it
6873 probably will not be used.
6875 If MODIFIER is EXPAND_SUM then when EXP is an addition
6876 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6877 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6878 products as above, or REG or MEM, or constant.
6879 Ordinarily in such cases we would output mul or add instructions
6880 and then return a pseudo reg containing the sum.
6882 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6883 it also marks a label as absolutely required (it can't be dead).
6884 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6885 This is used for outputting expressions used in initializers.
6887 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6888 with a constant address even if that address is not normally legitimate.
6889 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6891 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6892 a call parameter. Such targets require special care as we haven't yet
6893 marked TARGET so that it's safe from being trashed by libcalls. We
6894 don't want to use TARGET for anything but the final result;
6895 intermediate values must go elsewhere. Additionally, calls to
6896 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6898 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6899 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6900 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6901 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6902 recursively. */
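/* As an illustration of EXPAND_SUM: expanding `p + i * 4' that way may
   yield (plus (reg P) (mult (reg I) (const_int 4))) instead of forcing
   the sum into a pseudo, which is convenient when the result will feed
   address formation.  P and I here stand for whatever pseudos hold the
   operands.  */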
6904 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6905 enum expand_modifier, rtx *);
6908 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6909 enum expand_modifier modifier, rtx *alt_rtl)
6911 int rn = -1;
6912 rtx ret, last = NULL;
6914 /* Handle ERROR_MARK before anybody tries to access its type. */
6915 if (TREE_CODE (exp) == ERROR_MARK
6916 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6918 ret = CONST0_RTX (tmode);
6919 return ret ? ret : const0_rtx;
6922 if (flag_non_call_exceptions)
6924 rn = lookup_stmt_eh_region (exp);
6925 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6926 if (rn >= 0)
6927 last = get_last_insn ();
6930 /* If this is an expression of some kind and it has an associated line
6931 number, then emit the line number before expanding the expression.
6933 We need to save and restore the file and line information so that
6934 errors discovered during expansion are emitted with the right
6935 information. It would be better if the diagnostic routines
6936 used the file/line information embedded in the tree nodes rather
6937 than globals. */
6938 if (cfun && EXPR_HAS_LOCATION (exp))
6940 location_t saved_location = input_location;
6941 input_location = EXPR_LOCATION (exp);
6942 set_curr_insn_source_location (input_location);
6944 /* Record where the insns produced belong. */
6945 set_curr_insn_block (TREE_BLOCK (exp));
6947 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6949 input_location = saved_location;
6951 else
6953 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6956 /* If using non-call exceptions, mark all insns that may trap.
6957 expand_call() will mark CALL_INSNs before we get to this code,
6958 but it doesn't handle libcalls, and these may trap. */
6959 if (rn >= 0)
6961 rtx insn;
6962 for (insn = next_real_insn (last); insn;
6963 insn = next_real_insn (insn))
6965 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6966 /* If we want exceptions for non-call insns, any
6967 may_trap_p instruction may throw. */
6968 && GET_CODE (PATTERN (insn)) != CLOBBER
6969 && GET_CODE (PATTERN (insn)) != USE
6970 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6972 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6973 REG_NOTES (insn));
6978 return ret;
6981 static rtx
6982 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6983 enum expand_modifier modifier, rtx *alt_rtl)
6985 rtx op0, op1, op2, temp, decl_rtl;
6986 tree type;
6987 int unsignedp;
6988 enum machine_mode mode;
6989 enum tree_code code = TREE_CODE (exp);
6990 optab this_optab;
6991 rtx subtarget, original_target;
6992 int ignore;
6993 tree context, subexp0, subexp1;
6994 bool reduce_bit_field = false;
6995 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6996 ? reduce_to_bit_field_precision ((expr), \
6997 target, \
6998 type) \
6999 : (expr))
7001 if (GIMPLE_STMT_P (exp))
7003 type = void_type_node;
7004 mode = VOIDmode;
7005 unsignedp = 0;
7007 else
7009 type = TREE_TYPE (exp);
7010 mode = TYPE_MODE (type);
7011 unsignedp = TYPE_UNSIGNED (type);
7013 if (lang_hooks.reduce_bit_field_operations
7014 && TREE_CODE (type) == INTEGER_TYPE
7015 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
7017 /* An operation in what may be a bit-field type needs the
7018 result to be reduced to the precision of the bit-field type,
7019 which is narrower than that of the type's mode. */
7020 reduce_bit_field = true;
7021 if (modifier == EXPAND_STACK_PARM)
7022 target = 0;
7025 /* Use subtarget as the target for operand 0 of a binary operation. */
7026 subtarget = get_subtarget (target);
7027 original_target = target;
7028 ignore = (target == const0_rtx
7029 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
7030 || code == CONVERT_EXPR || code == COND_EXPR
7031 || code == VIEW_CONVERT_EXPR)
7032 && TREE_CODE (type) == VOID_TYPE));
7034 /* If we are going to ignore this result, we need only do something
7035 if there is a side-effect somewhere in the expression. If there
7036 is, short-circuit the most common cases here. Note that we must
7037 not call expand_expr with anything but const0_rtx in case this
7038 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7040 if (ignore)
7042 if (! TREE_SIDE_EFFECTS (exp))
7043 return const0_rtx;
7045 /* Ensure we reference a volatile object even if the value is ignored, but
7046 don't do this if all we are doing is taking its address. */
7047 if (TREE_THIS_VOLATILE (exp)
7048 && TREE_CODE (exp) != FUNCTION_DECL
7049 && mode != VOIDmode && mode != BLKmode
7050 && modifier != EXPAND_CONST_ADDRESS)
7052 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7053 if (MEM_P (temp))
7054 temp = copy_to_reg (temp);
7055 return const0_rtx;
7058 if (TREE_CODE_CLASS (code) == tcc_unary
7059 || code == COMPONENT_REF || code == INDIRECT_REF)
7060 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7061 modifier);
7063 else if (TREE_CODE_CLASS (code) == tcc_binary
7064 || TREE_CODE_CLASS (code) == tcc_comparison
7065 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7067 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7068 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7069 return const0_rtx;
7071 else if (code == BIT_FIELD_REF)
7073 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7074 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7075 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7076 return const0_rtx;
7079 target = 0;
7083 switch (code)
7085 case LABEL_DECL:
7087 tree function = decl_function_context (exp);
7089 temp = label_rtx (exp);
7090 temp = gen_rtx_LABEL_REF (Pmode, temp);
7092 if (function != current_function_decl
7093 && function != 0)
7094 LABEL_REF_NONLOCAL_P (temp) = 1;
7096 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7097 return temp;
7100 case SSA_NAME:
7101 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7102 NULL);
7104 case PARM_DECL:
7105 case VAR_DECL:
7106 /* If a static var's type was incomplete when the decl was written,
7107 but the type is complete now, lay out the decl now. */
7108 if (DECL_SIZE (exp) == 0
7109 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7110 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7111 layout_decl (exp, 0);
7113 /* TLS emulation hook - replace __thread vars with
7114 *__emutls_get_address (&_emutls.var). */
7115 if (! targetm.have_tls
7116 && TREE_CODE (exp) == VAR_DECL
7117 && DECL_THREAD_LOCAL_P (exp))
7119 exp = build_fold_indirect_ref (emutls_var_address (exp));
7120 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7123 /* ... fall through ... */
7125 case FUNCTION_DECL:
7126 case RESULT_DECL:
7127 decl_rtl = DECL_RTL (exp);
7128 gcc_assert (decl_rtl);
7129 decl_rtl = copy_rtx (decl_rtl);
7131 /* Ensure the variable is marked as used even if it doesn't go through
7132 a parser. If it hasn't been used yet, write out an external
7133 definition. */
7134 if (! TREE_USED (exp))
7136 assemble_external (exp);
7137 TREE_USED (exp) = 1;
7140 /* Show we haven't gotten RTL for this yet. */
7141 temp = 0;
7143 /* Variables inherited from containing functions should have
7144 been lowered by this point. */
7145 context = decl_function_context (exp);
7146 gcc_assert (!context
7147 || context == current_function_decl
7148 || TREE_STATIC (exp)
7149 /* ??? C++ creates functions that are not TREE_STATIC. */
7150 || TREE_CODE (exp) == FUNCTION_DECL);
7152 /* This is the case of an array whose size is to be determined
7153 from its initializer, while the initializer is still being parsed.
7154 See expand_decl. */
7156 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7157 temp = validize_mem (decl_rtl);
7159 /* If DECL_RTL is memory, we are in the normal case and either
7160 the address is not valid or it is not a register and -fforce-addr
7161 is specified, get the address into a register. */
7163 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7165 if (alt_rtl)
7166 *alt_rtl = decl_rtl;
7167 decl_rtl = use_anchored_address (decl_rtl);
7168 if (modifier != EXPAND_CONST_ADDRESS
7169 && modifier != EXPAND_SUM
7170 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7171 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7172 temp = replace_equiv_address (decl_rtl,
7173 copy_rtx (XEXP (decl_rtl, 0)));
7176 /* If we got something, return it. But first, set the alignment
7177 if the address is a register. */
7178 if (temp != 0)
7180 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7181 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7183 return temp;
7186 /* If the mode of DECL_RTL does not match that of the decl, it
7187 must be a promoted value. We return a SUBREG of the wanted mode,
7188 but mark it so that we know that it was already extended. */
7190 if (REG_P (decl_rtl)
7191 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7193 enum machine_mode pmode;
7195 /* Get the signedness used for this variable. Ensure we get the
7196 same mode we got when the variable was declared. */
7197 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7198 (TREE_CODE (exp) == RESULT_DECL
7199 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7200 gcc_assert (GET_MODE (decl_rtl) == pmode);
7202 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7203 SUBREG_PROMOTED_VAR_P (temp) = 1;
7204 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7205 return temp;
7208 return decl_rtl;
7210 case INTEGER_CST:
7211 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7212 TREE_INT_CST_HIGH (exp), mode);
7214 return temp;
7216 case VECTOR_CST:
7218 tree tmp = NULL_TREE;
7219 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7220 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7221 return const_vector_from_tree (exp);
7222 if (GET_MODE_CLASS (mode) == MODE_INT)
7224 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7225 if (type_for_mode)
7226 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7228 if (!tmp)
7229 tmp = build_constructor_from_list (type,
7230 TREE_VECTOR_CST_ELTS (exp));
7231 return expand_expr (tmp, ignore ? const0_rtx : target,
7232 tmode, modifier);
7235 case CONST_DECL:
7236 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7238 case REAL_CST:
7239 /* If optimized, generate immediate CONST_DOUBLE
7240 which will be turned into memory by reload if necessary.
7242 We used to force a register so that loop.c could see it. But
7243 this does not allow gen_* patterns to perform optimizations with
7244 the constants. It also produces two insns in cases like "x = 1.0;".
7245 On most machines, floating-point constants are not permitted in
7246 many insns, so we'd end up copying it to a register in any case.
7248 Now, we do the copying in expand_binop, if appropriate. */
7249 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7250 TYPE_MODE (TREE_TYPE (exp)));
7252 case COMPLEX_CST:
7253 /* Handle evaluating a complex constant in a CONCAT target. */
7254 if (original_target && GET_CODE (original_target) == CONCAT)
7256 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7257 rtx rtarg, itarg;
7259 rtarg = XEXP (original_target, 0);
7260 itarg = XEXP (original_target, 1);
7262 /* Move the real and imaginary parts separately. */
7263 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7264 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7266 if (op0 != rtarg)
7267 emit_move_insn (rtarg, op0);
7268 if (op1 != itarg)
7269 emit_move_insn (itarg, op1);
7271 return original_target;
7274 /* ... fall through ... */
7276 case STRING_CST:
7277 temp = expand_expr_constant (exp, 1, modifier);
7279 /* temp contains a constant address.
7280 On RISC machines where a constant address isn't valid,
7281 make some insns to get that address into a register. */
7282 if (modifier != EXPAND_CONST_ADDRESS
7283 && modifier != EXPAND_INITIALIZER
7284 && modifier != EXPAND_SUM
7285 && (! memory_address_p (mode, XEXP (temp, 0))
7286 || flag_force_addr))
7287 return replace_equiv_address (temp,
7288 copy_rtx (XEXP (temp, 0)));
7289 return temp;
7291 case SAVE_EXPR:
7293 tree val = TREE_OPERAND (exp, 0);
7294 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7296 if (!SAVE_EXPR_RESOLVED_P (exp))
7298 /* We can indeed still hit this case, typically via builtin
7299 expanders calling save_expr immediately before expanding
7300 something. Assume this means that we only have to deal
7301 with non-BLKmode values. */
7302 gcc_assert (GET_MODE (ret) != BLKmode);
7304 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7305 DECL_ARTIFICIAL (val) = 1;
7306 DECL_IGNORED_P (val) = 1;
7307 TREE_OPERAND (exp, 0) = val;
7308 SAVE_EXPR_RESOLVED_P (exp) = 1;
7310 if (!CONSTANT_P (ret))
7311 ret = copy_to_reg (ret);
7312 SET_DECL_RTL (val, ret);
7315 return ret;
7318 case GOTO_EXPR:
7319 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7320 expand_goto (TREE_OPERAND (exp, 0));
7321 else
7322 expand_computed_goto (TREE_OPERAND (exp, 0));
7323 return const0_rtx;
7325 case CONSTRUCTOR:
7326 /* If we don't need the result, just ensure we evaluate any
7327 subexpressions. */
7328 if (ignore)
7330 unsigned HOST_WIDE_INT idx;
7331 tree value;
7333 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7334 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7336 return const0_rtx;
7339 /* Try to avoid creating a temporary at all. This is possible
7340 if all of the initializer is zero.
7341 FIXME: try to handle all [0..255] initializers we can handle
7342 with memset. */
7343 else if (TREE_STATIC (exp)
7344 && !TREE_ADDRESSABLE (exp)
7345 && target != 0 && mode == BLKmode
7346 && all_zeros_p (exp))
7348 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7349 return target;
7352 /* All elts simple constants => refer to a constant in memory. But
7353 if this is a non-BLKmode mode, let it store a field at a time
7354 since that should make a CONST_INT or CONST_DOUBLE when we
7355 fold. Likewise, if we have a target we can use, it is best to
7356 store directly into the target unless the type is large enough
7357 that memcpy will be used. If we are making an initializer and
7358 all operands are constant, put it in memory as well.
7360 FIXME: Avoid trying to fill vector constructors piece-meal.
7361 Output them with output_constant_def below unless we're sure
7362 they're zeros. This should go away when vector initializers
7363 are treated like VECTOR_CST instead of arrays.
7365 else if ((TREE_STATIC (exp)
7366 && ((mode == BLKmode
7367 && ! (target != 0 && safe_from_p (target, exp, 1)))
7368 || TREE_ADDRESSABLE (exp)
7369 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7370 && (! MOVE_BY_PIECES_P
7371 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7372 TYPE_ALIGN (type)))
7373 && ! mostly_zeros_p (exp))))
7374 || ((modifier == EXPAND_INITIALIZER
7375 || modifier == EXPAND_CONST_ADDRESS)
7376 && TREE_CONSTANT (exp)))
7378 rtx constructor = expand_expr_constant (exp, 1, modifier);
7380 if (modifier != EXPAND_CONST_ADDRESS
7381 && modifier != EXPAND_INITIALIZER
7382 && modifier != EXPAND_SUM)
7383 constructor = validize_mem (constructor);
7385 return constructor;
7387 else
7389 /* Handle calls that pass values in multiple non-contiguous
7390 locations. The Irix 6 ABI has examples of this. */
7391 if (target == 0 || ! safe_from_p (target, exp, 1)
7392 || GET_CODE (target) == PARALLEL
7393 || modifier == EXPAND_STACK_PARM)
7394 target
7395 = assign_temp (build_qualified_type (type,
7396 (TYPE_QUALS (type)
7397 | (TREE_READONLY (exp)
7398 * TYPE_QUAL_CONST))),
7399 0, TREE_ADDRESSABLE (exp), 1);
7401 store_constructor (exp, target, 0, int_expr_size (exp));
7402 return target;
7405 case MISALIGNED_INDIRECT_REF:
7406 case ALIGN_INDIRECT_REF:
7407 case INDIRECT_REF:
7409 tree exp1 = TREE_OPERAND (exp, 0);
7411 if (modifier != EXPAND_WRITE)
7413 tree t;
7415 t = fold_read_from_constant_string (exp);
7416 if (t)
7417 return expand_expr (t, target, tmode, modifier);
7420 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7421 op0 = memory_address (mode, op0);
7423 if (code == ALIGN_INDIRECT_REF)
7425 int align = TYPE_ALIGN_UNIT (type);
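/* Round the address down to an ALIGN-byte boundary.  ALIGN is a power
   of two, so ANDing with -ALIGN clears the low-order address bits;
   e.g. ALIGN == 16 masks with ~15 and drops the bottom four bits.  */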
7426 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7427 op0 = memory_address (mode, op0);
7430 temp = gen_rtx_MEM (mode, op0);
7432 set_mem_attributes (temp, exp, 0);
7434 /* Resolve the misalignment now, so that we don't have to remember
7435 to resolve it later. Of course, this only works for reads. */
7436 /* ??? When we get around to supporting writes, we'll have to handle
7437 this in store_expr directly. The vectorizer isn't generating
7438 those yet, however. */
7439 if (code == MISALIGNED_INDIRECT_REF)
7441 int icode;
7442 rtx reg, insn;
7444 gcc_assert (modifier == EXPAND_NORMAL
7445 || modifier == EXPAND_STACK_PARM);
7447 /* The vectorizer should have already checked the mode. */
7448 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7449 gcc_assert (icode != CODE_FOR_nothing);
7451 /* We've already validated the memory, and we're creating a
7452 new pseudo destination. The predicates really can't fail. */
7453 reg = gen_reg_rtx (mode);
7455 /* Nor can the insn generator. */
7456 insn = GEN_FCN (icode) (reg, temp);
7457 emit_insn (insn);
7459 return reg;
7462 return temp;
7465 case TARGET_MEM_REF:
7467 struct mem_address addr;
7469 get_address_description (exp, &addr);
7470 op0 = addr_for_mem_ref (&addr, true);
7471 op0 = memory_address (mode, op0);
7472 temp = gen_rtx_MEM (mode, op0);
7473 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7475 return temp;
7477 case ARRAY_REF:
7480 tree array = TREE_OPERAND (exp, 0);
7481 tree index = TREE_OPERAND (exp, 1);
7483 /* Fold an expression like: "foo"[2].
7484 This is not done in fold so it won't happen inside &.
7485 Don't fold if this is for wide characters since it's too
7486 difficult to do correctly and this is a very rare case. */
7488 if (modifier != EXPAND_CONST_ADDRESS
7489 && modifier != EXPAND_INITIALIZER
7490 && modifier != EXPAND_MEMORY)
7492 tree t = fold_read_from_constant_string (exp);
7494 if (t)
7495 return expand_expr (t, target, tmode, modifier);
7498 /* If this is a constant index into a constant array,
7499 just get the value from the array. Handle both the cases when
7500 we have an explicit constructor and when our operand is a variable
7501 that was declared const. */
7503 if (modifier != EXPAND_CONST_ADDRESS
7504 && modifier != EXPAND_INITIALIZER
7505 && modifier != EXPAND_MEMORY
7506 && TREE_CODE (array) == CONSTRUCTOR
7507 && ! TREE_SIDE_EFFECTS (array)
7508 && TREE_CODE (index) == INTEGER_CST)
7510 unsigned HOST_WIDE_INT ix;
7511 tree field, value;
7513 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7514 field, value)
7515 if (tree_int_cst_equal (field, index))
7517 if (!TREE_SIDE_EFFECTS (value))
7518 return expand_expr (fold (value), target, tmode, modifier);
7519 break;
7523 else if (optimize >= 1
7524 && modifier != EXPAND_CONST_ADDRESS
7525 && modifier != EXPAND_INITIALIZER
7526 && modifier != EXPAND_MEMORY
7527 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7528 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7529 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7530 && targetm.binds_local_p (array))
7532 if (TREE_CODE (index) == INTEGER_CST)
7534 tree init = DECL_INITIAL (array);
7536 if (TREE_CODE (init) == CONSTRUCTOR)
7538 unsigned HOST_WIDE_INT ix;
7539 tree field, value;
7541 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7542 field, value)
7543 if (tree_int_cst_equal (field, index))
7545 if (!TREE_SIDE_EFFECTS (value))
7546 return expand_expr (fold (value), target, tmode,
7547 modifier);
7548 break;
7551 else if (TREE_CODE (init) == STRING_CST)
7553 tree index1 = index;
7554 tree low_bound = array_ref_low_bound (exp);
7555 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7557 /* Optimize the special case of a zero lower bound.
7559 We convert the low_bound to sizetype to avoid some problems
7560 with constant folding. (E.g. suppose the lower bound is 1,
7561 and its mode is QI. Without the conversion, (ARRAY
7562 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7563 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7565 if (! integer_zerop (low_bound))
7566 index1 = size_diffop (index1, fold_convert (sizetype,
7567 low_bound));
7569 if (0 > compare_tree_int (index1,
7570 TREE_STRING_LENGTH (init)))
7572 tree type = TREE_TYPE (TREE_TYPE (init));
7573 enum machine_mode mode = TYPE_MODE (type);
7575 if (GET_MODE_CLASS (mode) == MODE_INT
7576 && GET_MODE_SIZE (mode) == 1)
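/* Illustration (hypothetical names): for static const char msg[] = "abc",
   a read of msg[1] arrives here with INIT being the STRING_CST "abc" and
   INDEX1 the constant 1, and folds directly to the QImode constant 'b'.  */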
7577 return gen_int_mode (TREE_STRING_POINTER (init)
7578 [TREE_INT_CST_LOW (index1)],
7579 mode);
7585 goto normal_inner_ref;
7587 case COMPONENT_REF:
7588 /* If the operand is a CONSTRUCTOR, we can just extract the
7589 appropriate field if it is present. */
7590 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7592 unsigned HOST_WIDE_INT idx;
7593 tree field, value;
7595 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7596 idx, field, value)
7597 if (field == TREE_OPERAND (exp, 1)
7598 /* We can normally use the value of the field in the
7599 CONSTRUCTOR. However, if this is a bitfield in
7600 an integral mode that we can fit in a HOST_WIDE_INT,
7601 we must mask only the number of bits in the bitfield,
7602 since this is done implicitly by the constructor. If
7603 the bitfield does not meet either of those conditions,
7604 we can't do this optimization. */
7605 && (! DECL_BIT_FIELD (field)
7606 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7607 && (GET_MODE_BITSIZE (DECL_MODE (field))
7608 <= HOST_BITS_PER_WIDE_INT))))
7610 if (DECL_BIT_FIELD (field)
7611 && modifier == EXPAND_STACK_PARM)
7612 target = 0;
7613 op0 = expand_expr (value, target, tmode, modifier);
7614 if (DECL_BIT_FIELD (field))
7616 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7617 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
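/* Reduce the expanded value to the width of the bitfield.  For an
   unsigned field of BITSIZE bits this is an AND with the mask
   (1 << BITSIZE) - 1 (e.g. BITSIZE == 3 gives the mask 7).  For a
   signed field the value is shifted left and then arithmetically right
   by GET_MODE_BITSIZE (imode) - BITSIZE bits, which sign-extends the
   field from its top bit.  */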
7619 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7621 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7622 op0 = expand_and (imode, op0, op1, target);
7624 else
7626 tree count
7627 = build_int_cst (NULL_TREE,
7628 GET_MODE_BITSIZE (imode) - bitsize);
7630 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7631 target, 0);
7632 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7633 target, 0);
7637 return op0;
7640 goto normal_inner_ref;
7642 case BIT_FIELD_REF:
7643 case ARRAY_RANGE_REF:
7644 normal_inner_ref:
7646 enum machine_mode mode1;
7647 HOST_WIDE_INT bitsize, bitpos;
7648 tree offset;
7649 int volatilep = 0;
7650 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7651 &mode1, &unsignedp, &volatilep, true);
7652 rtx orig_op0;
7654 /* If we got back the original object, something is wrong. Perhaps
7655 we are evaluating an expression too early. In any event, don't
7656 infinitely recurse. */
7657 gcc_assert (tem != exp);
7659 /* If TEM's type is a union of variable size, pass TARGET to the inner
7660 computation, since it will need a temporary and TARGET is known
7661 to be safe to use for that. This occurs in unchecked conversion in Ada. */
7663 orig_op0 = op0
7664 = expand_expr (tem,
7665 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7666 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7667 != INTEGER_CST)
7668 && modifier != EXPAND_STACK_PARM
7669 ? target : NULL_RTX),
7670 VOIDmode,
7671 (modifier == EXPAND_INITIALIZER
7672 || modifier == EXPAND_CONST_ADDRESS
7673 || modifier == EXPAND_STACK_PARM)
7674 ? modifier : EXPAND_NORMAL);
7676 /* If this is a constant, put it into a register if it is a legitimate
7677 constant, OFFSET is 0, and we won't try to extract outside the
7678 register (in case we were passed a partially uninitialized object
7679 or a view_conversion to a larger size). Force the constant to
7680 memory otherwise. */
7681 if (CONSTANT_P (op0))
7683 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7684 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7685 && offset == 0
7686 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7687 op0 = force_reg (mode, op0);
7688 else
7689 op0 = validize_mem (force_const_mem (mode, op0));
7692 /* Otherwise, if this object is not in memory and we either have an
7693 offset, a BLKmode result, or a reference outside the object, put it
7694 there. Such cases can occur in Ada if we have unchecked conversion
7695 of an expression from a scalar type to an array or record type or
7696 for an ARRAY_RANGE_REF whose type is BLKmode. */
7697 else if (!MEM_P (op0)
7698 && (offset != 0
7699 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7700 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7702 tree nt = build_qualified_type (TREE_TYPE (tem),
7703 (TYPE_QUALS (TREE_TYPE (tem))
7704 | TYPE_QUAL_CONST));
7705 rtx memloc = assign_temp (nt, 1, 1, 1);
7707 emit_move_insn (memloc, op0);
7708 op0 = memloc;
7711 if (offset != 0)
7713 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7714 EXPAND_SUM);
7716 gcc_assert (MEM_P (op0));
7718 #ifdef POINTERS_EXTEND_UNSIGNED
7719 if (GET_MODE (offset_rtx) != Pmode)
7720 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7721 #else
7722 if (GET_MODE (offset_rtx) != ptr_mode)
7723 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7724 #endif
7726 if (GET_MODE (op0) == BLKmode
7727 /* A constant address in OP0 can have VOIDmode; we must
7728 not try to call force_reg in that case. */
7729 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7730 && bitsize != 0
7731 && (bitpos % bitsize) == 0
7732 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7733 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7735 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7736 bitpos = 0;
7739 op0 = offset_address (op0, offset_rtx,
7740 highest_pow2_factor (offset));
7743 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7744 record its alignment as BIGGEST_ALIGNMENT. */
7745 if (MEM_P (op0) && bitpos == 0 && offset != 0
7746 && is_aligning_offset (offset, tem))
7747 set_mem_align (op0, BIGGEST_ALIGNMENT);
7749 /* Don't forget about volatility even if this is a bitfield. */
7750 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7752 if (op0 == orig_op0)
7753 op0 = copy_rtx (op0);
7755 MEM_VOLATILE_P (op0) = 1;
7758 /* The following code doesn't handle CONCAT.
7759 Assume only bitpos == 0 can be used for CONCAT, due to
7760 one-element arrays having the same mode as their element. */
7761 if (GET_CODE (op0) == CONCAT)
7763 gcc_assert (bitpos == 0
7764 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7765 return op0;
7768 /* In cases where an aligned union has an unaligned object
7769 as a field, we might be extracting a BLKmode value from
7770 an integer-mode (e.g., SImode) object. Handle this case
7771 by doing the extract into an object as wide as the field
7772 (which we know to be the width of a basic mode), then
7773 storing into memory, and changing the mode to BLKmode. */
7774 if (mode1 == VOIDmode
7775 || REG_P (op0) || GET_CODE (op0) == SUBREG
7776 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7777 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7778 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7779 && modifier != EXPAND_CONST_ADDRESS
7780 && modifier != EXPAND_INITIALIZER)
7781 /* If the field isn't aligned enough to fetch as a memref,
7782 fetch it as a bit field. */
7783 || (mode1 != BLKmode
7784 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7785 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7786 || (MEM_P (op0)
7787 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7788 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7789 && ((modifier == EXPAND_CONST_ADDRESS
7790 || modifier == EXPAND_INITIALIZER)
7791 ? STRICT_ALIGNMENT
7792 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7793 || (bitpos % BITS_PER_UNIT != 0)))
7794 /* If the type and the field are a constant size and the
7795 size of the type isn't the same size as the bitfield,
7796 we must use bitfield operations. */
7797 || (bitsize >= 0
7798 && TYPE_SIZE (TREE_TYPE (exp))
7799 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7800 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7801 bitsize)))
7803 enum machine_mode ext_mode = mode;
7805 if (ext_mode == BLKmode
7806 && ! (target != 0 && MEM_P (op0)
7807 && MEM_P (target)
7808 && bitpos % BITS_PER_UNIT == 0))
7809 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7811 if (ext_mode == BLKmode)
7813 if (target == 0)
7814 target = assign_temp (type, 0, 1, 1);
7816 if (bitsize == 0)
7817 return target;
7819 /* In this case, BITPOS must start at a byte boundary and
7820 TARGET, if specified, must be a MEM. */
7821 gcc_assert (MEM_P (op0)
7822 && (!target || MEM_P (target))
7823 && !(bitpos % BITS_PER_UNIT));
7825 emit_block_move (target,
7826 adjust_address (op0, VOIDmode,
7827 bitpos / BITS_PER_UNIT),
7828 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7829 / BITS_PER_UNIT),
7830 (modifier == EXPAND_STACK_PARM
7831 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7833 return target;
7836 op0 = validize_mem (op0);
7838 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7839 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7841 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7842 (modifier == EXPAND_STACK_PARM
7843 ? NULL_RTX : target),
7844 ext_mode, ext_mode);
7846 /* If the result is a record type and BITSIZE is narrower than
7847 the mode of OP0, an integral mode, and this is a big endian
7848 machine, we must put the field into the high-order bits. */
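/* For example, a 3-bit field extracted into SImode on a big-endian
   target is shifted left by 32 - 3 = 29 bits below, so that its bytes
   come first in memory if OP0 is subsequently stored.  */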
7849 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7850 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7851 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7852 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7853 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7854 - bitsize),
7855 op0, 1);
7857 /* If the result type is BLKmode, store the data into a temporary
7858 of the appropriate type, but with the mode corresponding to the
7859 mode for the data we have (op0's mode). It's tempting to make
7860 this a constant type, since we know it's only being stored once,
7861 but that can cause problems if we are taking the address of this
7862 COMPONENT_REF because the MEM of any reference via that address
7863 will have flags corresponding to the type, which will not
7864 necessarily be constant. */
7865 if (mode == BLKmode)
7867 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7868 rtx new;
7870 /* If the reference doesn't use the alias set of its type,
7871 we cannot create the temporary using that type. */
7872 if (component_uses_parent_alias_set (exp))
7874 new = assign_stack_local (ext_mode, size, 0);
7875 set_mem_alias_set (new, get_alias_set (exp));
7877 else
7878 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7880 emit_move_insn (new, op0);
7881 op0 = copy_rtx (new);
7882 PUT_MODE (op0, BLKmode);
7883 set_mem_attributes (op0, exp, 1);
7886 return op0;
7889 /* If the result is BLKmode, use that to access the object
7890 now as well. */
7891 if (mode == BLKmode)
7892 mode1 = BLKmode;
7894 /* Get a reference to just this component. */
7895 if (modifier == EXPAND_CONST_ADDRESS
7896 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7897 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7898 else
7899 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7901 if (op0 == orig_op0)
7902 op0 = copy_rtx (op0);
7904 set_mem_attributes (op0, exp, 0);
7905 if (REG_P (XEXP (op0, 0)))
7906 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7908 MEM_VOLATILE_P (op0) |= volatilep;
7909 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7910 || modifier == EXPAND_CONST_ADDRESS
7911 || modifier == EXPAND_INITIALIZER)
7912 return op0;
7913 else if (target == 0)
7914 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7916 convert_move (target, op0, unsignedp);
7917 return target;
7920 case OBJ_TYPE_REF:
7921 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7923 case CALL_EXPR:
7924 /* Check for a built-in function. */
7925 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7926 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7927 == FUNCTION_DECL)
7928 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7930 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7931 == BUILT_IN_FRONTEND)
7932 return lang_hooks.expand_expr (exp, original_target,
7933 tmode, modifier,
7934 alt_rtl);
7935 else
7936 return expand_builtin (exp, target, subtarget, tmode, ignore);
7939 return expand_call (exp, target, ignore);
7941 case NON_LVALUE_EXPR:
7942 case NOP_EXPR:
7943 case CONVERT_EXPR:
7944 if (TREE_OPERAND (exp, 0) == error_mark_node)
7945 return const0_rtx;
7947 if (TREE_CODE (type) == UNION_TYPE)
7949 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7951 /* If both input and output are BLKmode, this conversion isn't doing
7952 anything except possibly changing memory attribute. */
7953 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7955 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7956 modifier);
7958 result = copy_rtx (result);
7959 set_mem_attributes (result, exp, 0);
7960 return result;
7963 if (target == 0)
7965 if (TYPE_MODE (type) != BLKmode)
7966 target = gen_reg_rtx (TYPE_MODE (type));
7967 else
7968 target = assign_temp (type, 0, 1, 1);
7971 if (MEM_P (target))
7972 /* Store data into beginning of memory target. */
7973 store_expr (TREE_OPERAND (exp, 0),
7974 adjust_address (target, TYPE_MODE (valtype), 0),
7975 modifier == EXPAND_STACK_PARM,
7976 false);
7978 else
7980 gcc_assert (REG_P (target));
7982 /* Store this field into a union of the proper type. */
7983 store_field (target,
7984 MIN ((int_size_in_bytes (TREE_TYPE
7985 (TREE_OPERAND (exp, 0)))
7986 * BITS_PER_UNIT),
7987 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7988 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7989 type, 0, false);
7992 /* Return the entire union. */
7993 return target;
7996 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7998 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7999 modifier);
8001 /* If the signedness of the conversion differs and OP0 is
8002 a promoted SUBREG, clear that indication since we now
8003 have to do the proper extension. */
8004 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8005 && GET_CODE (op0) == SUBREG)
8006 SUBREG_PROMOTED_VAR_P (op0) = 0;
8008 return REDUCE_BIT_FIELD (op0);
8011 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8012 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8013 if (GET_MODE (op0) == mode)
8016 /* If OP0 is a constant, just convert it into the proper mode. */
8017 else if (CONSTANT_P (op0))
8019 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8020 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8022 if (modifier == EXPAND_INITIALIZER)
8023 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8024 subreg_lowpart_offset (mode,
8025 inner_mode));
8026 else
8027 op0 = convert_modes (mode, inner_mode, op0,
8028 TYPE_UNSIGNED (inner_type));
8031 else if (modifier == EXPAND_INITIALIZER)
8032 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8034 else if (target == 0)
8035 op0 = convert_to_mode (mode, op0,
8036 TYPE_UNSIGNED (TREE_TYPE
8037 (TREE_OPERAND (exp, 0))));
8038 else
8040 convert_move (target, op0,
8041 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8042 op0 = target;
8045 return REDUCE_BIT_FIELD (op0);
8047 case VIEW_CONVERT_EXPR:
8048 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8050 /* If the input and output modes are both the same, we are done. */
8051 if (TYPE_MODE (type) == GET_MODE (op0))
8053 /* If neither mode is BLKmode, and both modes are the same size
8054 then we can use gen_lowpart. */
8055 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8056 && GET_MODE_SIZE (TYPE_MODE (type))
8057 == GET_MODE_SIZE (GET_MODE (op0)))
8059 if (GET_CODE (op0) == SUBREG)
8060 op0 = force_reg (GET_MODE (op0), op0);
8061 op0 = gen_lowpart (TYPE_MODE (type), op0);
8063 /* If both modes are integral, then we can convert from one to the
8064 other. */
8065 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8066 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8067 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8068 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8069 /* As a last resort, spill op0 to memory, and reload it in a
8070 different mode. */
8071 else if (!MEM_P (op0))
8073 /* If the operand is not a MEM, force it into memory. Since we
8074 are going to be changing the mode of the MEM, don't call
8075 force_const_mem for constants because we don't allow pool
8076 constants to change mode. */
8077 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8079 gcc_assert (!TREE_ADDRESSABLE (exp));
8081 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8082 target
8083 = assign_stack_temp_for_type
8084 (TYPE_MODE (inner_type),
8085 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8087 emit_move_insn (target, op0);
8088 op0 = target;
8091 /* At this point, OP0 is in the correct mode. If the output type is such
8092 that the operand is known to be aligned, indicate that it is.
8093 Otherwise, we need only be concerned about alignment for non-BLKmode
8094 results. */
8095 if (MEM_P (op0))
8097 op0 = copy_rtx (op0);
8099 if (TYPE_ALIGN_OK (type))
8100 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8101 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8102 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8104 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8105 HOST_WIDE_INT temp_size
8106 = MAX (int_size_in_bytes (inner_type),
8107 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8108 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8109 temp_size, 0, type);
8110 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8112 gcc_assert (!TREE_ADDRESSABLE (exp));
8114 if (GET_MODE (op0) == BLKmode)
8115 emit_block_move (new_with_op0_mode, op0,
8116 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8117 (modifier == EXPAND_STACK_PARM
8118 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8119 else
8120 emit_move_insn (new_with_op0_mode, op0);
8122 op0 = new;
8125 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8128 return op0;
8130 case POINTER_PLUS_EXPR:
8131 /* Even though the sizetype mode and the pointer's mode can be different,
8132 expand is able to handle this correctly and get the correct result out
8133 of the PLUS_EXPR code. */
8134 case PLUS_EXPR:
8136 /* Check if this is a case for multiplication and addition. */
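/* The shape recognized here is (T) A * (T) B + C, where A and B are both
   widened from the same narrower precision and signedness.  When MODE is
   exactly twice as wide as the inner mode and the target implements the
   matching widening multiply-accumulate optab (smadd_widen_optab or
   umadd_widen_optab), the whole expression is emitted as one ternary
   operation instead of a multiply followed by an add.  */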
8137 if (TREE_CODE (type) == INTEGER_TYPE
8138 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8140 tree subsubexp0, subsubexp1;
8141 enum tree_code code0, code1;
8143 subexp0 = TREE_OPERAND (exp, 0);
8144 subsubexp0 = TREE_OPERAND (subexp0, 0);
8145 subsubexp1 = TREE_OPERAND (subexp0, 1);
8146 code0 = TREE_CODE (subsubexp0);
8147 code1 = TREE_CODE (subsubexp1);
8148 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8149 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8150 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8151 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8152 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8153 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8154 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8156 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8157 enum machine_mode innermode = TYPE_MODE (op0type);
8158 bool zextend_p = TYPE_UNSIGNED (op0type);
8159 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8160 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8161 && (optab_handler (this_optab, mode)->insn_code
8162 != CODE_FOR_nothing))
8164 expand_operands (TREE_OPERAND (subsubexp0, 0),
8165 TREE_OPERAND (subsubexp1, 0),
8166 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8167 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8168 VOIDmode, EXPAND_NORMAL);
8169 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8170 target, unsignedp);
8171 gcc_assert (temp);
8172 return REDUCE_BIT_FIELD (temp);
8177 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8178 something else, make sure we add the register to the constant and
8179 then to the other thing. This case can occur during strength
8180 reduction and doing it this way will produce better code if the
8181 frame pointer or argument pointer is eliminated.
8183 fold-const.c will ensure that the constant is always in the inner
8184 PLUS_EXPR, so the only case we need to do anything about is if
8185 sp, ap, or fp is our second argument, in which case we must swap
8186 the innermost first argument and our second argument. */
8188 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8189 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8190 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8191 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8192 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8193 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8195 tree t = TREE_OPERAND (exp, 1);
8197 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8198 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8201 /* If the result is to be ptr_mode and we are adding an integer to
8202 something, we might be forming a constant. So try to use
8203 plus_constant. If it produces a sum and we can't accept it,
8204 use force_operand. This allows P = &ARR[const] to generate
8205 efficient code on machines where a SYMBOL_REF is not a valid
8206 address.
8208 If this is an EXPAND_SUM call, always return the sum. */
8209 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8210 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8212 if (modifier == EXPAND_STACK_PARM)
8213 target = 0;
8214 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8215 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8216 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8218 rtx constant_part;
8220 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8221 EXPAND_SUM);
8222 /* Use immed_double_const to ensure that the constant is
8223 truncated according to the mode of OP1, then sign extended
8224 to a HOST_WIDE_INT. Using the constant directly can result
8225 in non-canonical RTL in a 64x32 cross compile. */
8226 constant_part
8227 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8228 (HOST_WIDE_INT) 0,
8229 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8230 op1 = plus_constant (op1, INTVAL (constant_part));
8231 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8232 op1 = force_operand (op1, target);
8233 return REDUCE_BIT_FIELD (op1);
8236 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8237 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8238 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8240 rtx constant_part;
8242 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8243 (modifier == EXPAND_INITIALIZER
8244 ? EXPAND_INITIALIZER : EXPAND_SUM));
8245 if (! CONSTANT_P (op0))
8247 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8248 VOIDmode, modifier);
8249 /* Return a PLUS if modifier says it's OK. */
8250 if (modifier == EXPAND_SUM
8251 || modifier == EXPAND_INITIALIZER)
8252 return simplify_gen_binary (PLUS, mode, op0, op1);
8253 goto binop2;
8255 /* Use immed_double_const to ensure that the constant is
8256 truncated according to the mode of OP1, then sign extended
8257 to a HOST_WIDE_INT. Using the constant directly can result
8258 in non-canonical RTL in a 64x32 cross compile. */
8259 constant_part
8260 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8261 (HOST_WIDE_INT) 0,
8262 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8263 op0 = plus_constant (op0, INTVAL (constant_part));
8264 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8265 op0 = force_operand (op0, target);
8266 return REDUCE_BIT_FIELD (op0);
8270 /* No sense saving up arithmetic to be done
8271 if it's all in the wrong mode to form part of an address.
8272 And force_operand won't know whether to sign-extend or
8273 zero-extend. */
8274 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8275 || mode != ptr_mode)
8277 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8278 subtarget, &op0, &op1, 0);
8279 if (op0 == const0_rtx)
8280 return op1;
8281 if (op1 == const0_rtx)
8282 return op0;
8283 goto binop2;
8286 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8287 subtarget, &op0, &op1, modifier);
8288 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8290 case MINUS_EXPR:
8291 /* Check if this is a case for multiplication and subtraction. */
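/* Mirror image of the PLUS_EXPR case above: C - (T) A * (T) B, with A and
   B widened from the same narrower type, maps to the widening
   multiply-subtract optabs (smsub_widen_optab or umsub_widen_optab) when
   the target provides them.  */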
8292 if (TREE_CODE (type) == INTEGER_TYPE
8293 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8295 tree subsubexp0, subsubexp1;
8296 enum tree_code code0, code1;
8298 subexp1 = TREE_OPERAND (exp, 1);
8299 subsubexp0 = TREE_OPERAND (subexp1, 0);
8300 subsubexp1 = TREE_OPERAND (subexp1, 1);
8301 code0 = TREE_CODE (subsubexp0);
8302 code1 = TREE_CODE (subsubexp1);
8303 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8304 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8305 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8306 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8307 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8308 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8309 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8311 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8312 enum machine_mode innermode = TYPE_MODE (op0type);
8313 bool zextend_p = TYPE_UNSIGNED (op0type);
8314 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8315 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8316 && (optab_handler (this_optab, mode)->insn_code
8317 != CODE_FOR_nothing))
8319 expand_operands (TREE_OPERAND (subsubexp0, 0),
8320 TREE_OPERAND (subsubexp1, 0),
8321 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8322 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8323 VOIDmode, EXPAND_NORMAL);
8324 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8325 target, unsignedp);
8326 gcc_assert (temp);
8327 return REDUCE_BIT_FIELD (temp);
8332 /* For initializers, we are allowed to return a MINUS of two
8333 symbolic constants. Here we handle all cases when both operands
8334 are constant. */
8335 /* Handle difference of two symbolic constants,
8336 for the sake of an initializer. */
8337 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8338 && really_constant_p (TREE_OPERAND (exp, 0))
8339 && really_constant_p (TREE_OPERAND (exp, 1)))
8341 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8342 NULL_RTX, &op0, &op1, modifier);
8344 /* If the last operand is a CONST_INT, use plus_constant of
8345 the negated constant. Else make the MINUS. */
8346 if (GET_CODE (op1) == CONST_INT)
8347 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8348 else
8349 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8352 /* No sense saving up arithmetic to be done
8353 if it's all in the wrong mode to form part of an address.
8354 And force_operand won't know whether to sign-extend or
8355 zero-extend. */
8356 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8357 || mode != ptr_mode)
8358 goto binop;
8360 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8361 subtarget, &op0, &op1, modifier);
8363 /* Convert A - const to A + (-const). */
8364 if (GET_CODE (op1) == CONST_INT)
8366 op1 = negate_rtx (mode, op1);
8367 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8370 goto binop2;
8372 case MULT_EXPR:
8373 /* If first operand is constant, swap them.
8374 Thus the following special case checks need only
8375 check the second operand. */
8376 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8378 tree t1 = TREE_OPERAND (exp, 0);
8379 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8380 TREE_OPERAND (exp, 1) = t1;
8383 /* Attempt to return something suitable for generating an
8384 indexed address, for machines that support that. */
8386 if (modifier == EXPAND_SUM && mode == ptr_mode
8387 && host_integerp (TREE_OPERAND (exp, 1), 0))
8389 tree exp1 = TREE_OPERAND (exp, 1);
8391 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8392 EXPAND_SUM);
8394 if (!REG_P (op0))
8395 op0 = force_operand (op0, NULL_RTX);
8396 if (!REG_P (op0))
8397 op0 = copy_to_mode_reg (mode, op0);
8399 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8400 gen_int_mode (tree_low_cst (exp1, 0),
8401 TYPE_MODE (TREE_TYPE (exp1)))));
8404 if (modifier == EXPAND_STACK_PARM)
8405 target = 0;
8407 /* Check for multiplying things that have been extended
8408 from a narrower type. If this machine supports multiplying
8409 in that narrower type with a result in the desired type,
8410 do it that way, and avoid the explicit type-conversion. */
8412 subexp0 = TREE_OPERAND (exp, 0);
8413 subexp1 = TREE_OPERAND (exp, 1);
8414 /* First, check if we have a multiplication of one signed and one
8415 unsigned operand. */
8416 if (TREE_CODE (subexp0) == NOP_EXPR
8417 && TREE_CODE (subexp1) == NOP_EXPR
8418 && TREE_CODE (type) == INTEGER_TYPE
8419 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8420 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8421 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8422 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8423 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8424 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8426 enum machine_mode innermode
8427 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8428 this_optab = usmul_widen_optab;
8429 if (mode == GET_MODE_WIDER_MODE (innermode))
8431 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8433 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8434 expand_operands (TREE_OPERAND (subexp0, 0),
8435 TREE_OPERAND (subexp1, 0),
8436 NULL_RTX, &op0, &op1, 0);
8437 else
8438 expand_operands (TREE_OPERAND (subexp0, 0),
8439 TREE_OPERAND (subexp1, 0),
8440 NULL_RTX, &op1, &op0, 0);
8442 goto binop3;
8446 /* Check for a multiplication with matching signedness. */
8447 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8448 && TREE_CODE (type) == INTEGER_TYPE
8449 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8450 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8451 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8452 && int_fits_type_p (TREE_OPERAND (exp, 1),
8453 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8454 /* Don't use a widening multiply if a shift will do. */
8455 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8456 > HOST_BITS_PER_WIDE_INT)
8457 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8459 || (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8460 && (TYPE_PRECISION (TREE_TYPE
8461 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8462 == TYPE_PRECISION (TREE_TYPE
8463 (TREE_OPERAND
8464 (TREE_OPERAND (exp, 0), 0))))
8465 /* If both operands are extended, they must either both
8466 be zero-extended or both be sign-extended. */
8467 && (TYPE_UNSIGNED (TREE_TYPE
8468 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8469 == TYPE_UNSIGNED (TREE_TYPE
8470 (TREE_OPERAND
8471 (TREE_OPERAND (exp, 0), 0)))))))
8473 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8474 enum machine_mode innermode = TYPE_MODE (op0type);
8475 bool zextend_p = TYPE_UNSIGNED (op0type);
8476 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8477 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8479 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8481 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8483 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8484 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8485 TREE_OPERAND (exp, 1),
8486 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8487 else
8488 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8489 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8490 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8491 goto binop3;
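/* If only the opposite-signedness widening multiply is available and the
   inner operands are word_mode, use it to form the full double-word
   product and let expand_mult_highpart_adjust fix up the high part for
   the signedness we actually need.  */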
8493 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8494 && innermode == word_mode)
8496 rtx htem, hipart;
8497 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8498 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8499 op1 = convert_modes (innermode, mode,
8500 expand_normal (TREE_OPERAND (exp, 1)),
8501 unsignedp);
8502 else
8503 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8504 temp = expand_binop (mode, other_optab, op0, op1, target,
8505 unsignedp, OPTAB_LIB_WIDEN);
8506 hipart = gen_highpart (innermode, temp);
8507 htem = expand_mult_highpart_adjust (innermode, hipart,
8508 op0, op1, hipart,
8509 zextend_p);
8510 if (htem != hipart)
8511 emit_move_insn (hipart, htem);
8512 return REDUCE_BIT_FIELD (temp);
8516 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8517 subtarget, &op0, &op1, 0);
8518 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8520 case TRUNC_DIV_EXPR:
8521 case FLOOR_DIV_EXPR:
8522 case CEIL_DIV_EXPR:
8523 case ROUND_DIV_EXPR:
8524 case EXACT_DIV_EXPR:
8525 if (modifier == EXPAND_STACK_PARM)
8526 target = 0;
8527 /* Possible optimization: compute the dividend with EXPAND_SUM;
8528 then, if the divisor is constant, we can optimize the case
8529 where some terms of the dividend have coefficients divisible by it. */
8530 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8531 subtarget, &op0, &op1, 0);
8532 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8534 case RDIV_EXPR:
8535 goto binop;
8537 case TRUNC_MOD_EXPR:
8538 case FLOOR_MOD_EXPR:
8539 case CEIL_MOD_EXPR:
8540 case ROUND_MOD_EXPR:
8541 if (modifier == EXPAND_STACK_PARM)
8542 target = 0;
8543 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8544 subtarget, &op0, &op1, 0);
8545 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8547 case FIX_TRUNC_EXPR:
8548 op0 = expand_normal (TREE_OPERAND (exp, 0));
8549 if (target == 0 || modifier == EXPAND_STACK_PARM)
8550 target = gen_reg_rtx (mode);
8551 expand_fix (target, op0, unsignedp);
8552 return target;
8554 case FLOAT_EXPR:
8555 op0 = expand_normal (TREE_OPERAND (exp, 0));
8556 if (target == 0 || modifier == EXPAND_STACK_PARM)
8557 target = gen_reg_rtx (mode);
8558 /* expand_float can't figure out what to do if FROM has VOIDmode.
8559 So give it the correct mode. With -O, cse will optimize this. */
8560 if (GET_MODE (op0) == VOIDmode)
8561 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8562 op0);
8563 expand_float (target, op0,
8564 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8565 return target;
8567 case NEGATE_EXPR:
8568 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8569 VOIDmode, EXPAND_NORMAL);
8570 if (modifier == EXPAND_STACK_PARM)
8571 target = 0;
8572 temp = expand_unop (mode,
8573 optab_for_tree_code (NEGATE_EXPR, type),
8574 op0, target, 0);
8575 gcc_assert (temp);
8576 return REDUCE_BIT_FIELD (temp);
8578 case ABS_EXPR:
8579 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8580 VOIDmode, EXPAND_NORMAL);
8581 if (modifier == EXPAND_STACK_PARM)
8582 target = 0;
8584 /* ABS_EXPR is not valid for complex arguments. */
8585 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8586 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8588 /* Unsigned abs is simply the operand. Testing here means we don't
8589 risk generating incorrect code below. */
8590 if (TYPE_UNSIGNED (type))
8591 return op0;
8593 return expand_abs (mode, op0, target, unsignedp,
8594 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8596 case MAX_EXPR:
8597 case MIN_EXPR:
8598 target = original_target;
8599 if (target == 0
8600 || modifier == EXPAND_STACK_PARM
8601 || (MEM_P (target) && MEM_VOLATILE_P (target))
8602 || GET_MODE (target) != mode
8603 || (REG_P (target)
8604 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8605 target = gen_reg_rtx (mode);
8606 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8607 target, &op0, &op1, 0);
8609 /* First try to do it with a special MIN or MAX instruction.
8610 If that does not win, use a conditional jump to select the proper
8611 value. */
8612 this_optab = optab_for_tree_code (code, type);
8613 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8614 OPTAB_WIDEN);
8615 if (temp != 0)
8616 return temp;
8618 /* At this point, a MEM target is no longer useful; we will get better
8619 code without it. */
8621 if (! REG_P (target))
8622 target = gen_reg_rtx (mode);
8624 /* If op1 was placed in target, swap op0 and op1. */
8625 if (target != op0 && target == op1)
8627 temp = op0;
8628 op0 = op1;
8629 op1 = temp;
8632 /* We generate better code and avoid problems with op1 mentioning
8633 target by forcing op1 into a pseudo if it isn't a constant. */
8634 if (! CONSTANT_P (op1))
8635 op1 = force_reg (mode, op1);
8638 enum rtx_code comparison_code;
8639 rtx cmpop1 = op1;
8641 if (code == MAX_EXPR)
8642 comparison_code = unsignedp ? GEU : GE;
8643 else
8644 comparison_code = unsignedp ? LEU : LE;
8646 /* Canonicalize to comparisons against 0. */
8647 if (op1 == const1_rtx)
8649 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8650 or (a != 0 ? a : 1) for unsigned.
8651 For MIN we are safe converting (a <= 1 ? a : 1)
8652 into (a <= 0 ? a : 1) */
8653 cmpop1 = const0_rtx;
8654 if (code == MAX_EXPR)
8655 comparison_code = unsignedp ? NE : GT;
8657 if (op1 == constm1_rtx && !unsignedp)
8659 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8660 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8661 cmpop1 = const0_rtx;
8662 if (code == MIN_EXPR)
8663 comparison_code = LT;
8665 #ifdef HAVE_conditional_move
8666 /* Use a conditional move if possible. */
8667 if (can_conditionally_move_p (mode))
8669 rtx insn;
8671 /* ??? Same problem as in expmed.c: emit_conditional_move
8672 forces a stack adjustment via compare_from_rtx, and we
8673 lose the stack adjustment if the sequence we are about
8674 to create is discarded. */
8675 do_pending_stack_adjust ();
8677 start_sequence ();
8679 /* Try to emit the conditional move. */
8680 insn = emit_conditional_move (target, comparison_code,
8681 op0, cmpop1, mode,
8682 op0, op1, mode,
8683 unsignedp);
8685 /* If we could do the conditional move, emit the sequence,
8686 and return. */
8687 if (insn)
8689 rtx seq = get_insns ();
8690 end_sequence ();
8691 emit_insn (seq);
8692 return target;
8695 /* Otherwise discard the sequence and fall back to code with
8696 branches. */
8697 end_sequence ();
8699 #endif
8700 if (target != op0)
8701 emit_move_insn (target, op0);
8703 temp = gen_label_rtx ();
8704 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8705 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8707 emit_move_insn (target, op1);
8708 emit_label (temp);
8709 return target;
8711 case BIT_NOT_EXPR:
8712 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8713 VOIDmode, EXPAND_NORMAL);
8714 if (modifier == EXPAND_STACK_PARM)
8715 target = 0;
8716 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8717 gcc_assert (temp);
8718 return temp;
8720 /* ??? Can optimize bitwise operations with one arg constant.
8721 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8722 and (a bitwise1 b) bitwise2 b (etc)
8723 but that is probably not worthwhile. */
8725 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8726 boolean values when we want in all cases to compute both of them. In
8727 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8728 as actual zero-or-1 values and then bitwise anding. In cases where
8729 there cannot be any side effects, better code would be made by
8730 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8731 how to recognize those cases. */
8733 case TRUTH_AND_EXPR:
8734 code = BIT_AND_EXPR;
8735 case BIT_AND_EXPR:
8736 goto binop;
8738 case TRUTH_OR_EXPR:
8739 code = BIT_IOR_EXPR;
8740 case BIT_IOR_EXPR:
8741 goto binop;
8743 case TRUTH_XOR_EXPR:
8744 code = BIT_XOR_EXPR;
8745 case BIT_XOR_EXPR:
8746 goto binop;
8748 case LSHIFT_EXPR:
8749 case RSHIFT_EXPR:
8750 case LROTATE_EXPR:
8751 case RROTATE_EXPR:
8752 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8753 subtarget = 0;
8754 if (modifier == EXPAND_STACK_PARM)
8755 target = 0;
8756 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8757 VOIDmode, EXPAND_NORMAL);
8758 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8759 unsignedp);
8761 /* Could determine the answer when only additive constants differ. Also,
8762 the addition of one can be handled by changing the condition. */
8763 case LT_EXPR:
8764 case LE_EXPR:
8765 case GT_EXPR:
8766 case GE_EXPR:
8767 case EQ_EXPR:
8768 case NE_EXPR:
8769 case UNORDERED_EXPR:
8770 case ORDERED_EXPR:
8771 case UNLT_EXPR:
8772 case UNLE_EXPR:
8773 case UNGT_EXPR:
8774 case UNGE_EXPR:
8775 case UNEQ_EXPR:
8776 case LTGT_EXPR:
8777 temp = do_store_flag (exp,
8778 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8779 tmode != VOIDmode ? tmode : mode, 0);
8780 if (temp != 0)
8781 return temp;
8783 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8784 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8785 && original_target
8786 && REG_P (original_target)
8787 && (GET_MODE (original_target)
8788 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8790 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8791 VOIDmode, EXPAND_NORMAL);
8793 /* If temp is constant, we can just compute the result. */
8794 if (GET_CODE (temp) == CONST_INT)
8796 if (INTVAL (temp) != 0)
8797 emit_move_insn (target, const1_rtx);
8798 else
8799 emit_move_insn (target, const0_rtx);
8801 return target;
8804 if (temp != original_target)
8806 enum machine_mode mode1 = GET_MODE (temp);
8807 if (mode1 == VOIDmode)
8808 mode1 = tmode != VOIDmode ? tmode : mode;
8810 temp = copy_to_mode_reg (mode1, temp);
8813 op1 = gen_label_rtx ();
8814 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8815 GET_MODE (temp), unsignedp, op1);
8816 emit_move_insn (temp, const1_rtx);
8817 emit_label (op1);
8818 return temp;
8821 /* If no set-flag instruction, must generate a conditional store
8822 into a temporary variable. Drop through and handle this
8823 like && and ||. */
8825 if (! ignore
8826 && (target == 0
8827 || modifier == EXPAND_STACK_PARM
8828 || ! safe_from_p (target, exp, 1)
8829 /* Make sure we don't have a hard reg (such as the function's return
8830 value) live across basic blocks, if not optimizing. */
8831 || (!optimize && REG_P (target)
8832 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8833 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8835 if (target)
8836 emit_move_insn (target, const0_rtx);
8838 op1 = gen_label_rtx ();
8839 jumpifnot (exp, op1);
8841 if (target)
8842 emit_move_insn (target, const1_rtx);
8844 emit_label (op1);
8845 return ignore ? const0_rtx : target;
8847 case TRUTH_NOT_EXPR:
8848 if (modifier == EXPAND_STACK_PARM)
8849 target = 0;
8850 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8851 VOIDmode, EXPAND_NORMAL);
8852 /* The parser is careful to generate TRUTH_NOT_EXPR
8853 only with operands that are always zero or one. */
8854 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8855 target, 1, OPTAB_LIB_WIDEN);
8856 gcc_assert (temp);
8857 return temp;
8859 case STATEMENT_LIST:
8861 tree_stmt_iterator iter;
8863 gcc_assert (ignore);
8865 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8866 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8868 return const0_rtx;
8870 case COND_EXPR:
8871 /* A COND_EXPR with its type being VOID_TYPE represents a
8872 conditional jump and is handled in
8873 expand_gimple_cond_expr. */
8874 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8876 /* Note that COND_EXPRs whose type is a structure or union
8877 are required to be constructed to contain assignments of
8878 a temporary variable, so that we can evaluate them here
8879 for side effect only. If type is void, we must do likewise. */
8881 gcc_assert (!TREE_ADDRESSABLE (type)
8882 && !ignore
8883 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8884 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8886 /* If we are not to produce a result, we have no target. Otherwise,
8887 if a target was specified, use it; it will not be used as an
8888 intermediate target unless it is safe. If no target, use a
8889 temporary. */
8891 if (modifier != EXPAND_STACK_PARM
8892 && original_target
8893 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8894 && GET_MODE (original_target) == mode
8895 #ifdef HAVE_conditional_move
8896 && (! can_conditionally_move_p (mode)
8897 || REG_P (original_target))
8898 #endif
8899 && !MEM_P (original_target))
8900 temp = original_target;
8901 else
8902 temp = assign_temp (type, 0, 0, 1);
8904 do_pending_stack_adjust ();
8905 NO_DEFER_POP;
8906 op0 = gen_label_rtx ();
8907 op1 = gen_label_rtx ();
8908 jumpifnot (TREE_OPERAND (exp, 0), op0);
8909 store_expr (TREE_OPERAND (exp, 1), temp,
8910 modifier == EXPAND_STACK_PARM,
8911 false);
8913 emit_jump_insn (gen_jump (op1));
8914 emit_barrier ();
8915 emit_label (op0);
8916 store_expr (TREE_OPERAND (exp, 2), temp,
8917 modifier == EXPAND_STACK_PARM,
8918 false);
8920 emit_label (op1);
8921 OK_DEFER_POP;
8922 return temp;
8924 case VEC_COND_EXPR:
8925 target = expand_vec_cond_expr (exp, target);
8926 return target;
8928 case MODIFY_EXPR:
8930 tree lhs = TREE_OPERAND (exp, 0);
8931 tree rhs = TREE_OPERAND (exp, 1);
8932 gcc_assert (ignore);
8933 expand_assignment (lhs, rhs, false);
8934 return const0_rtx;
8937 case GIMPLE_MODIFY_STMT:
8939 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8940 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8942 gcc_assert (ignore);
8944 /* Check for |= or &= of a bitfield of size one into another bitfield
8945 of size 1. In this case, (unless we need the result of the
8946 assignment) we can do this more efficiently with a
8947 test followed by an assignment, if necessary.
8949 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8950 things change so we do, this code should be enhanced to
8951 support it. */
8952 if (TREE_CODE (lhs) == COMPONENT_REF
8953 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8954 || TREE_CODE (rhs) == BIT_AND_EXPR)
8955 && TREE_OPERAND (rhs, 0) == lhs
8956 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8957 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8958 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8960 rtx label = gen_label_rtx ();
8961 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8962 do_jump (TREE_OPERAND (rhs, 1),
8963 value ? label : 0,
8964 value ? 0 : label);
8965 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
8966 MOVE_NONTEMPORAL (exp));
8967 do_pending_stack_adjust ();
8968 emit_label (label);
8969 return const0_rtx;
8972 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
8973 return const0_rtx;
8976 case RETURN_EXPR:
8977 if (!TREE_OPERAND (exp, 0))
8978 expand_null_return ();
8979 else
8980 expand_return (TREE_OPERAND (exp, 0));
8981 return const0_rtx;
8983 case ADDR_EXPR:
8984 return expand_expr_addr_expr (exp, target, tmode, modifier);
8986 case COMPLEX_EXPR:
8987 /* Get the rtx code of the operands. */
8988 op0 = expand_normal (TREE_OPERAND (exp, 0));
8989 op1 = expand_normal (TREE_OPERAND (exp, 1));
8991 if (!target)
8992 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8994 /* Move the real (op0) and imaginary (op1) parts to their location. */
8995 write_complex_part (target, op0, false);
8996 write_complex_part (target, op1, true);
8998 return target;
9000 case REALPART_EXPR:
9001 op0 = expand_normal (TREE_OPERAND (exp, 0));
9002 return read_complex_part (op0, false);
9004 case IMAGPART_EXPR:
9005 op0 = expand_normal (TREE_OPERAND (exp, 0));
9006 return read_complex_part (op0, true);
9008 case RESX_EXPR:
9009 expand_resx_expr (exp);
9010 return const0_rtx;
9012 case TRY_CATCH_EXPR:
9013 case CATCH_EXPR:
9014 case EH_FILTER_EXPR:
9015 case TRY_FINALLY_EXPR:
9016 /* Lowered by tree-eh.c. */
9017 gcc_unreachable ();
9019 case WITH_CLEANUP_EXPR:
9020 case CLEANUP_POINT_EXPR:
9021 case TARGET_EXPR:
9022 case CASE_LABEL_EXPR:
9023 case VA_ARG_EXPR:
9024 case BIND_EXPR:
9025 case INIT_EXPR:
9026 case CONJ_EXPR:
9027 case COMPOUND_EXPR:
9028 case PREINCREMENT_EXPR:
9029 case PREDECREMENT_EXPR:
9030 case POSTINCREMENT_EXPR:
9031 case POSTDECREMENT_EXPR:
9032 case LOOP_EXPR:
9033 case EXIT_EXPR:
9034 case TRUTH_ANDIF_EXPR:
9035 case TRUTH_ORIF_EXPR:
9036 /* Lowered by gimplify.c. */
9037 gcc_unreachable ();
9039 case CHANGE_DYNAMIC_TYPE_EXPR:
9040 /* This is ignored at the RTL level. The tree level set
9041 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
9042 overkill for the RTL layer but is all that we can
9043 represent. */
9044 return const0_rtx;
9046 case EXC_PTR_EXPR:
9047 return get_exception_pointer (cfun);
9049 case FILTER_EXPR:
9050 return get_exception_filter (cfun);
9052 case FDESC_EXPR:
9053 /* Function descriptors are not valid except as
9054 initialization constants, and should not be expanded. */
9055 gcc_unreachable ();
9057 case SWITCH_EXPR:
9058 expand_case (exp);
9059 return const0_rtx;
9061 case LABEL_EXPR:
9062 expand_label (TREE_OPERAND (exp, 0));
9063 return const0_rtx;
9065 case ASM_EXPR:
9066 expand_asm_expr (exp);
9067 return const0_rtx;
9069 case WITH_SIZE_EXPR:
9070 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9071 have pulled out the size to use in whatever context it needed. */
9072 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9073 modifier, alt_rtl);
9075 case REALIGN_LOAD_EXPR:
9077 tree oprnd0 = TREE_OPERAND (exp, 0);
9078 tree oprnd1 = TREE_OPERAND (exp, 1);
9079 tree oprnd2 = TREE_OPERAND (exp, 2);
9080 rtx op2;
9082 this_optab = optab_for_tree_code (code, type);
9083 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9084 op2 = expand_normal (oprnd2);
9085 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9086 target, unsignedp);
9087 gcc_assert (temp);
9088 return temp;
9091 case DOT_PROD_EXPR:
9093 tree oprnd0 = TREE_OPERAND (exp, 0);
9094 tree oprnd1 = TREE_OPERAND (exp, 1);
9095 tree oprnd2 = TREE_OPERAND (exp, 2);
9096 rtx op2;
9098 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9099 op2 = expand_normal (oprnd2);
9100 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9101 target, unsignedp);
9102 return target;
9105 case WIDEN_SUM_EXPR:
9107 tree oprnd0 = TREE_OPERAND (exp, 0);
9108 tree oprnd1 = TREE_OPERAND (exp, 1);
9110 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9111 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9112 target, unsignedp);
9113 return target;
9116 case REDUC_MAX_EXPR:
9117 case REDUC_MIN_EXPR:
9118 case REDUC_PLUS_EXPR:
9120 op0 = expand_normal (TREE_OPERAND (exp, 0));
9121 this_optab = optab_for_tree_code (code, type);
9122 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9123 gcc_assert (temp);
9124 return temp;
9127 case VEC_EXTRACT_EVEN_EXPR:
9128 case VEC_EXTRACT_ODD_EXPR:
9130 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9131 NULL_RTX, &op0, &op1, 0);
9132 this_optab = optab_for_tree_code (code, type);
9133 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9134 OPTAB_WIDEN);
9135 gcc_assert (temp);
9136 return temp;
9139 case VEC_INTERLEAVE_HIGH_EXPR:
9140 case VEC_INTERLEAVE_LOW_EXPR:
9142 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9143 NULL_RTX, &op0, &op1, 0);
9144 this_optab = optab_for_tree_code (code, type);
9145 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9146 OPTAB_WIDEN);
9147 gcc_assert (temp);
9148 return temp;
9151 case VEC_LSHIFT_EXPR:
9152 case VEC_RSHIFT_EXPR:
9154 target = expand_vec_shift_expr (exp, target);
9155 return target;
9158 case VEC_UNPACK_HI_EXPR:
9159 case VEC_UNPACK_LO_EXPR:
9161 op0 = expand_normal (TREE_OPERAND (exp, 0));
9162 this_optab = optab_for_tree_code (code, type);
9163 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9164 target, unsignedp);
9165 gcc_assert (temp);
9166 return temp;
9169 case VEC_UNPACK_FLOAT_HI_EXPR:
9170 case VEC_UNPACK_FLOAT_LO_EXPR:
9172 op0 = expand_normal (TREE_OPERAND (exp, 0));
9173 /* The signedness is determined from the input operand. */
9174 this_optab = optab_for_tree_code (code,
9175 TREE_TYPE (TREE_OPERAND (exp, 0)));
9176 temp = expand_widen_pattern_expr
9177 (exp, op0, NULL_RTX, NULL_RTX,
9178 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9180 gcc_assert (temp);
9181 return temp;
9184 case VEC_WIDEN_MULT_HI_EXPR:
9185 case VEC_WIDEN_MULT_LO_EXPR:
9187 tree oprnd0 = TREE_OPERAND (exp, 0);
9188 tree oprnd1 = TREE_OPERAND (exp, 1);
9190 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9191 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9192 target, unsignedp);
9193 gcc_assert (target);
9194 return target;
9197 case VEC_PACK_TRUNC_EXPR:
9198 case VEC_PACK_SAT_EXPR:
9199 case VEC_PACK_FIX_TRUNC_EXPR:
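/* Editorial note (not in the original source): the result of a pack has
   narrower elements than its operands, and the vec_pack_* insn patterns are
   presumably selected by the mode of the wider input vectors, so MODE is
   switched to the mode of operand 0 before falling through to the common
   binary-operator expansion at "binop" below.  */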
9201 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9202 goto binop;
9205 default:
9206 return lang_hooks.expand_expr (exp, original_target, tmode,
9207 modifier, alt_rtl);
9210 /* Here to do an ordinary binary operator. */
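/* Editorial note (not in the original source): "binop2" is reached with
   OP0 and OP1 already expanded, and "binop3" additionally with THIS_OPTAB
   already selected; all three entry points share the expand_binop call
   below.  */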
9211 binop:
9212 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9213 subtarget, &op0, &op1, 0);
9214 binop2:
9215 this_optab = optab_for_tree_code (code, type);
9216 binop3:
9217 if (modifier == EXPAND_STACK_PARM)
9218 target = 0;
9219 temp = expand_binop (mode, this_optab, op0, op1, target,
9220 unsignedp, OPTAB_LIB_WIDEN);
9221 gcc_assert (temp);
9222 return REDUCE_BIT_FIELD (temp);
9224 #undef REDUCE_BIT_FIELD
9226 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9227 signedness of TYPE), possibly returning the result in TARGET. */
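/* Illustrative example (editorial, not in the original source): reducing a
   SImode value to a 5-bit unsigned bit-field type ANDs it with 0x1f, while
   reducing it to a 5-bit signed type shifts it left by 27 bits and then
   arithmetically right by 27 bits so that bit 4 is sign-extended.  */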
9228 static rtx
9229 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9231 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9232 if (target && GET_MODE (target) != GET_MODE (exp))
9233 target = 0;
9234 /* For constant values, reduce using build_int_cst_type. */
9235 if (GET_CODE (exp) == CONST_INT)
9237 HOST_WIDE_INT value = INTVAL (exp);
9238 tree t = build_int_cst_type (type, value);
9239 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9241 else if (TYPE_UNSIGNED (type))
9243 rtx mask;
9244 if (prec < HOST_BITS_PER_WIDE_INT)
9245 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9246 GET_MODE (exp));
9247 else
9248 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9249 ((unsigned HOST_WIDE_INT) 1
9250 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9251 GET_MODE (exp));
9252 return expand_and (GET_MODE (exp), exp, mask, target);
9254 else
9256 tree count = build_int_cst (NULL_TREE,
9257 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9258 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9259 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9263 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9264 when applied to the address of EXP produces an address known to be
9265 aligned more than BIGGEST_ALIGNMENT. */
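/* Illustrative example (editorial, not in the original source): the offset
   recognized here is the source-level rounding idiom

       (char *) &v + ((- (intptr_t) &v) & (ALIGN - 1))

   i.e. a BIT_AND_EXPR whose first operand is the negated address of EXP and
   whose second operand is an all-ones mask larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */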
9267 static int
9268 is_aligning_offset (const_tree offset, const_tree exp)
9270 /* Strip off any conversions. */
9271 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9272 || TREE_CODE (offset) == NOP_EXPR
9273 || TREE_CODE (offset) == CONVERT_EXPR)
9274 offset = TREE_OPERAND (offset, 0);
9276 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9277 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
9278 if (TREE_CODE (offset) != BIT_AND_EXPR
9279 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9280 || compare_tree_int (TREE_OPERAND (offset, 1),
9281 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9282 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9283 return 0;
9285 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9286 It must be NEGATE_EXPR. Then strip any more conversions. */
9287 offset = TREE_OPERAND (offset, 0);
9288 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9289 || TREE_CODE (offset) == NOP_EXPR
9290 || TREE_CODE (offset) == CONVERT_EXPR)
9291 offset = TREE_OPERAND (offset, 0);
9293 if (TREE_CODE (offset) != NEGATE_EXPR)
9294 return 0;
9296 offset = TREE_OPERAND (offset, 0);
9297 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9298 || TREE_CODE (offset) == NOP_EXPR
9299 || TREE_CODE (offset) == CONVERT_EXPR)
9300 offset = TREE_OPERAND (offset, 0);
9302 /* This must now be the address of EXP. */
9303 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9306 /* Return the tree node if ARG corresponds to a string constant, or zero
9307 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9308 in bytes within the string that ARG is accessing. The type of the
9309 offset will be `sizetype'. */
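/* For illustration (editorial, not in the original source): the cases
   handled below are an ADDR_EXPR of a STRING_CST, a VAR_DECL or an
   ARRAY_REF (e.g. &"abc"[1]), and a PLUS_EXPR/POINTER_PLUS_EXPR of such an
   ADDR_EXPR and an offset (e.g. "abc" + i); a VAR_DECL only qualifies when
   it is read-only, binds locally and is initialized with a string
   literal.  */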
9311 tree
9312 string_constant (tree arg, tree *ptr_offset)
9314 tree array, offset, lower_bound;
9315 STRIP_NOPS (arg);
9317 if (TREE_CODE (arg) == ADDR_EXPR)
9319 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9321 *ptr_offset = size_zero_node;
9322 return TREE_OPERAND (arg, 0);
9324 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9326 array = TREE_OPERAND (arg, 0);
9327 offset = size_zero_node;
9329 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9331 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9332 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9333 if (TREE_CODE (array) != STRING_CST
9334 && TREE_CODE (array) != VAR_DECL)
9335 return 0;
9337 /* Check if the array has a nonzero lower bound. */
9338 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9339 if (!integer_zerop (lower_bound))
9341 /* If the offset and base aren't both constants, return 0. */
9342 if (TREE_CODE (lower_bound) != INTEGER_CST)
9343 return 0;
9344 if (TREE_CODE (offset) != INTEGER_CST)
9345 return 0;
9346 /* Adjust offset by the lower bound. */
9347 offset = size_diffop (fold_convert (sizetype, offset),
9348 fold_convert (sizetype, lower_bound));
9351 else
9352 return 0;
9354 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9356 tree arg0 = TREE_OPERAND (arg, 0);
9357 tree arg1 = TREE_OPERAND (arg, 1);
9359 STRIP_NOPS (arg0);
9360 STRIP_NOPS (arg1);
9362 if (TREE_CODE (arg0) == ADDR_EXPR
9363 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9364 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9366 array = TREE_OPERAND (arg0, 0);
9367 offset = arg1;
9369 else if (TREE_CODE (arg1) == ADDR_EXPR
9370 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9371 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9373 array = TREE_OPERAND (arg1, 0);
9374 offset = arg0;
9376 else
9377 return 0;
9379 else
9380 return 0;
9382 if (TREE_CODE (array) == STRING_CST)
9384 *ptr_offset = fold_convert (sizetype, offset);
9385 return array;
9387 else if (TREE_CODE (array) == VAR_DECL)
9389 int length;
9391 /* Variables initialized to string literals can be handled too. */
9392 if (DECL_INITIAL (array) == NULL_TREE
9393 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9394 return 0;
9396 /* The array must be read-only and non-volatile, and must bind locally. */
9397 if (! TREE_READONLY (array)
9398 || TREE_SIDE_EFFECTS (array)
9399 || ! targetm.binds_local_p (array))
9400 return 0;
9402 /* Avoid const char foo[4] = "abcde"; */
9403 if (DECL_SIZE_UNIT (array) == NULL_TREE
9404 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9405 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9406 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9407 return 0;
9409 /* If the variable is bigger than the string literal, OFFSET must be
9410 constant and within the bounds of the string literal. */
9411 offset = fold_convert (sizetype, offset);
9412 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9413 && (! host_integerp (offset, 1)
9414 || compare_tree_int (offset, length) >= 0))
9415 return 0;
9417 *ptr_offset = offset;
9418 return DECL_INITIAL (array);
9421 return 0;
9424 /* Generate code to calculate EXP using a store-flag instruction
9425 and return an rtx for the result. EXP is either a comparison
9426 or a TRUTH_NOT_EXPR whose operand is a comparison.
9428 If TARGET is nonzero, store the result there if convenient.
9430 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9431 cheap.
9433 Return zero if there is no suitable set-flag instruction
9434 available on this machine.
9436 Once expand_expr has been called on the arguments of the comparison,
9437 we are committed to doing the store flag, since it is not safe to
9438 re-evaluate the expression. We emit the store-flag insn by calling
9439 emit_store_flag, but only expand the arguments if we have a reason
9440 to believe that emit_store_flag will be successful. If we think that
9441 it will, but it isn't, we have to simulate the store-flag with a
9442 set/jump/set sequence. */
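/* Editorial sketch (not in the original source): on a target with a
   suitable scc pattern this emits a single store-flag insn; otherwise the
   fallback at the end of the function emits, roughly,

       target = 1;  if (OP0 <cond> OP1) goto label;  target = 0;  label:

   with the two constants swapped when the result must be inverted.  */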
9444 static rtx
9445 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9447 enum rtx_code code;
9448 tree arg0, arg1, type;
9449 tree tem;
9450 enum machine_mode operand_mode;
9451 int invert = 0;
9452 int unsignedp;
9453 rtx op0, op1;
9454 enum insn_code icode;
9455 rtx subtarget = target;
9456 rtx result, label;
9458 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9459 result at the end. We can't simply invert the test since it would
9460 have already been inverted if it were valid. This case occurs for
9461 some floating-point comparisons. */
9463 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9464 invert = 1, exp = TREE_OPERAND (exp, 0);
9466 arg0 = TREE_OPERAND (exp, 0);
9467 arg1 = TREE_OPERAND (exp, 1);
9469 /* Don't crash if the comparison was erroneous. */
9470 if (arg0 == error_mark_node || arg1 == error_mark_node)
9471 return const0_rtx;
9473 type = TREE_TYPE (arg0);
9474 operand_mode = TYPE_MODE (type);
9475 unsignedp = TYPE_UNSIGNED (type);
9477 /* We won't bother with BLKmode store-flag operations because it would mean
9478 passing a lot of information to emit_store_flag. */
9479 if (operand_mode == BLKmode)
9480 return 0;
9482 /* We won't bother with store-flag operations involving function pointers
9483 when function pointers must be canonicalized before comparisons. */
9484 #ifdef HAVE_canonicalize_funcptr_for_compare
9485 if (HAVE_canonicalize_funcptr_for_compare
9486 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9487 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9488 == FUNCTION_TYPE))
9489 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9490 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9491 == FUNCTION_TYPE))))
9492 return 0;
9493 #endif
9495 STRIP_NOPS (arg0);
9496 STRIP_NOPS (arg1);
9498 /* Get the rtx comparison code to use. We know that EXP is a comparison
9499 operation of some type. Some comparisons against 1 and -1 can be
9500 converted to comparisons with zero. Do so here so that the tests
9501 below will be aware that we have a comparison with zero. These
9502 tests will not catch constants in the first operand, but constants
9503 are rarely passed as the first operand. */
9505 switch (TREE_CODE (exp))
9507 case EQ_EXPR:
9508 code = EQ;
9509 break;
9510 case NE_EXPR:
9511 code = NE;
9512 break;
9513 case LT_EXPR:
9514 if (integer_onep (arg1))
9515 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9516 else
9517 code = unsignedp ? LTU : LT;
9518 break;
9519 case LE_EXPR:
9520 if (! unsignedp && integer_all_onesp (arg1))
9521 arg1 = integer_zero_node, code = LT;
9522 else
9523 code = unsignedp ? LEU : LE;
9524 break;
9525 case GT_EXPR:
9526 if (! unsignedp && integer_all_onesp (arg1))
9527 arg1 = integer_zero_node, code = GE;
9528 else
9529 code = unsignedp ? GTU : GT;
9530 break;
9531 case GE_EXPR:
9532 if (integer_onep (arg1))
9533 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9534 else
9535 code = unsignedp ? GEU : GE;
9536 break;
9538 case UNORDERED_EXPR:
9539 code = UNORDERED;
9540 break;
9541 case ORDERED_EXPR:
9542 code = ORDERED;
9543 break;
9544 case UNLT_EXPR:
9545 code = UNLT;
9546 break;
9547 case UNLE_EXPR:
9548 code = UNLE;
9549 break;
9550 case UNGT_EXPR:
9551 code = UNGT;
9552 break;
9553 case UNGE_EXPR:
9554 code = UNGE;
9555 break;
9556 case UNEQ_EXPR:
9557 code = UNEQ;
9558 break;
9559 case LTGT_EXPR:
9560 code = LTGT;
9561 break;
9563 default:
9564 gcc_unreachable ();
9567 /* Put a constant second. */
9568 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9570 tem = arg0; arg0 = arg1; arg1 = tem;
9571 code = swap_condition (code);
9574 /* If this is an equality or inequality test of a single bit, we can
9575 do this by shifting the bit being tested to the low-order bit and
9576 masking the result with the constant 1. If the condition was EQ,
9577 we xor it with 1. This does not require an scc insn and is faster
9578 than an scc insn even if we have it.
9580 The code to make this transformation was moved into fold_single_bit_test,
9581 so we just call into the folder and expand its result. */
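/* For example (editorial note, not in the original source): "(x & 8) != 0"
   is folded to "(x >> 3) & 1", and the EQ form gets an additional XOR with
   1, so no scc instruction is needed.  */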
9583 if ((code == NE || code == EQ)
9584 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9585 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9587 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9588 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9589 arg0, arg1, type),
9590 target, VOIDmode, EXPAND_NORMAL);
9593 /* Now see if we are likely to be able to do this. Return if not. */
9594 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9595 return 0;
9597 icode = setcc_gen_code[(int) code];
9599 if (icode == CODE_FOR_nothing)
9601 enum machine_mode wmode;
9603 for (wmode = operand_mode;
9604 icode == CODE_FOR_nothing && wmode != VOIDmode;
9605 wmode = GET_MODE_WIDER_MODE (wmode))
9606 icode = optab_handler (cstore_optab, wmode)->insn_code;
9609 if (icode == CODE_FOR_nothing
9610 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9612 /* We can only do this if it is one of the special cases that
9613 can be handled without an scc insn. */
9614 if ((code == LT && integer_zerop (arg1))
9615 || (! only_cheap && code == GE && integer_zerop (arg1)))
9617 else if (! only_cheap && (code == NE || code == EQ)
9618 && TREE_CODE (type) != REAL_TYPE
9619 && ((optab_handler (abs_optab, operand_mode)->insn_code
9620 != CODE_FOR_nothing)
9621 || (optab_handler (ffs_optab, operand_mode)->insn_code
9622 != CODE_FOR_nothing)))
9624 else
9625 return 0;
9628 if (! get_subtarget (target)
9629 || GET_MODE (subtarget) != operand_mode)
9630 subtarget = 0;
9632 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9634 if (target == 0)
9635 target = gen_reg_rtx (mode);
9637 result = emit_store_flag (target, code, op0, op1,
9638 operand_mode, unsignedp, 1);
9640 if (result)
9642 if (invert)
9643 result = expand_binop (mode, xor_optab, result, const1_rtx,
9644 result, 0, OPTAB_LIB_WIDEN);
9645 return result;
9648 /* If this failed, we have to do this with set/compare/jump/set code. */
9649 if (!REG_P (target)
9650 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9651 target = gen_reg_rtx (GET_MODE (target));
9653 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9654 label = gen_label_rtx ();
9655 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9656 NULL_RTX, label);
9658 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9659 emit_label (label);
9661 return target;
9665 /* Stubs in case we haven't got a casesi insn. */
9666 #ifndef HAVE_casesi
9667 # define HAVE_casesi 0
9668 # define gen_casesi(a, b, c, d, e) (0)
9669 # define CODE_FOR_casesi CODE_FOR_nothing
9670 #endif
9672 /* If the machine does not have a case insn that compares the bounds,
9673 this means extra overhead for dispatch tables, which raises the
9674 threshold for using them. */
9675 #ifndef CASE_VALUES_THRESHOLD
9676 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9677 #endif /* CASE_VALUES_THRESHOLD */
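/* Editorial note (not in the original source): with the default above, the
   cutoff is 4 case values when a casesi instruction exists and 5 when it
   does not; a backend can override it by defining CASE_VALUES_THRESHOLD.  */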
9679 unsigned int
9680 case_values_threshold (void)
9682 return CASE_VALUES_THRESHOLD;
9685 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9686 0 otherwise (i.e. if there is no casesi instruction). */
9687 int
9688 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9689 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9691 enum machine_mode index_mode = SImode;
9692 int index_bits = GET_MODE_BITSIZE (index_mode);
9693 rtx op1, op2, index;
9694 enum machine_mode op_mode;
9696 if (! HAVE_casesi)
9697 return 0;
9699 /* Convert the index to SImode. */
9700 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9702 enum machine_mode omode = TYPE_MODE (index_type);
9703 rtx rangertx = expand_normal (range);
9705 /* We must handle the endpoints in the original mode. */
9706 index_expr = build2 (MINUS_EXPR, index_type,
9707 index_expr, minval);
9708 minval = integer_zero_node;
9709 index = expand_normal (index_expr);
9710 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9711 omode, 1, default_label);
9712 /* Now we can safely truncate. */
9713 index = convert_to_mode (index_mode, index, 0);
9715 else
9717 if (TYPE_MODE (index_type) != index_mode)
9719 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9720 index_expr = fold_convert (index_type, index_expr);
9723 index = expand_normal (index_expr);
9726 do_pending_stack_adjust ();
9728 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9729 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9730 (index, op_mode))
9731 index = copy_to_mode_reg (op_mode, index);
9733 op1 = expand_normal (minval);
9735 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9736 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9737 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9738 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9739 (op1, op_mode))
9740 op1 = copy_to_mode_reg (op_mode, op1);
9742 op2 = expand_normal (range);
9744 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9745 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9746 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9747 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9748 (op2, op_mode))
9749 op2 = copy_to_mode_reg (op_mode, op2);
9751 emit_jump_insn (gen_casesi (index, op1, op2,
9752 table_label, default_label));
9753 return 1;
9756 /* Attempt to generate a tablejump instruction; same concept. */
9757 #ifndef HAVE_tablejump
9758 #define HAVE_tablejump 0
9759 #define gen_tablejump(x, y) (0)
9760 #endif
9762 /* Subroutine of the next function.
9764 INDEX is the value being switched on, with the lowest value
9765 in the table already subtracted.
9766 MODE is its expected mode (needed if INDEX is constant).
9767 RANGE is the length of the jump table.
9768 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9770 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9771 index value is out of range. */
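/* Editorial sketch (not in the original source): the code below emits,
   roughly,

       if ((unsigned) INDEX > RANGE) goto DEFAULT_LABEL;
       PC = *(TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE));

   with the table address passed through PIC_CASE_VECTOR_ADDRESS when
   generating PIC code.  */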
9773 static void
9774 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9775 rtx default_label)
9777 rtx temp, vector;
9779 if (INTVAL (range) > cfun->max_jumptable_ents)
9780 cfun->max_jumptable_ents = INTVAL (range);
9782 /* Do an unsigned comparison (in the proper mode) between the index
9783 expression and the value which represents the length of the range.
9784 Since we just finished subtracting the lower bound of the range
9785 from the index expression, this comparison allows us to simultaneously
9786 check that the original index expression value is both greater than
9787 or equal to the minimum value of the range and less than or equal to
9788 the maximum value of the range. */
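   /* For instance (editorial note, not in the original source): for case
      values 5 through 10 the caller passes INDEX = value - 5 and RANGE = 5,
      so the single unsigned test "INDEX > 5" rejects both value < 5 (which
      wrapped around to a huge unsigned number) and value > 10.  */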
9790 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9791 default_label);
9793 /* If index is in range, it must fit in Pmode.
9794 Convert to Pmode so we can index with it. */
9795 if (mode != Pmode)
9796 index = convert_to_mode (Pmode, index, 1);
9798 /* Don't let a MEM slip through, because then INDEX that comes
9799 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9800 and break_out_memory_refs will go to work on it and mess it up. */
9801 #ifdef PIC_CASE_VECTOR_ADDRESS
9802 if (flag_pic && !REG_P (index))
9803 index = copy_to_mode_reg (Pmode, index);
9804 #endif
9806 /* If flag_force_addr were to affect this address
9807 it could interfere with the tricky assumptions made
9808 about addresses that contain label-refs,
9809 which may be valid only very near the tablejump itself. */
9810 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9811 GET_MODE_SIZE, because this indicates how large insns are. The other
9812 uses should all be Pmode, because they are addresses. This code
9813 could fail if addresses and insns are not the same size. */
9814 index = gen_rtx_PLUS (Pmode,
9815 gen_rtx_MULT (Pmode, index,
9816 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9817 gen_rtx_LABEL_REF (Pmode, table_label));
9818 #ifdef PIC_CASE_VECTOR_ADDRESS
9819 if (flag_pic)
9820 index = PIC_CASE_VECTOR_ADDRESS (index);
9821 else
9822 #endif
9823 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9824 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9825 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9826 convert_move (temp, vector, 0);
9828 emit_jump_insn (gen_tablejump (temp, table_label));
9830 /* If we are generating PIC code or if the table is PC-relative, the
9831 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9832 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9833 emit_barrier ();
9836 int
9837 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9838 rtx table_label, rtx default_label)
9840 rtx index;
9842 if (! HAVE_tablejump)
9843 return 0;
9845 index_expr = fold_build2 (MINUS_EXPR, index_type,
9846 fold_convert (index_type, index_expr),
9847 fold_convert (index_type, minval));
9848 index = expand_normal (index_expr);
9849 do_pending_stack_adjust ();
9851 do_tablejump (index, TYPE_MODE (index_type),
9852 convert_modes (TYPE_MODE (index_type),
9853 TYPE_MODE (TREE_TYPE (range)),
9854 expand_normal (range),
9855 TYPE_UNSIGNED (TREE_TYPE (range))),
9856 table_label, default_label);
9857 return 1;
9860 /* Nonzero if the mode is a valid vector mode for this architecture.
9861 This returns nonzero even if there is no hardware support for the
9862 vector mode, but we can emulate with narrower modes. */
9864 int
9865 vector_mode_valid_p (enum machine_mode mode)
9867 enum mode_class class = GET_MODE_CLASS (mode);
9868 enum machine_mode innermode;
9870 /* Doh! What's going on? */
9871 if (class != MODE_VECTOR_INT
9872 && class != MODE_VECTOR_FLOAT)
9873 return 0;
9875 /* Hardware support. Woo hoo! */
9876 if (targetm.vector_mode_supported_p (mode))
9877 return 1;
9879 innermode = GET_MODE_INNER (mode);
9881 /* We should probably return 1 if requesting V4DI and we have no DI
9882 but do have V2DI, though this case is probably very unlikely. */
9884 /* If we have support for the inner mode, we can safely emulate it.
9885 We may not have V2DI, but we can emulate with a pair of DIs. */
9886 return targetm.scalar_mode_supported_p (innermode);
9889 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9890 static rtx
9891 const_vector_from_tree (tree exp)
9893 rtvec v;
9894 int units, i;
9895 tree link, elt;
9896 enum machine_mode inner, mode;
9898 mode = TYPE_MODE (TREE_TYPE (exp));
9900 if (initializer_zerop (exp))
9901 return CONST0_RTX (mode);
9903 units = GET_MODE_NUNITS (mode);
9904 inner = GET_MODE_INNER (mode);
9906 v = rtvec_alloc (units);
9908 link = TREE_VECTOR_CST_ELTS (exp);
9909 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9911 elt = TREE_VALUE (link);
9913 if (TREE_CODE (elt) == REAL_CST)
9914 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9915 inner);
9916 else
9917 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9918 TREE_INT_CST_HIGH (elt),
9919 inner);
9922 /* Initialize remaining elements to 0. */
9923 for (; i < units; ++i)
9924 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9926 return gen_rtx_CONST_VECTOR (mode, v);
9928 #include "gt-expr.h"