gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "target.h"
53 /* Decide whether a function's arguments should be processed
54 from first to last or from last to first.
56 They should if the stack and args grow in opposite directions, but
57 only if we have push insns. */
59 #ifdef PUSH_ROUNDING
61 #ifndef PUSH_ARGS_REVERSED
62 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
63 #define PUSH_ARGS_REVERSED /* If it's last to first. */
64 #endif
65 #endif
67 #endif
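/* A minimal illustrative sketch (not a GCC interface): the preprocessor
   decision above, restated as plain C.  The helper name and its 0/1 flag
   arguments are hypothetical stand-ins for the STACK_GROWS_DOWNWARD and
   ARGS_GROW_DOWNWARD target macros.  */
static int
push_args_reversed_p (int stack_grows_downward, int args_grow_downward)
{
  /* Push arguments last-to-first exactly when the stack and the argument
     area grow in opposite directions.  */
  return stack_grows_downward != args_grow_downward;
}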
69 #ifndef STACK_PUSH_CODE
70 #ifdef STACK_GROWS_DOWNWARD
71 #define STACK_PUSH_CODE PRE_DEC
72 #else
73 #define STACK_PUSH_CODE PRE_INC
74 #endif
75 #endif
77 /* Convert defined/undefined to boolean. */
78 #ifdef TARGET_MEM_FUNCTIONS
79 #undef TARGET_MEM_FUNCTIONS
80 #define TARGET_MEM_FUNCTIONS 1
81 #else
82 #define TARGET_MEM_FUNCTIONS 0
83 #endif
86 /* If this is nonzero, we do not bother generating VOLATILE
87 around volatile memory references, and we are willing to
88 output indirect addresses. If cse is to follow, we reject
89 indirect addresses so a useful potential cse is generated;
90 if it is used only once, instruction combination will produce
91 the same indirect address eventually. */
92 int cse_not_expected;
94 /* This structure is used by move_by_pieces to describe the move to
95 be performed. */
96 struct move_by_pieces
98 rtx to;
99 rtx to_addr;
100 int autinc_to;
101 int explicit_inc_to;
102 rtx from;
103 rtx from_addr;
104 int autinc_from;
105 int explicit_inc_from;
106 unsigned HOST_WIDE_INT len;
107 HOST_WIDE_INT offset;
108 int reverse;
111 /* This structure is used by store_by_pieces to describe the clear to
112 be performed. */
114 struct store_by_pieces
116 rtx to;
117 rtx to_addr;
118 int autinc_to;
119 int explicit_inc_to;
120 unsigned HOST_WIDE_INT len;
121 HOST_WIDE_INT offset;
122 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
123 void *constfundata;
124 int reverse;
127 static rtx enqueue_insn (rtx, rtx);
128 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
129 unsigned int);
130 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
131 struct move_by_pieces *);
132 static bool block_move_libcall_safe_for_call_parm (void);
133 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
134 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
135 static tree emit_block_move_libcall_fn (int);
136 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
137 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
138 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
139 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
140 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
141 struct store_by_pieces *);
142 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
143 static rtx clear_storage_via_libcall (rtx, rtx);
144 static tree clear_storage_libcall_fn (int);
145 static rtx compress_float_constant (rtx, rtx);
146 static rtx get_subtarget (rtx);
147 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
148 HOST_WIDE_INT, enum machine_mode,
149 tree, tree, int, int);
150 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
151 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
152 tree, enum machine_mode, int, tree, int);
153 static rtx var_rtx (tree);
155 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
156 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
158 static int is_aligning_offset (tree, tree);
159 static rtx expand_increment (tree, int, int);
160 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
161 enum expand_modifier);
162 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
163 #ifdef PUSH_ROUNDING
164 static void emit_single_push_insn (enum machine_mode, rtx, tree);
165 #endif
166 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
167 static rtx const_vector_from_tree (tree);
169 /* Record for each mode whether we can move a register directly to or
170 from an object of that mode in memory. If we can't, we won't try
171 to use that mode directly when accessing a field of that mode. */
173 static char direct_load[NUM_MACHINE_MODES];
174 static char direct_store[NUM_MACHINE_MODES];
176 /* Record for each mode whether we can float-extend from memory. */
178 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185 #endif
187 /* This macro is used to determine whether clear_by_pieces should be
188 called to clear storage. */
189 #ifndef CLEAR_BY_PIECES_P
190 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
192 #endif
194 /* This macro is used to determine whether store_by_pieces should be
195 called to "memset" storage with byte values other than zero, or
196 to "memcpy" storage when the source is a constant string. */
197 #ifndef STORE_BY_PIECES_P
198 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
199 #endif
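/* A minimal illustrative sketch (plain C, hypothetical helper) of the
   heuristic the three macros above encode: estimate how many move insns a
   piecewise copy would take, using ever-smaller power-of-two pieces, and
   expand inline only when that estimate stays below a target ratio such as
   MOVE_RATIO.  WORD_SIZE (assumed to be a power of two) and RATIO are
   parameters here, not the target macros themselves.  */
static int
example_copy_by_pieces_p (unsigned long nbytes, unsigned long word_size,
                          unsigned long ratio)
{
  unsigned long insns = nbytes / word_size;
  unsigned long rest = nbytes % word_size;
  unsigned long piece;

  /* One insn per full word, then one per remaining power-of-two piece.  */
  for (piece = word_size / 2; piece >= 1; piece /= 2)
    {
      insns += rest / piece;
      rest %= piece;
    }
  return insns < ratio;
}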
201 /* This array records the insn_code of insns to perform block moves. */
202 enum insn_code movstr_optab[NUM_MACHINE_MODES];
204 /* This array records the insn_code of insns to perform block clears. */
205 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
207 /* These arrays record the insn_code of two different kinds of insns
208 to perform block compares. */
209 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
210 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
212 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
214 #ifndef SLOW_UNALIGNED_ACCESS
215 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
216 #endif
218 /* This is run once per compilation to set up which modes can be used
219 directly in memory and to initialize the block move optab. */
221 void
222 init_expr_once (void)
224 rtx insn, pat;
225 enum machine_mode mode;
226 int num_clobbers;
227 rtx mem, mem1;
228 rtx reg;
230 /* Try indexing by frame ptr and try by stack ptr.
231 It is known that on the Convex the stack ptr isn't a valid index.
232 With luck, one or the other is valid on any machine. */
233 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
234 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
236 /* A scratch register we can modify in-place below to avoid
237 useless RTL allocations. */
238 reg = gen_rtx_REG (VOIDmode, -1);
240 insn = rtx_alloc (INSN);
241 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
242 PATTERN (insn) = pat;
244 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
245 mode = (enum machine_mode) ((int) mode + 1))
247 int regno;
249 direct_load[(int) mode] = direct_store[(int) mode] = 0;
250 PUT_MODE (mem, mode);
251 PUT_MODE (mem1, mode);
252 PUT_MODE (reg, mode);
254 /* See if there is some register that can be used in this mode and
255 directly loaded or stored from memory. */
257 if (mode != VOIDmode && mode != BLKmode)
258 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
259 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
260 regno++)
262 if (! HARD_REGNO_MODE_OK (regno, mode))
263 continue;
265 REGNO (reg) = regno;
267 SET_SRC (pat) = mem;
268 SET_DEST (pat) = reg;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_load[(int) mode] = 1;
272 SET_SRC (pat) = mem1;
273 SET_DEST (pat) = reg;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_load[(int) mode] = 1;
277 SET_SRC (pat) = reg;
278 SET_DEST (pat) = mem;
279 if (recog (pat, insn, &num_clobbers) >= 0)
280 direct_store[(int) mode] = 1;
282 SET_SRC (pat) = reg;
283 SET_DEST (pat) = mem1;
284 if (recog (pat, insn, &num_clobbers) >= 0)
285 direct_store[(int) mode] = 1;
289 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
291 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
292 mode = GET_MODE_WIDER_MODE (mode))
294 enum machine_mode srcmode;
295 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
296 srcmode = GET_MODE_WIDER_MODE (srcmode))
298 enum insn_code ic;
300 ic = can_extend_p (mode, srcmode, 0);
301 if (ic == CODE_FOR_nothing)
302 continue;
304 PUT_MODE (mem, srcmode);
306 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
307 float_extend_from_mem[mode][srcmode] = true;
312 /* This is run at the start of compiling a function. */
314 void
315 init_expr (void)
317 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
320 /* Small sanity check that the queue is empty at the end of a function. */
322 void
323 finish_expr_for_function (void)
325 if (pending_chain)
326 abort ();
329 /* Manage the queue of increment instructions to be output
330 for POSTINCREMENT_EXPR expressions, etc. */
332 /* Queue up to increment (or change) VAR later. BODY says how:
333 BODY should be the same thing you would pass to emit_insn
334 to increment right away. It will go to emit_insn later on.
336 The value is a QUEUED expression to be used in place of VAR
337 where you want to guarantee the pre-incrementation value of VAR. */
339 static rtx
340 enqueue_insn (rtx var, rtx body)
342 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
343 body, pending_chain);
344 return pending_chain;
347 /* Use protect_from_queue to convert a QUEUED expression
348 into something that you can put immediately into an instruction.
349 If the queued incrementation has not happened yet,
350 protect_from_queue returns the variable itself.
351 If the incrementation has happened, protect_from_queue returns a temp
352 that contains a copy of the old value of the variable.
354 Any time an rtx which might possibly be a QUEUED is to be put
355 into an instruction, it must be passed through protect_from_queue first.
356 QUEUED expressions are not meaningful in instructions.
358 Do not pass a value through protect_from_queue and then hold
359 on to it for a while before putting it in an instruction!
360 If the queue is flushed in between, incorrect code will result. */
363 protect_from_queue (rtx x, int modify)
365 RTX_CODE code = GET_CODE (x);
367 #if 0 /* A QUEUED can hang around after the queue is forced out. */
368 /* Shortcut for most common case. */
369 if (pending_chain == 0)
370 return x;
371 #endif
373 if (code != QUEUED)
375 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
376 use of autoincrement. Make a copy of the contents of the memory
377 location rather than a copy of the address, but not if the value is
378 of mode BLKmode. Don't modify X in place since it might be
379 shared. */
380 if (code == MEM && GET_MODE (x) != BLKmode
381 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
383 rtx y = XEXP (x, 0);
384 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
386 if (QUEUED_INSN (y))
388 rtx temp = gen_reg_rtx (GET_MODE (x));
390 emit_insn_before (gen_move_insn (temp, new),
391 QUEUED_INSN (y));
392 return temp;
395 /* Copy the address into a pseudo, so that the returned value
396 remains correct across calls to emit_queue. */
397 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
400 /* Otherwise, recursively protect the subexpressions of all
401 the kinds of rtx's that can contain a QUEUED. */
402 if (code == MEM)
404 rtx tem = protect_from_queue (XEXP (x, 0), 0);
405 if (tem != XEXP (x, 0))
407 x = copy_rtx (x);
408 XEXP (x, 0) = tem;
411 else if (code == PLUS || code == MULT)
413 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
414 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
415 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
417 x = copy_rtx (x);
418 XEXP (x, 0) = new0;
419 XEXP (x, 1) = new1;
422 return x;
424 /* If the increment has not happened, use the variable itself. Copy it
425 into a new pseudo so that the value remains correct across calls to
426 emit_queue. */
427 if (QUEUED_INSN (x) == 0)
428 return copy_to_reg (QUEUED_VAR (x));
429 /* If the increment has happened and a pre-increment copy exists,
430 use that copy. */
431 if (QUEUED_COPY (x) != 0)
432 return QUEUED_COPY (x);
433 /* The increment has happened but we haven't set up a pre-increment copy.
434 Set one up now, and use it. */
435 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
436 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
437 QUEUED_INSN (x));
438 return QUEUED_COPY (x);
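/* A minimal illustrative sketch (the helper name is hypothetical, not a
   GCC entry point) of the calling discipline described above: an rtx that
   might be a QUEUED is protected immediately before it is put into an
   insn, and the pending increments are flushed afterwards.  */
static void
example_use_of_queue (rtx target, rtx op)
{
  /* Read access; 0 means we do not intend to modify OP.  */
  rtx safe = protect_from_queue (op, 0);
  /* Use the protected value right away, before anything can flush the
     queue behind our back.  */
  emit_move_insn (target, safe);
  /* Now emit the queued post-increments.  */
  emit_queue ();
}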
441 /* Return nonzero if X contains a QUEUED expression:
442 if it contains anything that will be altered by a queued increment.
443 We handle only combinations of MEM, PLUS, MINUS and MULT operators
444 since memory addresses generally contain only those. */
447 queued_subexp_p (rtx x)
449 enum rtx_code code = GET_CODE (x);
450 switch (code)
452 case QUEUED:
453 return 1;
454 case MEM:
455 return queued_subexp_p (XEXP (x, 0));
456 case MULT:
457 case PLUS:
458 case MINUS:
459 return (queued_subexp_p (XEXP (x, 0))
460 || queued_subexp_p (XEXP (x, 1)));
461 default:
462 return 0;
466 /* Retrieve a mark on the queue. */
468 static rtx
469 mark_queue (void)
471 return pending_chain;
474 /* Perform all the pending incrementations that have been enqueued
475 after MARK was retrieved. If MARK is null, perform all the
476 pending incrementations. */
478 static void
479 emit_insns_enqueued_after_mark (rtx mark)
481 rtx p;
483 /* The marked incrementation may have been emitted in the meantime
484 through a call to emit_queue. In this case, the mark is not valid
485 anymore so do nothing. */
486 if (mark && ! QUEUED_BODY (mark))
487 return;
489 while ((p = pending_chain) != mark)
491 rtx body = QUEUED_BODY (p);
493 switch (GET_CODE (body))
495 case INSN:
496 case JUMP_INSN:
497 case CALL_INSN:
498 case CODE_LABEL:
499 case BARRIER:
500 case NOTE:
501 QUEUED_INSN (p) = body;
502 emit_insn (body);
503 break;
505 #ifdef ENABLE_CHECKING
506 case SEQUENCE:
507 abort ();
508 break;
509 #endif
511 default:
512 QUEUED_INSN (p) = emit_insn (body);
513 break;
516 QUEUED_BODY (p) = 0;
517 pending_chain = QUEUED_NEXT (p);
521 /* Perform all the pending incrementations. */
523 void
524 emit_queue (void)
526 emit_insns_enqueued_after_mark (NULL_RTX);
529 /* Copy data from FROM to TO, where the machine modes are not the same.
530 Both modes may be integer, or both may be floating.
531 UNSIGNEDP should be nonzero if FROM is an unsigned type.
532 This causes zero-extension instead of sign-extension. */
534 void
535 convert_move (rtx to, rtx from, int unsignedp)
537 enum machine_mode to_mode = GET_MODE (to);
538 enum machine_mode from_mode = GET_MODE (from);
539 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
540 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
541 enum insn_code code;
542 rtx libcall;
544 /* rtx code for making an equivalent value. */
545 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
546 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
548 to = protect_from_queue (to, 1);
549 from = protect_from_queue (from, 0);
551 if (to_real != from_real)
552 abort ();
554 /* If the source and destination are already the same, then there's
555 nothing to do. */
556 if (to == from)
557 return;
559 /* If FROM is a SUBREG that indicates that we have already done at least
560 the required extension, strip it. We don't handle such SUBREGs as
561 TO here. */
563 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
564 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
565 >= GET_MODE_SIZE (to_mode))
566 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
567 from = gen_lowpart (to_mode, from), from_mode = to_mode;
569 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
570 abort ();
572 if (to_mode == from_mode
573 || (from_mode == VOIDmode && CONSTANT_P (from)))
575 emit_move_insn (to, from);
576 return;
579 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
581 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
582 abort ();
584 if (VECTOR_MODE_P (to_mode))
585 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
586 else
587 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
589 emit_move_insn (to, from);
590 return;
593 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
595 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
596 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
597 return;
600 if (to_real)
602 rtx value, insns;
603 convert_optab tab;
605 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
606 tab = sext_optab;
607 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
608 tab = trunc_optab;
609 else
610 abort ();
612 /* Try converting directly if the insn is supported. */
614 code = tab->handlers[to_mode][from_mode].insn_code;
615 if (code != CODE_FOR_nothing)
617 emit_unop_insn (code, to, from,
618 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
619 return;
622 /* Otherwise use a libcall. */
623 libcall = tab->handlers[to_mode][from_mode].libfunc;
625 if (!libcall)
626 /* This conversion is not implemented yet. */
627 abort ();
629 start_sequence ();
630 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
631 1, from, from_mode);
632 insns = get_insns ();
633 end_sequence ();
634 emit_libcall_block (insns, to, value,
635 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
636 from)
637 : gen_rtx_FLOAT_EXTEND (to_mode, from));
638 return;
641 /* Handle pointer conversion. */ /* SPEE 900220. */
642 /* Targets are expected to provide conversion insns between PxImode and
643 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
644 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
646 enum machine_mode full_mode
647 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
649 if (trunc_optab->handlers[to_mode][full_mode].insn_code
650 == CODE_FOR_nothing)
651 abort ();
653 if (full_mode != from_mode)
654 from = convert_to_mode (full_mode, from, unsignedp);
655 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
656 to, from, UNKNOWN);
657 return;
659 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
661 enum machine_mode full_mode
662 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
664 if (sext_optab->handlers[full_mode][from_mode].insn_code
665 == CODE_FOR_nothing)
666 abort ();
668 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
669 to, from, UNKNOWN);
670 if (to_mode == full_mode)
671 return;
673 /* else proceed to integer conversions below. */
674 from_mode = full_mode;
677 /* Now both modes are integers. */
679 /* Handle expanding beyond a word. */
680 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
681 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
683 rtx insns;
684 rtx lowpart;
685 rtx fill_value;
686 rtx lowfrom;
687 int i;
688 enum machine_mode lowpart_mode;
689 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
691 /* Try converting directly if the insn is supported. */
692 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
693 != CODE_FOR_nothing)
695 /* If FROM is a SUBREG, put it into a register. Do this
696 so that we always generate the same set of insns for
697 better cse'ing; if an intermediate assignment occurred,
698 we won't be doing the operation directly on the SUBREG. */
699 if (optimize > 0 && GET_CODE (from) == SUBREG)
700 from = force_reg (from_mode, from);
701 emit_unop_insn (code, to, from, equiv_code);
702 return;
704 /* Next, try converting via full word. */
705 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
706 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
707 != CODE_FOR_nothing))
709 if (GET_CODE (to) == REG)
711 if (reg_overlap_mentioned_p (to, from))
712 from = force_reg (from_mode, from);
713 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
715 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
716 emit_unop_insn (code, to,
717 gen_lowpart (word_mode, to), equiv_code);
718 return;
721 /* No special multiword conversion insn; do it by hand. */
722 start_sequence ();
724 /* Since we will turn this into a no conflict block, we must ensure
725 that the source does not overlap the target. */
727 if (reg_overlap_mentioned_p (to, from))
728 from = force_reg (from_mode, from);
730 /* Get a copy of FROM widened to a word, if necessary. */
731 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
732 lowpart_mode = word_mode;
733 else
734 lowpart_mode = from_mode;
736 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
738 lowpart = gen_lowpart (lowpart_mode, to);
739 emit_move_insn (lowpart, lowfrom);
741 /* Compute the value to put in each remaining word. */
742 if (unsignedp)
743 fill_value = const0_rtx;
744 else
746 #ifdef HAVE_slt
747 if (HAVE_slt
748 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
749 && STORE_FLAG_VALUE == -1)
751 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
752 lowpart_mode, 0);
753 fill_value = gen_reg_rtx (word_mode);
754 emit_insn (gen_slt (fill_value));
756 else
757 #endif
759 fill_value
760 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
761 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
762 NULL_RTX, 0);
763 fill_value = convert_to_mode (word_mode, fill_value, 1);
767 /* Fill the remaining words. */
768 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
770 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
771 rtx subword = operand_subword (to, index, 1, to_mode);
773 if (subword == 0)
774 abort ();
776 if (fill_value != subword)
777 emit_move_insn (subword, fill_value);
780 insns = get_insns ();
781 end_sequence ();
783 emit_no_conflict_block (insns, to, from, NULL_RTX,
784 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
785 return;
788 /* Truncating multi-word to a word or less. */
789 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
790 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
792 if (!((GET_CODE (from) == MEM
793 && ! MEM_VOLATILE_P (from)
794 && direct_load[(int) to_mode]
795 && ! mode_dependent_address_p (XEXP (from, 0)))
796 || GET_CODE (from) == REG
797 || GET_CODE (from) == SUBREG))
798 from = force_reg (from_mode, from);
799 convert_move (to, gen_lowpart (word_mode, from), 0);
800 return;
803 /* Now follow all the conversions between integers
804 no more than a word long. */
806 /* For truncation, usually we can just refer to FROM in a narrower mode. */
807 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
808 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
809 GET_MODE_BITSIZE (from_mode)))
811 if (!((GET_CODE (from) == MEM
812 && ! MEM_VOLATILE_P (from)
813 && direct_load[(int) to_mode]
814 && ! mode_dependent_address_p (XEXP (from, 0)))
815 || GET_CODE (from) == REG
816 || GET_CODE (from) == SUBREG))
817 from = force_reg (from_mode, from);
818 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
819 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
820 from = copy_to_reg (from);
821 emit_move_insn (to, gen_lowpart (to_mode, from));
822 return;
825 /* Handle extension. */
826 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
828 /* Convert directly if that works. */
829 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
830 != CODE_FOR_nothing)
832 if (flag_force_mem)
833 from = force_not_mem (from);
835 emit_unop_insn (code, to, from, equiv_code);
836 return;
838 else
840 enum machine_mode intermediate;
841 rtx tmp;
842 tree shift_amount;
844 /* Search for a mode to convert via. */
845 for (intermediate = from_mode; intermediate != VOIDmode;
846 intermediate = GET_MODE_WIDER_MODE (intermediate))
847 if (((can_extend_p (to_mode, intermediate, unsignedp)
848 != CODE_FOR_nothing)
849 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
850 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
851 GET_MODE_BITSIZE (intermediate))))
852 && (can_extend_p (intermediate, from_mode, unsignedp)
853 != CODE_FOR_nothing))
855 convert_move (to, convert_to_mode (intermediate, from,
856 unsignedp), unsignedp);
857 return;
860 /* No suitable intermediate mode.
861 Generate what we need with shifts. */
862 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
863 - GET_MODE_BITSIZE (from_mode), 0);
864 from = gen_lowpart (to_mode, force_reg (from_mode, from));
865 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
866 to, unsignedp);
867 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
868 to, unsignedp);
869 if (tmp != to)
870 emit_move_insn (to, tmp);
871 return;
875 /* Support special truncate insns for certain modes. */
876 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
878 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
879 to, from, UNKNOWN);
880 return;
883 /* Handle truncation of volatile memrefs, and so on;
884 the things that couldn't be truncated directly,
885 and for which there was no special instruction.
887 ??? Code above formerly short-circuited this, for most integer
888 mode pairs, with a force_reg in from_mode followed by a recursive
889 call to this routine. Appears always to have been wrong. */
890 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
892 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
893 emit_move_insn (to, temp);
894 return;
897 /* Mode combination is not recognized. */
898 abort ();
901 /* Return an rtx for a value that would result
902 from converting X to mode MODE.
903 Both X and MODE may be floating, or both integer.
904 UNSIGNEDP is nonzero if X is an unsigned value.
905 This can be done by referring to a part of X in place
906 or by copying to a new temporary with conversion.
908 This function *must not* call protect_from_queue
909 except when putting X into an insn (in which case convert_move does it). */
912 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
914 return convert_modes (mode, VOIDmode, x, unsignedp);
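/* A minimal illustrative sketch (hypothetical helper, not part of GCC) of
   the typical way the entry point above is used: a value of some narrower
   integer mode is zero-extended into SImode.  convert_to_mode may return
   X itself or a freshly converted temporary.  */
static rtx
example_zero_extend_to_si (rtx x)
{
  /* UNSIGNEDP == 1 requests zero-extension rather than sign-extension.  */
  return convert_to_mode (SImode, x, 1);
}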
917 /* Return an rtx for a value that would result
918 from converting X from mode OLDMODE to mode MODE.
919 Both modes may be floating, or both integer.
920 UNSIGNEDP is nonzero if X is an unsigned value.
922 This can be done by referring to a part of X in place
923 or by copying to a new temporary with conversion.
925 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
927 This function *must not* call protect_from_queue
928 except when putting X into an insn (in which case convert_move does it). */
931 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
933 rtx temp;
935 /* If FROM is a SUBREG that indicates that we have already done at least
936 the required extension, strip it. */
938 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
939 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
940 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
941 x = gen_lowpart (mode, x);
943 if (GET_MODE (x) != VOIDmode)
944 oldmode = GET_MODE (x);
946 if (mode == oldmode)
947 return x;
949 /* There is one case that we must handle specially: If we are converting
950 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
951 we are to interpret the constant as unsigned, gen_lowpart will do
952 the wrong thing if the constant appears negative. What we want to do is
953 make the high-order word of the constant zero, not all ones. */
955 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
956 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
957 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
959 HOST_WIDE_INT val = INTVAL (x);
961 if (oldmode != VOIDmode
962 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
964 int width = GET_MODE_BITSIZE (oldmode);
966 /* We need to zero extend VAL. */
967 val &= ((HOST_WIDE_INT) 1 << width) - 1;
970 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
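/* Worked example of the special case above (illustrative, assuming a
   64-bit HOST_WIDE_INT): converting (const_int -1) of DImode to an
   unsigned 128-bit integer mode must produce a low word of all ones and a
   high word of zero, i.e. the value 0xffffffffffffffff, whereas a
   sign-extending gen_lowpart would wrongly set the high word to all ones
   as well.  */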
973 /* We can do this with a gen_lowpart if both desired and current modes
974 are integer, and this is either a constant integer, a register, or a
975 non-volatile MEM. Except for the constant case where MODE is no
976 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
978 if ((GET_CODE (x) == CONST_INT
979 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
980 || (GET_MODE_CLASS (mode) == MODE_INT
981 && GET_MODE_CLASS (oldmode) == MODE_INT
982 && (GET_CODE (x) == CONST_DOUBLE
983 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
984 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
985 && direct_load[(int) mode])
986 || (GET_CODE (x) == REG
987 && (! HARD_REGISTER_P (x)
988 || HARD_REGNO_MODE_OK (REGNO (x), mode))
989 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
990 GET_MODE_BITSIZE (GET_MODE (x)))))))))
992 /* ?? If we don't know OLDMODE, we have to assume here that
993 X does not need sign- or zero-extension. This may not be
994 the case, but it's the best we can do. */
995 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
996 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
998 HOST_WIDE_INT val = INTVAL (x);
999 int width = GET_MODE_BITSIZE (oldmode);
1001 /* We must sign or zero-extend in this case. Start by
1002 zero-extending, then sign extend if we need to. */
1003 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1004 if (! unsignedp
1005 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1006 val |= (HOST_WIDE_INT) (-1) << width;
1008 return gen_int_mode (val, mode);
1011 return gen_lowpart (mode, x);
1014 /* Converting an integer constant into mode is always equivalent to a
1015 subreg operation. */
1016 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1018 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1019 abort ();
1020 return simplify_gen_subreg (mode, x, oldmode, 0);
1023 temp = gen_reg_rtx (mode);
1024 convert_move (temp, x, unsignedp);
1025 return temp;
1028 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1029 store efficiently. Due to internal GCC limitations, this is
1030 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1031 for an immediate constant. */
1033 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1035 /* Determine whether the LEN bytes can be moved by using several move
1036 instructions. Return nonzero if a call to move_by_pieces should
1037 succeed. */
1040 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1041 unsigned int align ATTRIBUTE_UNUSED)
1043 return MOVE_BY_PIECES_P (len, align);
1046 /* Generate several move instructions to copy LEN bytes from block FROM to
1047 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1048 and TO through protect_from_queue before calling.
1050 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1051 used to push FROM to the stack.
1053 ALIGN is the maximum stack alignment we can assume.
1055 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
1056 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
1057 stpcpy. */
1060 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1061 unsigned int align, int endp)
1063 struct move_by_pieces data;
1064 rtx to_addr, from_addr = XEXP (from, 0);
1065 unsigned int max_size = MOVE_MAX_PIECES + 1;
1066 enum machine_mode mode = VOIDmode, tmode;
1067 enum insn_code icode;
1069 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1071 data.offset = 0;
1072 data.from_addr = from_addr;
1073 if (to)
1075 to_addr = XEXP (to, 0);
1076 data.to = to;
1077 data.autinc_to
1078 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1079 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1080 data.reverse
1081 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1083 else
1085 to_addr = NULL_RTX;
1086 data.to = NULL_RTX;
1087 data.autinc_to = 1;
1088 #ifdef STACK_GROWS_DOWNWARD
1089 data.reverse = 1;
1090 #else
1091 data.reverse = 0;
1092 #endif
1094 data.to_addr = to_addr;
1095 data.from = from;
1096 data.autinc_from
1097 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1098 || GET_CODE (from_addr) == POST_INC
1099 || GET_CODE (from_addr) == POST_DEC);
1101 data.explicit_inc_from = 0;
1102 data.explicit_inc_to = 0;
1103 if (data.reverse) data.offset = len;
1104 data.len = len;
1106 /* If copying requires more than two move insns,
1107 copy addresses to registers (to make displacements shorter)
1108 and use post-increment if available. */
1109 if (!(data.autinc_from && data.autinc_to)
1110 && move_by_pieces_ninsns (len, align) > 2)
1112 /* Find the mode of the largest move... */
1113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1114 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1115 if (GET_MODE_SIZE (tmode) < max_size)
1116 mode = tmode;
1118 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1120 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1121 data.autinc_from = 1;
1122 data.explicit_inc_from = -1;
1124 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1126 data.from_addr = copy_addr_to_reg (from_addr);
1127 data.autinc_from = 1;
1128 data.explicit_inc_from = 1;
1130 if (!data.autinc_from && CONSTANT_P (from_addr))
1131 data.from_addr = copy_addr_to_reg (from_addr);
1132 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1134 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1135 data.autinc_to = 1;
1136 data.explicit_inc_to = -1;
1138 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1140 data.to_addr = copy_addr_to_reg (to_addr);
1141 data.autinc_to = 1;
1142 data.explicit_inc_to = 1;
1144 if (!data.autinc_to && CONSTANT_P (to_addr))
1145 data.to_addr = copy_addr_to_reg (to_addr);
1148 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1149 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1150 align = MOVE_MAX * BITS_PER_UNIT;
1152 /* First move what we can in the largest integer mode, then go to
1153 successively smaller modes. */
1155 while (max_size > 1)
1157 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1158 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1159 if (GET_MODE_SIZE (tmode) < max_size)
1160 mode = tmode;
1162 if (mode == VOIDmode)
1163 break;
1165 icode = mov_optab->handlers[(int) mode].insn_code;
1166 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1167 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1169 max_size = GET_MODE_SIZE (mode);
1172 /* The code above should have handled everything. */
1173 if (data.len > 0)
1174 abort ();
1176 if (endp)
1178 rtx to1;
1180 if (data.reverse)
1181 abort ();
1182 if (data.autinc_to)
1184 if (endp == 2)
1186 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1187 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1188 else
1189 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1190 -1));
1192 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1193 data.offset);
1195 else
1197 if (endp == 2)
1198 --data.offset;
1199 to1 = adjust_address (data.to, QImode, data.offset);
1201 return to1;
1203 else
1204 return data.to;
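/* A minimal illustrative sketch (plain C, hypothetical helper, assumes
   LEN > 0 for the ENDP == 2 case) of what the ENDP argument handled above
   means, restated with the corresponding C library semantics: 0 behaves
   like memcpy, 1 like mempcpy, 2 like a stpcpy-style "last byte written"
   result.  */
static char *
example_endp_semantics (char *to, const char *from, unsigned long len,
                        int endp)
{
  unsigned long i;

  /* Byte-by-byte stand-in for the real piecewise copier.  */
  for (i = 0; i < len; i++)
    to[i] = from[i];

  if (endp == 0)
    return to;               /* The destination block itself.  */
  else if (endp == 1)
    return to + len;         /* One past the last byte, a la mempcpy.  */
  else
    return to + len - 1;     /* The last byte written, a la stpcpy.  */
}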
1207 /* Return number of insns required to move L bytes by pieces.
1208 ALIGN (in bits) is the maximum alignment we can assume. */
1210 static unsigned HOST_WIDE_INT
1211 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1213 unsigned HOST_WIDE_INT n_insns = 0;
1214 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1216 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1217 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1218 align = MOVE_MAX * BITS_PER_UNIT;
1220 while (max_size > 1)
1222 enum machine_mode mode = VOIDmode, tmode;
1223 enum insn_code icode;
1225 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1226 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1227 if (GET_MODE_SIZE (tmode) < max_size)
1228 mode = tmode;
1230 if (mode == VOIDmode)
1231 break;
1233 icode = mov_optab->handlers[(int) mode].insn_code;
1234 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1235 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1237 max_size = GET_MODE_SIZE (mode);
1240 if (l)
1241 abort ();
1242 return n_insns;
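/* Worked example for the count above (illustrative, assuming MOVE_MAX == 4
   and no alignment penalty): moving 11 bytes costs 11/4 = 2 SImode moves,
   then 3/2 = 1 HImode move, then 1/1 = 1 QImode move, i.e. 4 insns in
   total.  */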
1245 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1246 with move instructions for mode MODE. GENFUN is the gen_... function
1247 to make a move insn for that mode. DATA has all the other info. */
1249 static void
1250 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1251 struct move_by_pieces *data)
1253 unsigned int size = GET_MODE_SIZE (mode);
1254 rtx to1 = NULL_RTX, from1;
1256 while (data->len >= size)
1258 if (data->reverse)
1259 data->offset -= size;
1261 if (data->to)
1263 if (data->autinc_to)
1264 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1265 data->offset);
1266 else
1267 to1 = adjust_address (data->to, mode, data->offset);
1270 if (data->autinc_from)
1271 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1272 data->offset);
1273 else
1274 from1 = adjust_address (data->from, mode, data->offset);
1276 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1277 emit_insn (gen_add2_insn (data->to_addr,
1278 GEN_INT (-(HOST_WIDE_INT)size)));
1279 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1280 emit_insn (gen_add2_insn (data->from_addr,
1281 GEN_INT (-(HOST_WIDE_INT)size)));
1283 if (data->to)
1284 emit_insn ((*genfun) (to1, from1));
1285 else
1287 #ifdef PUSH_ROUNDING
1288 emit_single_push_insn (mode, from1, NULL);
1289 #else
1290 abort ();
1291 #endif
1294 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1295 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1296 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1297 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1299 if (! data->reverse)
1300 data->offset += size;
1302 data->len -= size;
1306 /* Emit code to move a block Y to a block X. This may be done with
1307 string-move instructions, with multiple scalar move instructions,
1308 or with a library call.
1310 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1311 SIZE is an rtx that says how long they are.
1312 ALIGN is the maximum alignment we can assume they have.
1313 METHOD describes what kind of copy this is, and what mechanisms may be used.
1315 Return the address of the new block, if memcpy is called and returns it,
1316 0 otherwise. */
1319 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1321 bool may_use_call;
1322 rtx retval = 0;
1323 unsigned int align;
1325 switch (method)
1327 case BLOCK_OP_NORMAL:
1328 may_use_call = true;
1329 break;
1331 case BLOCK_OP_CALL_PARM:
1332 may_use_call = block_move_libcall_safe_for_call_parm ();
1334 /* Make inhibit_defer_pop nonzero around the library call
1335 to force it to pop the arguments right away. */
1336 NO_DEFER_POP;
1337 break;
1339 case BLOCK_OP_NO_LIBCALL:
1340 may_use_call = false;
1341 break;
1343 default:
1344 abort ();
1347 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1349 if (GET_MODE (x) != BLKmode)
1350 abort ();
1351 if (GET_MODE (y) != BLKmode)
1352 abort ();
1354 x = protect_from_queue (x, 1);
1355 y = protect_from_queue (y, 0);
1356 size = protect_from_queue (size, 0);
1358 if (GET_CODE (x) != MEM)
1359 abort ();
1360 if (GET_CODE (y) != MEM)
1361 abort ();
1362 if (size == 0)
1363 abort ();
1365 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1366 can be incorrect is coming from __builtin_memcpy. */
1367 if (GET_CODE (size) == CONST_INT)
1369 if (INTVAL (size) == 0)
1370 return 0;
1372 x = shallow_copy_rtx (x);
1373 y = shallow_copy_rtx (y);
1374 set_mem_size (x, size);
1375 set_mem_size (y, size);
1378 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1379 move_by_pieces (x, y, INTVAL (size), align, 0);
1380 else if (emit_block_move_via_movstr (x, y, size, align))
1382 else if (may_use_call)
1383 retval = emit_block_move_via_libcall (x, y, size);
1384 else
1385 emit_block_move_via_loop (x, y, size, align);
1387 if (method == BLOCK_OP_CALL_PARM)
1388 OK_DEFER_POP;
1390 return retval;
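/* A minimal illustrative sketch (hypothetical helper, not part of GCC) of
   a typical call to the entry point above: X and Y are BLKmode MEMs,
   NBYTES is a compile-time constant, and BLOCK_OP_NORMAL permits the
   memcpy libcall fallback.  */
static void
example_block_copy (rtx x, rtx y, HOST_WIDE_INT nbytes)
{
  emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}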
1393 /* A subroutine of emit_block_move. Returns true if calling the
1394 block move libcall will not clobber any parameters which may have
1395 already been placed on the stack. */
1397 static bool
1398 block_move_libcall_safe_for_call_parm (void)
1400 /* If arguments are pushed on the stack, then they're safe. */
1401 if (PUSH_ARGS)
1402 return true;
1404 /* If registers go on the stack anyway, any argument is sure to clobber
1405 an outgoing argument. */
1406 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1408 tree fn = emit_block_move_libcall_fn (false);
1409 (void) fn;
1410 if (REG_PARM_STACK_SPACE (fn) != 0)
1411 return false;
1413 #endif
1415 /* If any argument goes in memory, then it might clobber an outgoing
1416 argument. */
1418 CUMULATIVE_ARGS args_so_far;
1419 tree fn, arg;
1421 fn = emit_block_move_libcall_fn (false);
1422 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1424 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1425 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1427 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1428 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1429 if (!tmp || !REG_P (tmp))
1430 return false;
1431 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1432 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1433 NULL_TREE, 1))
1434 return false;
1435 #endif
1436 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1439 return true;
1442 /* A subroutine of emit_block_move. Expand a movstr pattern;
1443 return true if successful. */
1445 static bool
1446 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1448 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1449 int save_volatile_ok = volatile_ok;
1450 enum machine_mode mode;
1452 /* Since this is a move insn, we don't care about volatility. */
1453 volatile_ok = 1;
1455 /* Try the most limited insn first, because there's no point
1456 including more than one in the machine description unless
1457 the more limited one has some advantage. */
1459 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1460 mode = GET_MODE_WIDER_MODE (mode))
1462 enum insn_code code = movstr_optab[(int) mode];
1463 insn_operand_predicate_fn pred;
1465 if (code != CODE_FOR_nothing
1466 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1467 here because if SIZE is less than the mode mask, as it is
1468 returned by the macro, it will definitely be less than the
1469 actual mode mask. */
1470 && ((GET_CODE (size) == CONST_INT
1471 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1472 <= (GET_MODE_MASK (mode) >> 1)))
1473 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1474 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1475 || (*pred) (x, BLKmode))
1476 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1477 || (*pred) (y, BLKmode))
1478 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1479 || (*pred) (opalign, VOIDmode)))
1481 rtx op2;
1482 rtx last = get_last_insn ();
1483 rtx pat;
1485 op2 = convert_to_mode (mode, size, 1);
1486 pred = insn_data[(int) code].operand[2].predicate;
1487 if (pred != 0 && ! (*pred) (op2, mode))
1488 op2 = copy_to_mode_reg (mode, op2);
1490 /* ??? When called via emit_block_move_for_call, it'd be
1491 nice if there were some way to inform the backend, so
1492 that it doesn't fail the expansion because it thinks
1493 emitting the libcall would be more efficient. */
1495 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1496 if (pat)
1498 emit_insn (pat);
1499 volatile_ok = save_volatile_ok;
1500 return true;
1502 else
1503 delete_insns_since (last);
1507 volatile_ok = save_volatile_ok;
1508 return false;
1511 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1512 Return the return value from memcpy, 0 otherwise. */
1514 static rtx
1515 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1517 rtx dst_addr, src_addr;
1518 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1519 enum machine_mode size_mode;
1520 rtx retval;
1522 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1524 It is unsafe to save the value generated by protect_from_queue and reuse
1525 it later. Consider what happens if emit_queue is called before the
1526 return value from protect_from_queue is used.
1528 Expansion of the CALL_EXPR below will call emit_queue before we are
1529 finished emitting RTL for argument setup. So if we are not careful we
1530 could get the wrong value for an argument.
1532 To avoid this problem we go ahead and emit code to copy the addresses of
1533 DST and SRC and SIZE into new pseudos. We can then place those new
1534 pseudos into an RTL_EXPR and use them later, even after a call to
1535 emit_queue.
1537 Note this is not strictly needed for library calls since they do not call
1538 emit_queue before loading their arguments. However, we may need to have
1539 library calls call emit_queue in the future since failing to do so could
1540 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1541 arguments in registers. */
1543 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1544 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1546 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1547 src_addr = convert_memory_address (ptr_mode, src_addr);
1549 dst_tree = make_tree (ptr_type_node, dst_addr);
1550 src_tree = make_tree (ptr_type_node, src_addr);
1552 if (TARGET_MEM_FUNCTIONS)
1553 size_mode = TYPE_MODE (sizetype);
1554 else
1555 size_mode = TYPE_MODE (unsigned_type_node);
1557 size = convert_to_mode (size_mode, size, 1);
1558 size = copy_to_mode_reg (size_mode, size);
1560 /* It is incorrect to use the libcall calling conventions to call
1561 memcpy in this context. This could be a user call to memcpy and
1562 the user may wish to examine the return value from memcpy. For
1563 targets where libcalls and normal calls have different conventions
1564 for returning pointers, we could end up generating incorrect code.
1566 For convenience, we generate the call to bcopy this way as well. */
1568 if (TARGET_MEM_FUNCTIONS)
1569 size_tree = make_tree (sizetype, size);
1570 else
1571 size_tree = make_tree (unsigned_type_node, size);
1573 fn = emit_block_move_libcall_fn (true);
1574 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1575 if (TARGET_MEM_FUNCTIONS)
1577 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1578 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1580 else
1582 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1583 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1586 /* Now we have to build up the CALL_EXPR itself. */
1587 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1588 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1589 call_expr, arg_list, NULL_TREE);
1591 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1593 /* If we are initializing a readonly value, show the above call clobbered
1594 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1595 the delay slot scheduler might overlook conflicts and take nasty
1596 decisions. */
1597 if (RTX_UNCHANGING_P (dst))
1598 add_function_usage_to
1599 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1600 gen_rtx_CLOBBER (VOIDmode, dst),
1601 NULL_RTX));
1603 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1606 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1607 for the function we use for block copies. The first time FOR_CALL
1608 is true, we call assemble_external. */
1610 static GTY(()) tree block_move_fn;
1612 void
1613 init_block_move_fn (const char *asmspec)
1615 if (!block_move_fn)
1617 tree args, fn;
1619 if (TARGET_MEM_FUNCTIONS)
1621 fn = get_identifier ("memcpy");
1622 args = build_function_type_list (ptr_type_node, ptr_type_node,
1623 const_ptr_type_node, sizetype,
1624 NULL_TREE);
1626 else
1628 fn = get_identifier ("bcopy");
1629 args = build_function_type_list (void_type_node, const_ptr_type_node,
1630 ptr_type_node, unsigned_type_node,
1631 NULL_TREE);
1634 fn = build_decl (FUNCTION_DECL, fn, args);
1635 DECL_EXTERNAL (fn) = 1;
1636 TREE_PUBLIC (fn) = 1;
1637 DECL_ARTIFICIAL (fn) = 1;
1638 TREE_NOTHROW (fn) = 1;
1640 block_move_fn = fn;
1643 if (asmspec)
1645 SET_DECL_RTL (block_move_fn, NULL_RTX);
1646 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1650 static tree
1651 emit_block_move_libcall_fn (int for_call)
1653 static bool emitted_extern;
1655 if (!block_move_fn)
1656 init_block_move_fn (NULL);
1658 if (for_call && !emitted_extern)
1660 emitted_extern = true;
1661 make_decl_rtl (block_move_fn, NULL);
1662 assemble_external (block_move_fn);
1665 return block_move_fn;
1668 /* A subroutine of emit_block_move. Copy the data via an explicit
1669 loop. This is used only when libcalls are forbidden. */
1670 /* ??? It'd be nice to copy in hunks larger than QImode. */
1672 static void
1673 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1674 unsigned int align ATTRIBUTE_UNUSED)
1676 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1677 enum machine_mode iter_mode;
1679 iter_mode = GET_MODE (size);
1680 if (iter_mode == VOIDmode)
1681 iter_mode = word_mode;
1683 top_label = gen_label_rtx ();
1684 cmp_label = gen_label_rtx ();
1685 iter = gen_reg_rtx (iter_mode);
1687 emit_move_insn (iter, const0_rtx);
1689 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1690 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1691 do_pending_stack_adjust ();
1693 emit_jump (cmp_label);
1694 emit_label (top_label);
1696 tmp = convert_modes (Pmode, iter_mode, iter, true);
1697 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1698 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1699 x = change_address (x, QImode, x_addr);
1700 y = change_address (y, QImode, y_addr);
1702 emit_move_insn (x, y);
1704 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1705 true, OPTAB_LIB_WIDEN);
1706 if (tmp != iter)
1707 emit_move_insn (iter, tmp);
1709 emit_label (cmp_label);
1711 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1712 true, top_label);
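/* A minimal illustrative sketch (plain C, hypothetical helper) of the loop
   structure emitted above: initialize the iterator, jump to the
   comparison, copy one byte per iteration, and keep looping while
   ITER < SIZE.  */
static void
example_byte_copy_loop (unsigned char *x, const unsigned char *y,
                        unsigned long size)
{
  unsigned long iter = 0;

  goto cmp;
 top:
  x[iter] = y[iter];
  iter++;
 cmp:
  if (iter < size)
    goto top;
}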
1715 /* Copy all or part of a value X into registers starting at REGNO.
1716 The number of registers to be filled is NREGS. */
1718 void
1719 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1721 int i;
1722 #ifdef HAVE_load_multiple
1723 rtx pat;
1724 rtx last;
1725 #endif
1727 if (nregs == 0)
1728 return;
1730 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1731 x = validize_mem (force_const_mem (mode, x));
1733 /* See if the machine can do this with a load multiple insn. */
1734 #ifdef HAVE_load_multiple
1735 if (HAVE_load_multiple)
1737 last = get_last_insn ();
1738 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1739 GEN_INT (nregs));
1740 if (pat)
1742 emit_insn (pat);
1743 return;
1745 else
1746 delete_insns_since (last);
1748 #endif
1750 for (i = 0; i < nregs; i++)
1751 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1752 operand_subword_force (x, i, mode));
1755 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1756 The number of registers to be filled is NREGS. */
1758 void
1759 move_block_from_reg (int regno, rtx x, int nregs)
1761 int i;
1763 if (nregs == 0)
1764 return;
1766 /* See if the machine can do this with a store multiple insn. */
1767 #ifdef HAVE_store_multiple
1768 if (HAVE_store_multiple)
1770 rtx last = get_last_insn ();
1771 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1772 GEN_INT (nregs));
1773 if (pat)
1775 emit_insn (pat);
1776 return;
1778 else
1779 delete_insns_since (last);
1781 #endif
1783 for (i = 0; i < nregs; i++)
1785 rtx tem = operand_subword (x, i, 1, BLKmode);
1787 if (tem == 0)
1788 abort ();
1790 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1794 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1795 ORIG, where ORIG is a non-consecutive group of registers represented by
1796 a PARALLEL. The clone is identical to the original except in that the
1797 original set of registers is replaced by a new set of pseudo registers.
1798 The new set has the same modes as the original set. */
1801 gen_group_rtx (rtx orig)
1803 int i, length;
1804 rtx *tmps;
1806 if (GET_CODE (orig) != PARALLEL)
1807 abort ();
1809 length = XVECLEN (orig, 0);
1810 tmps = alloca (sizeof (rtx) * length);
1812 /* Skip a NULL entry in first slot. */
1813 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1815 if (i)
1816 tmps[0] = 0;
1818 for (; i < length; i++)
1820 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1821 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1823 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1826 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1829 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1830 where DST is non-consecutive registers represented by a PARALLEL.
1831 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1832 if not known. */
1834 void
1835 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1837 rtx *tmps, src;
1838 int start, i;
1840 if (GET_CODE (dst) != PARALLEL)
1841 abort ();
1843 /* Check for a NULL entry, used to indicate that the parameter goes
1844 both on the stack and in registers. */
1845 if (XEXP (XVECEXP (dst, 0, 0), 0))
1846 start = 0;
1847 else
1848 start = 1;
1850 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1852 /* Process the pieces. */
1853 for (i = start; i < XVECLEN (dst, 0); i++)
1855 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1856 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1857 unsigned int bytelen = GET_MODE_SIZE (mode);
1858 int shift = 0;
1860 /* Handle trailing fragments that run over the size of the struct. */
1861 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1863 /* Arrange to shift the fragment to where it belongs.
1864 extract_bit_field loads to the lsb of the reg. */
1865 if (
1866 #ifdef BLOCK_REG_PADDING
1867 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1868 == (BYTES_BIG_ENDIAN ? upward : downward)
1869 #else
1870 BYTES_BIG_ENDIAN
1871 #endif
1873 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1874 bytelen = ssize - bytepos;
1875 if (bytelen <= 0)
1876 abort ();
1879 /* If we won't be loading directly from memory, protect the real source
1880 from strange tricks we might play; but make sure that the source can
1881 be loaded directly into the destination. */
1882 src = orig_src;
1883 if (GET_CODE (orig_src) != MEM
1884 && (!CONSTANT_P (orig_src)
1885 || (GET_MODE (orig_src) != mode
1886 && GET_MODE (orig_src) != VOIDmode)))
1888 if (GET_MODE (orig_src) == VOIDmode)
1889 src = gen_reg_rtx (mode);
1890 else
1891 src = gen_reg_rtx (GET_MODE (orig_src));
1893 emit_move_insn (src, orig_src);
1896 /* Optimize the access just a bit. */
1897 if (GET_CODE (src) == MEM
1898 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1899 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1900 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1901 && bytelen == GET_MODE_SIZE (mode))
1903 tmps[i] = gen_reg_rtx (mode);
1904 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1906 else if (GET_CODE (src) == CONCAT)
1908 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1909 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1911 if ((bytepos == 0 && bytelen == slen0)
1912 || (bytepos != 0 && bytepos + bytelen <= slen))
1914 /* The following assumes that the concatenated objects all
1915 have the same size. In this case, a simple calculation
1916 can be used to determine the object and the bit field
1917 to be extracted. */
1918 tmps[i] = XEXP (src, bytepos / slen0);
1919 if (! CONSTANT_P (tmps[i])
1920 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1921 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1922 (bytepos % slen0) * BITS_PER_UNIT,
1923 1, NULL_RTX, mode, mode, ssize);
1925 else if (bytepos == 0)
1927 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1928 emit_move_insn (mem, src);
1929 tmps[i] = adjust_address (mem, mode, 0);
1931 else
1932 abort ();
1934 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1935 SIMD register, which is currently broken. Until we get GCC
1936 to emit proper RTL for these cases, let's dump to memory. */
1937 else if (VECTOR_MODE_P (GET_MODE (dst))
1938 && GET_CODE (src) == REG)
1940 int slen = GET_MODE_SIZE (GET_MODE (src));
1941 rtx mem;
1943 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1944 emit_move_insn (mem, src);
1945 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1947 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1948 && XVECLEN (dst, 0) > 1)
1949 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1950 else if (CONSTANT_P (src)
1951 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1952 tmps[i] = src;
1953 else
1954 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1955 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1956 mode, mode, ssize);
1958 if (shift)
1959 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1960 tmps[i], 0, OPTAB_WIDEN);
1963 emit_queue ();
1965 /* Copy the extracted pieces into the proper (probable) hard regs. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1967 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
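/* Editor's note, not part of the original file: a minimal sketch of calling
   emit_group_load.  GROUP is assumed to be a PARALLEL like the one sketched
   after gen_group_rtx above, and TYPE the corresponding 8-byte aggregate
   type; kept under "#if 0" so it is never compiled.  */
#if 0
static void
example_group_load (rtx group, tree type)
{
  /* Pull an 8-byte aggregate out of a stack temporary and into the
     non-consecutive registers described by GROUP.  */
  rtx mem = assign_stack_temp (BLKmode, 8, 0);
  emit_group_load (group, mem, type, 8);
}
#endif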
1970 /* Emit code to move a block SRC to block DST, where SRC and DST are
1971 non-consecutive groups of registers, each represented by a PARALLEL. */
1973 void
1974 emit_group_move (rtx dst, rtx src)
1976 int i;
1978 if (GET_CODE (src) != PARALLEL
1979 || GET_CODE (dst) != PARALLEL
1980 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1981 abort ();
1983 /* Skip first entry if NULL. */
1984 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1985 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1986 XEXP (XVECEXP (src, 0, i), 0));
1989 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1990 where SRC is non-consecutive registers represented by a PARALLEL.
1991 SSIZE represents the total size of block ORIG_DST, or -1 if not
1992 known. */
1994 void
1995 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1997 rtx *tmps, dst;
1998 int start, i;
2000 if (GET_CODE (src) != PARALLEL)
2001 abort ();
2003 /* Check for a NULL entry, used to indicate that the parameter goes
2004 both on the stack and in registers. */
2005 if (XEXP (XVECEXP (src, 0, 0), 0))
2006 start = 0;
2007 else
2008 start = 1;
2010 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2012 /* Copy the (probable) hard regs into pseudos. */
2013 for (i = start; i < XVECLEN (src, 0); i++)
2015 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2016 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2017 emit_move_insn (tmps[i], reg);
2019 emit_queue ();
2021 /* If we won't be storing directly into memory, protect the real destination
2022 from strange tricks we might play. */
2023 dst = orig_dst;
2024 if (GET_CODE (dst) == PARALLEL)
2026 rtx temp;
2028 /* We can get a PARALLEL dst if there is a conditional expression in
2029 a return statement. In that case, the dst and src are the same,
2030 so no action is necessary. */
2031 if (rtx_equal_p (dst, src))
2032 return;
2034 /* It is unclear if we can ever reach here, but we may as well handle
2035 it. Allocate a temporary, and split this into a store/load to/from
2036 the temporary. */
2038 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2039 emit_group_store (temp, src, type, ssize);
2040 emit_group_load (dst, temp, type, ssize);
2041 return;
2043 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2045 dst = gen_reg_rtx (GET_MODE (orig_dst));
2046 /* Make life a bit easier for combine. */
2047 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2050 /* Process the pieces. */
2051 for (i = start; i < XVECLEN (src, 0); i++)
2053 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2054 enum machine_mode mode = GET_MODE (tmps[i]);
2055 unsigned int bytelen = GET_MODE_SIZE (mode);
2056 rtx dest = dst;
2058 /* Handle trailing fragments that run over the size of the struct. */
2059 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2061 /* store_bit_field always takes its value from the lsb.
2062 Move the fragment to the lsb if it's not already there. */
2063 if (
2064 #ifdef BLOCK_REG_PADDING
2065 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2066 == (BYTES_BIG_ENDIAN ? upward : downward)
2067 #else
2068 BYTES_BIG_ENDIAN
2069 #endif
2072 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2073 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2074 tmps[i], 0, OPTAB_WIDEN);
2076 bytelen = ssize - bytepos;
2079 if (GET_CODE (dst) == CONCAT)
2081 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2082 dest = XEXP (dst, 0);
2083 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2086 dest = XEXP (dst, 1);
2088 else if (bytepos == 0 && XVECLEN (src, 0))
2090 dest = assign_stack_temp (GET_MODE (dest),
2091 GET_MODE_SIZE (GET_MODE (dest)), 0);
2092 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2093 tmps[i]);
2094 dst = dest;
2095 break;
2097 else
2098 abort ();
2101 /* Optimize the access just a bit. */
2102 if (GET_CODE (dest) == MEM
2103 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2104 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2105 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2106 && bytelen == GET_MODE_SIZE (mode))
2107 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2108 else
2109 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2110 mode, tmps[i], ssize);
2113 emit_queue ();
2115 /* Copy from the pseudo into the (probable) hard reg. */
2116 if (orig_dst != dst)
2117 emit_move_insn (orig_dst, dst);
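/* Editor's note, not part of the original file: the converse of the sketch
   after emit_group_load -- spill the registers described by GROUP back into
   memory, as might happen when a multi-register value has to end up in a
   BLKmode object.  GROUP and TYPE carry the same assumptions as before;
   kept under "#if 0".  */
#if 0
static rtx
example_group_store (rtx group, tree type)
{
  rtx mem = assign_stack_temp (BLKmode, 8, 0);
  emit_group_store (mem, group, type, 8);
  return mem;
}
#endif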
2120 /* Generate code to copy a BLKmode object of TYPE out of a
2121 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2122 is null, a stack temporary is created. TGTBLK is returned.
2124 The purpose of this routine is to handle functions that return
2125 BLKmode structures in registers. Some machines (the PA for example)
2126 want to return all small structures in registers regardless of the
2127 structure's alignment. */
2130 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2132 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2133 rtx src = NULL, dst = NULL;
2134 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2135 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2137 if (tgtblk == 0)
2139 tgtblk = assign_temp (build_qualified_type (type,
2140 (TYPE_QUALS (type)
2141 | TYPE_QUAL_CONST)),
2142 0, 1, 1);
2143 preserve_temp_slots (tgtblk);
2146 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2147 into a new pseudo which is a full word. */
2149 if (GET_MODE (srcreg) != BLKmode
2150 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2151 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2153 /* If the structure doesn't take up a whole number of words, see whether
2154 SRCREG is padded on the left or on the right. If it's on the left,
2155 set PADDING_CORRECTION to the number of bits to skip.
2157 In most ABIs, the structure will be returned at the least significant end of
2158 the register, which translates to right padding on little-endian
2159 targets and left padding on big-endian targets. The opposite
2160 holds if the structure is returned at the most significant
2161 end of the register. */
2162 if (bytes % UNITS_PER_WORD != 0
2163 && (targetm.calls.return_in_msb (type)
2164 ? !BYTES_BIG_ENDIAN
2165 : BYTES_BIG_ENDIAN))
2166 padding_correction
2167 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2169 /* Copy the structure BITSIZE bits at a time.
2171 We could probably emit more efficient code for machines which do not use
2172 strict alignment, but it doesn't seem worth the effort at the current
2173 time. */
2174 for (bitpos = 0, xbitpos = padding_correction;
2175 bitpos < bytes * BITS_PER_UNIT;
2176 bitpos += bitsize, xbitpos += bitsize)
2178 /* We need a new source operand each time xbitpos is on a
2179 word boundary and when xbitpos == padding_correction
2180 (the first time through). */
2181 if (xbitpos % BITS_PER_WORD == 0
2182 || xbitpos == padding_correction)
2183 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2184 GET_MODE (srcreg));
2186 /* We need a new destination operand each time bitpos is on
2187 a word boundary. */
2188 if (bitpos % BITS_PER_WORD == 0)
2189 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2191 /* Use xbitpos for the source extraction (right justified) and
2192 bitpos for the destination store (left justified). */
2193 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2194 extract_bit_field (src, bitsize,
2195 xbitpos % BITS_PER_WORD, 1,
2196 NULL_RTX, word_mode, word_mode,
2197 BITS_PER_WORD),
2198 BITS_PER_WORD);
2201 return tgtblk;
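/* Editor's note, not part of the original file: a hedged sketch of using
   copy_blkmode_from_reg for a callee that returned a small structure in a
   register.  Hard register 0 and the 6-byte structure type are hypothetical;
   kept under "#if 0".  */
#if 0
static rtx
example_copy_struct_return (tree type)
{
  /* TYPE is assumed to satisfy int_size_in_bytes (type) == 6.  Passing a
     null TGTBLK makes copy_blkmode_from_reg allocate the stack temporary
     itself and return it.  */
  rtx value_reg = gen_rtx_REG (DImode, 0);
  return copy_blkmode_from_reg (NULL_RTX, value_reg, type);
}
#endif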
2204 /* Add a USE expression for REG to the (possibly empty) list pointed
2205 to by CALL_FUSAGE. REG must denote a hard register. */
2207 void
2208 use_reg (rtx *call_fusage, rtx reg)
2210 if (GET_CODE (reg) != REG
2211 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2212 abort ();
2214 *call_fusage
2215 = gen_rtx_EXPR_LIST (VOIDmode,
2216 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2219 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2220 starting at REGNO. All of these registers must be hard registers. */
2222 void
2223 use_regs (rtx *call_fusage, int regno, int nregs)
2225 int i;
2227 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2228 abort ();
2230 for (i = 0; i < nregs; i++)
2231 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2234 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2235 PARALLEL REGS. This is for calls that pass values in multiple
2236 non-contiguous locations. The Irix 6 ABI has examples of this. */
2238 void
2239 use_group_regs (rtx *call_fusage, rtx regs)
2241 int i;
2243 for (i = 0; i < XVECLEN (regs, 0); i++)
2245 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2247 /* A NULL entry means the parameter goes both on the stack and in
2248 registers. This can also be a MEM for targets that pass values
2249 partially on the stack and partially in registers. */
2250 if (reg != 0 && GET_CODE (reg) == REG)
2251 use_reg (call_fusage, reg);
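/* Editor's note, not part of the original file: a small sketch of how the
   use_* helpers accumulate a CALL_FUSAGE list (the list is later attached
   to the call insn by the code in calls.c).  The hard register numbers are
   hypothetical; kept under "#if 0" so it is never compiled.  */
#if 0
static void
example_record_call_uses (void)
{
  rtx call_fusage = NULL_RTX;
  /* Note a single argument register, then two consecutive ones.  */
  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
  use_regs (&call_fusage, 1, 2);
  /* call_fusage now holds USE expressions for hard regs 0, 1 and 2.  */
}
#endif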
2256 /* Determine whether the LEN bytes generated by CONSTFUN can be
2257 stored to memory using several move instructions. CONSTFUNDATA is
2258 a pointer which will be passed as argument in every CONSTFUN call.
2259 ALIGN is maximum alignment we can assume. Return nonzero if a
2260 call to store_by_pieces should succeed. */
2263 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2264 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2265 void *constfundata, unsigned int align)
2267 unsigned HOST_WIDE_INT max_size, l;
2268 HOST_WIDE_INT offset = 0;
2269 enum machine_mode mode, tmode;
2270 enum insn_code icode;
2271 int reverse;
2272 rtx cst;
2274 if (len == 0)
2275 return 1;
2277 if (! STORE_BY_PIECES_P (len, align))
2278 return 0;
2280 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2281 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2282 align = MOVE_MAX * BITS_PER_UNIT;
2284 /* We would first store what we can in the largest integer mode, then go to
2285 successively smaller modes. */
2287 for (reverse = 0;
2288 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2289 reverse++)
2291 l = len;
2292 mode = VOIDmode;
2293 max_size = STORE_MAX_PIECES + 1;
2294 while (max_size > 1)
2296 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2297 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2298 if (GET_MODE_SIZE (tmode) < max_size)
2299 mode = tmode;
2301 if (mode == VOIDmode)
2302 break;
2304 icode = mov_optab->handlers[(int) mode].insn_code;
2305 if (icode != CODE_FOR_nothing
2306 && align >= GET_MODE_ALIGNMENT (mode))
2308 unsigned int size = GET_MODE_SIZE (mode);
2310 while (l >= size)
2312 if (reverse)
2313 offset -= size;
2315 cst = (*constfun) (constfundata, offset, mode);
2316 if (!LEGITIMATE_CONSTANT_P (cst))
2317 return 0;
2319 if (!reverse)
2320 offset += size;
2322 l -= size;
2326 max_size = GET_MODE_SIZE (mode);
2329 /* The code above should have handled everything. */
2330 if (l != 0)
2331 abort ();
2334 return 1;
2337 /* Generate several move instructions to store LEN bytes generated by
2338 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2339 pointer which will be passed as argument in every CONSTFUN call.
2340 ALIGN is maximum alignment we can assume.
2341 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2342 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2343 stpcpy. */
2346 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2347 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2348 void *constfundata, unsigned int align, int endp)
2350 struct store_by_pieces data;
2352 if (len == 0)
2354 if (endp == 2)
2355 abort ();
2356 return to;
2359 if (! STORE_BY_PIECES_P (len, align))
2360 abort ();
2361 to = protect_from_queue (to, 1);
2362 data.constfun = constfun;
2363 data.constfundata = constfundata;
2364 data.len = len;
2365 data.to = to;
2366 store_by_pieces_1 (&data, align);
2367 if (endp)
2369 rtx to1;
2371 if (data.reverse)
2372 abort ();
2373 if (data.autinc_to)
2375 if (endp == 2)
2377 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2378 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2379 else
2380 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2381 -1));
2383 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2384 data.offset);
2386 else
2388 if (endp == 2)
2389 --data.offset;
2390 to1 = adjust_address (data.to, QImode, data.offset);
2392 return to1;
2394 else
2395 return data.to;
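/* Editor's note, not part of the original file: a sketch of the CONSTFUN
   protocol shared by can_store_by_pieces and store_by_pieces.  The callback
   and the all-ones fill are illustrative inventions, not existing GCC code;
   kept under "#if 0" so they are never compiled.  */
#if 0
/* Return the constant to be stored at OFFSET in mode MODE; for a uniform
   fill the offset and the opaque data pointer are not needed.  */
static rtx
example_all_ones_fn (void *data ATTRIBUTE_UNUSED,
                     HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                     enum machine_mode mode)
{
  return gen_int_mode (-1, mode);
}
/* Fill LEN bytes of the BLKmode MEM rtx TO with 0xff bytes, but only when
   the target can do it with a handful of move insns.  */
static void
example_fill_with_ones (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, example_all_ones_fn, NULL, align))
    store_by_pieces (to, len, example_all_ones_fn, NULL, align, 0);
}
#endif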
2398 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2399 rtx with BLKmode). The caller must pass TO through protect_from_queue
2400 before calling. ALIGN is maximum alignment we can assume. */
2402 static void
2403 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2405 struct store_by_pieces data;
2407 if (len == 0)
2408 return;
2410 data.constfun = clear_by_pieces_1;
2411 data.constfundata = NULL;
2412 data.len = len;
2413 data.to = to;
2414 store_by_pieces_1 (&data, align);
2417 /* Callback routine for clear_by_pieces.
2418 Return const0_rtx unconditionally. */
2420 static rtx
2421 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2422 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2423 enum machine_mode mode ATTRIBUTE_UNUSED)
2425 return const0_rtx;
2428 /* Subroutine of clear_by_pieces and store_by_pieces.
2429 Generate several move instructions to store LEN bytes of block TO. (A MEM
2430 rtx with BLKmode). The caller must pass TO through protect_from_queue
2431 before calling. ALIGN is maximum alignment we can assume. */
2433 static void
2434 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2435 unsigned int align ATTRIBUTE_UNUSED)
2437 rtx to_addr = XEXP (data->to, 0);
2438 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2439 enum machine_mode mode = VOIDmode, tmode;
2440 enum insn_code icode;
2442 data->offset = 0;
2443 data->to_addr = to_addr;
2444 data->autinc_to
2445 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2446 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2448 data->explicit_inc_to = 0;
2449 data->reverse
2450 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2451 if (data->reverse)
2452 data->offset = data->len;
2454 /* If storing requires more than two move insns,
2455 copy addresses to registers (to make displacements shorter)
2456 and use post-increment if available. */
2457 if (!data->autinc_to
2458 && move_by_pieces_ninsns (data->len, align) > 2)
2460 /* Determine the main mode we'll be using. */
2461 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2462 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2463 if (GET_MODE_SIZE (tmode) < max_size)
2464 mode = tmode;
2466 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2468 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2469 data->autinc_to = 1;
2470 data->explicit_inc_to = -1;
2473 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2474 && ! data->autinc_to)
2476 data->to_addr = copy_addr_to_reg (to_addr);
2477 data->autinc_to = 1;
2478 data->explicit_inc_to = 1;
2481 if (!data->autinc_to && CONSTANT_P (to_addr))
2482 data->to_addr = copy_addr_to_reg (to_addr);
2485 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2486 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2487 align = MOVE_MAX * BITS_PER_UNIT;
2489 /* First store what we can in the largest integer mode, then go to
2490 successively smaller modes. */
2492 while (max_size > 1)
2494 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2495 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2496 if (GET_MODE_SIZE (tmode) < max_size)
2497 mode = tmode;
2499 if (mode == VOIDmode)
2500 break;
2502 icode = mov_optab->handlers[(int) mode].insn_code;
2503 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2504 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2506 max_size = GET_MODE_SIZE (mode);
2509 /* The code above should have handled everything. */
2510 if (data->len != 0)
2511 abort ();
2514 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2515 with move instructions for mode MODE. GENFUN is the gen_... function
2516 to make a move insn for that mode. DATA has all the other info. */
2518 static void
2519 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2520 struct store_by_pieces *data)
2522 unsigned int size = GET_MODE_SIZE (mode);
2523 rtx to1, cst;
2525 while (data->len >= size)
2527 if (data->reverse)
2528 data->offset -= size;
2530 if (data->autinc_to)
2531 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2532 data->offset);
2533 else
2534 to1 = adjust_address (data->to, mode, data->offset);
2536 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2537 emit_insn (gen_add2_insn (data->to_addr,
2538 GEN_INT (-(HOST_WIDE_INT) size)));
2540 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2541 emit_insn ((*genfun) (to1, cst));
2543 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2544 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2546 if (! data->reverse)
2547 data->offset += size;
2549 data->len -= size;
2553 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2554 its length in bytes. */
2557 clear_storage (rtx object, rtx size)
2559 rtx retval = 0;
2560 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2561 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2563 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2564 just move a zero. Otherwise, do this a piece at a time. */
2565 if (GET_MODE (object) != BLKmode
2566 && GET_CODE (size) == CONST_INT
2567 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2568 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2569 else
2571 object = protect_from_queue (object, 1);
2572 size = protect_from_queue (size, 0);
2574 if (size == const0_rtx)
2576 else if (GET_CODE (size) == CONST_INT
2577 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2578 clear_by_pieces (object, INTVAL (size), align);
2579 else if (clear_storage_via_clrstr (object, size, align))
2581 else
2582 retval = clear_storage_via_libcall (object, size);
2585 return retval;
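/* Editor's note, not part of the original file: a minimal sketch of zeroing
   a freshly allocated BLKmode temporary with clear_storage; the 64-byte size
   is arbitrary.  Kept under "#if 0" so it is never compiled.  */
#if 0
static rtx
example_cleared_temp (void)
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);
  /* clear_storage picks clear_by_pieces, a clrstr pattern or a library
     call, depending on the size and the target.  */
  clear_storage (mem, GEN_INT (64));
  return mem;
}
#endif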
2588 /* A subroutine of clear_storage. Expand a clrstr pattern;
2589 return true if successful. */
2591 static bool
2592 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2594 /* Try the most limited insn first, because there's no point
2595 including more than one in the machine description unless
2596 the more limited one has some advantage. */
2598 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2599 enum machine_mode mode;
2601 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2602 mode = GET_MODE_WIDER_MODE (mode))
2604 enum insn_code code = clrstr_optab[(int) mode];
2605 insn_operand_predicate_fn pred;
2607 if (code != CODE_FOR_nothing
2608 /* We don't need MODE to be narrower than
2609 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2610 the mode mask, as it is returned by the macro, it will
2611 definitely be less than the actual mode mask. */
2612 && ((GET_CODE (size) == CONST_INT
2613 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2614 <= (GET_MODE_MASK (mode) >> 1)))
2615 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2616 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2617 || (*pred) (object, BLKmode))
2618 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2619 || (*pred) (opalign, VOIDmode)))
2621 rtx op1;
2622 rtx last = get_last_insn ();
2623 rtx pat;
2625 op1 = convert_to_mode (mode, size, 1);
2626 pred = insn_data[(int) code].operand[1].predicate;
2627 if (pred != 0 && ! (*pred) (op1, mode))
2628 op1 = copy_to_mode_reg (mode, op1);
2630 pat = GEN_FCN ((int) code) (object, op1, opalign);
2631 if (pat)
2633 emit_insn (pat);
2634 return true;
2636 else
2637 delete_insns_since (last);
2641 return false;
2644 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2645 Return the return value of memset, 0 otherwise. */
2647 static rtx
2648 clear_storage_via_libcall (rtx object, rtx size)
2650 tree call_expr, arg_list, fn, object_tree, size_tree;
2651 enum machine_mode size_mode;
2652 rtx retval;
2654 /* OBJECT or SIZE may have been passed through protect_from_queue.
2656 It is unsafe to save the value generated by protect_from_queue
2657 and reuse it later. Consider what happens if emit_queue is
2658 called before the return value from protect_from_queue is used.
2660 Expansion of the CALL_EXPR below will call emit_queue before
2661 we are finished emitting RTL for argument setup. So if we are
2662 not careful we could get the wrong value for an argument.
2664 To avoid this problem we go ahead and emit code to copy OBJECT
2665 and SIZE into new pseudos. We can then place those new pseudos
2666 into an RTL_EXPR and use them later, even after a call to
2667 emit_queue.
2669 Note this is not strictly needed for library calls since they
2670 do not call emit_queue before loading their arguments. However,
2671 we may need to have library calls call emit_queue in the future
2672 since failing to do so could cause problems for targets which
2673 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2675 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2677 if (TARGET_MEM_FUNCTIONS)
2678 size_mode = TYPE_MODE (sizetype);
2679 else
2680 size_mode = TYPE_MODE (unsigned_type_node);
2681 size = convert_to_mode (size_mode, size, 1);
2682 size = copy_to_mode_reg (size_mode, size);
2684 /* It is incorrect to use the libcall calling conventions to call
2685 memset in this context. This could be a user call to memset and
2686 the user may wish to examine the return value from memset. For
2687 targets where libcalls and normal calls have different conventions
2688 for returning pointers, we could end up generating incorrect code.
2690 For convenience, we generate the call to bzero this way as well. */
2692 object_tree = make_tree (ptr_type_node, object);
2693 if (TARGET_MEM_FUNCTIONS)
2694 size_tree = make_tree (sizetype, size);
2695 else
2696 size_tree = make_tree (unsigned_type_node, size);
2698 fn = clear_storage_libcall_fn (true);
2699 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2700 if (TARGET_MEM_FUNCTIONS)
2701 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2702 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2704 /* Now we have to build up the CALL_EXPR itself. */
2705 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2706 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2707 call_expr, arg_list, NULL_TREE);
2709 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2711 /* If we are initializing a readonly value, show the above call
2712 clobbered it. Otherwise, a load from it may erroneously be
2713 hoisted from a loop. */
2714 if (RTX_UNCHANGING_P (object))
2715 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2717 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2720 /* A subroutine of clear_storage_via_libcall. Create the tree node
2721 for the function we use for block clears. The first time FOR_CALL
2722 is true, we call assemble_external. */
2724 static GTY(()) tree block_clear_fn;
2726 void
2727 init_block_clear_fn (const char *asmspec)
2729 if (!block_clear_fn)
2731 tree fn, args;
2733 if (TARGET_MEM_FUNCTIONS)
2735 fn = get_identifier ("memset");
2736 args = build_function_type_list (ptr_type_node, ptr_type_node,
2737 integer_type_node, sizetype,
2738 NULL_TREE);
2740 else
2742 fn = get_identifier ("bzero");
2743 args = build_function_type_list (void_type_node, ptr_type_node,
2744 unsigned_type_node, NULL_TREE);
2747 fn = build_decl (FUNCTION_DECL, fn, args);
2748 DECL_EXTERNAL (fn) = 1;
2749 TREE_PUBLIC (fn) = 1;
2750 DECL_ARTIFICIAL (fn) = 1;
2751 TREE_NOTHROW (fn) = 1;
2753 block_clear_fn = fn;
2756 if (asmspec)
2758 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2759 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2763 static tree
2764 clear_storage_libcall_fn (int for_call)
2766 static bool emitted_extern;
2768 if (!block_clear_fn)
2769 init_block_clear_fn (NULL);
2771 if (for_call && !emitted_extern)
2773 emitted_extern = true;
2774 make_decl_rtl (block_clear_fn, NULL);
2775 assemble_external (block_clear_fn);
2778 return block_clear_fn;
2781 /* Generate code to copy Y into X.
2782 Both Y and X must have the same mode, except that
2783 Y can be a constant with VOIDmode.
2784 This mode cannot be BLKmode; use emit_block_move for that.
2786 Return the last instruction emitted. */
2789 emit_move_insn (rtx x, rtx y)
2791 enum machine_mode mode = GET_MODE (x);
2792 rtx y_cst = NULL_RTX;
2793 rtx last_insn, set;
2795 x = protect_from_queue (x, 1);
2796 y = protect_from_queue (y, 0);
2798 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2799 abort ();
2801 if (CONSTANT_P (y))
2803 if (optimize
2804 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2805 && (last_insn = compress_float_constant (x, y)))
2806 return last_insn;
2808 y_cst = y;
2810 if (!LEGITIMATE_CONSTANT_P (y))
2812 y = force_const_mem (mode, y);
2814 /* If the target's cannot_force_const_mem prevented the spill,
2815 assume that the target's move expanders will also take care
2816 of the non-legitimate constant. */
2817 if (!y)
2818 y = y_cst;
2822 /* If X or Y are memory references, verify that their addresses are valid
2823 for the machine. */
2824 if (GET_CODE (x) == MEM
2825 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2826 && ! push_operand (x, GET_MODE (x)))
2827 || (flag_force_addr
2828 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2829 x = validize_mem (x);
2831 if (GET_CODE (y) == MEM
2832 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2833 || (flag_force_addr
2834 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2835 y = validize_mem (y);
2837 if (mode == BLKmode)
2838 abort ();
2840 last_insn = emit_move_insn_1 (x, y);
2842 if (y_cst && GET_CODE (x) == REG
2843 && (set = single_set (last_insn)) != NULL_RTX
2844 && SET_DEST (set) == x
2845 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2846 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2848 return last_insn;
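/* Editor's note, not part of the original file: the simplest possible use
   of emit_move_insn -- loading an integer constant into a fresh pseudo.
   Kept under "#if 0" so it is never compiled.  */
#if 0
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  /* emit_move_insn legitimizes the constant (forcing it into the constant
     pool if needed) and may record a REG_EQUAL note on the final insn.  */
  emit_move_insn (reg, gen_int_mode (42, SImode));
  return reg;
}
#endif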
2851 /* Low level part of emit_move_insn.
2852 Called just like emit_move_insn, but assumes X and Y
2853 are basically valid. */
2856 emit_move_insn_1 (rtx x, rtx y)
2858 enum machine_mode mode = GET_MODE (x);
2859 enum machine_mode submode;
2860 enum mode_class class = GET_MODE_CLASS (mode);
2862 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2863 abort ();
2865 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2866 return
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2869 /* Expand complex moves by moving real part and imag part, if possible. */
2870 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2871 && BLKmode != (submode = GET_MODE_INNER (mode))
2872 && (mov_optab->handlers[(int) submode].insn_code
2873 != CODE_FOR_nothing))
2875 /* Don't split destination if it is a stack push. */
2876 int stack = push_operand (x, GET_MODE (x));
2878 #ifdef PUSH_ROUNDING
2879 /* In case we output to the stack, but the size is smaller than the
2880 machine can push exactly, we need to use move instructions. */
2881 if (stack
2882 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2883 != GET_MODE_SIZE (submode)))
2885 rtx temp;
2886 HOST_WIDE_INT offset1, offset2;
2888 /* Do not use anti_adjust_stack, since we don't want to update
2889 stack_pointer_delta. */
2890 temp = expand_binop (Pmode,
2891 #ifdef STACK_GROWS_DOWNWARD
2892 sub_optab,
2893 #else
2894 add_optab,
2895 #endif
2896 stack_pointer_rtx,
2897 GEN_INT
2898 (PUSH_ROUNDING
2899 (GET_MODE_SIZE (GET_MODE (x)))),
2900 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2902 if (temp != stack_pointer_rtx)
2903 emit_move_insn (stack_pointer_rtx, temp);
2905 #ifdef STACK_GROWS_DOWNWARD
2906 offset1 = 0;
2907 offset2 = GET_MODE_SIZE (submode);
2908 #else
2909 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2910 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2911 + GET_MODE_SIZE (submode));
2912 #endif
2914 emit_move_insn (change_address (x, submode,
2915 gen_rtx_PLUS (Pmode,
2916 stack_pointer_rtx,
2917 GEN_INT (offset1))),
2918 gen_realpart (submode, y));
2919 emit_move_insn (change_address (x, submode,
2920 gen_rtx_PLUS (Pmode,
2921 stack_pointer_rtx,
2922 GEN_INT (offset2))),
2923 gen_imagpart (submode, y));
2925 else
2926 #endif
2927 /* If this is a stack, push the highpart first, so it
2928 will be in the argument order.
2930 In that case, change_address is used only to convert
2931 the mode, not to change the address. */
2932 if (stack)
2934 /* Note that the real part always precedes the imag part in memory
2935 regardless of machine's endianness. */
2936 #ifdef STACK_GROWS_DOWNWARD
2937 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2938 gen_imagpart (submode, y));
2939 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2940 gen_realpart (submode, y));
2941 #else
2942 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2943 gen_realpart (submode, y));
2944 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2945 gen_imagpart (submode, y));
2946 #endif
2948 else
2950 rtx realpart_x, realpart_y;
2951 rtx imagpart_x, imagpart_y;
2953 /* If this is a complex value with each part being smaller than a
2954 word, the usual calling sequence will likely pack the pieces into
2955 a single register. Unfortunately, SUBREG of hard registers only
2956 deals in terms of words, so we have a problem converting input
2957 arguments to the CONCAT of two registers that is used elsewhere
2958 for complex values. If this is before reload, we can copy it into
2959 memory and reload. FIXME, we should see about using extract and
2960 insert on integer registers, but complex short and complex char
2961 variables should be rarely used. */
2962 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2963 && (reload_in_progress | reload_completed) == 0)
2965 int packed_dest_p
2966 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2967 int packed_src_p
2968 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2970 if (packed_dest_p || packed_src_p)
2972 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2973 ? MODE_FLOAT : MODE_INT);
2975 enum machine_mode reg_mode
2976 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2978 if (reg_mode != BLKmode)
2980 rtx mem = assign_stack_temp (reg_mode,
2981 GET_MODE_SIZE (mode), 0);
2982 rtx cmem = adjust_address (mem, mode, 0);
2984 if (packed_dest_p)
2986 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2988 emit_move_insn_1 (cmem, y);
2989 return emit_move_insn_1 (sreg, mem);
2991 else
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2995 emit_move_insn_1 (mem, sreg);
2996 return emit_move_insn_1 (x, cmem);
3002 realpart_x = gen_realpart (submode, x);
3003 realpart_y = gen_realpart (submode, y);
3004 imagpart_x = gen_imagpart (submode, x);
3005 imagpart_y = gen_imagpart (submode, y);
3007 /* Show the output dies here. This is necessary for SUBREGs
3008 of pseudos since we cannot track their lifetimes correctly;
3009 hard regs shouldn't appear here except as return values.
3010 We never want to emit such a clobber after reload. */
3011 if (x != y
3012 && ! (reload_in_progress || reload_completed)
3013 && (GET_CODE (realpart_x) == SUBREG
3014 || GET_CODE (imagpart_x) == SUBREG))
3015 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3017 emit_move_insn (realpart_x, realpart_y);
3018 emit_move_insn (imagpart_x, imagpart_y);
3021 return get_last_insn ();
3024 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3025 find a mode to do it in. If we have a movcc, use it. Otherwise,
3026 find the MODE_INT mode of the same width. */
3027 else if (GET_MODE_CLASS (mode) == MODE_CC
3028 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3030 enum insn_code insn_code;
3031 enum machine_mode tmode = VOIDmode;
3032 rtx x1 = x, y1 = y;
3034 if (mode != CCmode
3035 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3036 tmode = CCmode;
3037 else
3038 for (tmode = QImode; tmode != VOIDmode;
3039 tmode = GET_MODE_WIDER_MODE (tmode))
3040 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3041 break;
3043 if (tmode == VOIDmode)
3044 abort ();
3046 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3047 may call change_address which is not appropriate if we were
3048 called when a reload was in progress. We don't have to worry
3049 about changing the address since the size in bytes is supposed to
3050 be the same. Copy the MEM to change the mode and move any
3051 substitutions from the old MEM to the new one. */
3053 if (reload_in_progress)
3055 x = gen_lowpart_common (tmode, x1);
3056 if (x == 0 && GET_CODE (x1) == MEM)
3058 x = adjust_address_nv (x1, tmode, 0);
3059 copy_replacements (x1, x);
3062 y = gen_lowpart_common (tmode, y1);
3063 if (y == 0 && GET_CODE (y1) == MEM)
3065 y = adjust_address_nv (y1, tmode, 0);
3066 copy_replacements (y1, y);
3069 else
3071 x = gen_lowpart (tmode, x);
3072 y = gen_lowpart (tmode, y);
3075 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3076 return emit_insn (GEN_FCN (insn_code) (x, y));
3079 /* Try using a move pattern for the corresponding integer mode. This is
3080 only safe when simplify_subreg can convert MODE constants into integer
3081 constants. At present, it can only do this reliably if the value
3082 fits within a HOST_WIDE_INT. */
3083 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3084 && (submode = int_mode_for_mode (mode)) != BLKmode
3085 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3086 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3087 (simplify_gen_subreg (submode, x, mode, 0),
3088 simplify_gen_subreg (submode, y, mode, 0)));
3090 /* This will handle any multi-word or full-word mode that lacks a move_insn
3091 pattern. However, you will get better code if you define such patterns,
3092 even if they must turn into multiple assembler instructions. */
3093 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3095 rtx last_insn = 0;
3096 rtx seq, inner;
3097 int need_clobber;
3098 int i;
3100 #ifdef PUSH_ROUNDING
3102 /* If X is a push on the stack, do the push now and replace
3103 X with a reference to the stack pointer. */
3104 if (push_operand (x, GET_MODE (x)))
3106 rtx temp;
3107 enum rtx_code code;
3109 /* Do not use anti_adjust_stack, since we don't want to update
3110 stack_pointer_delta. */
3111 temp = expand_binop (Pmode,
3112 #ifdef STACK_GROWS_DOWNWARD
3113 sub_optab,
3114 #else
3115 add_optab,
3116 #endif
3117 stack_pointer_rtx,
3118 GEN_INT
3119 (PUSH_ROUNDING
3120 (GET_MODE_SIZE (GET_MODE (x)))),
3121 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3123 if (temp != stack_pointer_rtx)
3124 emit_move_insn (stack_pointer_rtx, temp);
3126 code = GET_CODE (XEXP (x, 0));
3128 /* Just hope that small offsets off SP are OK. */
3129 if (code == POST_INC)
3130 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3131 GEN_INT (-((HOST_WIDE_INT)
3132 GET_MODE_SIZE (GET_MODE (x)))));
3133 else if (code == POST_DEC)
3134 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3135 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3136 else
3137 temp = stack_pointer_rtx;
3139 x = change_address (x, VOIDmode, temp);
3141 #endif
3143 /* If we are in reload, see if either operand is a MEM whose address
3144 is scheduled for replacement. */
3145 if (reload_in_progress && GET_CODE (x) == MEM
3146 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3147 x = replace_equiv_address_nv (x, inner);
3148 if (reload_in_progress && GET_CODE (y) == MEM
3149 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3150 y = replace_equiv_address_nv (y, inner);
3152 start_sequence ();
3154 need_clobber = 0;
3155 for (i = 0;
3156 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3157 i++)
3159 rtx xpart = operand_subword (x, i, 1, mode);
3160 rtx ypart = operand_subword (y, i, 1, mode);
3162 /* If we can't get a part of Y, put Y into memory if it is a
3163 constant. Otherwise, force it into a register. If we still
3164 can't get a part of Y, abort. */
3165 if (ypart == 0 && CONSTANT_P (y))
3167 y = force_const_mem (mode, y);
3168 ypart = operand_subword (y, i, 1, mode);
3170 else if (ypart == 0)
3171 ypart = operand_subword_force (y, i, mode);
3173 if (xpart == 0 || ypart == 0)
3174 abort ();
3176 need_clobber |= (GET_CODE (xpart) == SUBREG);
3178 last_insn = emit_move_insn (xpart, ypart);
3181 seq = get_insns ();
3182 end_sequence ();
3184 /* Show the output dies here. This is necessary for SUBREGs
3185 of pseudos since we cannot track their lifetimes correctly;
3186 hard regs shouldn't appear here except as return values.
3187 We never want to emit such a clobber after reload. */
3188 if (x != y
3189 && ! (reload_in_progress || reload_completed)
3190 && need_clobber != 0)
3191 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3193 emit_insn (seq);
3195 return last_insn;
3197 else
3198 abort ();
3201 /* If Y is representable exactly in a narrower mode, and the target can
3202 perform the extension directly from constant or memory, then emit the
3203 move as an extension. */
3205 static rtx
3206 compress_float_constant (rtx x, rtx y)
3208 enum machine_mode dstmode = GET_MODE (x);
3209 enum machine_mode orig_srcmode = GET_MODE (y);
3210 enum machine_mode srcmode;
3211 REAL_VALUE_TYPE r;
3213 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3215 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3216 srcmode != orig_srcmode;
3217 srcmode = GET_MODE_WIDER_MODE (srcmode))
3219 enum insn_code ic;
3220 rtx trunc_y, last_insn;
3222 /* Skip if the target can't extend this way. */
3223 ic = can_extend_p (dstmode, srcmode, 0);
3224 if (ic == CODE_FOR_nothing)
3225 continue;
3227 /* Skip if the narrowed value isn't exact. */
3228 if (! exact_real_truncate (srcmode, &r))
3229 continue;
3231 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3233 if (LEGITIMATE_CONSTANT_P (trunc_y))
3235 /* Skip if the target needs extra instructions to perform
3236 the extension. */
3237 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3238 continue;
3240 else if (float_extend_from_mem[dstmode][srcmode])
3241 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3242 else
3243 continue;
3245 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3246 last_insn = get_last_insn ();
3248 if (GET_CODE (x) == REG)
3249 set_unique_reg_note (last_insn, REG_EQUAL, y);
3251 return last_insn;
3254 return NULL_RTX;
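/* Editor's note, not part of the original file: compress_float_constant is
   only called from emit_move_insn above, so this sketch merely shows the
   situation it optimizes -- a DFmode constant that is exactly representable
   in SFmode may be loaded from an SFmode constant-pool entry and extended,
   rather than loaded as a full DFmode constant.  The use of real_from_string
   is an assumption about the real.h interface; kept under "#if 0".  */
#if 0
static void
example_compressible_move (void)
{
  REAL_VALUE_TYPE r;
  rtx reg = gen_reg_rtx (DFmode);
  /* 1.5 is exact in SFmode, so the move below is a candidate.  */
  real_from_string (&r, "1.5");
  emit_move_insn (reg, CONST_DOUBLE_FROM_REAL_VALUE (r, DFmode));
}
#endif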
3257 /* Pushing data onto the stack. */
3259 /* Push a block of length SIZE (perhaps variable)
3260 and return an rtx to address the beginning of the block.
3261 Note that it is not possible for the value returned to be a QUEUED.
3262 The value may be virtual_outgoing_args_rtx.
3264 EXTRA is the number of bytes of padding to push in addition to SIZE.
3265 BELOW nonzero means this padding comes at low addresses;
3266 otherwise, the padding comes at high addresses. */
3269 push_block (rtx size, int extra, int below)
3271 rtx temp;
3273 size = convert_modes (Pmode, ptr_mode, size, 1);
3274 if (CONSTANT_P (size))
3275 anti_adjust_stack (plus_constant (size, extra));
3276 else if (GET_CODE (size) == REG && extra == 0)
3277 anti_adjust_stack (size);
3278 else
3280 temp = copy_to_mode_reg (Pmode, size);
3281 if (extra != 0)
3282 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3283 temp, 0, OPTAB_LIB_WIDEN);
3284 anti_adjust_stack (temp);
3287 #ifndef STACK_GROWS_DOWNWARD
3288 if (0)
3289 #else
3290 if (1)
3291 #endif
3293 temp = virtual_outgoing_args_rtx;
3294 if (extra != 0 && below)
3295 temp = plus_constant (temp, extra);
3297 else
3299 if (GET_CODE (size) == CONST_INT)
3300 temp = plus_constant (virtual_outgoing_args_rtx,
3301 -INTVAL (size) - (below ? 0 : extra));
3302 else if (extra != 0 && !below)
3303 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3304 negate_rtx (Pmode, plus_constant (size, extra)));
3305 else
3306 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3307 negate_rtx (Pmode, size));
3310 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
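/* Editor's note, not part of the original file: a sketch of reserving
   outgoing-argument space with push_block.  The sizes are arbitrary; kept
   under "#if 0" so it is never compiled.  */
#if 0
static rtx
example_push_block (void)
{
  /* Reserve 16 bytes plus 4 bytes of padding at the high-address end
     (BELOW == 0) and return an address for the start of the block.  */
  return push_block (GEN_INT (16), 4, 0);
}
#endif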
3313 #ifdef PUSH_ROUNDING
3315 /* Emit single push insn. */
3317 static void
3318 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3320 rtx dest_addr;
3321 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3322 rtx dest;
3323 enum insn_code icode;
3324 insn_operand_predicate_fn pred;
3326 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3327 /* If there is a push pattern, use it. Otherwise try the old way of
3328 throwing a MEM representing the push operation to the move expander. */
3329 icode = push_optab->handlers[(int) mode].insn_code;
3330 if (icode != CODE_FOR_nothing)
3332 if (((pred = insn_data[(int) icode].operand[0].predicate)
3333 && !((*pred) (x, mode))))
3334 x = force_reg (mode, x);
3335 emit_insn (GEN_FCN (icode) (x));
3336 return;
3338 if (GET_MODE_SIZE (mode) == rounded_size)
3339 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3340 /* If we are to pad downward, adjust the stack pointer first and
3341 then store X into the stack location using an offset. This is
3342 because emit_move_insn does not know how to pad; it does not have
3343 access to type. */
3344 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3346 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3347 HOST_WIDE_INT offset;
3349 emit_move_insn (stack_pointer_rtx,
3350 expand_binop (Pmode,
3351 #ifdef STACK_GROWS_DOWNWARD
3352 sub_optab,
3353 #else
3354 add_optab,
3355 #endif
3356 stack_pointer_rtx,
3357 GEN_INT (rounded_size),
3358 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3360 offset = (HOST_WIDE_INT) padding_size;
3361 #ifdef STACK_GROWS_DOWNWARD
3362 if (STACK_PUSH_CODE == POST_DEC)
3363 /* We have already decremented the stack pointer, so get the
3364 previous value. */
3365 offset += (HOST_WIDE_INT) rounded_size;
3366 #else
3367 if (STACK_PUSH_CODE == POST_INC)
3368 /* We have already incremented the stack pointer, so get the
3369 previous value. */
3370 offset -= (HOST_WIDE_INT) rounded_size;
3371 #endif
3372 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3374 else
3376 #ifdef STACK_GROWS_DOWNWARD
3377 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3378 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3379 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3380 #else
3381 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3382 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3383 GEN_INT (rounded_size));
3384 #endif
3385 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3388 dest = gen_rtx_MEM (mode, dest_addr);
3390 if (type != 0)
3392 set_mem_attributes (dest, type, 1);
3394 if (flag_optimize_sibling_calls)
3395 /* Function incoming arguments may overlap with sibling call
3396 outgoing arguments and we cannot allow reordering of reads
3397 from function arguments with stores to outgoing arguments
3398 of sibling calls. */
3399 set_mem_alias_set (dest, 0);
3401 emit_move_insn (dest, x);
3403 #endif
3405 /* Generate code to push X onto the stack, assuming it has mode MODE and
3406 type TYPE.
3407 MODE is redundant except when X is a CONST_INT (since they don't
3408 carry mode info).
3409 SIZE is an rtx for the size of data to be copied (in bytes),
3410 needed only if X is BLKmode.
3412 ALIGN (in bits) is maximum alignment we can assume.
3414 If PARTIAL and REG are both nonzero, then copy that many of the first
3415 words of X into registers starting with REG, and push the rest of X.
3416 The amount of space pushed is decreased by PARTIAL words,
3417 rounded *down* to a multiple of PARM_BOUNDARY.
3418 REG must be a hard register in this case.
3419 If REG is zero but PARTIAL is not, take all other actions for an
3420 argument partially in registers, but do not actually load any
3421 registers.
3423 EXTRA is the amount in bytes of extra space to leave next to this arg.
3424 This is ignored if an argument block has already been allocated.
3426 On a machine that lacks real push insns, ARGS_ADDR is the address of
3427 the bottom of the argument block for this call. We use indexing off there
3428 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3429 argument block has not been preallocated.
3431 ARGS_SO_FAR is the size of args previously pushed for this call.
3433 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3434 for arguments passed in registers. If nonzero, it will be the number
3435 of bytes required. */
3437 void
3438 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3439 unsigned int align, int partial, rtx reg, int extra,
3440 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3441 rtx alignment_pad)
3443 rtx xinner;
3444 enum direction stack_direction
3445 #ifdef STACK_GROWS_DOWNWARD
3446 = downward;
3447 #else
3448 = upward;
3449 #endif
3451 /* Decide where to pad the argument: `downward' for below,
3452 `upward' for above, or `none' for don't pad it.
3453 Default is below for small data on big-endian machines; else above. */
3454 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3456 /* Invert direction if stack is post-decrement.
3457 FIXME: why? */
3458 if (STACK_PUSH_CODE == POST_DEC)
3459 if (where_pad != none)
3460 where_pad = (where_pad == downward ? upward : downward);
3462 xinner = x = protect_from_queue (x, 0);
3464 if (mode == BLKmode)
3466 /* Copy a block into the stack, entirely or partially. */
3468 rtx temp;
3469 int used = partial * UNITS_PER_WORD;
3470 int offset;
3471 int skip;
3473 if (reg && GET_CODE (reg) == PARALLEL)
3475 /* Use the size of the elt to compute offset. */
3476 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3477 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3478 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3480 else
3481 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3483 if (size == 0)
3484 abort ();
3486 used -= offset;
3488 /* USED is now the # of bytes we need not copy to the stack
3489 because registers will take care of them. */
3491 if (partial != 0)
3492 xinner = adjust_address (xinner, BLKmode, used);
3494 /* If the partial register-part of the arg counts in its stack size,
3495 skip the part of stack space corresponding to the registers.
3496 Otherwise, start copying to the beginning of the stack space,
3497 by setting SKIP to 0. */
3498 skip = (reg_parm_stack_space == 0) ? 0 : used;
3500 #ifdef PUSH_ROUNDING
3501 /* Do it with several push insns if that doesn't take lots of insns
3502 and if there is no difficulty with push insns that skip bytes
3503 on the stack for alignment purposes. */
3504 if (args_addr == 0
3505 && PUSH_ARGS
3506 && GET_CODE (size) == CONST_INT
3507 && skip == 0
3508 && MEM_ALIGN (xinner) >= align
3509 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3510 /* Here we avoid the case of a structure whose weak alignment
3511 forces many pushes of a small amount of data,
3512 and such small pushes do rounding that causes trouble. */
3513 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3514 || align >= BIGGEST_ALIGNMENT
3515 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3516 == (align / BITS_PER_UNIT)))
3517 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3519 /* Push padding now if padding above and stack grows down,
3520 or if padding below and stack grows up.
3521 But if space already allocated, this has already been done. */
3522 if (extra && args_addr == 0
3523 && where_pad != none && where_pad != stack_direction)
3524 anti_adjust_stack (GEN_INT (extra));
3526 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3528 else
3529 #endif /* PUSH_ROUNDING */
3531 rtx target;
3533 /* Otherwise make space on the stack and copy the data
3534 to the address of that space. */
3536 /* Deduct words put into registers from the size we must copy. */
3537 if (partial != 0)
3539 if (GET_CODE (size) == CONST_INT)
3540 size = GEN_INT (INTVAL (size) - used);
3541 else
3542 size = expand_binop (GET_MODE (size), sub_optab, size,
3543 GEN_INT (used), NULL_RTX, 0,
3544 OPTAB_LIB_WIDEN);
3547 /* Get the address of the stack space.
3548 In this case, we do not deal with EXTRA separately.
3549 A single stack adjust will do. */
3550 if (! args_addr)
3552 temp = push_block (size, extra, where_pad == downward);
3553 extra = 0;
3555 else if (GET_CODE (args_so_far) == CONST_INT)
3556 temp = memory_address (BLKmode,
3557 plus_constant (args_addr,
3558 skip + INTVAL (args_so_far)));
3559 else
3560 temp = memory_address (BLKmode,
3561 plus_constant (gen_rtx_PLUS (Pmode,
3562 args_addr,
3563 args_so_far),
3564 skip));
3566 if (!ACCUMULATE_OUTGOING_ARGS)
3568 /* If the source is referenced relative to the stack pointer,
3569 copy it to another register to stabilize it. We do not need
3570 to do this if we know that we won't be changing sp. */
3572 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3573 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3574 temp = copy_to_reg (temp);
3577 target = gen_rtx_MEM (BLKmode, temp);
3579 if (type != 0)
3581 set_mem_attributes (target, type, 1);
3582 /* Function incoming arguments may overlap with sibling call
3583 outgoing arguments and we cannot allow reordering of reads
3584 from function arguments with stores to outgoing arguments
3585 of sibling calls. */
3586 set_mem_alias_set (target, 0);
3589 /* ALIGN may well be better aligned than TYPE, e.g. due to
3590 PARM_BOUNDARY. Assume the caller isn't lying. */
3591 set_mem_align (target, align);
3593 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3596 else if (partial > 0)
3598 /* Scalar partly in registers. */
3600 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3601 int i;
3602 int not_stack;
3603 /* # words of start of argument
3604 that we must make space for but need not store. */
3605 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3606 int args_offset = INTVAL (args_so_far);
3607 int skip;
3609 /* Push padding now if padding above and stack grows down,
3610 or if padding below and stack grows up.
3611 But if space already allocated, this has already been done. */
3612 if (extra && args_addr == 0
3613 && where_pad != none && where_pad != stack_direction)
3614 anti_adjust_stack (GEN_INT (extra));
3616 /* If we make space by pushing it, we might as well push
3617 the real data. Otherwise, we can leave OFFSET nonzero
3618 and leave the space uninitialized. */
3619 if (args_addr == 0)
3620 offset = 0;
3622 /* Now NOT_STACK gets the number of words that we don't need to
3623 allocate on the stack. */
3624 not_stack = partial - offset;
3626 /* If the partial register-part of the arg counts in its stack size,
3627 skip the part of stack space corresponding to the registers.
3628 Otherwise, start copying to the beginning of the stack space,
3629 by setting SKIP to 0. */
3630 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3632 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3633 x = validize_mem (force_const_mem (mode, x));
3635 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3636 SUBREGs of such registers are not allowed. */
3637 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3638 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3639 x = copy_to_reg (x);
3641 /* Loop over all the words allocated on the stack for this arg. */
3642 /* We can do it by words, because any scalar bigger than a word
3643 has a size that is a multiple of a word. */
3644 #ifndef PUSH_ARGS_REVERSED
3645 for (i = not_stack; i < size; i++)
3646 #else
3647 for (i = size - 1; i >= not_stack; i--)
3648 #endif
3649 if (i >= not_stack + offset)
3650 emit_push_insn (operand_subword_force (x, i, mode),
3651 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3652 0, args_addr,
3653 GEN_INT (args_offset + ((i - not_stack + skip)
3654 * UNITS_PER_WORD)),
3655 reg_parm_stack_space, alignment_pad);
3657 else
3659 rtx addr;
3660 rtx dest;
3662 /* Push padding now if padding above and stack grows down,
3663 or if padding below and stack grows up.
3664 But if space already allocated, this has already been done. */
3665 if (extra && args_addr == 0
3666 && where_pad != none && where_pad != stack_direction)
3667 anti_adjust_stack (GEN_INT (extra));
3669 #ifdef PUSH_ROUNDING
3670 if (args_addr == 0 && PUSH_ARGS)
3671 emit_single_push_insn (mode, x, type);
3672 else
3673 #endif
3675 if (GET_CODE (args_so_far) == CONST_INT)
3676 addr
3677 = memory_address (mode,
3678 plus_constant (args_addr,
3679 INTVAL (args_so_far)));
3680 else
3681 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3682 args_so_far));
3683 dest = gen_rtx_MEM (mode, addr);
3684 if (type != 0)
3686 set_mem_attributes (dest, type, 1);
3687 /* Function incoming arguments may overlap with sibling call
3688 outgoing arguments and we cannot allow reordering of reads
3689 from function arguments with stores to outgoing arguments
3690 of sibling calls. */
3691 set_mem_alias_set (dest, 0);
3694 emit_move_insn (dest, x);
3698 /* If part should go in registers, copy that part
3699 into the appropriate registers. Do this now, at the end,
3700 since mem-to-mem copies above may do function calls. */
3701 if (partial > 0 && reg != 0)
3703 /* Handle calls that pass values in multiple non-contiguous locations.
3704 The Irix 6 ABI has examples of this. */
3705 if (GET_CODE (reg) == PARALLEL)
3706 emit_group_load (reg, x, type, -1);
3707 else
3708 move_block_to_reg (REGNO (reg), x, partial, mode);
3711 if (extra && args_addr == 0 && where_pad == stack_direction)
3712 anti_adjust_stack (GEN_INT (extra));
3714 if (alignment_pad && args_addr == 0)
3715 anti_adjust_stack (alignment_pad);
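/* Editor's note, not part of the original file: a heavily hedged sketch of
   pushing one scalar argument with emit_push_insn on a target that has push
   instructions (so ARGS_ADDR is 0 and no argument block was preallocated).
   Every parameter value below is hypothetical.  Kept under "#if 0" so it is
   never compiled.  */
#if 0
static void
example_push_scalar_arg (void)
{
  emit_push_insn (gen_int_mode (42, SImode),	/* X */
                  SImode,			/* MODE */
                  integer_type_node,		/* TYPE */
                  NULL_RTX,			/* SIZE (only for BLKmode) */
                  PARM_BOUNDARY,		/* ALIGN */
                  0,				/* PARTIAL */
                  NULL_RTX,			/* REG */
                  0,				/* EXTRA */
                  NULL_RTX,			/* ARGS_ADDR */
                  const0_rtx,			/* ARGS_SO_FAR */
                  0,				/* REG_PARM_STACK_SPACE */
                  NULL_RTX);			/* ALIGNMENT_PAD */
}
#endif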
3718 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3719 operations. */
3721 static rtx
3722 get_subtarget (rtx x)
3724 return ((x == 0
3725 /* Only registers can be subtargets. */
3726 || GET_CODE (x) != REG
3727 /* If the register is readonly, it can't be set more than once. */
3728 || RTX_UNCHANGING_P (x)
3729 /* Don't use hard regs to avoid extending their life. */
3730 || REGNO (x) < FIRST_PSEUDO_REGISTER
3731 /* Avoid subtargets inside loops,
3732 since they hide some invariant expressions. */
3733 || preserve_subexpressions_p ())
3734 ? 0 : x);
3737 /* Expand an assignment that stores the value of FROM into TO.
3738 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3739 (This may contain a QUEUED rtx;
3740 if the value is constant, this rtx is a constant.)
3741 Otherwise, the returned value is NULL_RTX. */
3743 rtx
3744 expand_assignment (tree to, tree from, int want_value)
3746 rtx to_rtx = 0;
3747 rtx result;
3749 /* Don't crash if the lhs of the assignment was erroneous. */
3751 if (TREE_CODE (to) == ERROR_MARK)
3753 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3754 return want_value ? result : NULL_RTX;
3757 /* Assignment of a structure component needs special treatment
3758 if the structure component's rtx is not simply a MEM.
3759 Assignment of an array element at a constant index, and assignment of
3760 an array element in an unaligned packed structure field, has the same
3761 problem. */
3763 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3764 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3765 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3767 enum machine_mode mode1;
3768 HOST_WIDE_INT bitsize, bitpos;
3769 rtx orig_to_rtx;
3770 tree offset;
3771 int unsignedp;
3772 int volatilep = 0;
3773 tree tem;
3775 push_temp_slots ();
3776 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3777 &unsignedp, &volatilep);
3779 /* If we are going to use store_bit_field and extract_bit_field,
3780 make sure to_rtx will be safe for multiple use. */
3782 if (mode1 == VOIDmode && want_value)
3783 tem = stabilize_reference (tem);
3785 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3787 if (offset != 0)
3789 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3791 if (GET_CODE (to_rtx) != MEM)
3792 abort ();
3794 #ifdef POINTERS_EXTEND_UNSIGNED
3795 if (GET_MODE (offset_rtx) != Pmode)
3796 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3797 #else
3798 if (GET_MODE (offset_rtx) != ptr_mode)
3799 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3800 #endif
3802 /* A constant address in TO_RTX can have VOIDmode, we must not try
3803 to call force_reg for that case. Avoid that case. */
3804 if (GET_CODE (to_rtx) == MEM
3805 && GET_MODE (to_rtx) == BLKmode
3806 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3807 && bitsize > 0
3808 && (bitpos % bitsize) == 0
3809 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3810 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3812 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3813 bitpos = 0;
3816 to_rtx = offset_address (to_rtx, offset_rtx,
3817 highest_pow2_factor_for_target (to,
3818 offset));
3821 if (GET_CODE (to_rtx) == MEM)
3823 /* If the field is at offset zero, we could have been given the
3824 DECL_RTX of the parent struct. Don't munge it. */
3825 to_rtx = shallow_copy_rtx (to_rtx);
3827 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3830 /* Deal with volatile and readonly fields. The former is only done
3831 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3832 if (volatilep && GET_CODE (to_rtx) == MEM)
3834 if (to_rtx == orig_to_rtx)
3835 to_rtx = copy_rtx (to_rtx);
3836 MEM_VOLATILE_P (to_rtx) = 1;
3839 if (TREE_CODE (to) == COMPONENT_REF
3840 && TREE_READONLY (TREE_OPERAND (to, 1))
3841 /* We can't assert that a MEM won't be set more than once
3842 if the component is not addressable because another
3843 non-addressable component may be referenced by the same MEM. */
3844 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3846 if (to_rtx == orig_to_rtx)
3847 to_rtx = copy_rtx (to_rtx);
3848 RTX_UNCHANGING_P (to_rtx) = 1;
3851 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3853 if (to_rtx == orig_to_rtx)
3854 to_rtx = copy_rtx (to_rtx);
3855 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3858 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3859 (want_value
3860 /* Spurious cast for HPUX compiler. */
3861 ? ((enum machine_mode)
3862 TYPE_MODE (TREE_TYPE (to)))
3863 : VOIDmode),
3864 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3866 preserve_temp_slots (result);
3867 free_temp_slots ();
3868 pop_temp_slots ();
3870 /* If the value is meaningful, convert RESULT to the proper mode.
3871 Otherwise, return nothing. */
3872 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3873 TYPE_MODE (TREE_TYPE (from)),
3874 result,
3875 TYPE_UNSIGNED (TREE_TYPE (to)))
3876 : NULL_RTX);
3879 /* If the rhs is a function call and its value is not an aggregate,
3880 call the function before we start to compute the lhs.
3881 This is needed for correct code for cases such as
3882 val = setjmp (buf) on machines where reference to val
3883 requires loading up part of an address in a separate insn.
3885 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3886 since it might be a promoted variable where the zero- or sign- extension
3887 needs to be done. Handling this in the normal way is safe because no
3888 computation is done before the call. */
3889 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3890 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3891 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3892 && GET_CODE (DECL_RTL (to)) == REG))
3894 rtx value;
3896 push_temp_slots ();
3897 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3898 if (to_rtx == 0)
3899 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3901 /* Handle calls that return values in multiple non-contiguous locations.
3902 The Irix 6 ABI has examples of this. */
3903 if (GET_CODE (to_rtx) == PARALLEL)
3904 emit_group_load (to_rtx, value, TREE_TYPE (from),
3905 int_size_in_bytes (TREE_TYPE (from)));
3906 else if (GET_MODE (to_rtx) == BLKmode)
3907 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3908 else
3910 if (POINTER_TYPE_P (TREE_TYPE (to)))
3911 value = convert_memory_address (GET_MODE (to_rtx), value);
3912 emit_move_insn (to_rtx, value);
3914 preserve_temp_slots (to_rtx);
3915 free_temp_slots ();
3916 pop_temp_slots ();
3917 return want_value ? to_rtx : NULL_RTX;
3920 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3921 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3923 if (to_rtx == 0)
3924 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3926 /* Don't move directly into a return register. */
3927 if (TREE_CODE (to) == RESULT_DECL
3928 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3930 rtx temp;
3932 push_temp_slots ();
3933 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3935 if (GET_CODE (to_rtx) == PARALLEL)
3936 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3937 int_size_in_bytes (TREE_TYPE (from)));
3938 else
3939 emit_move_insn (to_rtx, temp);
3941 preserve_temp_slots (to_rtx);
3942 free_temp_slots ();
3943 pop_temp_slots ();
3944 return want_value ? to_rtx : NULL_RTX;
3947 /* In case we are returning the contents of an object which overlaps
3948 the place the value is being stored, use a safe function when copying
3949 a value through a pointer into a structure value return block. */
3950 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3951 && current_function_returns_struct
3952 && !current_function_returns_pcc_struct)
3954 rtx from_rtx, size;
3956 push_temp_slots ();
3957 size = expr_size (from);
3958 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3960 if (TARGET_MEM_FUNCTIONS)
3961 emit_library_call (memmove_libfunc, LCT_NORMAL,
3962 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3963 XEXP (from_rtx, 0), Pmode,
3964 convert_to_mode (TYPE_MODE (sizetype),
3965 size, TYPE_UNSIGNED (sizetype)),
3966 TYPE_MODE (sizetype));
3967 else
3968 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3969 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3970 XEXP (to_rtx, 0), Pmode,
3971 convert_to_mode (TYPE_MODE (integer_type_node),
3972 size,
3973 TYPE_UNSIGNED (integer_type_node)),
3974 TYPE_MODE (integer_type_node));
3976 preserve_temp_slots (to_rtx);
3977 free_temp_slots ();
3978 pop_temp_slots ();
3979 return want_value ? to_rtx : NULL_RTX;
3982 /* Compute FROM and store the value in the rtx we got. */
3984 push_temp_slots ();
3985 result = store_expr (from, to_rtx, want_value);
3986 preserve_temp_slots (result);
3987 free_temp_slots ();
3988 pop_temp_slots ();
3989 return want_value ? result : NULL_RTX;
3992 /* Generate code for computing expression EXP,
3993 and storing the value into TARGET.
3994 TARGET may contain a QUEUED rtx.
3996 If WANT_VALUE & 1 is nonzero, return a copy of the value
3997 not in TARGET, so that we can be sure to use the proper
3998 value in a containing expression even if TARGET has something
3999 else stored in it. If possible, we copy the value through a pseudo
4000 and return that pseudo. Or, if the value is constant, we try to
4001 return the constant. In some cases, we return a pseudo
4002 copied *from* TARGET.
4004 If the mode is BLKmode then we may return TARGET itself.
4005 It turns out that in BLKmode it doesn't cause a problem,
4006 because C has no operators that could combine two different
4007 assignments into the same BLKmode object with different values
4008 with no sequence point. Will other languages need this to
4009 be more thorough?
4011 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4012 to catch quickly any cases where the caller uses the value
4013 and fails to set WANT_VALUE.
4015 If WANT_VALUE & 2 is set, this is a store into a call param on the
4016 stack, and block moves may need to be treated specially. */
4018 rtx
4019 store_expr (tree exp, rtx target, int want_value)
4021 rtx temp;
4022 rtx alt_rtl = NULL_RTX;
4023 rtx mark = mark_queue ();
4024 int dont_return_target = 0;
4025 int dont_store_target = 0;
4027 if (VOID_TYPE_P (TREE_TYPE (exp)))
4029 /* C++ can generate ?: expressions with a throw expression in one
4030 branch and an rvalue in the other. Here, we resolve attempts to
4031 store the throw expression's nonexistent result. */
4032 if (want_value)
4033 abort ();
4034 expand_expr (exp, const0_rtx, VOIDmode, 0);
4035 return NULL_RTX;
4037 if (TREE_CODE (exp) == COMPOUND_EXPR)
4039 /* Perform first part of compound expression, then assign from second
4040 part. */
4041 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4042 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4043 emit_queue ();
4044 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4046 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4048 /* For conditional expression, get safe form of the target. Then
4049 test the condition, doing the appropriate assignment on either
4050 side. This avoids the creation of unnecessary temporaries.
4051 For non-BLKmode, it is more efficient not to do this. */
4053 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4055 emit_queue ();
4056 target = protect_from_queue (target, 1);
4058 do_pending_stack_adjust ();
4059 NO_DEFER_POP;
4060 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4061 start_cleanup_deferral ();
4062 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4063 end_cleanup_deferral ();
4064 emit_queue ();
4065 emit_jump_insn (gen_jump (lab2));
4066 emit_barrier ();
4067 emit_label (lab1);
4068 start_cleanup_deferral ();
4069 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4070 end_cleanup_deferral ();
4071 emit_queue ();
4072 emit_label (lab2);
4073 OK_DEFER_POP;
4075 return want_value & 1 ? target : NULL_RTX;
4077 else if (queued_subexp_p (target))
4078 /* If target contains a postincrement, let's not risk
4079 using it as the place to generate the rhs. */
4081 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4083 /* Expand EXP into a new pseudo. */
4084 temp = gen_reg_rtx (GET_MODE (target));
4085 temp = expand_expr (exp, temp, GET_MODE (target),
4086 (want_value & 2
4087 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4089 else
4090 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4091 (want_value & 2
4092 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4094 /* If target is volatile, ANSI requires accessing the value
4095 *from* the target, if it is accessed. So make that happen.
4096 In no case return the target itself. */
4097 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4098 dont_return_target = 1;
4100 else if ((want_value & 1) != 0
4101 && GET_CODE (target) == MEM
4102 && ! MEM_VOLATILE_P (target)
4103 && GET_MODE (target) != BLKmode)
4104 /* If target is in memory and caller wants value in a register instead,
4105 arrange that. Pass TARGET as target for expand_expr so that,
4106 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4107 We know expand_expr will not use the target in that case.
4108 Don't do this if TARGET is volatile because we are supposed
4109 to write it and then read it. */
4111 temp = expand_expr (exp, target, GET_MODE (target),
4112 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4113 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4115 /* If TEMP is already in the desired TARGET, only copy it from
4116 memory and don't store it there again. */
4117 if (temp == target
4118 || (rtx_equal_p (temp, target)
4119 && ! side_effects_p (temp) && ! side_effects_p (target)))
4120 dont_store_target = 1;
4121 temp = copy_to_reg (temp);
4123 dont_return_target = 1;
4125 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4126 /* If this is a scalar in a register that is stored in a wider mode
4127 than the declared mode, compute the result into its declared mode
4128 and then convert to the wider mode. Our value is the computed
4129 expression. */
4131 rtx inner_target = 0;
4133 /* If we don't want a value, we can do the conversion inside EXP,
4134 which will often result in some optimizations. Do the conversion
4135 in two steps: first change the signedness, if needed, then
4136 the extend. But don't do this if the type of EXP is a subtype
4137 of something else since then the conversion might involve
4138 more than just converting modes. */
4139 if ((want_value & 1) == 0
4140 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4141 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4143 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4144 != SUBREG_PROMOTED_UNSIGNED_P (target))
4145 exp = convert
4146 (lang_hooks.types.signed_or_unsigned_type
4147 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4149 exp = convert (lang_hooks.types.type_for_mode
4150 (GET_MODE (SUBREG_REG (target)),
4151 SUBREG_PROMOTED_UNSIGNED_P (target)),
4152 exp);
4154 inner_target = SUBREG_REG (target);
4157 temp = expand_expr (exp, inner_target, VOIDmode,
4158 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4160 /* If TEMP is a MEM and we want a result value, make the access
4161 now so it gets done only once. Strictly speaking, this is
4162 only necessary if the MEM is volatile, or if the address
4163 overlaps TARGET. But not performing the load twice also
4164 reduces the amount of rtl we generate and then have to CSE. */
4165 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4166 temp = copy_to_reg (temp);
4168 /* If TEMP is a VOIDmode constant, use convert_modes to make
4169 sure that we properly convert it. */
4170 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4172 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4173 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4174 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4175 GET_MODE (target), temp,
4176 SUBREG_PROMOTED_UNSIGNED_P (target));
4179 convert_move (SUBREG_REG (target), temp,
4180 SUBREG_PROMOTED_UNSIGNED_P (target));
4182 /* If we promoted a constant, change the mode back down to match
4183 target. Otherwise, the caller might get confused by a result whose
4184 mode is larger than expected. */
4186 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4188 if (GET_MODE (temp) != VOIDmode)
4190 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4191 SUBREG_PROMOTED_VAR_P (temp) = 1;
4192 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4193 SUBREG_PROMOTED_UNSIGNED_P (target));
4195 else
4196 temp = convert_modes (GET_MODE (target),
4197 GET_MODE (SUBREG_REG (target)),
4198 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4201 return want_value & 1 ? temp : NULL_RTX;
4203 else
4205 temp = expand_expr_real (exp, target, GET_MODE (target),
4206 (want_value & 2
4207 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4208 &alt_rtl);
4209 /* Return TARGET if it's a specified hardware register.
4210 If TARGET is a volatile mem ref, either return TARGET
4211 or return a reg copied *from* TARGET; ANSI requires this.
4213 Otherwise, if TEMP is not TARGET, return TEMP
4214 if it is constant (for efficiency),
4215 or if we really want the correct value. */
4216 if (!(target && GET_CODE (target) == REG
4217 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4218 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4219 && ! rtx_equal_p (temp, target)
4220 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4221 dont_return_target = 1;
4224 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4225 the same as that of TARGET, adjust the constant. This is needed, for
4226 example, in case it is a CONST_DOUBLE and we want only a word-sized
4227 value. */
4228 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4229 && TREE_CODE (exp) != ERROR_MARK
4230 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4231 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4232 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4234 /* If value was not generated in the target, store it there.
4235 Convert the value to TARGET's type first if necessary and emit the
4236 pending incrementations that have been queued when expanding EXP.
4237 Note that we cannot emit the whole queue blindly because this will
4238 effectively disable the POST_INC optimization later.
4240 If TEMP and TARGET compare equal according to rtx_equal_p, but
4241 one or both of them are volatile memory refs, we have to distinguish
4242 two cases:
4243 - expand_expr has used TARGET. In this case, we must not generate
4244 another copy. This can be detected by TARGET being equal according
4245 to == .
4246 - expand_expr has not used TARGET - that means that the source just
4247 happens to have the same RTX form. Since temp will have been created
4248 by expand_expr, it will compare unequal according to == .
4249 We must generate a copy in this case, to reach the correct number
4250 of volatile memory references. */
4252 if ((! rtx_equal_p (temp, target)
4253 || (temp != target && (side_effects_p (temp)
4254 || side_effects_p (target))))
4255 && TREE_CODE (exp) != ERROR_MARK
4256 && ! dont_store_target
4257 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4258 but TARGET is not valid memory reference, TEMP will differ
4259 from TARGET although it is really the same location. */
4260 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4261 /* If there's nothing to copy, don't bother. Don't call expr_size
4262 unless necessary, because some front ends' (C++) expr_size hook
4263 aborts on objects that are not supposed to be bit-copied or
4264 bit-initialized. */
4265 && expr_size (exp) != const0_rtx)
4267 emit_insns_enqueued_after_mark (mark);
4268 target = protect_from_queue (target, 1);
4269 temp = protect_from_queue (temp, 0);
4270 if (GET_MODE (temp) != GET_MODE (target)
4271 && GET_MODE (temp) != VOIDmode)
4273 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4274 if (dont_return_target)
4276 /* In this case, we will return TEMP,
4277 so make sure it has the proper mode.
4278 But don't forget to store the value into TARGET. */
4279 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4280 emit_move_insn (target, temp);
4282 else
4283 convert_move (target, temp, unsignedp);
4286 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4288 /* Handle copying a string constant into an array. The string
4289 constant may be shorter than the array. So copy just the string's
4290 actual length, and clear the rest. First get the size of the data
4291 type of the string, which is actually the size of the target. */
4292 rtx size = expr_size (exp);
4294 if (GET_CODE (size) == CONST_INT
4295 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4296 emit_block_move (target, temp, size,
4297 (want_value & 2
4298 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4299 else
4301 /* Compute the size of the data to copy from the string. */
4302 tree copy_size
4303 = size_binop (MIN_EXPR,
4304 make_tree (sizetype, size),
4305 size_int (TREE_STRING_LENGTH (exp)));
4306 rtx copy_size_rtx
4307 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4308 (want_value & 2
4309 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4310 rtx label = 0;
4312 /* Copy that much. */
4313 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4314 TYPE_UNSIGNED (sizetype));
4315 emit_block_move (target, temp, copy_size_rtx,
4316 (want_value & 2
4317 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4319 /* Figure out how much is left in TARGET that we have to clear.
4320 Do all calculations in ptr_mode. */
4321 if (GET_CODE (copy_size_rtx) == CONST_INT)
4323 size = plus_constant (size, -INTVAL (copy_size_rtx));
4324 target = adjust_address (target, BLKmode,
4325 INTVAL (copy_size_rtx));
4327 else
4329 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4330 copy_size_rtx, NULL_RTX, 0,
4331 OPTAB_LIB_WIDEN);
4333 #ifdef POINTERS_EXTEND_UNSIGNED
4334 if (GET_MODE (copy_size_rtx) != Pmode)
4335 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4336 TYPE_UNSIGNED (sizetype));
4337 #endif
4339 target = offset_address (target, copy_size_rtx,
4340 highest_pow2_factor (copy_size));
4341 label = gen_label_rtx ();
4342 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4343 GET_MODE (size), 0, label);
4346 if (size != const0_rtx)
4347 clear_storage (target, size);
4349 if (label)
4350 emit_label (label);
4353 /* Handle calls that return values in multiple non-contiguous locations.
4354 The Irix 6 ABI has examples of this. */
4355 else if (GET_CODE (target) == PARALLEL)
4356 emit_group_load (target, temp, TREE_TYPE (exp),
4357 int_size_in_bytes (TREE_TYPE (exp)));
4358 else if (GET_MODE (temp) == BLKmode)
4359 emit_block_move (target, temp, expr_size (exp),
4360 (want_value & 2
4361 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4362 else
4364 temp = force_operand (temp, target);
4365 if (temp != target)
4366 emit_move_insn (target, temp);
4370 /* If we don't want a value, return NULL_RTX. */
4371 if ((want_value & 1) == 0)
4372 return NULL_RTX;
4374 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4375 ??? The latter test doesn't seem to make sense. */
4376 else if (dont_return_target && GET_CODE (temp) != MEM)
4377 return temp;
4379 /* Return TARGET itself if it is a hard register. */
4380 else if ((want_value & 1) != 0
4381 && GET_MODE (target) != BLKmode
4382 && ! (GET_CODE (target) == REG
4383 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4384 return copy_to_reg (target);
4386 else
4387 return target;
4390 /* Examine CTOR.  Count how many scalar fields are set to nonzero
4391 values and store that count in *P_NZ_ELTS.  Count how many scalar fields
4392 are set to non-constant values and store that count in *P_NC_ELTS.  */
4394 static void
4395 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4396 HOST_WIDE_INT *p_nc_elts)
4398 HOST_WIDE_INT nz_elts, nc_elts;
4399 tree list;
4401 nz_elts = 0;
4402 nc_elts = 0;
4404 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4406 tree value = TREE_VALUE (list);
4407 tree purpose = TREE_PURPOSE (list);
4408 HOST_WIDE_INT mult;
4410 mult = 1;
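/* A RANGE_EXPR index initializes a run of consecutive elements; when
both bounds are known constants, weight this element's counts by the
length of the range.  */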
4411 if (TREE_CODE (purpose) == RANGE_EXPR)
4413 tree lo_index = TREE_OPERAND (purpose, 0);
4414 tree hi_index = TREE_OPERAND (purpose, 1);
4416 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4417 mult = (tree_low_cst (hi_index, 1)
4418 - tree_low_cst (lo_index, 1) + 1);
4421 switch (TREE_CODE (value))
4423 case CONSTRUCTOR:
4425 HOST_WIDE_INT nz = 0, nc = 0;
4426 categorize_ctor_elements_1 (value, &nz, &nc);
4427 nz_elts += mult * nz;
4428 nc_elts += mult * nc;
4430 break;
4432 case INTEGER_CST:
4433 case REAL_CST:
4434 if (!initializer_zerop (value))
4435 nz_elts += mult;
4436 break;
4437 case COMPLEX_CST:
4438 if (!initializer_zerop (TREE_REALPART (value)))
4439 nz_elts += mult;
4440 if (!initializer_zerop (TREE_IMAGPART (value)))
4441 nz_elts += mult;
4442 break;
4443 case VECTOR_CST:
4445 tree v;
4446 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4447 if (!initializer_zerop (TREE_VALUE (v)))
4448 nz_elts += mult;
4450 break;
4452 default:
4453 nz_elts += mult;
4454 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4455 nc_elts += mult;
4456 break;
4460 *p_nz_elts += nz_elts;
4461 *p_nc_elts += nc_elts;
4464 void
4465 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4466 HOST_WIDE_INT *p_nc_elts)
4468 *p_nz_elts = 0;
4469 *p_nc_elts = 0;
4470 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4473 /* Count the number of scalars in TYPE. Return -1 on overflow or
4474 if TYPE is variable-sized.  */
4476 HOST_WIDE_INT
4477 count_type_elements (tree type)
4479 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
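/* MAX is the largest positive HOST_WIDE_INT; the ARRAY_TYPE case below
uses it to make sure N * M does not overflow before multiplying.  */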
4480 switch (TREE_CODE (type))
4482 case ARRAY_TYPE:
4484 tree telts = array_type_nelts (type);
4485 if (telts && host_integerp (telts, 1))
4487 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4488 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4489 if (n == 0)
4490 return 0;
4491 if (max / n > m)
4492 return n * m;
4494 return -1;
4497 case RECORD_TYPE:
4499 HOST_WIDE_INT n = 0, t;
4500 tree f;
4502 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4503 if (TREE_CODE (f) == FIELD_DECL)
4505 t = count_type_elements (TREE_TYPE (f));
4506 if (t < 0)
4507 return -1;
4508 n += t;
4511 return n;
4514 case UNION_TYPE:
4515 case QUAL_UNION_TYPE:
4517 /* Ho hum. How in the world do we guess here? Clearly it isn't
4518 right to count the fields. Guess based on the number of words. */
4519 HOST_WIDE_INT n = int_size_in_bytes (type);
4520 if (n < 0)
4521 return -1;
4522 return n / UNITS_PER_WORD;
4525 case COMPLEX_TYPE:
4526 return 2;
4528 case VECTOR_TYPE:
4529 /* ??? This is broken.  We should encode the vector width in the tree.  */
4530 return GET_MODE_NUNITS (TYPE_MODE (type));
4532 case INTEGER_TYPE:
4533 case REAL_TYPE:
4534 case ENUMERAL_TYPE:
4535 case BOOLEAN_TYPE:
4536 case CHAR_TYPE:
4537 case POINTER_TYPE:
4538 case OFFSET_TYPE:
4539 case REFERENCE_TYPE:
4540 return 1;
4542 case VOID_TYPE:
4543 case METHOD_TYPE:
4544 case FILE_TYPE:
4545 case SET_TYPE:
4546 case FUNCTION_TYPE:
4547 case LANG_TYPE:
4548 default:
4549 abort ();
4553 /* Return 1 if EXP contains mostly (3/4) zeros. */
4555 static int
4556 mostly_zeros_p (tree exp)
4558 if (TREE_CODE (exp) == CONSTRUCTOR)
4561 HOST_WIDE_INT nz_elts, nc_elts, elts;
4563 /* If there are no ranges of true bits, it is all zero. */
4564 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4565 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4567 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4568 elts = count_type_elements (TREE_TYPE (exp));
4570 return nz_elts < elts / 4;
4573 return initializer_zerop (exp);
4576 /* Helper function for store_constructor.
4577 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4578 TYPE is the type of the CONSTRUCTOR, not the element type.
4579 CLEARED is as for store_constructor.
4580 ALIAS_SET is the alias set to use for any stores.
4582 This provides a recursive shortcut back to store_constructor when it isn't
4583 necessary to go through store_field. This is so that we can pass through
4584 the cleared field to let store_constructor know that we may not have to
4585 clear a substructure if the outer structure has already been cleared. */
4587 static void
4588 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4589 HOST_WIDE_INT bitpos, enum machine_mode mode,
4590 tree exp, tree type, int cleared, int alias_set)
4592 if (TREE_CODE (exp) == CONSTRUCTOR
4593 /* We can only call store_constructor recursively if the size and
4594 bit position are on a byte boundary. */
4595 && bitpos % BITS_PER_UNIT == 0
4596 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4597 /* If we have a nonzero bitpos for a register target, then we just
4598 let store_field do the bitfield handling. This is unlikely to
4599 generate unnecessary clear instructions anyways. */
4600 && (bitpos == 0 || GET_CODE (target) == MEM))
4602 if (GET_CODE (target) == MEM)
4603 target
4604 = adjust_address (target,
4605 GET_MODE (target) == BLKmode
4606 || 0 != (bitpos
4607 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4608 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4611 /* Update the alias set, if required. */
4612 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4613 && MEM_ALIAS_SET (target) != 0)
4615 target = copy_rtx (target);
4616 set_mem_alias_set (target, alias_set);
4619 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4621 else
4622 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4623 alias_set);
4626 /* Store the value of constructor EXP into the rtx TARGET.
4627 TARGET is either a REG or a MEM; we know it cannot conflict, since
4628 safe_from_p has been called.
4629 CLEARED is true if TARGET is known to have been zero'd.
4630 SIZE is the number of bytes of TARGET we are allowed to modify: this
4631 may not be the same as the size of EXP if we are assigning to a field
4632 which has been packed to exclude padding bits. */
4634 static void
4635 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4637 tree type = TREE_TYPE (exp);
4638 #ifdef WORD_REGISTER_OPERATIONS
4639 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4640 #endif
4642 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4643 || TREE_CODE (type) == QUAL_UNION_TYPE)
4645 tree elt;
4647 /* If size is zero or the target is already cleared, do nothing. */
4648 if (size == 0 || cleared)
4649 cleared = 1;
4650 /* We either clear the aggregate or indicate the value is dead. */
4651 else if ((TREE_CODE (type) == UNION_TYPE
4652 || TREE_CODE (type) == QUAL_UNION_TYPE)
4653 && ! CONSTRUCTOR_ELTS (exp))
4654 /* If the constructor is empty, clear the union. */
4656 clear_storage (target, expr_size (exp));
4657 cleared = 1;
4660 /* If we are building a static constructor into a register,
4661 set the initial value as zero so we can fold the value into
4662 a constant. But if more than one register is involved,
4663 this probably loses. */
4664 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4665 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4667 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4668 cleared = 1;
4671 /* If the constructor has fewer fields than the structure
4672 or if we are initializing the structure to mostly zeros,
4673 clear the whole structure first. Don't do this if TARGET is a
4674 register whose mode size isn't equal to SIZE since clear_storage
4675 can't handle this case. */
4676 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4677 || mostly_zeros_p (exp))
4678 && (GET_CODE (target) != REG
4679 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4680 == size)))
4682 rtx xtarget = target;
4684 if (readonly_fields_p (type))
4686 xtarget = copy_rtx (xtarget);
4687 RTX_UNCHANGING_P (xtarget) = 1;
4690 clear_storage (xtarget, GEN_INT (size));
4691 cleared = 1;
4694 if (! cleared)
4695 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4697 /* Store each element of the constructor into
4698 the corresponding field of TARGET. */
4700 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4702 tree field = TREE_PURPOSE (elt);
4703 tree value = TREE_VALUE (elt);
4704 enum machine_mode mode;
4705 HOST_WIDE_INT bitsize;
4706 HOST_WIDE_INT bitpos = 0;
4707 tree offset;
4708 rtx to_rtx = target;
4710 /* Just ignore missing fields.
4711 We cleared the whole structure, above,
4712 if any fields are missing. */
4713 if (field == 0)
4714 continue;
4716 if (cleared && initializer_zerop (value))
4717 continue;
4719 if (host_integerp (DECL_SIZE (field), 1))
4720 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4721 else
4722 bitsize = -1;
4724 mode = DECL_MODE (field);
4725 if (DECL_BIT_FIELD (field))
4726 mode = VOIDmode;
4728 offset = DECL_FIELD_OFFSET (field);
4729 if (host_integerp (offset, 0)
4730 && host_integerp (bit_position (field), 0))
4732 bitpos = int_bit_position (field);
4733 offset = 0;
4735 else
4736 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4738 if (offset)
4740 rtx offset_rtx;
4742 offset
4743 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4744 make_tree (TREE_TYPE (exp),
4745 target));
4747 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4748 if (GET_CODE (to_rtx) != MEM)
4749 abort ();
4751 #ifdef POINTERS_EXTEND_UNSIGNED
4752 if (GET_MODE (offset_rtx) != Pmode)
4753 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4754 #else
4755 if (GET_MODE (offset_rtx) != ptr_mode)
4756 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4757 #endif
4759 to_rtx = offset_address (to_rtx, offset_rtx,
4760 highest_pow2_factor (offset));
4763 if (TREE_READONLY (field))
4765 if (GET_CODE (to_rtx) == MEM)
4766 to_rtx = copy_rtx (to_rtx);
4768 RTX_UNCHANGING_P (to_rtx) = 1;
4771 #ifdef WORD_REGISTER_OPERATIONS
4772 /* If this initializes a field that is smaller than a word, at the
4773 start of a word, try to widen it to a full word.
4774 This special case allows us to output C++ member function
4775 initializations in a form that the optimizers can understand. */
4776 if (GET_CODE (target) == REG
4777 && bitsize < BITS_PER_WORD
4778 && bitpos % BITS_PER_WORD == 0
4779 && GET_MODE_CLASS (mode) == MODE_INT
4780 && TREE_CODE (value) == INTEGER_CST
4781 && exp_size >= 0
4782 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4784 tree type = TREE_TYPE (value);
4786 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4788 type = lang_hooks.types.type_for_size
4789 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4790 value = convert (type, value);
4793 if (BYTES_BIG_ENDIAN)
4794 value
4795 = fold (build (LSHIFT_EXPR, type, value,
4796 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4797 bitsize = BITS_PER_WORD;
4798 mode = word_mode;
4800 #endif
4802 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4803 && DECL_NONADDRESSABLE_P (field))
4805 to_rtx = copy_rtx (to_rtx);
4806 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4809 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4810 value, type, cleared,
4811 get_alias_set (TREE_TYPE (field)));
4814 else if (TREE_CODE (type) == ARRAY_TYPE
4815 || TREE_CODE (type) == VECTOR_TYPE)
4817 tree elt;
4818 int i;
4819 int need_to_clear;
4820 tree domain;
4821 tree elttype = TREE_TYPE (type);
4822 int const_bounds_p;
4823 HOST_WIDE_INT minelt = 0;
4824 HOST_WIDE_INT maxelt = 0;
4825 int icode = 0;
4826 rtx *vector = NULL;
4827 int elt_size = 0;
4828 unsigned n_elts = 0;
4830 if (TREE_CODE (type) == ARRAY_TYPE)
4831 domain = TYPE_DOMAIN (type);
4832 else
4833 /* Vectors do not have domains; look up the domain of
4834 the array embedded in the debug representation type.
4835 FIXME Would probably be more efficient to treat vectors
4836 separately from arrays. */
4838 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4839 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4840 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4842 enum machine_mode mode = GET_MODE (target);
4844 icode = (int) vec_init_optab->handlers[mode].insn_code;
4845 if (icode != CODE_FOR_nothing)
4847 unsigned int i;
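/* Collect an rtx for every vector element in VECTOR, defaulting each
one to zero; a single vec_init insn is emitted from this array once
all the constructor elements have been expanded (see the GEN_FCN
call near the end of this case).  */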
4849 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4850 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4851 vector = alloca (n_elts * sizeof (rtx));
4852 for (i = 0; i < n_elts; i++)
4853 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4858 const_bounds_p = (TYPE_MIN_VALUE (domain)
4859 && TYPE_MAX_VALUE (domain)
4860 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4861 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4863 /* If we have constant bounds for the range of the type, get them. */
4864 if (const_bounds_p)
4866 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4867 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4870 /* If the constructor has fewer elements than the array,
4871 clear the whole array first. Similarly if this is
4872 static constructor of a non-BLKmode object. */
4873 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4874 need_to_clear = 1;
4875 else
4877 HOST_WIDE_INT count = 0, zero_count = 0;
4878 need_to_clear = ! const_bounds_p;
4880 /* This loop is a more accurate version of the loop in
4881 mostly_zeros_p (it handles RANGE_EXPR in an index).
4882 It is also needed to check for missing elements. */
4883 for (elt = CONSTRUCTOR_ELTS (exp);
4884 elt != NULL_TREE && ! need_to_clear;
4885 elt = TREE_CHAIN (elt))
4887 tree index = TREE_PURPOSE (elt);
4888 HOST_WIDE_INT this_node_count;
4890 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4892 tree lo_index = TREE_OPERAND (index, 0);
4893 tree hi_index = TREE_OPERAND (index, 1);
4895 if (! host_integerp (lo_index, 1)
4896 || ! host_integerp (hi_index, 1))
4898 need_to_clear = 1;
4899 break;
4902 this_node_count = (tree_low_cst (hi_index, 1)
4903 - tree_low_cst (lo_index, 1) + 1);
4905 else
4906 this_node_count = 1;
4908 count += this_node_count;
4909 if (mostly_zeros_p (TREE_VALUE (elt)))
4910 zero_count += this_node_count;
4913 /* Clear the entire array first if there are any missing elements,
4914 or if the incidence of zero elements is >= 75%. */
4915 if (! need_to_clear
4916 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4917 need_to_clear = 1;
4920 if (need_to_clear && size > 0 && !vector)
4922 if (! cleared)
4924 if (REG_P (target))
4925 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4926 else
4927 clear_storage (target, GEN_INT (size));
4929 cleared = 1;
4931 else if (REG_P (target))
4932 /* Inform later passes that the old value is dead. */
4933 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4935 /* Store each element of the constructor into
4936 the corresponding element of TARGET, determined
4937 by counting the elements. */
4938 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4939 elt;
4940 elt = TREE_CHAIN (elt), i++)
4942 enum machine_mode mode;
4943 HOST_WIDE_INT bitsize;
4944 HOST_WIDE_INT bitpos;
4945 int unsignedp;
4946 tree value = TREE_VALUE (elt);
4947 tree index = TREE_PURPOSE (elt);
4948 rtx xtarget = target;
4950 if (cleared && initializer_zerop (value))
4951 continue;
4953 unsignedp = TYPE_UNSIGNED (elttype);
4954 mode = TYPE_MODE (elttype);
4955 if (mode == BLKmode)
4956 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4957 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4958 : -1);
4959 else
4960 bitsize = GET_MODE_BITSIZE (mode);
4962 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4964 tree lo_index = TREE_OPERAND (index, 0);
4965 tree hi_index = TREE_OPERAND (index, 1);
4966 rtx index_r, pos_rtx, loop_end;
4967 struct nesting *loop;
4968 HOST_WIDE_INT lo, hi, count;
4969 tree position;
4971 if (vector)
4972 abort ();
4974 /* If the range is constant and "small", unroll the loop. */
4975 if (const_bounds_p
4976 && host_integerp (lo_index, 0)
4977 && host_integerp (hi_index, 0)
4978 && (lo = tree_low_cst (lo_index, 0),
4979 hi = tree_low_cst (hi_index, 0),
4980 count = hi - lo + 1,
4981 (GET_CODE (target) != MEM
4982 || count <= 2
4983 || (host_integerp (TYPE_SIZE (elttype), 1)
4984 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4985 <= 40 * 8)))))
4987 lo -= minelt; hi -= minelt;
4988 for (; lo <= hi; lo++)
4990 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4992 if (GET_CODE (target) == MEM
4993 && !MEM_KEEP_ALIAS_SET_P (target)
4994 && TREE_CODE (type) == ARRAY_TYPE
4995 && TYPE_NONALIASED_COMPONENT (type))
4997 target = copy_rtx (target);
4998 MEM_KEEP_ALIAS_SET_P (target) = 1;
5001 store_constructor_field
5002 (target, bitsize, bitpos, mode, value, type, cleared,
5003 get_alias_set (elttype));
5006 else
5008 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5009 loop_end = gen_label_rtx ();
5011 unsignedp = TYPE_UNSIGNED (domain);
5013 index = build_decl (VAR_DECL, NULL_TREE, domain);
5015 index_r
5016 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5017 &unsignedp, 0));
5018 SET_DECL_RTL (index, index_r);
5019 if (TREE_CODE (value) == SAVE_EXPR
5020 && SAVE_EXPR_RTL (value) == 0)
5022 /* Make sure value gets expanded once before the
5023 loop. */
5024 expand_expr (value, const0_rtx, VOIDmode, 0);
5025 emit_queue ();
5027 store_expr (lo_index, index_r, 0);
5028 loop = expand_start_loop (0);
5030 /* Assign value to element index. */
5031 position
5032 = convert (ssizetype,
5033 fold (build (MINUS_EXPR, TREE_TYPE (index),
5034 index, TYPE_MIN_VALUE (domain))));
5035 position = size_binop (MULT_EXPR, position,
5036 convert (ssizetype,
5037 TYPE_SIZE_UNIT (elttype)));
5039 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5040 xtarget = offset_address (target, pos_rtx,
5041 highest_pow2_factor (position));
5042 xtarget = adjust_address (xtarget, mode, 0);
5043 if (TREE_CODE (value) == CONSTRUCTOR)
5044 store_constructor (value, xtarget, cleared,
5045 bitsize / BITS_PER_UNIT);
5046 else
5047 store_expr (value, xtarget, 0);
5049 expand_exit_loop_if_false (loop,
5050 build (LT_EXPR, integer_type_node,
5051 index, hi_index));
5053 expand_increment (build (PREINCREMENT_EXPR,
5054 TREE_TYPE (index),
5055 index, integer_one_node), 0, 0);
5056 expand_end_loop ();
5057 emit_label (loop_end);
5060 else if ((index != 0 && ! host_integerp (index, 0))
5061 || ! host_integerp (TYPE_SIZE (elttype), 1))
5063 tree position;
5065 if (vector)
5066 abort ();
5068 if (index == 0)
5069 index = ssize_int (1);
5071 if (minelt)
5072 index = convert (ssizetype,
5073 fold (build (MINUS_EXPR, TREE_TYPE (index),
5074 index, TYPE_MIN_VALUE (domain))));
5076 position = size_binop (MULT_EXPR, index,
5077 convert (ssizetype,
5078 TYPE_SIZE_UNIT (elttype)));
5079 xtarget = offset_address (target,
5080 expand_expr (position, 0, VOIDmode, 0),
5081 highest_pow2_factor (position));
5082 xtarget = adjust_address (xtarget, mode, 0);
5083 store_expr (value, xtarget, 0);
5085 else if (vector)
5087 int pos;
5089 if (index != 0)
5090 pos = tree_low_cst (index, 0) - minelt;
5091 else
5092 pos = i;
5093 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5095 else
5097 if (index != 0)
5098 bitpos = ((tree_low_cst (index, 0) - minelt)
5099 * tree_low_cst (TYPE_SIZE (elttype), 1));
5100 else
5101 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5103 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5104 && TREE_CODE (type) == ARRAY_TYPE
5105 && TYPE_NONALIASED_COMPONENT (type))
5107 target = copy_rtx (target);
5108 MEM_KEEP_ALIAS_SET_P (target) = 1;
5110 store_constructor_field (target, bitsize, bitpos, mode, value,
5111 type, cleared, get_alias_set (elttype));
5114 if (vector)
5116 emit_insn (GEN_FCN (icode) (target,
5117 gen_rtx_PARALLEL (GET_MODE (target),
5118 gen_rtvec_v (n_elts, vector))));
5122 /* Set constructor assignments. */
5123 else if (TREE_CODE (type) == SET_TYPE)
5125 tree elt = CONSTRUCTOR_ELTS (exp);
5126 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5127 tree domain = TYPE_DOMAIN (type);
5128 tree domain_min, domain_max, bitlength;
5130 /* The default implementation strategy is to extract the constant
5131 parts of the constructor, use that to initialize the target,
5132 and then "or" in whatever non-constant ranges we need in addition.
5134 If a large set is all zero or all ones, it is
5135 probably better to set it using memset (if available) or bzero.
5136 Also, if a large set has just a single range, it may also be
5137 better to first clear the whole set (using
5138 bzero/memset), and then set the bits we want.  */
5140 /* Check for all zeros. */
5141 if (elt == NULL_TREE && size > 0)
5143 if (!cleared)
5144 clear_storage (target, GEN_INT (size));
5145 return;
5148 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5149 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5150 bitlength = size_binop (PLUS_EXPR,
5151 size_diffop (domain_max, domain_min),
5152 ssize_int (1));
5154 nbits = tree_low_cst (bitlength, 1);
5156 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5157 are "complicated" (more than one range), initialize (the
5158 constant parts) by copying from a constant. */
5159 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5160 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5162 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5163 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5164 char *bit_buffer = alloca (nbits);
5165 HOST_WIDE_INT word = 0;
5166 unsigned int bit_pos = 0;
5167 unsigned int ibit = 0;
5168 unsigned int offset = 0; /* In bytes from beginning of set. */
5170 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
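/* Walk BIT_BUFFER, packing SET_WORD_SIZE bits at a time into WORD and
storing each completed word of the set into the target.  */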
5171 for (;;)
5173 if (bit_buffer[ibit])
5175 if (BYTES_BIG_ENDIAN)
5176 word |= (1 << (set_word_size - 1 - bit_pos));
5177 else
5178 word |= 1 << bit_pos;
5181 bit_pos++; ibit++;
5182 if (bit_pos >= set_word_size || ibit == nbits)
5184 if (word != 0 || ! cleared)
5186 rtx datum = gen_int_mode (word, mode);
5187 rtx to_rtx;
5189 /* The assumption here is that it is safe to use
5190 XEXP if the set is multi-word, but not if
5191 it's single-word. */
5192 if (GET_CODE (target) == MEM)
5193 to_rtx = adjust_address (target, mode, offset);
5194 else if (offset == 0)
5195 to_rtx = target;
5196 else
5197 abort ();
5198 emit_move_insn (to_rtx, datum);
5201 if (ibit == nbits)
5202 break;
5203 word = 0;
5204 bit_pos = 0;
5205 offset += set_word_size / BITS_PER_UNIT;
5209 else if (!cleared)
5210 /* Don't bother clearing storage if the set is all ones. */
5211 if (TREE_CHAIN (elt) != NULL_TREE
5212 || (TREE_PURPOSE (elt) == NULL_TREE
5213 ? nbits != 1
5214 : ( ! host_integerp (TREE_VALUE (elt), 0)
5215 || ! host_integerp (TREE_PURPOSE (elt), 0)
5216 || (tree_low_cst (TREE_VALUE (elt), 0)
5217 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5218 != (HOST_WIDE_INT) nbits))))
5219 clear_storage (target, expr_size (exp));
5221 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5223 /* Start of range of element or NULL. */
5224 tree startbit = TREE_PURPOSE (elt);
5225 /* End of range of element, or element value. */
5226 tree endbit = TREE_VALUE (elt);
5227 HOST_WIDE_INT startb, endb;
5228 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5230 bitlength_rtx = expand_expr (bitlength,
5231 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5233 /* Handle non-range tuple element like [ expr ]. */
5234 if (startbit == NULL_TREE)
5236 startbit = save_expr (endbit);
5237 endbit = startbit;
5240 startbit = convert (sizetype, startbit);
5241 endbit = convert (sizetype, endbit);
5242 if (! integer_zerop (domain_min))
5244 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5245 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5247 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5248 EXPAND_CONST_ADDRESS);
5249 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5250 EXPAND_CONST_ADDRESS);
5252 if (REG_P (target))
5254 targetx
5255 = assign_temp
5256 ((build_qualified_type (lang_hooks.types.type_for_mode
5257 (GET_MODE (target), 0),
5258 TYPE_QUAL_CONST)),
5259 0, 1, 1);
5260 emit_move_insn (targetx, target);
5263 else if (GET_CODE (target) == MEM)
5264 targetx = target;
5265 else
5266 abort ();
5268 /* Optimization: If startbit and endbit are constants divisible
5269 by BITS_PER_UNIT, call memset instead. */
5270 if (TARGET_MEM_FUNCTIONS
5271 && TREE_CODE (startbit) == INTEGER_CST
5272 && TREE_CODE (endbit) == INTEGER_CST
5273 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5274 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5276 emit_library_call (memset_libfunc, LCT_NORMAL,
5277 VOIDmode, 3,
5278 plus_constant (XEXP (targetx, 0),
5279 startb / BITS_PER_UNIT),
5280 Pmode,
5281 constm1_rtx, TYPE_MODE (integer_type_node),
5282 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5283 TYPE_MODE (sizetype));
5285 else
5286 emit_library_call (setbits_libfunc, LCT_NORMAL,
5287 VOIDmode, 4, XEXP (targetx, 0),
5288 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5289 startbit_rtx, TYPE_MODE (sizetype),
5290 endbit_rtx, TYPE_MODE (sizetype));
5292 if (REG_P (target))
5293 emit_move_insn (target, targetx);
5297 else
5298 abort ();
5301 /* Store the value of EXP (an expression tree)
5302 into a subfield of TARGET which has mode MODE and occupies
5303 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5304 If MODE is VOIDmode, it means that we are storing into a bit-field.
5306 If VALUE_MODE is VOIDmode, return nothing in particular.
5307 UNSIGNEDP is not used in this case.
5309 Otherwise, return an rtx for the value stored. This rtx
5310 has mode VALUE_MODE if that is convenient to do.
5311 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5313 TYPE is the type of the underlying object.
5315 ALIAS_SET is the alias set for the destination. This value will
5316 (in general) be different from that for TARGET, since TARGET is a
5317 reference to the containing structure. */
5319 static rtx
5320 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5321 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5322 int unsignedp, tree type, int alias_set)
5324 HOST_WIDE_INT width_mask = 0;
5326 if (TREE_CODE (exp) == ERROR_MARK)
5327 return const0_rtx;
5329 /* If we have nothing to store, do nothing unless the expression has
5330 side-effects. */
5331 if (bitsize == 0)
5332 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5333 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5334 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
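/* WIDTH_MASK, when nonzero, covers the low BITSIZE bits; it is used at
the end of this function to return the stored value without re-reading
the bit-field from memory.  */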
5336 /* If we are storing into an unaligned field of an aligned union that is
5337 in a register, we may have the mode of TARGET being an integer mode but
5338 MODE == BLKmode. In that case, get an aligned object whose size and
5339 alignment are the same as TARGET and store TARGET into it (we can avoid
5340 the store if the field being stored is the entire width of TARGET). Then
5341 call ourselves recursively to store the field into a BLKmode version of
5342 that object. Finally, load from the object into TARGET. This is not
5343 very efficient in general, but should only be slightly more expensive
5344 than the otherwise-required unaligned accesses. Perhaps this can be
5345 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5346 twice, once with emit_move_insn and once via store_field. */
5348 if (mode == BLKmode
5349 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5351 rtx object = assign_temp (type, 0, 1, 1);
5352 rtx blk_object = adjust_address (object, BLKmode, 0);
5354 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5355 emit_move_insn (object, target);
5357 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5358 alias_set);
5360 emit_move_insn (target, object);
5362 /* We want to return the BLKmode version of the data. */
5363 return blk_object;
5366 if (GET_CODE (target) == CONCAT)
5368 /* We're storing into a struct containing a single __complex. */
5370 if (bitpos != 0)
5371 abort ();
5372 return store_expr (exp, target, value_mode != VOIDmode);
5375 /* If the structure is in a register or if the component
5376 is a bit field, we cannot use addressing to access it.
5377 Use bit-field techniques or SUBREG to store in it. */
5379 if (mode == VOIDmode
5380 || (mode != BLKmode && ! direct_store[(int) mode]
5381 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5382 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5383 || GET_CODE (target) == REG
5384 || GET_CODE (target) == SUBREG
5385 /* If the field isn't aligned enough to store as an ordinary memref,
5386 store it as a bit field. */
5387 || (mode != BLKmode
5388 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5389 || bitpos % GET_MODE_ALIGNMENT (mode))
5390 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5391 || (bitpos % BITS_PER_UNIT != 0)))
5392 /* If the RHS and field are a constant size and the size of the
5393 RHS isn't the same size as the bitfield, we must use bitfield
5394 operations. */
5395 || (bitsize >= 0
5396 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5397 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5399 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5401 /* If BITSIZE is narrower than the size of the type of EXP
5402 we will be narrowing TEMP. Normally, what's wanted are the
5403 low-order bits.  However, if EXP's type is a record and this is a
5404 big-endian machine, we want the upper BITSIZE bits. */
5405 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5406 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5407 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5408 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5409 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5410 - bitsize),
5411 NULL_RTX, 1);
5413 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5414 MODE. */
5415 if (mode != VOIDmode && mode != BLKmode
5416 && mode != TYPE_MODE (TREE_TYPE (exp)))
5417 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5419 /* If the modes of TARGET and TEMP are both BLKmode, both
5420 must be in memory and BITPOS must be aligned on a byte
5421 boundary. If so, we simply do a block copy. */
5422 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5424 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5425 || bitpos % BITS_PER_UNIT != 0)
5426 abort ();
5428 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5429 emit_block_move (target, temp,
5430 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5431 / BITS_PER_UNIT),
5432 BLOCK_OP_NORMAL);
5434 return value_mode == VOIDmode ? const0_rtx : target;
5437 /* Store the value in the bitfield. */
5438 store_bit_field (target, bitsize, bitpos, mode, temp,
5439 int_size_in_bytes (type));
5441 if (value_mode != VOIDmode)
5443 /* The caller wants an rtx for the value.
5444 If possible, avoid refetching from the bitfield itself. */
5445 if (width_mask != 0
5446 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5448 tree count;
5449 enum machine_mode tmode;
5451 tmode = GET_MODE (temp);
5452 if (tmode == VOIDmode)
5453 tmode = value_mode;
5455 if (unsignedp)
5456 return expand_and (tmode, temp,
5457 gen_int_mode (width_mask, tmode),
5458 NULL_RTX);
5460 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5461 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5462 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5465 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5466 NULL_RTX, value_mode, VOIDmode,
5467 int_size_in_bytes (type));
5469 return const0_rtx;
5471 else
5473 rtx addr = XEXP (target, 0);
5474 rtx to_rtx = target;
5476 /* If a value is wanted, it must be the lhs;
5477 so make the address stable for multiple use. */
5479 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5480 && ! CONSTANT_ADDRESS_P (addr)
5481 /* A frame-pointer reference is already stable. */
5482 && ! (GET_CODE (addr) == PLUS
5483 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5484 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5485 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5486 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5488 /* Now build a reference to just the desired component. */
5490 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5492 if (to_rtx == target)
5493 to_rtx = copy_rtx (to_rtx);
5495 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5496 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5497 set_mem_alias_set (to_rtx, alias_set);
5499 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5503 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5504 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5505 codes and find the ultimate containing object, which we return.
5507 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5508 bit position, and *PUNSIGNEDP to the signedness of the field.
5509 If the position of the field is variable, we store a tree
5510 giving the variable offset (in units) in *POFFSET.
5511 This offset is in addition to the bit position.
5512 If the position is not variable, we store 0 in *POFFSET.
5514 If any of the extraction expressions is volatile,
5515 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5517 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5518 is a mode that can be used to access the field. In that case, *PBITSIZE
5519 is redundant.
5521 If the field describes a variable-sized object, *PMODE is set to
5522 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5523 this case, but the address of the object can be found. */
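/* A minimal usage sketch (illustrative only, not part of the compiler
   proper): callers supply out-parameters and then work from the values
   returned, for example

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

   after which BASE is the ultimate containing object, BITPOS and BITSIZE
   locate the field within it, and OFFSET, if nonzero, is a variable byte
   offset to add to the address of BASE.  */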
5525 tree
5526 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5527 HOST_WIDE_INT *pbitpos, tree *poffset,
5528 enum machine_mode *pmode, int *punsignedp,
5529 int *pvolatilep)
5531 tree size_tree = 0;
5532 enum machine_mode mode = VOIDmode;
5533 tree offset = size_zero_node;
5534 tree bit_offset = bitsize_zero_node;
5535 tree tem;
5537 /* First get the mode, signedness, and size. We do this from just the
5538 outermost expression. */
5539 if (TREE_CODE (exp) == COMPONENT_REF)
5541 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5542 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5543 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5545 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5547 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5549 size_tree = TREE_OPERAND (exp, 1);
5550 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5552 else
5554 mode = TYPE_MODE (TREE_TYPE (exp));
5555 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5557 if (mode == BLKmode)
5558 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5559 else
5560 *pbitsize = GET_MODE_BITSIZE (mode);
5563 if (size_tree != 0)
5565 if (! host_integerp (size_tree, 1))
5566 mode = BLKmode, *pbitsize = -1;
5567 else
5568 *pbitsize = tree_low_cst (size_tree, 1);
5571 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5572 and find the ultimate containing object. */
5573 while (1)
5575 if (TREE_CODE (exp) == BIT_FIELD_REF)
5576 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5577 else if (TREE_CODE (exp) == COMPONENT_REF)
5579 tree field = TREE_OPERAND (exp, 1);
5580 tree this_offset = DECL_FIELD_OFFSET (field);
5582 /* If this field hasn't been filled in yet, don't go
5583 past it. This should only happen when folding expressions
5584 made during type construction. */
5585 if (this_offset == 0)
5586 break;
5587 else
5588 this_offset = SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset, exp);
5590 offset = size_binop (PLUS_EXPR, offset, this_offset);
5591 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5592 DECL_FIELD_BIT_OFFSET (field));
5594 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5597 else if (TREE_CODE (exp) == ARRAY_REF
5598 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5600 tree index = TREE_OPERAND (exp, 1);
5601 tree array = TREE_OPERAND (exp, 0);
5602 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5603 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5604 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5606 /* We assume all arrays have sizes that are a multiple of a byte.
5607 First subtract the lower bound, if any, in the type of the
5608 index, then convert to sizetype and multiply by the size of the
5609 array element. */
5610 if (low_bound != 0 && ! integer_zerop (low_bound))
5611 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5612 index, low_bound));
5614 /* If the index has a self-referential type, instantiate it with
5615 the object; likewise for the component size. */
5616 index = SUBSTITUTE_PLACEHOLDER_IN_EXPR (index, exp);
5617 unit_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size, array);
5618 offset = size_binop (PLUS_EXPR, offset,
5619 size_binop (MULT_EXPR,
5620 convert (sizetype, index),
5621 unit_size));
5624 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5625 conversions that don't change the mode, and all view conversions
5626 except those that need to "step up" the alignment. */
5627 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5628 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5629 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5630 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5631 && STRICT_ALIGNMENT
5632 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5633 < BIGGEST_ALIGNMENT)
5634 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5635 || TYPE_ALIGN_OK (TREE_TYPE
5636 (TREE_OPERAND (exp, 0))))))
5637 && ! ((TREE_CODE (exp) == NOP_EXPR
5638 || TREE_CODE (exp) == CONVERT_EXPR)
5639 && (TYPE_MODE (TREE_TYPE (exp))
5640 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5641 break;
5643 /* If any reference in the chain is volatile, the effect is volatile. */
5644 if (TREE_THIS_VOLATILE (exp))
5645 *pvolatilep = 1;
5647 exp = TREE_OPERAND (exp, 0);
5650 /* If OFFSET is constant, see if we can return the whole thing as a
5651 constant bit position. Otherwise, split it up. */
5652 if (host_integerp (offset, 0)
5653 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5654 bitsize_unit_node))
5655 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5656 && host_integerp (tem, 0))
5657 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5658 else
5659 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5661 *pmode = mode;
5662 return exp;
5665 /* Return 1 if T is an expression that get_inner_reference handles. */
5668 handled_component_p (tree t)
5670 switch (TREE_CODE (t))
5672 case BIT_FIELD_REF:
5673 case COMPONENT_REF:
5674 case ARRAY_REF:
5675 case ARRAY_RANGE_REF:
5676 case NON_LVALUE_EXPR:
5677 case VIEW_CONVERT_EXPR:
5678 return 1;
5680 /* ??? Sure they are handled, but get_inner_reference may return
5681 a different PBITSIZE, depending upon whether the expression is
5682 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5683 case NOP_EXPR:
5684 case CONVERT_EXPR:
5685 return (TYPE_MODE (TREE_TYPE (t))
5686 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5688 default:
5689 return 0;
5693 /* Given an rtx VALUE that may contain additions and multiplications, return
5694 an equivalent value that just refers to a register, memory, or constant.
5695 This is done by generating instructions to perform the arithmetic and
5696 returning a pseudo-register containing the value.
5698 The returned value may be a REG, SUBREG, MEM or constant. */
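/* An illustrative sketch (not code from this file): a caller holding an
   address computation such as (plus (reg) (const_int 4)) that must become
   a valid operand might write

     rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (4));
     addr = force_operand (addr, NULL_RTX);

   where BASE_REG stands for some pseudo register; the result is then a
   REG, SUBREG, MEM or constant as described above.  */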
5701 force_operand (rtx value, rtx target)
5703 rtx op1, op2;
5704 /* Use subtarget as the target for operand 0 of a binary operation. */
5705 rtx subtarget = get_subtarget (target);
5706 enum rtx_code code = GET_CODE (value);
5708 /* Check for subreg applied to an expression produced by loop optimizer. */
5709 if (code == SUBREG
5710 && GET_CODE (SUBREG_REG (value)) != REG
5711 && GET_CODE (SUBREG_REG (value)) != MEM)
5713 value = simplify_gen_subreg (GET_MODE (value),
5714 force_reg (GET_MODE (SUBREG_REG (value)),
5715 force_operand (SUBREG_REG (value),
5716 NULL_RTX)),
5717 GET_MODE (SUBREG_REG (value)),
5718 SUBREG_BYTE (value));
5719 code = GET_CODE (value);
5722 /* Check for a PIC address load. */
5723 if ((code == PLUS || code == MINUS)
5724 && XEXP (value, 0) == pic_offset_table_rtx
5725 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5726 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5727 || GET_CODE (XEXP (value, 1)) == CONST))
5729 if (!subtarget)
5730 subtarget = gen_reg_rtx (GET_MODE (value));
5731 emit_move_insn (subtarget, value);
5732 return subtarget;
5735 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5737 if (!target)
5738 target = gen_reg_rtx (GET_MODE (value));
5739 convert_move (target, force_operand (XEXP (value, 0), NULL),
5740 code == ZERO_EXTEND);
5741 return target;
5744 if (ARITHMETIC_P (value))
5746 op2 = XEXP (value, 1);
5747 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5748 subtarget = 0;
5749 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5751 code = PLUS;
5752 op2 = negate_rtx (GET_MODE (value), op2);
5755 /* Check for an addition with OP2 a constant integer and our first
5756 operand a PLUS of a virtual register and something else. In that
5757 case, we want to emit the sum of the virtual register and the
5758 constant first and then add the other value. This allows virtual
5759 register instantiation to simply modify the constant rather than
5760 creating another one around this addition. */
5761 if (code == PLUS && GET_CODE (op2) == CONST_INT
5762 && GET_CODE (XEXP (value, 0)) == PLUS
5763 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5764 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5765 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5767 rtx temp = expand_simple_binop (GET_MODE (value), code,
5768 XEXP (XEXP (value, 0), 0), op2,
5769 subtarget, 0, OPTAB_LIB_WIDEN);
5770 return expand_simple_binop (GET_MODE (value), code, temp,
5771 force_operand (XEXP (XEXP (value,
5772 0), 1), 0),
5773 target, 0, OPTAB_LIB_WIDEN);
5776 op1 = force_operand (XEXP (value, 0), subtarget);
5777 op2 = force_operand (op2, NULL_RTX);
5778 switch (code)
5780 case MULT:
5781 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5782 case DIV:
5783 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5784 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5785 target, 1, OPTAB_LIB_WIDEN);
5786 else
5787 return expand_divmod (0,
5788 FLOAT_MODE_P (GET_MODE (value))
5789 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5790 GET_MODE (value), op1, op2, target, 0);
5791 break;
5792 case MOD:
5793 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5794 target, 0);
5795 break;
5796 case UDIV:
5797 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5798 target, 1);
5799 break;
5800 case UMOD:
5801 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5802 target, 1);
5803 break;
5804 case ASHIFTRT:
5805 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5806 target, 0, OPTAB_LIB_WIDEN);
5807 break;
5808 default:
5809 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5810 target, 1, OPTAB_LIB_WIDEN);
5813 if (UNARY_P (value))
5815 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5816 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5819 #ifdef INSN_SCHEDULING
5820 /* On machines that have insn scheduling, we want all memory references to be
5821 explicit, so we need to deal with such paradoxical SUBREGs. */
5822 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5823 && (GET_MODE_SIZE (GET_MODE (value))
5824 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5825 value
5826 = simplify_gen_subreg (GET_MODE (value),
5827 force_reg (GET_MODE (SUBREG_REG (value)),
5828 force_operand (SUBREG_REG (value),
5829 NULL_RTX)),
5830 GET_MODE (SUBREG_REG (value)),
5831 SUBREG_BYTE (value));
5832 #endif
5834 return value;
5837 /* Subroutine of expand_expr: return nonzero iff there is no way that
5838 EXP can reference X, which is being modified. TOP_P is nonzero if this
5839 call is going to be used to determine whether we need a temporary
5840 for EXP, as opposed to a recursive call to this function.
5842 It is always safe for this routine to return zero since it merely
5843 searches for optimization opportunities. */
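/* Typical use, mirroring expand_operands further down in this file:
   before reusing TARGET while another operand EXP1 remains to be
   expanded, a caller checks

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   so that a TARGET which EXP1 might still reference is simply not used.  */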
5846 safe_from_p (rtx x, tree exp, int top_p)
5848 rtx exp_rtl = 0;
5849 int i, nops;
5850 static tree save_expr_list;
5852 if (x == 0
5853 /* If EXP has varying size, we MUST use a target since we currently
5854 have no way of allocating temporaries of variable size
5855 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5856 So we assume here that something at a higher level has prevented a
5857 clash. This is somewhat bogus, but the best we can do. Only
5858 do this when X is BLKmode and when we are at the top level. */
5859 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5860 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5861 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5862 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5863 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5864 != INTEGER_CST)
5865 && GET_MODE (x) == BLKmode)
5866 /* If X is in the outgoing argument area, it is always safe. */
5867 || (GET_CODE (x) == MEM
5868 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5869 || (GET_CODE (XEXP (x, 0)) == PLUS
5870 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5871 return 1;
5873 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5874 find the underlying pseudo. */
5875 if (GET_CODE (x) == SUBREG)
5877 x = SUBREG_REG (x);
5878 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5879 return 0;
5882 /* A SAVE_EXPR might appear many times in the expression passed to the
5883 top-level safe_from_p call, and if it has a complex subexpression,
5884 examining it multiple times could result in a combinatorial explosion.
5885 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5886 with optimization took about 28 minutes to compile -- even though it was
5887 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5888 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5889 we have processed. Note that the only test of top_p was above. */
5891 if (top_p)
5893 int rtn;
5894 tree t;
5896 save_expr_list = 0;
5898 rtn = safe_from_p (x, exp, 0);
5900 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5901 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5903 return rtn;
5906 /* Now look at our tree code and possibly recurse. */
5907 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5909 case 'd':
5910 exp_rtl = DECL_RTL_IF_SET (exp);
5911 break;
5913 case 'c':
5914 return 1;
5916 case 'x':
5917 if (TREE_CODE (exp) == TREE_LIST)
5919 while (1)
5921 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5922 return 0;
5923 exp = TREE_CHAIN (exp);
5924 if (!exp)
5925 return 1;
5926 if (TREE_CODE (exp) != TREE_LIST)
5927 return safe_from_p (x, exp, 0);
5930 else if (TREE_CODE (exp) == ERROR_MARK)
5931 return 1; /* An already-visited SAVE_EXPR? */
5932 else
5933 return 0;
5935 case '2':
5936 case '<':
5937 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5938 return 0;
5939 /* Fall through. */
5941 case '1':
5942 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5944 case 'e':
5945 case 'r':
5946 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5947 the expression. If it is set, we conflict iff we are that rtx or
5948 both are in memory. Otherwise, we check all operands of the
5949 expression recursively. */
5951 switch (TREE_CODE (exp))
5953 case ADDR_EXPR:
5954 /* If the operand is static or we are static, we can't conflict.
5955 Likewise if we don't conflict with the operand at all. */
5956 if (staticp (TREE_OPERAND (exp, 0))
5957 || TREE_STATIC (exp)
5958 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5959 return 1;
5961 /* Otherwise, the only way this can conflict is if we are taking
5962 the address of a DECL whose address is part of X, which is
5963 very rare. */
5964 exp = TREE_OPERAND (exp, 0);
5965 if (DECL_P (exp))
5967 if (!DECL_RTL_SET_P (exp)
5968 || GET_CODE (DECL_RTL (exp)) != MEM)
5969 return 0;
5970 else
5971 exp_rtl = XEXP (DECL_RTL (exp), 0);
5973 break;
5975 case INDIRECT_REF:
5976 if (GET_CODE (x) == MEM
5977 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5978 get_alias_set (exp)))
5979 return 0;
5980 break;
5982 case CALL_EXPR:
5983 /* Assume that the call will clobber all hard registers and
5984 all of memory. */
5985 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5986 || GET_CODE (x) == MEM)
5987 return 0;
5988 break;
5990 case RTL_EXPR:
5991 /* If a sequence exists, we would have to scan every instruction
5992 in the sequence to see if it was safe. This is probably not
5993 worthwhile. */
5994 if (RTL_EXPR_SEQUENCE (exp))
5995 return 0;
5997 exp_rtl = RTL_EXPR_RTL (exp);
5998 break;
6000 case WITH_CLEANUP_EXPR:
6001 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6002 break;
6004 case CLEANUP_POINT_EXPR:
6005 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6007 case SAVE_EXPR:
6008 exp_rtl = SAVE_EXPR_RTL (exp);
6009 if (exp_rtl)
6010 break;
6012 /* If we've already scanned this, don't do it again. Otherwise,
6013 show we've scanned it and record for clearing the flag if we're
6014 going on. */
6015 if (TREE_PRIVATE (exp))
6016 return 1;
6018 TREE_PRIVATE (exp) = 1;
6019 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6021 TREE_PRIVATE (exp) = 0;
6022 return 0;
6025 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6026 return 1;
6028 case BIND_EXPR:
6029 /* The only operand we look at is operand 1. The rest aren't
6030 part of the expression. */
6031 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6033 default:
6034 break;
6037 /* If we have an rtx, we do not need to scan our operands. */
6038 if (exp_rtl)
6039 break;
6041 nops = first_rtl_op (TREE_CODE (exp));
6042 for (i = 0; i < nops; i++)
6043 if (TREE_OPERAND (exp, i) != 0
6044 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6045 return 0;
6047 /* If this is a language-specific tree code, it may require
6048 special handling. */
6049 if ((unsigned int) TREE_CODE (exp)
6050 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6051 && !lang_hooks.safe_from_p (x, exp))
6052 return 0;
6055 /* If we have an rtl, find any enclosed object. Then see if we conflict
6056 with it. */
6057 if (exp_rtl)
6059 if (GET_CODE (exp_rtl) == SUBREG)
6061 exp_rtl = SUBREG_REG (exp_rtl);
6062 if (GET_CODE (exp_rtl) == REG
6063 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6064 return 0;
6067 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6068 are memory and they conflict. */
6069 return ! (rtx_equal_p (x, exp_rtl)
6070 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6071 && true_dependence (exp_rtl, VOIDmode, x,
6072 rtx_addr_varies_p)));
6075 /* If we reach here, it is safe. */
6076 return 1;
6079 /* Subroutine of expand_expr: return rtx if EXP is a
6080 variable or parameter; else return 0. */
6082 static rtx
6083 var_rtx (tree exp)
6085 STRIP_NOPS (exp);
6086 switch (TREE_CODE (exp))
6088 case PARM_DECL:
6089 case VAR_DECL:
6090 return DECL_RTL (exp);
6091 default:
6092 return 0;
6096 /* Return the highest power of two that EXP is known to be a multiple of.
6097 This is used in updating alignment of MEMs in array references. */
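/* As a worked example, for an offset expression of the form I * 4 + 8
   the MULT_EXPR case yields 1 (the default, for I) times 4, the
   INTEGER_CST 8 contributes 8, and the PLUS_EXPR case takes the minimum
   of the two, so the result is 4: the offset is known to be a multiple
   of 4 but of no larger power of two.  */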
6099 static unsigned HOST_WIDE_INT
6100 highest_pow2_factor (tree exp)
6102 unsigned HOST_WIDE_INT c0, c1;
6104 switch (TREE_CODE (exp))
6106 case INTEGER_CST:
6107 /* We can find the lowest bit that's a one. If the low
6108 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6109 We need to handle this case since we can find it in a COND_EXPR,
6110 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6111 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6112 later ICE. */
6113 if (TREE_CONSTANT_OVERFLOW (exp))
6114 return BIGGEST_ALIGNMENT;
6115 else
6117 /* Note: tree_low_cst is intentionally not used here,
6118 we don't care about the upper bits. */
6119 c0 = TREE_INT_CST_LOW (exp);
6120 c0 &= -c0;
6121 return c0 ? c0 : BIGGEST_ALIGNMENT;
6123 break;
6125 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6126 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6127 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6128 return MIN (c0, c1);
6130 case MULT_EXPR:
6131 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6132 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6133 return c0 * c1;
6135 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6136 case CEIL_DIV_EXPR:
6137 if (integer_pow2p (TREE_OPERAND (exp, 1))
6138 && host_integerp (TREE_OPERAND (exp, 1), 1))
6140 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6141 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6142 return MAX (1, c0 / c1);
6144 break;
6146 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6147 case SAVE_EXPR:
6148 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6150 case COMPOUND_EXPR:
6151 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6153 case COND_EXPR:
6154 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6155 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6156 return MIN (c0, c1);
6158 default:
6159 break;
6162 return 1;
6165 /* Similar, except that the alignment requirements of TARGET are
6166 taken into account. Assume it is at least as aligned as its
6167 type, unless it is a COMPONENT_REF in which case the layout of
6168 the structure gives the alignment. */
6170 static unsigned HOST_WIDE_INT
6171 highest_pow2_factor_for_target (tree target, tree exp)
6173 unsigned HOST_WIDE_INT target_align, factor;
6175 factor = highest_pow2_factor (exp);
6176 if (TREE_CODE (target) == COMPONENT_REF)
6177 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6178 else
6179 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6180 return MAX (factor, target_align);
6183 /* Expands variable VAR. */
6185 void
6186 expand_var (tree var)
6188 if (DECL_EXTERNAL (var))
6189 return;
6191 if (TREE_STATIC (var))
6192 /* If this is an inlined copy of a static local variable,
6193 look up the original decl. */
6194 var = DECL_ORIGIN (var);
6196 if (TREE_STATIC (var)
6197 ? !TREE_ASM_WRITTEN (var)
6198 : !DECL_RTL_SET_P (var))
6200 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6202 /* Prepare a mem & address for the decl. */
6203 rtx x;
6205 if (TREE_STATIC (var))
6206 abort ();
6208 x = gen_rtx_MEM (DECL_MODE (var),
6209 gen_reg_rtx (Pmode));
6211 set_mem_attributes (x, var, 1);
6212 SET_DECL_RTL (var, x);
6214 else if (lang_hooks.expand_decl (var))
6215 /* OK. */;
6216 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6217 expand_decl (var);
6218 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6219 rest_of_decl_compilation (var, NULL, 0, 0);
6220 else if (TREE_CODE (var) == TYPE_DECL
6221 || TREE_CODE (var) == CONST_DECL
6222 || TREE_CODE (var) == FUNCTION_DECL
6223 || TREE_CODE (var) == LABEL_DECL)
6224 /* No expansion needed. */;
6225 else
6226 abort ();
6230 /* Expands declarations of variables in list VARS. */
6232 static void
6233 expand_vars (tree vars)
6235 for (; vars; vars = TREE_CHAIN (vars))
6237 tree var = vars;
6239 if (DECL_EXTERNAL (var))
6240 continue;
6242 expand_var (var);
6243 expand_decl_init (var);
6247 /* Subroutine of expand_expr. Expand the two operands of a binary
6248 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6249 The value may be stored in TARGET if TARGET is nonzero. The
6250 MODIFIER argument is as documented by expand_expr. */
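/* For instance (a sketch), a binary operator is typically handled with

     rtx op0, op1;
     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);

   note that identical operands are expanded only once and the second
   result is a copy of the first, as the body below shows.  */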
6252 static void
6253 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6254 enum expand_modifier modifier)
6256 if (! safe_from_p (target, exp1, 1))
6257 target = 0;
6258 if (operand_equal_p (exp0, exp1, 0))
6260 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6261 *op1 = copy_rtx (*op0);
6263 else
6265 /* If we need to preserve evaluation order, copy exp0 into its own
6266 temporary variable so that it can't be clobbered by exp1. */
6267 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6268 exp0 = save_expr (exp0);
6269 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6270 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6275 /* expand_expr: generate code for computing expression EXP.
6276 An rtx for the computed value is returned. The value is never null.
6277 In the case of a void EXP, const0_rtx is returned.
6279 The value may be stored in TARGET if TARGET is nonzero.
6280 TARGET is just a suggestion; callers must assume that
6281 the rtx returned may not be the same as TARGET.
6283 If TARGET is CONST0_RTX, it means that the value will be ignored.
6285 If TMODE is not VOIDmode, it suggests generating the
6286 result in mode TMODE. But this is done only when convenient.
6287 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6288 TMODE is just a suggestion; callers must assume that
6289 the rtx returned may not have mode TMODE.
6291 Note that TARGET may have neither TMODE nor MODE. In that case, it
6292 probably will not be used.
6294 If MODIFIER is EXPAND_SUM then when EXP is an addition
6295 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6296 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6297 products as above, or REG or MEM, or constant.
6298 Ordinarily in such cases we would output mul or add instructions
6299 and then return a pseudo reg containing the sum.
6301 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6302 it also marks a label as absolutely required (it can't be dead).
6303 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6304 This is used for outputting expressions used in initializers.
6306 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6307 with a constant address even if that address is not normally legitimate.
6308 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6310 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6311 a call parameter. Such targets require special care as we haven't yet
6312 marked TARGET so that it's safe from being trashed by libcalls. We
6313 don't want to use TARGET for anything but the final result;
6314 Intermediate values must go elsewhere. Additionally, calls to
6315 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6317 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6318 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6319 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6320 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6321 recursively. */
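/* A sketch of a typical call (illustrative only): most callers go through
   the expand_expr wrapper, e.g.

     rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
                            EXPAND_NORMAL);

   passing NULL_RTX and VOIDmode when they have no preference for the
   target or for the mode, exactly as documented above.  */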
6323 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6324 enum expand_modifier, rtx *);
6327 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6328 enum expand_modifier modifier, rtx *alt_rtl)
6330 int rn = -1;
6331 rtx ret, last = NULL;
6333 /* Handle ERROR_MARK before anybody tries to access its type. */
6334 if (TREE_CODE (exp) == ERROR_MARK
6335 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6337 ret = CONST0_RTX (tmode);
6338 return ret ? ret : const0_rtx;
6341 if (flag_non_call_exceptions)
6343 rn = lookup_stmt_eh_region (exp);
6344 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6345 if (rn >= 0)
6346 last = get_last_insn ();
6349 /* If this is an expression of some kind and it has an associated line
6350 number, then emit the line number before expanding the expression.
6352 We need to save and restore the file and line information so that
6353 errors discovered during expansion are emitted with the right
6354 information. It would be better if the diagnostic routines
6355 used the file/line information embedded in the tree nodes rather
6356 than globals. */
6357 if (cfun && EXPR_HAS_LOCATION (exp))
6359 location_t saved_location = input_location;
6360 input_location = EXPR_LOCATION (exp);
6361 emit_line_note (input_location);
6363 /* Record where the insns produced belong. */
6364 if (cfun->dont_emit_block_notes)
6365 record_block_change (TREE_BLOCK (exp));
6367 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6369 input_location = saved_location;
6371 else
6373 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6376 /* If using non-call exceptions, mark all insns that may trap.
6377 expand_call() will mark CALL_INSNs before we get to this code,
6378 but it doesn't handle libcalls, and these may trap. */
6379 if (rn >= 0)
6381 rtx insn;
6382 for (insn = next_real_insn (last); insn;
6383 insn = next_real_insn (insn))
6385 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6386 /* If we want exceptions for non-call insns, any
6387 may_trap_p instruction may throw. */
6388 && GET_CODE (PATTERN (insn)) != CLOBBER
6389 && GET_CODE (PATTERN (insn)) != USE
6390 && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6392 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6393 REG_NOTES (insn));
6398 return ret;
6401 static rtx
6402 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6403 enum expand_modifier modifier, rtx *alt_rtl)
6405 rtx op0, op1, temp;
6406 tree type = TREE_TYPE (exp);
6407 int unsignedp;
6408 enum machine_mode mode;
6409 enum tree_code code = TREE_CODE (exp);
6410 optab this_optab;
6411 rtx subtarget, original_target;
6412 int ignore;
6413 tree context;
6415 mode = TYPE_MODE (type);
6416 unsignedp = TYPE_UNSIGNED (type);
6418 /* Use subtarget as the target for operand 0 of a binary operation. */
6419 subtarget = get_subtarget (target);
6420 original_target = target;
6421 ignore = (target == const0_rtx
6422 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6423 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6424 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6425 && TREE_CODE (type) == VOID_TYPE));
6427 /* If we are going to ignore this result, we need only do something
6428 if there is a side-effect somewhere in the expression. If there
6429 is, short-circuit the most common cases here. Note that we must
6430 not call expand_expr with anything but const0_rtx in case this
6431 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6433 if (ignore)
6435 if (! TREE_SIDE_EFFECTS (exp))
6436 return const0_rtx;
6438 /* Ensure we reference a volatile object even if value is ignored, but
6439 don't do this if all we are doing is taking its address. */
6440 if (TREE_THIS_VOLATILE (exp)
6441 && TREE_CODE (exp) != FUNCTION_DECL
6442 && mode != VOIDmode && mode != BLKmode
6443 && modifier != EXPAND_CONST_ADDRESS)
6445 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6446 if (GET_CODE (temp) == MEM)
6447 temp = copy_to_reg (temp);
6448 return const0_rtx;
6451 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6452 || code == INDIRECT_REF || code == BUFFER_REF)
6453 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6454 modifier);
6456 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6457 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6459 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6460 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6461 return const0_rtx;
6463 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6464 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6465 /* If the second operand has no side effects, just evaluate
6466 the first. */
6467 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6468 modifier);
6469 else if (code == BIT_FIELD_REF)
6471 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6472 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6473 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6474 return const0_rtx;
6477 target = 0;
6480 /* If we will do cse, generate all results into pseudo registers
6481 since 1) that allows cse to find more things
6482 and 2) otherwise cse could produce an insn the machine
6483 cannot support. An exception is a CONSTRUCTOR into a multi-word
6484 MEM: that's much more likely to be most efficient into the MEM.
6485 Another is a CALL_EXPR which must return in memory. */
6487 if (! cse_not_expected && mode != BLKmode && target
6488 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6489 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6490 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6491 target = 0;
6493 switch (code)
6495 case LABEL_DECL:
6497 tree function = decl_function_context (exp);
6499 temp = label_rtx (exp);
6500 temp = gen_rtx_LABEL_REF (Pmode, temp);
6502 if (function != current_function_decl
6503 && function != 0)
6504 LABEL_REF_NONLOCAL_P (temp) = 1;
6506 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6507 return temp;
6510 case PARM_DECL:
6511 if (!DECL_RTL_SET_P (exp))
6513 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6514 return CONST0_RTX (mode);
6517 /* ... fall through ... */
6519 case VAR_DECL:
6520 /* If a static var's type was incomplete when the decl was written,
6521 but the type is complete now, lay out the decl now. */
6522 if (DECL_SIZE (exp) == 0
6523 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6524 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6525 layout_decl (exp, 0);
6527 /* ... fall through ... */
6529 case FUNCTION_DECL:
6530 case RESULT_DECL:
6531 if (DECL_RTL (exp) == 0)
6532 abort ();
6534 /* Ensure the variable is marked as used even if it doesn't go through
6535 a parser. If it hasn't been used yet, write out an external
6536 definition. */
6537 if (! TREE_USED (exp))
6539 assemble_external (exp);
6540 TREE_USED (exp) = 1;
6543 /* Show we haven't gotten RTL for this yet. */
6544 temp = 0;
6546 /* Handle variables inherited from containing functions. */
6547 context = decl_function_context (exp);
6549 if (context != 0 && context != current_function_decl
6550 /* If var is static, we don't need a static chain to access it. */
6551 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6552 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6554 rtx addr;
6556 /* Mark as non-local and addressable. */
6557 DECL_NONLOCAL (exp) = 1;
6558 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6559 abort ();
6560 lang_hooks.mark_addressable (exp);
6561 if (GET_CODE (DECL_RTL (exp)) != MEM)
6562 abort ();
6563 addr = XEXP (DECL_RTL (exp), 0);
6564 if (GET_CODE (addr) == MEM)
6565 addr
6566 = replace_equiv_address (addr,
6567 fix_lexical_addr (XEXP (addr, 0), exp));
6568 else
6569 addr = fix_lexical_addr (addr, exp);
6571 temp = replace_equiv_address (DECL_RTL (exp), addr);
6574 /* This is the case of an array whose size is to be determined
6575 from its initializer, while the initializer is still being parsed.
6576 See expand_decl. */
6578 else if (GET_CODE (DECL_RTL (exp)) == MEM
6579 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6580 temp = validize_mem (DECL_RTL (exp));
6582 /* If DECL_RTL is memory, we are in the normal case: if either
6583 the address is not valid, or it is not a register and -fforce-addr
6584 is specified, get the address into a register. */
6586 else if (GET_CODE (DECL_RTL (exp)) == MEM
6587 && modifier != EXPAND_CONST_ADDRESS
6588 && modifier != EXPAND_SUM
6589 && modifier != EXPAND_INITIALIZER
6590 && (! memory_address_p (DECL_MODE (exp),
6591 XEXP (DECL_RTL (exp), 0))
6592 || (flag_force_addr
6593 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6595 if (alt_rtl)
6596 *alt_rtl = DECL_RTL (exp);
6597 temp = replace_equiv_address (DECL_RTL (exp),
6598 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6601 /* If we got something, return it. But first, set the alignment
6602 if the address is a register. */
6603 if (temp != 0)
6605 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6606 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6608 return temp;
6611 /* If the mode of DECL_RTL does not match that of the decl, it
6612 must be a promoted value. We return a SUBREG of the wanted mode,
6613 but mark it so that we know that it was already extended. */
6615 if (GET_CODE (DECL_RTL (exp)) == REG
6616 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6618 /* Get the signedness used for this variable. Ensure we get the
6619 same mode we got when the variable was declared. */
6620 if (GET_MODE (DECL_RTL (exp))
6621 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6622 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6623 abort ();
6625 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6626 SUBREG_PROMOTED_VAR_P (temp) = 1;
6627 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6628 return temp;
6631 return DECL_RTL (exp);
6633 case INTEGER_CST:
6634 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6635 TREE_INT_CST_HIGH (exp), mode);
6637 /* ??? If overflow is set, fold will have done an incomplete job,
6638 which can result in (plus xx (const_int 0)), which can get
6639 simplified by validate_replace_rtx during virtual register
6640 instantiation, which can result in unrecognizable insns.
6641 Avoid this by forcing all overflows into registers. */
6642 if (TREE_CONSTANT_OVERFLOW (exp)
6643 && modifier != EXPAND_INITIALIZER)
6644 temp = force_reg (mode, temp);
6646 return temp;
6648 case VECTOR_CST:
6649 return const_vector_from_tree (exp);
6651 case CONST_DECL:
6652 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6654 case REAL_CST:
6655 /* If optimized, generate immediate CONST_DOUBLE
6656 which will be turned into memory by reload if necessary.
6658 We used to force a register so that loop.c could see it. But
6659 this does not allow gen_* patterns to perform optimizations with
6660 the constants. It also produces two insns in cases like "x = 1.0;".
6661 On most machines, floating-point constants are not permitted in
6662 many insns, so we'd end up copying it to a register in any case.
6664 Now, we do the copying in expand_binop, if appropriate. */
6665 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6666 TYPE_MODE (TREE_TYPE (exp)));
6668 case COMPLEX_CST:
6669 /* Handle evaluating a complex constant in a CONCAT target. */
6670 if (original_target && GET_CODE (original_target) == CONCAT)
6672 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6673 rtx rtarg, itarg;
6675 rtarg = XEXP (original_target, 0);
6676 itarg = XEXP (original_target, 1);
6678 /* Move the real and imaginary parts separately. */
6679 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6680 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6682 if (op0 != rtarg)
6683 emit_move_insn (rtarg, op0);
6684 if (op1 != itarg)
6685 emit_move_insn (itarg, op1);
6687 return original_target;
6690 /* ... fall through ... */
6692 case STRING_CST:
6693 temp = output_constant_def (exp, 1);
6695 /* temp contains a constant address.
6696 On RISC machines where a constant address isn't valid,
6697 make some insns to get that address into a register. */
6698 if (modifier != EXPAND_CONST_ADDRESS
6699 && modifier != EXPAND_INITIALIZER
6700 && modifier != EXPAND_SUM
6701 && (! memory_address_p (mode, XEXP (temp, 0))
6702 || flag_force_addr))
6703 return replace_equiv_address (temp,
6704 copy_rtx (XEXP (temp, 0)));
6705 return temp;
6707 case SAVE_EXPR:
6708 context = decl_function_context (exp);
6710 /* If this SAVE_EXPR was at global context, assume we are an
6711 initialization function and move it into our context. */
6712 if (context == 0)
6713 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6715 if (context == current_function_decl)
6716 context = 0;
6718 /* If this is non-local, handle it. */
6719 if (context)
6721 /* The following call just exists to abort if the context is
6722 not of a containing function. */
6723 find_function_data (context);
6725 temp = SAVE_EXPR_RTL (exp);
6726 if (temp && GET_CODE (temp) == REG)
6728 put_var_into_stack (exp, /*rescan=*/true);
6729 temp = SAVE_EXPR_RTL (exp);
6731 if (temp == 0 || GET_CODE (temp) != MEM)
6732 abort ();
6733 return
6734 replace_equiv_address (temp,
6735 fix_lexical_addr (XEXP (temp, 0), exp));
6737 if (SAVE_EXPR_RTL (exp) == 0)
6739 if (mode == VOIDmode)
6740 temp = const0_rtx;
6741 else
6742 temp = assign_temp (build_qualified_type (type,
6743 (TYPE_QUALS (type)
6744 | TYPE_QUAL_CONST)),
6745 3, 0, 0);
6747 SAVE_EXPR_RTL (exp) = temp;
6748 if (!optimize && GET_CODE (temp) == REG)
6749 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6750 save_expr_regs);
6752 /* If the mode of TEMP does not match that of the expression, it
6753 must be a promoted value. We pass store_expr a SUBREG of the
6754 wanted mode but mark it so that we know that it was already
6755 extended. */
6757 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6759 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6760 promote_mode (type, mode, &unsignedp, 0);
6761 SUBREG_PROMOTED_VAR_P (temp) = 1;
6762 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6765 if (temp == const0_rtx)
6766 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6767 else
6768 store_expr (TREE_OPERAND (exp, 0), temp,
6769 modifier == EXPAND_STACK_PARM ? 2 : 0);
6771 TREE_USED (exp) = 1;
6774 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6775 must be a promoted value. We return a SUBREG of the wanted mode,
6776 but mark it so that we know that it was already extended. */
6778 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6779 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6781 /* Compute the signedness and make the proper SUBREG. */
6782 promote_mode (type, mode, &unsignedp, 0);
6783 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6784 SUBREG_PROMOTED_VAR_P (temp) = 1;
6785 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6786 return temp;
6789 return SAVE_EXPR_RTL (exp);
6791 case UNSAVE_EXPR:
6793 rtx temp;
6794 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6795 TREE_OPERAND (exp, 0)
6796 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6797 return temp;
6800 case GOTO_EXPR:
6801 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6802 expand_goto (TREE_OPERAND (exp, 0));
6803 else
6804 expand_computed_goto (TREE_OPERAND (exp, 0));
6805 return const0_rtx;
6807 case EXIT_EXPR:
6808 expand_exit_loop_if_false (NULL,
6809 invert_truthvalue (TREE_OPERAND (exp, 0)));
6810 return const0_rtx;
6812 case LABELED_BLOCK_EXPR:
6813 if (LABELED_BLOCK_BODY (exp))
6814 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6815 /* Should perhaps use expand_label, but this is simpler and safer. */
6816 do_pending_stack_adjust ();
6817 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6818 return const0_rtx;
6820 case EXIT_BLOCK_EXPR:
6821 if (EXIT_BLOCK_RETURN (exp))
6822 sorry ("returned value in block_exit_expr");
6823 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6824 return const0_rtx;
6826 case LOOP_EXPR:
6827 push_temp_slots ();
6828 expand_start_loop (1);
6829 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6830 expand_end_loop ();
6831 pop_temp_slots ();
6833 return const0_rtx;
6835 case BIND_EXPR:
6837 tree block = BIND_EXPR_BLOCK (exp);
6838 int mark_ends;
6840 if (TREE_CODE (BIND_EXPR_BODY (exp)) != RTL_EXPR)
6842 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6843 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6844 mark_ends = (block != NULL_TREE);
6845 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6847 else
6849 /* If we're not in functions-as-trees mode, we've already emitted
6850 those notes into our RTL_EXPR, so we just want to splice our BLOCK
6851 into the enclosing one. */
6852 mark_ends = 0;
6854 /* Need to open a binding contour here because
6855 if there are any cleanups they must be contained here. */
6856 expand_start_bindings_and_block (2, NULL_TREE);
6858 /* Mark the corresponding BLOCK for output in its proper place. */
6859 if (block)
6861 if (TREE_USED (block))
6862 abort ();
6863 lang_hooks.decls.insert_block (block);
6867 /* If VARS have not yet been expanded, expand them now. */
6868 expand_vars (BIND_EXPR_VARS (exp));
6870 /* TARGET was clobbered early in this function. The correct
6871 indicator of whether or not we need the value of this
6872 expression is the IGNORE variable. */
6873 temp = expand_expr (BIND_EXPR_BODY (exp),
6874 ignore ? const0_rtx : target,
6875 tmode, modifier);
6877 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
6879 return temp;
6882 case RTL_EXPR:
6883 if (RTL_EXPR_SEQUENCE (exp))
6885 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6886 abort ();
6887 emit_insn (RTL_EXPR_SEQUENCE (exp));
6888 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6890 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6891 free_temps_for_rtl_expr (exp);
6892 if (alt_rtl)
6893 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6894 return RTL_EXPR_RTL (exp);
6896 case CONSTRUCTOR:
6897 /* If we don't need the result, just ensure we evaluate any
6898 subexpressions. */
6899 if (ignore)
6901 tree elt;
6903 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6904 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6906 return const0_rtx;
6909 /* All elts simple constants => refer to a constant in memory. But
6910 if this is a non-BLKmode mode, let it store a field at a time
6911 since that should make a CONST_INT or CONST_DOUBLE when we
6912 fold. Likewise, if we have a target we can use, it is best to
6913 store directly into the target unless the type is large enough
6914 that memcpy will be used. If we are making an initializer and
6915 all operands are constant, put it in memory as well.
6917 FIXME: Avoid trying to fill vector constructors piece-meal.
6918 Output them with output_constant_def below unless we're sure
6919 they're zeros. This should go away when vector initializers
6920 are treated like VECTOR_CST instead of arrays.
6922 else if ((TREE_STATIC (exp)
6923 && ((mode == BLKmode
6924 && ! (target != 0 && safe_from_p (target, exp, 1)))
6925 || TREE_ADDRESSABLE (exp)
6926 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6927 && (! MOVE_BY_PIECES_P
6928 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6929 TYPE_ALIGN (type)))
6930 && ! mostly_zeros_p (exp))))
6931 || ((modifier == EXPAND_INITIALIZER
6932 || modifier == EXPAND_CONST_ADDRESS)
6933 && TREE_CONSTANT (exp)))
6935 rtx constructor = output_constant_def (exp, 1);
6937 if (modifier != EXPAND_CONST_ADDRESS
6938 && modifier != EXPAND_INITIALIZER
6939 && modifier != EXPAND_SUM)
6940 constructor = validize_mem (constructor);
6942 return constructor;
6944 else
6946 /* Handle calls that pass values in multiple non-contiguous
6947 locations. The Irix 6 ABI has examples of this. */
6948 if (target == 0 || ! safe_from_p (target, exp, 1)
6949 || GET_CODE (target) == PARALLEL
6950 || modifier == EXPAND_STACK_PARM)
6951 target
6952 = assign_temp (build_qualified_type (type,
6953 (TYPE_QUALS (type)
6954 | (TREE_READONLY (exp)
6955 * TYPE_QUAL_CONST))),
6956 0, TREE_ADDRESSABLE (exp), 1);
6958 store_constructor (exp, target, 0, int_expr_size (exp));
6959 return target;
6962 case INDIRECT_REF:
6964 tree exp1 = TREE_OPERAND (exp, 0);
6966 if (modifier != EXPAND_WRITE)
6968 tree t;
6970 t = fold_read_from_constant_string (exp);
6971 if (t)
6972 return expand_expr (t, target, tmode, modifier);
6975 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6976 op0 = memory_address (mode, op0);
6977 temp = gen_rtx_MEM (mode, op0);
6978 set_mem_attributes (temp, exp, 0);
6980 /* If we are writing to this object and its type is a record with
6981 readonly fields, we must mark it as readonly so it will
6982 conflict with readonly references to those fields. */
6983 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6984 RTX_UNCHANGING_P (temp) = 1;
6986 return temp;
6989 case ARRAY_REF:
6991 #ifdef ENABLE_CHECKING
6992 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6993 abort ();
6994 #endif
6997 tree array = TREE_OPERAND (exp, 0);
6998 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6999 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7000 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7001 HOST_WIDE_INT i;
7003 /* Optimize the special-case of a zero lower bound.
7005 We convert the low_bound to sizetype to avoid some problems
7006 with constant folding. (E.g. suppose the lower bound is 1,
7007 and its mode is QI. Without the conversion, (ARRAY
7008 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7009 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7011 if (! integer_zerop (low_bound))
7012 index = size_diffop (index, convert (sizetype, low_bound));
7014 /* Fold an expression like: "foo"[2].
7015 This is not done in fold so it won't happen inside &.
7016 Don't fold if this is for wide characters since it's too
7017 difficult to do correctly and this is a very rare case. */
7019 if (modifier != EXPAND_CONST_ADDRESS
7020 && modifier != EXPAND_INITIALIZER
7021 && modifier != EXPAND_MEMORY)
7023 tree t = fold_read_from_constant_string (exp);
7025 if (t)
7026 return expand_expr (t, target, tmode, modifier);
7029 /* If this is a constant index into a constant array,
7030 just get the value from the array. Handle both the cases when
7031 we have an explicit constructor and when our operand is a variable
7032 that was declared const. */
7034 if (modifier != EXPAND_CONST_ADDRESS
7035 && modifier != EXPAND_INITIALIZER
7036 && modifier != EXPAND_MEMORY
7037 && TREE_CODE (array) == CONSTRUCTOR
7038 && ! TREE_SIDE_EFFECTS (array)
7039 && TREE_CODE (index) == INTEGER_CST
7040 && 0 > compare_tree_int (index,
7041 list_length (CONSTRUCTOR_ELTS
7042 (TREE_OPERAND (exp, 0)))))
7044 tree elem;
7046 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7047 i = TREE_INT_CST_LOW (index);
7048 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7051 if (elem)
7052 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7053 modifier);
7056 else if (optimize >= 1
7057 && modifier != EXPAND_CONST_ADDRESS
7058 && modifier != EXPAND_INITIALIZER
7059 && modifier != EXPAND_MEMORY
7060 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7061 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7062 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7063 && targetm.binds_local_p (array))
7065 if (TREE_CODE (index) == INTEGER_CST)
7067 tree init = DECL_INITIAL (array);
7069 if (TREE_CODE (init) == CONSTRUCTOR)
7071 tree elem;
7073 for (elem = CONSTRUCTOR_ELTS (init);
7074 (elem
7075 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7076 elem = TREE_CHAIN (elem))
7079 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7080 return expand_expr (fold (TREE_VALUE (elem)), target,
7081 tmode, modifier);
7083 else if (TREE_CODE (init) == STRING_CST
7084 && 0 > compare_tree_int (index,
7085 TREE_STRING_LENGTH (init)))
7087 tree type = TREE_TYPE (TREE_TYPE (init));
7088 enum machine_mode mode = TYPE_MODE (type);
7090 if (GET_MODE_CLASS (mode) == MODE_INT
7091 && GET_MODE_SIZE (mode) == 1)
7092 return gen_int_mode (TREE_STRING_POINTER (init)
7093 [TREE_INT_CST_LOW (index)], mode);
7098 goto normal_inner_ref;
7100 case COMPONENT_REF:
7101 /* If the operand is a CONSTRUCTOR, we can just extract the
7102 appropriate field if it is present. */
7103 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7105 tree elt;
7107 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7108 elt = TREE_CHAIN (elt))
7109 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7110 /* We can normally use the value of the field in the
7111 CONSTRUCTOR. However, if this is a bitfield in
7112 an integral mode that we can fit in a HOST_WIDE_INT,
7113 we must mask only the number of bits in the bitfield,
7114 since this is done implicitly by the constructor. If
7115 the bitfield does not meet either of those conditions,
7116 we can't do this optimization. */
7117 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7118 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7119 == MODE_INT)
7120 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7121 <= HOST_BITS_PER_WIDE_INT))))
7123 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7124 && modifier == EXPAND_STACK_PARM)
7125 target = 0;
7126 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7127 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7129 HOST_WIDE_INT bitsize
7130 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7131 enum machine_mode imode
7132 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7134 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7136 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7137 op0 = expand_and (imode, op0, op1, target);
7139 else
7141 tree count
7142 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7145 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7146 target, 0);
7147 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7148 target, 0);
7152 return op0;
7155 goto normal_inner_ref;
7157 case BIT_FIELD_REF:
7158 case ARRAY_RANGE_REF:
7159 normal_inner_ref:
7161 enum machine_mode mode1;
7162 HOST_WIDE_INT bitsize, bitpos;
7163 tree offset;
7164 int volatilep = 0;
7165 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7166 &mode1, &unsignedp, &volatilep);
7167 rtx orig_op0;
7169 /* If we got back the original object, something is wrong. Perhaps
7170 we are evaluating an expression too early. In any event, don't
7171 infinitely recurse. */
7172 if (tem == exp)
7173 abort ();
7175 /* If TEM's type is a union of variable size, pass TARGET to the inner
7176 computation, since it will need a temporary and TARGET is known
7177 to suffice. This occurs in unchecked conversion in Ada. */
7179 orig_op0 = op0
7180 = expand_expr (tem,
7181 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7182 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7183 != INTEGER_CST)
7184 && modifier != EXPAND_STACK_PARM
7185 ? target : NULL_RTX),
7186 VOIDmode,
7187 (modifier == EXPAND_INITIALIZER
7188 || modifier == EXPAND_CONST_ADDRESS
7189 || modifier == EXPAND_STACK_PARM)
7190 ? modifier : EXPAND_NORMAL);
7192 /* If this is a constant, put it into a register if it is a
7193 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7194 if (CONSTANT_P (op0))
7196 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7197 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7198 && offset == 0)
7199 op0 = force_reg (mode, op0);
7200 else
7201 op0 = validize_mem (force_const_mem (mode, op0));
7204 /* Otherwise, if this object is not in memory and we either have an
7205 offset or a BLKmode result, put it there. This case can't occur in
7206 C, but can in Ada if we have unchecked conversion of an expression
7207 from a scalar type to an array or record type or for an
7208 ARRAY_RANGE_REF whose type is BLKmode. */
7209 else if (GET_CODE (op0) != MEM
7210 && (offset != 0
7211 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7213 /* If the operand is a SAVE_EXPR, we can deal with this by
7214 forcing the SAVE_EXPR into memory. */
7215 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7217 put_var_into_stack (TREE_OPERAND (exp, 0),
7218 /*rescan=*/true);
7219 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7221 else
7223 tree nt
7224 = build_qualified_type (TREE_TYPE (tem),
7225 (TYPE_QUALS (TREE_TYPE (tem))
7226 | TYPE_QUAL_CONST));
7227 rtx memloc = assign_temp (nt, 1, 1, 1);
7229 emit_move_insn (memloc, op0);
7230 op0 = memloc;
7234 if (offset != 0)
7236 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7237 EXPAND_SUM);
7239 if (GET_CODE (op0) != MEM)
7240 abort ();
7242 #ifdef POINTERS_EXTEND_UNSIGNED
7243 if (GET_MODE (offset_rtx) != Pmode)
7244 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7245 #else
7246 if (GET_MODE (offset_rtx) != ptr_mode)
7247 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7248 #endif
7250 if (GET_MODE (op0) == BLKmode
7251 /* A constant address in OP0 can have VOIDmode, we must
7252 not try to call force_reg in that case. */
7253 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7254 && bitsize != 0
7255 && (bitpos % bitsize) == 0
7256 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7257 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7259 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7260 bitpos = 0;
7263 op0 = offset_address (op0, offset_rtx,
7264 highest_pow2_factor (offset));
7267 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7268 record its alignment as BIGGEST_ALIGNMENT. */
7269 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7270 && is_aligning_offset (offset, tem))
7271 set_mem_align (op0, BIGGEST_ALIGNMENT);
7273 /* Don't forget about volatility even if this is a bitfield. */
7274 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7276 if (op0 == orig_op0)
7277 op0 = copy_rtx (op0);
7279 MEM_VOLATILE_P (op0) = 1;
7282 /* The following code doesn't handle CONCAT.
7283 Assume only bitpos == 0 can be used for CONCAT, due to
7284 one-element arrays having the same mode as their element. */
7285 if (GET_CODE (op0) == CONCAT)
7287 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7288 abort ();
7289 return op0;
7292 /* In cases where an aligned union has an unaligned object
7293 as a field, we might be extracting a BLKmode value from
7294 an integer-mode (e.g., SImode) object. Handle this case
7295 by doing the extract into an object as wide as the field
7296 (which we know to be the width of a basic mode), then
7297 storing into memory, and changing the mode to BLKmode. */
7298 if (mode1 == VOIDmode
7299 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7300 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7301 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7302 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7303 && modifier != EXPAND_CONST_ADDRESS
7304 && modifier != EXPAND_INITIALIZER)
7305 /* If the field isn't aligned enough to fetch as a memref,
7306 fetch it as a bit field. */
7307 || (mode1 != BLKmode
7308 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7309 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7310 || (GET_CODE (op0) == MEM
7311 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7312 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7313 && ((modifier == EXPAND_CONST_ADDRESS
7314 || modifier == EXPAND_INITIALIZER)
7315 ? STRICT_ALIGNMENT
7316 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7317 || (bitpos % BITS_PER_UNIT != 0)))
7318 /* If the type and the field are a constant size and the
7319 size of the type isn't the same size as the bitfield,
7320 we must use bitfield operations. */
7321 || (bitsize >= 0
7322 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7323 == INTEGER_CST)
7324 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7325 bitsize)))
7327 enum machine_mode ext_mode = mode;
7329 if (ext_mode == BLKmode
7330 && ! (target != 0 && GET_CODE (op0) == MEM
7331 && GET_CODE (target) == MEM
7332 && bitpos % BITS_PER_UNIT == 0))
7333 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7335 if (ext_mode == BLKmode)
7337 if (target == 0)
7338 target = assign_temp (type, 0, 1, 1);
7340 if (bitsize == 0)
7341 return target;
7343 /* In this case, BITPOS must start at a byte boundary and
7344 TARGET, if specified, must be a MEM. */
7345 if (GET_CODE (op0) != MEM
7346 || (target != 0 && GET_CODE (target) != MEM)
7347 || bitpos % BITS_PER_UNIT != 0)
7348 abort ();
7350 emit_block_move (target,
7351 adjust_address (op0, VOIDmode,
7352 bitpos / BITS_PER_UNIT),
7353 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7354 / BITS_PER_UNIT),
7355 (modifier == EXPAND_STACK_PARM
7356 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7358 return target;
7361 op0 = validize_mem (op0);
7363 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7364 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7366 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7367 (modifier == EXPAND_STACK_PARM
7368 ? NULL_RTX : target),
7369 ext_mode, ext_mode,
7370 int_size_in_bytes (TREE_TYPE (tem)));
7372 /* If the result is a record type and BITSIZE is narrower than
7373 the mode of OP0, an integral mode, and this is a big endian
7374 machine, we must put the field into the high-order bits. */
7375 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7376 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7377 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7378 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7379 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7380 - bitsize),
7381 op0, 1);
7383 /* If the result type is BLKmode, store the data into a temporary
7384 of the appropriate type, but with the mode corresponding to the
7385 mode for the data we have (op0's mode). It's tempting to make
7386 this a constant type, since we know it's only being stored once,
7387 but that can cause problems if we are taking the address of this
7388 COMPONENT_REF because the MEM of any reference via that address
7389 will have flags corresponding to the type, which will not
7390 necessarily be constant. */
7391 if (mode == BLKmode)
7393 rtx new
7394 = assign_stack_temp_for_type
7395 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7397 emit_move_insn (new, op0);
7398 op0 = copy_rtx (new);
7399 PUT_MODE (op0, BLKmode);
7400 set_mem_attributes (op0, exp, 1);
7403 return op0;
7406 /* If the result is BLKmode, use that to access the object
7407 now as well. */
7408 if (mode == BLKmode)
7409 mode1 = BLKmode;
7411 /* Get a reference to just this component. */
7412 if (modifier == EXPAND_CONST_ADDRESS
7413 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7414 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7415 else
7416 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7418 if (op0 == orig_op0)
7419 op0 = copy_rtx (op0);
7421 set_mem_attributes (op0, exp, 0);
7422 if (GET_CODE (XEXP (op0, 0)) == REG)
7423 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7425 MEM_VOLATILE_P (op0) |= volatilep;
7426 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7427 || modifier == EXPAND_CONST_ADDRESS
7428 || modifier == EXPAND_INITIALIZER)
7429 return op0;
7430 else if (target == 0)
7431 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7433 convert_move (target, op0, unsignedp);
7434 return target;
7437 case VTABLE_REF:
7439 rtx insn, before = get_last_insn (), vtbl_ref;
7441 /* Evaluate the interior expression. */
7442 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7443 tmode, modifier);
7445 /* Get or create an instruction off which to hang a note. */
7446 if (REG_P (subtarget))
7448 target = subtarget;
7449 insn = get_last_insn ();
7450 if (insn == before)
7451 abort ();
7452 if (! INSN_P (insn))
7453 insn = prev_nonnote_insn (insn);
7455 else
7457 target = gen_reg_rtx (GET_MODE (subtarget));
7458 insn = emit_move_insn (target, subtarget);
7461 /* Collect the data for the note. */
7462 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7463 vtbl_ref = plus_constant (vtbl_ref,
7464 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7465 /* Discard the initial CONST that was added. */
7466 vtbl_ref = XEXP (vtbl_ref, 0);
7468 REG_NOTES (insn)
7469 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7471 return target;
7474 /* Intended for a reference to a buffer of a file-object in Pascal.
7475 But it's not certain that a special tree code will really be
7476 necessary for these. INDIRECT_REF might work for them. */
7477 case BUFFER_REF:
7478 abort ();
7480 case IN_EXPR:
7482 /* Pascal set IN expression.
7484 Algorithm:
7485 rlo = set_low - (set_low%bits_per_word);
7486 the_word = set [ (index - rlo)/bits_per_word ];
7487 bit_index = index % bits_per_word;
7488 bitmask = 1 << bit_index;
7489 return !!(the_word & bitmask); */
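	/* Worked example, illustrative values only (not from the source):
	   for a set declared over 3..20 and index = 10, rlo rounds set_low
	   down to the start of its word, the_word is the word of the set
	   holding bit (10 - rlo), and the result is bit (10 % bits_per_word)
	   of that word. */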
7491 tree set = TREE_OPERAND (exp, 0);
7492 tree index = TREE_OPERAND (exp, 1);
7493 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7494 tree set_type = TREE_TYPE (set);
7495 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7496 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7497 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7498 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7499 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7500 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7501 rtx setaddr = XEXP (setval, 0);
7502 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7503 rtx rlow;
7504 rtx diff, quo, rem, addr, bit, result;
7506 /* If domain is empty, answer is no. Likewise if index is constant
7507 and out of bounds. */
7508 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7509 && TREE_CODE (set_low_bound) == INTEGER_CST
7510 && tree_int_cst_lt (set_high_bound, set_low_bound))
7511 || (TREE_CODE (index) == INTEGER_CST
7512 && TREE_CODE (set_low_bound) == INTEGER_CST
7513 && tree_int_cst_lt (index, set_low_bound))
7514 || (TREE_CODE (set_high_bound) == INTEGER_CST
7515 && TREE_CODE (index) == INTEGER_CST
7516 && tree_int_cst_lt (set_high_bound, index))))
7517 return const0_rtx;
7519 if (target == 0)
7520 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7522 /* If we get here, we have to generate the code for both cases
7523 (in range and out of range). */
7525 op0 = gen_label_rtx ();
7526 op1 = gen_label_rtx ();
7528 if (! (GET_CODE (index_val) == CONST_INT
7529 && GET_CODE (lo_r) == CONST_INT))
7530 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7531 GET_MODE (index_val), iunsignedp, op1);
7533 if (! (GET_CODE (index_val) == CONST_INT
7534 && GET_CODE (hi_r) == CONST_INT))
7535 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7536 GET_MODE (index_val), iunsignedp, op1);
7538 /* Calculate the element number of bit zero in the first word
7539 of the set. */
7540 if (GET_CODE (lo_r) == CONST_INT)
7541 rlow = GEN_INT (INTVAL (lo_r)
7542 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7543 else
7544 rlow = expand_binop (index_mode, and_optab, lo_r,
7545 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7546 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7548 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7549 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7551 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7552 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7553 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7554 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7556 addr = memory_address (byte_mode,
7557 expand_binop (index_mode, add_optab, diff,
7558 setaddr, NULL_RTX, iunsignedp,
7559 OPTAB_LIB_WIDEN));
7561 /* Extract the bit we want to examine. */
7562 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7563 gen_rtx_MEM (byte_mode, addr),
7564 make_tree (TREE_TYPE (index), rem),
7565 NULL_RTX, 1);
7566 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7567 GET_MODE (target) == byte_mode ? target : 0,
7568 1, OPTAB_LIB_WIDEN);
7570 if (result != target)
7571 convert_move (target, result, 1);
7573 /* Output the code to handle the out-of-range case. */
7574 emit_jump (op0);
7575 emit_label (op1);
7576 emit_move_insn (target, const0_rtx);
7577 emit_label (op0);
7578 return target;
7581 case WITH_CLEANUP_EXPR:
7582 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7584 WITH_CLEANUP_EXPR_RTL (exp)
7585 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7586 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7587 CLEANUP_EH_ONLY (exp));
7589 /* That's it for this cleanup. */
7590 TREE_OPERAND (exp, 1) = 0;
7592 return WITH_CLEANUP_EXPR_RTL (exp);
7594 case CLEANUP_POINT_EXPR:
7596 /* Start a new binding layer that will keep track of all cleanup
7597 actions to be performed. */
7598 expand_start_bindings (2);
7600 target_temp_slot_level = temp_slot_level;
7602 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7603 /* If we're going to use this value, load it up now. */
7604 if (! ignore)
7605 op0 = force_not_mem (op0);
7606 preserve_temp_slots (op0);
7607 expand_end_bindings (NULL_TREE, 0, 0);
7609 return op0;
7611 case CALL_EXPR:
7612 /* Check for a built-in function. */
7613 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7614 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7615 == FUNCTION_DECL)
7616 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7618 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7619 == BUILT_IN_FRONTEND)
7620 return lang_hooks.expand_expr (exp, original_target,
7621 tmode, modifier,
7622 alt_rtl);
7623 else
7624 return expand_builtin (exp, target, subtarget, tmode, ignore);
7627 return expand_call (exp, target, ignore);
7629 case NON_LVALUE_EXPR:
7630 case NOP_EXPR:
7631 case CONVERT_EXPR:
7632 case REFERENCE_EXPR:
7633 if (TREE_OPERAND (exp, 0) == error_mark_node)
7634 return const0_rtx;
7636 if (TREE_CODE (type) == UNION_TYPE)
7638 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7640 /* If both input and output are BLKmode, this conversion isn't doing
7641 anything except possibly changing memory attributes. */
7642 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7644 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7645 modifier);
7647 result = copy_rtx (result);
7648 set_mem_attributes (result, exp, 0);
7649 return result;
7652 if (target == 0)
7654 if (TYPE_MODE (type) != BLKmode)
7655 target = gen_reg_rtx (TYPE_MODE (type));
7656 else
7657 target = assign_temp (type, 0, 1, 1);
7660 if (GET_CODE (target) == MEM)
7661 /* Store data into beginning of memory target. */
7662 store_expr (TREE_OPERAND (exp, 0),
7663 adjust_address (target, TYPE_MODE (valtype), 0),
7664 modifier == EXPAND_STACK_PARM ? 2 : 0);
7666 else if (GET_CODE (target) == REG)
7667 /* Store this field into a union of the proper type. */
7668 store_field (target,
7669 MIN ((int_size_in_bytes (TREE_TYPE
7670 (TREE_OPERAND (exp, 0)))
7671 * BITS_PER_UNIT),
7672 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7673 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7674 VOIDmode, 0, type, 0);
7675 else
7676 abort ();
7678 /* Return the entire union. */
7679 return target;
7682 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7684 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7685 modifier);
7687 /* If the signedness of the conversion differs and OP0 is
7688 a promoted SUBREG, clear that indication since we now
7689 have to do the proper extension. */
7690 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7691 && GET_CODE (op0) == SUBREG)
7692 SUBREG_PROMOTED_VAR_P (op0) = 0;
7694 return op0;
7697 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7698 if (GET_MODE (op0) == mode)
7699 return op0;
7701 /* If OP0 is a constant, just convert it into the proper mode. */
7702 if (CONSTANT_P (op0))
7704 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7705 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7707 if (modifier == EXPAND_INITIALIZER)
7708 return simplify_gen_subreg (mode, op0, inner_mode,
7709 subreg_lowpart_offset (mode,
7710 inner_mode));
7711 else
7712 return convert_modes (mode, inner_mode, op0,
7713 TYPE_UNSIGNED (inner_type));
7716 if (modifier == EXPAND_INITIALIZER)
7717 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7719 if (target == 0)
7720 return
7721 convert_to_mode (mode, op0,
7722 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7723 else
7724 convert_move (target, op0,
7725 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7726 return target;
7728 case VIEW_CONVERT_EXPR:
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7731 /* If the input and output modes are both the same, we are done.
7732 Otherwise, if neither mode is BLKmode and both are integral and within
7733 a word, we can use gen_lowpart. If neither is true, make sure the
7734 operand is in memory and convert the MEM to the new mode. */
7735 if (TYPE_MODE (type) == GET_MODE (op0))
7737 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7738 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7739 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7740 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7741 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7742 op0 = gen_lowpart (TYPE_MODE (type), op0);
7743 else if (GET_CODE (op0) != MEM)
7745 /* If the operand is not a MEM, force it into memory. Since we
7746 are going to be changing the mode of the MEM, don't call
7747 force_const_mem for constants because we don't allow pool
7748 constants to change mode. */
7749 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7751 if (TREE_ADDRESSABLE (exp))
7752 abort ();
7754 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7755 target
7756 = assign_stack_temp_for_type
7757 (TYPE_MODE (inner_type),
7758 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7760 emit_move_insn (target, op0);
7761 op0 = target;
7764 /* At this point, OP0 is in the correct mode. If the output type is such
7765 that the operand is known to be aligned, indicate that it is.
7766 Otherwise, we need only be concerned about alignment for non-BLKmode
7767 results. */
7768 if (GET_CODE (op0) == MEM)
7770 op0 = copy_rtx (op0);
7772 if (TYPE_ALIGN_OK (type))
7773 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7774 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7775 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7777 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7778 HOST_WIDE_INT temp_size
7779 = MAX (int_size_in_bytes (inner_type),
7780 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7781 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7782 temp_size, 0, type);
7783 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7785 if (TREE_ADDRESSABLE (exp))
7786 abort ();
7788 if (GET_MODE (op0) == BLKmode)
7789 emit_block_move (new_with_op0_mode, op0,
7790 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7791 (modifier == EXPAND_STACK_PARM
7792 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7793 else
7794 emit_move_insn (new_with_op0_mode, op0);
7796 op0 = new;
7799 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7802 return op0;
7804 case PLUS_EXPR:
7805 this_optab = ! unsignedp && flag_trapv
7806 && (GET_MODE_CLASS (mode) == MODE_INT)
7807 ? addv_optab : add_optab;
7809 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7810 something else, make sure we add the register to the constant and
7811 then to the other thing. This case can occur during strength
7812 reduction and doing it this way will produce better code if the
7813 frame pointer or argument pointer is eliminated.
7815 fold-const.c will ensure that the constant is always in the inner
7816 PLUS_EXPR, so the only case we need to do anything about is if
7817 sp, ap, or fp is our second argument, in which case we must swap
7818 the innermost first argument and our second argument. */
7820 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7821 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7822 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7823 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7824 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7825 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7827 tree t = TREE_OPERAND (exp, 1);
7829 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7830 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7833 /* If the result is to be ptr_mode and we are adding an integer to
7834 something, we might be forming a constant. So try to use
7835 plus_constant. If it produces a sum and we can't accept it,
7836 use force_operand. This allows P = &ARR[const] to generate
7837 efficient code on machines where a SYMBOL_REF is not a valid
7838 address.
7840 If this is an EXPAND_SUM call, always return the sum. */
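	/* Illustrative sketch under assumed sizes (not from the source): for
	   `static int arr[10]; int *p = &arr[3];' with 4-byte int, the offset
	   folds into the address as (plus (symbol_ref arr) (const_int 12))
	   instead of being computed with a separate add instruction. */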
7841 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7842 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7844 if (modifier == EXPAND_STACK_PARM)
7845 target = 0;
7846 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7847 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7848 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7850 rtx constant_part;
7852 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7853 EXPAND_SUM);
7854 /* Use immed_double_const to ensure that the constant is
7855 truncated according to the mode of OP1, then sign extended
7856 to a HOST_WIDE_INT. Using the constant directly can result
7857 in non-canonical RTL in a 64x32 cross compile. */
7858 constant_part
7859 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7860 (HOST_WIDE_INT) 0,
7861 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7862 op1 = plus_constant (op1, INTVAL (constant_part));
7863 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7864 op1 = force_operand (op1, target);
7865 return op1;
7868 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7869 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7870 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7872 rtx constant_part;
7874 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7875 (modifier == EXPAND_INITIALIZER
7876 ? EXPAND_INITIALIZER : EXPAND_SUM));
7877 if (! CONSTANT_P (op0))
7879 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7880 VOIDmode, modifier);
7881 /* Return a PLUS if modifier says it's OK. */
7882 if (modifier == EXPAND_SUM
7883 || modifier == EXPAND_INITIALIZER)
7884 return simplify_gen_binary (PLUS, mode, op0, op1);
7885 goto binop2;
7887 /* Use immed_double_const to ensure that the constant is
7888 truncated according to the mode of OP1, then sign extended
7889 to a HOST_WIDE_INT. Using the constant directly can result
7890 in non-canonical RTL in a 64x32 cross compile. */
7891 constant_part
7892 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7893 (HOST_WIDE_INT) 0,
7894 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7895 op0 = plus_constant (op0, INTVAL (constant_part));
7896 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7897 op0 = force_operand (op0, target);
7898 return op0;
7902 /* No sense saving up arithmetic to be done
7903 if it's all in the wrong mode to form part of an address.
7904 And force_operand won't know whether to sign-extend or
7905 zero-extend. */
7906 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7907 || mode != ptr_mode)
7909 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7910 subtarget, &op0, &op1, 0);
7911 if (op0 == const0_rtx)
7912 return op1;
7913 if (op1 == const0_rtx)
7914 return op0;
7915 goto binop2;
7918 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7919 subtarget, &op0, &op1, modifier);
7920 return simplify_gen_binary (PLUS, mode, op0, op1);
7922 case MINUS_EXPR:
7923 /* For initializers, we are allowed to return a MINUS of two
7924 symbolic constants. Here we handle all cases when both operands
7925 are constant. */
7926 /* Handle difference of two symbolic constants,
7927 for the sake of an initializer. */
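	/* Illustrative example, not from the original source: a GNU C
	   initializer built from label-address differences, e.g.
	   `static const int off = &&l2 - &&l1;' inside a function, reaches
	   this point with both operands really constant and is emitted as a
	   MINUS (or folded with plus_constant when op1 is a CONST_INT). */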
7928 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7929 && really_constant_p (TREE_OPERAND (exp, 0))
7930 && really_constant_p (TREE_OPERAND (exp, 1)))
7932 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7933 NULL_RTX, &op0, &op1, modifier);
7935 /* If the last operand is a CONST_INT, use plus_constant of
7936 the negated constant. Else make the MINUS. */
7937 if (GET_CODE (op1) == CONST_INT)
7938 return plus_constant (op0, - INTVAL (op1));
7939 else
7940 return gen_rtx_MINUS (mode, op0, op1);
7943 this_optab = ! unsignedp && flag_trapv
7944 && (GET_MODE_CLASS(mode) == MODE_INT)
7945 ? subv_optab : sub_optab;
7947 /* No sense saving up arithmetic to be done
7948 if it's all in the wrong mode to form part of an address.
7949 And force_operand won't know whether to sign-extend or
7950 zero-extend. */
7951 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7952 || mode != ptr_mode)
7953 goto binop;
7955 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7956 subtarget, &op0, &op1, modifier);
7958 /* Convert A - const to A + (-const). */
7959 if (GET_CODE (op1) == CONST_INT)
7961 op1 = negate_rtx (mode, op1);
7962 return simplify_gen_binary (PLUS, mode, op0, op1);
7965 goto binop2;
7967 case MULT_EXPR:
7968 /* If first operand is constant, swap them.
7969 Thus the following special case checks need only
7970 check the second operand. */
7971 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7973 tree t1 = TREE_OPERAND (exp, 0);
7974 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7975 TREE_OPERAND (exp, 1) = t1;
7978 /* Attempt to return something suitable for generating an
7979 indexed address, for machines that support that. */
7981 if (modifier == EXPAND_SUM && mode == ptr_mode
7982 && host_integerp (TREE_OPERAND (exp, 1), 0))
7984 tree exp1 = TREE_OPERAND (exp, 1);
7986 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7987 EXPAND_SUM);
7989 if (GET_CODE (op0) != REG)
7990 op0 = force_operand (op0, NULL_RTX);
7991 if (GET_CODE (op0) != REG)
7992 op0 = copy_to_mode_reg (mode, op0);
7994 return gen_rtx_MULT (mode, op0,
7995 gen_int_mode (tree_low_cst (exp1, 0),
7996 TYPE_MODE (TREE_TYPE (exp1))));
7999 if (modifier == EXPAND_STACK_PARM)
8000 target = 0;
8002 /* Check for multiplying things that have been extended
8003 from a narrower type. If this machine supports multiplying
8004 in that narrower type with a result in the desired type,
8005 do it that way, and avoid the explicit type-conversion. */
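	/* Illustrative case under assumed modes (not from the source): with
	   16-bit short and 32-bit int, `(int) s1 * (int) s2' where s1 and s2
	   are shorts can use a HImode-to-SImode widening multiply insn on the
	   narrow operands, if the target provides one, instead of extending
	   both operands to SImode first. */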
8006 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8007 && TREE_CODE (type) == INTEGER_TYPE
8008 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8009 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8010 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8011 && int_fits_type_p (TREE_OPERAND (exp, 1),
8012 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8013 /* Don't use a widening multiply if a shift will do. */
8014 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8015 > HOST_BITS_PER_WIDE_INT)
8016 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8018 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8019 && (TYPE_PRECISION (TREE_TYPE
8020 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8021 == TYPE_PRECISION (TREE_TYPE
8022 (TREE_OPERAND
8023 (TREE_OPERAND (exp, 0), 0))))
8024 /* If both operands are extended, they must either both
8025 be zero-extended or both be sign-extended. */
8026 && (TYPE_UNSIGNED (TREE_TYPE
8027 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8028 == TYPE_UNSIGNED (TREE_TYPE
8029 (TREE_OPERAND
8030 (TREE_OPERAND (exp, 0), 0)))))))
8032 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8033 enum machine_mode innermode = TYPE_MODE (op0type);
8034 bool zextend_p = TYPE_UNSIGNED (op0type);
8035 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8036 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8038 if (mode == GET_MODE_WIDER_MODE (innermode))
8040 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8042 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8043 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8044 TREE_OPERAND (exp, 1),
8045 NULL_RTX, &op0, &op1, 0);
8046 else
8047 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8048 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8049 NULL_RTX, &op0, &op1, 0);
8050 goto binop2;
8052 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8053 && innermode == word_mode)
8055 rtx htem, hipart;
8056 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8057 NULL_RTX, VOIDmode, 0);
8058 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8059 op1 = convert_modes (innermode, mode,
8060 expand_expr (TREE_OPERAND (exp, 1),
8061 NULL_RTX, VOIDmode, 0),
8062 unsignedp);
8063 else
8064 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8065 NULL_RTX, VOIDmode, 0);
8066 temp = expand_binop (mode, other_optab, op0, op1, target,
8067 unsignedp, OPTAB_LIB_WIDEN);
8068 hipart = gen_highpart (innermode, temp);
8069 htem = expand_mult_highpart_adjust (innermode, hipart,
8070 op0, op1, hipart,
8071 zextend_p);
8072 if (htem != hipart)
8073 emit_move_insn (hipart, htem);
8074 return temp;
8078 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8079 subtarget, &op0, &op1, 0);
8080 return expand_mult (mode, op0, op1, target, unsignedp);
8082 case TRUNC_DIV_EXPR:
8083 case FLOOR_DIV_EXPR:
8084 case CEIL_DIV_EXPR:
8085 case ROUND_DIV_EXPR:
8086 case EXACT_DIV_EXPR:
8087 if (modifier == EXPAND_STACK_PARM)
8088 target = 0;
8089 /* Possible optimization: compute the dividend with EXPAND_SUM
8090 then, if the divisor is constant, we can optimize the case
8091 where some terms of the dividend have coefficients divisible by it. */
8092 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8093 subtarget, &op0, &op1, 0);
8094 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8096 case RDIV_EXPR:
8097 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8098 saving an expensive divide. If not, combine will rebuild the original
8099 computation. */
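	/* Illustrative effect, not from the source: `x / d + y / d' becomes
	   `x * (1/d) + y * (1/d)', so CSE can share a single reciprocal and a
	   single divide; this is done only under -funsafe-math-optimizations
	   because it may change rounding. */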
8100 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8101 && TREE_CODE (type) == REAL_TYPE
8102 && !real_onep (TREE_OPERAND (exp, 0)))
8103 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8104 build (RDIV_EXPR, type,
8105 build_real (type, dconst1),
8106 TREE_OPERAND (exp, 1))),
8107 target, tmode, modifier);
8108 this_optab = sdiv_optab;
8109 goto binop;
8111 case TRUNC_MOD_EXPR:
8112 case FLOOR_MOD_EXPR:
8113 case CEIL_MOD_EXPR:
8114 case ROUND_MOD_EXPR:
8115 if (modifier == EXPAND_STACK_PARM)
8116 target = 0;
8117 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8118 subtarget, &op0, &op1, 0);
8119 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8121 case FIX_ROUND_EXPR:
8122 case FIX_FLOOR_EXPR:
8123 case FIX_CEIL_EXPR:
8124 abort (); /* Not used for C. */
8126 case FIX_TRUNC_EXPR:
8127 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8128 if (target == 0 || modifier == EXPAND_STACK_PARM)
8129 target = gen_reg_rtx (mode);
8130 expand_fix (target, op0, unsignedp);
8131 return target;
8133 case FLOAT_EXPR:
8134 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8135 if (target == 0 || modifier == EXPAND_STACK_PARM)
8136 target = gen_reg_rtx (mode);
8137 /* expand_float can't figure out what to do if FROM has VOIDmode.
8138 So give it the correct mode. With -O, cse will optimize this. */
8139 if (GET_MODE (op0) == VOIDmode)
8140 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8141 op0);
8142 expand_float (target, op0,
8143 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8144 return target;
8146 case NEGATE_EXPR:
8147 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8148 if (modifier == EXPAND_STACK_PARM)
8149 target = 0;
8150 temp = expand_unop (mode,
8151 ! unsignedp && flag_trapv
8152 && (GET_MODE_CLASS(mode) == MODE_INT)
8153 ? negv_optab : neg_optab, op0, target, 0);
8154 if (temp == 0)
8155 abort ();
8156 return temp;
8158 case ABS_EXPR:
8159 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8160 if (modifier == EXPAND_STACK_PARM)
8161 target = 0;
8163 /* ABS_EXPR is not valid for complex arguments. */
8164 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8165 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8166 abort ();
8168 /* Unsigned abs is simply the operand. Testing here means we don't
8169 risk generating incorrect code below. */
8170 if (TYPE_UNSIGNED (type))
8171 return op0;
8173 return expand_abs (mode, op0, target, unsignedp,
8174 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8176 case MAX_EXPR:
8177 case MIN_EXPR:
8178 target = original_target;
8179 if (target == 0
8180 || modifier == EXPAND_STACK_PARM
8181 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8182 || GET_MODE (target) != mode
8183 || (GET_CODE (target) == REG
8184 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8185 target = gen_reg_rtx (mode);
8186 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8187 target, &op0, &op1, 0);
8189 /* First try to do it with a special MIN or MAX instruction.
8190 If that does not win, use a conditional jump to select the proper
8191 value. */
8192 this_optab = (unsignedp
8193 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8194 : (code == MIN_EXPR ? smin_optab : smax_optab));
8196 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8197 OPTAB_WIDEN);
8198 if (temp != 0)
8199 return temp;
8201 /* At this point, a MEM target is no longer useful; we will get better
8202 code without it. */
8204 if (GET_CODE (target) == MEM)
8205 target = gen_reg_rtx (mode);
8207 /* If op1 was placed in target, swap op0 and op1. */
8208 if (target != op0 && target == op1)
8210 rtx tem = op0;
8211 op0 = op1;
8212 op1 = tem;
8215 if (target != op0)
8216 emit_move_insn (target, op0);
8218 op0 = gen_label_rtx ();
8220 /* If this mode is an integer too wide to compare properly,
8221 compare word by word. Rely on cse to optimize constant cases. */
8222 if (GET_MODE_CLASS (mode) == MODE_INT
8223 && ! can_compare_p (GE, mode, ccp_jump))
8225 if (code == MAX_EXPR)
8226 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8227 NULL_RTX, op0);
8228 else
8229 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8230 NULL_RTX, op0);
8232 else
8234 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8235 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8237 emit_move_insn (target, op1);
8238 emit_label (op0);
8239 return target;
8241 case BIT_NOT_EXPR:
8242 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8243 if (modifier == EXPAND_STACK_PARM)
8244 target = 0;
8245 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8246 if (temp == 0)
8247 abort ();
8248 return temp;
8250 /* ??? Can optimize bitwise operations with one arg constant.
8251 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8252 and (a bitwise1 b) bitwise2 b (etc)
8253 but that is probably not worthwhile. */
8255 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8256 boolean values when we want in all cases to compute both of them. In
8257 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8258 as actual zero-or-1 values and then bitwise anding. In cases where
8259 there cannot be any side effects, better code would be made by
8260 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8261 how to recognize those cases. */
8263 case TRUTH_AND_EXPR:
8264 case BIT_AND_EXPR:
8265 this_optab = and_optab;
8266 goto binop;
8268 case TRUTH_OR_EXPR:
8269 case BIT_IOR_EXPR:
8270 this_optab = ior_optab;
8271 goto binop;
8273 case TRUTH_XOR_EXPR:
8274 case BIT_XOR_EXPR:
8275 this_optab = xor_optab;
8276 goto binop;
8278 case LSHIFT_EXPR:
8279 case RSHIFT_EXPR:
8280 case LROTATE_EXPR:
8281 case RROTATE_EXPR:
8282 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8283 subtarget = 0;
8284 if (modifier == EXPAND_STACK_PARM)
8285 target = 0;
8286 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8287 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8288 unsignedp);
8290 /* Could determine the answer when only additive constants differ. Also,
8291 the addition of one can be handled by changing the condition. */
8292 case LT_EXPR:
8293 case LE_EXPR:
8294 case GT_EXPR:
8295 case GE_EXPR:
8296 case EQ_EXPR:
8297 case NE_EXPR:
8298 case UNORDERED_EXPR:
8299 case ORDERED_EXPR:
8300 case UNLT_EXPR:
8301 case UNLE_EXPR:
8302 case UNGT_EXPR:
8303 case UNGE_EXPR:
8304 case UNEQ_EXPR:
8305 temp = do_store_flag (exp,
8306 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8307 tmode != VOIDmode ? tmode : mode, 0);
8308 if (temp != 0)
8309 return temp;
8311 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8312 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8313 && original_target
8314 && GET_CODE (original_target) == REG
8315 && (GET_MODE (original_target)
8316 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8318 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8319 VOIDmode, 0);
8321 /* If temp is constant, we can just compute the result. */
8322 if (GET_CODE (temp) == CONST_INT)
8324 if (INTVAL (temp) != 0)
8325 emit_move_insn (target, const1_rtx);
8326 else
8327 emit_move_insn (target, const0_rtx);
8329 return target;
8332 if (temp != original_target)
8334 enum machine_mode mode1 = GET_MODE (temp);
8335 if (mode1 == VOIDmode)
8336 mode1 = tmode != VOIDmode ? tmode : mode;
8338 temp = copy_to_mode_reg (mode1, temp);
8341 op1 = gen_label_rtx ();
8342 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8343 GET_MODE (temp), unsignedp, op1);
8344 emit_move_insn (temp, const1_rtx);
8345 emit_label (op1);
8346 return temp;
8349 /* If no set-flag instruction, must generate a conditional
8350 store into a temporary variable. Drop through
8351 and handle this like && and ||. */
8353 case TRUTH_ANDIF_EXPR:
8354 case TRUTH_ORIF_EXPR:
8355 if (! ignore
8356 && (target == 0
8357 || modifier == EXPAND_STACK_PARM
8358 || ! safe_from_p (target, exp, 1)
8359 /* Make sure we don't have a hard reg (such as function's return
8360 value) live across basic blocks, if not optimizing. */
8361 || (!optimize && GET_CODE (target) == REG
8362 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8363 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8365 if (target)
8366 emit_clr_insn (target);
8368 op1 = gen_label_rtx ();
8369 jumpifnot (exp, op1);
8371 if (target)
8372 emit_0_to_1_insn (target);
8374 emit_label (op1);
8375 return ignore ? const0_rtx : target;
8377 case TRUTH_NOT_EXPR:
8378 if (modifier == EXPAND_STACK_PARM)
8379 target = 0;
8380 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8381 /* The parser is careful to generate TRUTH_NOT_EXPR
8382 only with operands that are always zero or one. */
8383 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8384 target, 1, OPTAB_LIB_WIDEN);
8385 if (temp == 0)
8386 abort ();
8387 return temp;
8389 case COMPOUND_EXPR:
8390 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8391 emit_queue ();
8392 return expand_expr_real (TREE_OPERAND (exp, 1),
8393 (ignore ? const0_rtx : target),
8394 VOIDmode, modifier, alt_rtl);
8396 case STATEMENT_LIST:
8398 tree_stmt_iterator iter;
8400 if (!ignore)
8401 abort ();
8403 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8404 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8406 return const0_rtx;
8408 case COND_EXPR:
8409 /* If it's void, we don't need to worry about computing a value. */
8410 if (VOID_TYPE_P (TREE_TYPE (exp)))
8412 tree pred = TREE_OPERAND (exp, 0);
8413 tree then_ = TREE_OPERAND (exp, 1);
8414 tree else_ = TREE_OPERAND (exp, 2);
8416 /* If we do not have any pending cleanups or stack_levels
8417 to restore, and at least one arm of the COND_EXPR is a
8418 GOTO_EXPR to a local label, then we can emit more efficient
8419 code by using jumpif/jumpifnot instead of the 'if' machinery. */
8420 if (! optimize
8421 || containing_blocks_have_cleanups_or_stack_level ())
8423 else if (TREE_CODE (then_) == GOTO_EXPR
8424 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8426 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8427 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8429 else if (TREE_CODE (else_) == GOTO_EXPR
8430 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8432 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8433 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8436 /* Just use the 'if' machinery. */
8437 expand_start_cond (pred, 0);
8438 start_cleanup_deferral ();
8439 expand_expr (then_, const0_rtx, VOIDmode, 0);
8441 exp = else_;
8443 /* Iterate over 'else if's instead of recursing. */
8444 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8446 expand_start_else ();
8447 if (EXPR_HAS_LOCATION (exp))
8449 emit_line_note (EXPR_LOCATION (exp));
8450 if (cfun->dont_emit_block_notes)
8451 record_block_change (TREE_BLOCK (exp));
8453 expand_elseif (TREE_OPERAND (exp, 0));
8454 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8456 /* Don't emit the jump and label if there's no 'else' clause. */
8457 if (TREE_SIDE_EFFECTS (exp))
8459 expand_start_else ();
8460 expand_expr (exp, const0_rtx, VOIDmode, 0);
8462 end_cleanup_deferral ();
8463 expand_end_cond ();
8464 return const0_rtx;
8467 /* If we would have a "singleton" (see below) were it not for a
8468 conversion in each arm, bring that conversion back out. */
8469 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8470 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8471 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8472 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8474 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8475 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8477 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8478 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8479 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8480 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8481 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8482 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8483 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8484 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8485 return expand_expr (build1 (NOP_EXPR, type,
8486 build (COND_EXPR, TREE_TYPE (iftrue),
8487 TREE_OPERAND (exp, 0),
8488 iftrue, iffalse)),
8489 target, tmode, modifier);
8493 /* Note that COND_EXPRs whose type is a structure or union
8494 are required to be constructed to contain assignments of
8495 a temporary variable, so that we can evaluate them here
8496 for side effect only. If type is void, we must do likewise. */
8498 /* If an arm of the branch requires a cleanup,
8499 only that cleanup is performed. */
8501 tree singleton = 0;
8502 tree binary_op = 0, unary_op = 0;
8504 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8505 convert it to our mode, if necessary. */
8506 if (integer_onep (TREE_OPERAND (exp, 1))
8507 && integer_zerop (TREE_OPERAND (exp, 2))
8508 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8510 if (ignore)
8512 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8513 modifier);
8514 return const0_rtx;
8517 if (modifier == EXPAND_STACK_PARM)
8518 target = 0;
8519 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8520 if (GET_MODE (op0) == mode)
8521 return op0;
8523 if (target == 0)
8524 target = gen_reg_rtx (mode);
8525 convert_move (target, op0, unsignedp);
8526 return target;
8529 /* Check for X ? A + B : A. If we have this, we can copy A to the
8530 output and conditionally add B. Similarly for unary operations.
8531 Don't do this if X has side-effects because those side effects
8532 might affect A or B and the "?" operation is a sequence point in
8533 ANSI. (operand_equal_p tests for side effects.) */
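	/* Illustrative example, not from the source: for `x ? a + b : a' with
	   side-effect-free operands, A is the "singleton"; it is stored into
	   the target unconditionally, and B is added only on the path where
	   the condition selects the A + B arm. */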
8535 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8536 && operand_equal_p (TREE_OPERAND (exp, 2),
8537 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8538 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8539 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8540 && operand_equal_p (TREE_OPERAND (exp, 1),
8541 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8542 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8543 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8544 && operand_equal_p (TREE_OPERAND (exp, 2),
8545 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8546 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8547 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8548 && operand_equal_p (TREE_OPERAND (exp, 1),
8549 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8550 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8552 /* If we are not to produce a result, we have no target. Otherwise,
8553 if a target was specified use it; it will not be used as an
8554 intermediate target unless it is safe. If no target, use a
8555 temporary. */
8557 if (ignore)
8558 temp = 0;
8559 else if (modifier == EXPAND_STACK_PARM)
8560 temp = assign_temp (type, 0, 0, 1);
8561 else if (original_target
8562 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8563 || (singleton && GET_CODE (original_target) == REG
8564 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8565 && original_target == var_rtx (singleton)))
8566 && GET_MODE (original_target) == mode
8567 #ifdef HAVE_conditional_move
8568 && (! can_conditionally_move_p (mode)
8569 || GET_CODE (original_target) == REG
8570 || TREE_ADDRESSABLE (type))
8571 #endif
8572 && (GET_CODE (original_target) != MEM
8573 || TREE_ADDRESSABLE (type)))
8574 temp = original_target;
8575 else if (TREE_ADDRESSABLE (type))
8576 abort ();
8577 else
8578 temp = assign_temp (type, 0, 0, 1);
8580 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8581 do the test of X as a store-flag operation, do this as
8582 A + ((X != 0) << log C). Similarly for other simple binary
8583 operators. Only do for C == 1 if BRANCH_COST is low. */
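	/* Illustrative example, not from the source: `x ? a + 4 : a' can be
	   emitted as `a + ((x != 0) << 2)' when the condition has a store-flag
	   form, avoiding a branch; when branches are cheap only C == 1 is
	   handled, since the shift then disappears entirely. */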
8584 if (temp && singleton && binary_op
8585 && (TREE_CODE (binary_op) == PLUS_EXPR
8586 || TREE_CODE (binary_op) == MINUS_EXPR
8587 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8588 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8589 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8590 : integer_onep (TREE_OPERAND (binary_op, 1)))
8591 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8593 rtx result;
8594 tree cond;
8595 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8596 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8597 ? addv_optab : add_optab)
8598 : TREE_CODE (binary_op) == MINUS_EXPR
8599 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8600 ? subv_optab : sub_optab)
8601 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8602 : xor_optab);
8604 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8605 if (singleton == TREE_OPERAND (exp, 1))
8606 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8607 else
8608 cond = TREE_OPERAND (exp, 0);
8610 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8611 ? temp : NULL_RTX),
8612 mode, BRANCH_COST <= 1);
8614 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8615 result = expand_shift (LSHIFT_EXPR, mode, result,
8616 build_int_2 (tree_log2
8617 (TREE_OPERAND
8618 (binary_op, 1)),
8620 (safe_from_p (temp, singleton, 1)
8621 ? temp : NULL_RTX), 0);
8623 if (result)
8625 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8626 return expand_binop (mode, boptab, op1, result, temp,
8627 unsignedp, OPTAB_LIB_WIDEN);
8631 do_pending_stack_adjust ();
8632 NO_DEFER_POP;
8633 op0 = gen_label_rtx ();
8635 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8637 if (temp != 0)
8639 /* If the target conflicts with the other operand of the
8640 binary op, we can't use it. Also, we can't use the target
8641 if it is a hard register, because evaluating the condition
8642 might clobber it. */
8643 if ((binary_op
8644 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8645 || (GET_CODE (temp) == REG
8646 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8647 temp = gen_reg_rtx (mode);
8648 store_expr (singleton, temp,
8649 modifier == EXPAND_STACK_PARM ? 2 : 0);
8651 else
8652 expand_expr (singleton,
8653 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8654 if (singleton == TREE_OPERAND (exp, 1))
8655 jumpif (TREE_OPERAND (exp, 0), op0);
8656 else
8657 jumpifnot (TREE_OPERAND (exp, 0), op0);
8659 start_cleanup_deferral ();
8660 if (binary_op && temp == 0)
8661 /* Just touch the other operand. */
8662 expand_expr (TREE_OPERAND (binary_op, 1),
8663 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8664 else if (binary_op)
8665 store_expr (build (TREE_CODE (binary_op), type,
8666 make_tree (type, temp),
8667 TREE_OPERAND (binary_op, 1)),
8668 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8669 else
8670 store_expr (build1 (TREE_CODE (unary_op), type,
8671 make_tree (type, temp)),
8672 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8673 op1 = op0;
8675 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8676 comparison operator. If we have one of these cases, set the
8677 output to A, branch on A (cse will merge these two references),
8678 then set the output to FOO. */
8679 else if (temp
8680 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8681 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8682 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8683 TREE_OPERAND (exp, 1), 0)
8684 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8685 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8686 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8688 if (GET_CODE (temp) == REG
8689 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8690 temp = gen_reg_rtx (mode);
8691 store_expr (TREE_OPERAND (exp, 1), temp,
8692 modifier == EXPAND_STACK_PARM ? 2 : 0);
8693 jumpif (TREE_OPERAND (exp, 0), op0);
8695 start_cleanup_deferral ();
8696 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8697 store_expr (TREE_OPERAND (exp, 2), temp,
8698 modifier == EXPAND_STACK_PARM ? 2 : 0);
8699 else
8700 expand_expr (TREE_OPERAND (exp, 2),
8701 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8702 op1 = op0;
8704 else if (temp
8705 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8706 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8707 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8708 TREE_OPERAND (exp, 2), 0)
8709 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8710 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8711 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8713 if (GET_CODE (temp) == REG
8714 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8715 temp = gen_reg_rtx (mode);
8716 store_expr (TREE_OPERAND (exp, 2), temp,
8717 modifier == EXPAND_STACK_PARM ? 2 : 0);
8718 jumpifnot (TREE_OPERAND (exp, 0), op0);
8720 start_cleanup_deferral ();
8721 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8722 store_expr (TREE_OPERAND (exp, 1), temp,
8723 modifier == EXPAND_STACK_PARM ? 2 : 0);
8724 else
8725 expand_expr (TREE_OPERAND (exp, 1),
8726 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8727 op1 = op0;
8729 else
8731 op1 = gen_label_rtx ();
8732 jumpifnot (TREE_OPERAND (exp, 0), op0);
8734 start_cleanup_deferral ();
8736 /* One branch of the cond can be void if it never returns. For
8737 example, A ? throw : E. */
8738 if (temp != 0
8739 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8740 store_expr (TREE_OPERAND (exp, 1), temp,
8741 modifier == EXPAND_STACK_PARM ? 2 : 0);
8742 else
8743 expand_expr (TREE_OPERAND (exp, 1),
8744 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8745 end_cleanup_deferral ();
8746 emit_queue ();
8747 emit_jump_insn (gen_jump (op1));
8748 emit_barrier ();
8749 emit_label (op0);
8750 start_cleanup_deferral ();
8751 if (temp != 0
8752 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8753 store_expr (TREE_OPERAND (exp, 2), temp,
8754 modifier == EXPAND_STACK_PARM ? 2 : 0);
8755 else
8756 expand_expr (TREE_OPERAND (exp, 2),
8757 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8760 end_cleanup_deferral ();
8762 emit_queue ();
8763 emit_label (op1);
8764 OK_DEFER_POP;
8766 return temp;
8769 case TARGET_EXPR:
8771 /* Something needs to be initialized, but we didn't know
8772 where that thing was when building the tree. For example,
8773 it could be the return value of a function, or a parameter
8774 to a function which is laid out on the stack, or a temporary
8775 variable which must be passed by reference.
8777 We guarantee that the expression will either be constructed
8778 or copied into our original target. */
8780 tree slot = TREE_OPERAND (exp, 0);
8781 tree cleanups = NULL_TREE;
8782 tree exp1;
8784 if (TREE_CODE (slot) != VAR_DECL)
8785 abort ();
8787 if (! ignore)
8788 target = original_target;
8790 /* Set this here so that if we get a target that refers to a
8791 register variable that's already been used, put_reg_into_stack
8792 knows that it should fix up those uses. */
8793 TREE_USED (slot) = 1;
8795 if (target == 0)
8797 if (DECL_RTL_SET_P (slot))
8799 target = DECL_RTL (slot);
8800 /* If we have already expanded the slot, don't do
8801 it again. (mrs) */
8802 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8803 return target;
8805 else
8807 target = assign_temp (type, 2, 0, 1);
8808 SET_DECL_RTL (slot, target);
8809 if (TREE_ADDRESSABLE (slot))
8810 put_var_into_stack (slot, /*rescan=*/false);
8812 /* Since SLOT is not known to the called function
8813 to belong to its stack frame, we must build an explicit
8814 cleanup. This case occurs when we must build up a reference
8815 to pass the reference as an argument. In this case,
8816 it is very likely that such a reference need not be
8817 built here. */
8819 if (TREE_OPERAND (exp, 2) == 0)
8820 TREE_OPERAND (exp, 2)
8821 = lang_hooks.maybe_build_cleanup (slot);
8822 cleanups = TREE_OPERAND (exp, 2);
8825 else
8827 /* This case does occur when expanding a parameter which
8828 needs to be constructed on the stack. The target
8829 is the actual stack address that we want to initialize.
8830 The function we call will perform the cleanup in this case. */
8832 /* If we have already assigned it space, use that space,
8833 not the target we were passed in, as our target
8834 parameter is only a hint. */
8835 if (DECL_RTL_SET_P (slot))
8837 target = DECL_RTL (slot);
8838 /* If we have already expanded the slot, don't do
8839 it again. (mrs) */
8840 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8841 return target;
8843 else
8845 SET_DECL_RTL (slot, target);
8846 /* If we must have an addressable slot, then make sure that
8847 the RTL that we just stored in slot is OK. */
8848 if (TREE_ADDRESSABLE (slot))
8849 put_var_into_stack (slot, /*rescan=*/true);
8853 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8854 /* Mark it as expanded. */
8855 TREE_OPERAND (exp, 1) = NULL_TREE;
8857 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8859 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8861 return target;
8864 case INIT_EXPR:
8866 tree lhs = TREE_OPERAND (exp, 0);
8867 tree rhs = TREE_OPERAND (exp, 1);
8869 temp = expand_assignment (lhs, rhs, ! ignore);
8870 return temp;
8873 case MODIFY_EXPR:
8875 /* If lhs is complex, expand calls in rhs before computing it.
8876 That's so we don't compute a pointer and save it over a
8877 call. If lhs is simple, compute it first so we can give it
8878 as a target if the rhs is just a call. This avoids an
8879 extra temp and copy and that prevents a partial-subsumption
8880 which makes bad code. Actually we could treat
8881 component_ref's of vars like vars. */
8883 tree lhs = TREE_OPERAND (exp, 0);
8884 tree rhs = TREE_OPERAND (exp, 1);
8886 temp = 0;
8888 /* Check for |= or &= of a bitfield of size one into another bitfield
8889 of size 1. In this case, (unless we need the result of the
8890 assignment) we can do this more efficiently with a
8891 test followed by an assignment, if necessary.
8893 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8894 things change so we do, this code should be enhanced to
8895 support it. */
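	/* Illustrative example, not from the source: with 1-bit fields,
	   `s.a |= s.b;' whose result is unused can be emitted as a jump
	   around a store of 1 into s.a, taken when s.b is zero, instead of
	   reading, or-ing and rewriting the destination field. */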
8896 if (ignore
8897 && TREE_CODE (lhs) == COMPONENT_REF
8898 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8899 || TREE_CODE (rhs) == BIT_AND_EXPR)
8900 && TREE_OPERAND (rhs, 0) == lhs
8901 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8902 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8903 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8905 rtx label = gen_label_rtx ();
8907 do_jump (TREE_OPERAND (rhs, 1),
8908 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8909 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8910 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8911 (TREE_CODE (rhs) == BIT_IOR_EXPR
8912 ? integer_one_node
8913 : integer_zero_node)),
8915 do_pending_stack_adjust ();
8916 emit_label (label);
8917 return const0_rtx;
8920 temp = expand_assignment (lhs, rhs, ! ignore);
8922 return temp;
8925 case RETURN_EXPR:
8926 if (!TREE_OPERAND (exp, 0))
8927 expand_null_return ();
8928 else
8929 expand_return (TREE_OPERAND (exp, 0));
8930 return const0_rtx;
8932 case PREINCREMENT_EXPR:
8933 case PREDECREMENT_EXPR:
8934 return expand_increment (exp, 0, ignore);
8936 case POSTINCREMENT_EXPR:
8937 case POSTDECREMENT_EXPR:
8938 /* Faster to treat as pre-increment if result is not used. */
8939 return expand_increment (exp, ! ignore, ignore);
8941 case ADDR_EXPR:
8942 if (modifier == EXPAND_STACK_PARM)
8943 target = 0;
8944 /* If we are taking the address of something erroneous, just
8945 return a zero. */
8946 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8947 return const0_rtx;
8948 /* If we are taking the address of a constant and are at the
8949 top level, we have to use output_constant_def since we can't
8950 call force_const_mem at top level. */
8951 else if (cfun == 0
8952 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8953 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8954 == 'c')))
8955 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8956 else
8958 /* We make sure to pass const0_rtx down if we came in with
8959 ignore set, to avoid doing the cleanups twice for something. */
8960 op0 = expand_expr (TREE_OPERAND (exp, 0),
8961 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8962 (modifier == EXPAND_INITIALIZER
8963 ? modifier : EXPAND_CONST_ADDRESS));
8965 /* If we are going to ignore the result, OP0 will have been set
8966 to const0_rtx, so just return it. Don't get confused and
8967 think we are taking the address of the constant. */
8968 if (ignore)
8969 return op0;
8971 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8972 clever and return a REG when given a MEM. */
8973 op0 = protect_from_queue (op0, 1);
8975 /* We would like the object in memory. If it is a constant, we can
8976 have it be statically allocated into memory. For a non-constant,
8977 we need to allocate some memory and store the value into it. */
8979 if (CONSTANT_P (op0))
8980 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8981 op0);
8982 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8983 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8984 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8986 /* If the operand is a SAVE_EXPR, we can deal with this by
8987 forcing the SAVE_EXPR into memory. */
8988 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8990 put_var_into_stack (TREE_OPERAND (exp, 0),
8991 /*rescan=*/true);
8992 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8994 else
8996 /* If this object is in a register, it can't be BLKmode. */
8997 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8998 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9000 if (GET_CODE (op0) == PARALLEL)
9001 /* Handle calls that pass values in multiple
9002 non-contiguous locations. The Irix 6 ABI has examples
9003 of this. */
9004 emit_group_store (memloc, op0, inner_type,
9005 int_size_in_bytes (inner_type));
9006 else
9007 emit_move_insn (memloc, op0);
9009 op0 = memloc;
9013 if (GET_CODE (op0) != MEM)
9014 abort ();
9016 mark_temp_addr_taken (op0);
9017 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9019 op0 = XEXP (op0, 0);
9020 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9021 op0 = convert_memory_address (ptr_mode, op0);
9022 return op0;
9025 /* If OP0 is not aligned at least as much as the type requires, we
9026 need to make a temporary, copy OP0 to it, and take the address of
9027 the temporary. We want to use the alignment of the type, not of
9028 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9029 the test for BLKmode means that can't happen. The test for
9030 BLKmode is because we never make mis-aligned MEMs with
9031 non-BLKmode.
9033 We don't need to do this at all if the machine doesn't have
9034 strict alignment. */
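      /* A rough illustration with hypothetical types (not taken from this
         file): given

           struct inner { int a, b; };
           struct outer { char c; struct inner in; } __attribute__ ((packed));

         the BLKmode member IN can sit at an address less aligned than
         STRUCT INNER requires.  On a STRICT_ALIGNMENT target, &outer.in is
         then handled by copying IN into a suitably aligned stack temporary
         and taking the temporary's address, or by the "unaligned member"
         error when a bitwise copy is not allowed.  */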
9035 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9036 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9037 > MEM_ALIGN (op0))
9038 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9040 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9041 rtx new;
9043 if (TYPE_ALIGN_OK (inner_type))
9044 abort ();
9046 if (TREE_ADDRESSABLE (inner_type))
9048 /* We can't make a bitwise copy of this object, so fail. */
9049 error ("cannot take the address of an unaligned member");
9050 return const0_rtx;
9053 new = assign_stack_temp_for_type
9054 (TYPE_MODE (inner_type),
9055 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9056 : int_size_in_bytes (inner_type),
9057 1, build_qualified_type (inner_type,
9058 (TYPE_QUALS (inner_type)
9059 | TYPE_QUAL_CONST)));
9061 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9062 (modifier == EXPAND_STACK_PARM
9063 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9065 op0 = new;
9068 op0 = force_operand (XEXP (op0, 0), target);
9071 if (flag_force_addr
9072 && GET_CODE (op0) != REG
9073 && modifier != EXPAND_CONST_ADDRESS
9074 && modifier != EXPAND_INITIALIZER
9075 && modifier != EXPAND_SUM)
9076 op0 = force_reg (Pmode, op0);
9078 if (GET_CODE (op0) == REG
9079 && ! REG_USERVAR_P (op0))
9080 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9082 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9083 op0 = convert_memory_address (ptr_mode, op0);
9085 return op0;
9087 case ENTRY_VALUE_EXPR:
9088 abort ();
9090 /* COMPLEX type for Extended Pascal & Fortran */
9091 case COMPLEX_EXPR:
9093 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9094 rtx insns;
9096 /* Get the rtx code of the operands. */
9097 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9098 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9100 if (! target)
9101 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9103 start_sequence ();
9105 /* Move the real (op0) and imaginary (op1) parts to their location. */
9106 emit_move_insn (gen_realpart (mode, target), op0);
9107 emit_move_insn (gen_imagpart (mode, target), op1);
9109 insns = get_insns ();
9110 end_sequence ();
9112 /* Complex construction should appear as a single unit. */
9113 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9114 each with a separate pseudo as destination.
9115 It's not correct for flow to treat them as a unit. */
9116 if (GET_CODE (target) != CONCAT)
9117 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9118 else
9119 emit_insn (insns);
9121 return target;
9124 case REALPART_EXPR:
9125 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9126 return gen_realpart (mode, op0);
9128 case IMAGPART_EXPR:
9129 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9130 return gen_imagpart (mode, op0);
9132 case CONJ_EXPR:
9134 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9135 rtx imag_t;
9136 rtx insns;
9138 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9140 if (! target)
9141 target = gen_reg_rtx (mode);
9143 start_sequence ();
9145 /* Store the realpart and the negated imagpart to target. */
9146 emit_move_insn (gen_realpart (partmode, target),
9147 gen_realpart (partmode, op0));
9149 imag_t = gen_imagpart (partmode, target);
9150 temp = expand_unop (partmode,
9151 ! unsignedp && flag_trapv
9152 && (GET_MODE_CLASS(partmode) == MODE_INT)
9153 ? negv_optab : neg_optab,
9154 gen_imagpart (partmode, op0), imag_t, 0);
9155 if (temp != imag_t)
9156 emit_move_insn (imag_t, temp);
9158 insns = get_insns ();
9159 end_sequence ();
9161 /* Conjugate should appear as a single unit
9162 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9163 each with a separate pseudo as destination.
9164 It's not correct for flow to treat them as a unit. */
9165 if (GET_CODE (target) != CONCAT)
9166 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9167 else
9168 emit_insn (insns);
9170 return target;
9173 case RESX_EXPR:
9174 expand_resx_expr (exp);
9175 return const0_rtx;
9177 case TRY_CATCH_EXPR:
9179 tree handler = TREE_OPERAND (exp, 1);
9181 expand_eh_region_start ();
9182 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9183 expand_eh_handler (handler);
9185 return op0;
9188 case CATCH_EXPR:
9189 expand_start_catch (CATCH_TYPES (exp));
9190 expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
9191 expand_end_catch ();
9192 return const0_rtx;
9194 case EH_FILTER_EXPR:
9195 /* Should have been handled in expand_eh_handler. */
9196 abort ();
9198 case TRY_FINALLY_EXPR:
9200 tree try_block = TREE_OPERAND (exp, 0);
9201 tree finally_block = TREE_OPERAND (exp, 1);
9203 if ((!optimize && lang_protect_cleanup_actions == NULL)
9204 || unsafe_for_reeval (finally_block) > 1)
9206 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9207 is not sufficient, so we cannot expand the block twice.
9208 So we play games with GOTO_SUBROUTINE_EXPR to let us
9209 expand the thing only once. */
9210 /* When not optimizing, we go ahead with this form since
9211 (1) user breakpoints operate more predictably without
9212 code duplication, and
9213 (2) we're not running any of the global optimizers
9214 that would explode in time/space with the highly
9215 connected CFG created by the indirect branching. */
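          /* Roughly, the code emitted below has this shape (an informal
             sketch, not literal RTL):

                 <try block>
                 return_link = <resume address>; goto finally;   [the cleanup]
                 goto done;
               finally:
                 <finally block>
                 goto *return_link;
               done:  */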
9217 rtx finally_label = gen_label_rtx ();
9218 rtx done_label = gen_label_rtx ();
9219 rtx return_link = gen_reg_rtx (Pmode);
9220 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9221 (tree) finally_label, (tree) return_link);
9222 TREE_SIDE_EFFECTS (cleanup) = 1;
9224 /* Start a new binding layer that will keep track of all cleanup
9225 actions to be performed. */
9226 expand_start_bindings (2);
9227 target_temp_slot_level = temp_slot_level;
9229 expand_decl_cleanup (NULL_TREE, cleanup);
9230 op0 = expand_expr (try_block, target, tmode, modifier);
9232 preserve_temp_slots (op0);
9233 expand_end_bindings (NULL_TREE, 0, 0);
9234 emit_jump (done_label);
9235 emit_label (finally_label);
9236 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9237 emit_indirect_jump (return_link);
9238 emit_label (done_label);
9240 else
9242 expand_start_bindings (2);
9243 target_temp_slot_level = temp_slot_level;
9245 expand_decl_cleanup (NULL_TREE, finally_block);
9246 op0 = expand_expr (try_block, target, tmode, modifier);
9248 preserve_temp_slots (op0);
9249 expand_end_bindings (NULL_TREE, 0, 0);
9252 return op0;
9255 case GOTO_SUBROUTINE_EXPR:
9257 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9258 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9259 rtx return_address = gen_label_rtx ();
9260 emit_move_insn (return_link,
9261 gen_rtx_LABEL_REF (Pmode, return_address));
9262 emit_jump (subr);
9263 emit_label (return_address);
9264 return const0_rtx;
9267 case VA_ARG_EXPR:
9268 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9270 case EXC_PTR_EXPR:
9271 return get_exception_pointer (cfun);
9273 case FILTER_EXPR:
9274 return get_exception_filter (cfun);
9276 case FDESC_EXPR:
9277 /* Function descriptors are not valid except as initialization
9278 constants, and should not be expanded. */
9279 abort ();
9281 case SWITCH_EXPR:
9282 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
9283 "switch");
9284 if (SWITCH_BODY (exp))
9285 expand_expr_stmt (SWITCH_BODY (exp));
9286 if (SWITCH_LABELS (exp))
9288 tree duplicate = 0;
9289 tree vec = SWITCH_LABELS (exp);
9290 size_t i, n = TREE_VEC_LENGTH (vec);
9292 for (i = 0; i < n; ++i)
9294 tree elt = TREE_VEC_ELT (vec, i);
9295 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
9296 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
9297 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
9299 tree case_low = CASE_LOW (elt);
9300 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
9301 if (case_low && case_high)
9303 /* Case label is less than minimum for type. */
9304 if ((tree_int_cst_compare (case_low, min_value) < 0)
9305 && (tree_int_cst_compare (case_high, min_value) < 0))
9307 warning ("case label value %d is less than minimum value for type",
9308 TREE_INT_CST (case_low));
9309 continue;
9312 /* Case value is greater than maximum for type. */
9313 if ((tree_int_cst_compare (case_low, max_value) > 0)
9314 && (tree_int_cst_compare (case_high, max_value) > 0))
9316 warning ("case label value %d exceeds maximum value for type",
9317 TREE_INT_CST (case_high));
9318 continue;
9321 /* Saturate lower case label value to minimum. */
9322 if ((tree_int_cst_compare (case_high, min_value) >= 0)
9323 && (tree_int_cst_compare (case_low, min_value) < 0))
9325 warning ("lower value %d in case label range less than minimum value for type",
9326 TREE_INT_CST (case_low));
9327 case_low = min_value;
9330 /* Saturate upper case label value to maximum. */
9331 if ((tree_int_cst_compare (case_low, max_value) <= 0)
9332 && (tree_int_cst_compare (case_high, max_value) > 0))
9334 warning ("upper value %d in case label range exceeds maximum value for type",
9335 TREE_INT_CST (case_high));
9336 case_high = max_value;
9340 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
9341 if (duplicate)
9342 abort ();
9345 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
9346 return const0_rtx;
9348 case LABEL_EXPR:
9349 expand_label (TREE_OPERAND (exp, 0));
9350 return const0_rtx;
9352 case CASE_LABEL_EXPR:
9354 tree duplicate = 0;
9355 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
9356 &duplicate, false);
9357 if (duplicate)
9358 abort ();
9359 return const0_rtx;
9362 case ASM_EXPR:
9363 expand_asm_expr (exp);
9364 return const0_rtx;
9366 default:
9367 return lang_hooks.expand_expr (exp, original_target, tmode,
9368 modifier, alt_rtl);
9371 /* Here to do an ordinary binary operator, generating an instruction
9372 from the optab already placed in `this_optab'. */
9373 binop:
9374 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9375 subtarget, &op0, &op1, 0);
9376 binop2:
9377 if (modifier == EXPAND_STACK_PARM)
9378 target = 0;
9379 temp = expand_binop (mode, this_optab, op0, op1, target,
9380 unsignedp, OPTAB_LIB_WIDEN);
9381 if (temp == 0)
9382 abort ();
9383 return temp;
9386 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9387 when applied to the address of EXP produces an address known to be
9388 aligned more than BIGGEST_ALIGNMENT. */
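/* As an informal example, an OFFSET of the shape

     (- (intptr_t) &exp) & (ALIGN - 1)

   where ALIGN is a power of two larger than BIGGEST_ALIGNMENT rounds the
   address of EXP up to the next ALIGN boundary, so EXP + OFFSET is at least
   ALIGN-aligned.  That is the pattern the checks below look for.  */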
9390 static int
9391 is_aligning_offset (tree offset, tree exp)
9393 /* Strip off any conversions. */
9394 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9395 || TREE_CODE (offset) == NOP_EXPR
9396 || TREE_CODE (offset) == CONVERT_EXPR)
9397 offset = TREE_OPERAND (offset, 0);
9399 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9400 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9401 if (TREE_CODE (offset) != BIT_AND_EXPR
9402 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9403 || compare_tree_int (TREE_OPERAND (offset, 1),
9404 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9405 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9406 return 0;
9408 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9409 It must be NEGATE_EXPR. Then strip any more conversions. */
9410 offset = TREE_OPERAND (offset, 0);
9411 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9412 || TREE_CODE (offset) == NOP_EXPR
9413 || TREE_CODE (offset) == CONVERT_EXPR)
9414 offset = TREE_OPERAND (offset, 0);
9416 if (TREE_CODE (offset) != NEGATE_EXPR)
9417 return 0;
9419 offset = TREE_OPERAND (offset, 0);
9420 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9421 || TREE_CODE (offset) == NOP_EXPR
9422 || TREE_CODE (offset) == CONVERT_EXPR)
9423 offset = TREE_OPERAND (offset, 0);
9425 /* This must now be the address of EXP. */
9426 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9429 /* Return the tree node if an ARG corresponds to a string constant or zero
9430 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9431 in bytes within the string that ARG is accessing. The type of the
9432 offset will be `sizetype'. */
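/* Illustrative examples of the forms recognized below:

     &"abc"       -> the STRING_CST, *PTR_OFFSET = 0
     &"abc"[2]    -> the STRING_CST, *PTR_OFFSET = 2
     "abc" + i    -> the STRING_CST, *PTR_OFFSET = i  (the PLUS_EXPR form) */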
9434 tree
9435 string_constant (tree arg, tree *ptr_offset)
9437 STRIP_NOPS (arg);
9439 if (TREE_CODE (arg) == ADDR_EXPR
9440 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9442 *ptr_offset = size_zero_node;
9443 return TREE_OPERAND (arg, 0);
9445 if (TREE_CODE (arg) == ADDR_EXPR
9446 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9447 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9449 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9450 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9452 else if (TREE_CODE (arg) == PLUS_EXPR)
9454 tree arg0 = TREE_OPERAND (arg, 0);
9455 tree arg1 = TREE_OPERAND (arg, 1);
9457 STRIP_NOPS (arg0);
9458 STRIP_NOPS (arg1);
9460 if (TREE_CODE (arg0) == ADDR_EXPR
9461 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9463 *ptr_offset = convert (sizetype, arg1);
9464 return TREE_OPERAND (arg0, 0);
9466 else if (TREE_CODE (arg1) == ADDR_EXPR
9467 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9469 *ptr_offset = convert (sizetype, arg0);
9470 return TREE_OPERAND (arg1, 0);
9474 return 0;
9477 /* Expand code for a post- or pre- increment or decrement
9478 and return the RTX for the result.
9479 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9481 static rtx
9482 expand_increment (tree exp, int post, int ignore)
9484 rtx op0, op1;
9485 rtx temp, value;
9486 tree incremented = TREE_OPERAND (exp, 0);
9487 optab this_optab = add_optab;
9488 int icode;
9489 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9490 int op0_is_copy = 0;
9491 int single_insn = 0;
9492 /* 1 means we can't store into OP0 directly,
9493 because it is a subreg narrower than a word,
9494 and we don't dare clobber the rest of the word. */
9495 int bad_subreg = 0;
9497 /* Stabilize any component ref that might need to be
9498 evaluated more than once below. */
9499 if (!post
9500 || TREE_CODE (incremented) == BIT_FIELD_REF
9501 || (TREE_CODE (incremented) == COMPONENT_REF
9502 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9503 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9504 incremented = stabilize_reference (incremented);
9505 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9506 ones into save exprs so that they don't accidentally get evaluated
9507 more than once by the code below. */
9508 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9509 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9510 incremented = save_expr (incremented);
9512 /* Compute the operands as RTX.
9513 Note whether OP0 is the actual lvalue or a copy of it:
9514 I believe it is a copy iff it is a register or subreg
9515 and insns were generated in computing it. */
9517 temp = get_last_insn ();
9518 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9520 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9521 in place but instead must do sign- or zero-extension during assignment,
9522 so we copy it into a new register and let the code below use it as
9523 a copy.
9525 Note that we can safely modify this SUBREG since it is known not to be
9526 shared (it was made by the expand_expr call above). */
9528 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9530 if (post)
9531 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9532 else
9533 bad_subreg = 1;
9535 else if (GET_CODE (op0) == SUBREG
9536 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9538 /* We cannot increment this SUBREG in place. If we are
9539 post-incrementing, get a copy of the old value. Otherwise,
9540 just mark that we cannot increment in place. */
9541 if (post)
9542 op0 = copy_to_reg (op0);
9543 else
9544 bad_subreg = 1;
9547 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9548 && temp != get_last_insn ());
9549 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9551 /* Decide whether incrementing or decrementing. */
9552 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9553 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9554 this_optab = sub_optab;
9556 /* Convert decrement by a constant into a negative increment. */
9557 if (this_optab == sub_optab
9558 && GET_CODE (op1) == CONST_INT)
9560 op1 = GEN_INT (-INTVAL (op1));
9561 this_optab = add_optab;
9564 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9565 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9567 /* For a preincrement, see if we can do this with a single instruction. */
9568 if (!post)
9570 icode = (int) this_optab->handlers[(int) mode].insn_code;
9571 if (icode != (int) CODE_FOR_nothing
9572 /* Make sure that OP0 is valid for operands 0 and 1
9573 of the insn we want to queue. */
9574 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9575 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9576 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9577 single_insn = 1;
9580 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9581 then we cannot just increment OP0. We must therefore contrive to
9582 increment the original value. Then, for postincrement, we can return
9583 OP0 since it is a copy of the old value. For preincrement, expand here
9584 unless we can do it with a single insn.
9586 Likewise if storing directly into OP0 would clobber high bits
9587 we need to preserve (bad_subreg). */
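  /* For example, if OP0 came back as a fresh pseudo holding only a copy of
     a promoted variable, adding one to that pseudo would leave the variable
     itself unchanged; the code below instead rebuilds
     "incremented = incremented +/- operand" and expands it as an ordinary
     assignment.  */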
9588 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9590 /* This is the easiest way to increment the value wherever it is.
9591 Problems with multiple evaluation of INCREMENTED are prevented
9592 because either (1) it is a component_ref or preincrement,
9593 in which case it was stabilized above, or (2) it is an array_ref
9594 with constant index in an array in a register, which is
9595 safe to reevaluate. */
9596 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9597 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9598 ? MINUS_EXPR : PLUS_EXPR),
9599 TREE_TYPE (exp),
9600 incremented,
9601 TREE_OPERAND (exp, 1));
9603 while (TREE_CODE (incremented) == NOP_EXPR
9604 || TREE_CODE (incremented) == CONVERT_EXPR)
9606 newexp = convert (TREE_TYPE (incremented), newexp);
9607 incremented = TREE_OPERAND (incremented, 0);
9610 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9611 return post ? op0 : temp;
9614 if (post)
9616 /* We have a true reference to the value in OP0.
9617 If there is an insn to add or subtract in this mode, queue it.
9618 Queuing the increment insn avoids the register shuffling
9619 that often results if we must increment now and first save
9620 the old value for subsequent use. */
9622 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9623 op0 = stabilize (op0);
9624 #endif
9626 icode = (int) this_optab->handlers[(int) mode].insn_code;
9627 if (icode != (int) CODE_FOR_nothing
9628 /* Make sure that OP0 is valid for operands 0 and 1
9629 of the insn we want to queue. */
9630 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9631 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9633 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9634 op1 = force_reg (mode, op1);
9636 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9638 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9640 rtx addr = (general_operand (XEXP (op0, 0), mode)
9641 ? force_reg (Pmode, XEXP (op0, 0))
9642 : copy_to_reg (XEXP (op0, 0)));
9643 rtx temp, result;
9645 op0 = replace_equiv_address (op0, addr);
9646 temp = force_reg (GET_MODE (op0), op0);
9647 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9648 op1 = force_reg (mode, op1);
9650 /* The increment queue is LIFO, thus we have to `queue'
9651 the instructions in reverse order. */
9652 enqueue_insn (op0, gen_move_insn (op0, temp));
9653 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9654 return result;
9658 /* Preincrement, or we can't increment with one simple insn. */
9659 if (post)
9660 /* Save a copy of the value before inc or dec, to return it later. */
9661 temp = value = copy_to_reg (op0);
9662 else
9663 /* Arrange to return the incremented value. */
9664 /* Copy the rtx because expand_binop will protect from the queue,
9665 and the results of that would be invalid for us to return
9666 if our caller does emit_queue before using our result. */
9667 temp = copy_rtx (value = op0);
9669 /* Increment however we can. */
9670 op1 = expand_binop (mode, this_optab, value, op1, op0,
9671 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9673 /* Make sure the value is stored into OP0. */
9674 if (op1 != op0)
9675 emit_move_insn (op0, op1);
9677 return temp;
9680 /* Generate code to calculate EXP using a store-flag instruction
9681 and return an rtx for the result. EXP is either a comparison
9682 or a TRUTH_NOT_EXPR whose operand is a comparison.
9684 If TARGET is nonzero, store the result there if convenient.
9686 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9687 cheap.
9689 Return zero if there is no suitable set-flag instruction
9690 available on this machine.
9692 Once expand_expr has been called on the arguments of the comparison,
9693 we are committed to doing the store flag, since it is not safe to
9694 re-evaluate the expression. We emit the store-flag insn by calling
9695 emit_store_flag, but only expand the arguments if we have a reason
9696 to believe that emit_store_flag will be successful. If we think that
9697 it will, but it isn't, we have to simulate the store-flag with a
9698 set/jump/set sequence. */
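/* As a sketch: for "r = (a < b)" on a machine with a suitable store-flag
   pattern, a single scc-style insn leaves 0 or 1 in R.  The fallback at the
   end of this function instead loads 1 into R, conditionally branches over
   a move of 0, and so simulates the store-flag with a set/jump/set
   sequence.  */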
9700 static rtx
9701 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9703 enum rtx_code code;
9704 tree arg0, arg1, type;
9705 tree tem;
9706 enum machine_mode operand_mode;
9707 int invert = 0;
9708 int unsignedp;
9709 rtx op0, op1;
9710 enum insn_code icode;
9711 rtx subtarget = target;
9712 rtx result, label;
9714 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9715 result at the end. We can't simply invert the test since it would
9716 have already been inverted if it were valid. This case occurs for
9717 some floating-point comparisons. */
9719 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9720 invert = 1, exp = TREE_OPERAND (exp, 0);
9722 arg0 = TREE_OPERAND (exp, 0);
9723 arg1 = TREE_OPERAND (exp, 1);
9725 /* Don't crash if the comparison was erroneous. */
9726 if (arg0 == error_mark_node || arg1 == error_mark_node)
9727 return const0_rtx;
9729 type = TREE_TYPE (arg0);
9730 operand_mode = TYPE_MODE (type);
9731 unsignedp = TYPE_UNSIGNED (type);
9733 /* We won't bother with BLKmode store-flag operations because it would mean
9734 passing a lot of information to emit_store_flag. */
9735 if (operand_mode == BLKmode)
9736 return 0;
9738 /* We won't bother with store-flag operations involving function pointers
9739 when function pointers must be canonicalized before comparisons. */
9740 #ifdef HAVE_canonicalize_funcptr_for_compare
9741 if (HAVE_canonicalize_funcptr_for_compare
9742 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9743 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9744 == FUNCTION_TYPE))
9745 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9746 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9747 == FUNCTION_TYPE))))
9748 return 0;
9749 #endif
9751 STRIP_NOPS (arg0);
9752 STRIP_NOPS (arg1);
9754 /* Get the rtx comparison code to use. We know that EXP is a comparison
9755 operation of some type. Some comparisons against 1 and -1 can be
9756 converted to comparisons with zero. Do so here so that the tests
9757 below will be aware that we have a comparison with zero. These
9758 tests will not catch constants in the first operand, but constants
9759 are rarely passed as the first operand. */
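  /* For instance: for signed X, "X < 1" becomes "X <= 0" and "X > -1"
     becomes "X >= 0"; for unsigned X, "X >= 1" becomes "X > 0" (GTU), as
     arranged by the switch below.  */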
9761 switch (TREE_CODE (exp))
9763 case EQ_EXPR:
9764 code = EQ;
9765 break;
9766 case NE_EXPR:
9767 code = NE;
9768 break;
9769 case LT_EXPR:
9770 if (integer_onep (arg1))
9771 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9772 else
9773 code = unsignedp ? LTU : LT;
9774 break;
9775 case LE_EXPR:
9776 if (! unsignedp && integer_all_onesp (arg1))
9777 arg1 = integer_zero_node, code = LT;
9778 else
9779 code = unsignedp ? LEU : LE;
9780 break;
9781 case GT_EXPR:
9782 if (! unsignedp && integer_all_onesp (arg1))
9783 arg1 = integer_zero_node, code = GE;
9784 else
9785 code = unsignedp ? GTU : GT;
9786 break;
9787 case GE_EXPR:
9788 if (integer_onep (arg1))
9789 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9790 else
9791 code = unsignedp ? GEU : GE;
9792 break;
9794 case UNORDERED_EXPR:
9795 code = UNORDERED;
9796 break;
9797 case ORDERED_EXPR:
9798 code = ORDERED;
9799 break;
9800 case UNLT_EXPR:
9801 code = UNLT;
9802 break;
9803 case UNLE_EXPR:
9804 code = UNLE;
9805 break;
9806 case UNGT_EXPR:
9807 code = UNGT;
9808 break;
9809 case UNGE_EXPR:
9810 code = UNGE;
9811 break;
9812 case UNEQ_EXPR:
9813 code = UNEQ;
9814 break;
9816 default:
9817 abort ();
9820 /* Put a constant second. */
9821 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9823 tem = arg0; arg0 = arg1; arg1 = tem;
9824 code = swap_condition (code);
9827 /* If this is an equality or inequality test of a single bit, we can
9828 do this by shifting the bit being tested to the low-order bit and
9829 masking the result with the constant 1. If the condition was EQ,
9830 we xor it with 1. This does not require an scc insn and is faster
9831 than an scc insn even if we have it.
9833 The code to make this transformation was moved into fold_single_bit_test,
9834 so we just call into the folder and expand its result. */
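  /* A rough example of the folded form (values purely illustrative):

       (x & 4) != 0   is rewritten as   (x >> 2) & 1
       (x & 4) == 0   is rewritten as   ((x >> 2) & 1) ^ 1

     fold_single_bit_test builds the equivalent tree, which is expanded like
     any other expression.  */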
9836 if ((code == NE || code == EQ)
9837 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9838 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9840 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9841 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9842 arg0, arg1, type),
9843 target, VOIDmode, EXPAND_NORMAL);
9846 /* Now see if we are likely to be able to do this. Return if not. */
9847 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9848 return 0;
9850 icode = setcc_gen_code[(int) code];
9851 if (icode == CODE_FOR_nothing
9852 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9854 /* We can only do this if it is one of the special cases that
9855 can be handled without an scc insn. */
9856 if ((code == LT && integer_zerop (arg1))
9857 || (! only_cheap && code == GE && integer_zerop (arg1)))
9859 else if (BRANCH_COST >= 0
9860 && ! only_cheap && (code == NE || code == EQ)
9861 && TREE_CODE (type) != REAL_TYPE
9862 && ((abs_optab->handlers[(int) operand_mode].insn_code
9863 != CODE_FOR_nothing)
9864 || (ffs_optab->handlers[(int) operand_mode].insn_code
9865 != CODE_FOR_nothing)))
9867 else
9868 return 0;
9871 if (! get_subtarget (target)
9872 || GET_MODE (subtarget) != operand_mode)
9873 subtarget = 0;
9875 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9877 if (target == 0)
9878 target = gen_reg_rtx (mode);
9880 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9881 because, if the emit_store_flag does anything it will succeed and
9882 OP0 and OP1 will not be used subsequently. */
9884 result = emit_store_flag (target, code,
9885 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9886 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9887 operand_mode, unsignedp, 1);
9889 if (result)
9891 if (invert)
9892 result = expand_binop (mode, xor_optab, result, const1_rtx,
9893 result, 0, OPTAB_LIB_WIDEN);
9894 return result;
9897 /* If this failed, we have to do this with set/compare/jump/set code. */
9898 if (GET_CODE (target) != REG
9899 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9900 target = gen_reg_rtx (GET_MODE (target));
9902 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9903 result = compare_from_rtx (op0, op1, code, unsignedp,
9904 operand_mode, NULL_RTX);
9905 if (GET_CODE (result) == CONST_INT)
9906 return (((result == const0_rtx && ! invert)
9907 || (result != const0_rtx && invert))
9908 ? const0_rtx : const1_rtx);
9910 /* The code of RESULT may not match CODE if compare_from_rtx
9911 decided to swap its operands and reverse the original code.
9913 We know that compare_from_rtx returns either a CONST_INT or
9914 a new comparison code, so it is safe to just extract the
9915 code from RESULT. */
9916 code = GET_CODE (result);
9918 label = gen_label_rtx ();
9919 if (bcc_gen_fctn[(int) code] == 0)
9920 abort ();
9922 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9923 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9924 emit_label (label);
9926 return target;
9930 /* Stubs in case we haven't got a casesi insn. */
9931 #ifndef HAVE_casesi
9932 # define HAVE_casesi 0
9933 # define gen_casesi(a, b, c, d, e) (0)
9934 # define CODE_FOR_casesi CODE_FOR_nothing
9935 #endif
9937 /* If the machine does not have a case insn that compares the bounds,
9938 this means extra overhead for dispatch tables, which raises the
9939 threshold for using them. */
9940 #ifndef CASE_VALUES_THRESHOLD
9941 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9942 #endif /* CASE_VALUES_THRESHOLD */
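/* With the default definition above, a switch therefore needs roughly four
   case values (five when there is no casesi pattern) before a dispatch
   table is considered worthwhile.  */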
9944 unsigned int
9945 case_values_threshold (void)
9947 return CASE_VALUES_THRESHOLD;
9950 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9951 0 otherwise (i.e. if there is no casesi instruction). */
9952 int
9953 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9954 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9956 enum machine_mode index_mode = SImode;
9957 int index_bits = GET_MODE_BITSIZE (index_mode);
9958 rtx op1, op2, index;
9959 enum machine_mode op_mode;
9961 if (! HAVE_casesi)
9962 return 0;
9964 /* Convert the index to SImode. */
9965 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9967 enum machine_mode omode = TYPE_MODE (index_type);
9968 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9970 /* We must handle the endpoints in the original mode. */
9971 index_expr = build (MINUS_EXPR, index_type,
9972 index_expr, minval);
9973 minval = integer_zero_node;
9974 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9975 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9976 omode, 1, default_label);
9977 /* Now we can safely truncate. */
9978 index = convert_to_mode (index_mode, index, 0);
9980 else
9982 if (TYPE_MODE (index_type) != index_mode)
9984 index_expr = convert (lang_hooks.types.type_for_size
9985 (index_bits, 0), index_expr);
9986 index_type = TREE_TYPE (index_expr);
9989 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9991 emit_queue ();
9992 index = protect_from_queue (index, 0);
9993 do_pending_stack_adjust ();
9995 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9996 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9997 (index, op_mode))
9998 index = copy_to_mode_reg (op_mode, index);
10000 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10002 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10003 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10004 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10005 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10006 (op1, op_mode))
10007 op1 = copy_to_mode_reg (op_mode, op1);
10009 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10011 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10012 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10013 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10014 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10015 (op2, op_mode))
10016 op2 = copy_to_mode_reg (op_mode, op2);
10018 emit_jump_insn (gen_casesi (index, op1, op2,
10019 table_label, default_label));
10020 return 1;
10023 /* Attempt to generate a tablejump instruction; same concept. */
10024 #ifndef HAVE_tablejump
10025 #define HAVE_tablejump 0
10026 #define gen_tablejump(x, y) (0)
10027 #endif
10029 /* Subroutine of the next function.
10031 INDEX is the value being switched on, with the lowest value
10032 in the table already subtracted.
10033 MODE is its expected mode (needed if INDEX is constant).
10034 RANGE is the length of the jump table.
10035 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10037 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10038 index value is out of range. */
10040 static void
10041 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10042 rtx default_label)
10044 rtx temp, vector;
10046 if (INTVAL (range) > cfun->max_jumptable_ents)
10047 cfun->max_jumptable_ents = INTVAL (range);
10049 /* Do an unsigned comparison (in the proper mode) between the index
10050 expression and the value which represents the length of the range.
10051 Since we just finished subtracting the lower bound of the range
10052 from the index expression, this comparison allows us to simultaneously
10053 check that the original index expression value is both greater than
10054 or equal to the minimum value of the range and less than or equal to
10055 the maximum value of the range. */
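  /* Illustrative example: with case labels 5 .. 12, RANGE is 7 and INDEX
     has already had 5 subtracted.  An original value of 3 becomes a huge
     unsigned number, so the single GTU comparison against 7 rejects it just
     as it rejects an original value of 20; one branch covers both
     out-of-range directions.  */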
10057 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10058 default_label);
10060 /* If index is in range, it must fit in Pmode.
10061 Convert to Pmode so we can index with it. */
10062 if (mode != Pmode)
10063 index = convert_to_mode (Pmode, index, 1);
10065 /* Don't let a MEM slip through, because then INDEX that comes
10066 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10067 and break_out_memory_refs will go to work on it and mess it up. */
10068 #ifdef PIC_CASE_VECTOR_ADDRESS
10069 if (flag_pic && GET_CODE (index) != REG)
10070 index = copy_to_mode_reg (Pmode, index);
10071 #endif
10073 /* If flag_force_addr were to affect this address
10074 it could interfere with the tricky assumptions made
10075 about addresses that contain label-refs,
10076 which may be valid only very near the tablejump itself. */
10077 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10078 GET_MODE_SIZE, because this indicates how large insns are. The other
10079 uses should all be Pmode, because they are addresses. This code
10080 could fail if addresses and insns are not the same size. */
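  /* E.g. with 4-byte table entries, an in-range INDEX of 3 yields the
     address TABLE_LABEL + 12, from which the target label is loaded and
     jumped to below.  */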
10081 index = gen_rtx_PLUS (Pmode,
10082 gen_rtx_MULT (Pmode, index,
10083 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10084 gen_rtx_LABEL_REF (Pmode, table_label));
10085 #ifdef PIC_CASE_VECTOR_ADDRESS
10086 if (flag_pic)
10087 index = PIC_CASE_VECTOR_ADDRESS (index);
10088 else
10089 #endif
10090 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10091 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10092 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10093 RTX_UNCHANGING_P (vector) = 1;
10094 MEM_NOTRAP_P (vector) = 1;
10095 convert_move (temp, vector, 0);
10097 emit_jump_insn (gen_tablejump (temp, table_label));
10099 /* If we are generating PIC code or if the table is PC-relative, the
10100 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10101 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10102 emit_barrier ();
10105 int
10106 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10107 rtx table_label, rtx default_label)
10109 rtx index;
10111 if (! HAVE_tablejump)
10112 return 0;
10114 index_expr = fold (build (MINUS_EXPR, index_type,
10115 convert (index_type, index_expr),
10116 convert (index_type, minval)));
10117 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10118 emit_queue ();
10119 index = protect_from_queue (index, 0);
10120 do_pending_stack_adjust ();
10122 do_tablejump (index, TYPE_MODE (index_type),
10123 convert_modes (TYPE_MODE (index_type),
10124 TYPE_MODE (TREE_TYPE (range)),
10125 expand_expr (range, NULL_RTX,
10126 VOIDmode, 0),
10127 TYPE_UNSIGNED (TREE_TYPE (range))),
10128 table_label, default_label);
10129 return 1;
10132 /* Nonzero if the mode is a valid vector mode for this architecture.
10133 This returns nonzero even if there is no hardware support for the
10134 vector mode, but we can emulate with narrower modes. */
10136 int
10137 vector_mode_valid_p (enum machine_mode mode)
10139 enum mode_class class = GET_MODE_CLASS (mode);
10140 enum machine_mode innermode;
10142 /* Doh! What's going on? */
10143 if (class != MODE_VECTOR_INT
10144 && class != MODE_VECTOR_FLOAT)
10145 return 0;
10147 /* Hardware support. Woo hoo! */
10148 if (VECTOR_MODE_SUPPORTED_P (mode))
10149 return 1;
10151 innermode = GET_MODE_INNER (mode);
10153 /* We should probably return 1 if requesting V4DI when we have no DI
10154 but do have V2DI, but that case is probably very unlikely. */
10156 /* If we have support for the inner mode, we can safely emulate it.
10157 We may not have V2DI, but we can emulate with a pair of DIs. */
10158 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10161 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10162 static rtx
10163 const_vector_from_tree (tree exp)
10165 rtvec v;
10166 int units, i;
10167 tree link, elt;
10168 enum machine_mode inner, mode;
10170 mode = TYPE_MODE (TREE_TYPE (exp));
10172 if (initializer_zerop (exp))
10173 return CONST0_RTX (mode);
10175 units = GET_MODE_NUNITS (mode);
10176 inner = GET_MODE_INNER (mode);
10178 v = rtvec_alloc (units);
10180 link = TREE_VECTOR_CST_ELTS (exp);
10181 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10183 elt = TREE_VALUE (link);
10185 if (TREE_CODE (elt) == REAL_CST)
10186 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10187 inner);
10188 else
10189 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10190 TREE_INT_CST_HIGH (elt),
10191 inner);
10194 /* Initialize remaining elements to 0. */
10195 for (; i < units; ++i)
10196 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10198 return gen_rtx_raw_CONST_VECTOR (mode, v);
10201 #include "gt-expr.h"