[official-gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 static tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
169 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
170 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
171 static int is_aligning_offset PARAMS ((tree, tree));
172 static rtx expand_increment PARAMS ((tree, int, int));
173 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
174 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
175 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
176 rtx, rtx));
177 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
178 #ifdef PUSH_ROUNDING
179 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
180 #endif
181 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
190 /* Record for each mode whether we can float-extend from memory. */
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 #endif
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
216 #ifndef CLEAR_RATIO
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
219 #else
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 #endif
223 #endif
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 #endif
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
237 #endif
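/* A worked example of how the ratios above are used (illustrative only,
   not part of the original file; it assumes a 32-bit target using the
   default MOVE_RATIO of 15).  A 12-byte, word-aligned copy needs three
   SImode moves, so MOVE_BY_PIECES_P (12, 32) holds and emit_block_move
   expands it inline with move_by_pieces; a 256-byte copy would need 64
   moves, so a movstr pattern or the memcpy libcall is used instead.  A
   target with a cheap block-move instruction can instead define
   MOVE_RATIO as a small constant such as 2 in its target header.  */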
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
249 #endif
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
254 void
255 init_expr_once ()
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 rtx reg;
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
280 int regno;
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
293 regno++)
295 if (! HARD_REGNO_MODE_OK (regno, mode))
296 continue;
298 REGNO (reg) = regno;
300 SET_SRC (pat) = mem;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
315 SET_SRC (pat) = reg;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
331 enum insn_code ic;
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
335 continue;
337 PUT_MODE (mem, srcmode);
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
345 /* This is run at the start of compiling a function. */
347 void
348 init_expr ()
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
352 pending_chain = 0;
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
356 saveregs_value = 0;
357 apply_args_value = 0;
358 forced_labels = 0;
361 /* Small sanity check that the queue is empty at the end of a function. */
363 void
364 finish_expr_for_function ()
366 if (pending_chain)
367 abort ();
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
380 static rtx
381 enqueue_insn (var, body)
382 rtx var, body;
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
405 protect_from_queue (x, modify)
406 rtx x;
407 int modify;
409 RTX_CODE code = GET_CODE (x);
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
414 return x;
415 #endif
417 if (code != QUEUED)
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
423 shared. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
427 rtx y = XEXP (x, 0);
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
430 if (QUEUED_INSN (y))
432 rtx temp = gen_reg_rtx (GET_MODE (x));
434 emit_insn_before (gen_move_insn (temp, new),
435 QUEUED_INSN (y));
436 return temp;
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
446 if (code == MEM)
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
451 x = copy_rtx (x);
452 XEXP (x, 0) = tem;
455 else if (code == PLUS || code == MULT)
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
461 x = copy_rtx (x);
462 XEXP (x, 0) = new0;
463 XEXP (x, 1) = new1;
466 return x;
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
470 emit_queue. */
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
474 use that copy. */
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
481 QUEUED_INSN (x));
482 return QUEUED_COPY (x);
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
491 queued_subexp_p (x)
492 rtx x;
494 enum rtx_code code = GET_CODE (x);
495 switch (code)
497 case QUEUED:
498 return 1;
499 case MEM:
500 return queued_subexp_p (XEXP (x, 0));
501 case MULT:
502 case PLUS:
503 case MINUS:
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
506 default:
507 return 0;
511 /* Perform all the pending incrementations. */
513 void
514 emit_queue ()
516 rtx p;
517 while ((p = pending_chain))
519 rtx body = QUEUED_BODY (p);
521 switch (GET_CODE (body))
523 case INSN:
524 case JUMP_INSN:
525 case CALL_INSN:
526 case CODE_LABEL:
527 case BARRIER:
528 case NOTE:
529 QUEUED_INSN (p) = body;
530 emit_insn (body);
531 break;
533 #ifdef ENABLE_CHECKING
534 case SEQUENCE:
535 abort ();
536 break;
537 #endif
539 default:
540 QUEUED_INSN (p) = emit_insn (body);
541 break;
544 pending_chain = QUEUED_NEXT (p);
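/* A minimal usage sketch of the queue machinery above (illustrative only,
   not part of the original file; OP0 and OP1 stand for operands that may
   contain QUEUED rtx's from an already-expanded post-increment).  */
#if 0
static void
example_use_of_queue (op0, op1)
     rtx op0, op1;
{
  /* Protect the operands immediately before putting them in an insn.  */
  op0 = protect_from_queue (op0, 0);	/* read-only use */
  op1 = protect_from_queue (op1, 1);	/* will be stored into */
  emit_move_insn (op1, op0);

  /* Once the whole expression has been expanded, flush the pending
     increments.  */
  emit_queue ();
}
#endif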
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
553 void
554 convert_move (to, from, unsignedp)
555 rtx to, from;
556 int unsignedp;
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 enum insn_code code;
563 rtx libcall;
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
572 if (to_real != from_real)
573 abort ();
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
577 TO here. */
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
586 abort ();
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
591 emit_move_insn (to, from);
592 return;
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
598 abort ();
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
602 else
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
605 emit_move_insn (to, from);
606 return;
609 if (to_real != from_real)
610 abort ();
612 if (to_real)
614 rtx value, insns;
616 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
618 /* Try converting directly if the insn is supported. */
619 if ((code = can_extend_p (to_mode, from_mode, 0))
620 != CODE_FOR_nothing)
622 emit_unop_insn (code, to, from, UNKNOWN);
623 return;
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
630 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
631 return;
633 #endif
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
645 return;
647 #endif
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
658 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
665 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
666 return;
668 #endif
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
673 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
674 return;
676 #endif
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
680 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
687 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
694 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
701 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
702 return;
704 #endif
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
709 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
710 return;
712 #endif
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
716 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
717 return;
719 #endif
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
723 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
724 return;
726 #endif
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
730 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
731 return;
733 #endif
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
741 #endif
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
748 #endif
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
755 #endif
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
762 #endif
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
769 #endif
771 libcall = (rtx) 0;
772 switch (from_mode)
774 case SFmode:
775 switch (to_mode)
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 case DFmode:
795 switch (to_mode)
797 case SFmode:
798 libcall = truncdfsf2_libfunc;
799 break;
801 case XFmode:
802 libcall = extenddfxf2_libfunc;
803 break;
805 case TFmode:
806 libcall = extenddftf2_libfunc;
807 break;
809 default:
810 break;
812 break;
814 case XFmode:
815 switch (to_mode)
817 case SFmode:
818 libcall = truncxfsf2_libfunc;
819 break;
821 case DFmode:
822 libcall = truncxfdf2_libfunc;
823 break;
825 default:
826 break;
828 break;
830 case TFmode:
831 switch (to_mode)
833 case SFmode:
834 libcall = trunctfsf2_libfunc;
835 break;
837 case DFmode:
838 libcall = trunctfdf2_libfunc;
839 break;
841 default:
842 break;
844 break;
846 default:
847 break;
850 if (libcall == (rtx) 0)
851 /* This conversion is not implemented yet. */
852 abort ();
854 start_sequence ();
855 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
856 1, from, from_mode);
857 insns = get_insns ();
858 end_sequence ();
859 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
860 from));
861 return;
864 /* Now both modes are integers. */
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
868 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
870 rtx insns;
871 rtx lowpart;
872 rtx fill_value;
873 rtx lowfrom;
874 int i;
875 enum machine_mode lowpart_mode;
876 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
878 /* Try converting directly if the insn is supported. */
879 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
880 != CODE_FOR_nothing)
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize > 0 && GET_CODE (from) == SUBREG)
887 from = force_reg (from_mode, from);
888 emit_unop_insn (code, to, from, equiv_code);
889 return;
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
893 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
894 != CODE_FOR_nothing))
896 if (GET_CODE (to) == REG)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
898 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
899 emit_unop_insn (code, to,
900 gen_lowpart (word_mode, to), equiv_code);
901 return;
904 /* No special multiword conversion insn; do it by hand. */
905 start_sequence ();
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
910 if (reg_overlap_mentioned_p (to, from))
911 from = force_reg (from_mode, from);
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
915 lowpart_mode = word_mode;
916 else
917 lowpart_mode = from_mode;
919 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
921 lowpart = gen_lowpart (lowpart_mode, to);
922 emit_move_insn (lowpart, lowfrom);
924 /* Compute the value to put in each remaining word. */
925 if (unsignedp)
926 fill_value = const0_rtx;
927 else
929 #ifdef HAVE_slt
930 if (HAVE_slt
931 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
932 && STORE_FLAG_VALUE == -1)
934 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
935 lowpart_mode, 0);
936 fill_value = gen_reg_rtx (word_mode);
937 emit_insn (gen_slt (fill_value));
939 else
940 #endif
942 fill_value
943 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
944 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
945 NULL_RTX, 0);
946 fill_value = convert_to_mode (word_mode, fill_value, 1);
950 /* Fill the remaining words. */
951 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
953 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
954 rtx subword = operand_subword (to, index, 1, to_mode);
956 if (subword == 0)
957 abort ();
959 if (fill_value != subword)
960 emit_move_insn (subword, fill_value);
963 insns = get_insns ();
964 end_sequence ();
966 emit_no_conflict_block (insns, to, from, NULL_RTX,
967 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
968 return;
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
975 if (!((GET_CODE (from) == MEM
976 && ! MEM_VOLATILE_P (from)
977 && direct_load[(int) to_mode]
978 && ! mode_dependent_address_p (XEXP (from, 0)))
979 || GET_CODE (from) == REG
980 || GET_CODE (from) == SUBREG))
981 from = force_reg (from_mode, from);
982 convert_move (to, gen_lowpart (word_mode, from), 0);
983 return;
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode == PQImode)
989 if (from_mode != QImode)
990 from = convert_to_mode (QImode, from, unsignedp);
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2)
995 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_truncqipqi2 */
999 abort ();
1002 if (from_mode == PQImode)
1004 if (to_mode != QImode)
1006 from = convert_to_mode (QImode, from, unsignedp);
1007 from_mode = QImode;
1009 else
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2)
1014 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_extendpqiqi2 */
1018 abort ();
1022 if (to_mode == PSImode)
1024 if (from_mode != SImode)
1025 from = convert_to_mode (SImode, from, unsignedp);
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2)
1030 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1031 return;
1033 #endif /* HAVE_truncsipsi2 */
1034 abort ();
1037 if (from_mode == PSImode)
1039 if (to_mode != SImode)
1041 from = convert_to_mode (SImode, from, unsignedp);
1042 from_mode = SImode;
1044 else
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp && HAVE_extendpsisi2)
1049 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1050 return;
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp && HAVE_zero_extendpsisi2)
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1057 return;
1059 #endif /* HAVE_zero_extendpsisi2 */
1060 abort ();
1064 if (to_mode == PDImode)
1066 if (from_mode != DImode)
1067 from = convert_to_mode (DImode, from, unsignedp);
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2)
1072 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1073 return;
1075 #endif /* HAVE_truncdipdi2 */
1076 abort ();
1079 if (from_mode == PDImode)
1081 if (to_mode != DImode)
1083 from = convert_to_mode (DImode, from, unsignedp);
1084 from_mode = DImode;
1086 else
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2)
1091 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1092 return;
1094 #endif /* HAVE_extendpdidi2 */
1095 abort ();
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (from_mode)))
1107 if (!((GET_CODE (from) == MEM
1108 && ! MEM_VOLATILE_P (from)
1109 && direct_load[(int) to_mode]
1110 && ! mode_dependent_address_p (XEXP (from, 0)))
1111 || GET_CODE (from) == REG
1112 || GET_CODE (from) == SUBREG))
1113 from = force_reg (from_mode, from);
1114 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1116 from = copy_to_reg (from);
1117 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 return;
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1124 /* Convert directly if that works. */
1125 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1126 != CODE_FOR_nothing)
1128 if (flag_force_mem)
1129 from = force_not_mem (from);
1131 emit_unop_insn (code, to, from, equiv_code);
1132 return;
1134 else
1136 enum machine_mode intermediate;
1137 rtx tmp;
1138 tree shift_amount;
1140 /* Search for a mode to convert via. */
1141 for (intermediate = from_mode; intermediate != VOIDmode;
1142 intermediate = GET_MODE_WIDER_MODE (intermediate))
1143 if (((can_extend_p (to_mode, intermediate, unsignedp)
1144 != CODE_FOR_nothing)
1145 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1147 GET_MODE_BITSIZE (intermediate))))
1148 && (can_extend_p (intermediate, from_mode, unsignedp)
1149 != CODE_FOR_nothing))
1151 convert_move (to, convert_to_mode (intermediate, from,
1152 unsignedp), unsignedp);
1153 return;
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1159 - GET_MODE_BITSIZE (from_mode), 0);
1160 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1161 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1162 to, unsignedp);
1163 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1164 to, unsignedp);
1165 if (tmp != to)
1166 emit_move_insn (to, tmp);
1167 return;
1171 /* Support special truncate insns for certain modes. */
1173 if (from_mode == DImode && to_mode == SImode)
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2)
1178 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1179 return;
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1186 if (from_mode == DImode && to_mode == HImode)
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2)
1191 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1192 return;
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1199 if (from_mode == DImode && to_mode == QImode)
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2)
1204 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1205 return;
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1212 if (from_mode == SImode && to_mode == HImode)
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2)
1217 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1218 return;
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1225 if (from_mode == SImode && to_mode == QImode)
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2)
1230 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1231 return;
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1238 if (from_mode == HImode && to_mode == QImode)
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2)
1243 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1244 return;
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1251 if (from_mode == TImode && to_mode == DImode)
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2)
1256 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1257 return;
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1264 if (from_mode == TImode && to_mode == SImode)
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2)
1269 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1270 return;
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1277 if (from_mode == TImode && to_mode == HImode)
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2)
1282 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1283 return;
1285 #endif
1286 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 return;
1290 if (from_mode == TImode && to_mode == QImode)
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2)
1295 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1296 return;
1298 #endif
1299 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 return;
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1308 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1309 emit_move_insn (to, temp);
1310 return;
1313 /* Mode combination is not recognized. */
1314 abort ();
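/* A minimal usage sketch for convert_move (illustrative only, not part of
   the original file): widening an SImode value into a fresh DImode pseudo
   with zero-extension.  */
#if 0
static rtx
example_zero_extend_si_to_di (src)
     rtx src;				/* an SImode value */
{
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src, 1);		/* nonzero UNSIGNEDP => zero-extend */
  return dst;
}
#endif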
1317 /* Return an rtx for a value that would result
1318 from converting X to mode MODE.
1319 Both X and MODE may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1321 This can be done by referring to a part of X in place
1322 or by copying to a new temporary with conversion.
1324 This function *must not* call protect_from_queue
1325 except when putting X into an insn (in which case convert_move does it). */
1328 convert_to_mode (mode, x, unsignedp)
1329 enum machine_mode mode;
1330 rtx x;
1331 int unsignedp;
1333 return convert_modes (mode, VOIDmode, x, unsignedp);
1336 /* Return an rtx for a value that would result
1337 from converting X from mode OLDMODE to mode MODE.
1338 Both modes may be floating, or both integer.
1339 UNSIGNEDP is nonzero if X is an unsigned value.
1341 This can be done by referring to a part of X in place
1342 or by copying to a new temporary with conversion.
1344 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1346 This function *must not* call protect_from_queue
1347 except when putting X into an insn (in which case convert_move does it). */
1350 convert_modes (mode, oldmode, x, unsignedp)
1351 enum machine_mode mode, oldmode;
1352 rtx x;
1353 int unsignedp;
1355 rtx temp;
1357 /* If FROM is a SUBREG that indicates that we have already done at least
1358 the required extension, strip it. */
1360 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1361 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1362 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1363 x = gen_lowpart (mode, x);
1365 if (GET_MODE (x) != VOIDmode)
1366 oldmode = GET_MODE (x);
1368 if (mode == oldmode)
1369 return x;
1371 /* There is one case that we must handle specially: If we are converting
1372 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1373 we are to interpret the constant as unsigned, gen_lowpart will do
1374 the wrong thing if the constant appears negative. What we want to do is
1375 make the high-order word of the constant zero, not all ones. */
1377 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1378 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1381 HOST_WIDE_INT val = INTVAL (x);
1383 if (oldmode != VOIDmode
1384 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1386 int width = GET_MODE_BITSIZE (oldmode);
1388 /* We need to zero extend VAL. */
1389 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1392 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1395 /* We can do this with a gen_lowpart if both desired and current modes
1396 are integer, and this is either a constant integer, a register, or a
1397 non-volatile MEM. Except for the constant case where MODE is no
1398 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1400 if ((GET_CODE (x) == CONST_INT
1401 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1402 || (GET_MODE_CLASS (mode) == MODE_INT
1403 && GET_MODE_CLASS (oldmode) == MODE_INT
1404 && (GET_CODE (x) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1406 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1407 && direct_load[(int) mode])
1408 || (GET_CODE (x) == REG
1409 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1410 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1412 /* ?? If we don't know OLDMODE, we have to assume here that
1413 X does not need sign- or zero-extension. This may not be
1414 the case, but it's the best we can do. */
1415 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1416 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1418 HOST_WIDE_INT val = INTVAL (x);
1419 int width = GET_MODE_BITSIZE (oldmode);
1421 /* We must sign or zero-extend in this case. Start by
1422 zero-extending, then sign extend if we need to. */
1423 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1424 if (! unsignedp
1425 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1426 val |= (HOST_WIDE_INT) (-1) << width;
1428 return gen_int_mode (val, mode);
1431 return gen_lowpart (mode, x);
1434 temp = gen_reg_rtx (mode);
1435 convert_move (temp, x, unsignedp);
1436 return temp;
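/* Worked example for the CONST_INT path above (illustrative only):
   convert_modes (SImode, QImode, GEN_INT (-1), 1) interprets the constant
   as an unsigned QImode value, masks it to the low 8 bits and returns
   (const_int 255); with UNSIGNEDP == 0 the sign bit is propagated instead
   and the result remains (const_int -1).  */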
1439 /* This macro is used to determine what the largest unit size that
1440 move_by_pieces can use is. */
1442 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1443 move efficiently, as opposed to MOVE_MAX which is the maximum
1444 number of bytes we can move with a single instruction. */
1446 #ifndef MOVE_MAX_PIECES
1447 #define MOVE_MAX_PIECES MOVE_MAX
1448 #endif
1450 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1451 store efficiently. Due to internal GCC limitations, this is
1452 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1453 for an immediate constant. */
1455 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1457 /* Generate several move instructions to copy LEN bytes from block FROM to
1458 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1459 and TO through protect_from_queue before calling.
1461 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1462 used to push FROM to the stack.
1464 ALIGN is maximum alignment we can assume. */
1466 void
1467 move_by_pieces (to, from, len, align)
1468 rtx to, from;
1469 unsigned HOST_WIDE_INT len;
1470 unsigned int align;
1472 struct move_by_pieces data;
1473 rtx to_addr, from_addr = XEXP (from, 0);
1474 unsigned int max_size = MOVE_MAX_PIECES + 1;
1475 enum machine_mode mode = VOIDmode, tmode;
1476 enum insn_code icode;
1478 data.offset = 0;
1479 data.from_addr = from_addr;
1480 if (to)
1482 to_addr = XEXP (to, 0);
1483 data.to = to;
1484 data.autinc_to
1485 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1486 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1487 data.reverse
1488 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1490 else
1492 to_addr = NULL_RTX;
1493 data.to = NULL_RTX;
1494 data.autinc_to = 1;
1495 #ifdef STACK_GROWS_DOWNWARD
1496 data.reverse = 1;
1497 #else
1498 data.reverse = 0;
1499 #endif
1501 data.to_addr = to_addr;
1502 data.from = from;
1503 data.autinc_from
1504 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1505 || GET_CODE (from_addr) == POST_INC
1506 || GET_CODE (from_addr) == POST_DEC);
1508 data.explicit_inc_from = 0;
1509 data.explicit_inc_to = 0;
1510 if (data.reverse) data.offset = len;
1511 data.len = len;
1513 /* If copying requires more than two move insns,
1514 copy addresses to registers (to make displacements shorter)
1515 and use post-increment if available. */
1516 if (!(data.autinc_from && data.autinc_to)
1517 && move_by_pieces_ninsns (len, align) > 2)
1519 /* Find the mode of the largest move... */
1520 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1521 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1522 if (GET_MODE_SIZE (tmode) < max_size)
1523 mode = tmode;
1525 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1527 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1528 data.autinc_from = 1;
1529 data.explicit_inc_from = -1;
1531 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1533 data.from_addr = copy_addr_to_reg (from_addr);
1534 data.autinc_from = 1;
1535 data.explicit_inc_from = 1;
1537 if (!data.autinc_from && CONSTANT_P (from_addr))
1538 data.from_addr = copy_addr_to_reg (from_addr);
1539 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1541 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1542 data.autinc_to = 1;
1543 data.explicit_inc_to = -1;
1545 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1547 data.to_addr = copy_addr_to_reg (to_addr);
1548 data.autinc_to = 1;
1549 data.explicit_inc_to = 1;
1551 if (!data.autinc_to && CONSTANT_P (to_addr))
1552 data.to_addr = copy_addr_to_reg (to_addr);
1555 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1556 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1557 align = MOVE_MAX * BITS_PER_UNIT;
1559 /* First move what we can in the largest integer mode, then go to
1560 successively smaller modes. */
1562 while (max_size > 1)
1564 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1565 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1566 if (GET_MODE_SIZE (tmode) < max_size)
1567 mode = tmode;
1569 if (mode == VOIDmode)
1570 break;
1572 icode = mov_optab->handlers[(int) mode].insn_code;
1573 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1574 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1576 max_size = GET_MODE_SIZE (mode);
1579 /* The code above should have handled everything. */
1580 if (data.len > 0)
1581 abort ();
1584 /* Return number of insns required to move L bytes by pieces.
1585 ALIGN (in bits) is maximum alignment we can assume. */
1587 static unsigned HOST_WIDE_INT
1588 move_by_pieces_ninsns (l, align)
1589 unsigned HOST_WIDE_INT l;
1590 unsigned int align;
1592 unsigned HOST_WIDE_INT n_insns = 0;
1593 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1595 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1596 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1597 align = MOVE_MAX * BITS_PER_UNIT;
1599 while (max_size > 1)
1601 enum machine_mode mode = VOIDmode, tmode;
1602 enum insn_code icode;
1604 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1605 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1606 if (GET_MODE_SIZE (tmode) < max_size)
1607 mode = tmode;
1609 if (mode == VOIDmode)
1610 break;
1612 icode = mov_optab->handlers[(int) mode].insn_code;
1613 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1614 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1616 max_size = GET_MODE_SIZE (mode);
1619 if (l)
1620 abort ();
1621 return n_insns;
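/* Worked example (illustrative only, assuming a 32-bit target where
   MOVE_MAX is 4 and the block is word-aligned): for L = 11 the loop above
   counts two SImode moves (8 bytes), one HImode move and one QImode move,
   so move_by_pieces_ninsns (11, 32) returns 4; MOVE_BY_PIECES_P then
   compares that count against MOVE_RATIO.  */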
1624 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1625 with move instructions for mode MODE. GENFUN is the gen_... function
1626 to make a move insn for that mode. DATA has all the other info. */
1628 static void
1629 move_by_pieces_1 (genfun, mode, data)
1630 rtx (*genfun) PARAMS ((rtx, ...));
1631 enum machine_mode mode;
1632 struct move_by_pieces *data;
1634 unsigned int size = GET_MODE_SIZE (mode);
1635 rtx to1 = NULL_RTX, from1;
1637 while (data->len >= size)
1639 if (data->reverse)
1640 data->offset -= size;
1642 if (data->to)
1644 if (data->autinc_to)
1645 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1646 data->offset);
1647 else
1648 to1 = adjust_address (data->to, mode, data->offset);
1651 if (data->autinc_from)
1652 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1653 data->offset);
1654 else
1655 from1 = adjust_address (data->from, mode, data->offset);
1657 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1658 emit_insn (gen_add2_insn (data->to_addr,
1659 GEN_INT (-(HOST_WIDE_INT)size)));
1660 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1661 emit_insn (gen_add2_insn (data->from_addr,
1662 GEN_INT (-(HOST_WIDE_INT)size)));
1664 if (data->to)
1665 emit_insn ((*genfun) (to1, from1));
1666 else
1668 #ifdef PUSH_ROUNDING
1669 emit_single_push_insn (mode, from1, NULL);
1670 #else
1671 abort ();
1672 #endif
1675 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1676 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1677 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1678 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1680 if (! data->reverse)
1681 data->offset += size;
1683 data->len -= size;
1687 /* Emit code to move a block Y to a block X. This may be done with
1688 string-move instructions, with multiple scalar move instructions,
1689 or with a library call.
1691 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1692 SIZE is an rtx that says how long they are.
1693 ALIGN is the maximum alignment we can assume they have.
1694 METHOD describes what kind of copy this is, and what mechanisms may be used.
1696 Return the address of the new block, if memcpy is called and returns it,
1697 0 otherwise. */
1700 emit_block_move (x, y, size, method)
1701 rtx x, y, size;
1702 enum block_op_methods method;
1704 bool may_use_call;
1705 rtx retval = 0;
1706 unsigned int align;
1708 switch (method)
1710 case BLOCK_OP_NORMAL:
1711 may_use_call = true;
1712 break;
1714 case BLOCK_OP_CALL_PARM:
1715 may_use_call = block_move_libcall_safe_for_call_parm ();
1717 /* Make inhibit_defer_pop nonzero around the library call
1718 to force it to pop the arguments right away. */
1719 NO_DEFER_POP;
1720 break;
1722 case BLOCK_OP_NO_LIBCALL:
1723 may_use_call = false;
1724 break;
1726 default:
1727 abort ();
1730 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1732 if (GET_MODE (x) != BLKmode)
1733 abort ();
1734 if (GET_MODE (y) != BLKmode)
1735 abort ();
1737 x = protect_from_queue (x, 1);
1738 y = protect_from_queue (y, 0);
1739 size = protect_from_queue (size, 0);
1741 if (GET_CODE (x) != MEM)
1742 abort ();
1743 if (GET_CODE (y) != MEM)
1744 abort ();
1745 if (size == 0)
1746 abort ();
1748 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1749 can be incorrect is coming from __builtin_memcpy. */
1750 if (GET_CODE (size) == CONST_INT)
1752 x = shallow_copy_rtx (x);
1753 y = shallow_copy_rtx (y);
1754 set_mem_size (x, size);
1755 set_mem_size (y, size);
1758 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1759 move_by_pieces (x, y, INTVAL (size), align);
1760 else if (emit_block_move_via_movstr (x, y, size, align))
1762 else if (may_use_call)
1763 retval = emit_block_move_via_libcall (x, y, size);
1764 else
1765 emit_block_move_via_loop (x, y, size, align);
1767 if (method == BLOCK_OP_CALL_PARM)
1768 OK_DEFER_POP;
1770 return retval;
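/* A minimal usage sketch for emit_block_move (illustrative only, not part
   of the original file; X and Y stand for BLKmode MEMs and NBYTES for a
   size known at expansion time).  */
#if 0
static void
example_struct_copy (x, y, nbytes)
     rtx x, y;				/* BLKmode MEMs */
     HOST_WIDE_INT nbytes;
{
  /* An ordinary assignment-style copy; emit_block_move chooses between
     move_by_pieces, a movstr pattern and the memcpy/bcopy libcall as
     described above.  */
  emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif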
1773 /* A subroutine of emit_block_move. Returns true if calling the
1774 block move libcall will not clobber any parameters which may have
1775 already been placed on the stack. */
1777 static bool
1778 block_move_libcall_safe_for_call_parm ()
1780 if (PUSH_ARGS)
1781 return true;
1782 else
1784 /* Check to see whether memcpy takes all register arguments. */
1785 static enum {
1786 takes_regs_uninit, takes_regs_no, takes_regs_yes
1787 } takes_regs = takes_regs_uninit;
1789 switch (takes_regs)
1791 case takes_regs_uninit:
1793 CUMULATIVE_ARGS args_so_far;
1794 tree fn, arg;
1796 fn = emit_block_move_libcall_fn (false);
1797 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1799 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1800 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1802 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1803 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1804 if (!tmp || !REG_P (tmp))
1805 goto fail_takes_regs;
1806 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1807 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1808 NULL_TREE, 1))
1809 goto fail_takes_regs;
1810 #endif
1811 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1814 takes_regs = takes_regs_yes;
1815 /* FALLTHRU */
1817 case takes_regs_yes:
1818 return true;
1820 fail_takes_regs:
1821 takes_regs = takes_regs_no;
1822 /* FALLTHRU */
1823 case takes_regs_no:
1824 return false;
1826 default:
1827 abort ();
1832 /* A subroutine of emit_block_move. Expand a movstr pattern;
1833 return true if successful. */
1835 static bool
1836 emit_block_move_via_movstr (x, y, size, align)
1837 rtx x, y, size;
1838 unsigned int align;
1840 /* Try the most limited insn first, because there's no point
1841 including more than one in the machine description unless
1842 the more limited one has some advantage. */
1844 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1845 enum machine_mode mode;
1847 /* Since this is a move insn, we don't care about volatility. */
1848 volatile_ok = 1;
1850 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1851 mode = GET_MODE_WIDER_MODE (mode))
1853 enum insn_code code = movstr_optab[(int) mode];
1854 insn_operand_predicate_fn pred;
1856 if (code != CODE_FOR_nothing
1857 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1858 here because if SIZE is less than the mode mask, as it is
1859 returned by the macro, it will definitely be less than the
1860 actual mode mask. */
1861 && ((GET_CODE (size) == CONST_INT
1862 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1863 <= (GET_MODE_MASK (mode) >> 1)))
1864 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1865 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1866 || (*pred) (x, BLKmode))
1867 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1868 || (*pred) (y, BLKmode))
1869 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1870 || (*pred) (opalign, VOIDmode)))
1872 rtx op2;
1873 rtx last = get_last_insn ();
1874 rtx pat;
1876 op2 = convert_to_mode (mode, size, 1);
1877 pred = insn_data[(int) code].operand[2].predicate;
1878 if (pred != 0 && ! (*pred) (op2, mode))
1879 op2 = copy_to_mode_reg (mode, op2);
1881 /* ??? When called via emit_block_move_for_call, it'd be
1882 nice if there were some way to inform the backend, so
1883 that it doesn't fail the expansion because it thinks
1884 emitting the libcall would be more efficient. */
1886 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1887 if (pat)
1889 emit_insn (pat);
1890 volatile_ok = 0;
1891 return true;
1893 else
1894 delete_insns_since (last);
1898 volatile_ok = 0;
1899 return false;
1902 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1903 Return the return value from memcpy, 0 otherwise. */
1905 static rtx
1906 emit_block_move_via_libcall (dst, src, size)
1907 rtx dst, src, size;
1909 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1910 enum machine_mode size_mode;
1911 rtx retval;
1913 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1915 It is unsafe to save the value generated by protect_from_queue
1916 and reuse it later. Consider what happens if emit_queue is
1917 called before the return value from protect_from_queue is used.
1919 Expansion of the CALL_EXPR below will call emit_queue before
1920 we are finished emitting RTL for argument setup. So if we are
1921 not careful we could get the wrong value for an argument.
1923 To avoid this problem we go ahead and emit code to copy X, Y &
1924 SIZE into new pseudos. We can then place those new pseudos
1925 into an RTL_EXPR and use them later, even after a call to
1926 emit_queue.
1928 Note this is not strictly needed for library calls since they
1929 do not call emit_queue before loading their arguments. However,
1930 we may need to have library calls call emit_queue in the future
1931 since failing to do so could cause problems for targets which
1932 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1934 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1935 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1937 if (TARGET_MEM_FUNCTIONS)
1938 size_mode = TYPE_MODE (sizetype);
1939 else
1940 size_mode = TYPE_MODE (unsigned_type_node);
1941 size = convert_to_mode (size_mode, size, 1);
1942 size = copy_to_mode_reg (size_mode, size);
1944 /* It is incorrect to use the libcall calling conventions to call
1945 memcpy in this context. This could be a user call to memcpy and
1946 the user may wish to examine the return value from memcpy. For
1947 targets where libcalls and normal calls have different conventions
1948 for returning pointers, we could end up generating incorrect code.
1950 For convenience, we generate the call to bcopy this way as well. */
1952 dst_tree = make_tree (ptr_type_node, dst);
1953 src_tree = make_tree (ptr_type_node, src);
1954 if (TARGET_MEM_FUNCTIONS)
1955 size_tree = make_tree (sizetype, size);
1956 else
1957 size_tree = make_tree (unsigned_type_node, size);
1959 fn = emit_block_move_libcall_fn (true);
1960 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1961 if (TARGET_MEM_FUNCTIONS)
1963 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1964 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1966 else
1968 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1969 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1972 /* Now we have to build up the CALL_EXPR itself. */
1973 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1974 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1975 call_expr, arg_list, NULL_TREE);
1976 TREE_SIDE_EFFECTS (call_expr) = 1;
1978 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1980 /* If we are initializing a readonly value, show the above call
1981 clobbered it. Otherwise, a load from it may erroneously be
1982 hoisted from a loop. */
1983 if (RTX_UNCHANGING_P (dst))
1984 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1986 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1989 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1990 for the function we use for block copies. The first time FOR_CALL
1991 is true, we call assemble_external. */
1993 static GTY(()) tree block_move_fn;
1995 static tree
1996 emit_block_move_libcall_fn (for_call)
1997 int for_call;
1999 static bool emitted_extern;
2000 tree fn = block_move_fn, args;
2002 if (!fn)
2004 if (TARGET_MEM_FUNCTIONS)
2006 fn = get_identifier ("memcpy");
2007 args = build_function_type_list (ptr_type_node, ptr_type_node,
2008 const_ptr_type_node, sizetype,
2009 NULL_TREE);
2011 else
2013 fn = get_identifier ("bcopy");
2014 args = build_function_type_list (void_type_node, const_ptr_type_node,
2015 ptr_type_node, unsigned_type_node,
2016 NULL_TREE);
2019 fn = build_decl (FUNCTION_DECL, fn, args);
2020 DECL_EXTERNAL (fn) = 1;
2021 TREE_PUBLIC (fn) = 1;
2022 DECL_ARTIFICIAL (fn) = 1;
2023 TREE_NOTHROW (fn) = 1;
2025 block_move_fn = fn;
2028 if (for_call && !emitted_extern)
2030 emitted_extern = true;
2031 make_decl_rtl (fn, NULL);
2032 assemble_external (fn);
2035 return fn;
2038 /* A subroutine of emit_block_move. Copy the data via an explicit
2039 loop. This is used only when libcalls are forbidden. */
2040 /* ??? It'd be nice to copy in hunks larger than QImode. */
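/* In outline, the code emitted below behaves like the following
   byte-wise copy (an illustrative sketch; ITER is a fresh pseudo):

	iter = 0;
	goto cmp;
     top:
	x[iter] = y[iter];
	iter++;
     cmp:
	if (iter < size) goto top;  */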
2042 static void
2043 emit_block_move_via_loop (x, y, size, align)
2044 rtx x, y, size;
2045 unsigned int align ATTRIBUTE_UNUSED;
2047 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2048 enum machine_mode iter_mode;
2050 iter_mode = GET_MODE (size);
2051 if (iter_mode == VOIDmode)
2052 iter_mode = word_mode;
2054 top_label = gen_label_rtx ();
2055 cmp_label = gen_label_rtx ();
2056 iter = gen_reg_rtx (iter_mode);
2058 emit_move_insn (iter, const0_rtx);
2060 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2061 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2062 do_pending_stack_adjust ();
2064 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2066 emit_jump (cmp_label);
2067 emit_label (top_label);
2069 tmp = convert_modes (Pmode, iter_mode, iter, true);
2070 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2071 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2072 x = change_address (x, QImode, x_addr);
2073 y = change_address (y, QImode, y_addr);
2075 emit_move_insn (x, y);
2077 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2078 true, OPTAB_LIB_WIDEN);
2079 if (tmp != iter)
2080 emit_move_insn (iter, tmp);
2082 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2083 emit_label (cmp_label);
2085 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2086 true, top_label);
2088 emit_note (NULL, NOTE_INSN_LOOP_END);
2091 /* Copy all or part of a value X into registers starting at REGNO.
2092 The number of registers to be filled is NREGS. */
2094 void
2095 move_block_to_reg (regno, x, nregs, mode)
2096 int regno;
2097 rtx x;
2098 int nregs;
2099 enum machine_mode mode;
2101 int i;
2102 #ifdef HAVE_load_multiple
2103 rtx pat;
2104 rtx last;
2105 #endif
2107 if (nregs == 0)
2108 return;
2110 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2111 x = validize_mem (force_const_mem (mode, x));
2113 /* See if the machine can do this with a load multiple insn. */
2114 #ifdef HAVE_load_multiple
2115 if (HAVE_load_multiple)
2117 last = get_last_insn ();
2118 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2119 GEN_INT (nregs));
2120 if (pat)
2122 emit_insn (pat);
2123 return;
2125 else
2126 delete_insns_since (last);
2128 #endif
2130 for (i = 0; i < nregs; i++)
2131 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2132 operand_subword_force (x, i, mode));
2135 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2136 The number of registers to be filled is NREGS. SIZE indicates the number
2137 of bytes in the object X. */
2139 void
2140 move_block_from_reg (regno, x, nregs, size)
2141 int regno;
2142 rtx x;
2143 int nregs;
2144 int size;
2146 int i;
2147 #ifdef HAVE_store_multiple
2148 rtx pat;
2149 rtx last;
2150 #endif
2151 enum machine_mode mode;
2153 if (nregs == 0)
2154 return;
2156 /* If SIZE is that of a mode no bigger than a word, just use that
2157 mode's store operation. */
2158 if (size <= UNITS_PER_WORD
2159 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2161 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2162 return;
2165 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2166 to the left before storing to memory. Note that the previous test
2167 doesn't handle all cases (e.g. SIZE == 3). */
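/* For example, with UNITS_PER_WORD == 4 and SIZE == 3, the three
   significant bytes occupy the low-order end of the register, so they
   are shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits before the
   word is written out.  */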
2168 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2170 rtx tem = operand_subword (x, 0, 1, BLKmode);
2171 rtx shift;
2173 if (tem == 0)
2174 abort ();
2176 shift = expand_shift (LSHIFT_EXPR, word_mode,
2177 gen_rtx_REG (word_mode, regno),
2178 build_int_2 ((UNITS_PER_WORD - size)
2179 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2180 emit_move_insn (tem, shift);
2181 return;
2184 /* See if the machine can do this with a store multiple insn. */
2185 #ifdef HAVE_store_multiple
2186 if (HAVE_store_multiple)
2188 last = get_last_insn ();
2189 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2190 GEN_INT (nregs));
2191 if (pat)
2193 emit_insn (pat);
2194 return;
2196 else
2197 delete_insns_since (last);
2199 #endif
2201 for (i = 0; i < nregs; i++)
2203 rtx tem = operand_subword (x, i, 1, BLKmode);
2205 if (tem == 0)
2206 abort ();
2208 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2212 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2213 ORIG, where ORIG is a non-consecutive group of registers represented by
2214 a PARALLEL. The clone is identical to the original except in that the
2215 original set of registers is replaced by a new set of pseudo registers.
2216 The new set has the same modes as the original set. */
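/* For example, ORIG might be
     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])
   where each element pairs a register with its byte offset; the copy
   keeps the modes and offsets but substitutes fresh pseudo registers.  */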
2219 gen_group_rtx (orig)
2220 rtx orig;
2222 int i, length;
2223 rtx *tmps;
2225 if (GET_CODE (orig) != PARALLEL)
2226 abort ();
2228 length = XVECLEN (orig, 0);
2229 tmps = (rtx *) alloca (sizeof (rtx) * length);
2231 /* Skip a NULL entry in first slot. */
2232 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2234 if (i)
2235 tmps[0] = 0;
2237 for (; i < length; i++)
2239 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2240 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2242 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2245 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2248 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2249 registers represented by a PARALLEL. SSIZE represents the total size of
2250 block SRC in bytes, or -1 if not known. */
2251 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2252 the balance will be in what would be the low-order memory addresses, i.e.
2253 left justified for big endian, right justified for little endian. This
2254 happens to be true for the targets currently using this support. If this
2255 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2256 would be needed. */
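/* For example, a 6-byte structure arriving in a single 8-byte register
   gives SSIZE == 6 with one (reg:DI, offset 0) element; the loop below
   trims BYTELEN to 6 and, on a big-endian target, shifts the extracted
   piece left by 16 bits so the six significant bytes end up in the
   high-order part of the destination register.  */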
2258 void
2259 emit_group_load (dst, orig_src, ssize)
2260 rtx dst, orig_src;
2261 int ssize;
2263 rtx *tmps, src;
2264 int start, i;
2266 if (GET_CODE (dst) != PARALLEL)
2267 abort ();
2269 /* Check for a NULL entry, used to indicate that the parameter goes
2270 both on the stack and in registers. */
2271 if (XEXP (XVECEXP (dst, 0, 0), 0))
2272 start = 0;
2273 else
2274 start = 1;
2276 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2278 /* Process the pieces. */
2279 for (i = start; i < XVECLEN (dst, 0); i++)
2281 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2282 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2283 unsigned int bytelen = GET_MODE_SIZE (mode);
2284 int shift = 0;
2286 /* Handle trailing fragments that run over the size of the struct. */
2287 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2289 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2290 bytelen = ssize - bytepos;
2291 if (bytelen <= 0)
2292 abort ();
2295 /* If we won't be loading directly from memory, protect the real source
2296 from strange tricks we might play; but make sure that the source can
2297 be loaded directly into the destination. */
2298 src = orig_src;
2299 if (GET_CODE (orig_src) != MEM
2300 && (!CONSTANT_P (orig_src)
2301 || (GET_MODE (orig_src) != mode
2302 && GET_MODE (orig_src) != VOIDmode)))
2304 if (GET_MODE (orig_src) == VOIDmode)
2305 src = gen_reg_rtx (mode);
2306 else
2307 src = gen_reg_rtx (GET_MODE (orig_src));
2309 emit_move_insn (src, orig_src);
2312 /* Optimize the access just a bit. */
2313 if (GET_CODE (src) == MEM
2314 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2315 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2316 && bytelen == GET_MODE_SIZE (mode))
2318 tmps[i] = gen_reg_rtx (mode);
2319 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2321 else if (GET_CODE (src) == CONCAT)
2323 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2324 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2326 if ((bytepos == 0 && bytelen == slen0)
2327 || (bytepos != 0 && bytepos + bytelen <= slen))
2329 /* The following assumes that the concatenated objects all
2330 have the same size. In this case, a simple calculation
2331 can be used to determine the object and the bit field
2332 to be extracted. */
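/* For example, if SRC is a DCmode CONCAT of two DFmode values,
   SLEN0 == 8, so BYTEPOS 0 picks out the real part and BYTEPOS 8
   the imaginary part.  */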
2333 tmps[i] = XEXP (src, bytepos / slen0);
2334 if (! CONSTANT_P (tmps[i])
2335 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2336 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2337 (bytepos % slen0) * BITS_PER_UNIT,
2338 1, NULL_RTX, mode, mode, ssize);
2340 else if (bytepos == 0)
2342 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2343 emit_move_insn (mem, src);
2344 tmps[i] = adjust_address (mem, mode, 0);
2346 else
2347 abort ();
2349 else if (CONSTANT_P (src)
2350 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2351 tmps[i] = src;
2352 else
2353 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2354 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2355 mode, mode, ssize);
2357 if (BYTES_BIG_ENDIAN && shift)
2358 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2359 tmps[i], 0, OPTAB_WIDEN);
2362 emit_queue ();
2364 /* Copy the extracted pieces into the proper (probable) hard regs. */
2365 for (i = start; i < XVECLEN (dst, 0); i++)
2366 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2369 /* Emit code to move a block SRC to block DST, where SRC and DST are
2370 non-consecutive groups of registers, each represented by a PARALLEL. */
2372 void
2373 emit_group_move (dst, src)
2374 rtx dst, src;
2376 int i;
2378 if (GET_CODE (src) != PARALLEL
2379 || GET_CODE (dst) != PARALLEL
2380 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2381 abort ();
2383 /* Skip first entry if NULL. */
2384 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2385 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2386 XEXP (XVECEXP (src, 0, i), 0));
2389 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2390 registers represented by a PARALLEL. SSIZE represents the total size of
2391 block DST, or -1 if not known. */
2393 void
2394 emit_group_store (orig_dst, src, ssize)
2395 rtx orig_dst, src;
2396 int ssize;
2398 rtx *tmps, dst;
2399 int start, i;
2401 if (GET_CODE (src) != PARALLEL)
2402 abort ();
2404 /* Check for a NULL entry, used to indicate that the parameter goes
2405 both on the stack and in registers. */
2406 if (XEXP (XVECEXP (src, 0, 0), 0))
2407 start = 0;
2408 else
2409 start = 1;
2411 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2413 /* Copy the (probable) hard regs into pseudos. */
2414 for (i = start; i < XVECLEN (src, 0); i++)
2416 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2417 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2418 emit_move_insn (tmps[i], reg);
2420 emit_queue ();
2422 /* If we won't be storing directly into memory, protect the real destination
2423 from strange tricks we might play. */
2424 dst = orig_dst;
2425 if (GET_CODE (dst) == PARALLEL)
2427 rtx temp;
2429 /* We can get a PARALLEL dst if there is a conditional expression in
2430 a return statement. In that case, the dst and src are the same,
2431 so no action is necessary. */
2432 if (rtx_equal_p (dst, src))
2433 return;
2435 /* It is unclear if we can ever reach here, but we may as well handle
2436 it. Allocate a temporary, and split this into a store/load to/from
2437 the temporary. */
2439 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2440 emit_group_store (temp, src, ssize);
2441 emit_group_load (dst, temp, ssize);
2442 return;
2444 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2446 dst = gen_reg_rtx (GET_MODE (orig_dst));
2447 /* Make life a bit easier for combine. */
2448 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2451 /* Process the pieces. */
2452 for (i = start; i < XVECLEN (src, 0); i++)
2454 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2455 enum machine_mode mode = GET_MODE (tmps[i]);
2456 unsigned int bytelen = GET_MODE_SIZE (mode);
2457 rtx dest = dst;
2459 /* Handle trailing fragments that run over the size of the struct. */
2460 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2462 if (BYTES_BIG_ENDIAN)
2464 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2465 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2466 tmps[i], 0, OPTAB_WIDEN);
2468 bytelen = ssize - bytepos;
2471 if (GET_CODE (dst) == CONCAT)
2473 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2474 dest = XEXP (dst, 0);
2475 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2477 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2478 dest = XEXP (dst, 1);
2480 else if (bytepos == 0 && XVECLEN (src, 0))
2482 dest = assign_stack_temp (GET_MODE (dest),
2483 GET_MODE_SIZE (GET_MODE (dest)), 0);
2484 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2485 tmps[i]);
2486 dst = dest;
2487 break;
2489 else
2490 abort ();
2493 /* Optimize the access just a bit. */
2494 if (GET_CODE (dest) == MEM
2495 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2496 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2497 && bytelen == GET_MODE_SIZE (mode))
2498 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2499 else
2500 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2501 mode, tmps[i], ssize);
2504 emit_queue ();
2506 /* Copy from the pseudo into the (probable) hard reg. */
2507 if (orig_dst != dst)
2508 emit_move_insn (orig_dst, dst);
2511 /* Generate code to copy a BLKmode object of TYPE out of a
2512 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2513 is null, a stack temporary is created. TGTBLK is returned.
2515 The primary purpose of this routine is to handle functions
2516 that return BLKmode structures in registers. Some machines
2517 (the PA for example) want to return all small structures
2518 in registers regardless of the structure's alignment. */
2521 copy_blkmode_from_reg (tgtblk, srcreg, type)
2522 rtx tgtblk;
2523 rtx srcreg;
2524 tree type;
2526 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2527 rtx src = NULL, dst = NULL;
2528 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2529 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2531 if (tgtblk == 0)
2533 tgtblk = assign_temp (build_qualified_type (type,
2534 (TYPE_QUALS (type)
2535 | TYPE_QUAL_CONST)),
2536 0, 1, 1);
2537 preserve_temp_slots (tgtblk);
2540 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2541 into a new pseudo which is a full word. */
2543 if (GET_MODE (srcreg) != BLKmode
2544 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2545 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2547 /* Structures whose size is not a multiple of a word are aligned
2548 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2549 machine, this means we must skip the empty high order bytes when
2550 calculating the bit offset. */
2551 if (BYTES_BIG_ENDIAN
2552 && bytes % UNITS_PER_WORD)
2553 big_endian_correction
2554 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
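/* For example, with 32-bit words and BYTES == 6 the correction is
   32 - (6 % 4) * 8 == 16 bits, so the first extraction in the loop
   below starts at bit 16 of the first word of SRCREG.  */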
2556 /* Copy the structure BITSIZE bits at a time.
2558 We could probably emit more efficient code for machines which do not use
2559 strict alignment, but it doesn't seem worth the effort at the current
2560 time. */
2561 for (bitpos = 0, xbitpos = big_endian_correction;
2562 bitpos < bytes * BITS_PER_UNIT;
2563 bitpos += bitsize, xbitpos += bitsize)
2565 /* We need a new source operand each time xbitpos is on a
2566 word boundary or when xbitpos == big_endian_correction
2567 (the first time through). */
2568 if (xbitpos % BITS_PER_WORD == 0
2569 || xbitpos == big_endian_correction)
2570 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2571 GET_MODE (srcreg));
2573 /* We need a new destination operand each time bitpos is on
2574 a word boundary. */
2575 if (bitpos % BITS_PER_WORD == 0)
2576 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2578 /* Use xbitpos for the source extraction (right justified) and
2579 bitpos for the destination store (left justified).  */
2580 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2581 extract_bit_field (src, bitsize,
2582 xbitpos % BITS_PER_WORD, 1,
2583 NULL_RTX, word_mode, word_mode,
2584 BITS_PER_WORD),
2585 BITS_PER_WORD);
2588 return tgtblk;
2591 /* Add a USE expression for REG to the (possibly empty) list pointed
2592 to by CALL_FUSAGE. REG must denote a hard register. */
2594 void
2595 use_reg (call_fusage, reg)
2596 rtx *call_fusage, reg;
2598 if (GET_CODE (reg) != REG
2599 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2600 abort ();
2602 *call_fusage
2603 = gen_rtx_EXPR_LIST (VOIDmode,
2604 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2607 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2608 starting at REGNO. All of these registers must be hard registers. */
2610 void
2611 use_regs (call_fusage, regno, nregs)
2612 rtx *call_fusage;
2613 int regno;
2614 int nregs;
2616 int i;
2618 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2619 abort ();
2621 for (i = 0; i < nregs; i++)
2622 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2625 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2626 PARALLEL REGS. This is for calls that pass values in multiple
2627 non-contiguous locations. The Irix 6 ABI has examples of this. */
2629 void
2630 use_group_regs (call_fusage, regs)
2631 rtx *call_fusage;
2632 rtx regs;
2634 int i;
2636 for (i = 0; i < XVECLEN (regs, 0); i++)
2638 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2640 /* A NULL entry means the parameter goes both on the stack and in
2641 registers. This can also be a MEM for targets that pass values
2642 partially on the stack and partially in registers. */
2643 if (reg != 0 && GET_CODE (reg) == REG)
2644 use_reg (call_fusage, reg);
2649 /* Determine whether the LEN bytes generated by CONSTFUN can be
2650 stored to memory using several move instructions. CONSTFUNDATA is
2651 a pointer which will be passed as argument in every CONSTFUN call.
2652 ALIGN is maximum alignment we can assume. Return nonzero if a
2653 call to store_by_pieces should succeed. */
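/* For example, on a 32-bit target with STORE_MAX_PIECES >= 4 and
   sufficient alignment, LEN == 7 is checked as one SImode piece, one
   HImode piece and one QImode piece; each piece's constant must
   satisfy LEGITIMATE_CONSTANT_P for the answer to be nonzero.  */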
2656 can_store_by_pieces (len, constfun, constfundata, align)
2657 unsigned HOST_WIDE_INT len;
2658 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2659 PTR constfundata;
2660 unsigned int align;
2662 unsigned HOST_WIDE_INT max_size, l;
2663 HOST_WIDE_INT offset = 0;
2664 enum machine_mode mode, tmode;
2665 enum insn_code icode;
2666 int reverse;
2667 rtx cst;
2669 if (! STORE_BY_PIECES_P (len, align))
2670 return 0;
2672 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2673 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2674 align = MOVE_MAX * BITS_PER_UNIT;
2676 /* We would first store what we can in the largest integer mode, then go to
2677 successively smaller modes. */
2679 for (reverse = 0;
2680 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2681 reverse++)
2683 l = len;
2684 mode = VOIDmode;
2685 max_size = STORE_MAX_PIECES + 1;
2686 while (max_size > 1)
2688 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2689 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2690 if (GET_MODE_SIZE (tmode) < max_size)
2691 mode = tmode;
2693 if (mode == VOIDmode)
2694 break;
2696 icode = mov_optab->handlers[(int) mode].insn_code;
2697 if (icode != CODE_FOR_nothing
2698 && align >= GET_MODE_ALIGNMENT (mode))
2700 unsigned int size = GET_MODE_SIZE (mode);
2702 while (l >= size)
2704 if (reverse)
2705 offset -= size;
2707 cst = (*constfun) (constfundata, offset, mode);
2708 if (!LEGITIMATE_CONSTANT_P (cst))
2709 return 0;
2711 if (!reverse)
2712 offset += size;
2714 l -= size;
2718 max_size = GET_MODE_SIZE (mode);
2721 /* The code above should have handled everything. */
2722 if (l != 0)
2723 abort ();
2726 return 1;
2729 /* Generate several move instructions to store LEN bytes generated by
2730 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2731 pointer which will be passed as argument in every CONSTFUN call.
2732 ALIGN is maximum alignment we can assume. */
2734 void
2735 store_by_pieces (to, len, constfun, constfundata, align)
2736 rtx to;
2737 unsigned HOST_WIDE_INT len;
2738 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2739 PTR constfundata;
2740 unsigned int align;
2742 struct store_by_pieces data;
2744 if (! STORE_BY_PIECES_P (len, align))
2745 abort ();
2746 to = protect_from_queue (to, 1);
2747 data.constfun = constfun;
2748 data.constfundata = constfundata;
2749 data.len = len;
2750 data.to = to;
2751 store_by_pieces_1 (&data, align);
2754 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2755 rtx with BLKmode). The caller must pass TO through protect_from_queue
2756 before calling. ALIGN is maximum alignment we can assume. */
2758 static void
2759 clear_by_pieces (to, len, align)
2760 rtx to;
2761 unsigned HOST_WIDE_INT len;
2762 unsigned int align;
2764 struct store_by_pieces data;
2766 data.constfun = clear_by_pieces_1;
2767 data.constfundata = NULL;
2768 data.len = len;
2769 data.to = to;
2770 store_by_pieces_1 (&data, align);
2773 /* Callback routine for clear_by_pieces.
2774 Return const0_rtx unconditionally. */
2776 static rtx
2777 clear_by_pieces_1 (data, offset, mode)
2778 PTR data ATTRIBUTE_UNUSED;
2779 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2780 enum machine_mode mode ATTRIBUTE_UNUSED;
2782 return const0_rtx;
2785 /* Subroutine of clear_by_pieces and store_by_pieces.
2786 Generate several move instructions to store LEN bytes of block TO. (A MEM
2787 rtx with BLKmode). The caller must pass TO through protect_from_queue
2788 before calling. ALIGN is maximum alignment we can assume. */
2790 static void
2791 store_by_pieces_1 (data, align)
2792 struct store_by_pieces *data;
2793 unsigned int align;
2795 rtx to_addr = XEXP (data->to, 0);
2796 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2797 enum machine_mode mode = VOIDmode, tmode;
2798 enum insn_code icode;
2800 data->offset = 0;
2801 data->to_addr = to_addr;
2802 data->autinc_to
2803 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2804 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2806 data->explicit_inc_to = 0;
2807 data->reverse
2808 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2809 if (data->reverse)
2810 data->offset = data->len;
2812 /* If storing requires more than two move insns,
2813 copy addresses to registers (to make displacements shorter)
2814 and use post-increment if available. */
2815 if (!data->autinc_to
2816 && move_by_pieces_ninsns (data->len, align) > 2)
2818 /* Determine the main mode we'll be using. */
2819 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2820 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2821 if (GET_MODE_SIZE (tmode) < max_size)
2822 mode = tmode;
2824 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2826 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2827 data->autinc_to = 1;
2828 data->explicit_inc_to = -1;
2831 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2832 && ! data->autinc_to)
2834 data->to_addr = copy_addr_to_reg (to_addr);
2835 data->autinc_to = 1;
2836 data->explicit_inc_to = 1;
2839 if ( !data->autinc_to && CONSTANT_P (to_addr))
2840 data->to_addr = copy_addr_to_reg (to_addr);
2843 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2844 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2845 align = MOVE_MAX * BITS_PER_UNIT;
2847 /* First store what we can in the largest integer mode, then go to
2848 successively smaller modes. */
2850 while (max_size > 1)
2852 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2853 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2854 if (GET_MODE_SIZE (tmode) < max_size)
2855 mode = tmode;
2857 if (mode == VOIDmode)
2858 break;
2860 icode = mov_optab->handlers[(int) mode].insn_code;
2861 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2862 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2864 max_size = GET_MODE_SIZE (mode);
2867 /* The code above should have handled everything. */
2868 if (data->len != 0)
2869 abort ();
2872 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2873 with move instructions for mode MODE. GENFUN is the gen_... function
2874 to make a move insn for that mode. DATA has all the other info. */
2876 static void
2877 store_by_pieces_2 (genfun, mode, data)
2878 rtx (*genfun) PARAMS ((rtx, ...));
2879 enum machine_mode mode;
2880 struct store_by_pieces *data;
2882 unsigned int size = GET_MODE_SIZE (mode);
2883 rtx to1, cst;
2885 while (data->len >= size)
2887 if (data->reverse)
2888 data->offset -= size;
2890 if (data->autinc_to)
2891 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2892 data->offset);
2893 else
2894 to1 = adjust_address (data->to, mode, data->offset);
2896 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2897 emit_insn (gen_add2_insn (data->to_addr,
2898 GEN_INT (-(HOST_WIDE_INT) size)));
2900 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2901 emit_insn ((*genfun) (to1, cst));
2903 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2904 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2906 if (! data->reverse)
2907 data->offset += size;
2909 data->len -= size;
2913 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2914 its length in bytes. */
2917 clear_storage (object, size)
2918 rtx object;
2919 rtx size;
2921 rtx retval = 0;
2922 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2923 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2925 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2926 just move a zero. Otherwise, do this a piece at a time. */
2927 if (GET_MODE (object) != BLKmode
2928 && GET_CODE (size) == CONST_INT
2929 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2930 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2931 else
2933 object = protect_from_queue (object, 1);
2934 size = protect_from_queue (size, 0);
2936 if (GET_CODE (size) == CONST_INT
2937 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2938 clear_by_pieces (object, INTVAL (size), align);
2939 else if (clear_storage_via_clrstr (object, size, align))
2941 else
2942 retval = clear_storage_via_libcall (object, size);
2945 return retval;
2948 /* A subroutine of clear_storage. Expand a clrstr pattern;
2949 return true if successful. */
2951 static bool
2952 clear_storage_via_clrstr (object, size, align)
2953 rtx object, size;
2954 unsigned int align;
2956 /* Try the most limited insn first, because there's no point
2957 including more than one in the machine description unless
2958 the more limited one has some advantage. */
2960 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2961 enum machine_mode mode;
2963 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2964 mode = GET_MODE_WIDER_MODE (mode))
2966 enum insn_code code = clrstr_optab[(int) mode];
2967 insn_operand_predicate_fn pred;
2969 if (code != CODE_FOR_nothing
2970 /* We don't need MODE to be narrower than
2971 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2972 the mode mask, as it is returned by the macro, it will
2973 definitely be less than the actual mode mask. */
2974 && ((GET_CODE (size) == CONST_INT
2975 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2976 <= (GET_MODE_MASK (mode) >> 1)))
2977 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2978 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2979 || (*pred) (object, BLKmode))
2980 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2981 || (*pred) (opalign, VOIDmode)))
2983 rtx op1;
2984 rtx last = get_last_insn ();
2985 rtx pat;
2987 op1 = convert_to_mode (mode, size, 1);
2988 pred = insn_data[(int) code].operand[1].predicate;
2989 if (pred != 0 && ! (*pred) (op1, mode))
2990 op1 = copy_to_mode_reg (mode, op1);
2992 pat = GEN_FCN ((int) code) (object, op1, opalign);
2993 if (pat)
2995 emit_insn (pat);
2996 return true;
2998 else
2999 delete_insns_since (last);
3003 return false;
3006 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3007 Return the return value of memset, 0 otherwise. */
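/* The call built below is memset (object, 0, size) when
   TARGET_MEM_FUNCTIONS and bzero (object, size) otherwise; only the
   memset form has a return value worth propagating.  */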
3009 static rtx
3010 clear_storage_via_libcall (object, size)
3011 rtx object, size;
3013 tree call_expr, arg_list, fn, object_tree, size_tree;
3014 enum machine_mode size_mode;
3015 rtx retval;
3017 /* OBJECT or SIZE may have been passed through protect_from_queue.
3019 It is unsafe to save the value generated by protect_from_queue
3020 and reuse it later. Consider what happens if emit_queue is
3021 called before the return value from protect_from_queue is used.
3023 Expansion of the CALL_EXPR below will call emit_queue before
3024 we are finished emitting RTL for argument setup. So if we are
3025 not careful we could get the wrong value for an argument.
3027 To avoid this problem we go ahead and emit code to copy OBJECT
3028 and SIZE into new pseudos. We can then place those new pseudos
3029 into an RTL_EXPR and use them later, even after a call to
3030 emit_queue.
3032 Note this is not strictly needed for library calls since they
3033 do not call emit_queue before loading their arguments. However,
3034 we may need to have library calls call emit_queue in the future
3035 since failing to do so could cause problems for targets which
3036 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3038 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3040 if (TARGET_MEM_FUNCTIONS)
3041 size_mode = TYPE_MODE (sizetype);
3042 else
3043 size_mode = TYPE_MODE (unsigned_type_node);
3044 size = convert_to_mode (size_mode, size, 1);
3045 size = copy_to_mode_reg (size_mode, size);
3047 /* It is incorrect to use the libcall calling conventions to call
3048 memset in this context. This could be a user call to memset and
3049 the user may wish to examine the return value from memset. For
3050 targets where libcalls and normal calls have different conventions
3051 for returning pointers, we could end up generating incorrect code.
3053 For convenience, we generate the call to bzero this way as well. */
3055 object_tree = make_tree (ptr_type_node, object);
3056 if (TARGET_MEM_FUNCTIONS)
3057 size_tree = make_tree (sizetype, size);
3058 else
3059 size_tree = make_tree (unsigned_type_node, size);
3061 fn = clear_storage_libcall_fn (true);
3062 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3063 if (TARGET_MEM_FUNCTIONS)
3064 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3065 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3067 /* Now we have to build up the CALL_EXPR itself. */
3068 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3069 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3070 call_expr, arg_list, NULL_TREE);
3071 TREE_SIDE_EFFECTS (call_expr) = 1;
3073 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3075 /* If we are initializing a readonly value, show the above call
3076 clobbered it. Otherwise, a load from it may erroneously be
3077 hoisted from a loop. */
3078 if (RTX_UNCHANGING_P (object))
3079 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3081 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3084 /* A subroutine of clear_storage_via_libcall. Create the tree node
3085 for the function we use for block clears. The first time FOR_CALL
3086 is true, we call assemble_external. */
3088 static GTY(()) tree block_clear_fn;
3090 static tree
3091 clear_storage_libcall_fn (for_call)
3092 int for_call;
3094 static bool emitted_extern;
3095 tree fn = block_clear_fn, args;
3097 if (!fn)
3099 if (TARGET_MEM_FUNCTIONS)
3101 fn = get_identifier ("memset");
3102 args = build_function_type_list (ptr_type_node, ptr_type_node,
3103 integer_type_node, sizetype,
3104 NULL_TREE);
3106 else
3108 fn = get_identifier ("bzero");
3109 args = build_function_type_list (void_type_node, ptr_type_node,
3110 unsigned_type_node, NULL_TREE);
3113 fn = build_decl (FUNCTION_DECL, fn, args);
3114 DECL_EXTERNAL (fn) = 1;
3115 TREE_PUBLIC (fn) = 1;
3116 DECL_ARTIFICIAL (fn) = 1;
3117 TREE_NOTHROW (fn) = 1;
3119 block_clear_fn = fn;
3122 if (for_call && !emitted_extern)
3124 emitted_extern = true;
3125 make_decl_rtl (fn, NULL);
3126 assemble_external (fn);
3129 return fn;
3132 /* Generate code to copy Y into X.
3133 Both Y and X must have the same mode, except that
3134 Y can be a constant with VOIDmode.
3135 This mode cannot be BLKmode; use emit_block_move for that.
3137 Return the last instruction emitted. */
3140 emit_move_insn (x, y)
3141 rtx x, y;
3143 enum machine_mode mode = GET_MODE (x);
3144 rtx y_cst = NULL_RTX;
3145 rtx last_insn;
3147 x = protect_from_queue (x, 1);
3148 y = protect_from_queue (y, 0);
3150 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3151 abort ();
3153 /* Never force constant_p_rtx to memory. */
3154 if (GET_CODE (y) == CONSTANT_P_RTX)
3156 else if (CONSTANT_P (y))
3158 if (optimize
3159 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3160 && (last_insn = compress_float_constant (x, y)))
3161 return last_insn;
3163 if (!LEGITIMATE_CONSTANT_P (y))
3165 y_cst = y;
3166 y = force_const_mem (mode, y);
3168 /* If the target's cannot_force_const_mem prevented the spill,
3169 assume that the target's move expanders will also take care
3170 of the non-legitimate constant. */
3171 if (!y)
3172 y = y_cst;
3176 /* If X or Y are memory references, verify that their addresses are valid
3177 for the machine. */
3178 if (GET_CODE (x) == MEM
3179 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3180 && ! push_operand (x, GET_MODE (x)))
3181 || (flag_force_addr
3182 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3183 x = validize_mem (x);
3185 if (GET_CODE (y) == MEM
3186 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3187 || (flag_force_addr
3188 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3189 y = validize_mem (y);
3191 if (mode == BLKmode)
3192 abort ();
3194 last_insn = emit_move_insn_1 (x, y);
3196 if (y_cst && GET_CODE (x) == REG)
3197 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3199 return last_insn;
3202 /* Low level part of emit_move_insn.
3203 Called just like emit_move_insn, but assumes X and Y
3204 are basically valid. */
3207 emit_move_insn_1 (x, y)
3208 rtx x, y;
3210 enum machine_mode mode = GET_MODE (x);
3211 enum machine_mode submode;
3212 enum mode_class class = GET_MODE_CLASS (mode);
3214 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3215 abort ();
3217 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3218 return
3219 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3221 /* Expand complex moves by moving real part and imag part, if possible. */
3222 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3223 && BLKmode != (submode = GET_MODE_INNER (mode))
3224 && (mov_optab->handlers[(int) submode].insn_code
3225 != CODE_FOR_nothing))
3227 /* Don't split destination if it is a stack push. */
3228 int stack = push_operand (x, GET_MODE (x));
3230 #ifdef PUSH_ROUNDING
3231 /* In case we output to the stack, but the size is smaller than the machine can
3232 push exactly, we need to use move instructions.  */
3233 if (stack
3234 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3235 != GET_MODE_SIZE (submode)))
3237 rtx temp;
3238 HOST_WIDE_INT offset1, offset2;
3240 /* Do not use anti_adjust_stack, since we don't want to update
3241 stack_pointer_delta. */
3242 temp = expand_binop (Pmode,
3243 #ifdef STACK_GROWS_DOWNWARD
3244 sub_optab,
3245 #else
3246 add_optab,
3247 #endif
3248 stack_pointer_rtx,
3249 GEN_INT
3250 (PUSH_ROUNDING
3251 (GET_MODE_SIZE (GET_MODE (x)))),
3252 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3254 if (temp != stack_pointer_rtx)
3255 emit_move_insn (stack_pointer_rtx, temp);
3257 #ifdef STACK_GROWS_DOWNWARD
3258 offset1 = 0;
3259 offset2 = GET_MODE_SIZE (submode);
3260 #else
3261 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3262 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3263 + GET_MODE_SIZE (submode));
3264 #endif
3266 emit_move_insn (change_address (x, submode,
3267 gen_rtx_PLUS (Pmode,
3268 stack_pointer_rtx,
3269 GEN_INT (offset1))),
3270 gen_realpart (submode, y));
3271 emit_move_insn (change_address (x, submode,
3272 gen_rtx_PLUS (Pmode,
3273 stack_pointer_rtx,
3274 GEN_INT (offset2))),
3275 gen_imagpart (submode, y));
3277 else
3278 #endif
3279 /* If this is a stack push, push the highpart first, so it
3280 will be in the argument order.
3282 In that case, change_address is used only to convert
3283 the mode, not to change the address. */
3284 if (stack)
3286 /* Note that the real part always precedes the imag part in memory
3287 regardless of machine's endianness. */
3288 #ifdef STACK_GROWS_DOWNWARD
3289 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3290 (gen_rtx_MEM (submode, XEXP (x, 0)),
3291 gen_imagpart (submode, y)));
3292 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3293 (gen_rtx_MEM (submode, XEXP (x, 0)),
3294 gen_realpart (submode, y)));
3295 #else
3296 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3297 (gen_rtx_MEM (submode, XEXP (x, 0)),
3298 gen_realpart (submode, y)));
3299 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3300 (gen_rtx_MEM (submode, XEXP (x, 0)),
3301 gen_imagpart (submode, y)));
3302 #endif
3304 else
3306 rtx realpart_x, realpart_y;
3307 rtx imagpart_x, imagpart_y;
3309 /* If this is a complex value with each part being smaller than a
3310 word, the usual calling sequence will likely pack the pieces into
3311 a single register. Unfortunately, SUBREG of hard registers only
3312 deals in terms of words, so we have a problem converting input
3313 arguments to the CONCAT of two registers that is used elsewhere
3314 for complex values. If this is before reload, we can copy it into
3315 memory and reload. FIXME, we should see about using extract and
3316 insert on integer registers, but complex short and complex char
3317 variables should be rarely used. */
3318 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3319 && (reload_in_progress | reload_completed) == 0)
3321 int packed_dest_p
3322 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3323 int packed_src_p
3324 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3326 if (packed_dest_p || packed_src_p)
3328 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3329 ? MODE_FLOAT : MODE_INT);
3331 enum machine_mode reg_mode
3332 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3334 if (reg_mode != BLKmode)
3336 rtx mem = assign_stack_temp (reg_mode,
3337 GET_MODE_SIZE (mode), 0);
3338 rtx cmem = adjust_address (mem, mode, 0);
3340 cfun->cannot_inline
3341 = N_("function using short complex types cannot be inline");
3343 if (packed_dest_p)
3345 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3347 emit_move_insn_1 (cmem, y);
3348 return emit_move_insn_1 (sreg, mem);
3350 else
3352 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3354 emit_move_insn_1 (mem, sreg);
3355 return emit_move_insn_1 (x, cmem);
3361 realpart_x = gen_realpart (submode, x);
3362 realpart_y = gen_realpart (submode, y);
3363 imagpart_x = gen_imagpart (submode, x);
3364 imagpart_y = gen_imagpart (submode, y);
3366 /* Show the output dies here. This is necessary for SUBREGs
3367 of pseudos since we cannot track their lifetimes correctly;
3368 hard regs shouldn't appear here except as return values.
3369 We never want to emit such a clobber after reload. */
3370 if (x != y
3371 && ! (reload_in_progress || reload_completed)
3372 && (GET_CODE (realpart_x) == SUBREG
3373 || GET_CODE (imagpart_x) == SUBREG))
3374 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3376 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3377 (realpart_x, realpart_y));
3378 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3379 (imagpart_x, imagpart_y));
3382 return get_last_insn ();
3385 /* This will handle any multi-word or full-word mode that lacks a move_insn
3386 pattern. However, you will get better code if you define such patterns,
3387 even if they must turn into multiple assembler instructions. */
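/* For example, a DImode move on a 32-bit target that lacks a movdi
   pattern is emitted here as two SImode word moves obtained through
   operand_subword.  */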
3388 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3390 rtx last_insn = 0;
3391 rtx seq, inner;
3392 int need_clobber;
3393 int i;
3395 #ifdef PUSH_ROUNDING
3397 /* If X is a push on the stack, do the push now and replace
3398 X with a reference to the stack pointer. */
3399 if (push_operand (x, GET_MODE (x)))
3401 rtx temp;
3402 enum rtx_code code;
3404 /* Do not use anti_adjust_stack, since we don't want to update
3405 stack_pointer_delta. */
3406 temp = expand_binop (Pmode,
3407 #ifdef STACK_GROWS_DOWNWARD
3408 sub_optab,
3409 #else
3410 add_optab,
3411 #endif
3412 stack_pointer_rtx,
3413 GEN_INT
3414 (PUSH_ROUNDING
3415 (GET_MODE_SIZE (GET_MODE (x)))),
3416 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3418 if (temp != stack_pointer_rtx)
3419 emit_move_insn (stack_pointer_rtx, temp);
3421 code = GET_CODE (XEXP (x, 0));
3423 /* Just hope that small offsets off SP are OK. */
3424 if (code == POST_INC)
3425 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3426 GEN_INT (-((HOST_WIDE_INT)
3427 GET_MODE_SIZE (GET_MODE (x)))));
3428 else if (code == POST_DEC)
3429 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3430 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3431 else
3432 temp = stack_pointer_rtx;
3434 x = change_address (x, VOIDmode, temp);
3436 #endif
3438 /* If we are in reload, see if either operand is a MEM whose address
3439 is scheduled for replacement. */
3440 if (reload_in_progress && GET_CODE (x) == MEM
3441 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3442 x = replace_equiv_address_nv (x, inner);
3443 if (reload_in_progress && GET_CODE (y) == MEM
3444 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3445 y = replace_equiv_address_nv (y, inner);
3447 start_sequence ();
3449 need_clobber = 0;
3450 for (i = 0;
3451 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3452 i++)
3454 rtx xpart = operand_subword (x, i, 1, mode);
3455 rtx ypart = operand_subword (y, i, 1, mode);
3457 /* If we can't get a part of Y, put Y into memory if it is a
3458 constant. Otherwise, force it into a register. If we still
3459 can't get a part of Y, abort. */
3460 if (ypart == 0 && CONSTANT_P (y))
3462 y = force_const_mem (mode, y);
3463 ypart = operand_subword (y, i, 1, mode);
3465 else if (ypart == 0)
3466 ypart = operand_subword_force (y, i, mode);
3468 if (xpart == 0 || ypart == 0)
3469 abort ();
3471 need_clobber |= (GET_CODE (xpart) == SUBREG);
3473 last_insn = emit_move_insn (xpart, ypart);
3476 seq = get_insns ();
3477 end_sequence ();
3479 /* Show the output dies here. This is necessary for SUBREGs
3480 of pseudos since we cannot track their lifetimes correctly;
3481 hard regs shouldn't appear here except as return values.
3482 We never want to emit such a clobber after reload. */
3483 if (x != y
3484 && ! (reload_in_progress || reload_completed)
3485 && need_clobber != 0)
3486 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3488 emit_insn (seq);
3490 return last_insn;
3492 else
3493 abort ();
3496 /* If Y is representable exactly in a narrower mode, and the target can
3497 perform the extension directly from constant or memory, then emit the
3498 move as an extension. */
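/* For example, if X has DFmode and Y is the DFmode constant 1.5, the
   value is exact in SFmode, so on a target whose extendsfdf2 operand
   predicate accepts the constant (or which can extend directly from
   memory) the move is emitted as a single SFmode-to-DFmode extension,
   plus a REG_EQUAL note for the original constant when X is a register.  */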
3500 static rtx
3501 compress_float_constant (x, y)
3502 rtx x, y;
3504 enum machine_mode dstmode = GET_MODE (x);
3505 enum machine_mode orig_srcmode = GET_MODE (y);
3506 enum machine_mode srcmode;
3507 REAL_VALUE_TYPE r;
3509 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3511 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3512 srcmode != orig_srcmode;
3513 srcmode = GET_MODE_WIDER_MODE (srcmode))
3515 enum insn_code ic;
3516 rtx trunc_y, last_insn;
3518 /* Skip if the target can't extend this way. */
3519 ic = can_extend_p (dstmode, srcmode, 0);
3520 if (ic == CODE_FOR_nothing)
3521 continue;
3523 /* Skip if the narrowed value isn't exact. */
3524 if (! exact_real_truncate (srcmode, &r))
3525 continue;
3527 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3529 if (LEGITIMATE_CONSTANT_P (trunc_y))
3531 /* Skip if the target needs extra instructions to perform
3532 the extension. */
3533 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3534 continue;
3536 else if (float_extend_from_mem[dstmode][srcmode])
3537 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3538 else
3539 continue;
3541 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3542 last_insn = get_last_insn ();
3544 if (GET_CODE (x) == REG)
3545 REG_NOTES (last_insn)
3546 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3548 return last_insn;
3551 return NULL_RTX;
3554 /* Pushing data onto the stack. */
3556 /* Push a block of length SIZE (perhaps variable)
3557 and return an rtx to address the beginning of the block.
3558 Note that it is not possible for the value returned to be a QUEUED.
3559 The value may be virtual_outgoing_args_rtx.
3561 EXTRA is the number of bytes of padding to push in addition to SIZE.
3562 BELOW nonzero means this padding comes at low addresses;
3563 otherwise, the padding comes at high addresses. */
3566 push_block (size, extra, below)
3567 rtx size;
3568 int extra, below;
3570 rtx temp;
3572 size = convert_modes (Pmode, ptr_mode, size, 1);
3573 if (CONSTANT_P (size))
3574 anti_adjust_stack (plus_constant (size, extra));
3575 else if (GET_CODE (size) == REG && extra == 0)
3576 anti_adjust_stack (size);
3577 else
3579 temp = copy_to_mode_reg (Pmode, size);
3580 if (extra != 0)
3581 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3582 temp, 0, OPTAB_LIB_WIDEN);
3583 anti_adjust_stack (temp);
3586 #ifndef STACK_GROWS_DOWNWARD
3587 if (0)
3588 #else
3589 if (1)
3590 #endif
3592 temp = virtual_outgoing_args_rtx;
3593 if (extra != 0 && below)
3594 temp = plus_constant (temp, extra);
3596 else
3598 if (GET_CODE (size) == CONST_INT)
3599 temp = plus_constant (virtual_outgoing_args_rtx,
3600 -INTVAL (size) - (below ? 0 : extra));
3601 else if (extra != 0 && !below)
3602 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3603 negate_rtx (Pmode, plus_constant (size, extra)));
3604 else
3605 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3606 negate_rtx (Pmode, size));
3609 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3612 #ifdef PUSH_ROUNDING
3614 /* Emit single push insn. */
3616 static void
3617 emit_single_push_insn (mode, x, type)
3618 rtx x;
3619 enum machine_mode mode;
3620 tree type;
3622 rtx dest_addr;
3623 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3624 rtx dest;
3625 enum insn_code icode;
3626 insn_operand_predicate_fn pred;
3628 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3629 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3630 a MEM representing the push operation to the move expander.  */
3631 icode = push_optab->handlers[(int) mode].insn_code;
3632 if (icode != CODE_FOR_nothing)
3634 if (((pred = insn_data[(int) icode].operand[0].predicate)
3635 && !((*pred) (x, mode))))
3636 x = force_reg (mode, x);
3637 emit_insn (GEN_FCN (icode) (x));
3638 return;
3640 if (GET_MODE_SIZE (mode) == rounded_size)
3641 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3642 else
3644 #ifdef STACK_GROWS_DOWNWARD
3645 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3646 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3647 #else
3648 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3649 GEN_INT (rounded_size));
3650 #endif
3651 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3654 dest = gen_rtx_MEM (mode, dest_addr);
3656 if (type != 0)
3658 set_mem_attributes (dest, type, 1);
3660 if (flag_optimize_sibling_calls)
3661 /* Function incoming arguments may overlap with sibling call
3662 outgoing arguments and we cannot allow reordering of reads
3663 from function arguments with stores to outgoing arguments
3664 of sibling calls. */
3665 set_mem_alias_set (dest, 0);
3667 emit_move_insn (dest, x);
3669 #endif
3671 /* Generate code to push X onto the stack, assuming it has mode MODE and
3672 type TYPE.
3673 MODE is redundant except when X is a CONST_INT (since they don't
3674 carry mode info).
3675 SIZE is an rtx for the size of data to be copied (in bytes),
3676 needed only if X is BLKmode.
3678 ALIGN (in bits) is maximum alignment we can assume.
3680 If PARTIAL and REG are both nonzero, then copy that many of the first
3681 words of X into registers starting with REG, and push the rest of X.
3682 The amount of space pushed is decreased by PARTIAL words,
3683 rounded *down* to a multiple of PARM_BOUNDARY.
3684 REG must be a hard register in this case.
3685 If REG is zero but PARTIAL is not, take all other actions for an
3686 argument partially in registers, but do not actually load any
3687 registers.
3689 EXTRA is the amount in bytes of extra space to leave next to this arg.
3690 This is ignored if an argument block has already been allocated.
3692 On a machine that lacks real push insns, ARGS_ADDR is the address of
3693 the bottom of the argument block for this call. We use indexing off there
3694 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3695 argument block has not been preallocated.
3697 ARGS_SO_FAR is the size of args previously pushed for this call.
3699 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3700 for arguments passed in registers. If nonzero, it will be the number
3701 of bytes required. */
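/* The body below handles three cases: a BLKmode argument, which is
   pushed piecemeal or block-moved into stack space; a scalar split
   between registers and the stack (PARTIAL > 0), whose stack part is
   pushed word by word; and a simple scalar pushed or stored whole.
   Whatever part belongs in registers is loaded last, after any memory
   copies.  */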
3703 void
3704 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3705 args_addr, args_so_far, reg_parm_stack_space,
3706 alignment_pad)
3707 rtx x;
3708 enum machine_mode mode;
3709 tree type;
3710 rtx size;
3711 unsigned int align;
3712 int partial;
3713 rtx reg;
3714 int extra;
3715 rtx args_addr;
3716 rtx args_so_far;
3717 int reg_parm_stack_space;
3718 rtx alignment_pad;
3720 rtx xinner;
3721 enum direction stack_direction
3722 #ifdef STACK_GROWS_DOWNWARD
3723 = downward;
3724 #else
3725 = upward;
3726 #endif
3728 /* Decide where to pad the argument: `downward' for below,
3729 `upward' for above, or `none' for don't pad it.
3730 Default is below for small data on big-endian machines; else above. */
3731 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3733 /* Invert direction if stack is post-decrement.
3734 FIXME: why? */
3735 if (STACK_PUSH_CODE == POST_DEC)
3736 if (where_pad != none)
3737 where_pad = (where_pad == downward ? upward : downward);
3739 xinner = x = protect_from_queue (x, 0);
3741 if (mode == BLKmode)
3743 /* Copy a block into the stack, entirely or partially. */
3745 rtx temp;
3746 int used = partial * UNITS_PER_WORD;
3747 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3748 int skip;
3750 if (size == 0)
3751 abort ();
3753 used -= offset;
3755 /* USED is now the # of bytes we need not copy to the stack
3756 because registers will take care of them. */
3758 if (partial != 0)
3759 xinner = adjust_address (xinner, BLKmode, used);
3761 /* If the partial register-part of the arg counts in its stack size,
3762 skip the part of stack space corresponding to the registers.
3763 Otherwise, start copying to the beginning of the stack space,
3764 by setting SKIP to 0. */
3765 skip = (reg_parm_stack_space == 0) ? 0 : used;
3767 #ifdef PUSH_ROUNDING
3768 /* Do it with several push insns if that doesn't take lots of insns
3769 and if there is no difficulty with push insns that skip bytes
3770 on the stack for alignment purposes. */
3771 if (args_addr == 0
3772 && PUSH_ARGS
3773 && GET_CODE (size) == CONST_INT
3774 && skip == 0
3775 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3776 /* Here we avoid the case of a structure whose weak alignment
3777 forces many pushes of a small amount of data,
3778 and such small pushes do rounding that causes trouble. */
3779 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3780 || align >= BIGGEST_ALIGNMENT
3781 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3782 == (align / BITS_PER_UNIT)))
3783 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3785 /* Push padding now if padding above and stack grows down,
3786 or if padding below and stack grows up.
3787 But if space already allocated, this has already been done. */
3788 if (extra && args_addr == 0
3789 && where_pad != none && where_pad != stack_direction)
3790 anti_adjust_stack (GEN_INT (extra));
3792 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3794 else
3795 #endif /* PUSH_ROUNDING */
3797 rtx target;
3799 /* Otherwise make space on the stack and copy the data
3800 to the address of that space. */
3802 /* Deduct words put into registers from the size we must copy. */
3803 if (partial != 0)
3805 if (GET_CODE (size) == CONST_INT)
3806 size = GEN_INT (INTVAL (size) - used);
3807 else
3808 size = expand_binop (GET_MODE (size), sub_optab, size,
3809 GEN_INT (used), NULL_RTX, 0,
3810 OPTAB_LIB_WIDEN);
3813 /* Get the address of the stack space.
3814 In this case, we do not deal with EXTRA separately.
3815 A single stack adjust will do. */
3816 if (! args_addr)
3818 temp = push_block (size, extra, where_pad == downward);
3819 extra = 0;
3821 else if (GET_CODE (args_so_far) == CONST_INT)
3822 temp = memory_address (BLKmode,
3823 plus_constant (args_addr,
3824 skip + INTVAL (args_so_far)));
3825 else
3826 temp = memory_address (BLKmode,
3827 plus_constant (gen_rtx_PLUS (Pmode,
3828 args_addr,
3829 args_so_far),
3830 skip));
3832 if (!ACCUMULATE_OUTGOING_ARGS)
3834 /* If the source is referenced relative to the stack pointer,
3835 copy it to another register to stabilize it. We do not need
3836 to do this if we know that we won't be changing sp. */
3838 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3839 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3840 temp = copy_to_reg (temp);
3843 target = gen_rtx_MEM (BLKmode, temp);
3845 if (type != 0)
3847 set_mem_attributes (target, type, 1);
3848 /* Function incoming arguments may overlap with sibling call
3849 outgoing arguments and we cannot allow reordering of reads
3850 from function arguments with stores to outgoing arguments
3851 of sibling calls. */
3852 set_mem_alias_set (target, 0);
3855 /* ALIGN may well be better aligned than TYPE, e.g. due to
3856 PARM_BOUNDARY. Assume the caller isn't lying. */
3857 set_mem_align (target, align);
3859 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3862 else if (partial > 0)
3864 /* Scalar partly in registers. */
3866 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3867 int i;
3868 int not_stack;
3869 /* # words of start of argument
3870 that we must make space for but need not store. */
3871 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3872 int args_offset = INTVAL (args_so_far);
3873 int skip;
3875 /* Push padding now if padding above and stack grows down,
3876 or if padding below and stack grows up.
3877 But if space already allocated, this has already been done. */
3878 if (extra && args_addr == 0
3879 && where_pad != none && where_pad != stack_direction)
3880 anti_adjust_stack (GEN_INT (extra));
3882 /* If we make space by pushing it, we might as well push
3883 the real data. Otherwise, we can leave OFFSET nonzero
3884 and leave the space uninitialized. */
3885 if (args_addr == 0)
3886 offset = 0;
3888 /* Now NOT_STACK gets the number of words that we don't need to
3889 allocate on the stack. */
3890 not_stack = partial - offset;
3892 /* If the partial register-part of the arg counts in its stack size,
3893 skip the part of stack space corresponding to the registers.
3894 Otherwise, start copying to the beginning of the stack space,
3895 by setting SKIP to 0. */
3896 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3898 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3899 x = validize_mem (force_const_mem (mode, x));
3901 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3902 SUBREGs of such registers are not allowed. */
3903 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3904 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3905 x = copy_to_reg (x);
3907 /* Loop over all the words allocated on the stack for this arg. */
3908 /* We can do it by words, because any scalar bigger than a word
3909 has a size that is a multiple of a word. */
3910 #ifndef PUSH_ARGS_REVERSED
3911 for (i = not_stack; i < size; i++)
3912 #else
3913 for (i = size - 1; i >= not_stack; i--)
3914 #endif
3915 if (i >= not_stack + offset)
3916 emit_push_insn (operand_subword_force (x, i, mode),
3917 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3918 0, args_addr,
3919 GEN_INT (args_offset + ((i - not_stack + skip)
3920 * UNITS_PER_WORD)),
3921 reg_parm_stack_space, alignment_pad);
3923 else
3925 rtx addr;
3926 rtx dest;
3928 /* Push padding now if padding above and stack grows down,
3929 or if padding below and stack grows up.
3930 But if space already allocated, this has already been done. */
3931 if (extra && args_addr == 0
3932 && where_pad != none && where_pad != stack_direction)
3933 anti_adjust_stack (GEN_INT (extra));
3935 #ifdef PUSH_ROUNDING
3936 if (args_addr == 0 && PUSH_ARGS)
3937 emit_single_push_insn (mode, x, type);
3938 else
3939 #endif
3941 if (GET_CODE (args_so_far) == CONST_INT)
3942 addr
3943 = memory_address (mode,
3944 plus_constant (args_addr,
3945 INTVAL (args_so_far)));
3946 else
3947 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3948 args_so_far));
3949 dest = gen_rtx_MEM (mode, addr);
3950 if (type != 0)
3952 set_mem_attributes (dest, type, 1);
3953 /* Function incoming arguments may overlap with sibling call
3954 outgoing arguments and we cannot allow reordering of reads
3955 from function arguments with stores to outgoing arguments
3956 of sibling calls. */
3957 set_mem_alias_set (dest, 0);
3960 emit_move_insn (dest, x);
3964 /* If part should go in registers, copy that part
3965 into the appropriate registers. Do this now, at the end,
3966 since mem-to-mem copies above may do function calls. */
3967 if (partial > 0 && reg != 0)
3969 /* Handle calls that pass values in multiple non-contiguous locations.
3970 The Irix 6 ABI has examples of this. */
3971 if (GET_CODE (reg) == PARALLEL)
3972 emit_group_load (reg, x, -1); /* ??? size? */
3973 else
3974 move_block_to_reg (REGNO (reg), x, partial, mode);
3977 if (extra && args_addr == 0 && where_pad == stack_direction)
3978 anti_adjust_stack (GEN_INT (extra));
3980 if (alignment_pad && args_addr == 0)
3981 anti_adjust_stack (alignment_pad);
3984 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3985 operations. */
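/* As a rough sketch of how this is used: when expanding something like

	int r;
	r = a + b + c;

   the rtx already chosen for R can be handed back here so that the
   intermediate sum A + B is computed straight into it, but only when it
   is an ordinary writable pseudo register and we are not preserving
   subexpressions (e.g. inside a loop); otherwise 0 is returned and the
   caller picks a fresh temporary. */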
3987 static rtx
3988 get_subtarget (x)
3989 rtx x;
3991 return ((x == 0
3992 /* Only registers can be subtargets. */
3993 || GET_CODE (x) != REG
3994 /* If the register is readonly, it can't be set more than once. */
3995 || RTX_UNCHANGING_P (x)
3996 /* Don't use hard regs to avoid extending their life. */
3997 || REGNO (x) < FIRST_PSEUDO_REGISTER
3998 /* Avoid subtargets inside loops,
3999 since they hide some invariant expressions. */
4000 || preserve_subexpressions_p ())
4001 ? 0 : x);
4004 /* Expand an assignment that stores the value of FROM into TO.
4005 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4006 (This may contain a QUEUED rtx;
4007 if the value is constant, this rtx is a constant.)
4008 Otherwise, the returned value is NULL_RTX.
4010 SUGGEST_REG is no longer actually used.
4011 It used to mean, copy the value through a register
4012 and return that register, if that is possible.
4013 We now use WANT_VALUE to decide whether to do this. */
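/* A small example of the cases separated below: for an assignment such as

	struct s { int x : 3; unsigned y; } v;
	v.x = n;

   the left-hand side is a COMPONENT_REF, so the value is placed with
   store_field after get_inner_reference has located the bit-field; a
   plain scalar assignment such as `i = n' falls through to store_expr
   at the end of this function. */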
4015 rtx
4016 expand_assignment (to, from, want_value, suggest_reg)
4017 tree to, from;
4018 int want_value;
4019 int suggest_reg ATTRIBUTE_UNUSED;
4021 rtx to_rtx = 0;
4022 rtx result;
4024 /* Don't crash if the lhs of the assignment was erroneous. */
4026 if (TREE_CODE (to) == ERROR_MARK)
4028 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4029 return want_value ? result : NULL_RTX;
4032 /* Assignment of a structure component needs special treatment
4033 if the structure component's rtx is not simply a MEM.
4034 Assignment of an array element at a constant index, and assignment of
4035 an array element in an unaligned packed structure field, has the same
4036 problem. */
4038 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4039 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4040 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4042 enum machine_mode mode1;
4043 HOST_WIDE_INT bitsize, bitpos;
4044 rtx orig_to_rtx;
4045 tree offset;
4046 int unsignedp;
4047 int volatilep = 0;
4048 tree tem;
4050 push_temp_slots ();
4051 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4052 &unsignedp, &volatilep);
4054 /* If we are going to use store_bit_field and extract_bit_field,
4055 make sure to_rtx will be safe for multiple use. */
4057 if (mode1 == VOIDmode && want_value)
4058 tem = stabilize_reference (tem);
4060 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4062 if (offset != 0)
4064 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4066 if (GET_CODE (to_rtx) != MEM)
4067 abort ();
4069 #ifdef POINTERS_EXTEND_UNSIGNED
4070 if (GET_MODE (offset_rtx) != Pmode)
4071 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4072 #else
4073 if (GET_MODE (offset_rtx) != ptr_mode)
4074 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4075 #endif
4077 /* A constant address in TO_RTX can have VOIDmode; we must not try
4078 to call force_reg in that case, so avoid it. */
4079 if (GET_CODE (to_rtx) == MEM
4080 && GET_MODE (to_rtx) == BLKmode
4081 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4082 && bitsize > 0
4083 && (bitpos % bitsize) == 0
4084 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4085 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4087 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4088 bitpos = 0;
4091 to_rtx = offset_address (to_rtx, offset_rtx,
4092 highest_pow2_factor_for_type (TREE_TYPE (to),
4093 offset));
4096 if (GET_CODE (to_rtx) == MEM)
4098 /* If the field is at offset zero, we could have been given the
4099 DECL_RTX of the parent struct. Don't munge it. */
4100 to_rtx = shallow_copy_rtx (to_rtx);
4102 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4105 /* Deal with volatile and readonly fields. The former is only done
4106 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4107 if (volatilep && GET_CODE (to_rtx) == MEM)
4109 if (to_rtx == orig_to_rtx)
4110 to_rtx = copy_rtx (to_rtx);
4111 MEM_VOLATILE_P (to_rtx) = 1;
4114 if (TREE_CODE (to) == COMPONENT_REF
4115 && TREE_READONLY (TREE_OPERAND (to, 1)))
4117 if (to_rtx == orig_to_rtx)
4118 to_rtx = copy_rtx (to_rtx);
4119 RTX_UNCHANGING_P (to_rtx) = 1;
4122 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4124 if (to_rtx == orig_to_rtx)
4125 to_rtx = copy_rtx (to_rtx);
4126 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4129 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4130 (want_value
4131 /* Spurious cast for HPUX compiler. */
4132 ? ((enum machine_mode)
4133 TYPE_MODE (TREE_TYPE (to)))
4134 : VOIDmode),
4135 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4137 preserve_temp_slots (result);
4138 free_temp_slots ();
4139 pop_temp_slots ();
4141 /* If the value is meaningful, convert RESULT to the proper mode.
4142 Otherwise, return nothing. */
4143 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4144 TYPE_MODE (TREE_TYPE (from)),
4145 result,
4146 TREE_UNSIGNED (TREE_TYPE (to)))
4147 : NULL_RTX);
4150 /* If the rhs is a function call and its value is not an aggregate,
4151 call the function before we start to compute the lhs.
4152 This is needed for correct code for cases such as
4153 val = setjmp (buf) on machines where reference to val
4154 requires loading up part of an address in a separate insn.
4156 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4157 since it might be a promoted variable where the zero- or sign- extension
4158 needs to be done. Handling this in the normal way is safe because no
4159 computation is done before the call. */
4160 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4161 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4162 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4163 && GET_CODE (DECL_RTL (to)) == REG))
4165 rtx value;
4167 push_temp_slots ();
4168 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4169 if (to_rtx == 0)
4170 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4172 /* Handle calls that return values in multiple non-contiguous locations.
4173 The Irix 6 ABI has examples of this. */
4174 if (GET_CODE (to_rtx) == PARALLEL)
4175 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4176 else if (GET_MODE (to_rtx) == BLKmode)
4177 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4178 else
4180 #ifdef POINTERS_EXTEND_UNSIGNED
4181 if (POINTER_TYPE_P (TREE_TYPE (to))
4182 && GET_MODE (to_rtx) != GET_MODE (value))
4183 value = convert_memory_address (GET_MODE (to_rtx), value);
4184 #endif
4185 emit_move_insn (to_rtx, value);
4187 preserve_temp_slots (to_rtx);
4188 free_temp_slots ();
4189 pop_temp_slots ();
4190 return want_value ? to_rtx : NULL_RTX;
4193 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4194 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4196 if (to_rtx == 0)
4197 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4199 /* Don't move directly into a return register. */
4200 if (TREE_CODE (to) == RESULT_DECL
4201 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4203 rtx temp;
4205 push_temp_slots ();
4206 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4208 if (GET_CODE (to_rtx) == PARALLEL)
4209 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4210 else
4211 emit_move_insn (to_rtx, temp);
4213 preserve_temp_slots (to_rtx);
4214 free_temp_slots ();
4215 pop_temp_slots ();
4216 return want_value ? to_rtx : NULL_RTX;
4219 /* In case we are returning the contents of an object which overlaps
4220 the place the value is being stored, use a safe function when copying
4221 a value through a pointer into a structure value return block. */
4222 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4223 && current_function_returns_struct
4224 && !current_function_returns_pcc_struct)
4226 rtx from_rtx, size;
4228 push_temp_slots ();
4229 size = expr_size (from);
4230 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4232 if (TARGET_MEM_FUNCTIONS)
4233 emit_library_call (memmove_libfunc, LCT_NORMAL,
4234 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4235 XEXP (from_rtx, 0), Pmode,
4236 convert_to_mode (TYPE_MODE (sizetype),
4237 size, TREE_UNSIGNED (sizetype)),
4238 TYPE_MODE (sizetype));
4239 else
4240 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4241 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4242 XEXP (to_rtx, 0), Pmode,
4243 convert_to_mode (TYPE_MODE (integer_type_node),
4244 size,
4245 TREE_UNSIGNED (integer_type_node)),
4246 TYPE_MODE (integer_type_node));
4248 preserve_temp_slots (to_rtx);
4249 free_temp_slots ();
4250 pop_temp_slots ();
4251 return want_value ? to_rtx : NULL_RTX;
4254 /* Compute FROM and store the value in the rtx we got. */
4256 push_temp_slots ();
4257 result = store_expr (from, to_rtx, want_value);
4258 preserve_temp_slots (result);
4259 free_temp_slots ();
4260 pop_temp_slots ();
4261 return want_value ? result : NULL_RTX;
4264 /* Generate code for computing expression EXP,
4265 and storing the value into TARGET.
4266 TARGET may contain a QUEUED rtx.
4268 If WANT_VALUE & 1 is nonzero, return a copy of the value
4269 not in TARGET, so that we can be sure to use the proper
4270 value in a containing expression even if TARGET has something
4271 else stored in it. If possible, we copy the value through a pseudo
4272 and return that pseudo. Or, if the value is constant, we try to
4273 return the constant. In some cases, we return a pseudo
4274 copied *from* TARGET.
4276 If the mode is BLKmode then we may return TARGET itself.
4277 It turns out that in BLKmode it doesn't cause a problem,
4278 because C has no operators that could combine two different
4279 assignments into the same BLKmode object with different values
4280 with no sequence point. Will other languages need this to
4281 be more thorough?
4283 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4284 to catch quickly any cases where the caller uses the value
4285 and fails to set WANT_VALUE.
4287 If WANT_VALUE & 2 is set, this is a store into a call param on the
4288 stack, and block moves may need to be treated specially. */
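/* For instance, a caller that needs the stored value again, as in

	a = b = c;

   passes WANT_VALUE & 1 nonzero when storing C into B so that the
   result can in turn be stored into A, while an assignment used as a
   plain statement passes 0 and gets NULL_RTX back. WANT_VALUE & 2 is
   set only when the store is building a call argument on the stack. */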
4290 rtx
4291 store_expr (exp, target, want_value)
4292 tree exp;
4293 rtx target;
4294 int want_value;
4296 rtx temp;
4297 int dont_return_target = 0;
4298 int dont_store_target = 0;
4300 if (TREE_CODE (exp) == COMPOUND_EXPR)
4302 /* Perform first part of compound expression, then assign from second
4303 part. */
4304 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4305 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4306 emit_queue ();
4307 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4309 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4311 /* For conditional expression, get safe form of the target. Then
4312 test the condition, doing the appropriate assignment on either
4313 side. This avoids the creation of unnecessary temporaries.
4314 For non-BLKmode, it is more efficient not to do this. */
4316 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4318 emit_queue ();
4319 target = protect_from_queue (target, 1);
4321 do_pending_stack_adjust ();
4322 NO_DEFER_POP;
4323 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4324 start_cleanup_deferral ();
4325 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4326 end_cleanup_deferral ();
4327 emit_queue ();
4328 emit_jump_insn (gen_jump (lab2));
4329 emit_barrier ();
4330 emit_label (lab1);
4331 start_cleanup_deferral ();
4332 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4333 end_cleanup_deferral ();
4334 emit_queue ();
4335 emit_label (lab2);
4336 OK_DEFER_POP;
4338 return want_value & 1 ? target : NULL_RTX;
4340 else if (queued_subexp_p (target))
4341 /* If target contains a postincrement, let's not risk
4342 using it as the place to generate the rhs. */
4344 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4346 /* Expand EXP into a new pseudo. */
4347 temp = gen_reg_rtx (GET_MODE (target));
4348 temp = expand_expr (exp, temp, GET_MODE (target),
4349 (want_value & 2
4350 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4352 else
4353 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4354 (want_value & 2
4355 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4357 /* If target is volatile, ANSI requires accessing the value
4358 *from* the target, if it is accessed. So make that happen.
4359 In no case return the target itself. */
4360 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4361 dont_return_target = 1;
4363 else if ((want_value & 1) != 0
4364 && GET_CODE (target) == MEM
4365 && ! MEM_VOLATILE_P (target)
4366 && GET_MODE (target) != BLKmode)
4367 /* If target is in memory and caller wants value in a register instead,
4368 arrange that. Pass TARGET as target for expand_expr so that,
4369 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4370 We know expand_expr will not use the target in that case.
4371 Don't do this if TARGET is volatile because we are supposed
4372 to write it and then read it. */
4374 temp = expand_expr (exp, target, GET_MODE (target),
4375 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4376 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4378 /* If TEMP is already in the desired TARGET, only copy it from
4379 memory and don't store it there again. */
4380 if (temp == target
4381 || (rtx_equal_p (temp, target)
4382 && ! side_effects_p (temp) && ! side_effects_p (target)))
4383 dont_store_target = 1;
4384 temp = copy_to_reg (temp);
4386 dont_return_target = 1;
4388 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4389 /* If this is a scalar in a register that is stored in a wider mode
4390 than the declared mode, compute the result into its declared mode
4391 and then convert to the wider mode. Our value is the computed
4392 expression. */
4394 rtx inner_target = 0;
4396 /* If we don't want a value, we can do the conversion inside EXP,
4397 which will often result in some optimizations. Do the conversion
4398 in two steps: first change the signedness, if needed, then
4399 the extend. But don't do this if the type of EXP is a subtype
4400 of something else since then the conversion might involve
4401 more than just converting modes. */
4402 if ((want_value & 1) == 0
4403 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4404 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4406 if (TREE_UNSIGNED (TREE_TYPE (exp))
4407 != SUBREG_PROMOTED_UNSIGNED_P (target))
4408 exp = convert
4409 ((*lang_hooks.types.signed_or_unsigned_type)
4410 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4412 exp = convert ((*lang_hooks.types.type_for_mode)
4413 (GET_MODE (SUBREG_REG (target)),
4414 SUBREG_PROMOTED_UNSIGNED_P (target)),
4415 exp);
4417 inner_target = SUBREG_REG (target);
4420 temp = expand_expr (exp, inner_target, VOIDmode,
4421 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4423 /* If TEMP is a MEM and we want a result value, make the access
4424 now so it gets done only once. Strictly speaking, this is
4425 only necessary if the MEM is volatile, or if the address
4426 overlaps TARGET. But not performing the load twice also
4427 reduces the amount of rtl we generate and then have to CSE. */
4428 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4429 temp = copy_to_reg (temp);
4431 /* If TEMP is a VOIDmode constant, use convert_modes to make
4432 sure that we properly convert it. */
4433 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4435 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4436 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4437 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4438 GET_MODE (target), temp,
4439 SUBREG_PROMOTED_UNSIGNED_P (target));
4442 convert_move (SUBREG_REG (target), temp,
4443 SUBREG_PROMOTED_UNSIGNED_P (target));
4445 /* If we promoted a constant, change the mode back down to match
4446 target. Otherwise, the caller might get confused by a result whose
4447 mode is larger than expected. */
4449 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4451 if (GET_MODE (temp) != VOIDmode)
4453 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4454 SUBREG_PROMOTED_VAR_P (temp) = 1;
4455 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4456 SUBREG_PROMOTED_UNSIGNED_P (target));
4458 else
4459 temp = convert_modes (GET_MODE (target),
4460 GET_MODE (SUBREG_REG (target)),
4461 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4464 return want_value & 1 ? temp : NULL_RTX;
4466 else
4468 temp = expand_expr (exp, target, GET_MODE (target),
4469 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4470 /* Return TARGET if it's a specified hardware register.
4471 If TARGET is a volatile mem ref, either return TARGET
4472 or return a reg copied *from* TARGET; ANSI requires this.
4474 Otherwise, if TEMP is not TARGET, return TEMP
4475 if it is constant (for efficiency),
4476 or if we really want the correct value. */
4477 if (!(target && GET_CODE (target) == REG
4478 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4479 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4480 && ! rtx_equal_p (temp, target)
4481 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4482 dont_return_target = 1;
4485 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4486 the same as that of TARGET, adjust the constant. This is needed, for
4487 example, in case it is a CONST_DOUBLE and we want only a word-sized
4488 value. */
4489 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4490 && TREE_CODE (exp) != ERROR_MARK
4491 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4492 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4493 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4495 /* If value was not generated in the target, store it there.
4496 Convert the value to TARGET's type first if necessary.
4497 If TEMP and TARGET compare equal according to rtx_equal_p, but
4498 one or both of them are volatile memory refs, we have to distinguish
4499 two cases:
4500 - expand_expr has used TARGET. In this case, we must not generate
4501 another copy. This can be detected by TARGET being equal according
4502 to == .
4503 - expand_expr has not used TARGET - that means that the source just
4504 happens to have the same RTX form. Since temp will have been created
4505 by expand_expr, it will compare unequal according to == .
4506 We must generate a copy in this case, to reach the correct number
4507 of volatile memory references. */
4509 if ((! rtx_equal_p (temp, target)
4510 || (temp != target && (side_effects_p (temp)
4511 || side_effects_p (target))))
4512 && TREE_CODE (exp) != ERROR_MARK
4513 && ! dont_store_target
4514 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4515 but TARGET is not a valid memory reference, TEMP will differ
4516 from TARGET although it is really the same location. */
4517 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4518 || target != DECL_RTL_IF_SET (exp))
4519 /* If there's nothing to copy, don't bother. Don't call expr_size
4520 unless necessary, because the expr_size hook of some front ends
4521 (e.g. C++) aborts on objects that are not supposed to be bit-copied or
4522 bit-initialized. */
4523 && expr_size (exp) != const0_rtx)
4525 target = protect_from_queue (target, 1);
4526 if (GET_MODE (temp) != GET_MODE (target)
4527 && GET_MODE (temp) != VOIDmode)
4529 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4530 if (dont_return_target)
4532 /* In this case, we will return TEMP,
4533 so make sure it has the proper mode.
4534 But don't forget to store the value into TARGET. */
4535 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4536 emit_move_insn (target, temp);
4538 else
4539 convert_move (target, temp, unsignedp);
4542 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4544 /* Handle copying a string constant into an array. The string
4545 constant may be shorter than the array. So copy just the string's
4546 actual length, and clear the rest. First get the size of the data
4547 type of the string, which is actually the size of the target. */
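      /* For example, for

	     char buf[10] = "abc";

	 SIZE is 10 (the size of BUF) while the string constant occupies
	 only 4 bytes including its terminating null, so those 4 bytes
	 are block-copied and the remaining 6 bytes are cleared below. */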
4548 rtx size = expr_size (exp);
4550 if (GET_CODE (size) == CONST_INT
4551 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4552 emit_block_move (target, temp, size,
4553 (want_value & 2
4554 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4555 else
4557 /* Compute the size of the data to copy from the string. */
4558 tree copy_size
4559 = size_binop (MIN_EXPR,
4560 make_tree (sizetype, size),
4561 size_int (TREE_STRING_LENGTH (exp)));
4562 rtx copy_size_rtx
4563 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4564 (want_value & 2
4565 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4566 rtx label = 0;
4568 /* Copy that much. */
4569 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4570 emit_block_move (target, temp, copy_size_rtx,
4571 (want_value & 2
4572 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4574 /* Figure out how much is left in TARGET that we have to clear.
4575 Do all calculations in ptr_mode. */
4576 if (GET_CODE (copy_size_rtx) == CONST_INT)
4578 size = plus_constant (size, -INTVAL (copy_size_rtx));
4579 target = adjust_address (target, BLKmode,
4580 INTVAL (copy_size_rtx));
4582 else
4584 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4585 copy_size_rtx, NULL_RTX, 0,
4586 OPTAB_LIB_WIDEN);
4588 #ifdef POINTERS_EXTEND_UNSIGNED
4589 if (GET_MODE (copy_size_rtx) != Pmode)
4590 copy_size_rtx = convert_memory_address (Pmode,
4591 copy_size_rtx);
4592 #endif
4594 target = offset_address (target, copy_size_rtx,
4595 highest_pow2_factor (copy_size));
4596 label = gen_label_rtx ();
4597 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4598 GET_MODE (size), 0, label);
4601 if (size != const0_rtx)
4602 clear_storage (target, size);
4604 if (label)
4605 emit_label (label);
4608 /* Handle calls that return values in multiple non-contiguous locations.
4609 The Irix 6 ABI has examples of this. */
4610 else if (GET_CODE (target) == PARALLEL)
4611 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4612 else if (GET_MODE (temp) == BLKmode)
4613 emit_block_move (target, temp, expr_size (exp),
4614 (want_value & 2
4615 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4616 else
4617 emit_move_insn (target, temp);
4620 /* If we don't want a value, return NULL_RTX. */
4621 if ((want_value & 1) == 0)
4622 return NULL_RTX;
4624 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4625 ??? The latter test doesn't seem to make sense. */
4626 else if (dont_return_target && GET_CODE (temp) != MEM)
4627 return temp;
4629 /* Return TARGET itself if it is a hard register. */
4630 else if ((want_value & 1) != 0
4631 && GET_MODE (target) != BLKmode
4632 && ! (GET_CODE (target) == REG
4633 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4634 return copy_to_reg (target);
4636 else
4637 return target;
4640 /* Return 1 if EXP just contains zeros. */
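/* For example, the initializer in

	struct { int i; double d[2]; } z = { 0, { 0.0, 0.0 } };

   contains nothing but zeros, so callers can simply clear the whole
   object instead of storing each element separately. */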
4642 static int
4643 is_zeros_p (exp)
4644 tree exp;
4646 tree elt;
4648 switch (TREE_CODE (exp))
4650 case CONVERT_EXPR:
4651 case NOP_EXPR:
4652 case NON_LVALUE_EXPR:
4653 case VIEW_CONVERT_EXPR:
4654 return is_zeros_p (TREE_OPERAND (exp, 0));
4656 case INTEGER_CST:
4657 return integer_zerop (exp);
4659 case COMPLEX_CST:
4660 return
4661 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4663 case REAL_CST:
4664 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4666 case VECTOR_CST:
4667 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4668 elt = TREE_CHAIN (elt))
4669 if (!is_zeros_p (TREE_VALUE (elt)))
4670 return 0;
4672 return 1;
4674 case CONSTRUCTOR:
4675 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4676 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4677 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4678 if (! is_zeros_p (TREE_VALUE (elt)))
4679 return 0;
4681 return 1;
4683 default:
4684 return 0;
4688 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
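/* For example,

	int v[8] = { 0, 0, 0, 0, 0, 0, 5, 0 };

   has 7 zero elements out of 8, and 4 * 7 >= 3 * 8, so it pays to
   clear the whole array once and then store only the nonzero
   element. */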
4690 static int
4691 mostly_zeros_p (exp)
4692 tree exp;
4694 if (TREE_CODE (exp) == CONSTRUCTOR)
4696 int elts = 0, zeros = 0;
4697 tree elt = CONSTRUCTOR_ELTS (exp);
4698 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4700 /* If there are no ranges of true bits, it is all zero. */
4701 return elt == NULL_TREE;
4703 for (; elt; elt = TREE_CHAIN (elt))
4705 /* We do not handle the case where the index is a RANGE_EXPR,
4706 so the statistic will be somewhat inaccurate.
4707 We do make a more accurate count in store_constructor itself,
4708 and since this function is only used for nested array elements,
4709 this estimate should be close enough. */
4710 if (mostly_zeros_p (TREE_VALUE (elt)))
4711 zeros++;
4712 elts++;
4715 return 4 * zeros >= 3 * elts;
4718 return is_zeros_p (exp);
4721 /* Helper function for store_constructor.
4722 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4723 TYPE is the type of the CONSTRUCTOR, not the element type.
4724 CLEARED is as for store_constructor.
4725 ALIAS_SET is the alias set to use for any stores.
4727 This provides a recursive shortcut back to store_constructor when it isn't
4728 necessary to go through store_field. This is so that we can pass through
4729 the cleared field to let store_constructor know that we may not have to
4730 clear a substructure if the outer structure has already been cleared. */
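/* Thus for a partial aggregate initializer like

	struct inner { int a, b; };
	struct outer { struct inner in; int c; } o = { { 0, 5 } };

   the missing field C causes the whole of O to be cleared first; the
   inner CONSTRUCTOR is then handed straight back to store_constructor
   with CLEARED set, so O.IN is not cleared a second time and only the
   nonzero element 5 is actually stored. */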
4732 static void
4733 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4734 alias_set)
4735 rtx target;
4736 unsigned HOST_WIDE_INT bitsize;
4737 HOST_WIDE_INT bitpos;
4738 enum machine_mode mode;
4739 tree exp, type;
4740 int cleared;
4741 int alias_set;
4743 if (TREE_CODE (exp) == CONSTRUCTOR
4744 && bitpos % BITS_PER_UNIT == 0
4745 /* If we have a nonzero bitpos for a register target, then we just
4746 let store_field do the bitfield handling. This is unlikely to
4747 generate unnecessary clear instructions anyway. */
4748 && (bitpos == 0 || GET_CODE (target) == MEM))
4750 if (GET_CODE (target) == MEM)
4751 target
4752 = adjust_address (target,
4753 GET_MODE (target) == BLKmode
4754 || 0 != (bitpos
4755 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4756 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4759 /* Update the alias set, if required. */
4760 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4761 && MEM_ALIAS_SET (target) != 0)
4763 target = copy_rtx (target);
4764 set_mem_alias_set (target, alias_set);
4767 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4769 else
4770 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4771 alias_set);
4774 /* Store the value of constructor EXP into the rtx TARGET.
4775 TARGET is either a REG or a MEM; we know it cannot conflict, since
4776 safe_from_p has been called.
4777 CLEARED is true if TARGET is known to have been zero'd.
4778 SIZE is the number of bytes of TARGET we are allowed to modify: this
4779 may not be the same as the size of EXP if we are assigning to a field
4780 which has been packed to exclude padding bits. */
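/* As a concrete example, for the local variable

	struct point { int x, y; } p = { 3, 4 };

   TARGET is the rtx for P and each constructor element is stored into
   the corresponding field; had the initializer omitted Y, the whole
   structure would be cleared first and only X stored. */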
4782 static void
4783 store_constructor (exp, target, cleared, size)
4784 tree exp;
4785 rtx target;
4786 int cleared;
4787 HOST_WIDE_INT size;
4789 tree type = TREE_TYPE (exp);
4790 #ifdef WORD_REGISTER_OPERATIONS
4791 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4792 #endif
4794 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4795 || TREE_CODE (type) == QUAL_UNION_TYPE)
4797 tree elt;
4799 /* We either clear the aggregate or indicate the value is dead. */
4800 if ((TREE_CODE (type) == UNION_TYPE
4801 || TREE_CODE (type) == QUAL_UNION_TYPE)
4802 && ! cleared
4803 && ! CONSTRUCTOR_ELTS (exp))
4804 /* If the constructor is empty, clear the union. */
4806 clear_storage (target, expr_size (exp));
4807 cleared = 1;
4810 /* If we are building a static constructor into a register,
4811 set the initial value as zero so we can fold the value into
4812 a constant. But if more than one register is involved,
4813 this probably loses. */
4814 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4815 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4817 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4818 cleared = 1;
4821 /* If the constructor has fewer fields than the structure
4822 or if we are initializing the structure to mostly zeros,
4823 clear the whole structure first. Don't do this if TARGET is a
4824 register whose mode size isn't equal to SIZE since clear_storage
4825 can't handle this case. */
4826 else if (! cleared && size > 0
4827 && ((list_length (CONSTRUCTOR_ELTS (exp))
4828 != fields_length (type))
4829 || mostly_zeros_p (exp))
4830 && (GET_CODE (target) != REG
4831 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4832 == size)))
4834 clear_storage (target, GEN_INT (size));
4835 cleared = 1;
4838 if (! cleared)
4839 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4841 /* Store each element of the constructor into
4842 the corresponding field of TARGET. */
4844 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4846 tree field = TREE_PURPOSE (elt);
4847 tree value = TREE_VALUE (elt);
4848 enum machine_mode mode;
4849 HOST_WIDE_INT bitsize;
4850 HOST_WIDE_INT bitpos = 0;
4851 tree offset;
4852 rtx to_rtx = target;
4854 /* Just ignore missing fields.
4855 We cleared the whole structure, above,
4856 if any fields are missing. */
4857 if (field == 0)
4858 continue;
4860 if (cleared && is_zeros_p (value))
4861 continue;
4863 if (host_integerp (DECL_SIZE (field), 1))
4864 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4865 else
4866 bitsize = -1;
4868 mode = DECL_MODE (field);
4869 if (DECL_BIT_FIELD (field))
4870 mode = VOIDmode;
4872 offset = DECL_FIELD_OFFSET (field);
4873 if (host_integerp (offset, 0)
4874 && host_integerp (bit_position (field), 0))
4876 bitpos = int_bit_position (field);
4877 offset = 0;
4879 else
4880 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4882 if (offset)
4884 rtx offset_rtx;
4886 if (contains_placeholder_p (offset))
4887 offset = build (WITH_RECORD_EXPR, sizetype,
4888 offset, make_tree (TREE_TYPE (exp), target));
4890 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4891 if (GET_CODE (to_rtx) != MEM)
4892 abort ();
4894 #ifdef POINTERS_EXTEND_UNSIGNED
4895 if (GET_MODE (offset_rtx) != Pmode)
4896 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4897 #else
4898 if (GET_MODE (offset_rtx) != ptr_mode)
4899 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4900 #endif
4902 to_rtx = offset_address (to_rtx, offset_rtx,
4903 highest_pow2_factor (offset));
4906 if (TREE_READONLY (field))
4908 if (GET_CODE (to_rtx) == MEM)
4909 to_rtx = copy_rtx (to_rtx);
4911 RTX_UNCHANGING_P (to_rtx) = 1;
4914 #ifdef WORD_REGISTER_OPERATIONS
4915 /* If this initializes a field that is smaller than a word, at the
4916 start of a word, try to widen it to a full word.
4917 This special case allows us to output C++ member function
4918 initializations in a form that the optimizers can understand. */
4919 if (GET_CODE (target) == REG
4920 && bitsize < BITS_PER_WORD
4921 && bitpos % BITS_PER_WORD == 0
4922 && GET_MODE_CLASS (mode) == MODE_INT
4923 && TREE_CODE (value) == INTEGER_CST
4924 && exp_size >= 0
4925 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4927 tree type = TREE_TYPE (value);
4929 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4931 type = (*lang_hooks.types.type_for_size)
4932 (BITS_PER_WORD, TREE_UNSIGNED (type));
4933 value = convert (type, value);
4936 if (BYTES_BIG_ENDIAN)
4937 value
4938 = fold (build (LSHIFT_EXPR, type, value,
4939 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4940 bitsize = BITS_PER_WORD;
4941 mode = word_mode;
4943 #endif
4945 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4946 && DECL_NONADDRESSABLE_P (field))
4948 to_rtx = copy_rtx (to_rtx);
4949 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4952 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4953 value, type, cleared,
4954 get_alias_set (TREE_TYPE (field)));
4957 else if (TREE_CODE (type) == ARRAY_TYPE
4958 || TREE_CODE (type) == VECTOR_TYPE)
4960 tree elt;
4961 int i;
4962 int need_to_clear;
4963 tree domain = TYPE_DOMAIN (type);
4964 tree elttype = TREE_TYPE (type);
4965 int const_bounds_p;
4966 HOST_WIDE_INT minelt = 0;
4967 HOST_WIDE_INT maxelt = 0;
4969 /* Vectors are like arrays, but the domain is stored via an array
4970 type indirectly. */
4971 if (TREE_CODE (type) == VECTOR_TYPE)
4973 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4974 the same field as TYPE_DOMAIN, we are not guaranteed that
4975 it always will. */
4976 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4977 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4980 const_bounds_p = (TYPE_MIN_VALUE (domain)
4981 && TYPE_MAX_VALUE (domain)
4982 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4983 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4985 /* If we have constant bounds for the range of the type, get them. */
4986 if (const_bounds_p)
4988 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4989 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4992 /* If the constructor has fewer elements than the array,
4993 clear the whole array first. Similarly if this is
4994 a static constructor of a non-BLKmode object. */
4995 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4996 need_to_clear = 1;
4997 else
4999 HOST_WIDE_INT count = 0, zero_count = 0;
5000 need_to_clear = ! const_bounds_p;
5002 /* This loop is a more accurate version of the loop in
5003 mostly_zeros_p (it handles RANGE_EXPR in an index).
5004 It is also needed to check for missing elements. */
5005 for (elt = CONSTRUCTOR_ELTS (exp);
5006 elt != NULL_TREE && ! need_to_clear;
5007 elt = TREE_CHAIN (elt))
5009 tree index = TREE_PURPOSE (elt);
5010 HOST_WIDE_INT this_node_count;
5012 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5014 tree lo_index = TREE_OPERAND (index, 0);
5015 tree hi_index = TREE_OPERAND (index, 1);
5017 if (! host_integerp (lo_index, 1)
5018 || ! host_integerp (hi_index, 1))
5020 need_to_clear = 1;
5021 break;
5024 this_node_count = (tree_low_cst (hi_index, 1)
5025 - tree_low_cst (lo_index, 1) + 1);
5027 else
5028 this_node_count = 1;
5030 count += this_node_count;
5031 if (mostly_zeros_p (TREE_VALUE (elt)))
5032 zero_count += this_node_count;
5035 /* Clear the entire array first if there are any missing elements,
5036 or if the incidence of zero elements is >= 75%. */
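	  /* E.g. with 8 elements of which 6 are (mostly) zero,
	     4 * 6 >= 3 * 8 holds, so the whole array is cleared once and
	     only the two nonzero elements are stored individually. */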
5037 if (! need_to_clear
5038 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5039 need_to_clear = 1;
5042 if (need_to_clear && size > 0)
5044 if (! cleared)
5046 if (REG_P (target))
5047 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5048 else
5049 clear_storage (target, GEN_INT (size));
5051 cleared = 1;
5053 else if (REG_P (target))
5054 /* Inform later passes that the old value is dead. */
5055 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5057 /* Store each element of the constructor into
5058 the corresponding element of TARGET, determined
5059 by counting the elements. */
5060 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5061 elt;
5062 elt = TREE_CHAIN (elt), i++)
5064 enum machine_mode mode;
5065 HOST_WIDE_INT bitsize;
5066 HOST_WIDE_INT bitpos;
5067 int unsignedp;
5068 tree value = TREE_VALUE (elt);
5069 tree index = TREE_PURPOSE (elt);
5070 rtx xtarget = target;
5072 if (cleared && is_zeros_p (value))
5073 continue;
5075 unsignedp = TREE_UNSIGNED (elttype);
5076 mode = TYPE_MODE (elttype);
5077 if (mode == BLKmode)
5078 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5079 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5080 : -1);
5081 else
5082 bitsize = GET_MODE_BITSIZE (mode);
5084 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5086 tree lo_index = TREE_OPERAND (index, 0);
5087 tree hi_index = TREE_OPERAND (index, 1);
5088 rtx index_r, pos_rtx, loop_end;
5089 struct nesting *loop;
5090 HOST_WIDE_INT lo, hi, count;
5091 tree position;
5093 /* If the range is constant and "small", unroll the loop. */
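	      /* Such an index arises from a designated range like

		     int a[16] = { [4 ... 7] = 9 };

		 where the four int stores fit well under the 40-byte
		 limit tested below, so they are emitted inline; a large
		 or non-constant range falls through to the runtime loop
		 in the else arm. */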
5094 if (const_bounds_p
5095 && host_integerp (lo_index, 0)
5096 && host_integerp (hi_index, 0)
5097 && (lo = tree_low_cst (lo_index, 0),
5098 hi = tree_low_cst (hi_index, 0),
5099 count = hi - lo + 1,
5100 (GET_CODE (target) != MEM
5101 || count <= 2
5102 || (host_integerp (TYPE_SIZE (elttype), 1)
5103 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5104 <= 40 * 8)))))
5106 lo -= minelt; hi -= minelt;
5107 for (; lo <= hi; lo++)
5109 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5111 if (GET_CODE (target) == MEM
5112 && !MEM_KEEP_ALIAS_SET_P (target)
5113 && TREE_CODE (type) == ARRAY_TYPE
5114 && TYPE_NONALIASED_COMPONENT (type))
5116 target = copy_rtx (target);
5117 MEM_KEEP_ALIAS_SET_P (target) = 1;
5120 store_constructor_field
5121 (target, bitsize, bitpos, mode, value, type, cleared,
5122 get_alias_set (elttype));
5125 else
5127 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5128 loop_end = gen_label_rtx ();
5130 unsignedp = TREE_UNSIGNED (domain);
5132 index = build_decl (VAR_DECL, NULL_TREE, domain);
5134 index_r
5135 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5136 &unsignedp, 0));
5137 SET_DECL_RTL (index, index_r);
5138 if (TREE_CODE (value) == SAVE_EXPR
5139 && SAVE_EXPR_RTL (value) == 0)
5141 /* Make sure value gets expanded once before the
5142 loop. */
5143 expand_expr (value, const0_rtx, VOIDmode, 0);
5144 emit_queue ();
5146 store_expr (lo_index, index_r, 0);
5147 loop = expand_start_loop (0);
5149 /* Assign value to element index. */
5150 position
5151 = convert (ssizetype,
5152 fold (build (MINUS_EXPR, TREE_TYPE (index),
5153 index, TYPE_MIN_VALUE (domain))));
5154 position = size_binop (MULT_EXPR, position,
5155 convert (ssizetype,
5156 TYPE_SIZE_UNIT (elttype)));
5158 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5159 xtarget = offset_address (target, pos_rtx,
5160 highest_pow2_factor (position));
5161 xtarget = adjust_address (xtarget, mode, 0);
5162 if (TREE_CODE (value) == CONSTRUCTOR)
5163 store_constructor (value, xtarget, cleared,
5164 bitsize / BITS_PER_UNIT);
5165 else
5166 store_expr (value, xtarget, 0);
5168 expand_exit_loop_if_false (loop,
5169 build (LT_EXPR, integer_type_node,
5170 index, hi_index));
5172 expand_increment (build (PREINCREMENT_EXPR,
5173 TREE_TYPE (index),
5174 index, integer_one_node), 0, 0);
5175 expand_end_loop ();
5176 emit_label (loop_end);
5179 else if ((index != 0 && ! host_integerp (index, 0))
5180 || ! host_integerp (TYPE_SIZE (elttype), 1))
5182 tree position;
5184 if (index == 0)
5185 index = ssize_int (1);
5187 if (minelt)
5188 index = convert (ssizetype,
5189 fold (build (MINUS_EXPR, index,
5190 TYPE_MIN_VALUE (domain))));
5192 position = size_binop (MULT_EXPR, index,
5193 convert (ssizetype,
5194 TYPE_SIZE_UNIT (elttype)));
5195 xtarget = offset_address (target,
5196 expand_expr (position, 0, VOIDmode, 0),
5197 highest_pow2_factor (position));
5198 xtarget = adjust_address (xtarget, mode, 0);
5199 store_expr (value, xtarget, 0);
5201 else
5203 if (index != 0)
5204 bitpos = ((tree_low_cst (index, 0) - minelt)
5205 * tree_low_cst (TYPE_SIZE (elttype), 1));
5206 else
5207 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5209 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5210 && TREE_CODE (type) == ARRAY_TYPE
5211 && TYPE_NONALIASED_COMPONENT (type))
5213 target = copy_rtx (target);
5214 MEM_KEEP_ALIAS_SET_P (target) = 1;
5217 store_constructor_field (target, bitsize, bitpos, mode, value,
5218 type, cleared, get_alias_set (elttype));
5224 /* Set constructor assignments. */
5225 else if (TREE_CODE (type) == SET_TYPE)
5227 tree elt = CONSTRUCTOR_ELTS (exp);
5228 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5229 tree domain = TYPE_DOMAIN (type);
5230 tree domain_min, domain_max, bitlength;
5232 /* The default implementation strategy is to extract the constant
5233 parts of the constructor, use that to initialize the target,
5234 and then "or" in whatever non-constant ranges we need in addition.
5236 If a large set is all zero or all ones, it is
5237 probably better to set it using memset (if available) or bzero.
5238 Also, if a large set has just a single range, it may be
5239 better to first clear the whole set (using
5240 bzero/memset) and then set just the bits we want. */
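      /* As a rough illustration of the constant case handled below: with
	 a 32-bit set word and a constructor whose only members are bits 1
	 and 3, get_set_constructor_bits marks those bits in the buffer and
	 the loop below builds the single word (1 << 1) | (1 << 3) == 0xa
	 (when BYTES_BIG_ENDIAN is not set), which is then stored with one
	 move instead of a library call. */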
5242 /* Check for all zeros. */
5243 if (elt == NULL_TREE && size > 0)
5245 if (!cleared)
5246 clear_storage (target, GEN_INT (size));
5247 return;
5250 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5251 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5252 bitlength = size_binop (PLUS_EXPR,
5253 size_diffop (domain_max, domain_min),
5254 ssize_int (1));
5256 nbits = tree_low_cst (bitlength, 1);
5258 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5259 are "complicated" (more than one range), initialize (the
5260 constant parts) by copying from a constant. */
5261 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5262 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5264 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5265 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5266 char *bit_buffer = (char *) alloca (nbits);
5267 HOST_WIDE_INT word = 0;
5268 unsigned int bit_pos = 0;
5269 unsigned int ibit = 0;
5270 unsigned int offset = 0; /* In bytes from beginning of set. */
5272 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5273 for (;;)
5275 if (bit_buffer[ibit])
5277 if (BYTES_BIG_ENDIAN)
5278 word |= (1 << (set_word_size - 1 - bit_pos));
5279 else
5280 word |= 1 << bit_pos;
5283 bit_pos++; ibit++;
5284 if (bit_pos >= set_word_size || ibit == nbits)
5286 if (word != 0 || ! cleared)
5288 rtx datum = GEN_INT (word);
5289 rtx to_rtx;
5291 /* The assumption here is that it is safe to use
5292 XEXP if the set is multi-word, but not if
5293 it's single-word. */
5294 if (GET_CODE (target) == MEM)
5295 to_rtx = adjust_address (target, mode, offset);
5296 else if (offset == 0)
5297 to_rtx = target;
5298 else
5299 abort ();
5300 emit_move_insn (to_rtx, datum);
5303 if (ibit == nbits)
5304 break;
5305 word = 0;
5306 bit_pos = 0;
5307 offset += set_word_size / BITS_PER_UNIT;
5311 else if (!cleared)
5312 /* Don't bother clearing storage if the set is all ones. */
5313 if (TREE_CHAIN (elt) != NULL_TREE
5314 || (TREE_PURPOSE (elt) == NULL_TREE
5315 ? nbits != 1
5316 : ( ! host_integerp (TREE_VALUE (elt), 0)
5317 || ! host_integerp (TREE_PURPOSE (elt), 0)
5318 || (tree_low_cst (TREE_VALUE (elt), 0)
5319 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5320 != (HOST_WIDE_INT) nbits))))
5321 clear_storage (target, expr_size (exp));
5323 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5325 /* Start of range of element or NULL. */
5326 tree startbit = TREE_PURPOSE (elt);
5327 /* End of range of element, or element value. */
5328 tree endbit = TREE_VALUE (elt);
5329 HOST_WIDE_INT startb, endb;
5330 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5332 bitlength_rtx = expand_expr (bitlength,
5333 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5335 /* Handle non-range tuple element like [ expr ]. */
5336 if (startbit == NULL_TREE)
5338 startbit = save_expr (endbit);
5339 endbit = startbit;
5342 startbit = convert (sizetype, startbit);
5343 endbit = convert (sizetype, endbit);
5344 if (! integer_zerop (domain_min))
5346 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5347 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5349 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5350 EXPAND_CONST_ADDRESS);
5351 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5352 EXPAND_CONST_ADDRESS);
5354 if (REG_P (target))
5356 targetx
5357 = assign_temp
5358 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5359 (GET_MODE (target), 0),
5360 TYPE_QUAL_CONST)),
5361 0, 1, 1);
5362 emit_move_insn (targetx, target);
5365 else if (GET_CODE (target) == MEM)
5366 targetx = target;
5367 else
5368 abort ();
5370 /* Optimization: If startbit and endbit are constants divisible
5371 by BITS_PER_UNIT, call memset instead. */
5372 if (TARGET_MEM_FUNCTIONS
5373 && TREE_CODE (startbit) == INTEGER_CST
5374 && TREE_CODE (endbit) == INTEGER_CST
5375 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5376 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5378 emit_library_call (memset_libfunc, LCT_NORMAL,
5379 VOIDmode, 3,
5380 plus_constant (XEXP (targetx, 0),
5381 startb / BITS_PER_UNIT),
5382 Pmode,
5383 constm1_rtx, TYPE_MODE (integer_type_node),
5384 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5385 TYPE_MODE (sizetype));
5387 else
5388 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5389 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5390 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5391 startbit_rtx, TYPE_MODE (sizetype),
5392 endbit_rtx, TYPE_MODE (sizetype));
5394 if (REG_P (target))
5395 emit_move_insn (target, targetx);
5399 else
5400 abort ();
5403 /* Store the value of EXP (an expression tree)
5404 into a subfield of TARGET which has mode MODE and occupies
5405 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5406 If MODE is VOIDmode, it means that we are storing into a bit-field.
5408 If VALUE_MODE is VOIDmode, return nothing in particular.
5409 UNSIGNEDP is not used in this case.
5411 Otherwise, return an rtx for the value stored. This rtx
5412 has mode VALUE_MODE if that is convenient to do.
5413 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5415 TYPE is the type of the underlying object,
5417 ALIAS_SET is the alias set for the destination. This value will
5418 (in general) be different from that for TARGET, since TARGET is a
5419 reference to the containing structure. */
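/* A typical use is storing into a bit-field, e.g. for

	struct s { unsigned f : 5; } x;
	x.f = n;

   MODE is VOIDmode, BITSIZE is 5 and BITPOS is F's position within X,
   so the value is inserted with store_bit_field; a field whose mode
   can be addressed directly is instead stored through an ordinary
   memory reference near the end of this function. */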
5421 static rtx
5422 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5423 alias_set)
5424 rtx target;
5425 HOST_WIDE_INT bitsize;
5426 HOST_WIDE_INT bitpos;
5427 enum machine_mode mode;
5428 tree exp;
5429 enum machine_mode value_mode;
5430 int unsignedp;
5431 tree type;
5432 int alias_set;
5434 HOST_WIDE_INT width_mask = 0;
5436 if (TREE_CODE (exp) == ERROR_MARK)
5437 return const0_rtx;
5439 /* If we have nothing to store, do nothing unless the expression has
5440 side-effects. */
5441 if (bitsize == 0)
5442 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5443 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5444 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5446 /* If we are storing into an unaligned field of an aligned union that is
5447 in a register, we may have the mode of TARGET being an integer mode but
5448 MODE == BLKmode. In that case, get an aligned object whose size and
5449 alignment are the same as TARGET and store TARGET into it (we can avoid
5450 the store if the field being stored is the entire width of TARGET). Then
5451 call ourselves recursively to store the field into a BLKmode version of
5452 that object. Finally, load from the object into TARGET. This is not
5453 very efficient in general, but should only be slightly more expensive
5454 than the otherwise-required unaligned accesses. Perhaps this can be
5455 cleaned up later. */
5457 if (mode == BLKmode
5458 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5460 rtx object
5461 = assign_temp
5462 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5463 0, 1, 1);
5464 rtx blk_object = adjust_address (object, BLKmode, 0);
5466 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5467 emit_move_insn (object, target);
5469 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5470 alias_set);
5472 emit_move_insn (target, object);
5474 /* We want to return the BLKmode version of the data. */
5475 return blk_object;
5478 if (GET_CODE (target) == CONCAT)
5480 /* We're storing into a struct containing a single __complex. */
5482 if (bitpos != 0)
5483 abort ();
5484 return store_expr (exp, target, 0);
5487 /* If the structure is in a register or if the component
5488 is a bit field, we cannot use addressing to access it.
5489 Use bit-field techniques or SUBREG to store in it. */
5491 if (mode == VOIDmode
5492 || (mode != BLKmode && ! direct_store[(int) mode]
5493 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5494 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5495 || GET_CODE (target) == REG
5496 || GET_CODE (target) == SUBREG
5497 /* If the field isn't aligned enough to store as an ordinary memref,
5498 store it as a bit field. */
5499 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5500 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5501 || bitpos % GET_MODE_ALIGNMENT (mode)))
5502 /* If the RHS and field are a constant size and the size of the
5503 RHS isn't the same size as the bitfield, we must use bitfield
5504 operations. */
5505 || (bitsize >= 0
5506 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5507 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5509 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5511 /* If BITSIZE is narrower than the size of the type of EXP
5512 we will be narrowing TEMP. Normally, what's wanted are the
5513 low-order bits. However, if EXP's type is a record and this is a
5514 big-endian machine, we want the upper BITSIZE bits. */
5515 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5516 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5517 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5518 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5519 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5520 - bitsize),
5521 temp, 1);
5523 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5524 MODE. */
5525 if (mode != VOIDmode && mode != BLKmode
5526 && mode != TYPE_MODE (TREE_TYPE (exp)))
5527 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5529 /* If the modes of TARGET and TEMP are both BLKmode, both
5530 must be in memory and BITPOS must be aligned on a byte
5531 boundary. If so, we simply do a block copy. */
5532 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5534 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5535 || bitpos % BITS_PER_UNIT != 0)
5536 abort ();
5538 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5539 emit_block_move (target, temp,
5540 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5541 / BITS_PER_UNIT),
5542 BLOCK_OP_NORMAL);
5544 return value_mode == VOIDmode ? const0_rtx : target;
5547 /* Store the value in the bitfield. */
5548 store_bit_field (target, bitsize, bitpos, mode, temp,
5549 int_size_in_bytes (type));
5551 if (value_mode != VOIDmode)
5553 /* The caller wants an rtx for the value.
5554 If possible, avoid refetching from the bitfield itself. */
5555 if (width_mask != 0
5556 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5558 tree count;
5559 enum machine_mode tmode;
5561 tmode = GET_MODE (temp);
5562 if (tmode == VOIDmode)
5563 tmode = value_mode;
5565 if (unsignedp)
5566 return expand_and (tmode, temp,
5567 gen_int_mode (width_mask, tmode),
5568 NULL_RTX);
5570 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5571 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5572 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5575 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5576 NULL_RTX, value_mode, VOIDmode,
5577 int_size_in_bytes (type));
5579 return const0_rtx;
5581 else
5583 rtx addr = XEXP (target, 0);
5584 rtx to_rtx = target;
5586 /* If a value is wanted, it must be the lhs;
5587 so make the address stable for multiple use. */
5589 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5590 && ! CONSTANT_ADDRESS_P (addr)
5591 /* A frame-pointer reference is already stable. */
5592 && ! (GET_CODE (addr) == PLUS
5593 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5594 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5595 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5596 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5598 /* Now build a reference to just the desired component. */
5600 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5602 if (to_rtx == target)
5603 to_rtx = copy_rtx (to_rtx);
5605 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5606 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5607 set_mem_alias_set (to_rtx, alias_set);
5609 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5613 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5614 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5615 codes and find the ultimate containing object, which we return.
5617 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5618 bit position, and *PUNSIGNEDP to the signedness of the field.
5619 If the position of the field is variable, we store a tree
5620 giving the variable offset (in units) in *POFFSET.
5621 This offset is in addition to the bit position.
5622 If the position is not variable, we store 0 in *POFFSET.
5624 If any of the extraction expressions is volatile,
5625 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5627 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5628 is a mode that can be used to access the field. In that case, *PBITSIZE
5629 is redundant.
5631 If the field describes a variable-sized object, *PMODE is set to
5632 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5633 this case, but the address of the object can be found. */
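/* For instance, for a reference to X.F where

	struct s { int pad; int f : 3; } x;

   the object returned is X, *PBITSIZE is 3, *PBITPOS is the constant
   bit position of F within X, *PMODE is VOIDmode because F is a
   bit-field, and *POFFSET is 0. For a variably indexed reference such
   as A[I].F, the variable part of the displacement comes back in
   *POFFSET instead. */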
5635 tree
5636 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5637 punsignedp, pvolatilep)
5638 tree exp;
5639 HOST_WIDE_INT *pbitsize;
5640 HOST_WIDE_INT *pbitpos;
5641 tree *poffset;
5642 enum machine_mode *pmode;
5643 int *punsignedp;
5644 int *pvolatilep;
5646 tree size_tree = 0;
5647 enum machine_mode mode = VOIDmode;
5648 tree offset = size_zero_node;
5649 tree bit_offset = bitsize_zero_node;
5650 tree placeholder_ptr = 0;
5651 tree tem;
5653 /* First get the mode, signedness, and size. We do this from just the
5654 outermost expression. */
5655 if (TREE_CODE (exp) == COMPONENT_REF)
5657 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5658 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5659 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5661 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5663 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5665 size_tree = TREE_OPERAND (exp, 1);
5666 *punsignedp = TREE_UNSIGNED (exp);
5668 else
5670 mode = TYPE_MODE (TREE_TYPE (exp));
5671 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5673 if (mode == BLKmode)
5674 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5675 else
5676 *pbitsize = GET_MODE_BITSIZE (mode);
5679 if (size_tree != 0)
5681 if (! host_integerp (size_tree, 1))
5682 mode = BLKmode, *pbitsize = -1;
5683 else
5684 *pbitsize = tree_low_cst (size_tree, 1);
5687 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5688 and find the ultimate containing object. */
5689 while (1)
5691 if (TREE_CODE (exp) == BIT_FIELD_REF)
5692 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5693 else if (TREE_CODE (exp) == COMPONENT_REF)
5695 tree field = TREE_OPERAND (exp, 1);
5696 tree this_offset = DECL_FIELD_OFFSET (field);
5698 /* If this field hasn't been filled in yet, don't go
5699 past it. This should only happen when folding expressions
5700 made during type construction. */
5701 if (this_offset == 0)
5702 break;
5703 else if (! TREE_CONSTANT (this_offset)
5704 && contains_placeholder_p (this_offset))
5705 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5707 offset = size_binop (PLUS_EXPR, offset, this_offset);
5708 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5709 DECL_FIELD_BIT_OFFSET (field));
5711 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5714 else if (TREE_CODE (exp) == ARRAY_REF
5715 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5717 tree index = TREE_OPERAND (exp, 1);
5718 tree array = TREE_OPERAND (exp, 0);
5719 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5720 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5721 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5723 /* We assume all arrays have sizes that are a multiple of a byte.
5724 First subtract the lower bound, if any, in the type of the
5725 index, then convert to sizetype and multiply by the size of the
5726 array element. */
5727 if (low_bound != 0 && ! integer_zerop (low_bound))
5728 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5729 index, low_bound));
5731 /* If the index has a self-referential type, pass it to a
5732 WITH_RECORD_EXPR; if the component size does, wrap it in one
5733 that refers to the containing array. */
5734 if (! TREE_CONSTANT (index)
5735 && contains_placeholder_p (index))
5736 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5737 if (! TREE_CONSTANT (unit_size)
5738 && contains_placeholder_p (unit_size))
5739 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5741 offset = size_binop (PLUS_EXPR, offset,
5742 size_binop (MULT_EXPR,
5743 convert (sizetype, index),
5744 unit_size));
5747 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5749 tree new = find_placeholder (exp, &placeholder_ptr);
5751 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5752 We might have been called from tree optimization where we
5753 haven't set up an object yet. */
5754 if (new == 0)
5755 break;
5756 else
5757 exp = new;
5759 continue;
5761 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5762 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5763 && ! ((TREE_CODE (exp) == NOP_EXPR
5764 || TREE_CODE (exp) == CONVERT_EXPR)
5765 && (TYPE_MODE (TREE_TYPE (exp))
5766 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5767 break;
5769 /* If any reference in the chain is volatile, the effect is volatile. */
5770 if (TREE_THIS_VOLATILE (exp))
5771 *pvolatilep = 1;
5773 exp = TREE_OPERAND (exp, 0);
5776 /* If OFFSET is constant, see if we can return the whole thing as a
5777 constant bit position. Otherwise, split it up. */
5778 if (host_integerp (offset, 0)
5779 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5780 bitsize_unit_node))
5781 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5782 && host_integerp (tem, 0))
5783 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5784 else
5785 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5787 *pmode = mode;
5788 return exp;
5791 /* Return 1 if T is an expression that get_inner_reference handles. */
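/* Editorial sketch, not from the original source: a caller might use this
   predicate to strip down to the base object, roughly as

     while (handled_component_p (t))
       t = TREE_OPERAND (t, 0);

   leaving T as the ultimate containing object.  */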
5794 handled_component_p (t)
5795 tree t;
5797 switch (TREE_CODE (t))
5799 case BIT_FIELD_REF:
5800 case COMPONENT_REF:
5801 case ARRAY_REF:
5802 case ARRAY_RANGE_REF:
5803 case NON_LVALUE_EXPR:
5804 case VIEW_CONVERT_EXPR:
5805 return 1;
5807 case NOP_EXPR:
5808 case CONVERT_EXPR:
5809 return (TYPE_MODE (TREE_TYPE (t))
5810 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5812 default:
5813 return 0;
5817 /* Given an rtx VALUE that may contain additions and multiplications, return
5818 an equivalent value that just refers to a register, memory, or constant.
5819 This is done by generating instructions to perform the arithmetic and
5820 returning a pseudo-register containing the value.
5822 The returned value may be a REG, SUBREG, MEM or constant. */
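/* Editorial sketch, not from the original source: forcing a computed
   address into operand form might look like the following, where BASE_REG
   is an illustrative register rtx built by earlier expansion:

     rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (4));
     rtx op = force_operand (addr, NULL_RTX);

   OP then refers to a register (or other simple operand) holding the sum,
   suitable wherever a general operand is required.  */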
5825 force_operand (value, target)
5826 rtx value, target;
5828 rtx op1, op2;
5829 /* Use subtarget as the target for operand 0 of a binary operation. */
5830 rtx subtarget = get_subtarget (target);
5831 enum rtx_code code = GET_CODE (value);
5833 /* Check for a PIC address load. */
5834 if ((code == PLUS || code == MINUS)
5835 && XEXP (value, 0) == pic_offset_table_rtx
5836 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5837 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5838 || GET_CODE (XEXP (value, 1)) == CONST))
5840 if (!subtarget)
5841 subtarget = gen_reg_rtx (GET_MODE (value));
5842 emit_move_insn (subtarget, value);
5843 return subtarget;
5846 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5848 if (!target)
5849 target = gen_reg_rtx (GET_MODE (value));
5850 convert_move (target, force_operand (XEXP (value, 0), NULL),
5851 code == ZERO_EXTEND);
5852 return target;
5855 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5857 op2 = XEXP (value, 1);
5858 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5859 subtarget = 0;
5860 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5862 code = PLUS;
5863 op2 = negate_rtx (GET_MODE (value), op2);
5866 /* Check for an addition with OP2 a constant integer and our first
5867 operand a PLUS of a virtual register and something else. In that
5868 case, we want to emit the sum of the virtual register and the
5869 constant first and then add the other value. This allows virtual
5870 register instantiation to simply modify the constant rather than
5871 creating another one around this addition. */
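/* Editorial illustration, not part of the original source: given a value
   such as

     (plus (plus (reg virtual-stack-vars) (reg 100)) (const_int 4))

   the code below first emits virtual-stack-vars + 4 and then adds (reg 100),
   so that virtual register instantiation can fold the 4 into the
   frame-pointer offset instead of emitting a separate add.  */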
5872 if (code == PLUS && GET_CODE (op2) == CONST_INT
5873 && GET_CODE (XEXP (value, 0)) == PLUS
5874 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5875 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5876 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5878 rtx temp = expand_simple_binop (GET_MODE (value), code,
5879 XEXP (XEXP (value, 0), 0), op2,
5880 subtarget, 0, OPTAB_LIB_WIDEN);
5881 return expand_simple_binop (GET_MODE (value), code, temp,
5882 force_operand (XEXP (XEXP (value,
5883 0), 1), 0),
5884 target, 0, OPTAB_LIB_WIDEN);
5887 op1 = force_operand (XEXP (value, 0), subtarget);
5888 op2 = force_operand (op2, NULL_RTX);
5889 switch (code)
5891 case MULT:
5892 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5893 case DIV:
5894 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5895 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5896 target, 1, OPTAB_LIB_WIDEN);
5897 else
5898 return expand_divmod (0,
5899 FLOAT_MODE_P (GET_MODE (value))
5900 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5901 GET_MODE (value), op1, op2, target, 0);
5902 break;
5903 case MOD:
5904 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5905 target, 0);
5906 break;
5907 case UDIV:
5908 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5909 target, 1);
5910 break;
5911 case UMOD:
5912 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5913 target, 1);
5914 break;
5915 case ASHIFTRT:
5916 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5917 target, 0, OPTAB_LIB_WIDEN);
5918 break;
5919 default:
5920 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5921 target, 1, OPTAB_LIB_WIDEN);
5924 if (GET_RTX_CLASS (code) == '1')
5926 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5927 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5930 #ifdef INSN_SCHEDULING
5931 /* On machines that have insn scheduling, we want all memory references to be
5932 explicit, so we need to deal with such paradoxical SUBREGs. */
5933 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5934 && (GET_MODE_SIZE (GET_MODE (value))
5935 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5936 value
5937 = simplify_gen_subreg (GET_MODE (value),
5938 force_reg (GET_MODE (SUBREG_REG (value)),
5939 force_operand (SUBREG_REG (value),
5940 NULL_RTX)),
5941 GET_MODE (SUBREG_REG (value)),
5942 SUBREG_BYTE (value));
5943 #endif
5945 return value;
5948 /* Subroutine of expand_expr: return nonzero iff there is no way that
5949 EXP can reference X, which is being modified. TOP_P is nonzero if this
5950 call is going to be used to determine whether we need a temporary
5951 for EXP, as opposed to a recursive call to this function.
5953 It is always safe for this routine to return zero since it merely
5954 searches for optimization opportunities. */
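/* Editorial sketch, not from the original source: a caller deciding whether
   TARGET can be reused while expanding EXP typically writes something like
   the pattern used later in this file:

     if (target == 0 || ! safe_from_p (target, exp, 1))
       target = gen_reg_rtx (mode);

   i.e. fall back to a fresh pseudo whenever EXP might reference TARGET.  */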
5957 safe_from_p (x, exp, top_p)
5958 rtx x;
5959 tree exp;
5960 int top_p;
5962 rtx exp_rtl = 0;
5963 int i, nops;
5964 static tree save_expr_list;
5966 if (x == 0
5967 /* If EXP has varying size, we MUST use a target since we currently
5968 have no way of allocating temporaries of variable size
5969 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5970 So we assume here that something at a higher level has prevented a
5971 clash. This is somewhat bogus, but the best we can do. Only
5972 do this when X is BLKmode and when we are at the top level. */
5973 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5974 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5975 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5976 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5977 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5978 != INTEGER_CST)
5979 && GET_MODE (x) == BLKmode)
5980 /* If X is in the outgoing argument area, it is always safe. */
5981 || (GET_CODE (x) == MEM
5982 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5983 || (GET_CODE (XEXP (x, 0)) == PLUS
5984 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5985 return 1;
5987 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5988 find the underlying pseudo. */
5989 if (GET_CODE (x) == SUBREG)
5991 x = SUBREG_REG (x);
5992 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5993 return 0;
5996 /* A SAVE_EXPR might appear many times in the expression passed to the
5997 top-level safe_from_p call, and if it has a complex subexpression,
5998 examining it multiple times could result in a combinatorial explosion.
5999 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6000 with optimization took about 28 minutes to compile -- even though it was
6001 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6002 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6003 we have processed. Note that the only test of top_p was above. */
6005 if (top_p)
6007 int rtn;
6008 tree t;
6010 save_expr_list = 0;
6012 rtn = safe_from_p (x, exp, 0);
6014 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6015 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6017 return rtn;
6020 /* Now look at our tree code and possibly recurse. */
6021 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6023 case 'd':
6024 exp_rtl = DECL_RTL_IF_SET (exp);
6025 break;
6027 case 'c':
6028 return 1;
6030 case 'x':
6031 if (TREE_CODE (exp) == TREE_LIST)
6032 return ((TREE_VALUE (exp) == 0
6033 || safe_from_p (x, TREE_VALUE (exp), 0))
6034 && (TREE_CHAIN (exp) == 0
6035 || safe_from_p (x, TREE_CHAIN (exp), 0)));
6036 else if (TREE_CODE (exp) == ERROR_MARK)
6037 return 1; /* An already-visited SAVE_EXPR? */
6038 else
6039 return 0;
6041 case '1':
6042 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6044 case '2':
6045 case '<':
6046 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6047 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
6049 case 'e':
6050 case 'r':
6051 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6052 the expression. If it is set, we conflict iff we are that rtx or
6053 both are in memory. Otherwise, we check all operands of the
6054 expression recursively. */
6056 switch (TREE_CODE (exp))
6058 case ADDR_EXPR:
6059 /* If the operand is static or we are static, we can't conflict.
6060 Likewise if we don't conflict with the operand at all. */
6061 if (staticp (TREE_OPERAND (exp, 0))
6062 || TREE_STATIC (exp)
6063 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6064 return 1;
6066 /* Otherwise, the only way this can conflict is if we are taking
6067 the address of a DECL and that address is part of X, which is
6068 very rare. */
6069 exp = TREE_OPERAND (exp, 0);
6070 if (DECL_P (exp))
6072 if (!DECL_RTL_SET_P (exp)
6073 || GET_CODE (DECL_RTL (exp)) != MEM)
6074 return 0;
6075 else
6076 exp_rtl = XEXP (DECL_RTL (exp), 0);
6078 break;
6080 case INDIRECT_REF:
6081 if (GET_CODE (x) == MEM
6082 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6083 get_alias_set (exp)))
6084 return 0;
6085 break;
6087 case CALL_EXPR:
6088 /* Assume that the call will clobber all hard registers and
6089 all of memory. */
6090 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6091 || GET_CODE (x) == MEM)
6092 return 0;
6093 break;
6095 case RTL_EXPR:
6096 /* If a sequence exists, we would have to scan every instruction
6097 in the sequence to see if it was safe. This is probably not
6098 worthwhile. */
6099 if (RTL_EXPR_SEQUENCE (exp))
6100 return 0;
6102 exp_rtl = RTL_EXPR_RTL (exp);
6103 break;
6105 case WITH_CLEANUP_EXPR:
6106 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6107 break;
6109 case CLEANUP_POINT_EXPR:
6110 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6112 case SAVE_EXPR:
6113 exp_rtl = SAVE_EXPR_RTL (exp);
6114 if (exp_rtl)
6115 break;
6117 /* If we've already scanned this, don't do it again. Otherwise,
6118 show we've scanned it and record for clearing the flag if we're
6119 going on. */
6120 if (TREE_PRIVATE (exp))
6121 return 1;
6123 TREE_PRIVATE (exp) = 1;
6124 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6126 TREE_PRIVATE (exp) = 0;
6127 return 0;
6130 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6131 return 1;
6133 case BIND_EXPR:
6134 /* The only operand we look at is operand 1. The rest aren't
6135 part of the expression. */
6136 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6138 case METHOD_CALL_EXPR:
6139 /* This takes an rtx argument, but shouldn't appear here. */
6140 abort ();
6142 default:
6143 break;
6146 /* If we have an rtx, we do not need to scan our operands. */
6147 if (exp_rtl)
6148 break;
6150 nops = first_rtl_op (TREE_CODE (exp));
6151 for (i = 0; i < nops; i++)
6152 if (TREE_OPERAND (exp, i) != 0
6153 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6154 return 0;
6156 /* If this is a language-specific tree code, it may require
6157 special handling. */
6158 if ((unsigned int) TREE_CODE (exp)
6159 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6160 && !(*lang_hooks.safe_from_p) (x, exp))
6161 return 0;
6164 /* If we have an rtl, find any enclosed object. Then see if we conflict
6165 with it. */
6166 if (exp_rtl)
6168 if (GET_CODE (exp_rtl) == SUBREG)
6170 exp_rtl = SUBREG_REG (exp_rtl);
6171 if (GET_CODE (exp_rtl) == REG
6172 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6173 return 0;
6176 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6177 are memory and they conflict. */
6178 return ! (rtx_equal_p (x, exp_rtl)
6179 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6180 && true_dependence (exp_rtl, VOIDmode, x,
6181 rtx_addr_varies_p)));
6184 /* If we reach here, it is safe. */
6185 return 1;
6188 /* Subroutine of expand_expr: return rtx if EXP is a
6189 variable or parameter; else return 0. */
6191 static rtx
6192 var_rtx (exp)
6193 tree exp;
6195 STRIP_NOPS (exp);
6196 switch (TREE_CODE (exp))
6198 case PARM_DECL:
6199 case VAR_DECL:
6200 return DECL_RTL (exp);
6201 default:
6202 return 0;
6206 #ifdef MAX_INTEGER_COMPUTATION_MODE
6208 void
6209 check_max_integer_computation_mode (exp)
6210 tree exp;
6212 enum tree_code code;
6213 enum machine_mode mode;
6215 /* Strip any NOPs that don't change the mode. */
6216 STRIP_NOPS (exp);
6217 code = TREE_CODE (exp);
6219 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6220 if (code == NOP_EXPR
6221 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6222 return;
6224 /* First check the type of the overall operation. We need only look at
6225 unary, binary and relational operations. */
6226 if (TREE_CODE_CLASS (code) == '1'
6227 || TREE_CODE_CLASS (code) == '2'
6228 || TREE_CODE_CLASS (code) == '<')
6230 mode = TYPE_MODE (TREE_TYPE (exp));
6231 if (GET_MODE_CLASS (mode) == MODE_INT
6232 && mode > MAX_INTEGER_COMPUTATION_MODE)
6233 internal_error ("unsupported wide integer operation");
6236 /* Check operand of a unary op. */
6237 if (TREE_CODE_CLASS (code) == '1')
6239 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6240 if (GET_MODE_CLASS (mode) == MODE_INT
6241 && mode > MAX_INTEGER_COMPUTATION_MODE)
6242 internal_error ("unsupported wide integer operation");
6245 /* Check operands of a binary/comparison op. */
6246 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6248 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6249 if (GET_MODE_CLASS (mode) == MODE_INT
6250 && mode > MAX_INTEGER_COMPUTATION_MODE)
6251 internal_error ("unsupported wide integer operation");
6253 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6254 if (GET_MODE_CLASS (mode) == MODE_INT
6255 && mode > MAX_INTEGER_COMPUTATION_MODE)
6256 internal_error ("unsupported wide integer operation");
6259 #endif
6261 /* Return the highest power of two that EXP is known to be a multiple of.
6262 This is used in updating alignment of MEMs in array references. */
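/* Editorial examples, not part of the original source: for the constant 24
   the result is 8 (the lowest set bit); for a MULT_EXPR the factors
   multiply, so 24 * i yields 8 as well; for a PLUS_EXPR or MINUS_EXPR the
   minimum of the two operands' factors is returned.  */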
6264 static HOST_WIDE_INT
6265 highest_pow2_factor (exp)
6266 tree exp;
6268 HOST_WIDE_INT c0, c1;
6270 switch (TREE_CODE (exp))
6272 case INTEGER_CST:
6273 /* We can find the lowest bit that's a one. If the low
6274 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6275 We need to handle this case since we can find it in a COND_EXPR,
6276 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6277 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6278 later ICE. */
6279 if (TREE_CONSTANT_OVERFLOW (exp))
6280 return BIGGEST_ALIGNMENT;
6281 else
6283 /* Note: tree_low_cst is intentionally not used here,
6284 we don't care about the upper bits. */
6285 c0 = TREE_INT_CST_LOW (exp);
6286 c0 &= -c0;
6287 return c0 ? c0 : BIGGEST_ALIGNMENT;
6289 break;
6291 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6292 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6293 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6294 return MIN (c0, c1);
6296 case MULT_EXPR:
6297 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6298 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6299 return c0 * c1;
6301 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6302 case CEIL_DIV_EXPR:
6303 if (integer_pow2p (TREE_OPERAND (exp, 1))
6304 && host_integerp (TREE_OPERAND (exp, 1), 1))
6306 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6307 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6308 return MAX (1, c0 / c1);
6310 break;
6312 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6313 case SAVE_EXPR: case WITH_RECORD_EXPR:
6314 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6316 case COMPOUND_EXPR:
6317 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6319 case COND_EXPR:
6320 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6321 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6322 return MIN (c0, c1);
6324 default:
6325 break;
6328 return 1;
6331 /* Similar, except that it is known that the expression must be a multiple
6332 of the alignment of TYPE. */
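/* Editorial note, not part of the original source: e.g. if EXP is known
   only to be a multiple of 2 but TYPE is aligned to 8 bytes, the result
   below is 8.  */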
6334 static HOST_WIDE_INT
6335 highest_pow2_factor_for_type (type, exp)
6336 tree type;
6337 tree exp;
6339 HOST_WIDE_INT type_align, factor;
6341 factor = highest_pow2_factor (exp);
6342 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6343 return MAX (factor, type_align);
6346 /* Return an object on the placeholder list that matches EXP, a
6347 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6348 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6349 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6350 is a location which initially points to a starting location in the
6351 placeholder list (zero means start of the list) and where a pointer into
6352 the placeholder list at which the object is found is placed. */
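/* Editorial sketch, not from the original source: a typical use, as in the
   PLACEHOLDER_EXPR handling elsewhere in this file, is

     tree placeholder_expr = 0;
     tree object = find_placeholder (exp, &placeholder_expr);
     if (object == 0)
       abort ();

   where EXP is the PLACEHOLDER_EXPR being replaced.  */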
6354 tree
6355 find_placeholder (exp, plist)
6356 tree exp;
6357 tree *plist;
6359 tree type = TREE_TYPE (exp);
6360 tree placeholder_expr;
6362 for (placeholder_expr
6363 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6364 placeholder_expr != 0;
6365 placeholder_expr = TREE_CHAIN (placeholder_expr))
6367 tree need_type = TYPE_MAIN_VARIANT (type);
6368 tree elt;
6370 /* Find the outermost reference that is of the type we want. If none,
6371 see if any object has a type that is a pointer to the type we
6372 want. */
6373 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6374 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6375 || TREE_CODE (elt) == COND_EXPR)
6376 ? TREE_OPERAND (elt, 1)
6377 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6378 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6379 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6380 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6381 ? TREE_OPERAND (elt, 0) : 0))
6382 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6384 if (plist)
6385 *plist = placeholder_expr;
6386 return elt;
6389 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6391 = ((TREE_CODE (elt) == COMPOUND_EXPR
6392 || TREE_CODE (elt) == COND_EXPR)
6393 ? TREE_OPERAND (elt, 1)
6394 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6395 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6396 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6397 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6398 ? TREE_OPERAND (elt, 0) : 0))
6399 if (POINTER_TYPE_P (TREE_TYPE (elt))
6400 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6401 == need_type))
6403 if (plist)
6404 *plist = placeholder_expr;
6405 return build1 (INDIRECT_REF, need_type, elt);
6409 return 0;
6412 /* expand_expr: generate code for computing expression EXP.
6413 An rtx for the computed value is returned. The value is never null.
6414 In the case of a void EXP, const0_rtx is returned.
6416 The value may be stored in TARGET if TARGET is nonzero.
6417 TARGET is just a suggestion; callers must assume that
6418 the rtx returned may not be the same as TARGET.
6420 If TARGET is CONST0_RTX, it means that the value will be ignored.
6422 If TMODE is not VOIDmode, it suggests generating the
6423 result in mode TMODE. But this is done only when convenient.
6424 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6425 TMODE is just a suggestion; callers must assume that
6426 the rtx returned may not have mode TMODE.
6428 Note that TARGET may have neither TMODE nor MODE. In that case, it
6429 probably will not be used.
6431 If MODIFIER is EXPAND_SUM then when EXP is an addition
6432 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6433 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6434 products as above, or REG or MEM, or constant.
6435 Ordinarily in such cases we would output mul or add instructions
6436 and then return a pseudo reg containing the sum.
6438 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6439 it also marks a label as absolutely required (it can't be dead).
6440 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6441 This is used for outputting expressions used in initializers.
6443 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6444 with a constant address even if that address is not normally legitimate.
6445 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6447 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6448 a call parameter. Such targets require special care as we haven't yet
6449 marked TARGET so that it's safe from being trashed by libcalls. We
6450 don't want to use TARGET for anything but the final result;
6451 Intermediate values must go elsewhere. Additionally, calls to
6452 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
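/* Editorial sketch, not part of the original source: the most common way
   this routine is invoked elsewhere in the compiler is simply

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   letting the expression choose its own mode and target, or, when a
   preferred destination exists,

     rtx val = expand_expr (exp, target, mode, EXPAND_NORMAL);

   with the caller still prepared for VAL to differ from TARGET.  */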
6455 expand_expr (exp, target, tmode, modifier)
6456 tree exp;
6457 rtx target;
6458 enum machine_mode tmode;
6459 enum expand_modifier modifier;
6461 rtx op0, op1, temp;
6462 tree type = TREE_TYPE (exp);
6463 int unsignedp = TREE_UNSIGNED (type);
6464 enum machine_mode mode;
6465 enum tree_code code = TREE_CODE (exp);
6466 optab this_optab;
6467 rtx subtarget, original_target;
6468 int ignore;
6469 tree context;
6471 /* Handle ERROR_MARK before anybody tries to access its type. */
6472 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6474 op0 = CONST0_RTX (tmode);
6475 if (op0 != 0)
6476 return op0;
6477 return const0_rtx;
6480 mode = TYPE_MODE (type);
6481 /* Use subtarget as the target for operand 0 of a binary operation. */
6482 subtarget = get_subtarget (target);
6483 original_target = target;
6484 ignore = (target == const0_rtx
6485 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6486 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6487 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6488 && TREE_CODE (type) == VOID_TYPE));
6490 /* If we are going to ignore this result, we need only do something
6491 if there is a side-effect somewhere in the expression. If there
6492 is, short-circuit the most common cases here. Note that we must
6493 not call expand_expr with anything but const0_rtx in case this
6494 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6496 if (ignore)
6498 if (! TREE_SIDE_EFFECTS (exp))
6499 return const0_rtx;
6501 /* Ensure we reference a volatile object even if value is ignored, but
6502 don't do this if all we are doing is taking its address. */
6503 if (TREE_THIS_VOLATILE (exp)
6504 && TREE_CODE (exp) != FUNCTION_DECL
6505 && mode != VOIDmode && mode != BLKmode
6506 && modifier != EXPAND_CONST_ADDRESS)
6508 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6509 if (GET_CODE (temp) == MEM)
6510 temp = copy_to_reg (temp);
6511 return const0_rtx;
6514 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6515 || code == INDIRECT_REF || code == BUFFER_REF)
6516 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6517 modifier);
6519 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6520 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6522 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6523 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6524 return const0_rtx;
6526 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6527 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6528 /* If the second operand has no side effects, just evaluate
6529 the first. */
6530 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6531 modifier);
6532 else if (code == BIT_FIELD_REF)
6534 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6535 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6536 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6537 return const0_rtx;
6540 target = 0;
6543 #ifdef MAX_INTEGER_COMPUTATION_MODE
6544 /* Only check stuff here if the mode we want is different from the mode
6545 of the expression; if it's the same, check_max_integer_computation_mode
6546 will handle it. Do we really need to check this stuff at all? */
6548 if (target
6549 && GET_MODE (target) != mode
6550 && TREE_CODE (exp) != INTEGER_CST
6551 && TREE_CODE (exp) != PARM_DECL
6552 && TREE_CODE (exp) != ARRAY_REF
6553 && TREE_CODE (exp) != ARRAY_RANGE_REF
6554 && TREE_CODE (exp) != COMPONENT_REF
6555 && TREE_CODE (exp) != BIT_FIELD_REF
6556 && TREE_CODE (exp) != INDIRECT_REF
6557 && TREE_CODE (exp) != CALL_EXPR
6558 && TREE_CODE (exp) != VAR_DECL
6559 && TREE_CODE (exp) != RTL_EXPR)
6561 enum machine_mode mode = GET_MODE (target);
6563 if (GET_MODE_CLASS (mode) == MODE_INT
6564 && mode > MAX_INTEGER_COMPUTATION_MODE)
6565 internal_error ("unsupported wide integer operation");
6568 if (tmode != mode
6569 && TREE_CODE (exp) != INTEGER_CST
6570 && TREE_CODE (exp) != PARM_DECL
6571 && TREE_CODE (exp) != ARRAY_REF
6572 && TREE_CODE (exp) != ARRAY_RANGE_REF
6573 && TREE_CODE (exp) != COMPONENT_REF
6574 && TREE_CODE (exp) != BIT_FIELD_REF
6575 && TREE_CODE (exp) != INDIRECT_REF
6576 && TREE_CODE (exp) != VAR_DECL
6577 && TREE_CODE (exp) != CALL_EXPR
6578 && TREE_CODE (exp) != RTL_EXPR
6579 && GET_MODE_CLASS (tmode) == MODE_INT
6580 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6581 internal_error ("unsupported wide integer operation");
6583 check_max_integer_computation_mode (exp);
6584 #endif
6586 /* If we will do cse, generate all results into pseudo registers
6587 since 1) that allows cse to find more things
6588 and 2) otherwise cse could produce an insn the machine
6589 cannot support. An exception is a CONSTRUCTOR into a multi-word
6590 MEM: that's much more likely to be most efficient into the MEM.
6591 Another is a CALL_EXPR which must return in memory. */
6593 if (! cse_not_expected && mode != BLKmode && target
6594 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6595 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6596 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6597 target = 0;
6599 switch (code)
6601 case LABEL_DECL:
6603 tree function = decl_function_context (exp);
6604 /* Handle using a label in a containing function. */
6605 if (function != current_function_decl
6606 && function != inline_function_decl && function != 0)
6608 struct function *p = find_function_data (function);
6609 p->expr->x_forced_labels
6610 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6611 p->expr->x_forced_labels);
6613 else
6615 if (modifier == EXPAND_INITIALIZER)
6616 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6617 label_rtx (exp),
6618 forced_labels);
6621 temp = gen_rtx_MEM (FUNCTION_MODE,
6622 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6623 if (function != current_function_decl
6624 && function != inline_function_decl && function != 0)
6625 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6626 return temp;
6629 case PARM_DECL:
6630 if (!DECL_RTL_SET_P (exp))
6632 error_with_decl (exp, "prior parameter's size depends on `%s'");
6633 return CONST0_RTX (mode);
6636 /* ... fall through ... */
6638 case VAR_DECL:
6639 /* If a static var's type was incomplete when the decl was written,
6640 but the type is complete now, lay out the decl now. */
6641 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6642 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6644 rtx value = DECL_RTL_IF_SET (exp);
6646 layout_decl (exp, 0);
6648 /* If the RTL was already set, update its mode and memory
6649 attributes. */
6650 if (value != 0)
6652 PUT_MODE (value, DECL_MODE (exp));
6653 SET_DECL_RTL (exp, 0);
6654 set_mem_attributes (value, exp, 1);
6655 SET_DECL_RTL (exp, value);
6659 /* ... fall through ... */
6661 case FUNCTION_DECL:
6662 case RESULT_DECL:
6663 if (DECL_RTL (exp) == 0)
6664 abort ();
6666 /* Ensure the variable is marked as used even if it doesn't go through
6667 a parser. If it hasn't been used yet, write out an external
6668 definition. */
6669 if (! TREE_USED (exp))
6671 assemble_external (exp);
6672 TREE_USED (exp) = 1;
6675 /* Show we haven't gotten RTL for this yet. */
6676 temp = 0;
6678 /* Handle variables inherited from containing functions. */
6679 context = decl_function_context (exp);
6681 /* We treat inline_function_decl as an alias for the current function
6682 because that is the inline function whose vars, types, etc.
6683 are being merged into the current function.
6684 See expand_inline_function. */
6686 if (context != 0 && context != current_function_decl
6687 && context != inline_function_decl
6688 /* If var is static, we don't need a static chain to access it. */
6689 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6690 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6692 rtx addr;
6694 /* Mark as non-local and addressable. */
6695 DECL_NONLOCAL (exp) = 1;
6696 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6697 abort ();
6698 (*lang_hooks.mark_addressable) (exp);
6699 if (GET_CODE (DECL_RTL (exp)) != MEM)
6700 abort ();
6701 addr = XEXP (DECL_RTL (exp), 0);
6702 if (GET_CODE (addr) == MEM)
6703 addr
6704 = replace_equiv_address (addr,
6705 fix_lexical_addr (XEXP (addr, 0), exp));
6706 else
6707 addr = fix_lexical_addr (addr, exp);
6709 temp = replace_equiv_address (DECL_RTL (exp), addr);
6712 /* This is the case of an array whose size is to be determined
6713 from its initializer, while the initializer is still being parsed.
6714 See expand_decl. */
6716 else if (GET_CODE (DECL_RTL (exp)) == MEM
6717 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6718 temp = validize_mem (DECL_RTL (exp));
6720 /* If DECL_RTL is memory, we are in the normal case: if either
6721 the address is not valid, or it is not a register and -fforce-addr
6722 is specified, get the address into a register. */
6724 else if (GET_CODE (DECL_RTL (exp)) == MEM
6725 && modifier != EXPAND_CONST_ADDRESS
6726 && modifier != EXPAND_SUM
6727 && modifier != EXPAND_INITIALIZER
6728 && (! memory_address_p (DECL_MODE (exp),
6729 XEXP (DECL_RTL (exp), 0))
6730 || (flag_force_addr
6731 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6732 temp = replace_equiv_address (DECL_RTL (exp),
6733 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6735 /* If we got something, return it. But first, set the alignment
6736 if the address is a register. */
6737 if (temp != 0)
6739 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6740 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6742 return temp;
6745 /* If the mode of DECL_RTL does not match that of the decl, it
6746 must be a promoted value. We return a SUBREG of the wanted mode,
6747 but mark it so that we know that it was already extended. */
6749 if (GET_CODE (DECL_RTL (exp)) == REG
6750 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6752 /* Get the signedness used for this variable. Ensure we get the
6753 same mode we got when the variable was declared. */
6754 if (GET_MODE (DECL_RTL (exp))
6755 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6756 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6757 abort ();
6759 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6760 SUBREG_PROMOTED_VAR_P (temp) = 1;
6761 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6762 return temp;
6765 return DECL_RTL (exp);
6767 case INTEGER_CST:
6768 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6769 TREE_INT_CST_HIGH (exp), mode);
6771 /* ??? If overflow is set, fold will have done an incomplete job,
6772 which can result in (plus xx (const_int 0)), which can get
6773 simplified by validate_replace_rtx during virtual register
6774 instantiation, which can result in unrecognizable insns.
6775 Avoid this by forcing all overflows into registers. */
6776 if (TREE_CONSTANT_OVERFLOW (exp)
6777 && modifier != EXPAND_INITIALIZER)
6778 temp = force_reg (mode, temp);
6780 return temp;
6782 case CONST_DECL:
6783 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6785 case REAL_CST:
6786 /* If optimized, generate immediate CONST_DOUBLE
6787 which will be turned into memory by reload if necessary.
6789 We used to force a register so that loop.c could see it. But
6790 this does not allow gen_* patterns to perform optimizations with
6791 the constants. It also produces two insns in cases like "x = 1.0;".
6792 On most machines, floating-point constants are not permitted in
6793 many insns, so we'd end up copying it to a register in any case.
6795 Now, we do the copying in expand_binop, if appropriate. */
6796 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6797 TYPE_MODE (TREE_TYPE (exp)));
6799 case COMPLEX_CST:
6800 case STRING_CST:
6801 if (! TREE_CST_RTL (exp))
6802 output_constant_def (exp, 1);
6804 /* TREE_CST_RTL probably contains a constant address.
6805 On RISC machines where a constant address isn't valid,
6806 make some insns to get that address into a register. */
6807 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6808 && modifier != EXPAND_CONST_ADDRESS
6809 && modifier != EXPAND_INITIALIZER
6810 && modifier != EXPAND_SUM
6811 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6812 || (flag_force_addr
6813 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6814 return replace_equiv_address (TREE_CST_RTL (exp),
6815 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6816 return TREE_CST_RTL (exp);
6818 case EXPR_WITH_FILE_LOCATION:
6820 rtx to_return;
6821 const char *saved_input_filename = input_filename;
6822 int saved_lineno = lineno;
6823 input_filename = EXPR_WFL_FILENAME (exp);
6824 lineno = EXPR_WFL_LINENO (exp);
6825 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6826 emit_line_note (input_filename, lineno);
6827 /* Possibly avoid switching back and forth here. */
6828 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6829 input_filename = saved_input_filename;
6830 lineno = saved_lineno;
6831 return to_return;
6834 case SAVE_EXPR:
6835 context = decl_function_context (exp);
6837 /* If this SAVE_EXPR was at global context, assume we are an
6838 initialization function and move it into our context. */
6839 if (context == 0)
6840 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6842 /* We treat inline_function_decl as an alias for the current function
6843 because that is the inline function whose vars, types, etc.
6844 are being merged into the current function.
6845 See expand_inline_function. */
6846 if (context == current_function_decl || context == inline_function_decl)
6847 context = 0;
6849 /* If this is non-local, handle it. */
6850 if (context)
6852 /* The following call just exists to abort if the context is
6853 not of a containing function. */
6854 find_function_data (context);
6856 temp = SAVE_EXPR_RTL (exp);
6857 if (temp && GET_CODE (temp) == REG)
6859 put_var_into_stack (exp);
6860 temp = SAVE_EXPR_RTL (exp);
6862 if (temp == 0 || GET_CODE (temp) != MEM)
6863 abort ();
6864 return
6865 replace_equiv_address (temp,
6866 fix_lexical_addr (XEXP (temp, 0), exp));
6868 if (SAVE_EXPR_RTL (exp) == 0)
6870 if (mode == VOIDmode)
6871 temp = const0_rtx;
6872 else
6873 temp = assign_temp (build_qualified_type (type,
6874 (TYPE_QUALS (type)
6875 | TYPE_QUAL_CONST)),
6876 3, 0, 0);
6878 SAVE_EXPR_RTL (exp) = temp;
6879 if (!optimize && GET_CODE (temp) == REG)
6880 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6881 save_expr_regs);
6883 /* If the mode of TEMP does not match that of the expression, it
6884 must be a promoted value. We pass store_expr a SUBREG of the
6885 wanted mode but mark it so that we know that it was already
6886 extended. Note that `unsignedp' was modified above in
6887 this case. */
6889 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6891 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6892 SUBREG_PROMOTED_VAR_P (temp) = 1;
6893 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6896 if (temp == const0_rtx)
6897 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6898 else
6899 store_expr (TREE_OPERAND (exp, 0), temp,
6900 modifier == EXPAND_STACK_PARM ? 2 : 0);
6902 TREE_USED (exp) = 1;
6905 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6906 must be a promoted value. We return a SUBREG of the wanted mode,
6907 but mark it so that we know that it was already extended. */
6909 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6910 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6912 /* Compute the signedness and make the proper SUBREG. */
6913 promote_mode (type, mode, &unsignedp, 0);
6914 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6915 SUBREG_PROMOTED_VAR_P (temp) = 1;
6916 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6917 return temp;
6920 return SAVE_EXPR_RTL (exp);
6922 case UNSAVE_EXPR:
6924 rtx temp;
6925 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6926 TREE_OPERAND (exp, 0)
6927 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6928 return temp;
6931 case PLACEHOLDER_EXPR:
6933 tree old_list = placeholder_list;
6934 tree placeholder_expr = 0;
6936 exp = find_placeholder (exp, &placeholder_expr);
6937 if (exp == 0)
6938 abort ();
6940 placeholder_list = TREE_CHAIN (placeholder_expr);
6941 temp = expand_expr (exp, original_target, tmode, modifier);
6942 placeholder_list = old_list;
6943 return temp;
6946 case WITH_RECORD_EXPR:
6947 /* Put the object on the placeholder list, expand our first operand,
6948 and pop the list. */
6949 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6950 placeholder_list);
6951 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6952 modifier);
6953 placeholder_list = TREE_CHAIN (placeholder_list);
6954 return target;
6956 case GOTO_EXPR:
6957 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6958 expand_goto (TREE_OPERAND (exp, 0));
6959 else
6960 expand_computed_goto (TREE_OPERAND (exp, 0));
6961 return const0_rtx;
6963 case EXIT_EXPR:
6964 expand_exit_loop_if_false (NULL,
6965 invert_truthvalue (TREE_OPERAND (exp, 0)));
6966 return const0_rtx;
6968 case LABELED_BLOCK_EXPR:
6969 if (LABELED_BLOCK_BODY (exp))
6970 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6971 /* Should perhaps use expand_label, but this is simpler and safer. */
6972 do_pending_stack_adjust ();
6973 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6974 return const0_rtx;
6976 case EXIT_BLOCK_EXPR:
6977 if (EXIT_BLOCK_RETURN (exp))
6978 sorry ("returned value in block_exit_expr");
6979 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6980 return const0_rtx;
6982 case LOOP_EXPR:
6983 push_temp_slots ();
6984 expand_start_loop (1);
6985 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6986 expand_end_loop ();
6987 pop_temp_slots ();
6989 return const0_rtx;
6991 case BIND_EXPR:
6993 tree vars = TREE_OPERAND (exp, 0);
6995 /* Need to open a binding contour here because
6996 if there are any cleanups they must be contained here. */
6997 expand_start_bindings (2);
6999 /* Mark the corresponding BLOCK for output in its proper place. */
7000 if (TREE_OPERAND (exp, 2) != 0
7001 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7002 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7004 /* If VARS have not yet been expanded, expand them now. */
7005 while (vars)
7007 if (!DECL_RTL_SET_P (vars))
7008 expand_decl (vars);
7009 expand_decl_init (vars);
7010 vars = TREE_CHAIN (vars);
7013 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7015 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7017 return temp;
7020 case RTL_EXPR:
7021 if (RTL_EXPR_SEQUENCE (exp))
7023 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7024 abort ();
7025 emit_insn (RTL_EXPR_SEQUENCE (exp));
7026 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7028 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7029 free_temps_for_rtl_expr (exp);
7030 return RTL_EXPR_RTL (exp);
7032 case CONSTRUCTOR:
7033 /* If we don't need the result, just ensure we evaluate any
7034 subexpressions. */
7035 if (ignore)
7037 tree elt;
7039 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7040 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7042 return const0_rtx;
7045 /* All elts simple constants => refer to a constant in memory. But
7046 if this is a non-BLKmode mode, let it store a field at a time
7047 since that should make a CONST_INT or CONST_DOUBLE when we
7048 fold. Likewise, if we have a target we can use, it is best to
7049 store directly into the target unless the type is large enough
7050 that memcpy will be used. If we are making an initializer and
7051 all operands are constant, put it in memory as well.
7053 FIXME: Avoid trying to fill vector constructors piece-meal.
7054 Output them with output_constant_def below unless we're sure
7055 they're zeros. This should go away when vector initializers
7056 are treated like VECTOR_CST instead of arrays. */
7058 else if ((TREE_STATIC (exp)
7059 && ((mode == BLKmode
7060 && ! (target != 0 && safe_from_p (target, exp, 1)))
7061 || TREE_ADDRESSABLE (exp)
7062 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7063 && (! MOVE_BY_PIECES_P
7064 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7065 TYPE_ALIGN (type)))
7066 && ((TREE_CODE (type) == VECTOR_TYPE
7067 && !is_zeros_p (exp))
7068 || ! mostly_zeros_p (exp)))))
7069 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7071 rtx constructor = output_constant_def (exp, 1);
7073 if (modifier != EXPAND_CONST_ADDRESS
7074 && modifier != EXPAND_INITIALIZER
7075 && modifier != EXPAND_SUM)
7076 constructor = validize_mem (constructor);
7078 return constructor;
7080 else
7082 /* Handle calls that pass values in multiple non-contiguous
7083 locations. The Irix 6 ABI has examples of this. */
7084 if (target == 0 || ! safe_from_p (target, exp, 1)
7085 || GET_CODE (target) == PARALLEL
7086 || modifier == EXPAND_STACK_PARM)
7087 target
7088 = assign_temp (build_qualified_type (type,
7089 (TYPE_QUALS (type)
7090 | (TREE_READONLY (exp)
7091 * TYPE_QUAL_CONST))),
7092 0, TREE_ADDRESSABLE (exp), 1);
7094 store_constructor (exp, target, 0, int_expr_size (exp));
7095 return target;
7098 case INDIRECT_REF:
7100 tree exp1 = TREE_OPERAND (exp, 0);
7101 tree index;
7102 tree string = string_constant (exp1, &index);
7104 /* Try to optimize reads from const strings. */
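/* Editorial example, not part of the original source: a read such as
   *("abc" + 1) with a QImode result is folded right here to
   (const_int 98), i.e. 'b', without ever forming a MEM.  */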
7105 if (string
7106 && TREE_CODE (string) == STRING_CST
7107 && TREE_CODE (index) == INTEGER_CST
7108 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7109 && GET_MODE_CLASS (mode) == MODE_INT
7110 && GET_MODE_SIZE (mode) == 1
7111 && modifier != EXPAND_WRITE)
7112 return gen_int_mode (TREE_STRING_POINTER (string)
7113 [TREE_INT_CST_LOW (index)], mode);
7115 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7116 op0 = memory_address (mode, op0);
7117 temp = gen_rtx_MEM (mode, op0);
7118 set_mem_attributes (temp, exp, 0);
7120 /* If we are writing to this object and its type is a record with
7121 readonly fields, we must mark it as readonly so it will
7122 conflict with readonly references to those fields. */
7123 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7124 RTX_UNCHANGING_P (temp) = 1;
7126 return temp;
7129 case ARRAY_REF:
7130 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7131 abort ();
7134 tree array = TREE_OPERAND (exp, 0);
7135 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7136 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7137 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7138 HOST_WIDE_INT i;
7140 /* Optimize the special-case of a zero lower bound.
7142 We convert the low_bound to sizetype to avoid some problems
7143 with constant folding. (E.g. suppose the lower bound is 1,
7144 and its mode is QI. Without the conversion, (ARRAY
7145 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7146 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7148 if (! integer_zerop (low_bound))
7149 index = size_diffop (index, convert (sizetype, low_bound));
7151 /* Fold an expression like: "foo"[2].
7152 This is not done in fold so it won't happen inside &.
7153 Don't fold if this is for wide characters since it's too
7154 difficult to do correctly and this is a very rare case. */
7156 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7157 && TREE_CODE (array) == STRING_CST
7158 && TREE_CODE (index) == INTEGER_CST
7159 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7160 && GET_MODE_CLASS (mode) == MODE_INT
7161 && GET_MODE_SIZE (mode) == 1)
7162 return gen_int_mode (TREE_STRING_POINTER (array)
7163 [TREE_INT_CST_LOW (index)], mode);
7165 /* If this is a constant index into a constant array,
7166 just get the value from the array. Handle both the cases when
7167 we have an explicit constructor and when our operand is a variable
7168 that was declared const. */
7170 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7171 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7172 && TREE_CODE (index) == INTEGER_CST
7173 && 0 > compare_tree_int (index,
7174 list_length (CONSTRUCTOR_ELTS
7175 (TREE_OPERAND (exp, 0)))))
7177 tree elem;
7179 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7180 i = TREE_INT_CST_LOW (index);
7181 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7184 if (elem)
7185 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7186 modifier);
7189 else if (optimize >= 1
7190 && modifier != EXPAND_CONST_ADDRESS
7191 && modifier != EXPAND_INITIALIZER
7192 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7193 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7194 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7196 if (TREE_CODE (index) == INTEGER_CST)
7198 tree init = DECL_INITIAL (array);
7200 if (TREE_CODE (init) == CONSTRUCTOR)
7202 tree elem;
7204 for (elem = CONSTRUCTOR_ELTS (init);
7205 (elem
7206 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7207 elem = TREE_CHAIN (elem))
7210 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7211 return expand_expr (fold (TREE_VALUE (elem)), target,
7212 tmode, modifier);
7214 else if (TREE_CODE (init) == STRING_CST
7215 && 0 > compare_tree_int (index,
7216 TREE_STRING_LENGTH (init)))
7218 tree type = TREE_TYPE (TREE_TYPE (init));
7219 enum machine_mode mode = TYPE_MODE (type);
7221 if (GET_MODE_CLASS (mode) == MODE_INT
7222 && GET_MODE_SIZE (mode) == 1)
7223 return gen_int_mode (TREE_STRING_POINTER (init)
7224 [TREE_INT_CST_LOW (index)], mode);
7229 /* Fall through. */
7231 case COMPONENT_REF:
7232 case BIT_FIELD_REF:
7233 case ARRAY_RANGE_REF:
7234 /* If the operand is a CONSTRUCTOR, we can just extract the
7235 appropriate field if it is present. Don't do this if we have
7236 already written the data since we want to refer to that copy
7237 and varasm.c assumes that's what we'll do. */
7238 if (code == COMPONENT_REF
7239 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7240 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7242 tree elt;
7244 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7245 elt = TREE_CHAIN (elt))
7246 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7247 /* We can normally use the value of the field in the
7248 CONSTRUCTOR. However, if this is a bitfield in
7249 an integral mode that we can fit in a HOST_WIDE_INT,
7250 we must mask only the number of bits in the bitfield,
7251 since this is done implicitly by the constructor. If
7252 the bitfield does not meet either of those conditions,
7253 we can't do this optimization. */
7254 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7255 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7256 == MODE_INT)
7257 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7258 <= HOST_BITS_PER_WIDE_INT))))
7260 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7261 && modifier == EXPAND_STACK_PARM)
7262 target = 0;
7263 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7264 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7266 HOST_WIDE_INT bitsize
7267 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7268 enum machine_mode imode
7269 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7271 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7273 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7274 op0 = expand_and (imode, op0, op1, target);
7276 else
7278 tree count
7279 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7282 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7283 target, 0);
7284 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7285 target, 0);
7289 return op0;
7294 enum machine_mode mode1;
7295 HOST_WIDE_INT bitsize, bitpos;
7296 tree offset;
7297 int volatilep = 0;
7298 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7299 &mode1, &unsignedp, &volatilep);
7300 rtx orig_op0;
7302 /* If we got back the original object, something is wrong. Perhaps
7303 we are evaluating an expression too early. In any event, don't
7304 infinitely recurse. */
7305 if (tem == exp)
7306 abort ();
7308 /* If TEM's type is a union of variable size, pass TARGET to the inner
7309 computation, since it will need a temporary and TARGET is known
7310 to suffice for that. This occurs in unchecked conversion in Ada. */
7312 orig_op0 = op0
7313 = expand_expr (tem,
7314 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7315 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7316 != INTEGER_CST)
7317 && modifier != EXPAND_STACK_PARM
7318 ? target : NULL_RTX),
7319 VOIDmode,
7320 (modifier == EXPAND_INITIALIZER
7321 || modifier == EXPAND_CONST_ADDRESS
7322 || modifier == EXPAND_STACK_PARM)
7323 ? modifier : EXPAND_NORMAL);
7325 /* If this is a constant, put it into a register if it is a
7326 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7327 if (CONSTANT_P (op0))
7329 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7330 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7331 && offset == 0)
7332 op0 = force_reg (mode, op0);
7333 else
7334 op0 = validize_mem (force_const_mem (mode, op0));
7337 if (offset != 0)
7339 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7340 EXPAND_SUM);
7342 /* If this object is in a register, put it into memory.
7343 This case can't occur in C, but can in Ada if we have
7344 unchecked conversion of an expression from a scalar type to
7345 an array or record type. */
7346 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7347 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7349 /* If the operand is a SAVE_EXPR, we can deal with this by
7350 forcing the SAVE_EXPR into memory. */
7351 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7353 put_var_into_stack (TREE_OPERAND (exp, 0));
7354 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7356 else
7358 tree nt
7359 = build_qualified_type (TREE_TYPE (tem),
7360 (TYPE_QUALS (TREE_TYPE (tem))
7361 | TYPE_QUAL_CONST));
7362 rtx memloc = assign_temp (nt, 1, 1, 1);
7364 emit_move_insn (memloc, op0);
7365 op0 = memloc;
7369 if (GET_CODE (op0) != MEM)
7370 abort ();
7372 #ifdef POINTERS_EXTEND_UNSIGNED
7373 if (GET_MODE (offset_rtx) != Pmode)
7374 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7375 #else
7376 if (GET_MODE (offset_rtx) != ptr_mode)
7377 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7378 #endif
7380 /* A constant address in OP0 can have VOIDmode; we must not try
7381 to call force_reg in that case, so avoid it. */
7382 if (GET_CODE (op0) == MEM
7383 && GET_MODE (op0) == BLKmode
7384 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7385 && bitsize != 0
7386 && (bitpos % bitsize) == 0
7387 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7388 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7390 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7391 bitpos = 0;
7394 op0 = offset_address (op0, offset_rtx,
7395 highest_pow2_factor (offset));
7398 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7399 record its alignment as BIGGEST_ALIGNMENT. */
7400 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7401 && is_aligning_offset (offset, tem))
7402 set_mem_align (op0, BIGGEST_ALIGNMENT);
7404 /* Don't forget about volatility even if this is a bitfield. */
7405 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7407 if (op0 == orig_op0)
7408 op0 = copy_rtx (op0);
7410 MEM_VOLATILE_P (op0) = 1;
7413 /* The following code doesn't handle CONCAT.
7414 Assume only bitpos == 0 can be used for CONCAT, due to
7415 one-element arrays having the same mode as their element. */
7416 if (GET_CODE (op0) == CONCAT)
7418 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7419 abort ();
7420 return op0;
7423 /* In cases where an aligned union has an unaligned object
7424 as a field, we might be extracting a BLKmode value from
7425 an integer-mode (e.g., SImode) object. Handle this case
7426 by doing the extract into an object as wide as the field
7427 (which we know to be the width of a basic mode), then
7428 storing into memory, and changing the mode to BLKmode. */
7429 if (mode1 == VOIDmode
7430 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7431 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7432 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7433 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7434 && modifier != EXPAND_CONST_ADDRESS
7435 && modifier != EXPAND_INITIALIZER)
7436 /* If the field isn't aligned enough to fetch as a memref,
7437 fetch it as a bit field. */
7438 || (mode1 != BLKmode
7439 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7440 && ((TYPE_ALIGN (TREE_TYPE (tem))
7441 < GET_MODE_ALIGNMENT (mode))
7442 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7443 /* If the type and the field are a constant size and the
7444 size of the type isn't the same size as the bitfield,
7445 we must use bitfield operations. */
7446 || (bitsize >= 0
7447 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7448 == INTEGER_CST)
7449 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7450 bitsize)))
7452 enum machine_mode ext_mode = mode;
7454 if (ext_mode == BLKmode
7455 && ! (target != 0 && GET_CODE (op0) == MEM
7456 && GET_CODE (target) == MEM
7457 && bitpos % BITS_PER_UNIT == 0))
7458 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7460 if (ext_mode == BLKmode)
7462 /* In this case, BITPOS must start at a byte boundary and
7463 TARGET, if specified, must be a MEM. */
7464 if (GET_CODE (op0) != MEM
7465 || (target != 0 && GET_CODE (target) != MEM)
7466 || bitpos % BITS_PER_UNIT != 0)
7467 abort ();
7469 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7470 if (target == 0)
7471 target = assign_temp (type, 0, 1, 1);
7473 emit_block_move (target, op0,
7474 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7475 / BITS_PER_UNIT),
7476 (modifier == EXPAND_STACK_PARM
7477 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7479 return target;
7482 op0 = validize_mem (op0);
7484 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7485 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7487 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7488 (modifier == EXPAND_STACK_PARM
7489 ? NULL_RTX : target),
7490 ext_mode, ext_mode,
7491 int_size_in_bytes (TREE_TYPE (tem)));
7493 /* If the result is a record type and BITSIZE is narrower than
7494 the mode of OP0, an integral mode, and this is a big endian
7495 machine, we must put the field into the high-order bits. */
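/* For example, a 24-bit record value extracted into an SImode register
   on a big-endian target is shifted left by 8 bits here so that its
   bytes occupy the most significant end, matching the layout it would
   have in memory. */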
7496 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7497 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7498 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7499 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7500 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7501 - bitsize),
7502 op0, 1);
7504 if (mode == BLKmode)
7506 rtx new = assign_temp (build_qualified_type
7507 ((*lang_hooks.types.type_for_mode)
7508 (ext_mode, 0),
7509 TYPE_QUAL_CONST), 0, 1, 1);
7511 emit_move_insn (new, op0);
7512 op0 = copy_rtx (new);
7513 PUT_MODE (op0, BLKmode);
7514 set_mem_attributes (op0, exp, 1);
7517 return op0;
7520 /* If the result is BLKmode, use that to access the object
7521 now as well. */
7522 if (mode == BLKmode)
7523 mode1 = BLKmode;
7525 /* Get a reference to just this component. */
7526 if (modifier == EXPAND_CONST_ADDRESS
7527 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7528 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7529 else
7530 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7532 if (op0 == orig_op0)
7533 op0 = copy_rtx (op0);
7535 set_mem_attributes (op0, exp, 0);
7536 if (GET_CODE (XEXP (op0, 0)) == REG)
7537 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7539 MEM_VOLATILE_P (op0) |= volatilep;
7540 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7541 || modifier == EXPAND_CONST_ADDRESS
7542 || modifier == EXPAND_INITIALIZER)
7543 return op0;
7544 else if (target == 0)
7545 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7547 convert_move (target, op0, unsignedp);
7548 return target;
7551 case VTABLE_REF:
7553 rtx insn, before = get_last_insn (), vtbl_ref;
7555 /* Evaluate the interior expression. */
7556 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7557 tmode, modifier);
7559 /* Get or create an instruction off which to hang a note. */
7560 if (REG_P (subtarget))
7562 target = subtarget;
7563 insn = get_last_insn ();
7564 if (insn == before)
7565 abort ();
7566 if (! INSN_P (insn))
7567 insn = prev_nonnote_insn (insn);
7569 else
7571 target = gen_reg_rtx (GET_MODE (subtarget));
7572 insn = emit_move_insn (target, subtarget);
7575 /* Collect the data for the note. */
7576 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7577 vtbl_ref = plus_constant (vtbl_ref,
7578 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7579 /* Discard the initial CONST that was added. */
7580 vtbl_ref = XEXP (vtbl_ref, 0);
7582 REG_NOTES (insn)
7583 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7585 return target;
7588 /* Intended for a reference to a buffer of a file-object in Pascal.
7589 But it's not certain that a special tree code will really be
7590 necessary for these. INDIRECT_REF might work for them. */
7591 case BUFFER_REF:
7592 abort ();
7594 case IN_EXPR:
7596 /* Pascal set IN expression.
7598 Algorithm:
7599 rlo = set_low - (set_low%bits_per_word);
7600 the_word = set [ (index - rlo)/bits_per_word ];
7601 bit_index = index % bits_per_word;
7602 bitmask = 1 << bit_index;
7603 return !!(the_word & bitmask); */
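/* As a rough example, with 32-bit words, set_low == 3 and index == 20:
   rlo = 3 - (3 % 32) = 0, the_word = set[(20 - 0) / 32] = set[0],
   bit_index = 20 % 32 = 20, and bitmask = 1 << 20. */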
7605 tree set = TREE_OPERAND (exp, 0);
7606 tree index = TREE_OPERAND (exp, 1);
7607 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7608 tree set_type = TREE_TYPE (set);
7609 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7610 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7611 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7612 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7613 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7614 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7615 rtx setaddr = XEXP (setval, 0);
7616 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7617 rtx rlow;
7618 rtx diff, quo, rem, addr, bit, result;
7620 /* If domain is empty, answer is no. Likewise if index is constant
7621 and out of bounds. */
7622 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7623 && TREE_CODE (set_low_bound) == INTEGER_CST
7624 && tree_int_cst_lt (set_high_bound, set_low_bound))
7625 || (TREE_CODE (index) == INTEGER_CST
7626 && TREE_CODE (set_low_bound) == INTEGER_CST
7627 && tree_int_cst_lt (index, set_low_bound))
7628 || (TREE_CODE (set_high_bound) == INTEGER_CST
7629 && TREE_CODE (index) == INTEGER_CST
7630 && tree_int_cst_lt (set_high_bound, index))))
7631 return const0_rtx;
7633 if (target == 0)
7634 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7636 /* If we get here, we have to generate the code for both cases
7637 (in range and out of range). */
7639 op0 = gen_label_rtx ();
7640 op1 = gen_label_rtx ();
7642 if (! (GET_CODE (index_val) == CONST_INT
7643 && GET_CODE (lo_r) == CONST_INT))
7644 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7645 GET_MODE (index_val), iunsignedp, op1);
7647 if (! (GET_CODE (index_val) == CONST_INT
7648 && GET_CODE (hi_r) == CONST_INT))
7649 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7650 GET_MODE (index_val), iunsignedp, op1);
7652 /* Calculate the element number of bit zero in the first word
7653 of the set. */
7654 if (GET_CODE (lo_r) == CONST_INT)
7655 rlow = GEN_INT (INTVAL (lo_r)
7656 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7657 else
7658 rlow = expand_binop (index_mode, and_optab, lo_r,
7659 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7660 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7662 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7663 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7665 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7666 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7667 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7668 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7670 addr = memory_address (byte_mode,
7671 expand_binop (index_mode, add_optab, diff,
7672 setaddr, NULL_RTX, iunsignedp,
7673 OPTAB_LIB_WIDEN));
7675 /* Extract the bit we want to examine. */
7676 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7677 gen_rtx_MEM (byte_mode, addr),
7678 make_tree (TREE_TYPE (index), rem),
7679 NULL_RTX, 1);
7680 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7681 GET_MODE (target) == byte_mode ? target : 0,
7682 1, OPTAB_LIB_WIDEN);
7684 if (result != target)
7685 convert_move (target, result, 1);
7687 /* Output the code to handle the out-of-range case. */
7688 emit_jump (op0);
7689 emit_label (op1);
7690 emit_move_insn (target, const0_rtx);
7691 emit_label (op0);
7692 return target;
7695 case WITH_CLEANUP_EXPR:
7696 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7698 WITH_CLEANUP_EXPR_RTL (exp)
7699 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7700 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7701 CLEANUP_EH_ONLY (exp));
7703 /* That's it for this cleanup. */
7704 TREE_OPERAND (exp, 1) = 0;
7706 return WITH_CLEANUP_EXPR_RTL (exp);
7708 case CLEANUP_POINT_EXPR:
7710 /* Start a new binding layer that will keep track of all cleanup
7711 actions to be performed. */
7712 expand_start_bindings (2);
7714 target_temp_slot_level = temp_slot_level;
7716 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7717 /* If we're going to use this value, load it up now. */
7718 if (! ignore)
7719 op0 = force_not_mem (op0);
7720 preserve_temp_slots (op0);
7721 expand_end_bindings (NULL_TREE, 0, 0);
7723 return op0;
7725 case CALL_EXPR:
7726 /* Check for a built-in function. */
7727 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7728 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7729 == FUNCTION_DECL)
7730 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7732 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7733 == BUILT_IN_FRONTEND)
7734 return (*lang_hooks.expand_expr) (exp, original_target,
7735 tmode, modifier);
7736 else
7737 return expand_builtin (exp, target, subtarget, tmode, ignore);
7740 return expand_call (exp, target, ignore);
7742 case NON_LVALUE_EXPR:
7743 case NOP_EXPR:
7744 case CONVERT_EXPR:
7745 case REFERENCE_EXPR:
7746 if (TREE_OPERAND (exp, 0) == error_mark_node)
7747 return const0_rtx;
7749 if (TREE_CODE (type) == UNION_TYPE)
7751 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7753 /* If both input and output are BLKmode, this conversion isn't doing
7754 anything except possibly changing the memory attributes. */
7755 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7757 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7758 modifier);
7760 result = copy_rtx (result);
7761 set_mem_attributes (result, exp, 0);
7762 return result;
7765 if (target == 0)
7766 target = assign_temp (type, 0, 1, 1);
7768 if (GET_CODE (target) == MEM)
7769 /* Store data into beginning of memory target. */
7770 store_expr (TREE_OPERAND (exp, 0),
7771 adjust_address (target, TYPE_MODE (valtype), 0),
7772 modifier == EXPAND_STACK_PARM ? 2 : 0);
7774 else if (GET_CODE (target) == REG)
7775 /* Store this field into a union of the proper type. */
7776 store_field (target,
7777 MIN ((int_size_in_bytes (TREE_TYPE
7778 (TREE_OPERAND (exp, 0)))
7779 * BITS_PER_UNIT),
7780 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7781 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7782 VOIDmode, 0, type, 0);
7783 else
7784 abort ();
7786 /* Return the entire union. */
7787 return target;
7790 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7792 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7793 modifier);
7795 /* If the signedness of the conversion differs and OP0 is
7796 a promoted SUBREG, clear that indication since we now
7797 have to do the proper extension. */
7798 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7799 && GET_CODE (op0) == SUBREG)
7800 SUBREG_PROMOTED_VAR_P (op0) = 0;
7802 return op0;
7805 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7806 if (GET_MODE (op0) == mode)
7807 return op0;
7809 /* If OP0 is a constant, just convert it into the proper mode. */
7810 if (CONSTANT_P (op0))
7812 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7813 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7815 if (modifier == EXPAND_INITIALIZER)
7816 return simplify_gen_subreg (mode, op0, inner_mode,
7817 subreg_lowpart_offset (mode,
7818 inner_mode));
7819 else
7820 return convert_modes (mode, inner_mode, op0,
7821 TREE_UNSIGNED (inner_type));
7824 if (modifier == EXPAND_INITIALIZER)
7825 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7827 if (target == 0)
7828 return
7829 convert_to_mode (mode, op0,
7830 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7831 else
7832 convert_move (target, op0,
7833 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7834 return target;
7836 case VIEW_CONVERT_EXPR:
7837 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7839 /* If the input and output modes are both the same, we are done.
7840 Otherwise, if neither mode is BLKmode and both are within a word, we
7841 can use gen_lowpart. If neither is true, make sure the operand is
7842 in memory and convert the MEM to the new mode. */
7843 if (TYPE_MODE (type) == GET_MODE (op0))
7845 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7846 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7847 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7848 op0 = gen_lowpart (TYPE_MODE (type), op0);
7849 else if (GET_CODE (op0) != MEM)
7851 /* If the operand is not a MEM, force it into memory. Since we
7852 are going to be changing the mode of the MEM, don't call
7853 force_const_mem for constants because we don't allow pool
7854 constants to change mode. */
7855 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7857 if (TREE_ADDRESSABLE (exp))
7858 abort ();
7860 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7861 target
7862 = assign_stack_temp_for_type
7863 (TYPE_MODE (inner_type),
7864 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7866 emit_move_insn (target, op0);
7867 op0 = target;
7870 /* At this point, OP0 is in the correct mode. If the output type is such
7871 that the operand is known to be aligned, indicate that it is.
7872 Otherwise, we need only be concerned about alignment for non-BLKmode
7873 results. */
7874 if (GET_CODE (op0) == MEM)
7876 op0 = copy_rtx (op0);
7878 if (TYPE_ALIGN_OK (type))
7879 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7880 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7881 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7883 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7884 HOST_WIDE_INT temp_size
7885 = MAX (int_size_in_bytes (inner_type),
7886 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7887 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7888 temp_size, 0, type);
7889 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7891 if (TREE_ADDRESSABLE (exp))
7892 abort ();
7894 if (GET_MODE (op0) == BLKmode)
7895 emit_block_move (new_with_op0_mode, op0,
7896 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7897 (modifier == EXPAND_STACK_PARM
7898 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7899 else
7900 emit_move_insn (new_with_op0_mode, op0);
7902 op0 = new;
7905 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7908 return op0;
7910 case PLUS_EXPR:
7911 this_optab = ! unsignedp && flag_trapv
7912 && (GET_MODE_CLASS (mode) == MODE_INT)
7913 ? addv_optab : add_optab;
7915 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7916 something else, make sure we add the register to the constant and
7917 then to the other thing. This case can occur during strength
7918 reduction and doing it this way will produce better code if the
7919 frame pointer or argument pointer is eliminated.
7921 fold-const.c will ensure that the constant is always in the inner
7922 PLUS_EXPR, so the only case we need to do anything about is if
7923 sp, ap, or fp is our second argument, in which case we must swap
7924 the innermost first argument and our second argument. */
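/* Roughly: (X + C) + FP is rearranged into (FP + C) + X, so that the
   frame pointer and the constant end up adjacent and can fold into a
   single register-plus-offset address after elimination. */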
7926 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7927 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7928 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7929 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7930 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7931 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7933 tree t = TREE_OPERAND (exp, 1);
7935 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7936 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7939 /* If the result is to be ptr_mode and we are adding an integer to
7940 something, we might be forming a constant. So try to use
7941 plus_constant. If it produces a sum and we can't accept it,
7942 use force_operand. This allows P = &ARR[const] to generate
7943 efficient code on machines where a SYMBOL_REF is not a valid
7944 address.
7946 If this is an EXPAND_SUM call, always return the sum. */
7947 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7948 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7950 if (modifier == EXPAND_STACK_PARM)
7951 target = 0;
7952 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7953 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7954 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7956 rtx constant_part;
7958 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7959 EXPAND_SUM);
7960 /* Use immed_double_const to ensure that the constant is
7961 truncated according to the mode of OP1, then sign extended
7962 to a HOST_WIDE_INT. Using the constant directly can result
7963 in non-canonical RTL in a 64x32 cross compile. */
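/* For instance, on a 64-bit host targeting a 32-bit machine, a 32-bit
   -1 may reach here with its low word as 0xffffffff; truncating to the
   32-bit mode and sign extending yields the canonical CONST_INT -1
   instead of a positive 0xffffffff. */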
7964 constant_part
7965 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7966 (HOST_WIDE_INT) 0,
7967 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7968 op1 = plus_constant (op1, INTVAL (constant_part));
7969 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7970 op1 = force_operand (op1, target);
7971 return op1;
7974 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7975 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7976 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7978 rtx constant_part;
7980 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7981 (modifier == EXPAND_INITIALIZER
7982 ? EXPAND_INITIALIZER : EXPAND_SUM));
7983 if (! CONSTANT_P (op0))
7985 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7986 VOIDmode, modifier);
7987 /* Don't go to both_summands if modifier
7988 says it's not right to return a PLUS. */
7989 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7990 goto binop2;
7991 goto both_summands;
7993 /* Use immed_double_const to ensure that the constant is
7994 truncated according to the mode of OP0, then sign extended
7995 to a HOST_WIDE_INT. Using the constant directly can result
7996 in non-canonical RTL in a 64x32 cross compile. */
7997 constant_part
7998 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7999 (HOST_WIDE_INT) 0,
8000 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8001 op0 = plus_constant (op0, INTVAL (constant_part));
8002 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8003 op0 = force_operand (op0, target);
8004 return op0;
8008 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8009 subtarget = 0;
8011 /* No sense saving up arithmetic to be done
8012 if it's all in the wrong mode to form part of an address.
8013 And force_operand won't know whether to sign-extend or
8014 zero-extend. */
8015 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8016 || mode != ptr_mode)
8018 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8019 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8020 if (op0 == const0_rtx)
8021 return op1;
8022 if (op1 == const0_rtx)
8023 return op0;
8024 goto binop2;
8027 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8028 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8030 /* We come here from MINUS_EXPR when the second operand is a
8031 constant. */
8032 both_summands:
8033 /* Make sure any term that's a sum with a constant comes last. */
8034 if (GET_CODE (op0) == PLUS
8035 && CONSTANT_P (XEXP (op0, 1)))
8037 temp = op0;
8038 op0 = op1;
8039 op1 = temp;
8041 /* If adding to a sum including a constant,
8042 associate it to put the constant outside. */
8043 if (GET_CODE (op1) == PLUS
8044 && CONSTANT_P (XEXP (op1, 1)))
8046 rtx constant_term = const0_rtx;
8048 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8049 if (temp != 0)
8050 op0 = temp;
8051 /* Ensure that MULT comes first if there is one. */
8052 else if (GET_CODE (op0) == MULT)
8053 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8054 else
8055 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8057 /* Let's also eliminate constants from op0 if possible. */
8058 op0 = eliminate_constant_term (op0, &constant_term);
8060 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8061 their sum should be a constant. Form it into OP1, since the
8062 result we want will then be OP0 + OP1. */
8064 temp = simplify_binary_operation (PLUS, mode, constant_term,
8065 XEXP (op1, 1));
8066 if (temp != 0)
8067 op1 = temp;
8068 else
8069 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8072 /* Put a constant term last and put a multiplication first. */
8073 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8074 temp = op1, op1 = op0, op0 = temp;
8076 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8077 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8079 case MINUS_EXPR:
8080 /* For initializers, we are allowed to return a MINUS of two
8081 symbolic constants. Here we handle all cases when both operands
8082 are constant. */
8083 /* Handle difference of two symbolic constants,
8084 for the sake of an initializer. */
8085 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8086 && really_constant_p (TREE_OPERAND (exp, 0))
8087 && really_constant_p (TREE_OPERAND (exp, 1)))
8089 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8090 modifier);
8091 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8092 modifier);
8094 /* If the last operand is a CONST_INT, use plus_constant of
8095 the negated constant. Else make the MINUS. */
8096 if (GET_CODE (op1) == CONST_INT)
8097 return plus_constant (op0, - INTVAL (op1));
8098 else
8099 return gen_rtx_MINUS (mode, op0, op1);
8102 this_optab = ! unsignedp && flag_trapv
8103 && (GET_MODE_CLASS(mode) == MODE_INT)
8104 ? subv_optab : sub_optab;
8106 /* No sense saving up arithmetic to be done
8107 if it's all in the wrong mode to form part of an address.
8108 And force_operand won't know whether to sign-extend or
8109 zero-extend. */
8110 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8111 || mode != ptr_mode)
8112 goto binop;
8114 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8115 subtarget = 0;
8117 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8118 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8120 /* Convert A - const to A + (-const). */
8121 if (GET_CODE (op1) == CONST_INT)
8123 op1 = negate_rtx (mode, op1);
8124 goto both_summands;
8127 goto binop2;
8129 case MULT_EXPR:
8130 /* If first operand is constant, swap them.
8131 Thus the following special case checks need only
8132 check the second operand. */
8133 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8135 tree t1 = TREE_OPERAND (exp, 0);
8136 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8137 TREE_OPERAND (exp, 1) = t1;
8140 /* Attempt to return something suitable for generating an
8141 indexed address, for machines that support that. */
8143 if (modifier == EXPAND_SUM && mode == ptr_mode
8144 && host_integerp (TREE_OPERAND (exp, 1), 0))
8146 tree exp1 = TREE_OPERAND (exp, 1);
8148 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8149 EXPAND_SUM);
8151 /* If we knew for certain that this is arithmetic for an array
8152 reference, and we knew the bounds of the array, then we could
8153 apply the distributive law across (PLUS X C) for constant C.
8154 Without such knowledge, we risk overflowing the computation
8155 when both X and C are large, but X+C isn't. */
8156 /* ??? Could perhaps special-case EXP being unsigned and C being
8157 positive. In that case we are certain that X+C is no smaller
8158 than X and so the transformed expression will overflow iff the
8159 original would have. */
8161 if (GET_CODE (op0) != REG)
8162 op0 = force_operand (op0, NULL_RTX);
8163 if (GET_CODE (op0) != REG)
8164 op0 = copy_to_mode_reg (mode, op0);
8166 return gen_rtx_MULT (mode, op0,
8167 gen_int_mode (tree_low_cst (exp1, 0),
8168 TYPE_MODE (TREE_TYPE (exp1))));
8171 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8172 subtarget = 0;
8174 if (modifier == EXPAND_STACK_PARM)
8175 target = 0;
8177 /* Check for multiplying things that have been extended
8178 from a narrower type. If this machine supports multiplying
8179 in that narrower type with a result in the desired type,
8180 do it that way, and avoid the explicit type-conversion. */
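/* For instance, (int) s1 * (int) s2 with S1 and S2 of type short can
   use a 16x16->32 widening multiply (a mulhisi3-style pattern) when
   the target provides one, rather than widening both operands and
   doing a full SImode multiply. */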
8181 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8182 && TREE_CODE (type) == INTEGER_TYPE
8183 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8184 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8185 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8186 && int_fits_type_p (TREE_OPERAND (exp, 1),
8187 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8188 /* Don't use a widening multiply if a shift will do. */
8189 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8190 > HOST_BITS_PER_WIDE_INT)
8191 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8193 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8194 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8196 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8197 /* If both operands are extended, they must either both
8198 be zero-extended or both be sign-extended. */
8199 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8201 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8203 enum machine_mode innermode
8204 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8205 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8206 ? smul_widen_optab : umul_widen_optab);
8207 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8208 ? umul_widen_optab : smul_widen_optab);
8209 if (mode == GET_MODE_WIDER_MODE (innermode))
8211 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8213 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8214 NULL_RTX, VOIDmode, 0);
8215 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8216 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8217 VOIDmode, 0);
8218 else
8219 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8220 NULL_RTX, VOIDmode, 0);
8221 goto binop2;
8223 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8224 && innermode == word_mode)
8226 rtx htem;
8227 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8228 NULL_RTX, VOIDmode, 0);
8229 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8230 op1 = convert_modes (innermode, mode,
8231 expand_expr (TREE_OPERAND (exp, 1),
8232 NULL_RTX, VOIDmode, 0),
8233 unsignedp);
8234 else
8235 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8236 NULL_RTX, VOIDmode, 0);
8237 temp = expand_binop (mode, other_optab, op0, op1, target,
8238 unsignedp, OPTAB_LIB_WIDEN);
8239 htem = expand_mult_highpart_adjust (innermode,
8240 gen_highpart (innermode, temp),
8241 op0, op1,
8242 gen_highpart (innermode, temp),
8243 unsignedp);
8244 emit_move_insn (gen_highpart (innermode, temp), htem);
8245 return temp;
8249 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8250 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8251 return expand_mult (mode, op0, op1, target, unsignedp);
8253 case TRUNC_DIV_EXPR:
8254 case FLOOR_DIV_EXPR:
8255 case CEIL_DIV_EXPR:
8256 case ROUND_DIV_EXPR:
8257 case EXACT_DIV_EXPR:
8258 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8259 subtarget = 0;
8260 if (modifier == EXPAND_STACK_PARM)
8261 target = 0;
8262 /* Possible optimization: compute the dividend with EXPAND_SUM;
8263 then, if the divisor is constant, we can optimize the case
8264 where some terms of the dividend have coefficients divisible by it. */
8265 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8266 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8267 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8269 case RDIV_EXPR:
8270 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8271 saving an expensive divide. If not, combine will rebuild the
8272 original computation. */
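/* E.g. with -funsafe-math-optimizations, x/y and z/y in one expression
   can then share a single reciprocal 1/y through CSE. */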
8273 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8274 && TREE_CODE (type) == REAL_TYPE
8275 && !real_onep (TREE_OPERAND (exp, 0)))
8276 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8277 build (RDIV_EXPR, type,
8278 build_real (type, dconst1),
8279 TREE_OPERAND (exp, 1))),
8280 target, tmode, modifier);
8281 this_optab = sdiv_optab;
8282 goto binop;
8284 case TRUNC_MOD_EXPR:
8285 case FLOOR_MOD_EXPR:
8286 case CEIL_MOD_EXPR:
8287 case ROUND_MOD_EXPR:
8288 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8289 subtarget = 0;
8290 if (modifier == EXPAND_STACK_PARM)
8291 target = 0;
8292 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8293 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8294 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8296 case FIX_ROUND_EXPR:
8297 case FIX_FLOOR_EXPR:
8298 case FIX_CEIL_EXPR:
8299 abort (); /* Not used for C. */
8301 case FIX_TRUNC_EXPR:
8302 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8303 if (target == 0 || modifier == EXPAND_STACK_PARM)
8304 target = gen_reg_rtx (mode);
8305 expand_fix (target, op0, unsignedp);
8306 return target;
8308 case FLOAT_EXPR:
8309 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8310 if (target == 0 || modifier == EXPAND_STACK_PARM)
8311 target = gen_reg_rtx (mode);
8312 /* expand_float can't figure out what to do if FROM has VOIDmode.
8313 So give it the correct mode. With -O, cse will optimize this. */
8314 if (GET_MODE (op0) == VOIDmode)
8315 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8316 op0);
8317 expand_float (target, op0,
8318 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8319 return target;
8321 case NEGATE_EXPR:
8322 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8323 if (modifier == EXPAND_STACK_PARM)
8324 target = 0;
8325 temp = expand_unop (mode,
8326 ! unsignedp && flag_trapv
8327 && (GET_MODE_CLASS(mode) == MODE_INT)
8328 ? negv_optab : neg_optab, op0, target, 0);
8329 if (temp == 0)
8330 abort ();
8331 return temp;
8333 case ABS_EXPR:
8334 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8335 if (modifier == EXPAND_STACK_PARM)
8336 target = 0;
8338 /* Handle complex values specially. */
8339 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8340 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8341 return expand_complex_abs (mode, op0, target, unsignedp);
8343 /* Unsigned abs is simply the operand. Testing here means we don't
8344 risk generating incorrect code below. */
8345 if (TREE_UNSIGNED (type))
8346 return op0;
8348 return expand_abs (mode, op0, target, unsignedp,
8349 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8351 case MAX_EXPR:
8352 case MIN_EXPR:
8353 target = original_target;
8354 if (target == 0
8355 || modifier == EXPAND_STACK_PARM
8356 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8357 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8358 || GET_MODE (target) != mode
8359 || (GET_CODE (target) == REG
8360 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8361 target = gen_reg_rtx (mode);
8362 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8363 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8365 /* First try to do it with a special MIN or MAX instruction.
8366 If that does not win, use a conditional jump to select the proper
8367 value. */
8368 this_optab = (TREE_UNSIGNED (type)
8369 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8370 : (code == MIN_EXPR ? smin_optab : smax_optab));
8372 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8373 OPTAB_WIDEN);
8374 if (temp != 0)
8375 return temp;
8377 /* At this point, a MEM target is no longer useful; we will get better
8378 code without it. */
8380 if (GET_CODE (target) == MEM)
8381 target = gen_reg_rtx (mode);
8383 if (target != op0)
8384 emit_move_insn (target, op0);
8386 op0 = gen_label_rtx ();
8388 /* If this mode is an integer too wide to compare properly,
8389 compare word by word. Rely on cse to optimize constant cases. */
8390 if (GET_MODE_CLASS (mode) == MODE_INT
8391 && ! can_compare_p (GE, mode, ccp_jump))
8393 if (code == MAX_EXPR)
8394 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8395 target, op1, NULL_RTX, op0);
8396 else
8397 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8398 op1, target, NULL_RTX, op0);
8400 else
8402 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8403 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8404 unsignedp, mode, NULL_RTX, NULL_RTX,
8405 op0);
8407 emit_move_insn (target, op1);
8408 emit_label (op0);
8409 return target;
8411 case BIT_NOT_EXPR:
8412 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8413 if (modifier == EXPAND_STACK_PARM)
8414 target = 0;
8415 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8416 if (temp == 0)
8417 abort ();
8418 return temp;
8420 case FFS_EXPR:
8421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8422 if (modifier == EXPAND_STACK_PARM)
8423 target = 0;
8424 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8425 if (temp == 0)
8426 abort ();
8427 return temp;
8429 case CLZ_EXPR:
8430 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8431 temp = expand_unop (mode, clz_optab, op0, target, 1);
8432 if (temp == 0)
8433 abort ();
8434 return temp;
8436 case CTZ_EXPR:
8437 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8438 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8439 if (temp == 0)
8440 abort ();
8441 return temp;
8443 case POPCOUNT_EXPR:
8444 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8445 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8446 if (temp == 0)
8447 abort ();
8448 return temp;
8450 case PARITY_EXPR:
8451 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8452 temp = expand_unop (mode, parity_optab, op0, target, 1);
8453 if (temp == 0)
8454 abort ();
8455 return temp;
8457 /* ??? Can optimize bitwise operations with one arg constant.
8458 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8459 and (a bitwise1 b) bitwise2 b (etc)
8460 but that is probably not worth while. */
8462 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8463 boolean values when we want in all cases to compute both of them. In
8464 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8465 as actual zero-or-1 values and then bitwise anding. In cases where
8466 there cannot be any side effects, better code would be made by
8467 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8468 how to recognize those cases. */
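/* So a TRUTH_AND_EXPR of two comparisons is expanded roughly as
   (a > 0) & (b > 0): both store-flag results are computed and then
   combined with and_optab, with no branch in between. */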
8470 case TRUTH_AND_EXPR:
8471 case BIT_AND_EXPR:
8472 this_optab = and_optab;
8473 goto binop;
8475 case TRUTH_OR_EXPR:
8476 case BIT_IOR_EXPR:
8477 this_optab = ior_optab;
8478 goto binop;
8480 case TRUTH_XOR_EXPR:
8481 case BIT_XOR_EXPR:
8482 this_optab = xor_optab;
8483 goto binop;
8485 case LSHIFT_EXPR:
8486 case RSHIFT_EXPR:
8487 case LROTATE_EXPR:
8488 case RROTATE_EXPR:
8489 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8490 subtarget = 0;
8491 if (modifier == EXPAND_STACK_PARM)
8492 target = 0;
8493 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8494 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8495 unsignedp);
8497 /* Could determine the answer when only additive constants differ. Also,
8498 the addition of one can be handled by changing the condition. */
8499 case LT_EXPR:
8500 case LE_EXPR:
8501 case GT_EXPR:
8502 case GE_EXPR:
8503 case EQ_EXPR:
8504 case NE_EXPR:
8505 case UNORDERED_EXPR:
8506 case ORDERED_EXPR:
8507 case UNLT_EXPR:
8508 case UNLE_EXPR:
8509 case UNGT_EXPR:
8510 case UNGE_EXPR:
8511 case UNEQ_EXPR:
8512 temp = do_store_flag (exp,
8513 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8514 tmode != VOIDmode ? tmode : mode, 0);
8515 if (temp != 0)
8516 return temp;
8518 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8519 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8520 && original_target
8521 && GET_CODE (original_target) == REG
8522 && (GET_MODE (original_target)
8523 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8525 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8526 VOIDmode, 0);
8528 /* If temp is constant, we can just compute the result. */
8529 if (GET_CODE (temp) == CONST_INT)
8531 if (INTVAL (temp) != 0)
8532 emit_move_insn (target, const1_rtx);
8533 else
8534 emit_move_insn (target, const0_rtx);
8536 return target;
8539 if (temp != original_target)
8541 enum machine_mode mode1 = GET_MODE (temp);
8542 if (mode1 == VOIDmode)
8543 mode1 = tmode != VOIDmode ? tmode : mode;
8545 temp = copy_to_mode_reg (mode1, temp);
8548 op1 = gen_label_rtx ();
8549 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8550 GET_MODE (temp), unsignedp, op1);
8551 emit_move_insn (temp, const1_rtx);
8552 emit_label (op1);
8553 return temp;
8556 /* If no set-flag instruction, must generate a conditional
8557 store into a temporary variable. Drop through
8558 and handle this like && and ||. */
8560 case TRUTH_ANDIF_EXPR:
8561 case TRUTH_ORIF_EXPR:
8562 if (! ignore
8563 && (target == 0
8564 || modifier == EXPAND_STACK_PARM
8565 || ! safe_from_p (target, exp, 1)
8566 /* Make sure we don't have a hard reg (such as function's return
8567 value) live across basic blocks, if not optimizing. */
8568 || (!optimize && GET_CODE (target) == REG
8569 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8570 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8572 if (target)
8573 emit_clr_insn (target);
8575 op1 = gen_label_rtx ();
8576 jumpifnot (exp, op1);
8578 if (target)
8579 emit_0_to_1_insn (target);
8581 emit_label (op1);
8582 return ignore ? const0_rtx : target;
8584 case TRUTH_NOT_EXPR:
8585 if (modifier == EXPAND_STACK_PARM)
8586 target = 0;
8587 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8588 /* The parser is careful to generate TRUTH_NOT_EXPR
8589 only with operands that are always zero or one. */
8590 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8591 target, 1, OPTAB_LIB_WIDEN);
8592 if (temp == 0)
8593 abort ();
8594 return temp;
8596 case COMPOUND_EXPR:
8597 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8598 emit_queue ();
8599 return expand_expr (TREE_OPERAND (exp, 1),
8600 (ignore ? const0_rtx : target),
8601 VOIDmode, modifier);
8603 case COND_EXPR:
8604 /* If we would have a "singleton" (see below) were it not for a
8605 conversion in each arm, bring that conversion back out. */
8606 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8607 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8608 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8609 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8611 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8612 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8614 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8615 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8616 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8617 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8618 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8619 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8620 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8621 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8622 return expand_expr (build1 (NOP_EXPR, type,
8623 build (COND_EXPR, TREE_TYPE (iftrue),
8624 TREE_OPERAND (exp, 0),
8625 iftrue, iffalse)),
8626 target, tmode, modifier);
8630 /* Note that COND_EXPRs whose type is a structure or union
8631 are required to be constructed to contain assignments of
8632 a temporary variable, so that we can evaluate them here
8633 for side effect only. If type is void, we must do likewise. */
8635 /* If an arm of the branch requires a cleanup,
8636 only that cleanup is performed. */
8638 tree singleton = 0;
8639 tree binary_op = 0, unary_op = 0;
8641 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8642 convert it to our mode, if necessary. */
8643 if (integer_onep (TREE_OPERAND (exp, 1))
8644 && integer_zerop (TREE_OPERAND (exp, 2))
8645 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8647 if (ignore)
8649 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8650 modifier);
8651 return const0_rtx;
8654 if (modifier == EXPAND_STACK_PARM)
8655 target = 0;
8656 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8657 if (GET_MODE (op0) == mode)
8658 return op0;
8660 if (target == 0)
8661 target = gen_reg_rtx (mode);
8662 convert_move (target, op0, unsignedp);
8663 return target;
8666 /* Check for X ? A + B : A. If we have this, we can copy A to the
8667 output and conditionally add B. Similarly for unary operations.
8668 Don't do this if X has side-effects because those side effects
8669 might affect A or B and the "?" operation is a sequence point in
8670 ANSI. (operand_equal_p tests for side effects.) */
8672 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8673 && operand_equal_p (TREE_OPERAND (exp, 2),
8674 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8675 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8676 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8677 && operand_equal_p (TREE_OPERAND (exp, 1),
8678 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8679 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8680 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8681 && operand_equal_p (TREE_OPERAND (exp, 2),
8682 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8683 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8684 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8685 && operand_equal_p (TREE_OPERAND (exp, 1),
8686 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8687 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8689 /* If we are not to produce a result, we have no target. Otherwise,
8690 if a target was specified use it; it will not be used as an
8691 intermediate target unless it is safe. If no target, use a
8692 temporary. */
8694 if (ignore)
8695 temp = 0;
8696 else if (modifier == EXPAND_STACK_PARM)
8697 temp = assign_temp (type, 0, 0, 1);
8698 else if (original_target
8699 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8700 || (singleton && GET_CODE (original_target) == REG
8701 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8702 && original_target == var_rtx (singleton)))
8703 && GET_MODE (original_target) == mode
8704 #ifdef HAVE_conditional_move
8705 && (! can_conditionally_move_p (mode)
8706 || GET_CODE (original_target) == REG
8707 || TREE_ADDRESSABLE (type))
8708 #endif
8709 && (GET_CODE (original_target) != MEM
8710 || TREE_ADDRESSABLE (type)))
8711 temp = original_target;
8712 else if (TREE_ADDRESSABLE (type))
8713 abort ();
8714 else
8715 temp = assign_temp (type, 0, 0, 1);
8717 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8718 do the test of X as a store-flag operation, do this as
8719 A + ((X != 0) << log C). Similarly for other simple binary
8720 operators. Only do for C == 1 if BRANCH_COST is low. */
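/* E.g. "x ? a + 4 : a" can become a + ((x != 0) << 2), trading the
   conditional branch for a store-flag and a shift when the target can
   compute the flag cheaply. */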
8721 if (temp && singleton && binary_op
8722 && (TREE_CODE (binary_op) == PLUS_EXPR
8723 || TREE_CODE (binary_op) == MINUS_EXPR
8724 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8725 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8726 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8727 : integer_onep (TREE_OPERAND (binary_op, 1)))
8728 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8730 rtx result;
8731 tree cond;
8732 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8733 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8734 ? addv_optab : add_optab)
8735 : TREE_CODE (binary_op) == MINUS_EXPR
8736 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8737 ? subv_optab : sub_optab)
8738 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8739 : xor_optab);
8741 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8742 if (singleton == TREE_OPERAND (exp, 1))
8743 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8744 else
8745 cond = TREE_OPERAND (exp, 0);
8747 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8748 ? temp : NULL_RTX),
8749 mode, BRANCH_COST <= 1);
8751 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8752 result = expand_shift (LSHIFT_EXPR, mode, result,
8753 build_int_2 (tree_log2
8754 (TREE_OPERAND
8755 (binary_op, 1)),
8757 (safe_from_p (temp, singleton, 1)
8758 ? temp : NULL_RTX), 0);
8760 if (result)
8762 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8763 return expand_binop (mode, boptab, op1, result, temp,
8764 unsignedp, OPTAB_LIB_WIDEN);
8768 do_pending_stack_adjust ();
8769 NO_DEFER_POP;
8770 op0 = gen_label_rtx ();
8772 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8774 if (temp != 0)
8776 /* If the target conflicts with the other operand of the
8777 binary op, we can't use it. Also, we can't use the target
8778 if it is a hard register, because evaluating the condition
8779 might clobber it. */
8780 if ((binary_op
8781 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8782 || (GET_CODE (temp) == REG
8783 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8784 temp = gen_reg_rtx (mode);
8785 store_expr (singleton, temp,
8786 modifier == EXPAND_STACK_PARM ? 2 : 0);
8788 else
8789 expand_expr (singleton,
8790 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8791 if (singleton == TREE_OPERAND (exp, 1))
8792 jumpif (TREE_OPERAND (exp, 0), op0);
8793 else
8794 jumpifnot (TREE_OPERAND (exp, 0), op0);
8796 start_cleanup_deferral ();
8797 if (binary_op && temp == 0)
8798 /* Just touch the other operand. */
8799 expand_expr (TREE_OPERAND (binary_op, 1),
8800 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8801 else if (binary_op)
8802 store_expr (build (TREE_CODE (binary_op), type,
8803 make_tree (type, temp),
8804 TREE_OPERAND (binary_op, 1)),
8805 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8806 else
8807 store_expr (build1 (TREE_CODE (unary_op), type,
8808 make_tree (type, temp)),
8809 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8810 op1 = op0;
8812 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8813 comparison operator. If we have one of these cases, set the
8814 output to A, branch on A (cse will merge these two references),
8815 then set the output to FOO. */
8816 else if (temp
8817 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8818 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8819 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8820 TREE_OPERAND (exp, 1), 0)
8821 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8822 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8823 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8825 if (GET_CODE (temp) == REG
8826 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8827 temp = gen_reg_rtx (mode);
8828 store_expr (TREE_OPERAND (exp, 1), temp,
8829 modifier == EXPAND_STACK_PARM ? 2 : 0);
8830 jumpif (TREE_OPERAND (exp, 0), op0);
8832 start_cleanup_deferral ();
8833 store_expr (TREE_OPERAND (exp, 2), temp,
8834 modifier == EXPAND_STACK_PARM ? 2 : 0);
8835 op1 = op0;
8837 else if (temp
8838 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8839 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8840 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8841 TREE_OPERAND (exp, 2), 0)
8842 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8843 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8844 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8846 if (GET_CODE (temp) == REG
8847 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8848 temp = gen_reg_rtx (mode);
8849 store_expr (TREE_OPERAND (exp, 2), temp,
8850 modifier == EXPAND_STACK_PARM ? 2 : 0);
8851 jumpifnot (TREE_OPERAND (exp, 0), op0);
8853 start_cleanup_deferral ();
8854 store_expr (TREE_OPERAND (exp, 1), temp,
8855 modifier == EXPAND_STACK_PARM ? 2 : 0);
8856 op1 = op0;
8858 else
8860 op1 = gen_label_rtx ();
8861 jumpifnot (TREE_OPERAND (exp, 0), op0);
8863 start_cleanup_deferral ();
8865 /* One branch of the cond can be void, if it never returns. For
8866 example, A ? throw : E. */
8867 if (temp != 0
8868 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8869 store_expr (TREE_OPERAND (exp, 1), temp,
8870 modifier == EXPAND_STACK_PARM ? 2 : 0);
8871 else
8872 expand_expr (TREE_OPERAND (exp, 1),
8873 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8874 end_cleanup_deferral ();
8875 emit_queue ();
8876 emit_jump_insn (gen_jump (op1));
8877 emit_barrier ();
8878 emit_label (op0);
8879 start_cleanup_deferral ();
8880 if (temp != 0
8881 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8882 store_expr (TREE_OPERAND (exp, 2), temp,
8883 modifier == EXPAND_STACK_PARM ? 2 : 0);
8884 else
8885 expand_expr (TREE_OPERAND (exp, 2),
8886 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8889 end_cleanup_deferral ();
8891 emit_queue ();
8892 emit_label (op1);
8893 OK_DEFER_POP;
8895 return temp;
8898 case TARGET_EXPR:
8900 /* Something needs to be initialized, but we didn't know
8901 where that thing was when building the tree. For example,
8902 it could be the return value of a function, or a parameter
8903 to a function which is laid down on the stack, or a temporary
8904 variable which must be passed by reference.
8906 We guarantee that the expression will either be constructed
8907 or copied into our original target. */
8909 tree slot = TREE_OPERAND (exp, 0);
8910 tree cleanups = NULL_TREE;
8911 tree exp1;
8913 if (TREE_CODE (slot) != VAR_DECL)
8914 abort ();
8916 if (! ignore)
8917 target = original_target;
8919 /* Set this here so that if we get a target that refers to a
8920 register variable that's already been used, put_reg_into_stack
8921 knows that it should fix up those uses. */
8922 TREE_USED (slot) = 1;
8924 if (target == 0)
8926 if (DECL_RTL_SET_P (slot))
8928 target = DECL_RTL (slot);
8929 /* If we have already expanded the slot, don't do
8930 it again. (mrs) */
8931 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8932 return target;
8934 else
8936 target = assign_temp (type, 2, 0, 1);
8937 /* All temp slots at this level must not conflict. */
8938 preserve_temp_slots (target);
8939 SET_DECL_RTL (slot, target);
8940 if (TREE_ADDRESSABLE (slot))
8941 put_var_into_stack (slot);
8943 /* Since SLOT is not known to the called function
8944 to belong to its stack frame, we must build an explicit
8945 cleanup. This case occurs when we must build up a reference
8946 to pass the reference as an argument. In this case,
8947 it is very likely that such a reference need not be
8948 built here. */
8950 if (TREE_OPERAND (exp, 2) == 0)
8951 TREE_OPERAND (exp, 2)
8952 = (*lang_hooks.maybe_build_cleanup) (slot);
8953 cleanups = TREE_OPERAND (exp, 2);
8956 else
8958 /* This case does occur when expanding a parameter which
8959 needs to be constructed on the stack. The target
8960 is the actual stack address that we want to initialize.
8961 The function we call will perform the cleanup in this case. */
8963 /* If we have already assigned it space, use that space,
8964 not the target that we were passed in, as our target
8965 parameter is only a hint. */
8966 if (DECL_RTL_SET_P (slot))
8968 target = DECL_RTL (slot);
8969 /* If we have already expanded the slot, don't do
8970 it again. (mrs) */
8971 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8972 return target;
8974 else
8976 SET_DECL_RTL (slot, target);
8977 /* If we must have an addressable slot, then make sure that
8978 the RTL that we just stored in slot is OK. */
8979 if (TREE_ADDRESSABLE (slot))
8980 put_var_into_stack (slot);
8984 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8985 /* Mark it as expanded. */
8986 TREE_OPERAND (exp, 1) = NULL_TREE;
8988 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8990 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8992 return target;
8995 case INIT_EXPR:
8997 tree lhs = TREE_OPERAND (exp, 0);
8998 tree rhs = TREE_OPERAND (exp, 1);
9000 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9001 return temp;
9004 case MODIFY_EXPR:
9006 /* If lhs is complex, expand calls in rhs before computing it.
9007 That's so we don't compute a pointer and save it over a
9008 call. If lhs is simple, compute it first so we can give it
9009 as a target if the rhs is just a call. This avoids an
9010 extra temp and copy and that prevents a partial-subsumption
9011 which makes bad code. Actually we could treat
9012 component_ref's of vars like vars. */
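/* E.g. for "s = f ()" with S a plain variable, S is expanded first and
   handed to the call as its target, so the return value can be written
   directly into S without an extra temporary. */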
9014 tree lhs = TREE_OPERAND (exp, 0);
9015 tree rhs = TREE_OPERAND (exp, 1);
9017 temp = 0;
9019 /* Check for |= or &= of a bitfield of size one into another bitfield
9020 of size 1. In this case, (unless we need the result of the
9021 assignment) we can do this more efficiently with a
9022 test followed by an assignment, if necessary.
9024 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9025 things change so we do, this code should be enhanced to
9026 support it. */
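/* E.g. for "a |= b" with A and B one-bit fields: if B is zero, the
   store is skipped and A is left alone; otherwise 1 is simply stored
   into A, with no read-modify-write of A needed. */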
9027 if (ignore
9028 && TREE_CODE (lhs) == COMPONENT_REF
9029 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9030 || TREE_CODE (rhs) == BIT_AND_EXPR)
9031 && TREE_OPERAND (rhs, 0) == lhs
9032 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9033 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9034 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9036 rtx label = gen_label_rtx ();
9038 do_jump (TREE_OPERAND (rhs, 1),
9039 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9040 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9041 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9042 (TREE_CODE (rhs) == BIT_IOR_EXPR
9043 ? integer_one_node
9044 : integer_zero_node)),
9045 0, 0);
9046 do_pending_stack_adjust ();
9047 emit_label (label);
9048 return const0_rtx;
9051 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9053 return temp;
9056 case RETURN_EXPR:
9057 if (!TREE_OPERAND (exp, 0))
9058 expand_null_return ();
9059 else
9060 expand_return (TREE_OPERAND (exp, 0));
9061 return const0_rtx;
9063 case PREINCREMENT_EXPR:
9064 case PREDECREMENT_EXPR:
9065 return expand_increment (exp, 0, ignore);
9067 case POSTINCREMENT_EXPR:
9068 case POSTDECREMENT_EXPR:
9069 /* Faster to treat as pre-increment if result is not used. */
9070 return expand_increment (exp, ! ignore, ignore);
9072 case ADDR_EXPR:
9073 if (modifier == EXPAND_STACK_PARM)
9074 target = 0;
9075 /* Are we taking the address of a nested function? */
9076 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9077 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9078 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9079 && ! TREE_STATIC (exp))
9081 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9082 op0 = force_operand (op0, target);
9084 /* If we are taking the address of something erroneous, just
9085 return a zero. */
9086 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9087 return const0_rtx;
9088 /* If we are taking the address of a constant and are at the
9089 top level, we have to use output_constant_def since we can't
9090 call force_const_mem at top level. */
9091 else if (cfun == 0
9092 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9093 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9094 == 'c')))
9095 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9096 else
9098 /* We make sure to pass const0_rtx down if we came in with
9099 ignore set, to avoid doing the cleanups twice. */
9100 op0 = expand_expr (TREE_OPERAND (exp, 0),
9101 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9102 (modifier == EXPAND_INITIALIZER
9103 ? modifier : EXPAND_CONST_ADDRESS));
9105 /* If we are going to ignore the result, OP0 will have been set
9106 to const0_rtx, so just return it. Don't get confused and
9107 think we are taking the address of the constant. */
9108 if (ignore)
9109 return op0;
9111 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9112 clever and return a REG when given a MEM. */
9113 op0 = protect_from_queue (op0, 1);
9115 /* We would like the object in memory. If it is a constant, we can
9116 have it be statically allocated into memory. For a non-constant,
9117 we need to allocate some memory and store the value into it. */
9119 if (CONSTANT_P (op0))
9120 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9121 op0);
9122 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9123 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9124 || GET_CODE (op0) == PARALLEL)
9126 /* If the operand is a SAVE_EXPR, we can deal with this by
9127 forcing the SAVE_EXPR into memory. */
9128 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9130 put_var_into_stack (TREE_OPERAND (exp, 0));
9131 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9133 else
9135 /* If this object is in a register, it can't be BLKmode. */
9136 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9137 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9139 if (GET_CODE (op0) == PARALLEL)
9140 /* Handle calls that pass values in multiple
9141 non-contiguous locations. The Irix 6 ABI has examples
9142 of this. */
9143 emit_group_store (memloc, op0,
9144 int_size_in_bytes (inner_type));
9145 else
9146 emit_move_insn (memloc, op0);
9148 op0 = memloc;
9152 if (GET_CODE (op0) != MEM)
9153 abort ();
9155 mark_temp_addr_taken (op0);
9156 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9158 op0 = XEXP (op0, 0);
9159 #ifdef POINTERS_EXTEND_UNSIGNED
9160 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9161 && mode == ptr_mode)
9162 op0 = convert_memory_address (ptr_mode, op0);
9163 #endif
9164 return op0;
9167 /* If OP0 is not aligned at least as much as the type requires, we
9168 need to make a temporary, copy OP0 to it, and take the address of
9169 the temporary. We want to use the alignment of the type, not of
9170 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9171 the test for BLKmode means that can't happen. The test for
9172 BLKmode is because we never make mis-aligned MEMs with
9173 non-BLKmode.
9175 We don't need to do this at all if the machine doesn't have
9176 strict alignment. */
9177 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9178 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9179 > MEM_ALIGN (op0))
9180 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9182 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9183 rtx new
9184 = assign_stack_temp_for_type
9185 (TYPE_MODE (inner_type),
9186 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9187 : int_size_in_bytes (inner_type),
9188 1, build_qualified_type (inner_type,
9189 (TYPE_QUALS (inner_type)
9190 | TYPE_QUAL_CONST)));
9192 if (TYPE_ALIGN_OK (inner_type))
9193 abort ();
9195 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9196 (modifier == EXPAND_STACK_PARM
9197 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9198 op0 = new;
9201 op0 = force_operand (XEXP (op0, 0), target);
9204 if (flag_force_addr
9205 && GET_CODE (op0) != REG
9206 && modifier != EXPAND_CONST_ADDRESS
9207 && modifier != EXPAND_INITIALIZER
9208 && modifier != EXPAND_SUM)
9209 op0 = force_reg (Pmode, op0);
9211 if (GET_CODE (op0) == REG
9212 && ! REG_USERVAR_P (op0))
9213 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9215 #ifdef POINTERS_EXTEND_UNSIGNED
9216 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9217 && mode == ptr_mode)
9218 op0 = convert_memory_address (ptr_mode, op0);
9219 #endif
9221 return op0;
9223 case ENTRY_VALUE_EXPR:
9224 abort ();
9226 /* COMPLEX type for Extended Pascal & Fortran */
9227 case COMPLEX_EXPR:
9229 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9230 rtx insns;
9232 /* Expand the operands to rtx. */
9233 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9234 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9236 if (! target)
9237 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9239 start_sequence ();
9241 /* Move the real (op0) and imaginary (op1) parts to their location. */
9242 emit_move_insn (gen_realpart (mode, target), op0);
9243 emit_move_insn (gen_imagpart (mode, target), op1);
9245 insns = get_insns ();
9246 end_sequence ();
9248 /* Complex construction should appear as a single unit. */
9249 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9250 each with a separate pseudo as destination.
9251 It's not correct for flow to treat them as a unit. */
9252 if (GET_CODE (target) != CONCAT)
9253 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9254 else
9255 emit_insn (insns);
9257 return target;
9260 case REALPART_EXPR:
9261 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9262 return gen_realpart (mode, op0);
9264 case IMAGPART_EXPR:
9265 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9266 return gen_imagpart (mode, op0);
9268 case CONJ_EXPR:
9270 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9271 rtx imag_t;
9272 rtx insns;
9274 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9276 if (! target)
9277 target = gen_reg_rtx (mode);
9279 start_sequence ();
9281 /* Store the realpart and the negated imagpart to target. */
9282 emit_move_insn (gen_realpart (partmode, target),
9283 gen_realpart (partmode, op0));
9285 imag_t = gen_imagpart (partmode, target);
9286 temp = expand_unop (partmode,
9287 ! unsignedp && flag_trapv
9288 && (GET_MODE_CLASS(partmode) == MODE_INT)
9289 ? negv_optab : neg_optab,
9290 gen_imagpart (partmode, op0), imag_t, 0);
9291 if (temp != imag_t)
9292 emit_move_insn (imag_t, temp);
9294 insns = get_insns ();
9295 end_sequence ();
9297 /* Conjugate should appear as a single unit.
9298 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9299 each with a separate pseudo as destination.
9300 It's not correct for flow to treat them as a unit. */
9301 if (GET_CODE (target) != CONCAT)
9302 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9303 else
9304 emit_insn (insns);
9306 return target;
9309 case TRY_CATCH_EXPR:
9311 tree handler = TREE_OPERAND (exp, 1);
9313 expand_eh_region_start ();
9315 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9317 expand_eh_region_end_cleanup (handler);
9319 return op0;
9322 case TRY_FINALLY_EXPR:
9324 tree try_block = TREE_OPERAND (exp, 0);
9325 tree finally_block = TREE_OPERAND (exp, 1);
9327 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9329 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9330 is not sufficient, so we cannot expand the block twice.
9331 So we play games with GOTO_SUBROUTINE_EXPR to let us
9332 expand the thing only once. */
9333 /* When not optimizing, we go ahead with this form since
9334 (1) user breakpoints operate more predictably without
9335 code duplication, and
9336 (2) we're not running any of the global optimizers
9337 that would explode in time/space with the highly
9338 connected CFG created by the indirect branching. */
9340 rtx finally_label = gen_label_rtx ();
9341 rtx done_label = gen_label_rtx ();
9342 rtx return_link = gen_reg_rtx (Pmode);
9343 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9344 (tree) finally_label, (tree) return_link);
9345 TREE_SIDE_EFFECTS (cleanup) = 1;
9347 /* Start a new binding layer that will keep track of all cleanup
9348 actions to be performed. */
9349 expand_start_bindings (2);
9350 target_temp_slot_level = temp_slot_level;
9352 expand_decl_cleanup (NULL_TREE, cleanup);
9353 op0 = expand_expr (try_block, target, tmode, modifier);
9355 preserve_temp_slots (op0);
9356 expand_end_bindings (NULL_TREE, 0, 0);
9357 emit_jump (done_label);
9358 emit_label (finally_label);
9359 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9360 emit_indirect_jump (return_link);
9361 emit_label (done_label);
9363 else
9365 expand_start_bindings (2);
9366 target_temp_slot_level = temp_slot_level;
9368 expand_decl_cleanup (NULL_TREE, finally_block);
9369 op0 = expand_expr (try_block, target, tmode, modifier);
9371 preserve_temp_slots (op0);
9372 expand_end_bindings (NULL_TREE, 0, 0);
9375 return op0;
9378 case GOTO_SUBROUTINE_EXPR:
9380 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9381 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9382 rtx return_address = gen_label_rtx ();
9383 emit_move_insn (return_link,
9384 gen_rtx_LABEL_REF (Pmode, return_address));
9385 emit_jump (subr);
9386 emit_label (return_address);
9387 return const0_rtx;
9390 case VA_ARG_EXPR:
9391 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9393 case EXC_PTR_EXPR:
9394 return get_exception_pointer (cfun);
9396 case FDESC_EXPR:
9397 /* Function descriptors are not valid except as
9398 initialization constants, and should not be expanded. */
9399 abort ();
9401 default:
9402 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9405 /* Here to do an ordinary binary operator, generating an instruction
9406 from the optab already placed in `this_optab'. */
9407 binop:
9408 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9409 subtarget = 0;
9410 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9411 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9412 binop2:
9413 if (modifier == EXPAND_STACK_PARM)
9414 target = 0;
9415 temp = expand_binop (mode, this_optab, op0, op1, target,
9416 unsignedp, OPTAB_LIB_WIDEN);
9417 if (temp == 0)
9418 abort ();
9419 return temp;
9422 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9423 when applied to the address of EXP produces an address known to be
9424 aligned more than BIGGEST_ALIGNMENT. */
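/* Such an offset typically has the form (-ADDR_OF_EXP) & (ALIGN - 1),
   i.e. the amount that must be added to round the address of EXP up to
   a multiple of ALIGN; ALIGN - 1 is the BIT_AND_EXPR constant that the
   code below checks for. */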
9426 static int
9427 is_aligning_offset (offset, exp)
9428 tree offset;
9429 tree exp;
9431 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9432 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9433 || TREE_CODE (offset) == NOP_EXPR
9434 || TREE_CODE (offset) == CONVERT_EXPR
9435 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9436 offset = TREE_OPERAND (offset, 0);
9438 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9439 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9440 if (TREE_CODE (offset) != BIT_AND_EXPR
9441 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9442 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9443 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9444 return 0;
9446 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9447 It must be NEGATE_EXPR. Then strip any more conversions. */
9448 offset = TREE_OPERAND (offset, 0);
9449 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9450 || TREE_CODE (offset) == NOP_EXPR
9451 || TREE_CODE (offset) == CONVERT_EXPR)
9452 offset = TREE_OPERAND (offset, 0);
9454 if (TREE_CODE (offset) != NEGATE_EXPR)
9455 return 0;
9457 offset = TREE_OPERAND (offset, 0);
9458 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9459 || TREE_CODE (offset) == NOP_EXPR
9460 || TREE_CODE (offset) == CONVERT_EXPR)
9461 offset = TREE_OPERAND (offset, 0);
9463 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9464 whose type is the same as EXP. */
9465 return (TREE_CODE (offset) == ADDR_EXPR
9466 && (TREE_OPERAND (offset, 0) == exp
9467 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9468 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9469 == TREE_TYPE (exp)))));
9472 /* Return the tree node if ARG corresponds to a string constant, or zero
9473 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9474 in bytes within the string that ARG is accessing. The type of the
9475 offset will be `sizetype'. */
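/* For example, for an ARG of the form &"foobar"[0] + i (an ADDR_EXPR of
   a STRING_CST plus an offset), the STRING_CST is returned and
   *PTR_OFFSET is set to i converted to `sizetype'. */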
9477 tree
9478 string_constant (arg, ptr_offset)
9479 tree arg;
9480 tree *ptr_offset;
9482 STRIP_NOPS (arg);
9484 if (TREE_CODE (arg) == ADDR_EXPR
9485 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9487 *ptr_offset = size_zero_node;
9488 return TREE_OPERAND (arg, 0);
9490 else if (TREE_CODE (arg) == PLUS_EXPR)
9492 tree arg0 = TREE_OPERAND (arg, 0);
9493 tree arg1 = TREE_OPERAND (arg, 1);
9495 STRIP_NOPS (arg0);
9496 STRIP_NOPS (arg1);
9498 if (TREE_CODE (arg0) == ADDR_EXPR
9499 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9501 *ptr_offset = convert (sizetype, arg1);
9502 return TREE_OPERAND (arg0, 0);
9504 else if (TREE_CODE (arg1) == ADDR_EXPR
9505 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9507 *ptr_offset = convert (sizetype, arg0);
9508 return TREE_OPERAND (arg1, 0);
9512 return 0;
9515 /* Expand code for a post- or pre- increment or decrement
9516 and return the RTX for the result.
9517 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9519 static rtx
9520 expand_increment (exp, post, ignore)
9521 tree exp;
9522 int post, ignore;
9524 rtx op0, op1;
9525 rtx temp, value;
9526 tree incremented = TREE_OPERAND (exp, 0);
9527 optab this_optab = add_optab;
9528 int icode;
9529 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9530 int op0_is_copy = 0;
9531 int single_insn = 0;
9532 /* 1 means we can't store into OP0 directly,
9533 because it is a subreg narrower than a word,
9534 and we don't dare clobber the rest of the word. */
9535 int bad_subreg = 0;
9537 /* Stabilize any component ref that might need to be
9538 evaluated more than once below. */
9539 if (!post
9540 || TREE_CODE (incremented) == BIT_FIELD_REF
9541 || (TREE_CODE (incremented) == COMPONENT_REF
9542 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9543 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9544 incremented = stabilize_reference (incremented);
9545 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9546 ones into save exprs so that they don't accidentally get evaluated
9547 more than once by the code below. */
9548 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9549 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9550 incremented = save_expr (incremented);
9552 /* Compute the operands as RTX.
9553 Note whether OP0 is the actual lvalue or a copy of it:
9554 I believe it is a copy iff it is a register or subreg
9555 and insns were generated in computing it. */
9557 temp = get_last_insn ();
9558 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9560 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9561 in place but instead must do sign- or zero-extension during assignment,
9562 so we copy it into a new register and let the code below use it as
9563 a copy.
9565 Note that we can safely modify this SUBREG since it is known not to be
9566 shared (it was made by the expand_expr call above). */
9568 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9570 if (post)
9571 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9572 else
9573 bad_subreg = 1;
9575 else if (GET_CODE (op0) == SUBREG
9576 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9578 /* We cannot increment this SUBREG in place. If we are
9579 post-incrementing, get a copy of the old value. Otherwise,
9580 just mark that we cannot increment in place. */
9581 if (post)
9582 op0 = copy_to_reg (op0);
9583 else
9584 bad_subreg = 1;
9587 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9588 && temp != get_last_insn ());
9589 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9591 /* Decide whether incrementing or decrementing. */
9592 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9593 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9594 this_optab = sub_optab;
9596 /* Convert decrement by a constant into a negative increment. */
9597 if (this_optab == sub_optab
9598 && GET_CODE (op1) == CONST_INT)
9600 op1 = GEN_INT (-INTVAL (op1));
9601 this_optab = add_optab;
9604 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9605 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9607 /* For a preincrement, see if we can do this with a single instruction. */
9608 if (!post)
9610 icode = (int) this_optab->handlers[(int) mode].insn_code;
9611 if (icode != (int) CODE_FOR_nothing
9612 /* Make sure that OP0 is valid for operands 0 and 1
9613 of the insn we want to queue. */
9614 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9615 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9616 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9617 single_insn = 1;
9620 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9621 then we cannot just increment OP0. We must therefore contrive to
9622 increment the original value. Then, for postincrement, we can return
9623 OP0 since it is a copy of the old value. For preincrement, expand here
9624 unless we can do it with a single insn.
9626 Likewise if storing directly into OP0 would clobber high bits
9627 we need to preserve (bad_subreg). */
9628 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9630 /* This is the easiest way to increment the value wherever it is.
9631 Problems with multiple evaluation of INCREMENTED are prevented
9632 because either (1) it is a component_ref or preincrement,
9633 in which case it was stabilized above, or (2) it is an array_ref
9634 with constant index in an array in a register, which is
9635 safe to reevaluate. */
9636 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9637 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9638 ? MINUS_EXPR : PLUS_EXPR),
9639 TREE_TYPE (exp),
9640 incremented,
9641 TREE_OPERAND (exp, 1));
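/* INCREMENTED may be wrapped in conversions (e.g. integer promotions);
   strip them here, converting NEWEXP back at each level, so that the
   assignment below stores into the underlying object. */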
9643 while (TREE_CODE (incremented) == NOP_EXPR
9644 || TREE_CODE (incremented) == CONVERT_EXPR)
9646 newexp = convert (TREE_TYPE (incremented), newexp);
9647 incremented = TREE_OPERAND (incremented, 0);
9650 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9651 return post ? op0 : temp;
9654 if (post)
9656 /* We have a true reference to the value in OP0.
9657 If there is an insn to add or subtract in this mode, queue it.
9658 Queueing the increment insn avoids the register shuffling
9659 that often results if we must increment now and first save
9660 the old value for subsequent use. */
9662 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9663 op0 = stabilize (op0);
9664 #endif
9666 icode = (int) this_optab->handlers[(int) mode].insn_code;
9667 if (icode != (int) CODE_FOR_nothing
9668 /* Make sure that OP0 is valid for operands 0 and 1
9669 of the insn we want to queue. */
9670 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9671 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9673 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9674 op1 = force_reg (mode, op1);
9676 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9678 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9680 rtx addr = (general_operand (XEXP (op0, 0), mode)
9681 ? force_reg (Pmode, XEXP (op0, 0))
9682 : copy_to_reg (XEXP (op0, 0)));
9683 rtx temp, result;
9685 op0 = replace_equiv_address (op0, addr);
9686 temp = force_reg (GET_MODE (op0), op0);
9687 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9688 op1 = force_reg (mode, op1);
9690 /* The increment queue is LIFO, thus we have to `queue'
9691 the instructions in reverse order. */
9692 enqueue_insn (op0, gen_move_insn (op0, temp));
9693 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9694 return result;
9698 /* Preincrement, or we can't increment with one simple insn. */
9699 if (post)
9700 /* Save a copy of the value before inc or dec, to return it later. */
9701 temp = value = copy_to_reg (op0);
9702 else
9703 /* Arrange to return the incremented value. */
9704 /* Copy the rtx because expand_binop will protect from the queue,
9705 and the results of that would be invalid for us to return
9706 if our caller does emit_queue before using our result. */
9707 temp = copy_rtx (value = op0);
9709 /* Increment however we can. */
9710 op1 = expand_binop (mode, this_optab, value, op1, op0,
9711 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9713 /* Make sure the value is stored into OP0. */
9714 if (op1 != op0)
9715 emit_move_insn (op0, op1);
9717 return temp;
9720 /* At the start of a function, record that we have no previously-pushed
9721 arguments waiting to be popped. */
9723 void
9724 init_pending_stack_adjust ()
9726 pending_stack_adjust = 0;
9729 /* When exiting from a function, if safe, clear out any pending stack adjust
9730 so the adjustment won't get done.
9732 Note, if the current function calls alloca, then it must have a
9733 frame pointer regardless of the value of flag_omit_frame_pointer. */
9735 void
9736 clear_pending_stack_adjust ()
9738 #ifdef EXIT_IGNORE_STACK
9739 if (optimize > 0
9740 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9741 && EXIT_IGNORE_STACK
9742 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9743 && ! flag_inline_functions)
9745 stack_pointer_delta -= pending_stack_adjust,
9746 pending_stack_adjust = 0;
9748 #endif
9751 /* Pop any previously-pushed arguments that have not been popped yet. */
9753 void
9754 do_pending_stack_adjust ()
9756 if (inhibit_defer_pop == 0)
9758 if (pending_stack_adjust != 0)
9759 adjust_stack (GEN_INT (pending_stack_adjust));
9760 pending_stack_adjust = 0;
9764 /* Expand conditional expressions. */
9766 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9767 LABEL is an rtx of code CODE_LABEL, in this function and all the
9768 functions here. */
9770 void
9771 jumpifnot (exp, label)
9772 tree exp;
9773 rtx label;
9775 do_jump (exp, label, NULL_RTX);
9778 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9780 void
9781 jumpif (exp, label)
9782 tree exp;
9783 rtx label;
9785 do_jump (exp, NULL_RTX, label);
9788 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9789 the result is zero, or IF_TRUE_LABEL if the result is one.
9790 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9791 meaning fall through in that case.
9793 do_jump always does any pending stack adjust except when it does not
9794 actually perform a jump. An example where there is no jump
9795 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9797 This function is responsible for optimizing cases such as
9798 &&, || and comparison operators in EXP. */
9800 void
9801 do_jump (exp, if_false_label, if_true_label)
9802 tree exp;
9803 rtx if_false_label, if_true_label;
9805 enum tree_code code = TREE_CODE (exp);
9806 /* Some cases need to create a label to jump to
9807 in order to properly fall through.
9808 These cases set DROP_THROUGH_LABEL nonzero. */
9809 rtx drop_through_label = 0;
9810 rtx temp;
9811 int i;
9812 tree type;
9813 enum machine_mode mode;
9815 #ifdef MAX_INTEGER_COMPUTATION_MODE
9816 check_max_integer_computation_mode (exp);
9817 #endif
9819 emit_queue ();
9821 switch (code)
9823 case ERROR_MARK:
9824 break;
9826 case INTEGER_CST:
9827 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9828 if (temp)
9829 emit_jump (temp);
9830 break;
9832 #if 0
9833 /* This is not true with #pragma weak */
9834 case ADDR_EXPR:
9835 /* The address of something can never be zero. */
9836 if (if_true_label)
9837 emit_jump (if_true_label);
9838 break;
9839 #endif
9841 case NOP_EXPR:
9842 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9843 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9844 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9845 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9846 goto normal;
9847 case CONVERT_EXPR:
9848 /* If we are narrowing the operand, we have to do the compare in the
9849 narrower mode. */
9850 if ((TYPE_PRECISION (TREE_TYPE (exp))
9851 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9852 goto normal;
9853 case NON_LVALUE_EXPR:
9854 case REFERENCE_EXPR:
9855 case ABS_EXPR:
9856 case NEGATE_EXPR:
9857 case LROTATE_EXPR:
9858 case RROTATE_EXPR:
9859 /* These cannot change zero->nonzero or vice versa. */
9860 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9861 break;
9863 case WITH_RECORD_EXPR:
9864 /* Put the object on the placeholder list, recurse through our first
9865 operand, and pop the list. */
9866 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9867 placeholder_list);
9868 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9869 placeholder_list = TREE_CHAIN (placeholder_list);
9870 break;
9872 #if 0
9873 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9874 a test, and can be longer if the test is eliminated. */
9875 case PLUS_EXPR:
9876 /* Reduce to minus. */
9877 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9878 TREE_OPERAND (exp, 0),
9879 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9880 TREE_OPERAND (exp, 1))));
9881 /* Process as MINUS. */
9882 #endif
9884 case MINUS_EXPR:
9885 /* Nonzero iff operands of minus differ. */
9886 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9887 TREE_OPERAND (exp, 0),
9888 TREE_OPERAND (exp, 1)),
9889 NE, NE, if_false_label, if_true_label);
9890 break;
9892 case BIT_AND_EXPR:
9893 /* If we are AND'ing with a small constant, do this comparison in the
9894 smallest type that fits. If the machine doesn't have comparisons
9895 that small, it will be converted back to the wider comparison.
9896 This helps if we are testing the sign bit of a narrower object.
9897 combine can't do this for us because it can't know whether a
9898 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
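/* For example, a test of (x & 0x80) on a 32-bit X can be narrowed to a
   QImode comparison, assuming the target provides a QImode compare. */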
9900 if (! SLOW_BYTE_ACCESS
9901 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9902 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9903 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9904 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9905 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9906 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9907 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9908 != CODE_FOR_nothing))
9910 do_jump (convert (type, exp), if_false_label, if_true_label);
9911 break;
9913 goto normal;
9915 case TRUTH_NOT_EXPR:
9916 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9917 break;
9919 case TRUTH_ANDIF_EXPR:
9920 if (if_false_label == 0)
9921 if_false_label = drop_through_label = gen_label_rtx ();
9922 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9923 start_cleanup_deferral ();
9924 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9925 end_cleanup_deferral ();
9926 break;
9928 case TRUTH_ORIF_EXPR:
9929 if (if_true_label == 0)
9930 if_true_label = drop_through_label = gen_label_rtx ();
9931 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9932 start_cleanup_deferral ();
9933 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9934 end_cleanup_deferral ();
9935 break;
9937 case COMPOUND_EXPR:
9938 push_temp_slots ();
9939 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9940 preserve_temp_slots (NULL_RTX);
9941 free_temp_slots ();
9942 pop_temp_slots ();
9943 emit_queue ();
9944 do_pending_stack_adjust ();
9945 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9946 break;
9948 case COMPONENT_REF:
9949 case BIT_FIELD_REF:
9950 case ARRAY_REF:
9951 case ARRAY_RANGE_REF:
9953 HOST_WIDE_INT bitsize, bitpos;
9954 int unsignedp;
9955 enum machine_mode mode;
9956 tree type;
9957 tree offset;
9958 int volatilep = 0;
9960 /* Get description of this reference. We don't actually care
9961 about the underlying object here. */
9962 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9963 &unsignedp, &volatilep);
9965 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9966 if (! SLOW_BYTE_ACCESS
9967 && type != 0 && bitsize >= 0
9968 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9969 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9970 != CODE_FOR_nothing))
9972 do_jump (convert (type, exp), if_false_label, if_true_label);
9973 break;
9975 goto normal;
9978 case COND_EXPR:
9979 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9980 if (integer_onep (TREE_OPERAND (exp, 1))
9981 && integer_zerop (TREE_OPERAND (exp, 2)))
9982 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9984 else if (integer_zerop (TREE_OPERAND (exp, 1))
9985 && integer_onep (TREE_OPERAND (exp, 2)))
9986 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9988 else
9990 rtx label1 = gen_label_rtx ();
9991 drop_through_label = gen_label_rtx ();
9993 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9995 start_cleanup_deferral ();
9996 /* Now the THEN-expression. */
9997 do_jump (TREE_OPERAND (exp, 1),
9998 if_false_label ? if_false_label : drop_through_label,
9999 if_true_label ? if_true_label : drop_through_label);
10000 /* In case the do_jump just above never jumps. */
10001 do_pending_stack_adjust ();
10002 emit_label (label1);
10004 /* Now the ELSE-expression. */
10005 do_jump (TREE_OPERAND (exp, 2),
10006 if_false_label ? if_false_label : drop_through_label,
10007 if_true_label ? if_true_label : drop_through_label);
10008 end_cleanup_deferral ();
10010 break;
10012 case EQ_EXPR:
10014 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10016 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10017 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10019 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10020 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10021 do_jump
10022 (fold
10023 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10024 fold (build (EQ_EXPR, TREE_TYPE (exp),
10025 fold (build1 (REALPART_EXPR,
10026 TREE_TYPE (inner_type),
10027 exp0)),
10028 fold (build1 (REALPART_EXPR,
10029 TREE_TYPE (inner_type),
10030 exp1)))),
10031 fold (build (EQ_EXPR, TREE_TYPE (exp),
10032 fold (build1 (IMAGPART_EXPR,
10033 TREE_TYPE (inner_type),
10034 exp0)),
10035 fold (build1 (IMAGPART_EXPR,
10036 TREE_TYPE (inner_type),
10037 exp1)))))),
10038 if_false_label, if_true_label);
10041 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10042 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10044 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10045 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
10046 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10047 else
10048 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
10049 break;
10052 case NE_EXPR:
10054 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10056 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10057 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10059 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10060 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10061 do_jump
10062 (fold
10063 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10064 fold (build (NE_EXPR, TREE_TYPE (exp),
10065 fold (build1 (REALPART_EXPR,
10066 TREE_TYPE (inner_type),
10067 exp0)),
10068 fold (build1 (REALPART_EXPR,
10069 TREE_TYPE (inner_type),
10070 exp1)))),
10071 fold (build (NE_EXPR, TREE_TYPE (exp),
10072 fold (build1 (IMAGPART_EXPR,
10073 TREE_TYPE (inner_type),
10074 exp0)),
10075 fold (build1 (IMAGPART_EXPR,
10076 TREE_TYPE (inner_type),
10077 exp1)))))),
10078 if_false_label, if_true_label);
10081 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10082 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10084 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10085 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
10086 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10087 else
10088 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
10089 break;
10092 case LT_EXPR:
10093 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10094 if (GET_MODE_CLASS (mode) == MODE_INT
10095 && ! can_compare_p (LT, mode, ccp_jump))
10096 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10097 else
10098 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
10099 break;
10101 case LE_EXPR:
10102 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10103 if (GET_MODE_CLASS (mode) == MODE_INT
10104 && ! can_compare_p (LE, mode, ccp_jump))
10105 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10106 else
10107 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
10108 break;
10110 case GT_EXPR:
10111 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10112 if (GET_MODE_CLASS (mode) == MODE_INT
10113 && ! can_compare_p (GT, mode, ccp_jump))
10114 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10115 else
10116 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
10117 break;
10119 case GE_EXPR:
10120 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10121 if (GET_MODE_CLASS (mode) == MODE_INT
10122 && ! can_compare_p (GE, mode, ccp_jump))
10123 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10124 else
10125 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
10126 break;
10128 case UNORDERED_EXPR:
10129 case ORDERED_EXPR:
10131 enum rtx_code cmp, rcmp;
10132 int do_rev;
10134 if (code == UNORDERED_EXPR)
10135 cmp = UNORDERED, rcmp = ORDERED;
10136 else
10137 cmp = ORDERED, rcmp = UNORDERED;
10138 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10140 do_rev = 0;
10141 if (! can_compare_p (cmp, mode, ccp_jump)
10142 && (can_compare_p (rcmp, mode, ccp_jump)
10143 /* If the target doesn't provide either UNORDERED or ORDERED
10144 comparisons, canonicalize on UNORDERED for the library. */
10145 || rcmp == UNORDERED))
10146 do_rev = 1;
10148 if (! do_rev)
10149 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10150 else
10151 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10153 break;
10156 enum rtx_code rcode1;
10157 enum tree_code tcode2;
10159 case UNLT_EXPR:
10160 rcode1 = UNLT;
10161 tcode2 = LT_EXPR;
10162 goto unordered_bcc;
10163 case UNLE_EXPR:
10164 rcode1 = UNLE;
10165 tcode2 = LE_EXPR;
10166 goto unordered_bcc;
10167 case UNGT_EXPR:
10168 rcode1 = UNGT;
10169 tcode2 = GT_EXPR;
10170 goto unordered_bcc;
10171 case UNGE_EXPR:
10172 rcode1 = UNGE;
10173 tcode2 = GE_EXPR;
10174 goto unordered_bcc;
10175 case UNEQ_EXPR:
10176 rcode1 = UNEQ;
10177 tcode2 = EQ_EXPR;
10178 goto unordered_bcc;
10180 unordered_bcc:
10181 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10182 if (can_compare_p (rcode1, mode, ccp_jump))
10183 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10184 if_true_label);
10185 else
10187 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10188 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10189 tree cmp0, cmp1;
10191 /* If the target doesn't support combined unordered
10192 compares, decompose into UNORDERED + comparison. */
10193 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10194 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10195 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10196 do_jump (exp, if_false_label, if_true_label);
10199 break;
10201 /* Special case:
10202 __builtin_expect (<test>, 0) and
10203 __builtin_expect (<test>, 1)
10205 We need to do this here, so that <test> is not converted to a SCC
10206 operation on machines that use condition code registers and COMPARE
10207 like the PowerPC, and then the jump is done based on whether the SCC
10208 operation produced a 1 or 0. */
10209 case CALL_EXPR:
10210 /* Check for a built-in function. */
10211 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10213 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10214 tree arglist = TREE_OPERAND (exp, 1);
10216 if (TREE_CODE (fndecl) == FUNCTION_DECL
10217 && DECL_BUILT_IN (fndecl)
10218 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10219 && arglist != NULL_TREE
10220 && TREE_CHAIN (arglist) != NULL_TREE)
10222 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10223 if_true_label);
10225 if (seq != NULL_RTX)
10227 emit_insn (seq);
10228 return;
10232 /* fall through and generate the normal code. */
10234 default:
10235 normal:
10236 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10237 #if 0
10238 /* This is not needed any more and causes poor code since it causes
10239 comparisons and tests from non-SI objects to have different code
10240 sequences. */
10241 /* Copy to register to avoid generating bad insns by cse
10242 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10243 if (!cse_not_expected && GET_CODE (temp) == MEM)
10244 temp = copy_to_reg (temp);
10245 #endif
10246 do_pending_stack_adjust ();
10247 /* Do any postincrements in the expression that was tested. */
10248 emit_queue ();
10250 if (GET_CODE (temp) == CONST_INT
10251 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10252 || GET_CODE (temp) == LABEL_REF)
10254 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10255 if (target)
10256 emit_jump (target);
10258 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10259 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10260 /* Note swapping the labels gives us not-equal. */
10261 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10262 else if (GET_MODE (temp) != VOIDmode)
10263 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10264 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10265 GET_MODE (temp), NULL_RTX,
10266 if_false_label, if_true_label);
10267 else
10268 abort ();
10271 if (drop_through_label)
10273 /* If do_jump produces code that might be jumped around,
10274 do any stack adjusts from that code, before the place
10275 where control merges in. */
10276 do_pending_stack_adjust ();
10277 emit_label (drop_through_label);
10281 /* Given a comparison expression EXP for values too wide to be compared
10282 with one insn, test the comparison and jump to the appropriate label.
10283 The code of EXP is ignored; we always test GT if SWAP is 0,
10284 and LT if SWAP is 1. */
10286 static void
10287 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10288 tree exp;
10289 int swap;
10290 rtx if_false_label, if_true_label;
10292 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10293 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10294 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10295 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10297 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10300 /* Compare OP0 with OP1, word at a time, in mode MODE.
10301 UNSIGNEDP says to do unsigned comparison.
10302 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10304 void
10305 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10306 enum machine_mode mode;
10307 int unsignedp;
10308 rtx op0, op1;
10309 rtx if_false_label, if_true_label;
10311 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10312 rtx drop_through_label = 0;
10313 int i;
10315 if (! if_true_label || ! if_false_label)
10316 drop_through_label = gen_label_rtx ();
10317 if (! if_true_label)
10318 if_true_label = drop_through_label;
10319 if (! if_false_label)
10320 if_false_label = drop_through_label;
10322 /* Compare a word at a time, high order first. */
10323 for (i = 0; i < nwords; i++)
10325 rtx op0_word, op1_word;
10327 if (WORDS_BIG_ENDIAN)
10329 op0_word = operand_subword_force (op0, i, mode);
10330 op1_word = operand_subword_force (op1, i, mode);
10332 else
10334 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10335 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10338 /* All but the high-order word must be compared as unsigned. */
10339 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10340 (unsignedp || i > 0), word_mode, NULL_RTX,
10341 NULL_RTX, if_true_label);
10343 /* Consider lower words only if these are equal. */
10344 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10345 NULL_RTX, NULL_RTX, if_false_label);
10348 if (if_false_label)
10349 emit_jump (if_false_label);
10350 if (drop_through_label)
10351 emit_label (drop_through_label);
10354 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10355 with one insn, test the comparison and jump to the appropriate label. */
10357 static void
10358 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10359 tree exp;
10360 rtx if_false_label, if_true_label;
10362 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10363 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10364 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10365 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10366 int i;
10367 rtx drop_through_label = 0;
10369 if (! if_false_label)
10370 drop_through_label = if_false_label = gen_label_rtx ();
10372 for (i = 0; i < nwords; i++)
10373 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10374 operand_subword_force (op1, i, mode),
10375 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10376 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10378 if (if_true_label)
10379 emit_jump (if_true_label);
10380 if (drop_through_label)
10381 emit_label (drop_through_label);
10384 /* Jump according to whether OP0 is 0.
10385 We assume that OP0 has an integer mode that is too wide
10386 for the available compare insns. */
10388 void
10389 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10390 rtx op0;
10391 rtx if_false_label, if_true_label;
10393 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10394 rtx part;
10395 int i;
10396 rtx drop_through_label = 0;
10398 /* The fastest way of doing this comparison on almost any machine is to
10399 "or" all the words and compare the result. If all have to be loaded
10400 from memory and this is a very wide item, it's possible this may
10401 be slower, but that's highly unlikely. */
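/* For example, a DImode value on a 32-bit target is tested by OR-ing
   its two word-sized halves and comparing the result against zero. */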
10403 part = gen_reg_rtx (word_mode);
10404 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10405 for (i = 1; i < nwords && part != 0; i++)
10406 part = expand_binop (word_mode, ior_optab, part,
10407 operand_subword_force (op0, i, GET_MODE (op0)),
10408 part, 1, OPTAB_WIDEN);
10410 if (part != 0)
10412 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10413 NULL_RTX, if_false_label, if_true_label);
10415 return;
10418 /* If we couldn't do the "or" simply, do this with a series of compares. */
10419 if (! if_false_label)
10420 drop_through_label = if_false_label = gen_label_rtx ();
10422 for (i = 0; i < nwords; i++)
10423 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10424 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10425 if_false_label, NULL_RTX);
10427 if (if_true_label)
10428 emit_jump (if_true_label);
10430 if (drop_through_label)
10431 emit_label (drop_through_label);
10434 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10435 (including code to compute the values to be compared)
10436 and set (CC0) according to the result.
10437 The decision as to signed or unsigned comparison must be made by the caller.
10439 We force a stack adjustment unless there are currently
10440 things pushed on the stack that aren't yet used.
10442 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10443 compared. */
10446 rtx compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10447 rtx op0, op1;
10448 enum rtx_code code;
10449 int unsignedp;
10450 enum machine_mode mode;
10451 rtx size;
10453 enum rtx_code ucode;
10454 rtx tem;
10456 /* If one operand is constant, make it the second one. Only do this
10457 if the other operand is not constant as well. */
10459 if (swap_commutative_operands_p (op0, op1))
10461 tem = op0;
10462 op0 = op1;
10463 op1 = tem;
10464 code = swap_condition (code);
10467 if (flag_force_mem)
10469 op0 = force_not_mem (op0);
10470 op1 = force_not_mem (op1);
10473 do_pending_stack_adjust ();
10475 ucode = unsignedp ? unsigned_condition (code) : code;
10476 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10477 return tem;
10479 #if 0
10480 /* There's no need to do this now that combine.c can eliminate lots of
10481 sign extensions. This can be less efficient in certain cases on other
10482 machines. */
10484 /* If this is a signed equality comparison, we can do it as an
10485 unsigned comparison since zero-extension is cheaper than sign
10486 extension and comparisons with zero are done as unsigned. This is
10487 the case even on machines that can do fast sign extension, since
10488 zero-extension is easier to combine with other operations than
10489 sign-extension is. If we are comparing against a constant, we must
10490 convert it to what it would look like unsigned. */
10491 if ((code == EQ || code == NE) && ! unsignedp
10492 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10494 if (GET_CODE (op1) == CONST_INT
10495 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10496 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10497 unsignedp = 1;
10499 #endif
10501 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10503 #if HAVE_cc0
10504 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10505 #else
10506 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10507 #endif
10510 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10511 The decision as to signed or unsigned comparison must be made by the caller.
10513 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10514 compared. */
10516 void
10517 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10518 if_false_label, if_true_label)
10519 rtx op0, op1;
10520 enum rtx_code code;
10521 int unsignedp;
10522 enum machine_mode mode;
10523 rtx size;
10524 rtx if_false_label, if_true_label;
10526 enum rtx_code ucode;
10527 rtx tem;
10528 int dummy_true_label = 0;
10530 /* Reverse the comparison if that is safe and we want to jump if it is
10531 false. */
10532 if (! if_true_label && ! FLOAT_MODE_P (mode))
10534 if_true_label = if_false_label;
10535 if_false_label = 0;
10536 code = reverse_condition (code);
10539 /* If one operand is constant, make it the second one. Only do this
10540 if the other operand is not constant as well. */
10542 if (swap_commutative_operands_p (op0, op1))
10544 tem = op0;
10545 op0 = op1;
10546 op1 = tem;
10547 code = swap_condition (code);
10550 if (flag_force_mem)
10552 op0 = force_not_mem (op0);
10553 op1 = force_not_mem (op1);
10556 do_pending_stack_adjust ();
10558 ucode = unsignedp ? unsigned_condition (code) : code;
10559 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10561 if (tem == const_true_rtx)
10563 if (if_true_label)
10564 emit_jump (if_true_label);
10566 else
10568 if (if_false_label)
10569 emit_jump (if_false_label);
10571 return;
10574 #if 0
10575 /* There's no need to do this now that combine.c can eliminate lots of
10576 sign extensions. This can be less efficient in certain cases on other
10577 machines. */
10579 /* If this is a signed equality comparison, we can do it as an
10580 unsigned comparison since zero-extension is cheaper than sign
10581 extension and comparisons with zero are done as unsigned. This is
10582 the case even on machines that can do fast sign extension, since
10583 zero-extension is easier to combine with other operations than
10584 sign-extension is. If we are comparing against a constant, we must
10585 convert it to what it would look like unsigned. */
10586 if ((code == EQ || code == NE) && ! unsignedp
10587 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10589 if (GET_CODE (op1) == CONST_INT
10590 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10591 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10592 unsignedp = 1;
10594 #endif
10596 if (! if_true_label)
10598 dummy_true_label = 1;
10599 if_true_label = gen_label_rtx ();
10602 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10603 if_true_label);
10605 if (if_false_label)
10606 emit_jump (if_false_label);
10607 if (dummy_true_label)
10608 emit_label (if_true_label);
10611 /* Generate code for a comparison expression EXP (including code to compute
10612 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10613 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10614 generated code will drop through.
10615 SIGNED_CODE should be the rtx operation for this comparison for
10616 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10618 We force a stack adjustment unless there are currently
10619 things pushed on the stack that aren't yet used. */
10621 static void
10622 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10623 if_true_label)
10624 tree exp;
10625 enum rtx_code signed_code, unsigned_code;
10626 rtx if_false_label, if_true_label;
10628 rtx op0, op1;
10629 tree type;
10630 enum machine_mode mode;
10631 int unsignedp;
10632 enum rtx_code code;
10634 /* Don't crash if the comparison was erroneous. */
10635 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10636 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10637 return;
10639 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10640 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10641 return;
10643 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10644 mode = TYPE_MODE (type);
10645 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10646 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10647 || (GET_MODE_BITSIZE (mode)
10648 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10649 1)))))))
10651 /* op0 might have been replaced by a promoted constant, in which
10652 case the type of the second argument should be used. */
10653 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10654 mode = TYPE_MODE (type);
10656 unsignedp = TREE_UNSIGNED (type);
10657 code = unsignedp ? unsigned_code : signed_code;
10659 #ifdef HAVE_canonicalize_funcptr_for_compare
10660 /* If function pointers need to be "canonicalized" before they can
10661 be reliably compared, then canonicalize them. */
10662 if (HAVE_canonicalize_funcptr_for_compare
10663 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10664 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10665 == FUNCTION_TYPE))
10667 rtx new_op0 = gen_reg_rtx (mode);
10669 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10670 op0 = new_op0;
10673 if (HAVE_canonicalize_funcptr_for_compare
10674 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10675 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10676 == FUNCTION_TYPE))
10678 rtx new_op1 = gen_reg_rtx (mode);
10680 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10681 op1 = new_op1;
10683 #endif
10685 /* Do any postincrements in the expression that was tested. */
10686 emit_queue ();
10688 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10689 ((mode == BLKmode)
10690 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10691 if_false_label, if_true_label);
10694 /* Generate code to calculate EXP using a store-flag instruction
10695 and return an rtx for the result. EXP is either a comparison
10696 or a TRUTH_NOT_EXPR whose operand is a comparison.
10698 If TARGET is nonzero, store the result there if convenient.
10700 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10701 cheap.
10703 Return zero if there is no suitable set-flag instruction
10704 available on this machine.
10706 Once expand_expr has been called on the arguments of the comparison,
10707 we are committed to doing the store flag, since it is not safe to
10708 re-evaluate the expression. We emit the store-flag insn by calling
10709 emit_store_flag, but only expand the arguments if we have a reason
10710 to believe that emit_store_flag will be successful. If we think that
10711 it will, but it isn't, we have to simulate the store-flag with a
10712 set/jump/set sequence. */
10714 static rtx
10715 do_store_flag (exp, target, mode, only_cheap)
10716 tree exp;
10717 rtx target;
10718 enum machine_mode mode;
10719 int only_cheap;
10721 enum rtx_code code;
10722 tree arg0, arg1, type;
10723 tree tem;
10724 enum machine_mode operand_mode;
10725 int invert = 0;
10726 int unsignedp;
10727 rtx op0, op1;
10728 enum insn_code icode;
10729 rtx subtarget = target;
10730 rtx result, label;
10732 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10733 result at the end. We can't simply invert the test since it would
10734 have already been inverted if it were valid. This case occurs for
10735 some floating-point comparisons. */
10737 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10738 invert = 1, exp = TREE_OPERAND (exp, 0);
10740 arg0 = TREE_OPERAND (exp, 0);
10741 arg1 = TREE_OPERAND (exp, 1);
10743 /* Don't crash if the comparison was erroneous. */
10744 if (arg0 == error_mark_node || arg1 == error_mark_node)
10745 return const0_rtx;
10747 type = TREE_TYPE (arg0);
10748 operand_mode = TYPE_MODE (type);
10749 unsignedp = TREE_UNSIGNED (type);
10751 /* We won't bother with BLKmode store-flag operations because it would mean
10752 passing a lot of information to emit_store_flag. */
10753 if (operand_mode == BLKmode)
10754 return 0;
10756 /* We won't bother with store-flag operations involving function pointers
10757 when function pointers must be canonicalized before comparisons. */
10758 #ifdef HAVE_canonicalize_funcptr_for_compare
10759 if (HAVE_canonicalize_funcptr_for_compare
10760 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10761 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10762 == FUNCTION_TYPE))
10763 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10764 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10765 == FUNCTION_TYPE))))
10766 return 0;
10767 #endif
10769 STRIP_NOPS (arg0);
10770 STRIP_NOPS (arg1);
10772 /* Get the rtx comparison code to use. We know that EXP is a comparison
10773 operation of some type. Some comparisons against 1 and -1 can be
10774 converted to comparisons with zero. Do so here so that the tests
10775 below will be aware that we have a comparison with zero. These
10776 tests will not catch constants in the first operand, but constants
10777 are rarely passed as the first operand. */
10779 switch (TREE_CODE (exp))
10781 case EQ_EXPR:
10782 code = EQ;
10783 break;
10784 case NE_EXPR:
10785 code = NE;
10786 break;
10787 case LT_EXPR:
10788 if (integer_onep (arg1))
10789 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10790 else
10791 code = unsignedp ? LTU : LT;
10792 break;
10793 case LE_EXPR:
10794 if (! unsignedp && integer_all_onesp (arg1))
10795 arg1 = integer_zero_node, code = LT;
10796 else
10797 code = unsignedp ? LEU : LE;
10798 break;
10799 case GT_EXPR:
10800 if (! unsignedp && integer_all_onesp (arg1))
10801 arg1 = integer_zero_node, code = GE;
10802 else
10803 code = unsignedp ? GTU : GT;
10804 break;
10805 case GE_EXPR:
10806 if (integer_onep (arg1))
10807 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10808 else
10809 code = unsignedp ? GEU : GE;
10810 break;
10812 case UNORDERED_EXPR:
10813 code = UNORDERED;
10814 break;
10815 case ORDERED_EXPR:
10816 code = ORDERED;
10817 break;
10818 case UNLT_EXPR:
10819 code = UNLT;
10820 break;
10821 case UNLE_EXPR:
10822 code = UNLE;
10823 break;
10824 case UNGT_EXPR:
10825 code = UNGT;
10826 break;
10827 case UNGE_EXPR:
10828 code = UNGE;
10829 break;
10830 case UNEQ_EXPR:
10831 code = UNEQ;
10832 break;
10834 default:
10835 abort ();
10838 /* Put a constant second. */
10839 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10841 tem = arg0; arg0 = arg1; arg1 = tem;
10842 code = swap_condition (code);
10845 /* If this is an equality or inequality test of a single bit, we can
10846 do this by shifting the bit being tested to the low-order bit and
10847 masking the result with the constant 1. If the condition was EQ,
10848 we xor it with 1. This does not require an scc insn and is faster
10849 than an scc insn even if we have it. */
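/* For example, (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
   becomes ((x >> 3) ^ 1) & 1. */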
10851 if ((code == NE || code == EQ)
10852 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10853 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10855 tree inner = TREE_OPERAND (arg0, 0);
10856 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10857 int ops_unsignedp;
10859 /* If INNER is a right shift of a constant and it plus BITNUM does
10860 not overflow, adjust BITNUM and INNER. */
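/* Illustration: for "((x >> 3) & 4) != 0" the inner shift is folded
   away, leaving INNER = x and BITNUM = 2 + 3 = 5, i.e. a test of bit 5
   of x, as long as BITNUM plus the shift count stays within the
   precision of the type.  */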
10862 if (TREE_CODE (inner) == RSHIFT_EXPR
10863 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10864 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10865 && bitnum < TYPE_PRECISION (type)
10866 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10867 bitnum - TYPE_PRECISION (type)))
10868 {
10869 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10870 inner = TREE_OPERAND (inner, 0);
10871 }
10873 /* If we are going to be able to omit the AND below, we must do our
10874 operations as unsigned. If we must use the AND, we have a choice.
10875 Normally unsigned is faster, but for some machines signed is. */
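/* For instance, when the bit being tested is the sign bit
   (BITNUM == TYPE_PRECISION (type) - 1), a logical (unsigned) right
   shift by BITNUM already leaves just 0 or 1, so the final AND below
   can be omitted, provided the shift is done unsigned.  */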
10876 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10877 #ifdef LOAD_EXTEND_OP
10878 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10879 #else
10880 : 1
10881 #endif
10882 );
10884 if (! get_subtarget (subtarget)
10885 || GET_MODE (subtarget) != operand_mode
10886 || ! safe_from_p (subtarget, inner, 1))
10887 subtarget = 0;
10889 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10891 if (bitnum != 0)
10892 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10893 size_int (bitnum), subtarget, ops_unsignedp);
10895 if (GET_MODE (op0) != mode)
10896 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10898 if ((code == EQ && ! invert) || (code == NE && invert))
10899 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10900 ops_unsignedp, OPTAB_LIB_WIDEN);
10902 /* Put the AND last so it can combine with more things. */
10903 if (bitnum != TYPE_PRECISION (type) - 1)
10904 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10906 return op0;
10907 }
10909 /* Now see if we are likely to be able to do this. Return if not. */
10910 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10911 return 0;
10913 icode = setcc_gen_code[(int) code];
10914 if (icode == CODE_FOR_nothing
10915 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10916 {
10917 /* We can only do this if it is one of the special cases that
10918 can be handled without an scc insn. */
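/* For example, "x < 0" on a signed operand can be computed by shifting
   the sign bit down to bit 0, and an equality test can sometimes be
   synthesized from an abs or ffs instruction, which is what the
   conditions below check for.  */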
10919 if ((code == LT && integer_zerop (arg1))
10920 || (! only_cheap && code == GE && integer_zerop (arg1)))
10921 ;
10922 else if (BRANCH_COST >= 0
10923 && ! only_cheap && (code == NE || code == EQ)
10924 && TREE_CODE (type) != REAL_TYPE
10925 && ((abs_optab->handlers[(int) operand_mode].insn_code
10926 != CODE_FOR_nothing)
10927 || (ffs_optab->handlers[(int) operand_mode].insn_code
10928 != CODE_FOR_nothing)))
10929 ;
10930 else
10931 return 0;
10932 }
10934 if (! get_subtarget (target)
10935 || GET_MODE (subtarget) != operand_mode
10936 || ! safe_from_p (subtarget, arg1, 1))
10937 subtarget = 0;
10939 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10940 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10942 if (target == 0)
10943 target = gen_reg_rtx (mode);
10945 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10946 because, if the emit_store_flag does anything it will succeed and
10947 OP0 and OP1 will not be used subsequently. */
10949 result = emit_store_flag (target, code,
10950 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10951 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10952 operand_mode, unsignedp, 1);
10954 if (result)
10955 {
10956 if (invert)
10957 result = expand_binop (mode, xor_optab, result, const1_rtx,
10958 result, 0, OPTAB_LIB_WIDEN);
10959 return result;
10960 }
10962 /* If this failed, we have to do this with set/compare/jump/set code. */
10963 if (GET_CODE (target) != REG
10964 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10965 target = gen_reg_rtx (GET_MODE (target));
10967 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10968 result = compare_from_rtx (op0, op1, code, unsignedp,
10969 operand_mode, NULL_RTX);
10970 if (GET_CODE (result) == CONST_INT)
10971 return (((result == const0_rtx && ! invert)
10972 || (result != const0_rtx && invert))
10973 ? const0_rtx : const1_rtx);
10975 /* The code of RESULT may not match CODE if compare_from_rtx
10976 decided to swap its operands and reverse the original code.
10978 We know that compare_from_rtx returns either a CONST_INT or
10979 a new comparison code, so it is safe to just extract the
10980 code from RESULT. */
10981 code = GET_CODE (result);
10983 label = gen_label_rtx ();
10984 if (bcc_gen_fctn[(int) code] == 0)
10985 abort ();
10987 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10988 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10989 emit_label (label);
10991 return target;
10992 }
10995 /* Stubs in case we haven't got a casesi insn. */
10996 #ifndef HAVE_casesi
10997 # define HAVE_casesi 0
10998 # define gen_casesi(a, b, c, d, e) (0)
10999 # define CODE_FOR_casesi CODE_FOR_nothing
11000 #endif
11002 /* If the machine does not have a case insn that compares the bounds,
11003 this means extra overhead for dispatch tables, which raises the
11004 threshold for using them. */
11005 #ifndef CASE_VALUES_THRESHOLD
11006 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
11007 #endif /* CASE_VALUES_THRESHOLD */
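/* With these defaults a switch on a target with a casesi pattern needs
   roughly four or more distinct case values before a dispatch table is
   used, and five or more when the bounds check must be emitted as
   separate compare-and-branch instructions.  */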
11009 unsigned int
11010 case_values_threshold ()
11011 {
11012 return CASE_VALUES_THRESHOLD;
11013 }
11015 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11016 0 otherwise (i.e. if there is no casesi instruction). */
11017 int
11018 try_casesi (index_type, index_expr, minval, range,
11019 table_label, default_label)
11020 tree index_type, index_expr, minval, range;
11021 rtx table_label ATTRIBUTE_UNUSED;
11022 rtx default_label;
11023 {
11024 enum machine_mode index_mode = SImode;
11025 int index_bits = GET_MODE_BITSIZE (index_mode);
11026 rtx op1, op2, index;
11027 enum machine_mode op_mode;
11029 if (! HAVE_casesi)
11030 return 0;
11032 /* Convert the index to SImode. */
11033 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11034 {
11035 enum machine_mode omode = TYPE_MODE (index_type);
11036 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
11038 /* We must handle the endpoints in the original mode. */
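/* For instance, with a 64-bit INDEX_TYPE the subtraction of MINVAL and
   the range check below are done in DImode, jumping to DEFAULT_LABEL if
   the value is out of range, and only then is the index truncated to
   SImode for the casesi pattern.  */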
11039 index_expr = build (MINUS_EXPR, index_type,
11040 index_expr, minval);
11041 minval = integer_zero_node;
11042 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11043 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11044 omode, 1, default_label);
11045 /* Now we can safely truncate. */
11046 index = convert_to_mode (index_mode, index, 0);
11047 }
11048 else
11049 {
11050 if (TYPE_MODE (index_type) != index_mode)
11051 {
11052 index_expr = convert ((*lang_hooks.types.type_for_size)
11053 (index_bits, 0), index_expr);
11054 index_type = TREE_TYPE (index_expr);
11055 }
11057 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11058 }
11059 emit_queue ();
11060 index = protect_from_queue (index, 0);
11061 do_pending_stack_adjust ();
11063 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
11064 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
11065 (index, op_mode))
11066 index = copy_to_mode_reg (op_mode, index);
11068 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
11070 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
11071 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
11072 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
11073 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
11074 (op1, op_mode))
11075 op1 = copy_to_mode_reg (op_mode, op1);
11077 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
11079 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
11080 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
11081 op2, TREE_UNSIGNED (TREE_TYPE (range)));
11082 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
11083 (op2, op_mode))
11084 op2 = copy_to_mode_reg (op_mode, op2);
11086 emit_jump_insn (gen_casesi (index, op1, op2,
11087 table_label, default_label));
11088 return 1;
11089 }
11091 /* Attempt to generate a tablejump instruction; same concept. */
11092 #ifndef HAVE_tablejump
11093 #define HAVE_tablejump 0
11094 #define gen_tablejump(x, y) (0)
11095 #endif
11097 /* Subroutine of the next function.
11099 INDEX is the value being switched on, with the lowest value
11100 in the table already subtracted.
11101 MODE is its expected mode (needed if INDEX is constant).
11102 RANGE is the length of the jump table.
11103 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11105 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11106 index value is out of range. */
11108 static void
11109 do_tablejump (index, mode, range, table_label, default_label)
11110 rtx index, range, table_label, default_label;
11111 enum machine_mode mode;
11112 {
11113 rtx temp, vector;
11115 if (INTVAL (range) > cfun->max_jumptable_ents)
11116 cfun->max_jumptable_ents = INTVAL (range);
11118 /* Do an unsigned comparison (in the proper mode) between the index
11119 expression and the value which represents the length of the range.
11120 Since we just finished subtracting the lower bound of the range
11121 from the index expression, this comparison allows us to simultaneously
11122 check that the original index expression value is both greater than
11123 or equal to the minimum value of the range and less than or equal to
11124 the maximum value of the range. */
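/* Worked example: for case values 5 through 10, INDEX holds i - 5 and
   RANGE is 5.  If i was below 5 the subtraction wrapped around to a
   huge unsigned value, so the single unsigned test "INDEX > RANGE"
   rejects both i < 5 and i > 10.  */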
11126 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11127 default_label);
11129 /* If index is in range, it must fit in Pmode.
11130 Convert to Pmode so we can index with it. */
11131 if (mode != Pmode)
11132 index = convert_to_mode (Pmode, index, 1);
11134 /* Don't let a MEM slip thru, because then INDEX that comes
11135 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11136 and break_out_memory_refs will go to work on it and mess it up. */
11137 #ifdef PIC_CASE_VECTOR_ADDRESS
11138 if (flag_pic && GET_CODE (index) != REG)
11139 index = copy_to_mode_reg (Pmode, index);
11140 #endif
11142 /* If flag_force_addr were to affect this address
11143 it could interfere with the tricky assumptions made
11144 about addresses that contain label-refs,
11145 which may be valid only very near the tablejump itself. */
11146 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11147 GET_MODE_SIZE, because this indicates how large insns are. The other
11148 uses should all be Pmode, because they are addresses. This code
11149 could fail if addresses and insns are not the same size. */
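/* On a target whose case-vector entries are 4 bytes wide this builds
   (plus (mult index (const_int 4)) (label_ref table_label)), i.e.
   table_label + index * 4; the PIC variant below may rewrite it.  */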
11150 index = gen_rtx_PLUS (Pmode,
11151 gen_rtx_MULT (Pmode, index,
11152 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11153 gen_rtx_LABEL_REF (Pmode, table_label));
11154 #ifdef PIC_CASE_VECTOR_ADDRESS
11155 if (flag_pic)
11156 index = PIC_CASE_VECTOR_ADDRESS (index);
11157 else
11158 #endif
11159 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11160 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11161 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11162 RTX_UNCHANGING_P (vector) = 1;
11163 convert_move (temp, vector, 0);
11165 emit_jump_insn (gen_tablejump (temp, table_label));
11167 /* If we are generating PIC code or if the table is PC-relative, the
11168 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11169 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11170 emit_barrier ();
11171 }
11173 int
11174 try_tablejump (index_type, index_expr, minval, range,
11175 table_label, default_label)
11176 tree index_type, index_expr, minval, range;
11177 rtx table_label, default_label;
11178 {
11179 rtx index;
11181 if (! HAVE_tablejump)
11182 return 0;
11184 index_expr = fold (build (MINUS_EXPR, index_type,
11185 convert (index_type, index_expr),
11186 convert (index_type, minval)));
11187 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11188 emit_queue ();
11189 index = protect_from_queue (index, 0);
11190 do_pending_stack_adjust ();
11192 do_tablejump (index, TYPE_MODE (index_type),
11193 convert_modes (TYPE_MODE (index_type),
11194 TYPE_MODE (TREE_TYPE (range)),
11195 expand_expr (range, NULL_RTX,
11196 VOIDmode, 0),
11197 TREE_UNSIGNED (TREE_TYPE (range))),
11198 table_label, default_label);
11199 return 1;
11200 }
11202 /* Nonzero if the mode is a valid vector mode for this architecture.
11203 This returns nonzero even if there is no hardware support for the
11204 vector mode, but we can emulate with narrower modes. */
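/* For example, V2SF is accepted on a target with no vector unit as long
   as ordinary SFmode moves are available, since a V2SF move can then be
   carried out as two SFmode moves.  */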
11206 int
11207 vector_mode_valid_p (mode)
11208 enum machine_mode mode;
11209 {
11210 enum mode_class class = GET_MODE_CLASS (mode);
11211 enum machine_mode innermode;
11213 /* Doh! What's going on? */
11214 if (class != MODE_VECTOR_INT
11215 && class != MODE_VECTOR_FLOAT)
11216 return 0;
11218 /* Hardware support. Woo hoo! */
11219 if (VECTOR_MODE_SUPPORTED_P (mode))
11220 return 1;
11222 innermode = GET_MODE_INNER (mode);
11224 /* We should probably return 1 if requesting V4DI when we have no DI
11225 but do have V2DI; however, that case is very unlikely. */
11227 /* If we have support for the inner mode, we can safely emulate it.
11228 We may not have V2DI, but we can emulate with a pair of DIs. */
11229 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11230 }
11232 #include "gt-expr.h"