gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
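/* As an editorial illustration (not part of the original source): with
   the default choice above, a single-word push on a downward-growing
   stack is expanded as roughly

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI <operand>))

   that is, the stack pointer adjustment is part of the memory access
   itself; on an upward-growing stack PRE_INC plays the same role.  */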
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 static tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
169 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
170 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
171 static int is_aligning_offset PARAMS ((tree, tree));
172 static rtx expand_increment PARAMS ((tree, int, int));
173 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
174 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
175 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
176 rtx, rtx));
177 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
178 #ifdef PUSH_ROUNDING
179 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
180 #endif
181 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
190 /* Record for each mode whether we can float-extend from memory. */
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 #endif
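/* Worked example (editorial, not in the original source): with the
   default MOVE_RATIO of 15 on a 32-bit target not optimizing for size,
   a 16-byte copy whose operands are word-aligned needs only four
   SImode moves, so MOVE_BY_PIECES_P is true and the copy is expanded
   inline; a copy of several hundred bytes exceeds the ratio and falls
   back to a movstr pattern or a memcpy call.  */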
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
216 #ifndef CLEAR_RATIO
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
219 #else
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 #endif
223 #endif
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 #endif
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
237 #endif
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
249 #endif
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
254 void
255 init_expr_once ()
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 rtx reg;
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
280 int regno;
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
293 regno++)
295 if (! HARD_REGNO_MODE_OK (regno, mode))
296 continue;
298 REGNO (reg) = regno;
300 SET_SRC (pat) = mem;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
315 SET_SRC (pat) = reg;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
331 enum insn_code ic;
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
335 continue;
337 PUT_MODE (mem, srcmode);
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
345 /* This is run at the start of compiling a function. */
347 void
348 init_expr ()
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
352 pending_chain = 0;
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
356 saveregs_value = 0;
357 apply_args_value = 0;
358 forced_labels = 0;
361 /* Small sanity check that the queue is empty at the end of a function. */
363 void
364 finish_expr_for_function ()
366 if (pending_chain)
367 abort ();
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
380 static rtx
381 enqueue_insn (var, body)
382 rtx var, body;
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
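/* A minimal usage sketch (editorial addition), mirroring the pattern
   used by convert_move below; TO and FROM are operand rtxes and TO is
   about to be modified:

     to = protect_from_queue (to, 1);
     from = protect_from_queue (from, 0);
     emit_move_insn (to, from);

   The protected values are used immediately, with no intervening
   emit_queue, as required by the warning above.  */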
404 rtx
405 protect_from_queue (x, modify)
406 rtx x;
407 int modify;
409 RTX_CODE code = GET_CODE (x);
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
414 return x;
415 #endif
417 if (code != QUEUED)
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
423 shared. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
427 rtx y = XEXP (x, 0);
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
430 if (QUEUED_INSN (y))
432 rtx temp = gen_reg_rtx (GET_MODE (x));
434 emit_insn_before (gen_move_insn (temp, new),
435 QUEUED_INSN (y));
436 return temp;
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
446 if (code == MEM)
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
451 x = copy_rtx (x);
452 XEXP (x, 0) = tem;
455 else if (code == PLUS || code == MULT)
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
461 x = copy_rtx (x);
462 XEXP (x, 0) = new0;
463 XEXP (x, 1) = new1;
466 return x;
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
470 emit_queue. */
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
474 use that copy. */
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
481 QUEUED_INSN (x));
482 return QUEUED_COPY (x);
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
490 int
491 queued_subexp_p (x)
492 rtx x;
494 enum rtx_code code = GET_CODE (x);
495 switch (code)
497 case QUEUED:
498 return 1;
499 case MEM:
500 return queued_subexp_p (XEXP (x, 0));
501 case MULT:
502 case PLUS:
503 case MINUS:
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
506 default:
507 return 0;
511 /* Perform all the pending incrementations. */
513 void
514 emit_queue ()
516 rtx p;
517 while ((p = pending_chain))
519 rtx body = QUEUED_BODY (p);
521 switch (GET_CODE (body))
523 case INSN:
524 case JUMP_INSN:
525 case CALL_INSN:
526 case CODE_LABEL:
527 case BARRIER:
528 case NOTE:
529 QUEUED_INSN (p) = body;
530 emit_insn (body);
531 break;
533 #ifdef ENABLE_CHECKING
534 case SEQUENCE:
535 abort ();
536 break;
537 #endif
539 default:
540 QUEUED_INSN (p) = emit_insn (body);
541 break;
544 pending_chain = QUEUED_NEXT (p);
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
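/* Editorial sketch of a typical call, widening a QImode pseudo into a
   fresh SImode pseudo:

     rtx from = gen_reg_rtx (QImode);
     rtx to = gen_reg_rtx (SImode);
     convert_move (to, from, 0);

   With UNSIGNEDP == 0 the value is sign-extended; a nonzero UNSIGNEDP
   requests zero-extension instead.  */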
553 void
554 convert_move (to, from, unsignedp)
555 rtx to, from;
556 int unsignedp;
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 enum insn_code code;
563 rtx libcall;
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
572 if (to_real != from_real)
573 abort ();
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
577 TO here. */
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
586 abort ();
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
591 emit_move_insn (to, from);
592 return;
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
598 abort ();
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
602 else
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
605 emit_move_insn (to, from);
606 return;
609 if (to_real != from_real)
610 abort ();
612 if (to_real)
614 rtx value, insns;
616 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
618 /* Try converting directly if the insn is supported. */
619 if ((code = can_extend_p (to_mode, from_mode, 0))
620 != CODE_FOR_nothing)
622 emit_unop_insn (code, to, from, UNKNOWN);
623 return;
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
630 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
631 return;
633 #endif
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
645 return;
647 #endif
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
658 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
665 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
666 return;
668 #endif
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
673 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
674 return;
676 #endif
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
680 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
687 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
694 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
701 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
702 return;
704 #endif
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
709 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
710 return;
712 #endif
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
716 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
717 return;
719 #endif
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
723 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
724 return;
726 #endif
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
730 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
731 return;
733 #endif
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
741 #endif
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
748 #endif
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
755 #endif
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
762 #endif
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
769 #endif
771 libcall = (rtx) 0;
772 switch (from_mode)
774 case SFmode:
775 switch (to_mode)
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 case DFmode:
795 switch (to_mode)
797 case SFmode:
798 libcall = truncdfsf2_libfunc;
799 break;
801 case XFmode:
802 libcall = extenddfxf2_libfunc;
803 break;
805 case TFmode:
806 libcall = extenddftf2_libfunc;
807 break;
809 default:
810 break;
812 break;
814 case XFmode:
815 switch (to_mode)
817 case SFmode:
818 libcall = truncxfsf2_libfunc;
819 break;
821 case DFmode:
822 libcall = truncxfdf2_libfunc;
823 break;
825 default:
826 break;
828 break;
830 case TFmode:
831 switch (to_mode)
833 case SFmode:
834 libcall = trunctfsf2_libfunc;
835 break;
837 case DFmode:
838 libcall = trunctfdf2_libfunc;
839 break;
841 default:
842 break;
844 break;
846 default:
847 break;
850 if (libcall == (rtx) 0)
851 /* This conversion is not implemented yet. */
852 abort ();
854 start_sequence ();
855 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
856 1, from, from_mode);
857 insns = get_insns ();
858 end_sequence ();
859 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
860 from));
861 return;
864 /* Now both modes are integers. */
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
868 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
870 rtx insns;
871 rtx lowpart;
872 rtx fill_value;
873 rtx lowfrom;
874 int i;
875 enum machine_mode lowpart_mode;
876 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
878 /* Try converting directly if the insn is supported. */
879 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
880 != CODE_FOR_nothing)
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize > 0 && GET_CODE (from) == SUBREG)
887 from = force_reg (from_mode, from);
888 emit_unop_insn (code, to, from, equiv_code);
889 return;
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
893 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
894 != CODE_FOR_nothing))
896 if (GET_CODE (to) == REG)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
898 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
899 emit_unop_insn (code, to,
900 gen_lowpart (word_mode, to), equiv_code);
901 return;
904 /* No special multiword conversion insn; do it by hand. */
905 start_sequence ();
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
910 if (reg_overlap_mentioned_p (to, from))
911 from = force_reg (from_mode, from);
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
915 lowpart_mode = word_mode;
916 else
917 lowpart_mode = from_mode;
919 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
921 lowpart = gen_lowpart (lowpart_mode, to);
922 emit_move_insn (lowpart, lowfrom);
924 /* Compute the value to put in each remaining word. */
925 if (unsignedp)
926 fill_value = const0_rtx;
927 else
929 #ifdef HAVE_slt
930 if (HAVE_slt
931 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
932 && STORE_FLAG_VALUE == -1)
934 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
935 lowpart_mode, 0);
936 fill_value = gen_reg_rtx (word_mode);
937 emit_insn (gen_slt (fill_value));
939 else
940 #endif
942 fill_value
943 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
944 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
945 NULL_RTX, 0);
946 fill_value = convert_to_mode (word_mode, fill_value, 1);
950 /* Fill the remaining words. */
951 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
953 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
954 rtx subword = operand_subword (to, index, 1, to_mode);
956 if (subword == 0)
957 abort ();
959 if (fill_value != subword)
960 emit_move_insn (subword, fill_value);
963 insns = get_insns ();
964 end_sequence ();
966 emit_no_conflict_block (insns, to, from, NULL_RTX,
967 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
968 return;
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
975 if (!((GET_CODE (from) == MEM
976 && ! MEM_VOLATILE_P (from)
977 && direct_load[(int) to_mode]
978 && ! mode_dependent_address_p (XEXP (from, 0)))
979 || GET_CODE (from) == REG
980 || GET_CODE (from) == SUBREG))
981 from = force_reg (from_mode, from);
982 convert_move (to, gen_lowpart (word_mode, from), 0);
983 return;
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode == PQImode)
989 if (from_mode != QImode)
990 from = convert_to_mode (QImode, from, unsignedp);
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2)
995 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_truncqipqi2 */
999 abort ();
1002 if (from_mode == PQImode)
1004 if (to_mode != QImode)
1006 from = convert_to_mode (QImode, from, unsignedp);
1007 from_mode = QImode;
1009 else
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2)
1014 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_extendpqiqi2 */
1018 abort ();
1022 if (to_mode == PSImode)
1024 if (from_mode != SImode)
1025 from = convert_to_mode (SImode, from, unsignedp);
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2)
1030 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1031 return;
1033 #endif /* HAVE_truncsipsi2 */
1034 abort ();
1037 if (from_mode == PSImode)
1039 if (to_mode != SImode)
1041 from = convert_to_mode (SImode, from, unsignedp);
1042 from_mode = SImode;
1044 else
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp && HAVE_extendpsisi2)
1049 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1050 return;
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp && HAVE_zero_extendpsisi2)
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1057 return;
1059 #endif /* HAVE_zero_extendpsisi2 */
1060 abort ();
1064 if (to_mode == PDImode)
1066 if (from_mode != DImode)
1067 from = convert_to_mode (DImode, from, unsignedp);
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2)
1072 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1073 return;
1075 #endif /* HAVE_truncdipdi2 */
1076 abort ();
1079 if (from_mode == PDImode)
1081 if (to_mode != DImode)
1083 from = convert_to_mode (DImode, from, unsignedp);
1084 from_mode = DImode;
1086 else
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2)
1091 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1092 return;
1094 #endif /* HAVE_extendpdidi2 */
1095 abort ();
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (from_mode)))
1107 if (!((GET_CODE (from) == MEM
1108 && ! MEM_VOLATILE_P (from)
1109 && direct_load[(int) to_mode]
1110 && ! mode_dependent_address_p (XEXP (from, 0)))
1111 || GET_CODE (from) == REG
1112 || GET_CODE (from) == SUBREG))
1113 from = force_reg (from_mode, from);
1114 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1116 from = copy_to_reg (from);
1117 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 return;
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1124 /* Convert directly if that works. */
1125 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1126 != CODE_FOR_nothing)
1128 if (flag_force_mem)
1129 from = force_not_mem (from);
1131 emit_unop_insn (code, to, from, equiv_code);
1132 return;
1134 else
1136 enum machine_mode intermediate;
1137 rtx tmp;
1138 tree shift_amount;
1140 /* Search for a mode to convert via. */
1141 for (intermediate = from_mode; intermediate != VOIDmode;
1142 intermediate = GET_MODE_WIDER_MODE (intermediate))
1143 if (((can_extend_p (to_mode, intermediate, unsignedp)
1144 != CODE_FOR_nothing)
1145 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1147 GET_MODE_BITSIZE (intermediate))))
1148 && (can_extend_p (intermediate, from_mode, unsignedp)
1149 != CODE_FOR_nothing))
1151 convert_move (to, convert_to_mode (intermediate, from,
1152 unsignedp), unsignedp);
1153 return;
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1159 - GET_MODE_BITSIZE (from_mode), 0);
1160 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1161 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1162 to, unsignedp);
1163 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1164 to, unsignedp);
1165 if (tmp != to)
1166 emit_move_insn (to, tmp);
1167 return;
1171 /* Support special truncate insns for certain modes. */
1173 if (from_mode == DImode && to_mode == SImode)
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2)
1178 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1179 return;
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1186 if (from_mode == DImode && to_mode == HImode)
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2)
1191 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1192 return;
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1199 if (from_mode == DImode && to_mode == QImode)
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2)
1204 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1205 return;
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1212 if (from_mode == SImode && to_mode == HImode)
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2)
1217 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1218 return;
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1225 if (from_mode == SImode && to_mode == QImode)
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2)
1230 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1231 return;
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1238 if (from_mode == HImode && to_mode == QImode)
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2)
1243 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1244 return;
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1251 if (from_mode == TImode && to_mode == DImode)
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2)
1256 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1257 return;
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1264 if (from_mode == TImode && to_mode == SImode)
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2)
1269 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1270 return;
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1277 if (from_mode == TImode && to_mode == HImode)
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2)
1282 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1283 return;
1285 #endif
1286 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 return;
1290 if (from_mode == TImode && to_mode == QImode)
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2)
1295 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1296 return;
1298 #endif
1299 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 return;
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1308 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1309 emit_move_insn (to, temp);
1310 return;
1313 /* Mode combination is not recognized. */
1314 abort ();
1317 /* Return an rtx for a value that would result
1318 from converting X to mode MODE.
1319 Both X and MODE may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1321 This can be done by referring to a part of X in place
1322 or by copying to a new temporary with conversion.
1324 This function *must not* call protect_from_queue
1325 except when putting X into an insn (in which case convert_move does it). */
1327 rtx
1328 convert_to_mode (mode, x, unsignedp)
1329 enum machine_mode mode;
1330 rtx x;
1331 int unsignedp;
1333 return convert_modes (mode, VOIDmode, x, unsignedp);
1336 /* Return an rtx for a value that would result
1337 from converting X from mode OLDMODE to mode MODE.
1338 Both modes may be floating, or both integer.
1339 UNSIGNEDP is nonzero if X is an unsigned value.
1341 This can be done by referring to a part of X in place
1342 or by copying to a new temporary with conversion.
1344 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1346 This function *must not* call protect_from_queue
1347 except when putting X into an insn (in which case convert_move does it). */
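/* Editorial example of why OLDMODE matters for constants, which carry
   VOIDmode of their own:

     rtx x = GEN_INT (0x1234);
     rtx y = convert_modes (QImode, SImode, x, 1);

   Here OLDMODE tells convert_modes that X is really an SImode value,
   and the result is (const_int 52), i.e. the low byte 0x34.  */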
1349 rtx
1350 convert_modes (mode, oldmode, x, unsignedp)
1351 enum machine_mode mode, oldmode;
1352 rtx x;
1353 int unsignedp;
1355 rtx temp;
1357 /* If FROM is a SUBREG that indicates that we have already done at least
1358 the required extension, strip it. */
1360 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1361 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1362 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1363 x = gen_lowpart (mode, x);
1365 if (GET_MODE (x) != VOIDmode)
1366 oldmode = GET_MODE (x);
1368 if (mode == oldmode)
1369 return x;
1371 /* There is one case that we must handle specially: If we are converting
1372 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1373 we are to interpret the constant as unsigned, gen_lowpart will do
1374 the wrong thing if the constant appears negative. What we want to do is
1375 make the high-order word of the constant zero, not all ones. */
1377 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1378 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1381 HOST_WIDE_INT val = INTVAL (x);
1383 if (oldmode != VOIDmode
1384 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1386 int width = GET_MODE_BITSIZE (oldmode);
1388 /* We need to zero extend VAL. */
1389 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1392 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1395 /* We can do this with a gen_lowpart if both desired and current modes
1396 are integer, and this is either a constant integer, a register, or a
1397 non-volatile MEM. Except for the constant case where MODE is no
1398 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1400 if ((GET_CODE (x) == CONST_INT
1401 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1402 || (GET_MODE_CLASS (mode) == MODE_INT
1403 && GET_MODE_CLASS (oldmode) == MODE_INT
1404 && (GET_CODE (x) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1406 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1407 && direct_load[(int) mode])
1408 || (GET_CODE (x) == REG
1409 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1410 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1412 /* ??? If we don't know OLDMODE, we have to assume here that
1413 X does not need sign- or zero-extension. This may not be
1414 the case, but it's the best we can do. */
1415 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1416 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1418 HOST_WIDE_INT val = INTVAL (x);
1419 int width = GET_MODE_BITSIZE (oldmode);
1421 /* We must sign or zero-extend in this case. Start by
1422 zero-extending, then sign extend if we need to. */
1423 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1424 if (! unsignedp
1425 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1426 val |= (HOST_WIDE_INT) (-1) << width;
1428 return gen_int_mode (val, mode);
1431 return gen_lowpart (mode, x);
1434 temp = gen_reg_rtx (mode);
1435 convert_move (temp, x, unsignedp);
1436 return temp;
1439 /* This macro is used to determine what the largest unit size that
1440 move_by_pieces can use is. */
1442 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1443 move efficiently, as opposed to MOVE_MAX which is the maximum
1444 number of bytes we can move with a single instruction. */
1446 #ifndef MOVE_MAX_PIECES
1447 #define MOVE_MAX_PIECES MOVE_MAX
1448 #endif
1450 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1451 store efficiently. Due to internal GCC limitations, this is
1452 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1453 for an immediate constant. */
1455 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
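/* Worked example (editorial): on a host where HOST_WIDE_INT is eight
   bytes and MOVE_MAX_PIECES is 4, STORE_MAX_PIECES is MIN (4, 16),
   i.e. 4, so store_by_pieces emits at most word-sized stores.  */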
1457 /* Generate several move instructions to copy LEN bytes from block FROM to
1458 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1459 and TO through protect_from_queue before calling.
1461 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1462 used to push FROM to the stack.
1464 ALIGN is maximum alignment we can assume. */
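/* Editorial sketch of a direct call, assuming DST and SRC are BLKmode
   MEMs that have already been passed through protect_from_queue:

     move_by_pieces (dst, src, 8, 32);

   copies eight bytes known to be aligned to 32 bits, typically as two
   word moves on a 32-bit target.  */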
1466 void
1467 move_by_pieces (to, from, len, align)
1468 rtx to, from;
1469 unsigned HOST_WIDE_INT len;
1470 unsigned int align;
1472 struct move_by_pieces data;
1473 rtx to_addr, from_addr = XEXP (from, 0);
1474 unsigned int max_size = MOVE_MAX_PIECES + 1;
1475 enum machine_mode mode = VOIDmode, tmode;
1476 enum insn_code icode;
1478 data.offset = 0;
1479 data.from_addr = from_addr;
1480 if (to)
1482 to_addr = XEXP (to, 0);
1483 data.to = to;
1484 data.autinc_to
1485 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1486 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1487 data.reverse
1488 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1490 else
1492 to_addr = NULL_RTX;
1493 data.to = NULL_RTX;
1494 data.autinc_to = 1;
1495 #ifdef STACK_GROWS_DOWNWARD
1496 data.reverse = 1;
1497 #else
1498 data.reverse = 0;
1499 #endif
1501 data.to_addr = to_addr;
1502 data.from = from;
1503 data.autinc_from
1504 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1505 || GET_CODE (from_addr) == POST_INC
1506 || GET_CODE (from_addr) == POST_DEC);
1508 data.explicit_inc_from = 0;
1509 data.explicit_inc_to = 0;
1510 if (data.reverse) data.offset = len;
1511 data.len = len;
1513 /* If copying requires more than two move insns,
1514 copy addresses to registers (to make displacements shorter)
1515 and use post-increment if available. */
1516 if (!(data.autinc_from && data.autinc_to)
1517 && move_by_pieces_ninsns (len, align) > 2)
1519 /* Find the mode of the largest move... */
1520 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1521 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1522 if (GET_MODE_SIZE (tmode) < max_size)
1523 mode = tmode;
1525 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1527 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1528 data.autinc_from = 1;
1529 data.explicit_inc_from = -1;
1531 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1533 data.from_addr = copy_addr_to_reg (from_addr);
1534 data.autinc_from = 1;
1535 data.explicit_inc_from = 1;
1537 if (!data.autinc_from && CONSTANT_P (from_addr))
1538 data.from_addr = copy_addr_to_reg (from_addr);
1539 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1541 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1542 data.autinc_to = 1;
1543 data.explicit_inc_to = -1;
1545 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1547 data.to_addr = copy_addr_to_reg (to_addr);
1548 data.autinc_to = 1;
1549 data.explicit_inc_to = 1;
1551 if (!data.autinc_to && CONSTANT_P (to_addr))
1552 data.to_addr = copy_addr_to_reg (to_addr);
1555 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1556 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1557 align = MOVE_MAX * BITS_PER_UNIT;
1559 /* First move what we can in the largest integer mode, then go to
1560 successively smaller modes. */
1562 while (max_size > 1)
1564 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1565 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1566 if (GET_MODE_SIZE (tmode) < max_size)
1567 mode = tmode;
1569 if (mode == VOIDmode)
1570 break;
1572 icode = mov_optab->handlers[(int) mode].insn_code;
1573 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1574 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1576 max_size = GET_MODE_SIZE (mode);
1579 /* The code above should have handled everything. */
1580 if (data.len > 0)
1581 abort ();
1584 /* Return number of insns required to move L bytes by pieces.
1585 ALIGN (in bits) is maximum alignment we can assume. */
1587 static unsigned HOST_WIDE_INT
1588 move_by_pieces_ninsns (l, align)
1589 unsigned HOST_WIDE_INT l;
1590 unsigned int align;
1592 unsigned HOST_WIDE_INT n_insns = 0;
1593 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1595 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1596 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1597 align = MOVE_MAX * BITS_PER_UNIT;
1599 while (max_size > 1)
1601 enum machine_mode mode = VOIDmode, tmode;
1602 enum insn_code icode;
1604 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1605 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1606 if (GET_MODE_SIZE (tmode) < max_size)
1607 mode = tmode;
1609 if (mode == VOIDmode)
1610 break;
1612 icode = mov_optab->handlers[(int) mode].insn_code;
1613 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1614 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1616 max_size = GET_MODE_SIZE (mode);
1619 if (l)
1620 abort ();
1621 return n_insns;
1624 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1625 with move instructions for mode MODE. GENFUN is the gen_... function
1626 to make a move insn for that mode. DATA has all the other info. */
1628 static void
1629 move_by_pieces_1 (genfun, mode, data)
1630 rtx (*genfun) PARAMS ((rtx, ...));
1631 enum machine_mode mode;
1632 struct move_by_pieces *data;
1634 unsigned int size = GET_MODE_SIZE (mode);
1635 rtx to1 = NULL_RTX, from1;
1637 while (data->len >= size)
1639 if (data->reverse)
1640 data->offset -= size;
1642 if (data->to)
1644 if (data->autinc_to)
1645 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1646 data->offset);
1647 else
1648 to1 = adjust_address (data->to, mode, data->offset);
1651 if (data->autinc_from)
1652 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1653 data->offset);
1654 else
1655 from1 = adjust_address (data->from, mode, data->offset);
1657 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1658 emit_insn (gen_add2_insn (data->to_addr,
1659 GEN_INT (-(HOST_WIDE_INT)size)));
1660 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1661 emit_insn (gen_add2_insn (data->from_addr,
1662 GEN_INT (-(HOST_WIDE_INT)size)));
1664 if (data->to)
1665 emit_insn ((*genfun) (to1, from1));
1666 else
1668 #ifdef PUSH_ROUNDING
1669 emit_single_push_insn (mode, from1, NULL);
1670 #else
1671 abort ();
1672 #endif
1675 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1676 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1677 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1678 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1680 if (! data->reverse)
1681 data->offset += size;
1683 data->len -= size;
1687 /* Emit code to move a block Y to a block X. This may be done with
1688 string-move instructions, with multiple scalar move instructions,
1689 or with a library call.
1691 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1692 SIZE is an rtx that says how long they are.
1693 ALIGN is the maximum alignment we can assume they have.
1694 METHOD describes what kind of copy this is, and what mechanisms may be used.
1696 Return the address of the new block, if memcpy is called and returns it,
1697 0 otherwise. */
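/* A hedged usage sketch (editorial, not from the original source),
   assuming DST and SRC are BLKmode MEMs describing two 64-byte
   objects:

     rtx ret = emit_block_move (dst, src, GEN_INT (64), BLOCK_OP_NORMAL);

   Depending on size, alignment and the target, this expands to inline
   moves, a movstr pattern, or a memcpy/bcopy call; RET is the value
   returned by memcpy when that libcall is emitted, and 0 otherwise.  */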
1699 rtx
1700 emit_block_move (x, y, size, method)
1701 rtx x, y, size;
1702 enum block_op_methods method;
1704 bool may_use_call;
1705 rtx retval = 0;
1706 unsigned int align;
1708 switch (method)
1710 case BLOCK_OP_NORMAL:
1711 may_use_call = true;
1712 break;
1714 case BLOCK_OP_CALL_PARM:
1715 may_use_call = block_move_libcall_safe_for_call_parm ();
1717 /* Make inhibit_defer_pop nonzero around the library call
1718 to force it to pop the arguments right away. */
1719 NO_DEFER_POP;
1720 break;
1722 case BLOCK_OP_NO_LIBCALL:
1723 may_use_call = false;
1724 break;
1726 default:
1727 abort ();
1730 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1732 if (GET_MODE (x) != BLKmode)
1733 abort ();
1734 if (GET_MODE (y) != BLKmode)
1735 abort ();
1737 x = protect_from_queue (x, 1);
1738 y = protect_from_queue (y, 0);
1739 size = protect_from_queue (size, 0);
1741 if (GET_CODE (x) != MEM)
1742 abort ();
1743 if (GET_CODE (y) != MEM)
1744 abort ();
1745 if (size == 0)
1746 abort ();
1748 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1749 can be incorrect is coming from __builtin_memcpy. */
1750 if (GET_CODE (size) == CONST_INT)
1752 x = shallow_copy_rtx (x);
1753 y = shallow_copy_rtx (y);
1754 set_mem_size (x, size);
1755 set_mem_size (y, size);
1758 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1759 move_by_pieces (x, y, INTVAL (size), align);
1760 else if (emit_block_move_via_movstr (x, y, size, align))
1762 else if (may_use_call)
1763 retval = emit_block_move_via_libcall (x, y, size);
1764 else
1765 emit_block_move_via_loop (x, y, size, align);
1767 if (method == BLOCK_OP_CALL_PARM)
1768 OK_DEFER_POP;
1770 return retval;
1773 /* A subroutine of emit_block_move. Returns true if calling the
1774 block move libcall will not clobber any parameters which may have
1775 already been placed on the stack. */
1777 static bool
1778 block_move_libcall_safe_for_call_parm ()
1780 if (PUSH_ARGS)
1781 return true;
1782 else
1784 /* Check to see whether memcpy takes all register arguments. */
1785 static enum {
1786 takes_regs_uninit, takes_regs_no, takes_regs_yes
1787 } takes_regs = takes_regs_uninit;
1789 switch (takes_regs)
1791 case takes_regs_uninit:
1793 CUMULATIVE_ARGS args_so_far;
1794 tree fn, arg;
1796 fn = emit_block_move_libcall_fn (false);
1797 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1799 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1800 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1802 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1803 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1804 if (!tmp || !REG_P (tmp))
1805 goto fail_takes_regs;
1806 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1807 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1808 NULL_TREE, 1))
1809 goto fail_takes_regs;
1810 #endif
1811 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1814 takes_regs = takes_regs_yes;
1815 /* FALLTHRU */
1817 case takes_regs_yes:
1818 return true;
1820 fail_takes_regs:
1821 takes_regs = takes_regs_no;
1822 /* FALLTHRU */
1823 case takes_regs_no:
1824 return false;
1826 default:
1827 abort ();
1832 /* A subroutine of emit_block_move. Expand a movstr pattern;
1833 return true if successful. */
1835 static bool
1836 emit_block_move_via_movstr (x, y, size, align)
1837 rtx x, y, size;
1838 unsigned int align;
1840 /* Try the most limited insn first, because there's no point
1841 including more than one in the machine description unless
1842 the more limited one has some advantage. */
1844 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1845 enum machine_mode mode;
1847 /* Since this is a move insn, we don't care about volatility. */
1848 volatile_ok = 1;
1850 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1851 mode = GET_MODE_WIDER_MODE (mode))
1853 enum insn_code code = movstr_optab[(int) mode];
1854 insn_operand_predicate_fn pred;
1856 if (code != CODE_FOR_nothing
1857 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1858 here because if SIZE is less than the mode mask, as it is
1859 returned by the macro, it will definitely be less than the
1860 actual mode mask. */
1861 && ((GET_CODE (size) == CONST_INT
1862 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1863 <= (GET_MODE_MASK (mode) >> 1)))
1864 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1865 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1866 || (*pred) (x, BLKmode))
1867 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1868 || (*pred) (y, BLKmode))
1869 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1870 || (*pred) (opalign, VOIDmode)))
1872 rtx op2;
1873 rtx last = get_last_insn ();
1874 rtx pat;
1876 op2 = convert_to_mode (mode, size, 1);
1877 pred = insn_data[(int) code].operand[2].predicate;
1878 if (pred != 0 && ! (*pred) (op2, mode))
1879 op2 = copy_to_mode_reg (mode, op2);
1881 /* ??? When called via emit_block_move_for_call, it'd be
1882 nice if there were some way to inform the backend, so
1883 that it doesn't fail the expansion because it thinks
1884 emitting the libcall would be more efficient. */
1886 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1887 if (pat)
1889 emit_insn (pat);
1890 volatile_ok = 0;
1891 return true;
1893 else
1894 delete_insns_since (last);
1898 volatile_ok = 0;
1899 return false;
1902 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1903 Return the return value from memcpy, 0 otherwise. */
1905 static rtx
1906 emit_block_move_via_libcall (dst, src, size)
1907 rtx dst, src, size;
1909 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1910 enum machine_mode size_mode;
1911 rtx retval;
1913 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1915 It is unsafe to save the value generated by protect_from_queue
1916 and reuse it later. Consider what happens if emit_queue is
1917 called before the return value from protect_from_queue is used.
1919 Expansion of the CALL_EXPR below will call emit_queue before
1920 we are finished emitting RTL for argument setup. So if we are
1921 not careful we could get the wrong value for an argument.
1923 To avoid this problem we go ahead and emit code to copy X, Y &
1924 SIZE into new pseudos. We can then place those new pseudos
1925 into an RTL_EXPR and use them later, even after a call to
1926 emit_queue.
1928 Note this is not strictly needed for library calls since they
1929 do not call emit_queue before loading their arguments. However,
1930 we may need to have library calls call emit_queue in the future
1931 since failing to do so could cause problems for targets which
1932 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1934 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1935 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1937 if (TARGET_MEM_FUNCTIONS)
1938 size_mode = TYPE_MODE (sizetype);
1939 else
1940 size_mode = TYPE_MODE (unsigned_type_node);
1941 size = convert_to_mode (size_mode, size, 1);
1942 size = copy_to_mode_reg (size_mode, size);
1944 /* It is incorrect to use the libcall calling conventions to call
1945 memcpy in this context. This could be a user call to memcpy and
1946 the user may wish to examine the return value from memcpy. For
1947 targets where libcalls and normal calls have different conventions
1948 for returning pointers, we could end up generating incorrect code.
1950 For convenience, we generate the call to bcopy this way as well. */
1952 dst_tree = make_tree (ptr_type_node, dst);
1953 src_tree = make_tree (ptr_type_node, src);
1954 if (TARGET_MEM_FUNCTIONS)
1955 size_tree = make_tree (sizetype, size);
1956 else
1957 size_tree = make_tree (unsigned_type_node, size);
1959 fn = emit_block_move_libcall_fn (true);
1960 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1961 if (TARGET_MEM_FUNCTIONS)
1963 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1964 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1966 else
1968 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1969 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1972 /* Now we have to build up the CALL_EXPR itself. */
1973 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1974 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1975 call_expr, arg_list, NULL_TREE);
1976 TREE_SIDE_EFFECTS (call_expr) = 1;
1978 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1980 /* If we are initializing a readonly value, show the above call
1981 clobbered it. Otherwise, a load from it may erroneously be
1982 hoisted from a loop. */
1983 if (RTX_UNCHANGING_P (dst))
1984 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1986 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1989 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1990 for the function we use for block copies. The first time FOR_CALL
1991 is true, we call assemble_external. */
1993 static GTY(()) tree block_move_fn;
1995 static tree
1996 emit_block_move_libcall_fn (for_call)
1997 int for_call;
1999 static bool emitted_extern;
2000 tree fn = block_move_fn, args;
2002 if (!fn)
2004 if (TARGET_MEM_FUNCTIONS)
2006 fn = get_identifier ("memcpy");
2007 args = build_function_type_list (ptr_type_node, ptr_type_node,
2008 const_ptr_type_node, sizetype,
2009 NULL_TREE);
2011 else
2013 fn = get_identifier ("bcopy");
2014 args = build_function_type_list (void_type_node, const_ptr_type_node,
2015 ptr_type_node, unsigned_type_node,
2016 NULL_TREE);
2019 fn = build_decl (FUNCTION_DECL, fn, args);
2020 DECL_EXTERNAL (fn) = 1;
2021 TREE_PUBLIC (fn) = 1;
2022 DECL_ARTIFICIAL (fn) = 1;
2023 TREE_NOTHROW (fn) = 1;
2025 block_move_fn = fn;
2028 if (for_call && !emitted_extern)
2030 emitted_extern = true;
2031 make_decl_rtl (fn, NULL);
2032 assemble_external (fn);
2035 return fn;
2038 /* A subroutine of emit_block_move. Copy the data via an explicit
2039 loop. This is used only when libcalls are forbidden. */
2040 /* ??? It'd be nice to copy in hunks larger than QImode. */
2042 static void
2043 emit_block_move_via_loop (x, y, size, align)
2044 rtx x, y, size;
2045 unsigned int align ATTRIBUTE_UNUSED;
2047 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2048 enum machine_mode iter_mode;
2050 iter_mode = GET_MODE (size);
2051 if (iter_mode == VOIDmode)
2052 iter_mode = word_mode;
2054 top_label = gen_label_rtx ();
2055 cmp_label = gen_label_rtx ();
2056 iter = gen_reg_rtx (iter_mode);
2058 emit_move_insn (iter, const0_rtx);
2060 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2061 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2062 do_pending_stack_adjust ();
2064 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2066 emit_jump (cmp_label);
2067 emit_label (top_label);
2069 tmp = convert_modes (Pmode, iter_mode, iter, true);
2070 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2071 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2072 x = change_address (x, QImode, x_addr);
2073 y = change_address (y, QImode, y_addr);
2075 emit_move_insn (x, y);
2077 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2078 true, OPTAB_LIB_WIDEN);
2079 if (tmp != iter)
2080 emit_move_insn (iter, tmp);
2082 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2083 emit_label (cmp_label);
2085 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2086 true, top_label);
2088 emit_note (NULL, NOTE_INSN_LOOP_END);
2091 /* Copy all or part of a value X into registers starting at REGNO.
2092 The number of registers to be filled is NREGS. */
2094 void
2095 move_block_to_reg (regno, x, nregs, mode)
2096 int regno;
2097 rtx x;
2098 int nregs;
2099 enum machine_mode mode;
2101 int i;
2102 #ifdef HAVE_load_multiple
2103 rtx pat;
2104 rtx last;
2105 #endif
2107 if (nregs == 0)
2108 return;
2110 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2111 x = validize_mem (force_const_mem (mode, x));
2113 /* See if the machine can do this with a load multiple insn. */
2114 #ifdef HAVE_load_multiple
2115 if (HAVE_load_multiple)
2117 last = get_last_insn ();
2118 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2119 GEN_INT (nregs));
2120 if (pat)
2122 emit_insn (pat);
2123 return;
2125 else
2126 delete_insns_since (last);
2128 #endif
2130 for (i = 0; i < nregs; i++)
2131 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2132 operand_subword_force (x, i, mode));
2135 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2136 The number of registers to be filled is NREGS. SIZE indicates the number
2137 of bytes in the object X. */
2139 void
2140 move_block_from_reg (regno, x, nregs, size)
2141 int regno;
2142 rtx x;
2143 int nregs;
2144 int size;
2146 int i;
2147 #ifdef HAVE_store_multiple
2148 rtx pat;
2149 rtx last;
2150 #endif
2151 enum machine_mode mode;
2153 if (nregs == 0)
2154 return;
2156 /* If SIZE is that of a mode no bigger than a word, just use that
2157 mode's store operation. */
2158 if (size <= UNITS_PER_WORD
2159 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2161 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2162 return;
2165 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2166 to the left before storing to memory. Note that the previous test
2167 doesn't handle all cases (e.g. SIZE == 3). */
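/* For example, assuming UNITS_PER_WORD == 4 on a big-endian target, a
3-byte block is shifted left by (4 - 3) * 8 == 8 bits below, so that its
three significant bytes end up in the low-addressed (most significant)
bytes of the stored word.  */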
2168 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2170 rtx tem = operand_subword (x, 0, 1, BLKmode);
2171 rtx shift;
2173 if (tem == 0)
2174 abort ();
2176 shift = expand_shift (LSHIFT_EXPR, word_mode,
2177 gen_rtx_REG (word_mode, regno),
2178 build_int_2 ((UNITS_PER_WORD - size)
2179 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2180 emit_move_insn (tem, shift);
2181 return;
2184 /* See if the machine can do this with a store multiple insn. */
2185 #ifdef HAVE_store_multiple
2186 if (HAVE_store_multiple)
2188 last = get_last_insn ();
2189 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2190 GEN_INT (nregs));
2191 if (pat)
2193 emit_insn (pat);
2194 return;
2196 else
2197 delete_insns_since (last);
2199 #endif
2201 for (i = 0; i < nregs; i++)
2203 rtx tem = operand_subword (x, i, 1, BLKmode);
2205 if (tem == 0)
2206 abort ();
2208 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2212 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2213 ORIG, where ORIG is a non-consecutive group of registers represented by
2214 a PARALLEL. The clone is identical to the original except in that the
2215 original set of registers is replaced by a new set of pseudo registers.
2216 The new set has the same modes as the original set. */
2218 rtx
2219 gen_group_rtx (orig)
2220 rtx orig;
2222 int i, length;
2223 rtx *tmps;
2225 if (GET_CODE (orig) != PARALLEL)
2226 abort ();
2228 length = XVECLEN (orig, 0);
2229 tmps = (rtx *) alloca (sizeof (rtx) * length);
2231 /* Skip a NULL entry in first slot. */
2232 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2234 if (i)
2235 tmps[0] = 0;
2237 for (; i < length; i++)
2239 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2240 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2242 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2245 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2248 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2249 registers represented by a PARALLEL. SSIZE represents the total size of
2250 block SRC in bytes, or -1 if not known. */
2251 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2252 the balance will be in what would be the low-order memory addresses, i.e.
2253 left justified for big endian, right justified for little endian. This
2254 happens to be true for the targets currently using this support. If this
2255 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2256 would be needed. */
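/* For example, assuming UNITS_PER_WORD == 4 and SSIZE == 6, the two
leftover bytes of the final piece are assumed to occupy the low-order
addresses of their word: the most significant end of the register on a
big-endian target, the least significant end on a little-endian one.  */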
2258 void
2259 emit_group_load (dst, orig_src, ssize)
2260 rtx dst, orig_src;
2261 int ssize;
2263 rtx *tmps, src;
2264 int start, i;
2266 if (GET_CODE (dst) != PARALLEL)
2267 abort ();
2269 /* Check for a NULL entry, used to indicate that the parameter goes
2270 both on the stack and in registers. */
2271 if (XEXP (XVECEXP (dst, 0, 0), 0))
2272 start = 0;
2273 else
2274 start = 1;
2276 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2278 /* Process the pieces. */
2279 for (i = start; i < XVECLEN (dst, 0); i++)
2281 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2282 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2283 unsigned int bytelen = GET_MODE_SIZE (mode);
2284 int shift = 0;
2286 /* Handle trailing fragments that run over the size of the struct. */
2287 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2289 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2290 bytelen = ssize - bytepos;
2291 if (bytelen <= 0)
2292 abort ();
2295 /* If we won't be loading directly from memory, protect the real source
2296 from strange tricks we might play; but make sure that the source can
2297 be loaded directly into the destination. */
2298 src = orig_src;
2299 if (GET_CODE (orig_src) != MEM
2300 && (!CONSTANT_P (orig_src)
2301 || (GET_MODE (orig_src) != mode
2302 && GET_MODE (orig_src) != VOIDmode)))
2304 if (GET_MODE (orig_src) == VOIDmode)
2305 src = gen_reg_rtx (mode);
2306 else
2307 src = gen_reg_rtx (GET_MODE (orig_src));
2309 emit_move_insn (src, orig_src);
2312 /* Optimize the access just a bit. */
2313 if (GET_CODE (src) == MEM
2314 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2315 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2316 && bytelen == GET_MODE_SIZE (mode))
2318 tmps[i] = gen_reg_rtx (mode);
2319 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2321 else if (GET_CODE (src) == CONCAT)
2323 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2324 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2326 if ((bytepos == 0 && bytelen == slen0)
2327 || (bytepos != 0 && bytepos + bytelen <= slen))
2329 /* The following assumes that the concatenated objects all
2330 have the same size. In this case, a simple calculation
2331 can be used to determine the object and the bit field
2332 to be extracted. */
2333 tmps[i] = XEXP (src, bytepos / slen0);
2334 if (! CONSTANT_P (tmps[i])
2335 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2336 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2337 (bytepos % slen0) * BITS_PER_UNIT,
2338 1, NULL_RTX, mode, mode, ssize);
2340 else if (bytepos == 0)
2342 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2343 emit_move_insn (mem, src);
2344 tmps[i] = adjust_address (mem, mode, 0);
2346 else
2347 abort ();
2349 else if (CONSTANT_P (src)
2350 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2351 tmps[i] = src;
2352 else
2353 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2354 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2355 mode, mode, ssize);
2357 if (BYTES_BIG_ENDIAN && shift)
2358 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2359 tmps[i], 0, OPTAB_WIDEN);
2362 emit_queue ();
2364 /* Copy the extracted pieces into the proper (probable) hard regs. */
2365 for (i = start; i < XVECLEN (dst, 0); i++)
2366 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2369 /* Emit code to move a block SRC to block DST, where SRC and DST are
2370 non-consecutive groups of registers, each represented by a PARALLEL. */
2372 void
2373 emit_group_move (dst, src)
2374 rtx dst, src;
2376 int i;
2378 if (GET_CODE (src) != PARALLEL
2379 || GET_CODE (dst) != PARALLEL
2380 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2381 abort ();
2383 /* Skip first entry if NULL. */
2384 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2385 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2386 XEXP (XVECEXP (src, 0, i), 0));
2389 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2390 registers represented by a PARALLEL. SSIZE represents the total size of
2391 block DST, or -1 if not known. */
2393 void
2394 emit_group_store (orig_dst, src, ssize)
2395 rtx orig_dst, src;
2396 int ssize;
2398 rtx *tmps, dst;
2399 int start, i;
2401 if (GET_CODE (src) != PARALLEL)
2402 abort ();
2404 /* Check for a NULL entry, used to indicate that the parameter goes
2405 both on the stack and in registers. */
2406 if (XEXP (XVECEXP (src, 0, 0), 0))
2407 start = 0;
2408 else
2409 start = 1;
2411 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2413 /* Copy the (probable) hard regs into pseudos. */
2414 for (i = start; i < XVECLEN (src, 0); i++)
2416 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2417 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2418 emit_move_insn (tmps[i], reg);
2420 emit_queue ();
2422 /* If we won't be storing directly into memory, protect the real destination
2423 from strange tricks we might play. */
2424 dst = orig_dst;
2425 if (GET_CODE (dst) == PARALLEL)
2427 rtx temp;
2429 /* We can get a PARALLEL dst if there is a conditional expression in
2430 a return statement. In that case, the dst and src are the same,
2431 so no action is necessary. */
2432 if (rtx_equal_p (dst, src))
2433 return;
2435 /* It is unclear if we can ever reach here, but we may as well handle
2436 it. Allocate a temporary, and split this into a store/load to/from
2437 the temporary. */
2439 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2440 emit_group_store (temp, src, ssize);
2441 emit_group_load (dst, temp, ssize);
2442 return;
2444 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2446 dst = gen_reg_rtx (GET_MODE (orig_dst));
2447 /* Make life a bit easier for combine. */
2448 emit_move_insn (dst, const0_rtx);
2451 /* Process the pieces. */
2452 for (i = start; i < XVECLEN (src, 0); i++)
2454 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2455 enum machine_mode mode = GET_MODE (tmps[i]);
2456 unsigned int bytelen = GET_MODE_SIZE (mode);
2457 rtx dest = dst;
2459 /* Handle trailing fragments that run over the size of the struct. */
2460 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2462 if (BYTES_BIG_ENDIAN)
2464 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2465 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2466 tmps[i], 0, OPTAB_WIDEN);
2468 bytelen = ssize - bytepos;
2471 if (GET_CODE (dst) == CONCAT)
2473 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2474 dest = XEXP (dst, 0);
2475 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2477 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2478 dest = XEXP (dst, 1);
2480 else
2481 abort ();
2484 /* Optimize the access just a bit. */
2485 if (GET_CODE (dest) == MEM
2486 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2487 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2488 && bytelen == GET_MODE_SIZE (mode))
2489 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2490 else
2491 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2492 mode, tmps[i], ssize);
2495 emit_queue ();
2497 /* Copy from the pseudo into the (probable) hard reg. */
2498 if (GET_CODE (dst) == REG)
2499 emit_move_insn (orig_dst, dst);
2502 /* Generate code to copy a BLKmode object of TYPE out of a
2503 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2504 is null, a stack temporary is created. TGTBLK is returned.
2506 The primary purpose of this routine is to handle functions
2507 that return BLKmode structures in registers. Some machines
2508 (the PA for example) want to return all small structures
2509 in registers regardless of the structure's alignment. */
2511 rtx
2512 copy_blkmode_from_reg (tgtblk, srcreg, type)
2513 rtx tgtblk;
2514 rtx srcreg;
2515 tree type;
2517 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2518 rtx src = NULL, dst = NULL;
2519 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2520 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2522 if (tgtblk == 0)
2524 tgtblk = assign_temp (build_qualified_type (type,
2525 (TYPE_QUALS (type)
2526 | TYPE_QUAL_CONST)),
2527 0, 1, 1);
2528 preserve_temp_slots (tgtblk);
2531 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2532 into a new pseudo which is a full word. */
2534 if (GET_MODE (srcreg) != BLKmode
2535 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2536 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2538 /* Structures whose size is not a multiple of a word are aligned
2539 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2540 machine, this means we must skip the empty high order bytes when
2541 calculating the bit offset. */
2542 if (BYTES_BIG_ENDIAN
2543 && bytes % UNITS_PER_WORD)
2544 big_endian_correction
2545 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
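/* For example, assuming BITS_PER_WORD == 32, a 3-byte structure gives
big_endian_correction == 32 - 3 * 8 == 8, so the source extraction below
starts 8 bits into the first word, skipping the unused high-order byte.  */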
2547 Copy the structure BITSIZE bits at a time.
2549 We could probably emit more efficient code for machines which do not use
2550 strict alignment, but it doesn't seem worth the effort at the current
2551 time. */
2552 for (bitpos = 0, xbitpos = big_endian_correction;
2553 bitpos < bytes * BITS_PER_UNIT;
2554 bitpos += bitsize, xbitpos += bitsize)
2556 /* We need a new source operand each time xbitpos is on a
2557 word boundary and when xbitpos == big_endian_correction
2558 (the first time through). */
2559 if (xbitpos % BITS_PER_WORD == 0
2560 || xbitpos == big_endian_correction)
2561 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2562 GET_MODE (srcreg));
2564 /* We need a new destination operand each time bitpos is on
2565 a word boundary. */
2566 if (bitpos % BITS_PER_WORD == 0)
2567 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2569 /* Use xbitpos for the source extraction (right justified) and
2570 bitpos for the destination store (left justified). */
2571 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2572 extract_bit_field (src, bitsize,
2573 xbitpos % BITS_PER_WORD, 1,
2574 NULL_RTX, word_mode, word_mode,
2575 BITS_PER_WORD),
2576 BITS_PER_WORD);
2579 return tgtblk;
2582 /* Add a USE expression for REG to the (possibly empty) list pointed
2583 to by CALL_FUSAGE. REG must denote a hard register. */
2585 void
2586 use_reg (call_fusage, reg)
2587 rtx *call_fusage, reg;
2589 if (GET_CODE (reg) != REG
2590 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2591 abort ();
2593 *call_fusage
2594 = gen_rtx_EXPR_LIST (VOIDmode,
2595 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2598 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2599 starting at REGNO. All of these registers must be hard registers. */
2601 void
2602 use_regs (call_fusage, regno, nregs)
2603 rtx *call_fusage;
2604 int regno;
2605 int nregs;
2607 int i;
2609 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2610 abort ();
2612 for (i = 0; i < nregs; i++)
2613 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2616 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2617 PARALLEL REGS. This is for calls that pass values in multiple
2618 non-contiguous locations. The Irix 6 ABI has examples of this. */
2620 void
2621 use_group_regs (call_fusage, regs)
2622 rtx *call_fusage;
2623 rtx regs;
2625 int i;
2627 for (i = 0; i < XVECLEN (regs, 0); i++)
2629 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2631 /* A NULL entry means the parameter goes both on the stack and in
2632 registers. This can also be a MEM for targets that pass values
2633 partially on the stack and partially in registers. */
2634 if (reg != 0 && GET_CODE (reg) == REG)
2635 use_reg (call_fusage, reg);
2640 /* Determine whether the LEN bytes generated by CONSTFUN can be
2641 stored to memory using several move instructions. CONSTFUNDATA is
2642 a pointer which will be passed as argument in every CONSTFUN call.
2643 ALIGN is maximum alignment we can assume. Return nonzero if a
2644 call to store_by_pieces should succeed. */
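/* A caller is expected to pair this check with store_by_pieces, along
the lines of:

if (can_store_by_pieces (len, constfun, data, align))
store_by_pieces (to, len, constfun, data, align);

where CONSTFUN and DATA stand for whatever callback and cookie the
caller uses to generate the constant bytes.  */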
2646 int
2647 can_store_by_pieces (len, constfun, constfundata, align)
2648 unsigned HOST_WIDE_INT len;
2649 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2650 PTR constfundata;
2651 unsigned int align;
2653 unsigned HOST_WIDE_INT max_size, l;
2654 HOST_WIDE_INT offset = 0;
2655 enum machine_mode mode, tmode;
2656 enum insn_code icode;
2657 int reverse;
2658 rtx cst;
2660 if (! STORE_BY_PIECES_P (len, align))
2661 return 0;
2663 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2664 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2665 align = MOVE_MAX * BITS_PER_UNIT;
2667 /* We would first store what we can in the largest integer mode, then go to
2668 successively smaller modes. */
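/* For example, assuming STORE_MAX_PIECES and ALIGN permit SImode, a
7-byte block is checked as one SImode piece, then one HImode piece,
then one QImode piece.  */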
2670 for (reverse = 0;
2671 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2672 reverse++)
2674 l = len;
2675 mode = VOIDmode;
2676 max_size = STORE_MAX_PIECES + 1;
2677 while (max_size > 1)
2679 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2680 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2681 if (GET_MODE_SIZE (tmode) < max_size)
2682 mode = tmode;
2684 if (mode == VOIDmode)
2685 break;
2687 icode = mov_optab->handlers[(int) mode].insn_code;
2688 if (icode != CODE_FOR_nothing
2689 && align >= GET_MODE_ALIGNMENT (mode))
2691 unsigned int size = GET_MODE_SIZE (mode);
2693 while (l >= size)
2695 if (reverse)
2696 offset -= size;
2698 cst = (*constfun) (constfundata, offset, mode);
2699 if (!LEGITIMATE_CONSTANT_P (cst))
2700 return 0;
2702 if (!reverse)
2703 offset += size;
2705 l -= size;
2709 max_size = GET_MODE_SIZE (mode);
2712 /* The code above should have handled everything. */
2713 if (l != 0)
2714 abort ();
2717 return 1;
2720 /* Generate several move instructions to store LEN bytes generated by
2721 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2722 pointer which will be passed as argument in every CONSTFUN call.
2723 ALIGN is maximum alignment we can assume. */
2725 void
2726 store_by_pieces (to, len, constfun, constfundata, align)
2727 rtx to;
2728 unsigned HOST_WIDE_INT len;
2729 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2730 PTR constfundata;
2731 unsigned int align;
2733 struct store_by_pieces data;
2735 if (! STORE_BY_PIECES_P (len, align))
2736 abort ();
2737 to = protect_from_queue (to, 1);
2738 data.constfun = constfun;
2739 data.constfundata = constfundata;
2740 data.len = len;
2741 data.to = to;
2742 store_by_pieces_1 (&data, align);
2745 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2746 rtx with BLKmode). The caller must pass TO through protect_from_queue
2747 before calling. ALIGN is maximum alignment we can assume. */
2749 static void
2750 clear_by_pieces (to, len, align)
2751 rtx to;
2752 unsigned HOST_WIDE_INT len;
2753 unsigned int align;
2755 struct store_by_pieces data;
2757 data.constfun = clear_by_pieces_1;
2758 data.constfundata = NULL;
2759 data.len = len;
2760 data.to = to;
2761 store_by_pieces_1 (&data, align);
2764 /* Callback routine for clear_by_pieces.
2765 Return const0_rtx unconditionally. */
2767 static rtx
2768 clear_by_pieces_1 (data, offset, mode)
2769 PTR data ATTRIBUTE_UNUSED;
2770 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2771 enum machine_mode mode ATTRIBUTE_UNUSED;
2773 return const0_rtx;
2776 /* Subroutine of clear_by_pieces and store_by_pieces.
2777 Generate several move instructions to store LEN bytes of block TO. (A MEM
2778 rtx with BLKmode). The caller must pass TO through protect_from_queue
2779 before calling. ALIGN is maximum alignment we can assume. */
2781 static void
2782 store_by_pieces_1 (data, align)
2783 struct store_by_pieces *data;
2784 unsigned int align;
2786 rtx to_addr = XEXP (data->to, 0);
2787 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2788 enum machine_mode mode = VOIDmode, tmode;
2789 enum insn_code icode;
2791 data->offset = 0;
2792 data->to_addr = to_addr;
2793 data->autinc_to
2794 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2795 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2797 data->explicit_inc_to = 0;
2798 data->reverse
2799 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2800 if (data->reverse)
2801 data->offset = data->len;
2803 /* If storing requires more than two move insns,
2804 copy addresses to registers (to make displacements shorter)
2805 and use post-increment if available. */
2806 if (!data->autinc_to
2807 && move_by_pieces_ninsns (data->len, align) > 2)
2809 /* Determine the main mode we'll be using. */
2810 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2811 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2812 if (GET_MODE_SIZE (tmode) < max_size)
2813 mode = tmode;
2815 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2817 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2818 data->autinc_to = 1;
2819 data->explicit_inc_to = -1;
2822 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2823 && ! data->autinc_to)
2825 data->to_addr = copy_addr_to_reg (to_addr);
2826 data->autinc_to = 1;
2827 data->explicit_inc_to = 1;
2830 if (!data->autinc_to && CONSTANT_P (to_addr))
2831 data->to_addr = copy_addr_to_reg (to_addr);
2834 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2835 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2836 align = MOVE_MAX * BITS_PER_UNIT;
2838 /* First store what we can in the largest integer mode, then go to
2839 successively smaller modes. */
2841 while (max_size > 1)
2843 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2844 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2845 if (GET_MODE_SIZE (tmode) < max_size)
2846 mode = tmode;
2848 if (mode == VOIDmode)
2849 break;
2851 icode = mov_optab->handlers[(int) mode].insn_code;
2852 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2853 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2855 max_size = GET_MODE_SIZE (mode);
2858 /* The code above should have handled everything. */
2859 if (data->len != 0)
2860 abort ();
2863 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2864 with move instructions for mode MODE. GENFUN is the gen_... function
2865 to make a move insn for that mode. DATA has all the other info. */
2867 static void
2868 store_by_pieces_2 (genfun, mode, data)
2869 rtx (*genfun) PARAMS ((rtx, ...));
2870 enum machine_mode mode;
2871 struct store_by_pieces *data;
2873 unsigned int size = GET_MODE_SIZE (mode);
2874 rtx to1, cst;
2876 while (data->len >= size)
2878 if (data->reverse)
2879 data->offset -= size;
2881 if (data->autinc_to)
2882 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2883 data->offset);
2884 else
2885 to1 = adjust_address (data->to, mode, data->offset);
2887 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2888 emit_insn (gen_add2_insn (data->to_addr,
2889 GEN_INT (-(HOST_WIDE_INT) size)));
2891 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2892 emit_insn ((*genfun) (to1, cst));
2894 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2895 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2897 if (! data->reverse)
2898 data->offset += size;
2900 data->len -= size;
2904 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2905 its length in bytes. */
2907 rtx
2908 clear_storage (object, size)
2909 rtx object;
2910 rtx size;
2912 rtx retval = 0;
2913 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2914 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2916 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2917 just move a zero. Otherwise, do this a piece at a time. */
2918 if (GET_MODE (object) != BLKmode
2919 && GET_CODE (size) == CONST_INT
2920 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2921 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2922 else
2924 object = protect_from_queue (object, 1);
2925 size = protect_from_queue (size, 0);
2927 if (GET_CODE (size) == CONST_INT
2928 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2929 clear_by_pieces (object, INTVAL (size), align);
2930 else if (clear_storage_via_clrstr (object, size, align))
2932 else
2933 retval = clear_storage_via_libcall (object, size);
2936 return retval;
2939 /* A subroutine of clear_storage. Expand a clrstr pattern;
2940 return true if successful. */
2942 static bool
2943 clear_storage_via_clrstr (object, size, align)
2944 rtx object, size;
2945 unsigned int align;
2947 /* Try the most limited insn first, because there's no point
2948 including more than one in the machine description unless
2949 the more limited one has some advantage. */
2951 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2952 enum machine_mode mode;
2954 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2955 mode = GET_MODE_WIDER_MODE (mode))
2957 enum insn_code code = clrstr_optab[(int) mode];
2958 insn_operand_predicate_fn pred;
2960 if (code != CODE_FOR_nothing
2961 /* We don't need MODE to be narrower than
2962 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2963 the mode mask, as it is returned by the macro, it will
2964 definitely be less than the actual mode mask. */
2965 && ((GET_CODE (size) == CONST_INT
2966 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2967 <= (GET_MODE_MASK (mode) >> 1)))
2968 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2969 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2970 || (*pred) (object, BLKmode))
2971 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2972 || (*pred) (opalign, VOIDmode)))
2974 rtx op1;
2975 rtx last = get_last_insn ();
2976 rtx pat;
2978 op1 = convert_to_mode (mode, size, 1);
2979 pred = insn_data[(int) code].operand[1].predicate;
2980 if (pred != 0 && ! (*pred) (op1, mode))
2981 op1 = copy_to_mode_reg (mode, op1);
2983 pat = GEN_FCN ((int) code) (object, op1, opalign);
2984 if (pat)
2986 emit_insn (pat);
2987 return true;
2989 else
2990 delete_insns_since (last);
2994 return false;
2997 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2998 Return the return value of memset, 0 otherwise. */
3000 static rtx
3001 clear_storage_via_libcall (object, size)
3002 rtx object, size;
3004 tree call_expr, arg_list, fn, object_tree, size_tree;
3005 enum machine_mode size_mode;
3006 rtx retval;
3008 /* OBJECT or SIZE may have been passed through protect_from_queue.
3010 It is unsafe to save the value generated by protect_from_queue
3011 and reuse it later. Consider what happens if emit_queue is
3012 called before the return value from protect_from_queue is used.
3014 Expansion of the CALL_EXPR below will call emit_queue before
3015 we are finished emitting RTL for argument setup. So if we are
3016 not careful we could get the wrong value for an argument.
3018 To avoid this problem we go ahead and emit code to copy OBJECT
3019 and SIZE into new pseudos. We can then place those new pseudos
3020 into an RTL_EXPR and use them later, even after a call to
3021 emit_queue.
3023 Note this is not strictly needed for library calls since they
3024 do not call emit_queue before loading their arguments. However,
3025 we may need to have library calls call emit_queue in the future
3026 since failing to do so could cause problems for targets which
3027 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3029 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3031 if (TARGET_MEM_FUNCTIONS)
3032 size_mode = TYPE_MODE (sizetype);
3033 else
3034 size_mode = TYPE_MODE (unsigned_type_node);
3035 size = convert_to_mode (size_mode, size, 1);
3036 size = copy_to_mode_reg (size_mode, size);
3038 /* It is incorrect to use the libcall calling conventions to call
3039 memset in this context. This could be a user call to memset and
3040 the user may wish to examine the return value from memset. For
3041 targets where libcalls and normal calls have different conventions
3042 for returning pointers, we could end up generating incorrect code.
3044 For convenience, we generate the call to bzero this way as well. */
3046 object_tree = make_tree (ptr_type_node, object);
3047 if (TARGET_MEM_FUNCTIONS)
3048 size_tree = make_tree (sizetype, size);
3049 else
3050 size_tree = make_tree (unsigned_type_node, size);
3052 fn = clear_storage_libcall_fn (true);
3053 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3054 if (TARGET_MEM_FUNCTIONS)
3055 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3056 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3058 /* Now we have to build up the CALL_EXPR itself. */
3059 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3060 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3061 call_expr, arg_list, NULL_TREE);
3062 TREE_SIDE_EFFECTS (call_expr) = 1;
3064 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3066 /* If we are initializing a readonly value, show the above call
3067 clobbered it. Otherwise, a load from it may erroneously be
3068 hoisted from a loop. */
3069 if (RTX_UNCHANGING_P (object))
3070 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3072 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3075 /* A subroutine of clear_storage_via_libcall. Create the tree node
3076 for the function we use for block clears. The first time FOR_CALL
3077 is true, we call assemble_external. */
3079 static GTY(()) tree block_clear_fn;
3081 static tree
3082 clear_storage_libcall_fn (for_call)
3083 int for_call;
3085 static bool emitted_extern;
3086 tree fn = block_clear_fn, args;
3088 if (!fn)
3090 if (TARGET_MEM_FUNCTIONS)
3092 fn = get_identifier ("memset");
3093 args = build_function_type_list (ptr_type_node, ptr_type_node,
3094 integer_type_node, sizetype,
3095 NULL_TREE);
3097 else
3099 fn = get_identifier ("bzero");
3100 args = build_function_type_list (void_type_node, ptr_type_node,
3101 unsigned_type_node, NULL_TREE);
3104 fn = build_decl (FUNCTION_DECL, fn, args);
3105 DECL_EXTERNAL (fn) = 1;
3106 TREE_PUBLIC (fn) = 1;
3107 DECL_ARTIFICIAL (fn) = 1;
3108 TREE_NOTHROW (fn) = 1;
3110 block_clear_fn = fn;
3113 if (for_call && !emitted_extern)
3115 emitted_extern = true;
3116 make_decl_rtl (fn, NULL);
3117 assemble_external (fn);
3120 return fn;
3123 /* Generate code to copy Y into X.
3124 Both Y and X must have the same mode, except that
3125 Y can be a constant with VOIDmode.
3126 This mode cannot be BLKmode; use emit_block_move for that.
3128 Return the last instruction emitted. */
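/* For example, emit_move_insn (reg, GEN_INT (42)) copies the VOIDmode
constant 42 into REG using whatever insn the target provides for
REG's mode.  */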
3130 rtx
3131 emit_move_insn (x, y)
3132 rtx x, y;
3134 enum machine_mode mode = GET_MODE (x);
3135 rtx y_cst = NULL_RTX;
3136 rtx last_insn;
3138 x = protect_from_queue (x, 1);
3139 y = protect_from_queue (y, 0);
3141 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3142 abort ();
3144 /* Never force constant_p_rtx to memory. */
3145 if (GET_CODE (y) == CONSTANT_P_RTX)
3147 else if (CONSTANT_P (y))
3149 if (optimize
3150 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3151 && (last_insn = compress_float_constant (x, y)))
3152 return last_insn;
3154 if (!LEGITIMATE_CONSTANT_P (y))
3156 y_cst = y;
3157 y = force_const_mem (mode, y);
3159 /* If the target's cannot_force_const_mem prevented the spill,
3160 assume that the target's move expanders will also take care
3161 of the non-legitimate constant. */
3162 if (!y)
3163 y = y_cst;
3167 /* If X or Y are memory references, verify that their addresses are valid
3168 for the machine. */
3169 if (GET_CODE (x) == MEM
3170 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3171 && ! push_operand (x, GET_MODE (x)))
3172 || (flag_force_addr
3173 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3174 x = validize_mem (x);
3176 if (GET_CODE (y) == MEM
3177 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3178 || (flag_force_addr
3179 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3180 y = validize_mem (y);
3182 if (mode == BLKmode)
3183 abort ();
3185 last_insn = emit_move_insn_1 (x, y);
3187 if (y_cst && GET_CODE (x) == REG)
3188 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3190 return last_insn;
3193 /* Low level part of emit_move_insn.
3194 Called just like emit_move_insn, but assumes X and Y
3195 are basically valid. */
3197 rtx
3198 emit_move_insn_1 (x, y)
3199 rtx x, y;
3201 enum machine_mode mode = GET_MODE (x);
3202 enum machine_mode submode;
3203 enum mode_class class = GET_MODE_CLASS (mode);
3205 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3206 abort ();
3208 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3209 return
3210 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3212 /* Expand complex moves by moving real part and imag part, if possible. */
3213 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3214 && BLKmode != (submode = GET_MODE_INNER (mode))
3215 && (mov_optab->handlers[(int) submode].insn_code
3216 != CODE_FOR_nothing))
3218 /* Don't split destination if it is a stack push. */
3219 int stack = push_operand (x, GET_MODE (x));
3221 #ifdef PUSH_ROUNDING
3222 /* In case we push to the stack, but the size is smaller than what the
3223 machine can push exactly, we need to use move instructions. */
3224 if (stack
3225 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3226 != GET_MODE_SIZE (submode)))
3228 rtx temp;
3229 HOST_WIDE_INT offset1, offset2;
3231 /* Do not use anti_adjust_stack, since we don't want to update
3232 stack_pointer_delta. */
3233 temp = expand_binop (Pmode,
3234 #ifdef STACK_GROWS_DOWNWARD
3235 sub_optab,
3236 #else
3237 add_optab,
3238 #endif
3239 stack_pointer_rtx,
3240 GEN_INT
3241 (PUSH_ROUNDING
3242 (GET_MODE_SIZE (GET_MODE (x)))),
3243 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3245 if (temp != stack_pointer_rtx)
3246 emit_move_insn (stack_pointer_rtx, temp);
3248 #ifdef STACK_GROWS_DOWNWARD
3249 offset1 = 0;
3250 offset2 = GET_MODE_SIZE (submode);
3251 #else
3252 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3253 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3254 + GET_MODE_SIZE (submode));
3255 #endif
3257 emit_move_insn (change_address (x, submode,
3258 gen_rtx_PLUS (Pmode,
3259 stack_pointer_rtx,
3260 GEN_INT (offset1))),
3261 gen_realpart (submode, y));
3262 emit_move_insn (change_address (x, submode,
3263 gen_rtx_PLUS (Pmode,
3264 stack_pointer_rtx,
3265 GEN_INT (offset2))),
3266 gen_imagpart (submode, y));
3268 else
3269 #endif
3270 /* If this is a stack push, push the highpart first, so it
3271 will be in the argument order.
3273 In that case, change_address is used only to convert
3274 the mode, not to change the address. */
3275 if (stack)
3277 /* Note that the real part always precedes the imag part in memory
3278 regardless of machine's endianness. */
3279 #ifdef STACK_GROWS_DOWNWARD
3280 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3281 (gen_rtx_MEM (submode, XEXP (x, 0)),
3282 gen_imagpart (submode, y)));
3283 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3284 (gen_rtx_MEM (submode, XEXP (x, 0)),
3285 gen_realpart (submode, y)));
3286 #else
3287 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3288 (gen_rtx_MEM (submode, XEXP (x, 0)),
3289 gen_realpart (submode, y)));
3290 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3291 (gen_rtx_MEM (submode, XEXP (x, 0)),
3292 gen_imagpart (submode, y)));
3293 #endif
3295 else
3297 rtx realpart_x, realpart_y;
3298 rtx imagpart_x, imagpart_y;
3300 /* If this is a complex value with each part being smaller than a
3301 word, the usual calling sequence will likely pack the pieces into
3302 a single register. Unfortunately, SUBREG of hard registers only
3303 deals in terms of words, so we have a problem converting input
3304 arguments to the CONCAT of two registers that is used elsewhere
3305 for complex values. If this is before reload, we can copy it into
3306 memory and reload. FIXME, we should see about using extract and
3307 insert on integer registers, but complex short and complex char
3308 variables should be rarely used. */
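/* For example, a complex char value occupies only two bytes, so the
calling convention may hand it to us packed into a single word-sized
hard register; bouncing it through a stack temporary lets it be viewed
either as that packed integer or as its real and imaginary parts.  */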
3309 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3310 && (reload_in_progress | reload_completed) == 0)
3312 int packed_dest_p
3313 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3314 int packed_src_p
3315 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3317 if (packed_dest_p || packed_src_p)
3319 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3320 ? MODE_FLOAT : MODE_INT);
3322 enum machine_mode reg_mode
3323 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3325 if (reg_mode != BLKmode)
3327 rtx mem = assign_stack_temp (reg_mode,
3328 GET_MODE_SIZE (mode), 0);
3329 rtx cmem = adjust_address (mem, mode, 0);
3331 cfun->cannot_inline
3332 = N_("function using short complex types cannot be inline");
3334 if (packed_dest_p)
3336 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3338 emit_move_insn_1 (cmem, y);
3339 return emit_move_insn_1 (sreg, mem);
3341 else
3343 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3345 emit_move_insn_1 (mem, sreg);
3346 return emit_move_insn_1 (x, cmem);
3352 realpart_x = gen_realpart (submode, x);
3353 realpart_y = gen_realpart (submode, y);
3354 imagpart_x = gen_imagpart (submode, x);
3355 imagpart_y = gen_imagpart (submode, y);
3357 /* Show the output dies here. This is necessary for SUBREGs
3358 of pseudos since we cannot track their lifetimes correctly;
3359 hard regs shouldn't appear here except as return values.
3360 We never want to emit such a clobber after reload. */
3361 if (x != y
3362 && ! (reload_in_progress || reload_completed)
3363 && (GET_CODE (realpart_x) == SUBREG
3364 || GET_CODE (imagpart_x) == SUBREG))
3365 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3367 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3368 (realpart_x, realpart_y));
3369 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3370 (imagpart_x, imagpart_y));
3373 return get_last_insn ();
3376 /* This will handle any multi-word or full-word mode that lacks a move_insn
3377 pattern. However, you will get better code if you define such patterns,
3378 even if they must turn into multiple assembler instructions. */
3379 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3381 rtx last_insn = 0;
3382 rtx seq, inner;
3383 int need_clobber;
3384 int i;
3386 #ifdef PUSH_ROUNDING
3388 /* If X is a push on the stack, do the push now and replace
3389 X with a reference to the stack pointer. */
3390 if (push_operand (x, GET_MODE (x)))
3392 rtx temp;
3393 enum rtx_code code;
3395 /* Do not use anti_adjust_stack, since we don't want to update
3396 stack_pointer_delta. */
3397 temp = expand_binop (Pmode,
3398 #ifdef STACK_GROWS_DOWNWARD
3399 sub_optab,
3400 #else
3401 add_optab,
3402 #endif
3403 stack_pointer_rtx,
3404 GEN_INT
3405 (PUSH_ROUNDING
3406 (GET_MODE_SIZE (GET_MODE (x)))),
3407 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3409 if (temp != stack_pointer_rtx)
3410 emit_move_insn (stack_pointer_rtx, temp);
3412 code = GET_CODE (XEXP (x, 0));
3414 /* Just hope that small offsets off SP are OK. */
3415 if (code == POST_INC)
3416 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3417 GEN_INT (-((HOST_WIDE_INT)
3418 GET_MODE_SIZE (GET_MODE (x)))));
3419 else if (code == POST_DEC)
3420 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3421 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3422 else
3423 temp = stack_pointer_rtx;
3425 x = change_address (x, VOIDmode, temp);
3427 #endif
3429 /* If we are in reload, see if either operand is a MEM whose address
3430 is scheduled for replacement. */
3431 if (reload_in_progress && GET_CODE (x) == MEM
3432 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3433 x = replace_equiv_address_nv (x, inner);
3434 if (reload_in_progress && GET_CODE (y) == MEM
3435 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3436 y = replace_equiv_address_nv (y, inner);
3438 start_sequence ();
3440 need_clobber = 0;
3441 for (i = 0;
3442 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3443 i++)
3445 rtx xpart = operand_subword (x, i, 1, mode);
3446 rtx ypart = operand_subword (y, i, 1, mode);
3448 /* If we can't get a part of Y, put Y into memory if it is a
3449 constant. Otherwise, force it into a register. If we still
3450 can't get a part of Y, abort. */
3451 if (ypart == 0 && CONSTANT_P (y))
3453 y = force_const_mem (mode, y);
3454 ypart = operand_subword (y, i, 1, mode);
3456 else if (ypart == 0)
3457 ypart = operand_subword_force (y, i, mode);
3459 if (xpart == 0 || ypart == 0)
3460 abort ();
3462 need_clobber |= (GET_CODE (xpart) == SUBREG);
3464 last_insn = emit_move_insn (xpart, ypart);
3467 seq = get_insns ();
3468 end_sequence ();
3470 /* Show the output dies here. This is necessary for SUBREGs
3471 of pseudos since we cannot track their lifetimes correctly;
3472 hard regs shouldn't appear here except as return values.
3473 We never want to emit such a clobber after reload. */
3474 if (x != y
3475 && ! (reload_in_progress || reload_completed)
3476 && need_clobber != 0)
3477 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3479 emit_insn (seq);
3481 return last_insn;
3483 else
3484 abort ();
3487 /* If Y is representable exactly in a narrower mode, and the target can
3488 perform the extension directly from constant or memory, then emit the
3489 move as an extension. */
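/* For example, assuming the target can extend SFmode to DFmode directly,
a DFmode constant such as 1.0 that truncates exactly to SFmode is
emitted as a single extension from the narrower constant, so only the
SFmode value needs to be materialized.  */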
3491 static rtx
3492 compress_float_constant (x, y)
3493 rtx x, y;
3495 enum machine_mode dstmode = GET_MODE (x);
3496 enum machine_mode orig_srcmode = GET_MODE (y);
3497 enum machine_mode srcmode;
3498 REAL_VALUE_TYPE r;
3500 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3502 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3503 srcmode != orig_srcmode;
3504 srcmode = GET_MODE_WIDER_MODE (srcmode))
3506 enum insn_code ic;
3507 rtx trunc_y, last_insn;
3509 /* Skip if the target can't extend this way. */
3510 ic = can_extend_p (dstmode, srcmode, 0);
3511 if (ic == CODE_FOR_nothing)
3512 continue;
3514 /* Skip if the narrowed value isn't exact. */
3515 if (! exact_real_truncate (srcmode, &r))
3516 continue;
3518 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3520 if (LEGITIMATE_CONSTANT_P (trunc_y))
3522 /* Skip if the target needs extra instructions to perform
3523 the extension. */
3524 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3525 continue;
3527 else if (float_extend_from_mem[dstmode][srcmode])
3528 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3529 else
3530 continue;
3532 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3533 last_insn = get_last_insn ();
3535 if (GET_CODE (x) == REG)
3536 REG_NOTES (last_insn)
3537 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3539 return last_insn;
3542 return NULL_RTX;
3545 /* Pushing data onto the stack. */
3547 /* Push a block of length SIZE (perhaps variable)
3548 and return an rtx to address the beginning of the block.
3549 Note that it is not possible for the value returned to be a QUEUED.
3550 The value may be virtual_outgoing_args_rtx.
3552 EXTRA is the number of bytes of padding to push in addition to SIZE.
3553 BELOW nonzero means this padding comes at low addresses;
3554 otherwise, the padding comes at high addresses. */
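/* For example, assuming STACK_GROWS_DOWNWARD, a call with SIZE == 16 and
EXTRA == 4 adjusts the stack by 20 bytes; if BELOW is nonzero the
returned address skips the 4 padding bytes, otherwise the padding ends
up beyond the 16-byte block.  */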
3556 rtx
3557 push_block (size, extra, below)
3558 rtx size;
3559 int extra, below;
3561 rtx temp;
3563 size = convert_modes (Pmode, ptr_mode, size, 1);
3564 if (CONSTANT_P (size))
3565 anti_adjust_stack (plus_constant (size, extra));
3566 else if (GET_CODE (size) == REG && extra == 0)
3567 anti_adjust_stack (size);
3568 else
3570 temp = copy_to_mode_reg (Pmode, size);
3571 if (extra != 0)
3572 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3573 temp, 0, OPTAB_LIB_WIDEN);
3574 anti_adjust_stack (temp);
3577 #ifndef STACK_GROWS_DOWNWARD
3578 if (0)
3579 #else
3580 if (1)
3581 #endif
3583 temp = virtual_outgoing_args_rtx;
3584 if (extra != 0 && below)
3585 temp = plus_constant (temp, extra);
3587 else
3589 if (GET_CODE (size) == CONST_INT)
3590 temp = plus_constant (virtual_outgoing_args_rtx,
3591 -INTVAL (size) - (below ? 0 : extra));
3592 else if (extra != 0 && !below)
3593 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3594 negate_rtx (Pmode, plus_constant (size, extra)));
3595 else
3596 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3597 negate_rtx (Pmode, size));
3600 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3603 #ifdef PUSH_ROUNDING
3605 /* Emit single push insn. */
3607 static void
3608 emit_single_push_insn (mode, x, type)
3609 rtx x;
3610 enum machine_mode mode;
3611 tree type;
3613 rtx dest_addr;
3614 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3615 rtx dest;
3616 enum insn_code icode;
3617 insn_operand_predicate_fn pred;
3619 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3620 /* If there is a push pattern, use it. Otherwise try the old way of
3621 handing a MEM that represents a push operation to the move expander. */
3622 icode = push_optab->handlers[(int) mode].insn_code;
3623 if (icode != CODE_FOR_nothing)
3625 if (((pred = insn_data[(int) icode].operand[0].predicate)
3626 && !((*pred) (x, mode))))
3627 x = force_reg (mode, x);
3628 emit_insn (GEN_FCN (icode) (x));
3629 return;
3631 if (GET_MODE_SIZE (mode) == rounded_size)
3632 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3633 else
3635 #ifdef STACK_GROWS_DOWNWARD
3636 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3637 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3638 #else
3639 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3640 GEN_INT (rounded_size));
3641 #endif
3642 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3645 dest = gen_rtx_MEM (mode, dest_addr);
3647 if (type != 0)
3649 set_mem_attributes (dest, type, 1);
3651 if (flag_optimize_sibling_calls)
3652 /* Function incoming arguments may overlap with sibling call
3653 outgoing arguments and we cannot allow reordering of reads
3654 from function arguments with stores to outgoing arguments
3655 of sibling calls. */
3656 set_mem_alias_set (dest, 0);
3658 emit_move_insn (dest, x);
3660 #endif
3662 /* Generate code to push X onto the stack, assuming it has mode MODE and
3663 type TYPE.
3664 MODE is redundant except when X is a CONST_INT (since they don't
3665 carry mode info).
3666 SIZE is an rtx for the size of data to be copied (in bytes),
3667 needed only if X is BLKmode.
3669 ALIGN (in bits) is maximum alignment we can assume.
3671 If PARTIAL and REG are both nonzero, then copy that many of the first
3672 words of X into registers starting with REG, and push the rest of X.
3673 The amount of space pushed is decreased by PARTIAL words,
3674 rounded *down* to a multiple of PARM_BOUNDARY.
3675 REG must be a hard register in this case.
3676 If REG is zero but PARTIAL is not, take all other actions for an
3677 argument partially in registers, but do not actually load any
3678 registers.
3680 EXTRA is the amount in bytes of extra space to leave next to this arg.
3681 This is ignored if an argument block has already been allocated.
3683 On a machine that lacks real push insns, ARGS_ADDR is the address of
3684 the bottom of the argument block for this call. We use indexing off there
3685 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3686 argument block has not been preallocated.
3688 ARGS_SO_FAR is the size of args previously pushed for this call.
3690 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3691 for arguments passed in registers. If nonzero, it will be the number
3692 of bytes required. */
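/* For example, with PARTIAL == 2 and a suitable hard register REG, the
first two words of X are loaded into registers starting at REG and only
the remainder of X is pushed, with the space pushed reduced by those two
words (rounded down to a multiple of PARM_BOUNDARY).  */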
3694 void
3695 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3696 args_addr, args_so_far, reg_parm_stack_space,
3697 alignment_pad)
3698 rtx x;
3699 enum machine_mode mode;
3700 tree type;
3701 rtx size;
3702 unsigned int align;
3703 int partial;
3704 rtx reg;
3705 int extra;
3706 rtx args_addr;
3707 rtx args_so_far;
3708 int reg_parm_stack_space;
3709 rtx alignment_pad;
3711 rtx xinner;
3712 enum direction stack_direction
3713 #ifdef STACK_GROWS_DOWNWARD
3714 = downward;
3715 #else
3716 = upward;
3717 #endif
3719 /* Decide where to pad the argument: `downward' for below,
3720 `upward' for above, or `none' for don't pad it.
3721 Default is below for small data on big-endian machines; else above. */
3722 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3724 /* Invert direction if stack is post-decrement.
3725 FIXME: why? */
3726 if (STACK_PUSH_CODE == POST_DEC)
3727 if (where_pad != none)
3728 where_pad = (where_pad == downward ? upward : downward);
3730 xinner = x = protect_from_queue (x, 0);
3732 if (mode == BLKmode)
3734 /* Copy a block into the stack, entirely or partially. */
3736 rtx temp;
3737 int used = partial * UNITS_PER_WORD;
3738 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3739 int skip;
3741 if (size == 0)
3742 abort ();
3744 used -= offset;
3746 /* USED is now the # of bytes we need not copy to the stack
3747 because registers will take care of them. */
3749 if (partial != 0)
3750 xinner = adjust_address (xinner, BLKmode, used);
3752 /* If the partial register-part of the arg counts in its stack size,
3753 skip the part of stack space corresponding to the registers.
3754 Otherwise, start copying to the beginning of the stack space,
3755 by setting SKIP to 0. */
3756 skip = (reg_parm_stack_space == 0) ? 0 : used;
3758 #ifdef PUSH_ROUNDING
3759 /* Do it with several push insns if that doesn't take lots of insns
3760 and if there is no difficulty with push insns that skip bytes
3761 on the stack for alignment purposes. */
3762 if (args_addr == 0
3763 && PUSH_ARGS
3764 && GET_CODE (size) == CONST_INT
3765 && skip == 0
3766 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3767 /* Here we avoid the case of a structure whose weak alignment
3768 forces many pushes of a small amount of data,
3769 and such small pushes do rounding that causes trouble. */
3770 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3771 || align >= BIGGEST_ALIGNMENT
3772 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3773 == (align / BITS_PER_UNIT)))
3774 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3776 /* Push padding now if padding above and stack grows down,
3777 or if padding below and stack grows up.
3778 But if space already allocated, this has already been done. */
3779 if (extra && args_addr == 0
3780 && where_pad != none && where_pad != stack_direction)
3781 anti_adjust_stack (GEN_INT (extra));
3783 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3785 else
3786 #endif /* PUSH_ROUNDING */
3788 rtx target;
3790 /* Otherwise make space on the stack and copy the data
3791 to the address of that space. */
3793 /* Deduct words put into registers from the size we must copy. */
3794 if (partial != 0)
3796 if (GET_CODE (size) == CONST_INT)
3797 size = GEN_INT (INTVAL (size) - used);
3798 else
3799 size = expand_binop (GET_MODE (size), sub_optab, size,
3800 GEN_INT (used), NULL_RTX, 0,
3801 OPTAB_LIB_WIDEN);
3804 /* Get the address of the stack space.
3805 In this case, we do not deal with EXTRA separately.
3806 A single stack adjust will do. */
3807 if (! args_addr)
3809 temp = push_block (size, extra, where_pad == downward);
3810 extra = 0;
3812 else if (GET_CODE (args_so_far) == CONST_INT)
3813 temp = memory_address (BLKmode,
3814 plus_constant (args_addr,
3815 skip + INTVAL (args_so_far)));
3816 else
3817 temp = memory_address (BLKmode,
3818 plus_constant (gen_rtx_PLUS (Pmode,
3819 args_addr,
3820 args_so_far),
3821 skip));
3823 if (!ACCUMULATE_OUTGOING_ARGS)
3825 /* If the source is referenced relative to the stack pointer,
3826 copy it to another register to stabilize it. We do not need
3827 to do this if we know that we won't be changing sp. */
3829 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3830 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3831 temp = copy_to_reg (temp);
3834 target = gen_rtx_MEM (BLKmode, temp);
3836 if (type != 0)
3838 set_mem_attributes (target, type, 1);
3839 /* Function incoming arguments may overlap with sibling call
3840 outgoing arguments and we cannot allow reordering of reads
3841 from function arguments with stores to outgoing arguments
3842 of sibling calls. */
3843 set_mem_alias_set (target, 0);
3846 /* ALIGN may well be better aligned than TYPE, e.g. due to
3847 PARM_BOUNDARY. Assume the caller isn't lying. */
3848 set_mem_align (target, align);
3850 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3853 else if (partial > 0)
3855 /* Scalar partly in registers. */
3857 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3858 int i;
3859 int not_stack;
3860 /* # words of start of argument
3861 that we must make space for but need not store. */
3862 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3863 int args_offset = INTVAL (args_so_far);
3864 int skip;
3866 /* Push padding now if padding above and stack grows down,
3867 or if padding below and stack grows up.
3868 But if space already allocated, this has already been done. */
3869 if (extra && args_addr == 0
3870 && where_pad != none && where_pad != stack_direction)
3871 anti_adjust_stack (GEN_INT (extra));
3873 /* If we make space by pushing it, we might as well push
3874 the real data. Otherwise, we can leave OFFSET nonzero
3875 and leave the space uninitialized. */
3876 if (args_addr == 0)
3877 offset = 0;
3879 /* Now NOT_STACK gets the number of words that we don't need to
3880 allocate on the stack. */
3881 not_stack = partial - offset;
3883 /* If the partial register-part of the arg counts in its stack size,
3884 skip the part of stack space corresponding to the registers.
3885 Otherwise, start copying to the beginning of the stack space,
3886 by setting SKIP to 0. */
3887 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3889 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3890 x = validize_mem (force_const_mem (mode, x));
3892 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3893 SUBREGs of such registers are not allowed. */
3894 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3895 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3896 x = copy_to_reg (x);
3898 /* Loop over all the words allocated on the stack for this arg. */
3899 /* We can do it by words, because any scalar bigger than a word
3900 has a size a multiple of a word. */
3901 #ifndef PUSH_ARGS_REVERSED
3902 for (i = not_stack; i < size; i++)
3903 #else
3904 for (i = size - 1; i >= not_stack; i--)
3905 #endif
3906 if (i >= not_stack + offset)
3907 emit_push_insn (operand_subword_force (x, i, mode),
3908 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3909 0, args_addr,
3910 GEN_INT (args_offset + ((i - not_stack + skip)
3911 * UNITS_PER_WORD)),
3912 reg_parm_stack_space, alignment_pad);
3914 else
3916 rtx addr;
3917 rtx dest;
3919 /* Push padding now if padding above and stack grows down,
3920 or if padding below and stack grows up.
3921 But if space already allocated, this has already been done. */
3922 if (extra && args_addr == 0
3923 && where_pad != none && where_pad != stack_direction)
3924 anti_adjust_stack (GEN_INT (extra));
3926 #ifdef PUSH_ROUNDING
3927 if (args_addr == 0 && PUSH_ARGS)
3928 emit_single_push_insn (mode, x, type);
3929 else
3930 #endif
3932 if (GET_CODE (args_so_far) == CONST_INT)
3933 addr
3934 = memory_address (mode,
3935 plus_constant (args_addr,
3936 INTVAL (args_so_far)));
3937 else
3938 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3939 args_so_far));
3940 dest = gen_rtx_MEM (mode, addr);
3941 if (type != 0)
3943 set_mem_attributes (dest, type, 1);
3944 /* Function incoming arguments may overlap with sibling call
3945 outgoing arguments and we cannot allow reordering of reads
3946 from function arguments with stores to outgoing arguments
3947 of sibling calls. */
3948 set_mem_alias_set (dest, 0);
3951 emit_move_insn (dest, x);
3955 /* If part should go in registers, copy that part
3956 into the appropriate registers. Do this now, at the end,
3957 since mem-to-mem copies above may do function calls. */
3958 if (partial > 0 && reg != 0)
3960 /* Handle calls that pass values in multiple non-contiguous locations.
3961 The Irix 6 ABI has examples of this. */
3962 if (GET_CODE (reg) == PARALLEL)
3963 emit_group_load (reg, x, -1); /* ??? size? */
3964 else
3965 move_block_to_reg (REGNO (reg), x, partial, mode);
3968 if (extra && args_addr == 0 && where_pad == stack_direction)
3969 anti_adjust_stack (GEN_INT (extra));
3971 if (alignment_pad && args_addr == 0)
3972 anti_adjust_stack (alignment_pad);
3975 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3976 operations. */
3978 static rtx
3979 get_subtarget (x)
3980 rtx x;
3982 return ((x == 0
3983 /* Only registers can be subtargets. */
3984 || GET_CODE (x) != REG
3985 /* If the register is readonly, it can't be set more than once. */
3986 || RTX_UNCHANGING_P (x)
3987 /* Don't use hard regs to avoid extending their life. */
3988 || REGNO (x) < FIRST_PSEUDO_REGISTER
3989 /* Avoid subtargets inside loops,
3990 since they hide some invariant expressions. */
3991 || preserve_subexpressions_p ())
3992 ? 0 : x);
3995 /* Expand an assignment that stores the value of FROM into TO.
3996 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3997 (This may contain a QUEUED rtx;
3998 if the value is constant, this rtx is a constant.)
3999 Otherwise, the returned value is NULL_RTX.
4001 SUGGEST_REG is no longer actually used.
4002 It used to mean, copy the value through a register
4003 and return that register, if that is possible.
4004 We now use WANT_VALUE to decide whether to do this. */
4007 expand_assignment (to, from, want_value, suggest_reg)
4008 tree to, from;
4009 int want_value;
4010 int suggest_reg ATTRIBUTE_UNUSED;
4012 rtx to_rtx = 0;
4013 rtx result;
4015 /* Don't crash if the lhs of the assignment was erroneous. */
4017 if (TREE_CODE (to) == ERROR_MARK)
4019 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4020 return want_value ? result : NULL_RTX;
4023 /* Assignment of a structure component needs special treatment
4024 if the structure component's rtx is not simply a MEM.
4025 Assignment of an array element at a constant index, and assignment of
4026 an array element in an unaligned packed structure field, have the same
4027 problem. */
4029 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4030 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4031 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4033 enum machine_mode mode1;
4034 HOST_WIDE_INT bitsize, bitpos;
4035 rtx orig_to_rtx;
4036 tree offset;
4037 int unsignedp;
4038 int volatilep = 0;
4039 tree tem;
4041 push_temp_slots ();
4042 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4043 &unsignedp, &volatilep);
4045 /* If we are going to use store_bit_field and extract_bit_field,
4046 make sure to_rtx will be safe for multiple use. */
4048 if (mode1 == VOIDmode && want_value)
4049 tem = stabilize_reference (tem);
4051 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4053 if (offset != 0)
4055 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4057 if (GET_CODE (to_rtx) != MEM)
4058 abort ();
4060 #ifdef POINTERS_EXTEND_UNSIGNED
4061 if (GET_MODE (offset_rtx) != Pmode)
4062 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4063 #else
4064 if (GET_MODE (offset_rtx) != ptr_mode)
4065 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4066 #endif
4068 /* A constant address in TO_RTX can have VOIDmode; we must not try
4069 to call force_reg for that case. Avoid that case. */
4070 if (GET_CODE (to_rtx) == MEM
4071 && GET_MODE (to_rtx) == BLKmode
4072 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4073 && bitsize > 0
4074 && (bitpos % bitsize) == 0
4075 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4076 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4078 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4079 bitpos = 0;
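/* The constant bit offset has been folded into the address by
   adjust_address above, so no residual bit position remains for
   store_field below.  */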
4082 to_rtx = offset_address (to_rtx, offset_rtx,
4083 highest_pow2_factor_for_type (TREE_TYPE (to),
4084 offset));
4087 if (GET_CODE (to_rtx) == MEM)
4089 /* If the field is at offset zero, we could have been given the
4090 DECL_RTX of the parent struct. Don't munge it. */
4091 to_rtx = shallow_copy_rtx (to_rtx);
4093 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4096 /* Deal with volatile and readonly fields. The former is only done
4097 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4098 if (volatilep && GET_CODE (to_rtx) == MEM)
4100 if (to_rtx == orig_to_rtx)
4101 to_rtx = copy_rtx (to_rtx);
4102 MEM_VOLATILE_P (to_rtx) = 1;
4105 if (TREE_CODE (to) == COMPONENT_REF
4106 && TREE_READONLY (TREE_OPERAND (to, 1)))
4108 if (to_rtx == orig_to_rtx)
4109 to_rtx = copy_rtx (to_rtx);
4110 RTX_UNCHANGING_P (to_rtx) = 1;
4113 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4115 if (to_rtx == orig_to_rtx)
4116 to_rtx = copy_rtx (to_rtx);
4117 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4120 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4121 (want_value
4122 /* Spurious cast for HPUX compiler. */
4123 ? ((enum machine_mode)
4124 TYPE_MODE (TREE_TYPE (to)))
4125 : VOIDmode),
4126 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4128 preserve_temp_slots (result);
4129 free_temp_slots ();
4130 pop_temp_slots ();
4132 /* If the value is meaningful, convert RESULT to the proper mode.
4133 Otherwise, return nothing. */
4134 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4135 TYPE_MODE (TREE_TYPE (from)),
4136 result,
4137 TREE_UNSIGNED (TREE_TYPE (to)))
4138 : NULL_RTX);
4141 /* If the rhs is a function call and its value is not an aggregate,
4142 call the function before we start to compute the lhs.
4143 This is needed for correct code for cases such as
4144 val = setjmp (buf) on machines where reference to val
4145 requires loading up part of an address in a separate insn.
4147 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4148 since it might be a promoted variable where the zero- or sign- extension
4149 needs to be done. Handling this in the normal way is safe because no
4150 computation is done before the call. */
4151 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4152 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4153 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4154 && GET_CODE (DECL_RTL (to)) == REG))
4156 rtx value;
4158 push_temp_slots ();
4159 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4160 if (to_rtx == 0)
4161 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4163 /* Handle calls that return values in multiple non-contiguous locations.
4164 The Irix 6 ABI has examples of this. */
4165 if (GET_CODE (to_rtx) == PARALLEL)
4166 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4167 else if (GET_MODE (to_rtx) == BLKmode)
4168 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4169 else
4171 #ifdef POINTERS_EXTEND_UNSIGNED
4172 if (POINTER_TYPE_P (TREE_TYPE (to))
4173 && GET_MODE (to_rtx) != GET_MODE (value))
4174 value = convert_memory_address (GET_MODE (to_rtx), value);
4175 #endif
4176 emit_move_insn (to_rtx, value);
4178 preserve_temp_slots (to_rtx);
4179 free_temp_slots ();
4180 pop_temp_slots ();
4181 return want_value ? to_rtx : NULL_RTX;
4184 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4185 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4187 if (to_rtx == 0)
4188 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4190 /* Don't move directly into a return register. */
4191 if (TREE_CODE (to) == RESULT_DECL
4192 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4194 rtx temp;
4196 push_temp_slots ();
4197 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4199 if (GET_CODE (to_rtx) == PARALLEL)
4200 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4201 else
4202 emit_move_insn (to_rtx, temp);
4204 preserve_temp_slots (to_rtx);
4205 free_temp_slots ();
4206 pop_temp_slots ();
4207 return want_value ? to_rtx : NULL_RTX;
4210 /* In case we are returning the contents of an object which overlaps
4211 the place the value is being stored, use a safe function when copying
4212 a value through a pointer into a structure value return block. */
4213 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4214 && current_function_returns_struct
4215 && !current_function_returns_pcc_struct)
4217 rtx from_rtx, size;
4219 push_temp_slots ();
4220 size = expr_size (from);
4221 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4223 if (TARGET_MEM_FUNCTIONS)
4224 emit_library_call (memmove_libfunc, LCT_NORMAL,
4225 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4226 XEXP (from_rtx, 0), Pmode,
4227 convert_to_mode (TYPE_MODE (sizetype),
4228 size, TREE_UNSIGNED (sizetype)),
4229 TYPE_MODE (sizetype));
4230 else
4231 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4232 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4233 XEXP (to_rtx, 0), Pmode,
4234 convert_to_mode (TYPE_MODE (integer_type_node),
4235 size,
4236 TREE_UNSIGNED (integer_type_node)),
4237 TYPE_MODE (integer_type_node));
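/* Note the swapped operand order above: memmove takes (dest, src,
   size) while bcopy takes (src, dest, size).  */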
4239 preserve_temp_slots (to_rtx);
4240 free_temp_slots ();
4241 pop_temp_slots ();
4242 return want_value ? to_rtx : NULL_RTX;
4245 /* Compute FROM and store the value in the rtx we got. */
4247 push_temp_slots ();
4248 result = store_expr (from, to_rtx, want_value);
4249 preserve_temp_slots (result);
4250 free_temp_slots ();
4251 pop_temp_slots ();
4252 return want_value ? result : NULL_RTX;
4255 /* Generate code for computing expression EXP,
4256 and storing the value into TARGET.
4257 TARGET may contain a QUEUED rtx.
4259 If WANT_VALUE is nonzero, return a copy of the value
4260 not in TARGET, so that we can be sure to use the proper
4261 value in a containing expression even if TARGET has something
4262 else stored in it. If possible, we copy the value through a pseudo
4263 and return that pseudo. Or, if the value is constant, we try to
4264 return the constant. In some cases, we return a pseudo
4265 copied *from* TARGET.
4267 If the mode is BLKmode then we may return TARGET itself.
4268 It turns out that in BLKmode it doesn't cause a problem,
4269 because C has no operators that could combine two different
4270 assignments into the same BLKmode object with different values
4271 with no sequence point. Will other languages need this to
4272 be more thorough?
4274 If WANT_VALUE is 0, we return NULL, to make sure
4275 to catch quickly any cases where the caller uses the value
4276 and fails to set WANT_VALUE. */
4279 store_expr (exp, target, want_value)
4280 tree exp;
4281 rtx target;
4282 int want_value;
4284 rtx temp;
4285 int dont_return_target = 0;
4286 int dont_store_target = 0;
4288 if (TREE_CODE (exp) == COMPOUND_EXPR)
4290 /* Perform first part of compound expression, then assign from second
4291 part. */
4292 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4293 emit_queue ();
4294 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4296 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4298 /* For a conditional expression, get a safe form of the target. Then
4299 test the condition, doing the appropriate assignment on either
4300 side. This avoids the creation of unnecessary temporaries.
4301 For non-BLKmode, it is more efficient not to do this. */
4303 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4305 emit_queue ();
4306 target = protect_from_queue (target, 1);
4308 do_pending_stack_adjust ();
4309 NO_DEFER_POP;
4310 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4311 start_cleanup_deferral ();
4312 store_expr (TREE_OPERAND (exp, 1), target, 0);
4313 end_cleanup_deferral ();
4314 emit_queue ();
4315 emit_jump_insn (gen_jump (lab2));
4316 emit_barrier ();
4317 emit_label (lab1);
4318 start_cleanup_deferral ();
4319 store_expr (TREE_OPERAND (exp, 2), target, 0);
4320 end_cleanup_deferral ();
4321 emit_queue ();
4322 emit_label (lab2);
4323 OK_DEFER_POP;
4325 return want_value ? target : NULL_RTX;
4327 else if (queued_subexp_p (target))
4328 /* If target contains a postincrement, let's not risk
4329 using it as the place to generate the rhs. */
4331 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4333 /* Expand EXP into a new pseudo. */
4334 temp = gen_reg_rtx (GET_MODE (target));
4335 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4337 else
4338 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4340 /* If target is volatile, ANSI requires accessing the value
4341 *from* the target, if it is accessed. So make that happen.
4342 In no case return the target itself. */
4343 if (! MEM_VOLATILE_P (target) && want_value)
4344 dont_return_target = 1;
4346 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4347 && GET_MODE (target) != BLKmode)
4348 /* If target is in memory and caller wants value in a register instead,
4349 arrange that. Pass TARGET as target for expand_expr so that,
4350 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4351 We know expand_expr will not use the target in that case.
4352 Don't do this if TARGET is volatile because we are supposed
4353 to write it and then read it. */
4355 temp = expand_expr (exp, target, GET_MODE (target), 0);
4356 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4358 /* If TEMP is already in the desired TARGET, only copy it from
4359 memory and don't store it there again. */
4360 if (temp == target
4361 || (rtx_equal_p (temp, target)
4362 && ! side_effects_p (temp) && ! side_effects_p (target)))
4363 dont_store_target = 1;
4364 temp = copy_to_reg (temp);
4366 dont_return_target = 1;
4368 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4369 /* If this is a scalar in a register that is stored in a wider mode
4370 than the declared mode, compute the result into its declared mode
4371 and then convert to the wider mode. Our value is the computed
4372 expression. */
4374 rtx inner_target = 0;
4376 /* If we don't want a value, we can do the conversion inside EXP,
4377 which will often result in some optimizations. Do the conversion
4378 in two steps: first change the signedness, if needed, then
4379 the extend. But don't do this if the type of EXP is a subtype
4380 of something else since then the conversion might involve
4381 more than just converting modes. */
4382 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4383 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4385 if (TREE_UNSIGNED (TREE_TYPE (exp))
4386 != SUBREG_PROMOTED_UNSIGNED_P (target))
4387 exp = convert
4388 ((*lang_hooks.types.signed_or_unsigned_type)
4389 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4391 exp = convert ((*lang_hooks.types.type_for_mode)
4392 (GET_MODE (SUBREG_REG (target)),
4393 SUBREG_PROMOTED_UNSIGNED_P (target)),
4394 exp);
4396 inner_target = SUBREG_REG (target);
4399 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4401 /* If TEMP is a volatile MEM and we want a result value, make
4402 the access now so it gets done only once. Likewise if
4403 it contains TARGET. */
4404 if (GET_CODE (temp) == MEM && want_value
4405 && (MEM_VOLATILE_P (temp)
4406 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4407 temp = copy_to_reg (temp);
4409 /* If TEMP is a VOIDmode constant, use convert_modes to make
4410 sure that we properly convert it. */
4411 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4413 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4414 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4415 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4416 GET_MODE (target), temp,
4417 SUBREG_PROMOTED_UNSIGNED_P (target));
4420 convert_move (SUBREG_REG (target), temp,
4421 SUBREG_PROMOTED_UNSIGNED_P (target));
4423 /* If we promoted a constant, change the mode back down to match
4424 target. Otherwise, the caller might get confused by a result whose
4425 mode is larger than expected. */
4427 if (want_value && GET_MODE (temp) != GET_MODE (target))
4429 if (GET_MODE (temp) != VOIDmode)
4431 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4432 SUBREG_PROMOTED_VAR_P (temp) = 1;
4433 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4434 SUBREG_PROMOTED_UNSIGNED_P (target));
4436 else
4437 temp = convert_modes (GET_MODE (target),
4438 GET_MODE (SUBREG_REG (target)),
4439 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4442 return want_value ? temp : NULL_RTX;
4444 else
4446 temp = expand_expr (exp, target, GET_MODE (target), 0);
4447 /* Return TARGET if it's a specified hardware register.
4448 If TARGET is a volatile mem ref, either return TARGET
4449 or return a reg copied *from* TARGET; ANSI requires this.
4451 Otherwise, if TEMP is not TARGET, return TEMP
4452 if it is constant (for efficiency),
4453 or if we really want the correct value. */
4454 if (!(target && GET_CODE (target) == REG
4455 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4456 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4457 && ! rtx_equal_p (temp, target)
4458 && (CONSTANT_P (temp) || want_value))
4459 dont_return_target = 1;
4462 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4463 the same as that of TARGET, adjust the constant. This is needed, for
4464 example, in case it is a CONST_DOUBLE and we want only a word-sized
4465 value. */
4466 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4467 && TREE_CODE (exp) != ERROR_MARK
4468 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4469 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4470 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4472 /* If value was not generated in the target, store it there.
4473 Convert the value to TARGET's type first if necessary.
4474 If TEMP and TARGET compare equal according to rtx_equal_p, but
4475 one or both of them are volatile memory refs, we have to distinguish
4476 two cases:
4477 - expand_expr has used TARGET. In this case, we must not generate
4478 another copy. This can be detected by TARGET being equal according
4479 to == .
4480 - expand_expr has not used TARGET - that means that the source just
4481 happens to have the same RTX form. Since temp will have been created
4482 by expand_expr, it will compare unequal according to == .
4483 We must generate a copy in this case, to reach the correct number
4484 of volatile memory references. */
4486 if ((! rtx_equal_p (temp, target)
4487 || (temp != target && (side_effects_p (temp)
4488 || side_effects_p (target))))
4489 && TREE_CODE (exp) != ERROR_MARK
4490 && ! dont_store_target
4491 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4492 but TARGET is not a valid memory reference, TEMP will differ
4493 from TARGET although it is really the same location. */
4494 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4495 || target != DECL_RTL_IF_SET (exp))
4496 /* If there's nothing to copy, don't bother. Don't call expr_size
4497 unless necessary, because some front-ends' (e.g. C++) expr_size hook
4498 aborts on objects that are not supposed to be bit-copied or
4499 bit-initialized. */
4500 && expr_size (exp) != const0_rtx)
4502 target = protect_from_queue (target, 1);
4503 if (GET_MODE (temp) != GET_MODE (target)
4504 && GET_MODE (temp) != VOIDmode)
4506 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4507 if (dont_return_target)
4509 /* In this case, we will return TEMP,
4510 so make sure it has the proper mode.
4511 But don't forget to store the value into TARGET. */
4512 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4513 emit_move_insn (target, temp);
4515 else
4516 convert_move (target, temp, unsignedp);
4519 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4521 /* Handle copying a string constant into an array. The string
4522 constant may be shorter than the array. So copy just the string's
4523 actual length, and clear the rest. First get the size of the data
4524 type of the string, which is actually the size of the target. */
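/* For instance, given "char a[10]" initialized from a 4-byte string
   constant (three characters plus the terminating null), the 4 bytes
   are block-copied and the remaining 6 bytes of the array are
   cleared below.  */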
4525 rtx size = expr_size (exp);
4527 if (GET_CODE (size) == CONST_INT
4528 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4529 emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
4530 else
4532 /* Compute the size of the data to copy from the string. */
4533 tree copy_size
4534 = size_binop (MIN_EXPR,
4535 make_tree (sizetype, size),
4536 size_int (TREE_STRING_LENGTH (exp)));
4537 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4538 VOIDmode, 0);
4539 rtx label = 0;
4541 /* Copy that much. */
4542 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4543 emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
4545 /* Figure out how much is left in TARGET that we have to clear.
4546 Do all calculations in ptr_mode. */
4547 if (GET_CODE (copy_size_rtx) == CONST_INT)
4549 size = plus_constant (size, -INTVAL (copy_size_rtx));
4550 target = adjust_address (target, BLKmode,
4551 INTVAL (copy_size_rtx));
4553 else
4555 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4556 copy_size_rtx, NULL_RTX, 0,
4557 OPTAB_LIB_WIDEN);
4559 #ifdef POINTERS_EXTEND_UNSIGNED
4560 if (GET_MODE (copy_size_rtx) != Pmode)
4561 copy_size_rtx = convert_memory_address (Pmode,
4562 copy_size_rtx);
4563 #endif
4565 target = offset_address (target, copy_size_rtx,
4566 highest_pow2_factor (copy_size));
4567 label = gen_label_rtx ();
4568 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4569 GET_MODE (size), 0, label);
4572 if (size != const0_rtx)
4573 clear_storage (target, size);
4575 if (label)
4576 emit_label (label);
4579 /* Handle calls that return values in multiple non-contiguous locations.
4580 The Irix 6 ABI has examples of this. */
4581 else if (GET_CODE (target) == PARALLEL)
4582 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4583 else if (GET_MODE (temp) == BLKmode)
4584 emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
4585 else
4586 emit_move_insn (target, temp);
4589 /* If we don't want a value, return NULL_RTX. */
4590 if (! want_value)
4591 return NULL_RTX;
4593 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4594 ??? The latter test doesn't seem to make sense. */
4595 else if (dont_return_target && GET_CODE (temp) != MEM)
4596 return temp;
4598 /* Return TARGET itself if it is a hard register. */
4599 else if (want_value && GET_MODE (target) != BLKmode
4600 && ! (GET_CODE (target) == REG
4601 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4602 return copy_to_reg (target);
4604 else
4605 return target;
4608 /* Return 1 if EXP just contains zeros. */
4610 static int
4611 is_zeros_p (exp)
4612 tree exp;
4614 tree elt;
4616 switch (TREE_CODE (exp))
4618 case CONVERT_EXPR:
4619 case NOP_EXPR:
4620 case NON_LVALUE_EXPR:
4621 case VIEW_CONVERT_EXPR:
4622 return is_zeros_p (TREE_OPERAND (exp, 0));
4624 case INTEGER_CST:
4625 return integer_zerop (exp);
4627 case COMPLEX_CST:
4628 return
4629 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4631 case REAL_CST:
4632 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4634 case VECTOR_CST:
4635 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4636 elt = TREE_CHAIN (elt))
4637 if (!is_zeros_p (TREE_VALUE (elt)))
4638 return 0;
4640 return 1;
4642 case CONSTRUCTOR:
4643 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4644 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4645 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4646 if (! is_zeros_p (TREE_VALUE (elt)))
4647 return 0;
4649 return 1;
4651 default:
4652 return 0;
4656 /* Return 1 if EXP contains mostly (3/4) zeros. */
4658 static int
4659 mostly_zeros_p (exp)
4660 tree exp;
4662 if (TREE_CODE (exp) == CONSTRUCTOR)
4664 int elts = 0, zeros = 0;
4665 tree elt = CONSTRUCTOR_ELTS (exp);
4666 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4668 /* If there are no ranges of true bits, it is all zero. */
4669 return elt == NULL_TREE;
4671 for (; elt; elt = TREE_CHAIN (elt))
4673 /* We do not handle the case where the index is a RANGE_EXPR,
4674 so the statistic will be somewhat inaccurate.
4675 We do make a more accurate count in store_constructor itself,
4676 and since this function is only used for nested array elements,
4677 this should be close enough. */
4678 if (mostly_zeros_p (TREE_VALUE (elt)))
4679 zeros++;
4680 elts++;
4683 return 4 * zeros >= 3 * elts;
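/* The test above is the integer form of zeros / elts >= 3/4; for
   example, 3 zero elements out of 4 give 4*3 >= 3*4, so such a
   constructor counts as mostly zero.  */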
4686 return is_zeros_p (exp);
4689 /* Helper function for store_constructor.
4690 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4691 TYPE is the type of the CONSTRUCTOR, not the element type.
4692 CLEARED is as for store_constructor.
4693 ALIAS_SET is the alias set to use for any stores.
4695 This provides a recursive shortcut back to store_constructor when it isn't
4696 necessary to go through store_field. This is so that we can pass through
4697 the cleared field to let store_constructor know that we may not have to
4698 clear a substructure if the outer structure has already been cleared. */
4700 static void
4701 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4702 alias_set)
4703 rtx target;
4704 unsigned HOST_WIDE_INT bitsize;
4705 HOST_WIDE_INT bitpos;
4706 enum machine_mode mode;
4707 tree exp, type;
4708 int cleared;
4709 int alias_set;
4711 if (TREE_CODE (exp) == CONSTRUCTOR
4712 && bitpos % BITS_PER_UNIT == 0
4713 /* If we have a nonzero bitpos for a register target, then we just
4714 let store_field do the bitfield handling. This is unlikely to
4715 generate unnecessary clear instructions anyways. */
4716 && (bitpos == 0 || GET_CODE (target) == MEM))
4718 if (GET_CODE (target) == MEM)
4719 target
4720 = adjust_address (target,
4721 GET_MODE (target) == BLKmode
4722 || 0 != (bitpos
4723 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4724 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4727 /* Update the alias set, if required. */
4728 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4729 && MEM_ALIAS_SET (target) != 0)
4731 target = copy_rtx (target);
4732 set_mem_alias_set (target, alias_set);
4735 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4737 else
4738 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4739 alias_set);
4742 /* Store the value of constructor EXP into the rtx TARGET.
4743 TARGET is either a REG or a MEM; we know it cannot conflict, since
4744 safe_from_p has been called.
4745 CLEARED is true if TARGET is known to have been zero'd.
4746 SIZE is the number of bytes of TARGET we are allowed to modify: this
4747 may not be the same as the size of EXP if we are assigning to a field
4748 which has been packed to exclude padding bits. */
4750 static void
4751 store_constructor (exp, target, cleared, size)
4752 tree exp;
4753 rtx target;
4754 int cleared;
4755 HOST_WIDE_INT size;
4757 tree type = TREE_TYPE (exp);
4758 #ifdef WORD_REGISTER_OPERATIONS
4759 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4760 #endif
4762 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4763 || TREE_CODE (type) == QUAL_UNION_TYPE)
4765 tree elt;
4767 /* We either clear the aggregate or indicate the value is dead. */
4768 if ((TREE_CODE (type) == UNION_TYPE
4769 || TREE_CODE (type) == QUAL_UNION_TYPE)
4770 && ! cleared
4771 && ! CONSTRUCTOR_ELTS (exp))
4772 /* If the constructor is empty, clear the union. */
4774 clear_storage (target, expr_size (exp));
4775 cleared = 1;
4778 /* If we are building a static constructor into a register,
4779 set the initial value as zero so we can fold the value into
4780 a constant. But if more than one register is involved,
4781 this probably loses. */
4782 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4783 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4785 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4786 cleared = 1;
4789 /* If the constructor has fewer fields than the structure
4790 or if we are initializing the structure to mostly zeros,
4791 clear the whole structure first. Don't do this if TARGET is a
4792 register whose mode size isn't equal to SIZE since clear_storage
4793 can't handle this case. */
4794 else if (! cleared && size > 0
4795 && ((list_length (CONSTRUCTOR_ELTS (exp))
4796 != fields_length (type))
4797 || mostly_zeros_p (exp))
4798 && (GET_CODE (target) != REG
4799 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4800 == size)))
4802 clear_storage (target, GEN_INT (size));
4803 cleared = 1;
4806 if (! cleared)
4807 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4809 /* Store each element of the constructor into
4810 the corresponding field of TARGET. */
4812 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4814 tree field = TREE_PURPOSE (elt);
4815 tree value = TREE_VALUE (elt);
4816 enum machine_mode mode;
4817 HOST_WIDE_INT bitsize;
4818 HOST_WIDE_INT bitpos = 0;
4819 tree offset;
4820 rtx to_rtx = target;
4822 /* Just ignore missing fields.
4823 We cleared the whole structure, above,
4824 if any fields are missing. */
4825 if (field == 0)
4826 continue;
4828 if (cleared && is_zeros_p (value))
4829 continue;
4831 if (host_integerp (DECL_SIZE (field), 1))
4832 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4833 else
4834 bitsize = -1;
4836 mode = DECL_MODE (field);
4837 if (DECL_BIT_FIELD (field))
4838 mode = VOIDmode;
4840 offset = DECL_FIELD_OFFSET (field);
4841 if (host_integerp (offset, 0)
4842 && host_integerp (bit_position (field), 0))
4844 bitpos = int_bit_position (field);
4845 offset = 0;
4847 else
4848 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4850 if (offset)
4852 rtx offset_rtx;
4854 if (contains_placeholder_p (offset))
4855 offset = build (WITH_RECORD_EXPR, sizetype,
4856 offset, make_tree (TREE_TYPE (exp), target));
4858 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4859 if (GET_CODE (to_rtx) != MEM)
4860 abort ();
4862 #ifdef POINTERS_EXTEND_UNSIGNED
4863 if (GET_MODE (offset_rtx) != Pmode)
4864 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4865 #else
4866 if (GET_MODE (offset_rtx) != ptr_mode)
4867 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4868 #endif
4870 to_rtx = offset_address (to_rtx, offset_rtx,
4871 highest_pow2_factor (offset));
4874 if (TREE_READONLY (field))
4876 if (GET_CODE (to_rtx) == MEM)
4877 to_rtx = copy_rtx (to_rtx);
4879 RTX_UNCHANGING_P (to_rtx) = 1;
4882 #ifdef WORD_REGISTER_OPERATIONS
4883 /* If this initializes a field that is smaller than a word, at the
4884 start of a word, try to widen it to a full word.
4885 This special case allows us to output C++ member function
4886 initializations in a form that the optimizers can understand. */
4887 if (GET_CODE (target) == REG
4888 && bitsize < BITS_PER_WORD
4889 && bitpos % BITS_PER_WORD == 0
4890 && GET_MODE_CLASS (mode) == MODE_INT
4891 && TREE_CODE (value) == INTEGER_CST
4892 && exp_size >= 0
4893 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4895 tree type = TREE_TYPE (value);
4897 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4899 type = (*lang_hooks.types.type_for_size)
4900 (BITS_PER_WORD, TREE_UNSIGNED (type));
4901 value = convert (type, value);
4904 if (BYTES_BIG_ENDIAN)
4905 value
4906 = fold (build (LSHIFT_EXPR, type, value,
4907 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4908 bitsize = BITS_PER_WORD;
4909 mode = word_mode;
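/* For example, a 1-byte field stored at bit 0 of a word-sized
   register is widened to a full word here; on a big-endian target
   the value was shifted left by BITS_PER_WORD - 8 so the byte still
   ends up in the most significant position of the word.  */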
4911 #endif
4913 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4914 && DECL_NONADDRESSABLE_P (field))
4916 to_rtx = copy_rtx (to_rtx);
4917 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4920 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4921 value, type, cleared,
4922 get_alias_set (TREE_TYPE (field)));
4925 else if (TREE_CODE (type) == ARRAY_TYPE
4926 || TREE_CODE (type) == VECTOR_TYPE)
4928 tree elt;
4929 int i;
4930 int need_to_clear;
4931 tree domain = TYPE_DOMAIN (type);
4932 tree elttype = TREE_TYPE (type);
4933 int const_bounds_p;
4934 HOST_WIDE_INT minelt = 0;
4935 HOST_WIDE_INT maxelt = 0;
4937 /* Vectors are like arrays, but the domain is stored via an array
4938 type indirectly. */
4939 if (TREE_CODE (type) == VECTOR_TYPE)
4941 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4942 the same field as TYPE_DOMAIN, we are not guaranteed that
4943 it always will. */
4944 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4945 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4948 const_bounds_p = (TYPE_MIN_VALUE (domain)
4949 && TYPE_MAX_VALUE (domain)
4950 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4951 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4953 /* If we have constant bounds for the range of the type, get them. */
4954 if (const_bounds_p)
4956 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4957 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4960 /* If the constructor has fewer elements than the array,
4961 clear the whole array first. Similarly if this is
4962 a static constructor of a non-BLKmode object. */
4963 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4964 need_to_clear = 1;
4965 else
4967 HOST_WIDE_INT count = 0, zero_count = 0;
4968 need_to_clear = ! const_bounds_p;
4970 /* This loop is a more accurate version of the loop in
4971 mostly_zeros_p (it handles RANGE_EXPR in an index).
4972 It is also needed to check for missing elements. */
4973 for (elt = CONSTRUCTOR_ELTS (exp);
4974 elt != NULL_TREE && ! need_to_clear;
4975 elt = TREE_CHAIN (elt))
4977 tree index = TREE_PURPOSE (elt);
4978 HOST_WIDE_INT this_node_count;
4980 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4982 tree lo_index = TREE_OPERAND (index, 0);
4983 tree hi_index = TREE_OPERAND (index, 1);
4985 if (! host_integerp (lo_index, 1)
4986 || ! host_integerp (hi_index, 1))
4988 need_to_clear = 1;
4989 break;
4992 this_node_count = (tree_low_cst (hi_index, 1)
4993 - tree_low_cst (lo_index, 1) + 1);
4995 else
4996 this_node_count = 1;
4998 count += this_node_count;
4999 if (mostly_zeros_p (TREE_VALUE (elt)))
5000 zero_count += this_node_count;
5003 /* Clear the entire array first if there are any missing elements,
5004 or if the incidence of zero elements is >= 75%. */
5005 if (! need_to_clear
5006 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5007 need_to_clear = 1;
5010 if (need_to_clear && size > 0)
5012 if (! cleared)
5014 if (REG_P (target))
5015 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5016 else
5017 clear_storage (target, GEN_INT (size));
5019 cleared = 1;
5021 else if (REG_P (target))
5022 /* Inform later passes that the old value is dead. */
5023 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5025 /* Store each element of the constructor into
5026 the corresponding element of TARGET, determined
5027 by counting the elements. */
5028 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5029 elt;
5030 elt = TREE_CHAIN (elt), i++)
5032 enum machine_mode mode;
5033 HOST_WIDE_INT bitsize;
5034 HOST_WIDE_INT bitpos;
5035 int unsignedp;
5036 tree value = TREE_VALUE (elt);
5037 tree index = TREE_PURPOSE (elt);
5038 rtx xtarget = target;
5040 if (cleared && is_zeros_p (value))
5041 continue;
5043 unsignedp = TREE_UNSIGNED (elttype);
5044 mode = TYPE_MODE (elttype);
5045 if (mode == BLKmode)
5046 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5047 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5048 : -1);
5049 else
5050 bitsize = GET_MODE_BITSIZE (mode);
5052 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5054 tree lo_index = TREE_OPERAND (index, 0);
5055 tree hi_index = TREE_OPERAND (index, 1);
5056 rtx index_r, pos_rtx, loop_end;
5057 struct nesting *loop;
5058 HOST_WIDE_INT lo, hi, count;
5059 tree position;
5061 /* If the range is constant and "small", unroll the loop. */
5062 if (const_bounds_p
5063 && host_integerp (lo_index, 0)
5064 && host_integerp (hi_index, 0)
5065 && (lo = tree_low_cst (lo_index, 0),
5066 hi = tree_low_cst (hi_index, 0),
5067 count = hi - lo + 1,
5068 (GET_CODE (target) != MEM
5069 || count <= 2
5070 || (host_integerp (TYPE_SIZE (elttype), 1)
5071 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5072 <= 40 * 8)))))
5074 lo -= minelt; hi -= minelt;
5075 for (; lo <= hi; lo++)
5077 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5079 if (GET_CODE (target) == MEM
5080 && !MEM_KEEP_ALIAS_SET_P (target)
5081 && TREE_CODE (type) == ARRAY_TYPE
5082 && TYPE_NONALIASED_COMPONENT (type))
5084 target = copy_rtx (target);
5085 MEM_KEEP_ALIAS_SET_P (target) = 1;
5088 store_constructor_field
5089 (target, bitsize, bitpos, mode, value, type, cleared,
5090 get_alias_set (elttype));
5093 else
5095 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5096 loop_end = gen_label_rtx ();
5098 unsignedp = TREE_UNSIGNED (domain);
5100 index = build_decl (VAR_DECL, NULL_TREE, domain);
5102 index_r
5103 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5104 &unsignedp, 0));
5105 SET_DECL_RTL (index, index_r);
5106 if (TREE_CODE (value) == SAVE_EXPR
5107 && SAVE_EXPR_RTL (value) == 0)
5109 /* Make sure value gets expanded once before the
5110 loop. */
5111 expand_expr (value, const0_rtx, VOIDmode, 0);
5112 emit_queue ();
5114 store_expr (lo_index, index_r, 0);
5115 loop = expand_start_loop (0);
5117 /* Assign value to element index. */
5118 position
5119 = convert (ssizetype,
5120 fold (build (MINUS_EXPR, TREE_TYPE (index),
5121 index, TYPE_MIN_VALUE (domain))));
5122 position = size_binop (MULT_EXPR, position,
5123 convert (ssizetype,
5124 TYPE_SIZE_UNIT (elttype)));
5126 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5127 xtarget = offset_address (target, pos_rtx,
5128 highest_pow2_factor (position));
5129 xtarget = adjust_address (xtarget, mode, 0);
5130 if (TREE_CODE (value) == CONSTRUCTOR)
5131 store_constructor (value, xtarget, cleared,
5132 bitsize / BITS_PER_UNIT);
5133 else
5134 store_expr (value, xtarget, 0);
5136 expand_exit_loop_if_false (loop,
5137 build (LT_EXPR, integer_type_node,
5138 index, hi_index));
5140 expand_increment (build (PREINCREMENT_EXPR,
5141 TREE_TYPE (index),
5142 index, integer_one_node), 0, 0);
5143 expand_end_loop ();
5144 emit_label (loop_end);
5147 else if ((index != 0 && ! host_integerp (index, 0))
5148 || ! host_integerp (TYPE_SIZE (elttype), 1))
5150 tree position;
5152 if (index == 0)
5153 index = ssize_int (1);
5155 if (minelt)
5156 index = convert (ssizetype,
5157 fold (build (MINUS_EXPR, index,
5158 TYPE_MIN_VALUE (domain))));
5160 position = size_binop (MULT_EXPR, index,
5161 convert (ssizetype,
5162 TYPE_SIZE_UNIT (elttype)));
5163 xtarget = offset_address (target,
5164 expand_expr (position, 0, VOIDmode, 0),
5165 highest_pow2_factor (position));
5166 xtarget = adjust_address (xtarget, mode, 0);
5167 store_expr (value, xtarget, 0);
5169 else
5171 if (index != 0)
5172 bitpos = ((tree_low_cst (index, 0) - minelt)
5173 * tree_low_cst (TYPE_SIZE (elttype), 1));
5174 else
5175 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5177 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5178 && TREE_CODE (type) == ARRAY_TYPE
5179 && TYPE_NONALIASED_COMPONENT (type))
5181 target = copy_rtx (target);
5182 MEM_KEEP_ALIAS_SET_P (target) = 1;
5185 store_constructor_field (target, bitsize, bitpos, mode, value,
5186 type, cleared, get_alias_set (elttype));
5192 /* Set constructor assignments. */
5193 else if (TREE_CODE (type) == SET_TYPE)
5195 tree elt = CONSTRUCTOR_ELTS (exp);
5196 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5197 tree domain = TYPE_DOMAIN (type);
5198 tree domain_min, domain_max, bitlength;
5200 /* The default implementation strategy is to extract the constant
5201 parts of the constructor, use that to initialize the target,
5202 and then "or" in whatever non-constant ranges we need in addition.
5204 If a large set is all zero or all ones, it is
5205 probably better to set it using memset (if available) or bzero.
5206 Also, if a large set has just a single range, it may be
5207 better to first clear the whole set (using bzero/memset)
5208 and then set the bits we want. */
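/* For instance, a small set whose constructor is entirely constant
   is initialized below with a few word stores, while a range with
   variable bounds is set afterwards through the __setbits library
   call (or memset when its bounds are byte-aligned constants).  */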
5210 /* Check for all zeros. */
5211 if (elt == NULL_TREE && size > 0)
5213 if (!cleared)
5214 clear_storage (target, GEN_INT (size));
5215 return;
5218 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5219 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5220 bitlength = size_binop (PLUS_EXPR,
5221 size_diffop (domain_max, domain_min),
5222 ssize_int (1));
5224 nbits = tree_low_cst (bitlength, 1);
5226 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5227 are "complicated" (more than one range), initialize (the
5228 constant parts) by copying from a constant. */
5229 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5230 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5232 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5233 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5234 char *bit_buffer = (char *) alloca (nbits);
5235 HOST_WIDE_INT word = 0;
5236 unsigned int bit_pos = 0;
5237 unsigned int ibit = 0;
5238 unsigned int offset = 0; /* In bytes from beginning of set. */
5240 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5241 for (;;)
5243 if (bit_buffer[ibit])
5245 if (BYTES_BIG_ENDIAN)
5246 word |= (1 << (set_word_size - 1 - bit_pos));
5247 else
5248 word |= 1 << bit_pos;
5251 bit_pos++; ibit++;
5252 if (bit_pos >= set_word_size || ibit == nbits)
5254 if (word != 0 || ! cleared)
5256 rtx datum = GEN_INT (word);
5257 rtx to_rtx;
5259 /* The assumption here is that it is safe to use
5260 XEXP if the set is multi-word, but not if
5261 it's single-word. */
5262 if (GET_CODE (target) == MEM)
5263 to_rtx = adjust_address (target, mode, offset);
5264 else if (offset == 0)
5265 to_rtx = target;
5266 else
5267 abort ();
5268 emit_move_insn (to_rtx, datum);
5271 if (ibit == nbits)
5272 break;
5273 word = 0;
5274 bit_pos = 0;
5275 offset += set_word_size / BITS_PER_UNIT;
5279 else if (!cleared)
5280 /* Don't bother clearing storage if the set is all ones. */
5281 if (TREE_CHAIN (elt) != NULL_TREE
5282 || (TREE_PURPOSE (elt) == NULL_TREE
5283 ? nbits != 1
5284 : ( ! host_integerp (TREE_VALUE (elt), 0)
5285 || ! host_integerp (TREE_PURPOSE (elt), 0)
5286 || (tree_low_cst (TREE_VALUE (elt), 0)
5287 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5288 != (HOST_WIDE_INT) nbits))))
5289 clear_storage (target, expr_size (exp));
5291 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5293 /* Start of range of element or NULL. */
5294 tree startbit = TREE_PURPOSE (elt);
5295 /* End of range of element, or element value. */
5296 tree endbit = TREE_VALUE (elt);
5297 HOST_WIDE_INT startb, endb;
5298 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5300 bitlength_rtx = expand_expr (bitlength,
5301 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5303 /* Handle non-range tuple element like [ expr ]. */
5304 if (startbit == NULL_TREE)
5306 startbit = save_expr (endbit);
5307 endbit = startbit;
5310 startbit = convert (sizetype, startbit);
5311 endbit = convert (sizetype, endbit);
5312 if (! integer_zerop (domain_min))
5314 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5315 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5317 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5318 EXPAND_CONST_ADDRESS);
5319 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5320 EXPAND_CONST_ADDRESS);
5322 if (REG_P (target))
5324 targetx
5325 = assign_temp
5326 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5327 (GET_MODE (target), 0),
5328 TYPE_QUAL_CONST)),
5329 0, 1, 1);
5330 emit_move_insn (targetx, target);
5333 else if (GET_CODE (target) == MEM)
5334 targetx = target;
5335 else
5336 abort ();
5338 /* Optimization: If startbit and endbit are constants divisible
5339 by BITS_PER_UNIT, call memset instead. */
5340 if (TARGET_MEM_FUNCTIONS
5341 && TREE_CODE (startbit) == INTEGER_CST
5342 && TREE_CODE (endbit) == INTEGER_CST
5343 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5344 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5346 emit_library_call (memset_libfunc, LCT_NORMAL,
5347 VOIDmode, 3,
5348 plus_constant (XEXP (targetx, 0),
5349 startb / BITS_PER_UNIT),
5350 Pmode,
5351 constm1_rtx, TYPE_MODE (integer_type_node),
5352 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5353 TYPE_MODE (sizetype));
5355 else
5356 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5357 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5358 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5359 startbit_rtx, TYPE_MODE (sizetype),
5360 endbit_rtx, TYPE_MODE (sizetype));
5362 if (REG_P (target))
5363 emit_move_insn (target, targetx);
5367 else
5368 abort ();
5371 /* Store the value of EXP (an expression tree)
5372 into a subfield of TARGET which has mode MODE and occupies
5373 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5374 If MODE is VOIDmode, it means that we are storing into a bit-field.
5376 If VALUE_MODE is VOIDmode, return nothing in particular.
5377 UNSIGNEDP is not used in this case.
5379 Otherwise, return an rtx for the value stored. This rtx
5380 has mode VALUE_MODE if that is convenient to do.
5381 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5383 TYPE is the type of the underlying object,
5385 ALIAS_SET is the alias set for the destination. This value will
5386 (in general) be different from that for TARGET, since TARGET is a
5387 reference to the containing structure. */
5389 static rtx
5390 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5391 alias_set)
5392 rtx target;
5393 HOST_WIDE_INT bitsize;
5394 HOST_WIDE_INT bitpos;
5395 enum machine_mode mode;
5396 tree exp;
5397 enum machine_mode value_mode;
5398 int unsignedp;
5399 tree type;
5400 int alias_set;
5402 HOST_WIDE_INT width_mask = 0;
5404 if (TREE_CODE (exp) == ERROR_MARK)
5405 return const0_rtx;
5407 /* If we have nothing to store, do nothing unless the expression has
5408 side-effects. */
5409 if (bitsize == 0)
5410 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5411 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5412 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
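/* For example, bitsize == 8 gives width_mask == 0xff, the mask of
   the bits actually stored; it is used further down to avoid
   refetching the value from the bit-field.  */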
5414 /* If we are storing into an unaligned field of an aligned union that is
5415 in a register, we may have the mode of TARGET being an integer mode but
5416 MODE == BLKmode. In that case, get an aligned object whose size and
5417 alignment are the same as TARGET and store TARGET into it (we can avoid
5418 the store if the field being stored is the entire width of TARGET). Then
5419 call ourselves recursively to store the field into a BLKmode version of
5420 that object. Finally, load from the object into TARGET. This is not
5421 very efficient in general, but should only be slightly more expensive
5422 than the otherwise-required unaligned accesses. Perhaps this can be
5423 cleaned up later. */
5425 if (mode == BLKmode
5426 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5428 rtx object
5429 = assign_temp
5430 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5431 0, 1, 1);
5432 rtx blk_object = adjust_address (object, BLKmode, 0);
5434 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5435 emit_move_insn (object, target);
5437 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5438 alias_set);
5440 emit_move_insn (target, object);
5442 /* We want to return the BLKmode version of the data. */
5443 return blk_object;
5446 if (GET_CODE (target) == CONCAT)
5448 /* We're storing into a struct containing a single __complex. */
5450 if (bitpos != 0)
5451 abort ();
5452 return store_expr (exp, target, 0);
5455 /* If the structure is in a register or if the component
5456 is a bit field, we cannot use addressing to access it.
5457 Use bit-field techniques or SUBREG to store in it. */
5459 if (mode == VOIDmode
5460 || (mode != BLKmode && ! direct_store[(int) mode]
5461 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5462 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5463 || GET_CODE (target) == REG
5464 || GET_CODE (target) == SUBREG
5465 /* If the field isn't aligned enough to store as an ordinary memref,
5466 store it as a bit field. */
5467 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5468 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5469 || bitpos % GET_MODE_ALIGNMENT (mode)))
5470 /* If the RHS and field are a constant size and the size of the
5471 RHS isn't the same size as the bitfield, we must use bitfield
5472 operations. */
5473 || (bitsize >= 0
5474 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5475 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5477 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5479 /* If BITSIZE is narrower than the size of the type of EXP
5480 we will be narrowing TEMP. Normally, what's wanted are the
5481 low-order bits. However, if EXP's type is a record and this is
5482 a big-endian machine, we want the upper BITSIZE bits. */
5483 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5484 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5485 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5486 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5487 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5488 - bitsize),
5489 temp, 1);
5491 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5492 MODE. */
5493 if (mode != VOIDmode && mode != BLKmode
5494 && mode != TYPE_MODE (TREE_TYPE (exp)))
5495 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5497 /* If the modes of TARGET and TEMP are both BLKmode, both
5498 must be in memory and BITPOS must be aligned on a byte
5499 boundary. If so, we simply do a block copy. */
5500 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5502 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5503 || bitpos % BITS_PER_UNIT != 0)
5504 abort ();
5506 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5507 emit_block_move (target, temp,
5508 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5509 / BITS_PER_UNIT),
5510 BLOCK_OP_NORMAL);
5512 return value_mode == VOIDmode ? const0_rtx : target;
5515 /* Store the value in the bitfield. */
5516 store_bit_field (target, bitsize, bitpos, mode, temp,
5517 int_size_in_bytes (type));
5519 if (value_mode != VOIDmode)
5521 /* The caller wants an rtx for the value.
5522 If possible, avoid refetching from the bitfield itself. */
5523 if (width_mask != 0
5524 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5526 tree count;
5527 enum machine_mode tmode;
5529 tmode = GET_MODE (temp);
5530 if (tmode == VOIDmode)
5531 tmode = value_mode;
5533 if (unsignedp)
5534 return expand_and (tmode, temp,
5535 gen_int_mode (width_mask, tmode),
5536 NULL_RTX);
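/* For a signed field, sign-extend the low BITSIZE bits instead:
   shift them to the top of TMODE, then arithmetically shift back
   down.  */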
5538 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5539 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5540 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5543 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5544 NULL_RTX, value_mode, VOIDmode,
5545 int_size_in_bytes (type));
5547 return const0_rtx;
5549 else
5551 rtx addr = XEXP (target, 0);
5552 rtx to_rtx = target;
5554 /* If a value is wanted, it must be the lhs;
5555 so make the address stable for multiple use. */
5557 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5558 && ! CONSTANT_ADDRESS_P (addr)
5559 /* A frame-pointer reference is already stable. */
5560 && ! (GET_CODE (addr) == PLUS
5561 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5562 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5563 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5564 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5566 /* Now build a reference to just the desired component. */
5568 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5570 if (to_rtx == target)
5571 to_rtx = copy_rtx (to_rtx);
5573 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5574 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5575 set_mem_alias_set (to_rtx, alias_set);
5577 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5581 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5582 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5583 codes and find the ultimate containing object, which we return.
5585 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5586 bit position, and *PUNSIGNEDP to the signedness of the field.
5587 If the position of the field is variable, we store a tree
5588 giving the variable offset (in units) in *POFFSET.
5589 This offset is in addition to the bit position.
5590 If the position is not variable, we store 0 in *POFFSET.
5592 If any of the extraction expressions is volatile,
5593 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5595 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5596 is a mode that can be used to access the field. In that case, *PBITSIZE
5597 is redundant.
5599 If the field describes a variable-sized object, *PMODE is set to
5600 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5601 this case, but the address of the object can be found. */
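/* For example, given s.a[i].b with a variable index i, this walks
   down to the decl for s, accumulates the constant sub-byte parts
   of the position in *PBITPOS, leaves the remaining (variable) byte
   offset as a tree in *POFFSET, and describes the size and mode of
   b in *PBITSIZE and *PMODE.  */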
5603 tree
5604 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5605 punsignedp, pvolatilep)
5606 tree exp;
5607 HOST_WIDE_INT *pbitsize;
5608 HOST_WIDE_INT *pbitpos;
5609 tree *poffset;
5610 enum machine_mode *pmode;
5611 int *punsignedp;
5612 int *pvolatilep;
5614 tree size_tree = 0;
5615 enum machine_mode mode = VOIDmode;
5616 tree offset = size_zero_node;
5617 tree bit_offset = bitsize_zero_node;
5618 tree placeholder_ptr = 0;
5619 tree tem;
5621 /* First get the mode, signedness, and size. We do this from just the
5622 outermost expression. */
5623 if (TREE_CODE (exp) == COMPONENT_REF)
5625 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5626 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5627 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5629 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5631 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5633 size_tree = TREE_OPERAND (exp, 1);
5634 *punsignedp = TREE_UNSIGNED (exp);
5636 else
5638 mode = TYPE_MODE (TREE_TYPE (exp));
5639 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5641 if (mode == BLKmode)
5642 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5643 else
5644 *pbitsize = GET_MODE_BITSIZE (mode);
5647 if (size_tree != 0)
5649 if (! host_integerp (size_tree, 1))
5650 mode = BLKmode, *pbitsize = -1;
5651 else
5652 *pbitsize = tree_low_cst (size_tree, 1);
5655 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5656 and find the ultimate containing object. */
5657 while (1)
5659 if (TREE_CODE (exp) == BIT_FIELD_REF)
5660 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5661 else if (TREE_CODE (exp) == COMPONENT_REF)
5663 tree field = TREE_OPERAND (exp, 1);
5664 tree this_offset = DECL_FIELD_OFFSET (field);
5666 /* If this field hasn't been filled in yet, don't go
5667 past it. This should only happen when folding expressions
5668 made during type construction. */
5669 if (this_offset == 0)
5670 break;
5671 else if (! TREE_CONSTANT (this_offset)
5672 && contains_placeholder_p (this_offset))
5673 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5675 offset = size_binop (PLUS_EXPR, offset, this_offset);
5676 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5677 DECL_FIELD_BIT_OFFSET (field));
5679 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5682 else if (TREE_CODE (exp) == ARRAY_REF
5683 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5685 tree index = TREE_OPERAND (exp, 1);
5686 tree array = TREE_OPERAND (exp, 0);
5687 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5688 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5689 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5691 /* We assume all arrays have sizes that are a multiple of a byte.
5692 First subtract the lower bound, if any, in the type of the
5693 index, then convert to sizetype and multiply by the size of the
5694 array element. */
5695 if (low_bound != 0 && ! integer_zerop (low_bound))
5696 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5697 index, low_bound));
5699 /* If the index has a self-referential type, pass it to a
5700 WITH_RECORD_EXPR; if the component size has such a type too,
5701 pass our component to one. */
5702 if (! TREE_CONSTANT (index)
5703 && contains_placeholder_p (index))
5704 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5705 if (! TREE_CONSTANT (unit_size)
5706 && contains_placeholder_p (unit_size))
5707 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5709 offset = size_binop (PLUS_EXPR, offset,
5710 size_binop (MULT_EXPR,
5711 convert (sizetype, index),
5712 unit_size));
5715 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5717 tree new = find_placeholder (exp, &placeholder_ptr);
5719 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5720 We might have been called from tree optimization where we
5721 haven't set up an object yet. */
5722 if (new == 0)
5723 break;
5724 else
5725 exp = new;
5727 continue;
5729 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5730 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5731 && ! ((TREE_CODE (exp) == NOP_EXPR
5732 || TREE_CODE (exp) == CONVERT_EXPR)
5733 && (TYPE_MODE (TREE_TYPE (exp))
5734 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5735 break;
5737 /* If any reference in the chain is volatile, the effect is volatile. */
5738 if (TREE_THIS_VOLATILE (exp))
5739 *pvolatilep = 1;
5741 exp = TREE_OPERAND (exp, 0);
5744 /* If OFFSET is constant, see if we can return the whole thing as a
5745 constant bit position. Otherwise, split it up. */
5746 if (host_integerp (offset, 0)
5747 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5748 bitsize_unit_node))
5749 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5750 && host_integerp (tem, 0))
5751 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5752 else
5753 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5755 *pmode = mode;
5756 return exp;
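/* Illustrative note on the split above: when OFFSET is a compile-time
   constant, it is folded into *PBITPOS (e.g. a field 4 bytes plus 3 bits
   into its record yields *PBITPOS == 35 and *POFFSET == 0); when OFFSET
   depends on a variable, only the residual bit offset goes into *PBITPOS
   and the variable byte offset is returned through *POFFSET.  */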
5759 /* Return 1 if T is an expression that get_inner_reference handles. */
5762 handled_component_p (t)
5763 tree t;
5765 switch (TREE_CODE (t))
5767 case BIT_FIELD_REF:
5768 case COMPONENT_REF:
5769 case ARRAY_REF:
5770 case ARRAY_RANGE_REF:
5771 case NON_LVALUE_EXPR:
5772 case VIEW_CONVERT_EXPR:
5773 return 1;
5775 case NOP_EXPR:
5776 case CONVERT_EXPR:
5777 return (TYPE_MODE (TREE_TYPE (t))
5778 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5780 default:
5781 return 0;
5785 /* Given an rtx VALUE that may contain additions and multiplications, return
5786 an equivalent value that just refers to a register, memory, or constant.
5787 This is done by generating instructions to perform the arithmetic and
5788 returning a pseudo-register containing the value.
5790 The returned value may be a REG, SUBREG, MEM or constant. */
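/* Example (illustrative sketch): given a composite address such as
   (plus:SI (reg:SI 100) (const_int 4)), a caller can write

	rtx op = force_operand (value, NULL_RTX);

   The addition is emitted as insns and OP ends up as a pseudo register
   holding the sum, ready to be used as an ordinary operand; passing
   NULL_RTX as TARGET lets the function choose where to put the result.  */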
5793 force_operand (value, target)
5794 rtx value, target;
5796 rtx op1, op2;
5797 /* Use subtarget as the target for operand 0 of a binary operation. */
5798 rtx subtarget = get_subtarget (target);
5799 enum rtx_code code = GET_CODE (value);
5801 /* Check for a PIC address load. */
5802 if ((code == PLUS || code == MINUS)
5803 && XEXP (value, 0) == pic_offset_table_rtx
5804 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5805 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5806 || GET_CODE (XEXP (value, 1)) == CONST))
5808 if (!subtarget)
5809 subtarget = gen_reg_rtx (GET_MODE (value));
5810 emit_move_insn (subtarget, value);
5811 return subtarget;
5814 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5816 if (!target)
5817 target = gen_reg_rtx (GET_MODE (value));
5818 convert_move (target, force_operand (XEXP (value, 0), NULL),
5819 code == ZERO_EXTEND);
5820 return target;
5823 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5825 op2 = XEXP (value, 1);
5826 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5827 subtarget = 0;
5828 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5830 code = PLUS;
5831 op2 = negate_rtx (GET_MODE (value), op2);
5834 /* Check for an addition with OP2 a constant integer and our first
5835 operand a PLUS of a virtual register and something else. In that
5836 case, we want to emit the sum of the virtual register and the
5837 constant first and then add the other value. This allows virtual
5838 register instantiation to simply modify the constant rather than
5839 creating another one around this addition. */
5840 if (code == PLUS && GET_CODE (op2) == CONST_INT
5841 && GET_CODE (XEXP (value, 0)) == PLUS
5842 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5843 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5844 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5846 rtx temp = expand_simple_binop (GET_MODE (value), code,
5847 XEXP (XEXP (value, 0), 0), op2,
5848 subtarget, 0, OPTAB_LIB_WIDEN);
5849 return expand_simple_binop (GET_MODE (value), code, temp,
5850 force_operand (XEXP (XEXP (value,
5851 0), 1), 0),
5852 target, 0, OPTAB_LIB_WIDEN);
5855 op1 = force_operand (XEXP (value, 0), subtarget);
5856 op2 = force_operand (op2, NULL_RTX);
5857 switch (code)
5859 case MULT:
5860 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5861 case DIV:
5862 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5863 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5864 target, 1, OPTAB_LIB_WIDEN);
5865 else
5866 return expand_divmod (0,
5867 FLOAT_MODE_P (GET_MODE (value))
5868 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5869 GET_MODE (value), op1, op2, target, 0);
5870 break;
5871 case MOD:
5872 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5873 target, 0);
5874 break;
5875 case UDIV:
5876 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5877 target, 1);
5878 break;
5879 case UMOD:
5880 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5881 target, 1);
5882 break;
5883 case ASHIFTRT:
5884 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5885 target, 0, OPTAB_LIB_WIDEN);
5886 break;
5887 default:
5888 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5889 target, 1, OPTAB_LIB_WIDEN);
5892 if (GET_RTX_CLASS (code) == '1')
5894 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5895 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5898 #ifdef INSN_SCHEDULING
5899 /* On machines that have insn scheduling, we want all memory references to be
5900 explicit, so we need to deal with such paradoxical SUBREGs. */
5901 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5902 && (GET_MODE_SIZE (GET_MODE (value))
5903 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5904 value
5905 = simplify_gen_subreg (GET_MODE (value),
5906 force_reg (GET_MODE (SUBREG_REG (value)),
5907 force_operand (SUBREG_REG (value),
5908 NULL_RTX)),
5909 GET_MODE (SUBREG_REG (value)),
5910 SUBREG_BYTE (value));
5911 #endif
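  /* Illustrative note: a paradoxical SUBREG here is one whose outer mode
     is wider than the mode of the MEM it wraps, which is what the size
     comparison above detects; loading the inner MEM into a register first
     makes the memory reference explicit for the scheduler.  */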
5913 return value;
5916 /* Subroutine of expand_expr: return nonzero iff there is no way that
5917 EXP can reference X, which is being modified. TOP_P is nonzero if this
5918 call is going to be used to determine whether we need a temporary
5919 for EXP, as opposed to a recursive call to this function.
5921 It is always safe for this routine to return zero since it merely
5922 searches for optimization opportunities. */
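/* Example (illustrative sketch): the typical use is deciding whether an
   existing TARGET may be stored into while EXP is still being evaluated,
   e.g.

	if (target == 0 || ! safe_from_p (target, exp, 1))
	  target = assign_temp (type, 0, 1, 1);

   which is essentially the pattern expand_expr follows below when
   expanding a CONSTRUCTOR into a target.  */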
5925 safe_from_p (x, exp, top_p)
5926 rtx x;
5927 tree exp;
5928 int top_p;
5930 rtx exp_rtl = 0;
5931 int i, nops;
5932 static tree save_expr_list;
5934 if (x == 0
5935 /* If EXP has varying size, we MUST use a target since we currently
5936 have no way of allocating temporaries of variable size
5937 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5938 So we assume here that something at a higher level has prevented a
5939 clash. This is somewhat bogus, but the best we can do. Only
5940 do this when X is BLKmode and when we are at the top level. */
5941 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5942 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5943 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5944 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5945 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5946 != INTEGER_CST)
5947 && GET_MODE (x) == BLKmode)
5948 /* If X is in the outgoing argument area, it is always safe. */
5949 || (GET_CODE (x) == MEM
5950 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5951 || (GET_CODE (XEXP (x, 0)) == PLUS
5952 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5953 return 1;
5955 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5956 find the underlying pseudo. */
5957 if (GET_CODE (x) == SUBREG)
5959 x = SUBREG_REG (x);
5960 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5961 return 0;
5964 /* A SAVE_EXPR might appear many times in the expression passed to the
5965 top-level safe_from_p call, and if it has a complex subexpression,
5966 examining it multiple times could result in a combinatorial explosion.
5967 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5968 with optimization took about 28 minutes to compile -- even though it was
5969 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5970 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5971 we have processed. Note that the only test of top_p was above. */
5973 if (top_p)
5975 int rtn;
5976 tree t;
5978 save_expr_list = 0;
5980 rtn = safe_from_p (x, exp, 0);
5982 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5983 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5985 return rtn;
5988 /* Now look at our tree code and possibly recurse. */
5989 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5991 case 'd':
5992 exp_rtl = DECL_RTL_IF_SET (exp);
5993 break;
5995 case 'c':
5996 return 1;
5998 case 'x':
5999 if (TREE_CODE (exp) == TREE_LIST)
6000 return ((TREE_VALUE (exp) == 0
6001 || safe_from_p (x, TREE_VALUE (exp), 0))
6002 && (TREE_CHAIN (exp) == 0
6003 || safe_from_p (x, TREE_CHAIN (exp), 0)));
6004 else if (TREE_CODE (exp) == ERROR_MARK)
6005 return 1; /* An already-visited SAVE_EXPR? */
6006 else
6007 return 0;
6009 case '1':
6010 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6012 case '2':
6013 case '<':
6014 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6015 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
6017 case 'e':
6018 case 'r':
6019 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6020 the expression. If it is set, we conflict iff we are that rtx or
6021 both are in memory. Otherwise, we check all operands of the
6022 expression recursively. */
6024 switch (TREE_CODE (exp))
6026 case ADDR_EXPR:
6027 /* If the operand is static or we are static, we can't conflict.
6028 Likewise if we don't conflict with the operand at all. */
6029 if (staticp (TREE_OPERAND (exp, 0))
6030 || TREE_STATIC (exp)
6031 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6032 return 1;
6034 /* Otherwise, the only way this can conflict is if we are taking
6035 the address of a DECL and that address is part of X, which is
6036 very rare. */
6037 exp = TREE_OPERAND (exp, 0);
6038 if (DECL_P (exp))
6040 if (!DECL_RTL_SET_P (exp)
6041 || GET_CODE (DECL_RTL (exp)) != MEM)
6042 return 0;
6043 else
6044 exp_rtl = XEXP (DECL_RTL (exp), 0);
6046 break;
6048 case INDIRECT_REF:
6049 if (GET_CODE (x) == MEM
6050 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6051 get_alias_set (exp)))
6052 return 0;
6053 break;
6055 case CALL_EXPR:
6056 /* Assume that the call will clobber all hard registers and
6057 all of memory. */
6058 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6059 || GET_CODE (x) == MEM)
6060 return 0;
6061 break;
6063 case RTL_EXPR:
6064 /* If a sequence exists, we would have to scan every instruction
6065 in the sequence to see if it was safe. This is probably not
6066 worthwhile. */
6067 if (RTL_EXPR_SEQUENCE (exp))
6068 return 0;
6070 exp_rtl = RTL_EXPR_RTL (exp);
6071 break;
6073 case WITH_CLEANUP_EXPR:
6074 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6075 break;
6077 case CLEANUP_POINT_EXPR:
6078 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6080 case SAVE_EXPR:
6081 exp_rtl = SAVE_EXPR_RTL (exp);
6082 if (exp_rtl)
6083 break;
6085 /* If we've already scanned this, don't do it again. Otherwise,
6086 show that we've scanned it and record it so that the flag can be
6087 cleared once we're done. */
6088 if (TREE_PRIVATE (exp))
6089 return 1;
6091 TREE_PRIVATE (exp) = 1;
6092 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6094 TREE_PRIVATE (exp) = 0;
6095 return 0;
6098 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6099 return 1;
6101 case BIND_EXPR:
6102 /* The only operand we look at is operand 1. The rest aren't
6103 part of the expression. */
6104 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6106 case METHOD_CALL_EXPR:
6107 /* This takes an rtx argument, but shouldn't appear here. */
6108 abort ();
6110 default:
6111 break;
6114 /* If we have an rtx, we do not need to scan our operands. */
6115 if (exp_rtl)
6116 break;
6118 nops = first_rtl_op (TREE_CODE (exp));
6119 for (i = 0; i < nops; i++)
6120 if (TREE_OPERAND (exp, i) != 0
6121 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6122 return 0;
6124 /* If this is a language-specific tree code, it may require
6125 special handling. */
6126 if ((unsigned int) TREE_CODE (exp)
6127 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6128 && !(*lang_hooks.safe_from_p) (x, exp))
6129 return 0;
6132 /* If we have an rtl, find any enclosed object. Then see if we conflict
6133 with it. */
6134 if (exp_rtl)
6136 if (GET_CODE (exp_rtl) == SUBREG)
6138 exp_rtl = SUBREG_REG (exp_rtl);
6139 if (GET_CODE (exp_rtl) == REG
6140 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6141 return 0;
6144 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6145 are memory and they conflict. */
6146 return ! (rtx_equal_p (x, exp_rtl)
6147 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6148 && true_dependence (exp_rtl, VOIDmode, x,
6149 rtx_addr_varies_p)));
6152 /* If we reach here, it is safe. */
6153 return 1;
6156 /* Subroutine of expand_expr: return rtx if EXP is a
6157 variable or parameter; else return 0. */
6159 static rtx
6160 var_rtx (exp)
6161 tree exp;
6163 STRIP_NOPS (exp);
6164 switch (TREE_CODE (exp))
6166 case PARM_DECL:
6167 case VAR_DECL:
6168 return DECL_RTL (exp);
6169 default:
6170 return 0;
6174 #ifdef MAX_INTEGER_COMPUTATION_MODE
6176 void
6177 check_max_integer_computation_mode (exp)
6178 tree exp;
6180 enum tree_code code;
6181 enum machine_mode mode;
6183 /* Strip any NOPs that don't change the mode. */
6184 STRIP_NOPS (exp);
6185 code = TREE_CODE (exp);
6187 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6188 if (code == NOP_EXPR
6189 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6190 return;
6192 /* First check the type of the overall operation. We need only look at
6193 unary, binary and relational operations. */
6194 if (TREE_CODE_CLASS (code) == '1'
6195 || TREE_CODE_CLASS (code) == '2'
6196 || TREE_CODE_CLASS (code) == '<')
6198 mode = TYPE_MODE (TREE_TYPE (exp));
6199 if (GET_MODE_CLASS (mode) == MODE_INT
6200 && mode > MAX_INTEGER_COMPUTATION_MODE)
6201 internal_error ("unsupported wide integer operation");
6204 /* Check operand of a unary op. */
6205 if (TREE_CODE_CLASS (code) == '1')
6207 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6208 if (GET_MODE_CLASS (mode) == MODE_INT
6209 && mode > MAX_INTEGER_COMPUTATION_MODE)
6210 internal_error ("unsupported wide integer operation");
6213 /* Check operands of a binary/comparison op. */
6214 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6216 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6217 if (GET_MODE_CLASS (mode) == MODE_INT
6218 && mode > MAX_INTEGER_COMPUTATION_MODE)
6219 internal_error ("unsupported wide integer operation");
6221 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6222 if (GET_MODE_CLASS (mode) == MODE_INT
6223 && mode > MAX_INTEGER_COMPUTATION_MODE)
6224 internal_error ("unsupported wide integer operation");
6227 #endif
6229 /* Return the highest power of two that EXP is known to be a multiple of.
6230 This is used in updating alignment of MEMs in array references. */
6232 static HOST_WIDE_INT
6233 highest_pow2_factor (exp)
6234 tree exp;
6236 HOST_WIDE_INT c0, c1;
6238 switch (TREE_CODE (exp))
6240 case INTEGER_CST:
6241 /* We can find the lowest bit that's a one. If the low
6242 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6243 We need to handle this case since we can find it in a COND_EXPR,
6244 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6245 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6246 later ICE. */
6247 if (TREE_CONSTANT_OVERFLOW (exp))
6248 return BIGGEST_ALIGNMENT;
6249 else
6251 /* Note: tree_low_cst is intentionally not used here,
6252 we don't care about the upper bits. */
6253 c0 = TREE_INT_CST_LOW (exp);
6254 c0 &= -c0;
6255 return c0 ? c0 : BIGGEST_ALIGNMENT;
6257 break;
6259 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6260 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6261 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6262 return MIN (c0, c1);
6264 case MULT_EXPR:
6265 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6266 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6267 return c0 * c1;
6269 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6270 case CEIL_DIV_EXPR:
6271 if (integer_pow2p (TREE_OPERAND (exp, 1))
6272 && host_integerp (TREE_OPERAND (exp, 1), 1))
6274 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6275 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6276 return MAX (1, c0 / c1);
6278 break;
6280 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6281 case SAVE_EXPR: case WITH_RECORD_EXPR:
6282 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6284 case COMPOUND_EXPR:
6285 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6287 case COND_EXPR:
6288 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6289 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6290 return MIN (c0, c1);
6292 default:
6293 break;
6296 return 1;
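/* Worked example (illustrative): for an offset expression such as
   `i * 12', the MULT_EXPR case multiplies the factor of `i' (1, since
   nothing more is known about it) by the factor of the constant 12
   (4, its lowest set bit), giving 4; for `i * 12 + 8' the PLUS_EXPR case
   then takes MIN (4, 8) == 4, so the whole expression is known to be a
   multiple of 4.  */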
6299 /* Similar, except that it is known that the expression must be a multiple
6300 of the alignment of TYPE. */
6302 static HOST_WIDE_INT
6303 highest_pow2_factor_for_type (type, exp)
6304 tree type;
6305 tree exp;
6307 HOST_WIDE_INT type_align, factor;
6309 factor = highest_pow2_factor (exp);
6310 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6311 return MAX (factor, type_align);
6314 /* Return an object on the placeholder list that matches EXP, a
6315 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6316 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6317 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6318 points to a location holding the starting point in the placeholder list
6319 (zero meaning the start of the list); on return, a pointer to the place
6320 in the placeholder list at which the object was found is stored there. */
6322 tree
6323 find_placeholder (exp, plist)
6324 tree exp;
6325 tree *plist;
6327 tree type = TREE_TYPE (exp);
6328 tree placeholder_expr;
6330 for (placeholder_expr
6331 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6332 placeholder_expr != 0;
6333 placeholder_expr = TREE_CHAIN (placeholder_expr))
6335 tree need_type = TYPE_MAIN_VARIANT (type);
6336 tree elt;
6338 /* Find the outermost reference that is of the type we want. If none,
6339 see if any object has a type that is a pointer to the type we
6340 want. */
6341 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6342 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6343 || TREE_CODE (elt) == COND_EXPR)
6344 ? TREE_OPERAND (elt, 1)
6345 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6346 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6347 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6348 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6349 ? TREE_OPERAND (elt, 0) : 0))
6350 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6352 if (plist)
6353 *plist = placeholder_expr;
6354 return elt;
6357 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6359 = ((TREE_CODE (elt) == COMPOUND_EXPR
6360 || TREE_CODE (elt) == COND_EXPR)
6361 ? TREE_OPERAND (elt, 1)
6362 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6363 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6364 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6365 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6366 ? TREE_OPERAND (elt, 0) : 0))
6367 if (POINTER_TYPE_P (TREE_TYPE (elt))
6368 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6369 == need_type))
6371 if (plist)
6372 *plist = placeholder_expr;
6373 return build1 (INDIRECT_REF, need_type, elt);
6377 return 0;
6380 /* expand_expr: generate code for computing expression EXP.
6381 An rtx for the computed value is returned. The value is never null.
6382 In the case of a void EXP, const0_rtx is returned.
6384 The value may be stored in TARGET if TARGET is nonzero.
6385 TARGET is just a suggestion; callers must assume that
6386 the rtx returned may not be the same as TARGET.
6388 If TARGET is CONST0_RTX, it means that the value will be ignored.
6390 If TMODE is not VOIDmode, it suggests generating the
6391 result in mode TMODE. But this is done only when convenient.
6392 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6393 TMODE is just a suggestion; callers must assume that
6394 the rtx returned may not have mode TMODE.
6396 Note that TARGET may have neither TMODE nor MODE. In that case, it
6397 probably will not be used.
6399 If MODIFIER is EXPAND_SUM then when EXP is an addition
6400 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6401 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6402 products as above, or REG or MEM, or constant.
6403 Ordinarily in such cases we would output mul or add instructions
6404 and then return a pseudo reg containing the sum.
6406 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6407 it also marks a label as absolutely required (it can't be dead).
6408 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6409 This is used for outputting expressions used in initializers.
6411 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6412 with a constant address even if that address is not normally legitimate.
6413 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
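/* Example (illustrative sketch): the common way to evaluate a tree purely
   for its value is

	rtx r = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   which lets the function pick both the target and the mode; passing
   const0_rtx as TARGET instead asks only for side effects, and
   EXPAND_SUM / EXPAND_INITIALIZER permit the looser address forms
   described above.  */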
6416 expand_expr (exp, target, tmode, modifier)
6417 tree exp;
6418 rtx target;
6419 enum machine_mode tmode;
6420 enum expand_modifier modifier;
6422 rtx op0, op1, temp;
6423 tree type = TREE_TYPE (exp);
6424 int unsignedp = TREE_UNSIGNED (type);
6425 enum machine_mode mode;
6426 enum tree_code code = TREE_CODE (exp);
6427 optab this_optab;
6428 rtx subtarget, original_target;
6429 int ignore;
6430 tree context;
6432 /* Handle ERROR_MARK before anybody tries to access its type. */
6433 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6435 op0 = CONST0_RTX (tmode);
6436 if (op0 != 0)
6437 return op0;
6438 return const0_rtx;
6441 mode = TYPE_MODE (type);
6442 /* Use subtarget as the target for operand 0 of a binary operation. */
6443 subtarget = get_subtarget (target);
6444 original_target = target;
6445 ignore = (target == const0_rtx
6446 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6447 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6448 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6449 && TREE_CODE (type) == VOID_TYPE));
6451 /* If we are going to ignore this result, we need only do something
6452 if there is a side-effect somewhere in the expression. If there
6453 is, short-circuit the most common cases here. Note that we must
6454 not call expand_expr with anything but const0_rtx in case this
6455 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6457 if (ignore)
6459 if (! TREE_SIDE_EFFECTS (exp))
6460 return const0_rtx;
6462 /* Ensure we reference a volatile object even if value is ignored, but
6463 don't do this if all we are doing is taking its address. */
6464 if (TREE_THIS_VOLATILE (exp)
6465 && TREE_CODE (exp) != FUNCTION_DECL
6466 && mode != VOIDmode && mode != BLKmode
6467 && modifier != EXPAND_CONST_ADDRESS)
6469 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6470 if (GET_CODE (temp) == MEM)
6471 temp = copy_to_reg (temp);
6472 return const0_rtx;
6475 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6476 || code == INDIRECT_REF || code == BUFFER_REF)
6477 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6478 modifier);
6480 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6481 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6483 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6484 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6485 return const0_rtx;
6487 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6488 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6489 /* If the second operand has no side effects, just evaluate
6490 the first. */
6491 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6492 modifier);
6493 else if (code == BIT_FIELD_REF)
6495 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6496 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6497 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6498 return const0_rtx;
6501 target = 0;
6504 #ifdef MAX_INTEGER_COMPUTATION_MODE
6505 /* Only check stuff here if the mode we want is different from the mode
6506 of the expression; if it's the same, check_max_integer_computation_mode
6507 will handle it. Do we really need to check this stuff at all? */
6509 if (target
6510 && GET_MODE (target) != mode
6511 && TREE_CODE (exp) != INTEGER_CST
6512 && TREE_CODE (exp) != PARM_DECL
6513 && TREE_CODE (exp) != ARRAY_REF
6514 && TREE_CODE (exp) != ARRAY_RANGE_REF
6515 && TREE_CODE (exp) != COMPONENT_REF
6516 && TREE_CODE (exp) != BIT_FIELD_REF
6517 && TREE_CODE (exp) != INDIRECT_REF
6518 && TREE_CODE (exp) != CALL_EXPR
6519 && TREE_CODE (exp) != VAR_DECL
6520 && TREE_CODE (exp) != RTL_EXPR)
6522 enum machine_mode mode = GET_MODE (target);
6524 if (GET_MODE_CLASS (mode) == MODE_INT
6525 && mode > MAX_INTEGER_COMPUTATION_MODE)
6526 internal_error ("unsupported wide integer operation");
6529 if (tmode != mode
6530 && TREE_CODE (exp) != INTEGER_CST
6531 && TREE_CODE (exp) != PARM_DECL
6532 && TREE_CODE (exp) != ARRAY_REF
6533 && TREE_CODE (exp) != ARRAY_RANGE_REF
6534 && TREE_CODE (exp) != COMPONENT_REF
6535 && TREE_CODE (exp) != BIT_FIELD_REF
6536 && TREE_CODE (exp) != INDIRECT_REF
6537 && TREE_CODE (exp) != VAR_DECL
6538 && TREE_CODE (exp) != CALL_EXPR
6539 && TREE_CODE (exp) != RTL_EXPR
6540 && GET_MODE_CLASS (tmode) == MODE_INT
6541 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6542 internal_error ("unsupported wide integer operation");
6544 check_max_integer_computation_mode (exp);
6545 #endif
6547 /* If we will do cse, generate all results into pseudo registers
6548 since 1) that allows cse to find more things
6549 and 2) otherwise cse could produce an insn the machine
6550 cannot support. An exception is a CONSTRUCTOR into a multi-word
6551 MEM: that's much more likely to be most efficient into the MEM.
6552 Another is a CALL_EXPR which must return in memory. */
6554 if (! cse_not_expected && mode != BLKmode && target
6555 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6556 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6557 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6558 target = subtarget;
6560 switch (code)
6562 case LABEL_DECL:
6564 tree function = decl_function_context (exp);
6565 /* Handle using a label in a containing function. */
6566 if (function != current_function_decl
6567 && function != inline_function_decl && function != 0)
6569 struct function *p = find_function_data (function);
6570 p->expr->x_forced_labels
6571 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6572 p->expr->x_forced_labels);
6574 else
6576 if (modifier == EXPAND_INITIALIZER)
6577 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6578 label_rtx (exp),
6579 forced_labels);
6582 temp = gen_rtx_MEM (FUNCTION_MODE,
6583 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6584 if (function != current_function_decl
6585 && function != inline_function_decl && function != 0)
6586 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6587 return temp;
6590 case PARM_DECL:
6591 if (!DECL_RTL_SET_P (exp))
6593 error_with_decl (exp, "prior parameter's size depends on `%s'");
6594 return CONST0_RTX (mode);
6597 /* ... fall through ... */
6599 case VAR_DECL:
6600 /* If a static var's type was incomplete when the decl was written,
6601 but the type is complete now, lay out the decl now. */
6602 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6603 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6605 rtx value = DECL_RTL_IF_SET (exp);
6607 layout_decl (exp, 0);
6609 /* If the RTL was already set, update its mode and memory
6610 attributes. */
6611 if (value != 0)
6613 PUT_MODE (value, DECL_MODE (exp));
6614 SET_DECL_RTL (exp, 0);
6615 set_mem_attributes (value, exp, 1);
6616 SET_DECL_RTL (exp, value);
6620 /* ... fall through ... */
6622 case FUNCTION_DECL:
6623 case RESULT_DECL:
6624 if (DECL_RTL (exp) == 0)
6625 abort ();
6627 /* Ensure the variable is marked as used even if it doesn't go through
6628 a parser. If it hasn't been used yet, write out an external
6629 definition. */
6630 if (! TREE_USED (exp))
6632 assemble_external (exp);
6633 TREE_USED (exp) = 1;
6636 /* Show we haven't gotten RTL for this yet. */
6637 temp = 0;
6639 /* Handle variables inherited from containing functions. */
6640 context = decl_function_context (exp);
6642 /* We treat inline_function_decl as an alias for the current function
6643 because that is the inline function whose vars, types, etc.
6644 are being merged into the current function.
6645 See expand_inline_function. */
6647 if (context != 0 && context != current_function_decl
6648 && context != inline_function_decl
6649 /* If var is static, we don't need a static chain to access it. */
6650 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6651 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6653 rtx addr;
6655 /* Mark as non-local and addressable. */
6656 DECL_NONLOCAL (exp) = 1;
6657 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6658 abort ();
6659 (*lang_hooks.mark_addressable) (exp);
6660 if (GET_CODE (DECL_RTL (exp)) != MEM)
6661 abort ();
6662 addr = XEXP (DECL_RTL (exp), 0);
6663 if (GET_CODE (addr) == MEM)
6664 addr
6665 = replace_equiv_address (addr,
6666 fix_lexical_addr (XEXP (addr, 0), exp));
6667 else
6668 addr = fix_lexical_addr (addr, exp);
6670 temp = replace_equiv_address (DECL_RTL (exp), addr);
6673 /* This is the case of an array whose size is to be determined
6674 from its initializer, while the initializer is still being parsed.
6675 See expand_decl. */
6677 else if (GET_CODE (DECL_RTL (exp)) == MEM
6678 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6679 temp = validize_mem (DECL_RTL (exp));
6681 /* If DECL_RTL is memory, we are in the normal case; if either
6682 the address is not valid, or it is not a register and -fforce-addr
6683 is specified, get the address into a register. */
6685 else if (GET_CODE (DECL_RTL (exp)) == MEM
6686 && modifier != EXPAND_CONST_ADDRESS
6687 && modifier != EXPAND_SUM
6688 && modifier != EXPAND_INITIALIZER
6689 && (! memory_address_p (DECL_MODE (exp),
6690 XEXP (DECL_RTL (exp), 0))
6691 || (flag_force_addr
6692 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6693 temp = replace_equiv_address (DECL_RTL (exp),
6694 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6696 /* If we got something, return it. But first, set the alignment
6697 if the address is a register. */
6698 if (temp != 0)
6700 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6701 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6703 return temp;
6706 /* If the mode of DECL_RTL does not match that of the decl, it
6707 must be a promoted value. We return a SUBREG of the wanted mode,
6708 but mark it so that we know that it was already extended. */
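      /* Illustrative note (an assumed example, not from this file): on a
	 target that promotes narrow variables to word-sized registers, the
	 DECL_RTL of a QImode variable may be an SImode REG; the code below
	 then returns something like (subreg:QI (reg:SI N) 0) with
	 SUBREG_PROMOTED_VAR_P set, so later users know the upper bits
	 already hold a valid extension.  */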
6710 if (GET_CODE (DECL_RTL (exp)) == REG
6711 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6713 /* Get the signedness used for this variable. Ensure we get the
6714 same mode we got when the variable was declared. */
6715 if (GET_MODE (DECL_RTL (exp))
6716 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6717 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6718 abort ();
6720 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6721 SUBREG_PROMOTED_VAR_P (temp) = 1;
6722 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6723 return temp;
6726 return DECL_RTL (exp);
6728 case INTEGER_CST:
6729 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6730 TREE_INT_CST_HIGH (exp), mode);
6732 /* ??? If overflow is set, fold will have done an incomplete job,
6733 which can result in (plus xx (const_int 0)), which can get
6734 simplified by validate_replace_rtx during virtual register
6735 instantiation, which can result in unrecognizable insns.
6736 Avoid this by forcing all overflows into registers. */
6737 if (TREE_CONSTANT_OVERFLOW (exp)
6738 && modifier != EXPAND_INITIALIZER)
6739 temp = force_reg (mode, temp);
6741 return temp;
6743 case CONST_DECL:
6744 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6746 case REAL_CST:
6747 /* If optimized, generate immediate CONST_DOUBLE
6748 which will be turned into memory by reload if necessary.
6750 We used to force a register so that loop.c could see it. But
6751 this does not allow gen_* patterns to perform optimizations with
6752 the constants. It also produces two insns in cases like "x = 1.0;".
6753 On most machines, floating-point constants are not permitted in
6754 many insns, so we'd end up copying it to a register in any case.
6756 Now, we do the copying in expand_binop, if appropriate. */
6757 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6758 TYPE_MODE (TREE_TYPE (exp)));
6760 case COMPLEX_CST:
6761 case STRING_CST:
6762 if (! TREE_CST_RTL (exp))
6763 output_constant_def (exp, 1);
6765 /* TREE_CST_RTL probably contains a constant address.
6766 On RISC machines where a constant address isn't valid,
6767 make some insns to get that address into a register. */
6768 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6769 && modifier != EXPAND_CONST_ADDRESS
6770 && modifier != EXPAND_INITIALIZER
6771 && modifier != EXPAND_SUM
6772 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6773 || (flag_force_addr
6774 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6775 return replace_equiv_address (TREE_CST_RTL (exp),
6776 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6777 return TREE_CST_RTL (exp);
6779 case EXPR_WITH_FILE_LOCATION:
6781 rtx to_return;
6782 const char *saved_input_filename = input_filename;
6783 int saved_lineno = lineno;
6784 input_filename = EXPR_WFL_FILENAME (exp);
6785 lineno = EXPR_WFL_LINENO (exp);
6786 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6787 emit_line_note (input_filename, lineno);
6788 /* Possibly avoid switching back and forth here. */
6789 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6790 input_filename = saved_input_filename;
6791 lineno = saved_lineno;
6792 return to_return;
6795 case SAVE_EXPR:
6796 context = decl_function_context (exp);
6798 /* If this SAVE_EXPR was at global context, assume we are an
6799 initialization function and move it into our context. */
6800 if (context == 0)
6801 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6803 /* We treat inline_function_decl as an alias for the current function
6804 because that is the inline function whose vars, types, etc.
6805 are being merged into the current function.
6806 See expand_inline_function. */
6807 if (context == current_function_decl || context == inline_function_decl)
6808 context = 0;
6810 /* If this is non-local, handle it. */
6811 if (context)
6813 /* The following call just exists to abort if the context is
6814 not of a containing function. */
6815 find_function_data (context);
6817 temp = SAVE_EXPR_RTL (exp);
6818 if (temp && GET_CODE (temp) == REG)
6820 put_var_into_stack (exp);
6821 temp = SAVE_EXPR_RTL (exp);
6823 if (temp == 0 || GET_CODE (temp) != MEM)
6824 abort ();
6825 return
6826 replace_equiv_address (temp,
6827 fix_lexical_addr (XEXP (temp, 0), exp));
6829 if (SAVE_EXPR_RTL (exp) == 0)
6831 if (mode == VOIDmode)
6832 temp = const0_rtx;
6833 else
6834 temp = assign_temp (build_qualified_type (type,
6835 (TYPE_QUALS (type)
6836 | TYPE_QUAL_CONST)),
6837 3, 0, 0);
6839 SAVE_EXPR_RTL (exp) = temp;
6840 if (!optimize && GET_CODE (temp) == REG)
6841 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6842 save_expr_regs);
6844 /* If the mode of TEMP does not match that of the expression, it
6845 must be a promoted value. We pass store_expr a SUBREG of the
6846 wanted mode but mark it so that we know that it was already
6847 extended. Note that `unsignedp' was modified above in
6848 this case. */
6850 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6852 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6853 SUBREG_PROMOTED_VAR_P (temp) = 1;
6854 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6857 if (temp == const0_rtx)
6858 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6859 else
6860 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6862 TREE_USED (exp) = 1;
6865 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6866 must be a promoted value. We return a SUBREG of the wanted mode,
6867 but mark it so that we know that it was already extended. */
6869 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6870 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6872 /* Compute the signedness and make the proper SUBREG. */
6873 promote_mode (type, mode, &unsignedp, 0);
6874 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6875 SUBREG_PROMOTED_VAR_P (temp) = 1;
6876 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6877 return temp;
6880 return SAVE_EXPR_RTL (exp);
6882 case UNSAVE_EXPR:
6884 rtx temp;
6885 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6886 TREE_OPERAND (exp, 0)
6887 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6888 return temp;
6891 case PLACEHOLDER_EXPR:
6893 tree old_list = placeholder_list;
6894 tree placeholder_expr = 0;
6896 exp = find_placeholder (exp, &placeholder_expr);
6897 if (exp == 0)
6898 abort ();
6900 placeholder_list = TREE_CHAIN (placeholder_expr);
6901 temp = expand_expr (exp, original_target, tmode, modifier);
6902 placeholder_list = old_list;
6903 return temp;
6906 case WITH_RECORD_EXPR:
6907 /* Put the object on the placeholder list, expand our first operand,
6908 and pop the list. */
6909 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6910 placeholder_list);
6911 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6912 modifier);
6913 placeholder_list = TREE_CHAIN (placeholder_list);
6914 return target;
6916 case GOTO_EXPR:
6917 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6918 expand_goto (TREE_OPERAND (exp, 0));
6919 else
6920 expand_computed_goto (TREE_OPERAND (exp, 0));
6921 return const0_rtx;
6923 case EXIT_EXPR:
6924 expand_exit_loop_if_false (NULL,
6925 invert_truthvalue (TREE_OPERAND (exp, 0)));
6926 return const0_rtx;
6928 case LABELED_BLOCK_EXPR:
6929 if (LABELED_BLOCK_BODY (exp))
6930 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6931 /* Should perhaps use expand_label, but this is simpler and safer. */
6932 do_pending_stack_adjust ();
6933 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6934 return const0_rtx;
6936 case EXIT_BLOCK_EXPR:
6937 if (EXIT_BLOCK_RETURN (exp))
6938 sorry ("returned value in block_exit_expr");
6939 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6940 return const0_rtx;
6942 case LOOP_EXPR:
6943 push_temp_slots ();
6944 expand_start_loop (1);
6945 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6946 expand_end_loop ();
6947 pop_temp_slots ();
6949 return const0_rtx;
6951 case BIND_EXPR:
6953 tree vars = TREE_OPERAND (exp, 0);
6955 /* Need to open a binding contour here because
6956 if there are any cleanups they must be contained here. */
6957 expand_start_bindings (2);
6959 /* Mark the corresponding BLOCK for output in its proper place. */
6960 if (TREE_OPERAND (exp, 2) != 0
6961 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6962 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6964 /* If VARS have not yet been expanded, expand them now. */
6965 while (vars)
6967 if (!DECL_RTL_SET_P (vars))
6968 expand_decl (vars);
6969 expand_decl_init (vars);
6970 vars = TREE_CHAIN (vars);
6973 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6975 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6977 return temp;
6980 case RTL_EXPR:
6981 if (RTL_EXPR_SEQUENCE (exp))
6983 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6984 abort ();
6985 emit_insn (RTL_EXPR_SEQUENCE (exp));
6986 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6988 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6989 free_temps_for_rtl_expr (exp);
6990 return RTL_EXPR_RTL (exp);
6992 case CONSTRUCTOR:
6993 /* If we don't need the result, just ensure we evaluate any
6994 subexpressions. */
6995 if (ignore)
6997 tree elt;
6999 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7000 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7002 return const0_rtx;
7005 /* All elts simple constants => refer to a constant in memory. But
7006 if this is a non-BLKmode mode, let it store a field at a time
7007 since that should make a CONST_INT or CONST_DOUBLE when we
7008 fold. Likewise, if we have a target we can use, it is best to
7009 store directly into the target unless the type is large enough
7010 that memcpy will be used. If we are making an initializer and
7011 all operands are constant, put it in memory as well.
7013 FIXME: Avoid trying to fill vector constructors piece-meal.
7014 Output them with output_constant_def below unless we're sure
7015 they're zeros. This should go away when vector initializers
7016 are treated like VECTOR_CST instead of arrays.
7018 else if ((TREE_STATIC (exp)
7019 && ((mode == BLKmode
7020 && ! (target != 0 && safe_from_p (target, exp, 1)))
7021 || TREE_ADDRESSABLE (exp)
7022 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7023 && (! MOVE_BY_PIECES_P
7024 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7025 TYPE_ALIGN (type)))
7026 && ((TREE_CODE (type) == VECTOR_TYPE
7027 && !is_zeros_p (exp))
7028 || ! mostly_zeros_p (exp)))))
7029 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7031 rtx constructor = output_constant_def (exp, 1);
7033 if (modifier != EXPAND_CONST_ADDRESS
7034 && modifier != EXPAND_INITIALIZER
7035 && modifier != EXPAND_SUM)
7036 constructor = validize_mem (constructor);
7038 return constructor;
7040 else
7042 /* Handle calls that pass values in multiple non-contiguous
7043 locations. The Irix 6 ABI has examples of this. */
7044 if (target == 0 || ! safe_from_p (target, exp, 1)
7045 || GET_CODE (target) == PARALLEL)
7046 target
7047 = assign_temp (build_qualified_type (type,
7048 (TYPE_QUALS (type)
7049 | (TREE_READONLY (exp)
7050 * TYPE_QUAL_CONST))),
7051 0, TREE_ADDRESSABLE (exp), 1);
7053 store_constructor (exp, target, 0, int_expr_size (exp));
7054 return target;
7057 case INDIRECT_REF:
7059 tree exp1 = TREE_OPERAND (exp, 0);
7060 tree index;
7061 tree string = string_constant (exp1, &index);
7063 /* Try to optimize reads from const strings. */
7064 if (string
7065 && TREE_CODE (string) == STRING_CST
7066 && TREE_CODE (index) == INTEGER_CST
7067 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7068 && GET_MODE_CLASS (mode) == MODE_INT
7069 && GET_MODE_SIZE (mode) == 1
7070 && modifier != EXPAND_WRITE)
7071 return gen_int_mode (TREE_STRING_POINTER (string)
7072 [TREE_INT_CST_LOW (index)], mode);
7074 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7075 op0 = memory_address (mode, op0);
7076 temp = gen_rtx_MEM (mode, op0);
7077 set_mem_attributes (temp, exp, 0);
7079 /* If we are writing to this object and its type is a record with
7080 readonly fields, we must mark it as readonly so it will
7081 conflict with readonly references to those fields. */
7082 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7083 RTX_UNCHANGING_P (temp) = 1;
7085 return temp;
7088 case ARRAY_REF:
7089 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7090 abort ();
7093 tree array = TREE_OPERAND (exp, 0);
7094 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7095 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7096 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7097 HOST_WIDE_INT i;
7099 /* Optimize the special-case of a zero lower bound.
7101 We convert the low_bound to sizetype to avoid some problems
7102 with constant folding. (E.g. suppose the lower bound is 1,
7103 and its mode is QI. Without the conversion, (ARRAY
7104 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7105 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7107 if (! integer_zerop (low_bound))
7108 index = size_diffop (index, convert (sizetype, low_bound));
7110 /* Fold an expression like: "foo"[2].
7111 This is not done in fold so it won't happen inside &.
7112 Don't fold if this is for wide characters since it's too
7113 difficult to do correctly and this is a very rare case. */
7115 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7116 && TREE_CODE (array) == STRING_CST
7117 && TREE_CODE (index) == INTEGER_CST
7118 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7119 && GET_MODE_CLASS (mode) == MODE_INT
7120 && GET_MODE_SIZE (mode) == 1)
7121 return gen_int_mode (TREE_STRING_POINTER (array)
7122 [TREE_INT_CST_LOW (index)], mode);
7124 /* If this is a constant index into a constant array,
7125 just get the value from the array. Handle both the cases when
7126 we have an explicit constructor and when our operand is a variable
7127 that was declared const. */
7129 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7130 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7131 && TREE_CODE (index) == INTEGER_CST
7132 && 0 > compare_tree_int (index,
7133 list_length (CONSTRUCTOR_ELTS
7134 (TREE_OPERAND (exp, 0)))))
7136 tree elem;
7138 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7139 i = TREE_INT_CST_LOW (index);
7140 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7143 if (elem)
7144 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7145 modifier);
7148 else if (optimize >= 1
7149 && modifier != EXPAND_CONST_ADDRESS
7150 && modifier != EXPAND_INITIALIZER
7151 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7152 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7153 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7155 if (TREE_CODE (index) == INTEGER_CST)
7157 tree init = DECL_INITIAL (array);
7159 if (TREE_CODE (init) == CONSTRUCTOR)
7161 tree elem;
7163 for (elem = CONSTRUCTOR_ELTS (init);
7164 (elem
7165 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7166 elem = TREE_CHAIN (elem))
7169 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7170 return expand_expr (fold (TREE_VALUE (elem)), target,
7171 tmode, modifier);
7173 else if (TREE_CODE (init) == STRING_CST
7174 && 0 > compare_tree_int (index,
7175 TREE_STRING_LENGTH (init)))
7177 tree type = TREE_TYPE (TREE_TYPE (init));
7178 enum machine_mode mode = TYPE_MODE (type);
7180 if (GET_MODE_CLASS (mode) == MODE_INT
7181 && GET_MODE_SIZE (mode) == 1)
7182 return gen_int_mode (TREE_STRING_POINTER (init)
7183 [TREE_INT_CST_LOW (index)], mode);
7188 /* Fall through. */
7190 case COMPONENT_REF:
7191 case BIT_FIELD_REF:
7192 case ARRAY_RANGE_REF:
7193 /* If the operand is a CONSTRUCTOR, we can just extract the
7194 appropriate field if it is present. Don't do this if we have
7195 already written the data since we want to refer to that copy
7196 and varasm.c assumes that's what we'll do. */
7197 if (code == COMPONENT_REF
7198 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7199 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7201 tree elt;
7203 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7204 elt = TREE_CHAIN (elt))
7205 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7206 /* We can normally use the value of the field in the
7207 CONSTRUCTOR. However, if this is a bitfield in
7208 an integral mode that we can fit in a HOST_WIDE_INT,
7209 we must mask only the number of bits in the bitfield,
7210 since this is done implicitly by the constructor. If
7211 the bitfield does not meet either of those conditions,
7212 we can't do this optimization. */
7213 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7214 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7215 == MODE_INT)
7216 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7217 <= HOST_BITS_PER_WIDE_INT))))
7219 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7220 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7222 HOST_WIDE_INT bitsize
7223 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7224 enum machine_mode imode
7225 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7227 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7229 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7230 op0 = expand_and (imode, op0, op1, target);
7232 else
7234 tree count
7235 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7238 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7239 target, 0);
7240 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7241 target, 0);
7245 return op0;
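	  /* Worked example (illustrative) for the bit-field extraction just
	     above: a signed 3-bit field whose CONSTRUCTOR value is -3 is
	     shifted left by GET_MODE_BITSIZE (imode) - 3 and then
	     arithmetically right by the same amount, reproducing -3 with
	     correct sign extension; an unsigned field is instead masked
	     with (1 << 3) - 1 == 7.  */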
7250 enum machine_mode mode1;
7251 HOST_WIDE_INT bitsize, bitpos;
7252 tree offset;
7253 int volatilep = 0;
7254 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7255 &mode1, &unsignedp, &volatilep);
7256 rtx orig_op0;
7258 /* If we got back the original object, something is wrong. Perhaps
7259 we are evaluating an expression too early. In any event, don't
7260 infinitely recurse. */
7261 if (tem == exp)
7262 abort ();
7264 /* If TEM's type is a union of variable size, pass TARGET to the inner
7265 computation, since it will need a temporary and TARGET is known
7266 to suffice for that. This occurs in unchecked conversion in Ada. */
7268 orig_op0 = op0
7269 = expand_expr (tem,
7270 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7271 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7272 != INTEGER_CST)
7273 ? target : NULL_RTX),
7274 VOIDmode,
7275 (modifier == EXPAND_INITIALIZER
7276 || modifier == EXPAND_CONST_ADDRESS)
7277 ? modifier : EXPAND_NORMAL);
7279 /* If this is a constant, put it into a register if it is a
7280 legitimate constant and OFFSET is 0; otherwise put it in memory. */
7281 if (CONSTANT_P (op0))
7283 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7284 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7285 && offset == 0)
7286 op0 = force_reg (mode, op0);
7287 else
7288 op0 = validize_mem (force_const_mem (mode, op0));
7291 if (offset != 0)
7293 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7295 /* If this object is in a register, put it into memory.
7296 This case can't occur in C, but can in Ada if we have
7297 unchecked conversion of an expression from a scalar type to
7298 an array or record type. */
7299 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7300 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7302 /* If the operand is a SAVE_EXPR, we can deal with this by
7303 forcing the SAVE_EXPR into memory. */
7304 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7306 put_var_into_stack (TREE_OPERAND (exp, 0));
7307 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7309 else
7311 tree nt
7312 = build_qualified_type (TREE_TYPE (tem),
7313 (TYPE_QUALS (TREE_TYPE (tem))
7314 | TYPE_QUAL_CONST));
7315 rtx memloc = assign_temp (nt, 1, 1, 1);
7317 emit_move_insn (memloc, op0);
7318 op0 = memloc;
7322 if (GET_CODE (op0) != MEM)
7323 abort ();
7325 #ifdef POINTERS_EXTEND_UNSIGNED
7326 if (GET_MODE (offset_rtx) != Pmode)
7327 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7328 #else
7329 if (GET_MODE (offset_rtx) != ptr_mode)
7330 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7331 #endif
7333 /* A constant address in OP0 can have VOIDmode; we must not try
7334 to call force_reg in that case, so avoid it. */
7335 if (GET_CODE (op0) == MEM
7336 && GET_MODE (op0) == BLKmode
7337 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7338 && bitsize != 0
7339 && (bitpos % bitsize) == 0
7340 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7341 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7343 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7344 bitpos = 0;
7347 op0 = offset_address (op0, offset_rtx,
7348 highest_pow2_factor (offset));
7351 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7352 record its alignment as BIGGEST_ALIGNMENT. */
7353 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7354 && is_aligning_offset (offset, tem))
7355 set_mem_align (op0, BIGGEST_ALIGNMENT);
7357 /* Don't forget about volatility even if this is a bitfield. */
7358 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7360 if (op0 == orig_op0)
7361 op0 = copy_rtx (op0);
7363 MEM_VOLATILE_P (op0) = 1;
7366 /* The following code doesn't handle CONCAT.
7367 Assume only bitpos == 0 can be used for CONCAT, due to
7368 one-element arrays having the same mode as their element. */
7369 if (GET_CODE (op0) == CONCAT)
7371 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7372 abort ();
7373 return op0;
7376 /* In cases where an aligned union has an unaligned object
7377 as a field, we might be extracting a BLKmode value from
7378 an integer-mode (e.g., SImode) object. Handle this case
7379 by doing the extract into an object as wide as the field
7380 (which we know to be the width of a basic mode), then
7381 storing into memory, and changing the mode to BLKmode. */
7382 if (mode1 == VOIDmode
7383 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7384 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7385 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7386 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7387 && modifier != EXPAND_CONST_ADDRESS
7388 && modifier != EXPAND_INITIALIZER)
7389 /* If the field isn't aligned enough to fetch as a memref,
7390 fetch it as a bit field. */
7391 || (mode1 != BLKmode
7392 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7393 && ((TYPE_ALIGN (TREE_TYPE (tem))
7394 < GET_MODE_ALIGNMENT (mode))
7395 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7396 /* If the type and the field are a constant size and the
7397 size of the type isn't the same size as the bitfield,
7398 we must use bitfield operations. */
7399 || (bitsize >= 0
7400 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7401 == INTEGER_CST)
7402 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7403 bitsize)))
7405 enum machine_mode ext_mode = mode;
7407 if (ext_mode == BLKmode
7408 && ! (target != 0 && GET_CODE (op0) == MEM
7409 && GET_CODE (target) == MEM
7410 && bitpos % BITS_PER_UNIT == 0))
7411 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7413 if (ext_mode == BLKmode)
7415 /* In this case, BITPOS must start at a byte boundary and
7416 TARGET, if specified, must be a MEM. */
7417 if (GET_CODE (op0) != MEM
7418 || (target != 0 && GET_CODE (target) != MEM)
7419 || bitpos % BITS_PER_UNIT != 0)
7420 abort ();
7422 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7423 if (target == 0)
7424 target = assign_temp (type, 0, 1, 1);
7426 emit_block_move (target, op0,
7427 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7428 / BITS_PER_UNIT),
7429 BLOCK_OP_NORMAL);
7431 return target;
7434 op0 = validize_mem (op0);
7436 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7437 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7439 op0 = extract_bit_field (op0, bitsize, bitpos,
7440 unsignedp, target, ext_mode, ext_mode,
7441 int_size_in_bytes (TREE_TYPE (tem)));
7443 /* If the result is a record type and BITSIZE is narrower than
7444 the mode of OP0, an integral mode, and this is a big endian
7445 machine, we must put the field into the high-order bits. */
7446 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7447 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7448 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7449 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7450 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7451 - bitsize),
7452 op0, 1);
7454 if (mode == BLKmode)
7456 rtx new = assign_temp (build_qualified_type
7457 ((*lang_hooks.types.type_for_mode)
7458 (ext_mode, 0),
7459 TYPE_QUAL_CONST), 0, 1, 1);
7461 emit_move_insn (new, op0);
7462 op0 = copy_rtx (new);
7463 PUT_MODE (op0, BLKmode);
7464 set_mem_attributes (op0, exp, 1);
7467 return op0;
7470 /* If the result is BLKmode, use that to access the object
7471 now as well. */
7472 if (mode == BLKmode)
7473 mode1 = BLKmode;
7475 /* Get a reference to just this component. */
7476 if (modifier == EXPAND_CONST_ADDRESS
7477 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7478 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7479 else
7480 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7482 if (op0 == orig_op0)
7483 op0 = copy_rtx (op0);
7485 set_mem_attributes (op0, exp, 0);
7486 if (GET_CODE (XEXP (op0, 0)) == REG)
7487 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7489 MEM_VOLATILE_P (op0) |= volatilep;
7490 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7491 || modifier == EXPAND_CONST_ADDRESS
7492 || modifier == EXPAND_INITIALIZER)
7493 return op0;
7494 else if (target == 0)
7495 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7497 convert_move (target, op0, unsignedp);
7498 return target;
7501 case VTABLE_REF:
7503 rtx insn, before = get_last_insn (), vtbl_ref;
7505 /* Evaluate the interior expression. */
7506 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7507 tmode, modifier);
7509 /* Get or create an instruction off which to hang a note. */
7510 if (REG_P (subtarget))
7512 target = subtarget;
7513 insn = get_last_insn ();
7514 if (insn == before)
7515 abort ();
7516 if (! INSN_P (insn))
7517 insn = prev_nonnote_insn (insn);
7519 else
7521 target = gen_reg_rtx (GET_MODE (subtarget));
7522 insn = emit_move_insn (target, subtarget);
7525 /* Collect the data for the note. */
7526 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7527 vtbl_ref = plus_constant (vtbl_ref,
7528 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7529 /* Discard the initial CONST that was added. */
7530 vtbl_ref = XEXP (vtbl_ref, 0);
7532 REG_NOTES (insn)
7533 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7535 return target;
7538 /* Intended for a reference to a buffer of a file-object in Pascal.
7539 But it's not certain that a special tree code will really be
7540 necessary for these. INDIRECT_REF might work for them. */
7541 case BUFFER_REF:
7542 abort ();
7544 case IN_EXPR:
7546 /* Pascal set IN expression.
7548 Algorithm:
7549 rlo = set_low - (set_low%bits_per_word);
7550 the_word = set [ (index - rlo)/bits_per_word ];
7551 bit_index = index % bits_per_word;
7552 bitmask = 1 << bit_index;
7553 return !!(the_word & bitmask); */
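/* Editorial illustration (not in the original source): the algorithm above
   written as plain C, assuming an 8-bit storage unit for concreteness:

       int rlo = set_low - (set_low % 8);
       unsigned char the_word = set[(index - rlo) / 8];
       int bit_index = index % 8;
       return (the_word & (1 << bit_index)) != 0;

   For set_low == 0 and index == 10 this tests bit 2 of set[1].  */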
7555 tree set = TREE_OPERAND (exp, 0);
7556 tree index = TREE_OPERAND (exp, 1);
7557 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7558 tree set_type = TREE_TYPE (set);
7559 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7560 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7561 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7562 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7563 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7564 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7565 rtx setaddr = XEXP (setval, 0);
7566 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7567 rtx rlow;
7568 rtx diff, quo, rem, addr, bit, result;
7570 /* If domain is empty, answer is no. Likewise if index is constant
7571 and out of bounds. */
7572 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7573 && TREE_CODE (set_low_bound) == INTEGER_CST
7574 && tree_int_cst_lt (set_high_bound, set_low_bound))
7575 || (TREE_CODE (index) == INTEGER_CST
7576 && TREE_CODE (set_low_bound) == INTEGER_CST
7577 && tree_int_cst_lt (index, set_low_bound))
7578 || (TREE_CODE (set_high_bound) == INTEGER_CST
7579 && TREE_CODE (index) == INTEGER_CST
7580 && tree_int_cst_lt (set_high_bound, index))))
7581 return const0_rtx;
7583 if (target == 0)
7584 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7586 /* If we get here, we have to generate the code for both cases
7587 (in range and out of range). */
7589 op0 = gen_label_rtx ();
7590 op1 = gen_label_rtx ();
7592 if (! (GET_CODE (index_val) == CONST_INT
7593 && GET_CODE (lo_r) == CONST_INT))
7594 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7595 GET_MODE (index_val), iunsignedp, op1);
7597 if (! (GET_CODE (index_val) == CONST_INT
7598 && GET_CODE (hi_r) == CONST_INT))
7599 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7600 GET_MODE (index_val), iunsignedp, op1);
7602 /* Calculate the element number of bit zero in the first word
7603 of the set. */
7604 if (GET_CODE (lo_r) == CONST_INT)
7605 rlow = GEN_INT (INTVAL (lo_r)
7606 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7607 else
7608 rlow = expand_binop (index_mode, and_optab, lo_r,
7609 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7610 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7612 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7613 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7615 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7616 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7617 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7618 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7620 addr = memory_address (byte_mode,
7621 expand_binop (index_mode, add_optab, diff,
7622 setaddr, NULL_RTX, iunsignedp,
7623 OPTAB_LIB_WIDEN));
7625 /* Extract the bit we want to examine. */
7626 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7627 gen_rtx_MEM (byte_mode, addr),
7628 make_tree (TREE_TYPE (index), rem),
7629 NULL_RTX, 1);
7630 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7631 GET_MODE (target) == byte_mode ? target : 0,
7632 1, OPTAB_LIB_WIDEN);
7634 if (result != target)
7635 convert_move (target, result, 1);
7637 /* Output the code to handle the out-of-range case. */
7638 emit_jump (op0);
7639 emit_label (op1);
7640 emit_move_insn (target, const0_rtx);
7641 emit_label (op0);
7642 return target;
7645 case WITH_CLEANUP_EXPR:
7646 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7648 WITH_CLEANUP_EXPR_RTL (exp)
7649 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7650 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7651 CLEANUP_EH_ONLY (exp));
7653 /* That's it for this cleanup. */
7654 TREE_OPERAND (exp, 1) = 0;
7656 return WITH_CLEANUP_EXPR_RTL (exp);
7658 case CLEANUP_POINT_EXPR:
7660 /* Start a new binding layer that will keep track of all cleanup
7661 actions to be performed. */
7662 expand_start_bindings (2);
7664 target_temp_slot_level = temp_slot_level;
7666 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7667 /* If we're going to use this value, load it up now. */
7668 if (! ignore)
7669 op0 = force_not_mem (op0);
7670 preserve_temp_slots (op0);
7671 expand_end_bindings (NULL_TREE, 0, 0);
7673 return op0;
7675 case CALL_EXPR:
7676 /* Check for a built-in function. */
7677 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7678 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7679 == FUNCTION_DECL)
7680 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7682 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7683 == BUILT_IN_FRONTEND)
7684 return (*lang_hooks.expand_expr)
7685 (exp, original_target, tmode, modifier);
7686 else
7687 return expand_builtin (exp, target, subtarget, tmode, ignore);
7690 return expand_call (exp, target, ignore);
7692 case NON_LVALUE_EXPR:
7693 case NOP_EXPR:
7694 case CONVERT_EXPR:
7695 case REFERENCE_EXPR:
7696 if (TREE_OPERAND (exp, 0) == error_mark_node)
7697 return const0_rtx;
7699 if (TREE_CODE (type) == UNION_TYPE)
7701 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7703 /* If both input and output are BLKmode, this conversion isn't doing
7704 anything except possibly changing memory attributes. */
7705 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7707 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7708 modifier);
7710 result = copy_rtx (result);
7711 set_mem_attributes (result, exp, 0);
7712 return result;
7715 if (target == 0)
7716 target = assign_temp (type, 0, 1, 1);
7718 if (GET_CODE (target) == MEM)
7719 /* Store data into beginning of memory target. */
7720 store_expr (TREE_OPERAND (exp, 0),
7721 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7723 else if (GET_CODE (target) == REG)
7724 /* Store this field into a union of the proper type. */
7725 store_field (target,
7726 MIN ((int_size_in_bytes (TREE_TYPE
7727 (TREE_OPERAND (exp, 0)))
7728 * BITS_PER_UNIT),
7729 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7730 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7731 VOIDmode, 0, type, 0);
7732 else
7733 abort ();
7735 /* Return the entire union. */
7736 return target;
7739 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7741 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7742 modifier);
7744 /* If the signedness of the conversion differs and OP0 is
7745 a promoted SUBREG, clear that indication since we now
7746 have to do the proper extension. */
7747 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7748 && GET_CODE (op0) == SUBREG)
7749 SUBREG_PROMOTED_VAR_P (op0) = 0;
7751 return op0;
7754 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7755 if (GET_MODE (op0) == mode)
7756 return op0;
7758 /* If OP0 is a constant, just convert it into the proper mode. */
7759 if (CONSTANT_P (op0))
7761 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7762 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7764 if (modifier == EXPAND_INITIALIZER)
7765 return simplify_gen_subreg (mode, op0, inner_mode,
7766 subreg_lowpart_offset (mode,
7767 inner_mode));
7768 else
7769 return convert_modes (mode, inner_mode, op0,
7770 TREE_UNSIGNED (inner_type));
7773 if (modifier == EXPAND_INITIALIZER)
7774 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7776 if (target == 0)
7777 return
7778 convert_to_mode (mode, op0,
7779 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7780 else
7781 convert_move (target, op0,
7782 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7783 return target;
7785 case VIEW_CONVERT_EXPR:
7786 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7788 /* If the input and output modes are both the same, we are done.
7789 Otherwise, if neither mode is BLKmode and both are within a word, we
7790 can use gen_lowpart. If neither is true, make sure the operand is
7791 in memory and convert the MEM to the new mode. */
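/* Editorial illustration (not in the original source): reinterpreting a
   32-bit float object as a 32-bit unsigned int (SFmode -> SImode, both no
   wider than a word) takes the gen_lowpart path below, while a conversion
   in which either mode is BLKmode or wider than a word forces the operand
   into memory and re-reads it in the new mode.  */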
7792 if (TYPE_MODE (type) == GET_MODE (op0))
7794 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7795 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7796 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7797 op0 = gen_lowpart (TYPE_MODE (type), op0);
7798 else if (GET_CODE (op0) != MEM)
7800 /* If the operand is not a MEM, force it into memory. Since we
7801 are going to be changing the mode of the MEM, don't call
7802 force_const_mem for constants because we don't allow pool
7803 constants to change mode. */
7804 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7806 if (TREE_ADDRESSABLE (exp))
7807 abort ();
7809 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7810 target
7811 = assign_stack_temp_for_type
7812 (TYPE_MODE (inner_type),
7813 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7815 emit_move_insn (target, op0);
7816 op0 = target;
7819 /* At this point, OP0 is in the correct mode. If the output type is such
7820 that the operand is known to be aligned, indicate that it is.
7821 Otherwise, we need only be concerned about alignment for non-BLKmode
7822 results. */
7823 if (GET_CODE (op0) == MEM)
7825 op0 = copy_rtx (op0);
7827 if (TYPE_ALIGN_OK (type))
7828 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7829 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7830 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7832 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7833 HOST_WIDE_INT temp_size
7834 = MAX (int_size_in_bytes (inner_type),
7835 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7836 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7837 temp_size, 0, type);
7838 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7840 if (TREE_ADDRESSABLE (exp))
7841 abort ();
7843 if (GET_MODE (op0) == BLKmode)
7844 emit_block_move (new_with_op0_mode, op0,
7845 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7846 BLOCK_OP_NORMAL);
7847 else
7848 emit_move_insn (new_with_op0_mode, op0);
7850 op0 = new;
7853 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7856 return op0;
7858 case PLUS_EXPR:
7859 this_optab = ! unsignedp && flag_trapv
7860 && (GET_MODE_CLASS (mode) == MODE_INT)
7861 ? addv_optab : add_optab;
7863 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7864 something else, make sure we add the register to the constant and
7865 then to the other thing. This case can occur during strength
7866 reduction and doing it this way will produce better code if the
7867 frame pointer or argument pointer is eliminated.
7869 fold-const.c will ensure that the constant is always in the inner
7870 PLUS_EXPR, so the only case we need to do anything about is if
7871 sp, ap, or fp is our second argument, in which case we must swap
7872 the innermost first argument and our second argument. */
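/* Editorial example (not in the original source), where FP stands for an
   RTL_EXPR whose RTL is the frame pointer:

       before the swap:  (X + 4) + FP
       after the swap:   (FP + 4) + X

   so that FP + 4 can later be folded into a single address.  */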
7874 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7875 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7876 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7877 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7878 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7879 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7881 tree t = TREE_OPERAND (exp, 1);
7883 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7884 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7887 /* If the result is to be ptr_mode and we are adding an integer to
7888 something, we might be forming a constant. So try to use
7889 plus_constant. If it produces a sum and we can't accept it,
7890 use force_operand. This allows P = &ARR[const] to generate
7891 efficient code on machines where a SYMBOL_REF is not a valid
7892 address.
7894 If this is an EXPAND_SUM call, always return the sum. */
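/* Editorial example (not in the original source): for a hypothetical
   static array ARR of 4-byte ints,

       p = &ARR[5];

   is expanded via plus_constant on the symbol and 20, yielding
   (plus (symbol_ref "ARR") (const_int 20)); if that sum is not a valid
   address on the target, force_operand fixes it up.  */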
7895 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7896 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7898 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7899 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7900 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7902 rtx constant_part;
7904 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7905 EXPAND_SUM);
7906 /* Use immed_double_const to ensure that the constant is
7907 truncated according to the mode of OP1, then sign extended
7908 to a HOST_WIDE_INT. Using the constant directly can result
7909 in non-canonical RTL in a 64x32 cross compile. */
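/* Editorial example (not in the original source): on a 64-bit host
   compiling for a 32-bit target, the SImode constant 0x80000000 must
   become (const_int -2147483648), sign-extended to the host word, rather
   than (const_int 2147483648); immed_double_const performs exactly that
   truncation and sign extension.  */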
7910 constant_part
7911 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7912 (HOST_WIDE_INT) 0,
7913 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7914 op1 = plus_constant (op1, INTVAL (constant_part));
7915 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7916 op1 = force_operand (op1, target);
7917 return op1;
7920 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7921 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7922 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7924 rtx constant_part;
7926 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7927 (modifier == EXPAND_INITIALIZER
7928 ? EXPAND_INITIALIZER : EXPAND_SUM));
7929 if (! CONSTANT_P (op0))
7931 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7932 VOIDmode, modifier);
7933 /* Don't go to both_summands if modifier
7934 says it's not right to return a PLUS. */
7935 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7936 goto binop2;
7937 goto both_summands;
7939 /* Use immed_double_const to ensure that the constant is
7940 truncated according to the mode of OP0, then sign extended
7941 to a HOST_WIDE_INT. Using the constant directly can result
7942 in non-canonical RTL in a 64x32 cross compile. */
7943 constant_part
7944 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7945 (HOST_WIDE_INT) 0,
7946 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7947 op0 = plus_constant (op0, INTVAL (constant_part));
7948 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7949 op0 = force_operand (op0, target);
7950 return op0;
7954 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7955 subtarget = 0;
7957 /* No sense saving up arithmetic to be done
7958 if it's all in the wrong mode to form part of an address.
7959 And force_operand won't know whether to sign-extend or
7960 zero-extend. */
7961 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7962 || mode != ptr_mode)
7964 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7965 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7966 if (op0 == const0_rtx)
7967 return op1;
7968 if (op1 == const0_rtx)
7969 return op0;
7970 goto binop2;
7973 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7974 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7976 /* We come here from MINUS_EXPR when the second operand is a
7977 constant. */
7978 both_summands:
7979 /* Make sure any term that's a sum with a constant comes last. */
7980 if (GET_CODE (op0) == PLUS
7981 && CONSTANT_P (XEXP (op0, 1)))
7983 temp = op0;
7984 op0 = op1;
7985 op1 = temp;
7987 /* If adding to a sum including a constant,
7988 associate it to put the constant outside. */
7989 if (GET_CODE (op1) == PLUS
7990 && CONSTANT_P (XEXP (op1, 1)))
7992 rtx constant_term = const0_rtx;
7994 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7995 if (temp != 0)
7996 op0 = temp;
7997 /* Ensure that MULT comes first if there is one. */
7998 else if (GET_CODE (op0) == MULT)
7999 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8000 else
8001 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8003 /* Let's also eliminate constants from op0 if possible. */
8004 op0 = eliminate_constant_term (op0, &constant_term);
8006 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8007 their sum should be a constant. Form it into OP1, since the
8008 result we want will then be OP0 + OP1. */
8010 temp = simplify_binary_operation (PLUS, mode, constant_term,
8011 XEXP (op1, 1));
8012 if (temp != 0)
8013 op1 = temp;
8014 else
8015 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8018 /* Put a constant term last and put a multiplication first. */
8019 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8020 temp = op1, op1 = op0, op0 = temp;
8022 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8023 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8025 case MINUS_EXPR:
8026 /* For initializers, we are allowed to return a MINUS of two
8027 symbolic constants. Here we handle all cases when both operands
8028 are constant. */
8029 /* Handle difference of two symbolic constants,
8030 for the sake of an initializer. */
8031 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8032 && really_constant_p (TREE_OPERAND (exp, 0))
8033 && really_constant_p (TREE_OPERAND (exp, 1)))
8035 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8036 modifier);
8037 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8038 modifier);
8040 /* If the last operand is a CONST_INT, use plus_constant of
8041 the negated constant. Else make the MINUS. */
8042 if (GET_CODE (op1) == CONST_INT)
8043 return plus_constant (op0, - INTVAL (op1));
8044 else
8045 return gen_rtx_MINUS (mode, op0, op1);
8048 this_optab = ! unsignedp && flag_trapv
8049 && (GET_MODE_CLASS(mode) == MODE_INT)
8050 ? subv_optab : sub_optab;
8052 /* No sense saving up arithmetic to be done
8053 if it's all in the wrong mode to form part of an address.
8054 And force_operand won't know whether to sign-extend or
8055 zero-extend. */
8056 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8057 || mode != ptr_mode)
8058 goto binop;
8060 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8061 subtarget = 0;
8063 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8064 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8066 /* Convert A - const to A + (-const). */
8067 if (GET_CODE (op1) == CONST_INT)
8069 op1 = negate_rtx (mode, op1);
8070 goto both_summands;
8073 goto binop2;
8075 case MULT_EXPR:
8076 /* If first operand is constant, swap them.
8077 Thus the following special case checks need only
8078 check the second operand. */
8079 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8081 tree t1 = TREE_OPERAND (exp, 0);
8082 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8083 TREE_OPERAND (exp, 1) = t1;
8086 /* Attempt to return something suitable for generating an
8087 indexed address, for machines that support that. */
8089 if (modifier == EXPAND_SUM && mode == ptr_mode
8090 && host_integerp (TREE_OPERAND (exp, 1), 0))
8092 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8093 EXPAND_SUM);
8095 /* If we knew for certain that this is arithmetic for an array
8096 reference, and we knew the bounds of the array, then we could
8097 apply the distributive law across (PLUS X C) for constant C.
8098 Without such knowledge, we risk overflowing the computation
8099 when both X and C are large, but X+C isn't. */
8100 /* ??? Could perhaps special-case EXP being unsigned and C being
8101 positive. In that case we are certain that X+C is no smaller
8102 than X and so the transformed expression will overflow iff the
8103 original would have. */
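/* Editorial example (not in the original source): when expanding an
   address such as &p[i] with EXPAND_SUM, the index computation i * 4
   (for 4-byte elements) is returned here as

       (mult (reg i) (const_int 4))

   so the caller can combine it into an indexed address such as
   (plus (reg p) (mult (reg i) (const_int 4))).  */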
8105 if (GET_CODE (op0) != REG)
8106 op0 = force_operand (op0, NULL_RTX);
8107 if (GET_CODE (op0) != REG)
8108 op0 = copy_to_mode_reg (mode, op0);
8110 return
8111 gen_rtx_MULT (mode, op0,
8112 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
8115 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8116 subtarget = 0;
8118 /* Check for multiplying things that have been extended
8119 from a narrower type. If this machine supports multiplying
8120 in that narrower type with a result in the desired type,
8121 do it that way, and avoid the explicit type-conversion. */
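/* Editorial example (not in the original source): with 16-bit short,
   32-bit int, and a signed widening multiply pattern (e.g. mulhisi3),

       short a, b;
       int prod = (int) a * (int) b;

   is expanded as a single HImode x HImode -> SImode multiply instead of
   two sign extensions followed by a full SImode multiply.  */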
8122 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8123 && TREE_CODE (type) == INTEGER_TYPE
8124 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8125 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8126 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8127 && int_fits_type_p (TREE_OPERAND (exp, 1),
8128 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8129 /* Don't use a widening multiply if a shift will do. */
8130 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8131 > HOST_BITS_PER_WIDE_INT)
8132 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8134 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8135 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8137 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8138 /* If both operands are extended, they must either both
8139 be zero-extended or both be sign-extended. */
8140 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8142 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8144 enum machine_mode innermode
8145 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8146 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8147 ? smul_widen_optab : umul_widen_optab);
8148 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8149 ? umul_widen_optab : smul_widen_optab);
8150 if (mode == GET_MODE_WIDER_MODE (innermode))
8152 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8154 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8155 NULL_RTX, VOIDmode, 0);
8156 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8157 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8158 VOIDmode, 0);
8159 else
8160 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8161 NULL_RTX, VOIDmode, 0);
8162 goto binop2;
8164 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8165 && innermode == word_mode)
8167 rtx htem;
8168 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8169 NULL_RTX, VOIDmode, 0);
8170 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8171 op1 = convert_modes (innermode, mode,
8172 expand_expr (TREE_OPERAND (exp, 1),
8173 NULL_RTX, VOIDmode, 0),
8174 unsignedp);
8175 else
8176 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8177 NULL_RTX, VOIDmode, 0);
8178 temp = expand_binop (mode, other_optab, op0, op1, target,
8179 unsignedp, OPTAB_LIB_WIDEN);
8180 htem = expand_mult_highpart_adjust (innermode,
8181 gen_highpart (innermode, temp),
8182 op0, op1,
8183 gen_highpart (innermode, temp),
8184 unsignedp);
8185 emit_move_insn (gen_highpart (innermode, temp), htem);
8186 return temp;
8190 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8191 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8192 return expand_mult (mode, op0, op1, target, unsignedp);
8194 case TRUNC_DIV_EXPR:
8195 case FLOOR_DIV_EXPR:
8196 case CEIL_DIV_EXPR:
8197 case ROUND_DIV_EXPR:
8198 case EXACT_DIV_EXPR:
8199 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8200 subtarget = 0;
8201 /* Possible optimization: compute the dividend with EXPAND_SUM
8202 then if the divisor is constant can optimize the case
8203 where some terms of the dividend have coeffs divisible by it. */
8204 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8205 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8206 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8208 case RDIV_EXPR:
8209 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8210 saving an expensive divide. If not, combine will rebuild the original
8211 computation. */
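/* Editorial example (not in the original source): under
   -funsafe-math-optimizations, x / y is rewritten here as x * (1.0 / y),
   so that several divisions by the same y can later share a single
   reciprocal via CSE.  */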
8212 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8213 && TREE_CODE (type) == REAL_TYPE
8214 && !real_onep (TREE_OPERAND (exp, 0)))
8215 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8216 build (RDIV_EXPR, type,
8217 build_real (type, dconst1),
8218 TREE_OPERAND (exp, 1))),
8219 target, tmode, unsignedp);
8220 this_optab = sdiv_optab;
8221 goto binop;
8223 case TRUNC_MOD_EXPR:
8224 case FLOOR_MOD_EXPR:
8225 case CEIL_MOD_EXPR:
8226 case ROUND_MOD_EXPR:
8227 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8228 subtarget = 0;
8229 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8230 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8231 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8233 case FIX_ROUND_EXPR:
8234 case FIX_FLOOR_EXPR:
8235 case FIX_CEIL_EXPR:
8236 abort (); /* Not used for C. */
8238 case FIX_TRUNC_EXPR:
8239 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8240 if (target == 0)
8241 target = gen_reg_rtx (mode);
8242 expand_fix (target, op0, unsignedp);
8243 return target;
8245 case FLOAT_EXPR:
8246 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8247 if (target == 0)
8248 target = gen_reg_rtx (mode);
8249 /* expand_float can't figure out what to do if FROM has VOIDmode.
8250 So give it the correct mode. With -O, cse will optimize this. */
8251 if (GET_MODE (op0) == VOIDmode)
8252 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8253 op0);
8254 expand_float (target, op0,
8255 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8256 return target;
8258 case NEGATE_EXPR:
8259 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8260 temp = expand_unop (mode,
8261 ! unsignedp && flag_trapv
8262 && (GET_MODE_CLASS(mode) == MODE_INT)
8263 ? negv_optab : neg_optab, op0, target, 0);
8264 if (temp == 0)
8265 abort ();
8266 return temp;
8268 case ABS_EXPR:
8269 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8271 /* Handle complex values specially. */
8272 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8273 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8274 return expand_complex_abs (mode, op0, target, unsignedp);
8276 /* Unsigned abs is simply the operand. Testing here means we don't
8277 risk generating incorrect code below. */
8278 if (TREE_UNSIGNED (type))
8279 return op0;
8281 return expand_abs (mode, op0, target, unsignedp,
8282 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8284 case MAX_EXPR:
8285 case MIN_EXPR:
8286 target = original_target;
8287 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8288 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8289 || GET_MODE (target) != mode
8290 || (GET_CODE (target) == REG
8291 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8292 target = gen_reg_rtx (mode);
8293 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8294 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8296 /* First try to do it with a special MIN or MAX instruction.
8297 If that does not win, use a conditional jump to select the proper
8298 value. */
8299 this_optab = (TREE_UNSIGNED (type)
8300 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8301 : (code == MIN_EXPR ? smin_optab : smax_optab));
8303 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8304 OPTAB_WIDEN);
8305 if (temp != 0)
8306 return temp;
8308 /* At this point, a MEM target is no longer useful; we will get better
8309 code without it. */
8311 if (GET_CODE (target) == MEM)
8312 target = gen_reg_rtx (mode);
8314 if (target != op0)
8315 emit_move_insn (target, op0);
8317 op0 = gen_label_rtx ();
8319 /* If this mode is an integer too wide to compare properly,
8320 compare word by word. Rely on cse to optimize constant cases. */
8321 if (GET_MODE_CLASS (mode) == MODE_INT
8322 && ! can_compare_p (GE, mode, ccp_jump))
8324 if (code == MAX_EXPR)
8325 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8326 target, op1, NULL_RTX, op0);
8327 else
8328 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8329 op1, target, NULL_RTX, op0);
8331 else
8333 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8334 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8335 unsignedp, mode, NULL_RTX, NULL_RTX,
8336 op0);
8338 emit_move_insn (target, op1);
8339 emit_label (op0);
8340 return target;
8342 case BIT_NOT_EXPR:
8343 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8344 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8345 if (temp == 0)
8346 abort ();
8347 return temp;
8349 case FFS_EXPR:
8350 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8351 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8352 if (temp == 0)
8353 abort ();
8354 return temp;
8356 /* ??? Can optimize bitwise operations with one arg constant.
8357 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8358 and (a bitwise1 b) bitwise2 b (etc)
8359 but that is probably not worth while. */
8361 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8362 boolean values when we want in all cases to compute both of them. In
8363 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8364 as actual zero-or-1 values and then bitwise anding. In cases where
8365 there cannot be any side effects, better code would be made by
8366 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8367 how to recognize those cases. */
8369 case TRUTH_AND_EXPR:
8370 case BIT_AND_EXPR:
8371 this_optab = and_optab;
8372 goto binop;
8374 case TRUTH_OR_EXPR:
8375 case BIT_IOR_EXPR:
8376 this_optab = ior_optab;
8377 goto binop;
8379 case TRUTH_XOR_EXPR:
8380 case BIT_XOR_EXPR:
8381 this_optab = xor_optab;
8382 goto binop;
8384 case LSHIFT_EXPR:
8385 case RSHIFT_EXPR:
8386 case LROTATE_EXPR:
8387 case RROTATE_EXPR:
8388 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8389 subtarget = 0;
8390 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8391 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8392 unsignedp);
8394 /* Could determine the answer when only additive constants differ. Also,
8395 the addition of one can be handled by changing the condition. */
8396 case LT_EXPR:
8397 case LE_EXPR:
8398 case GT_EXPR:
8399 case GE_EXPR:
8400 case EQ_EXPR:
8401 case NE_EXPR:
8402 case UNORDERED_EXPR:
8403 case ORDERED_EXPR:
8404 case UNLT_EXPR:
8405 case UNLE_EXPR:
8406 case UNGT_EXPR:
8407 case UNGE_EXPR:
8408 case UNEQ_EXPR:
8409 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8410 if (temp != 0)
8411 return temp;
8413 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8414 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8415 && original_target
8416 && GET_CODE (original_target) == REG
8417 && (GET_MODE (original_target)
8418 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8420 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8421 VOIDmode, 0);
8423 /* If temp is constant, we can just compute the result. */
8424 if (GET_CODE (temp) == CONST_INT)
8426 if (INTVAL (temp) != 0)
8427 emit_move_insn (target, const1_rtx);
8428 else
8429 emit_move_insn (target, const0_rtx);
8431 return target;
8434 if (temp != original_target)
8436 enum machine_mode mode1 = GET_MODE (temp);
8437 if (mode1 == VOIDmode)
8438 mode1 = tmode != VOIDmode ? tmode : mode;
8440 temp = copy_to_mode_reg (mode1, temp);
8443 op1 = gen_label_rtx ();
8444 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8445 GET_MODE (temp), unsignedp, op1);
8446 emit_move_insn (temp, const1_rtx);
8447 emit_label (op1);
8448 return temp;
8451 /* If no set-flag instruction, must generate a conditional
8452 store into a temporary variable. Drop through
8453 and handle this like && and ||. */
8455 case TRUTH_ANDIF_EXPR:
8456 case TRUTH_ORIF_EXPR:
8457 if (! ignore
8458 && (target == 0 || ! safe_from_p (target, exp, 1)
8459 /* Make sure we don't have a hard reg (such as function's return
8460 value) live across basic blocks, if not optimizing. */
8461 || (!optimize && GET_CODE (target) == REG
8462 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8463 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8465 if (target)
8466 emit_clr_insn (target);
8468 op1 = gen_label_rtx ();
8469 jumpifnot (exp, op1);
8471 if (target)
8472 emit_0_to_1_insn (target);
8474 emit_label (op1);
8475 return ignore ? const0_rtx : target;
8477 case TRUTH_NOT_EXPR:
8478 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8479 /* The parser is careful to generate TRUTH_NOT_EXPR
8480 only with operands that are always zero or one. */
8481 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8482 target, 1, OPTAB_LIB_WIDEN);
8483 if (temp == 0)
8484 abort ();
8485 return temp;
8487 case COMPOUND_EXPR:
8488 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8489 emit_queue ();
8490 return expand_expr (TREE_OPERAND (exp, 1),
8491 (ignore ? const0_rtx : target),
8492 VOIDmode, 0);
8494 case COND_EXPR:
8495 /* If we would have a "singleton" (see below) were it not for a
8496 conversion in each arm, bring that conversion back out. */
8497 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8498 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8499 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8500 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8502 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8503 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8505 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8506 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8507 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8508 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8509 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8510 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8511 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8512 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8513 return expand_expr (build1 (NOP_EXPR, type,
8514 build (COND_EXPR, TREE_TYPE (iftrue),
8515 TREE_OPERAND (exp, 0),
8516 iftrue, iffalse)),
8517 target, tmode, modifier);
8521 /* Note that COND_EXPRs whose type is a structure or union
8522 are required to be constructed to contain assignments of
8523 a temporary variable, so that we can evaluate them here
8524 for side effect only. If type is void, we must do likewise. */
8526 /* If an arm of the branch requires a cleanup,
8527 only that cleanup is performed. */
8529 tree singleton = 0;
8530 tree binary_op = 0, unary_op = 0;
8532 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8533 convert it to our mode, if necessary. */
8534 if (integer_onep (TREE_OPERAND (exp, 1))
8535 && integer_zerop (TREE_OPERAND (exp, 2))
8536 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8538 if (ignore)
8540 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8541 modifier);
8542 return const0_rtx;
8545 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8546 if (GET_MODE (op0) == mode)
8547 return op0;
8549 if (target == 0)
8550 target = gen_reg_rtx (mode);
8551 convert_move (target, op0, unsignedp);
8552 return target;
8555 /* Check for X ? A + B : A. If we have this, we can copy A to the
8556 output and conditionally add B. Similarly for unary operations.
8557 Don't do this if X has side-effects because those side effects
8558 might affect A or B and the "?" operation is a sequence point in
8559 ANSI. (operand_equal_p tests for side effects.) */
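/* Editorial example (not in the original source): for

       y = x ? a + b : a;

   the code below stores A into the target first and conditionally applies
   the binary operation, roughly

       y = a;  if (x) y = y + b;

   rather than evaluating A in both arms.  */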
8561 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8562 && operand_equal_p (TREE_OPERAND (exp, 2),
8563 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8564 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8565 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8566 && operand_equal_p (TREE_OPERAND (exp, 1),
8567 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8568 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8569 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8570 && operand_equal_p (TREE_OPERAND (exp, 2),
8571 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8572 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8573 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8574 && operand_equal_p (TREE_OPERAND (exp, 1),
8575 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8576 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8578 /* If we are not to produce a result, we have no target. Otherwise,
8579 if a target was specified use it; it will not be used as an
8580 intermediate target unless it is safe. If no target, use a
8581 temporary. */
8583 if (ignore)
8584 temp = 0;
8585 else if (original_target
8586 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8587 || (singleton && GET_CODE (original_target) == REG
8588 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8589 && original_target == var_rtx (singleton)))
8590 && GET_MODE (original_target) == mode
8591 #ifdef HAVE_conditional_move
8592 && (! can_conditionally_move_p (mode)
8593 || GET_CODE (original_target) == REG
8594 || TREE_ADDRESSABLE (type))
8595 #endif
8596 && (GET_CODE (original_target) != MEM
8597 || TREE_ADDRESSABLE (type)))
8598 temp = original_target;
8599 else if (TREE_ADDRESSABLE (type))
8600 abort ();
8601 else
8602 temp = assign_temp (type, 0, 0, 1);
8604 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8605 do the test of X as a store-flag operation, do this as
8606 A + ((X != 0) << log C). Similarly for other simple binary
8607 operators. Only do for C == 1 if BRANCH_COST is low. */
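/* Editorial example (not in the original source): for

       y = x ? a + 4 : a;

   on a machine with a store-flag instruction this emits, branch-free,

       y = a + ((x != 0) << 2);

   since 4 == 1 << 2.  */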
8608 if (temp && singleton && binary_op
8609 && (TREE_CODE (binary_op) == PLUS_EXPR
8610 || TREE_CODE (binary_op) == MINUS_EXPR
8611 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8612 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8613 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8614 : integer_onep (TREE_OPERAND (binary_op, 1)))
8615 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8617 rtx result;
8618 tree cond;
8619 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8620 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8621 ? addv_optab : add_optab)
8622 : TREE_CODE (binary_op) == MINUS_EXPR
8623 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8624 ? subv_optab : sub_optab)
8625 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8626 : xor_optab);
8628 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8629 if (singleton == TREE_OPERAND (exp, 1))
8630 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8631 else
8632 cond = TREE_OPERAND (exp, 0);
8634 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8635 ? temp : NULL_RTX),
8636 mode, BRANCH_COST <= 1);
8638 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8639 result = expand_shift (LSHIFT_EXPR, mode, result,
8640 build_int_2 (tree_log2
8641 (TREE_OPERAND
8642 (binary_op, 1)),
8644 (safe_from_p (temp, singleton, 1)
8645 ? temp : NULL_RTX), 0);
8647 if (result)
8649 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8650 return expand_binop (mode, boptab, op1, result, temp,
8651 unsignedp, OPTAB_LIB_WIDEN);
8655 do_pending_stack_adjust ();
8656 NO_DEFER_POP;
8657 op0 = gen_label_rtx ();
8659 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8661 if (temp != 0)
8663 /* If the target conflicts with the other operand of the
8664 binary op, we can't use it. Also, we can't use the target
8665 if it is a hard register, because evaluating the condition
8666 might clobber it. */
8667 if ((binary_op
8668 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8669 || (GET_CODE (temp) == REG
8670 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8671 temp = gen_reg_rtx (mode);
8672 store_expr (singleton, temp, 0);
8674 else
8675 expand_expr (singleton,
8676 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8677 if (singleton == TREE_OPERAND (exp, 1))
8678 jumpif (TREE_OPERAND (exp, 0), op0);
8679 else
8680 jumpifnot (TREE_OPERAND (exp, 0), op0);
8682 start_cleanup_deferral ();
8683 if (binary_op && temp == 0)
8684 /* Just touch the other operand. */
8685 expand_expr (TREE_OPERAND (binary_op, 1),
8686 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8687 else if (binary_op)
8688 store_expr (build (TREE_CODE (binary_op), type,
8689 make_tree (type, temp),
8690 TREE_OPERAND (binary_op, 1)),
8691 temp, 0);
8692 else
8693 store_expr (build1 (TREE_CODE (unary_op), type,
8694 make_tree (type, temp)),
8695 temp, 0);
8696 op1 = op0;
8698 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8699 comparison operator. If we have one of these cases, set the
8700 output to A, branch on A (cse will merge these two references),
8701 then set the output to FOO. */
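/* Editorial example (not in the original source): for

       y = (a != 0) ? a : foo;

   the code below emits roughly

       y = a;  if (a != 0) goto done;  y = foo;  done:

   letting cse merge the two references to A.  */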
8702 else if (temp
8703 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8704 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8705 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8706 TREE_OPERAND (exp, 1), 0)
8707 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8708 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8709 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8711 if (GET_CODE (temp) == REG
8712 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8713 temp = gen_reg_rtx (mode);
8714 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8715 jumpif (TREE_OPERAND (exp, 0), op0);
8717 start_cleanup_deferral ();
8718 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8719 op1 = op0;
8721 else if (temp
8722 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8723 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8724 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8725 TREE_OPERAND (exp, 2), 0)
8726 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8727 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8728 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8730 if (GET_CODE (temp) == REG
8731 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8732 temp = gen_reg_rtx (mode);
8733 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8734 jumpifnot (TREE_OPERAND (exp, 0), op0);
8736 start_cleanup_deferral ();
8737 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8738 op1 = op0;
8740 else
8742 op1 = gen_label_rtx ();
8743 jumpifnot (TREE_OPERAND (exp, 0), op0);
8745 start_cleanup_deferral ();
8747 /* One branch of the cond can be void, if it never returns. For
8748 example A ? throw : E */
8749 if (temp != 0
8750 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8751 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8752 else
8753 expand_expr (TREE_OPERAND (exp, 1),
8754 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8755 end_cleanup_deferral ();
8756 emit_queue ();
8757 emit_jump_insn (gen_jump (op1));
8758 emit_barrier ();
8759 emit_label (op0);
8760 start_cleanup_deferral ();
8761 if (temp != 0
8762 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8763 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8764 else
8765 expand_expr (TREE_OPERAND (exp, 2),
8766 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8769 end_cleanup_deferral ();
8771 emit_queue ();
8772 emit_label (op1);
8773 OK_DEFER_POP;
8775 return temp;
8778 case TARGET_EXPR:
8780 /* Something needs to be initialized, but we didn't know
8781 where that thing was when building the tree. For example,
8782 it could be the return value of a function, or a parameter
8783 to a function which is laid out on the stack, or a temporary
8784 variable which must be passed by reference.
8786 We guarantee that the expression will either be constructed
8787 or copied into our original target. */
8789 tree slot = TREE_OPERAND (exp, 0);
8790 tree cleanups = NULL_TREE;
8791 tree exp1;
8793 if (TREE_CODE (slot) != VAR_DECL)
8794 abort ();
8796 if (! ignore)
8797 target = original_target;
8799 /* Set this here so that if we get a target that refers to a
8800 register variable that's already been used, put_reg_into_stack
8801 knows that it should fix up those uses. */
8802 TREE_USED (slot) = 1;
8804 if (target == 0)
8806 if (DECL_RTL_SET_P (slot))
8808 target = DECL_RTL (slot);
8809 /* If we have already expanded the slot, don't do
8810 it again. (mrs) */
8811 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8812 return target;
8814 else
8816 target = assign_temp (type, 2, 0, 1);
8817 /* All temp slots at this level must not conflict. */
8818 preserve_temp_slots (target);
8819 SET_DECL_RTL (slot, target);
8820 if (TREE_ADDRESSABLE (slot))
8821 put_var_into_stack (slot);
8823 /* Since SLOT is not known to the called function
8824 to belong to its stack frame, we must build an explicit
8825 cleanup. This case occurs when we must build up a reference
8826 to pass the reference as an argument. In this case,
8827 it is very likely that such a reference need not be
8828 built here. */
8830 if (TREE_OPERAND (exp, 2) == 0)
8831 TREE_OPERAND (exp, 2)
8832 = (*lang_hooks.maybe_build_cleanup) (slot);
8833 cleanups = TREE_OPERAND (exp, 2);
8836 else
8838 /* This case does occur when expanding a parameter which
8839 needs to be constructed on the stack. The target
8840 is the actual stack address that we want to initialize.
8841 The function we call will perform the cleanup in this case. */
8843 /* If we have already assigned it space, use that space,
8844 not the target that we were passed in, as our target
8845 parameter is only a hint. */
8846 if (DECL_RTL_SET_P (slot))
8848 target = DECL_RTL (slot);
8849 /* If we have already expanded the slot, don't do
8850 it again. (mrs) */
8851 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8852 return target;
8854 else
8856 SET_DECL_RTL (slot, target);
8857 /* If we must have an addressable slot, then make sure that
8858 the RTL that we just stored in slot is OK. */
8859 if (TREE_ADDRESSABLE (slot))
8860 put_var_into_stack (slot);
8864 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8865 /* Mark it as expanded. */
8866 TREE_OPERAND (exp, 1) = NULL_TREE;
8868 store_expr (exp1, target, 0);
8870 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8872 return target;
8875 case INIT_EXPR:
8877 tree lhs = TREE_OPERAND (exp, 0);
8878 tree rhs = TREE_OPERAND (exp, 1);
8880 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8881 return temp;
8884 case MODIFY_EXPR:
8886 /* If lhs is complex, expand calls in rhs before computing it.
8887 That's so we don't compute a pointer and save it over a
8888 call. If lhs is simple, compute it first so we can give it
8889 as a target if the rhs is just a call. This avoids an
8890 extra temp and copy and that prevents a partial-subsumption
8891 which makes bad code. Actually we could treat
8892 component_ref's of vars like vars. */
8894 tree lhs = TREE_OPERAND (exp, 0);
8895 tree rhs = TREE_OPERAND (exp, 1);
8897 temp = 0;
8899 /* Check for |= or &= of a bitfield of size one into another bitfield
8900 of size 1. In this case, (unless we need the result of the
8901 assignment) we can do this more efficiently with a
8902 test followed by an assignment, if necessary.
8904 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8905 things change so we do, this code should be enhanced to
8906 support it. */
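/* Editorial example (not in the original source): for one-bit bitfields
   a.x and b.y,

       a.x |= b.y;

   is emitted as a test and a conditional store, roughly

       if (b.y) a.x = 1;

   and likewise a.x &= b.y becomes: if (! b.y) a.x = 0;  */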
8907 if (ignore
8908 && TREE_CODE (lhs) == COMPONENT_REF
8909 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8910 || TREE_CODE (rhs) == BIT_AND_EXPR)
8911 && TREE_OPERAND (rhs, 0) == lhs
8912 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8913 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8914 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8916 rtx label = gen_label_rtx ();
8918 do_jump (TREE_OPERAND (rhs, 1),
8919 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8920 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8921 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8922 (TREE_CODE (rhs) == BIT_IOR_EXPR
8923 ? integer_one_node
8924 : integer_zero_node)),
8925 0, 0);
8926 do_pending_stack_adjust ();
8927 emit_label (label);
8928 return const0_rtx;
8931 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8933 return temp;
8936 case RETURN_EXPR:
8937 if (!TREE_OPERAND (exp, 0))
8938 expand_null_return ();
8939 else
8940 expand_return (TREE_OPERAND (exp, 0));
8941 return const0_rtx;
8943 case PREINCREMENT_EXPR:
8944 case PREDECREMENT_EXPR:
8945 return expand_increment (exp, 0, ignore);
8947 case POSTINCREMENT_EXPR:
8948 case POSTDECREMENT_EXPR:
8949 /* Faster to treat as pre-increment if result is not used. */
8950 return expand_increment (exp, ! ignore, ignore);
8952 case ADDR_EXPR:
8953 /* Are we taking the address of a nested function? */
8954 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8955 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8956 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8957 && ! TREE_STATIC (exp))
8959 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8960 op0 = force_operand (op0, target);
8962 /* If we are taking the address of something erroneous, just
8963 return a zero. */
8964 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8965 return const0_rtx;
8966 /* If we are taking the address of a constant and are at the
8967 top level, we have to use output_constant_def since we can't
8968 call force_const_mem at top level. */
8969 else if (cfun == 0
8970 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8971 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8972 == 'c')))
8973 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8974 else
8976 /* We make sure to pass const0_rtx down if we came in with
8977 ignore set, to avoid doing the cleanups twice for something. */
8978 op0 = expand_expr (TREE_OPERAND (exp, 0),
8979 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8980 (modifier == EXPAND_INITIALIZER
8981 ? modifier : EXPAND_CONST_ADDRESS));
8983 /* If we are going to ignore the result, OP0 will have been set
8984 to const0_rtx, so just return it. Don't get confused and
8985 think we are taking the address of the constant. */
8986 if (ignore)
8987 return op0;
8989 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8990 clever and returns a REG when given a MEM. */
8991 op0 = protect_from_queue (op0, 1);
8993 /* We would like the object in memory. If it is a constant, we can
8994 have it be statically allocated into memory. For a non-constant,
8995 we need to allocate some memory and store the value into it. */
8997 if (CONSTANT_P (op0))
8998 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8999 op0);
9000 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9001 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9002 || GET_CODE (op0) == PARALLEL)
9004 /* If the operand is a SAVE_EXPR, we can deal with this by
9005 forcing the SAVE_EXPR into memory. */
9006 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9008 put_var_into_stack (TREE_OPERAND (exp, 0));
9009 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9011 else
9013 /* If this object is in a register, it can't be BLKmode. */
9014 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9015 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9017 if (GET_CODE (op0) == PARALLEL)
9018 /* Handle calls that pass values in multiple
9019 non-contiguous locations. The Irix 6 ABI has examples
9020 of this. */
9021 emit_group_store (memloc, op0,
9022 int_size_in_bytes (inner_type));
9023 else
9024 emit_move_insn (memloc, op0);
9026 op0 = memloc;
9030 if (GET_CODE (op0) != MEM)
9031 abort ();
9033 mark_temp_addr_taken (op0);
9034 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9036 op0 = XEXP (op0, 0);
9037 #ifdef POINTERS_EXTEND_UNSIGNED
9038 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9039 && mode == ptr_mode)
9040 op0 = convert_memory_address (ptr_mode, op0);
9041 #endif
9042 return op0;
9045 /* If OP0 is not aligned at least as much as the type requires, we
9046 need to make a temporary, copy OP0 to it, and take the address of
9047 the temporary. We want to use the alignment of the type, not of
9048 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9049 the test for BLKmode means that can't happen. The test for
9050 BLKmode is because we never make mis-aligned MEMs with
9051 non-BLKmode.
9053 We don't need to do this at all if the machine doesn't have
9054 strict alignment. */
9055 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9056 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9057 > MEM_ALIGN (op0))
9058 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9060 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9061 rtx new
9062 = assign_stack_temp_for_type
9063 (TYPE_MODE (inner_type),
9064 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9065 : int_size_in_bytes (inner_type),
9066 1, build_qualified_type (inner_type,
9067 (TYPE_QUALS (inner_type)
9068 | TYPE_QUAL_CONST)));
9070 if (TYPE_ALIGN_OK (inner_type))
9071 abort ();
9073 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9074 BLOCK_OP_NORMAL);
9075 op0 = new;
9078 op0 = force_operand (XEXP (op0, 0), target);
9081 if (flag_force_addr
9082 && GET_CODE (op0) != REG
9083 && modifier != EXPAND_CONST_ADDRESS
9084 && modifier != EXPAND_INITIALIZER
9085 && modifier != EXPAND_SUM)
9086 op0 = force_reg (Pmode, op0);
9088 if (GET_CODE (op0) == REG
9089 && ! REG_USERVAR_P (op0))
9090 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9092 #ifdef POINTERS_EXTEND_UNSIGNED
9093 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9094 && mode == ptr_mode)
9095 op0 = convert_memory_address (ptr_mode, op0);
9096 #endif
9098 return op0;
9100 case ENTRY_VALUE_EXPR:
9101 abort ();
9103 /* COMPLEX type for Extended Pascal & Fortran */
9104 case COMPLEX_EXPR:
9106 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9107 rtx insns;
9109 /* Get the rtx code of the operands. */
9110 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9111 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9113 if (! target)
9114 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9116 start_sequence ();
9118 /* Move the real (op0) and imaginary (op1) parts to their location. */
9119 emit_move_insn (gen_realpart (mode, target), op0);
9120 emit_move_insn (gen_imagpart (mode, target), op1);
9122 insns = get_insns ();
9123 end_sequence ();
9125 /* Complex construction should appear as a single unit. */
9126 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9127 each with a separate pseudo as destination.
9128 It's not correct for flow to treat them as a unit. */
9129 if (GET_CODE (target) != CONCAT)
9130 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9131 else
9132 emit_insn (insns);
9134 return target;
9137 case REALPART_EXPR:
9138 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9139 return gen_realpart (mode, op0);
9141 case IMAGPART_EXPR:
9142 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9143 return gen_imagpart (mode, op0);
9145 case CONJ_EXPR:
9147 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9148 rtx imag_t;
9149 rtx insns;
9151 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9153 if (! target)
9154 target = gen_reg_rtx (mode);
9156 start_sequence ();
9158 /* Store the realpart and the negated imagpart to target. */
9159 emit_move_insn (gen_realpart (partmode, target),
9160 gen_realpart (partmode, op0));
9162 imag_t = gen_imagpart (partmode, target);
9163 temp = expand_unop (partmode,
9164 ! unsignedp && flag_trapv
9165 && (GET_MODE_CLASS(partmode) == MODE_INT)
9166 ? negv_optab : neg_optab,
9167 gen_imagpart (partmode, op0), imag_t, 0);
9168 if (temp != imag_t)
9169 emit_move_insn (imag_t, temp);
9171 insns = get_insns ();
9172 end_sequence ();
 9174 /* Conjugate should appear as a single unit.
9175 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9176 each with a separate pseudo as destination.
9177 It's not correct for flow to treat them as a unit. */
9178 if (GET_CODE (target) != CONCAT)
9179 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9180 else
9181 emit_insn (insns);
9183 return target;
9186 case TRY_CATCH_EXPR:
9188 tree handler = TREE_OPERAND (exp, 1);
9190 expand_eh_region_start ();
9192 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9194 expand_eh_region_end_cleanup (handler);
9196 return op0;
9199 case TRY_FINALLY_EXPR:
9201 tree try_block = TREE_OPERAND (exp, 0);
9202 tree finally_block = TREE_OPERAND (exp, 1);
9204 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9206 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9207 is not sufficient, so we cannot expand the block twice.
9208 So we play games with GOTO_SUBROUTINE_EXPR to let us
9209 expand the thing only once. */
9210 /* When not optimizing, we go ahead with this form since
9211 (1) user breakpoints operate more predictably without
9212 code duplication, and
9213 (2) we're not running any of the global optimizers
9214 that would explode in time/space with the highly
9215 connected CFG created by the indirect branching. */
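 /* Roughly, the expansion below looks like this (labels illustrative):
    each exit from the binding contour runs the cleanup, which sets
    RETURN_LINK to a fresh resume label and jumps to FINALLY_LABEL; the
    normal path then jumps to DONE_LABEL; FINALLY_BLOCK is emitted once
    at FINALLY_LABEL and ends with an indirect jump through RETURN_LINK;
    DONE_LABEL follows.  */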
9217 rtx finally_label = gen_label_rtx ();
9218 rtx done_label = gen_label_rtx ();
9219 rtx return_link = gen_reg_rtx (Pmode);
9220 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9221 (tree) finally_label, (tree) return_link);
9222 TREE_SIDE_EFFECTS (cleanup) = 1;
9224 /* Start a new binding layer that will keep track of all cleanup
9225 actions to be performed. */
9226 expand_start_bindings (2);
9227 target_temp_slot_level = temp_slot_level;
9229 expand_decl_cleanup (NULL_TREE, cleanup);
9230 op0 = expand_expr (try_block, target, tmode, modifier);
9232 preserve_temp_slots (op0);
9233 expand_end_bindings (NULL_TREE, 0, 0);
9234 emit_jump (done_label);
9235 emit_label (finally_label);
9236 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9237 emit_indirect_jump (return_link);
9238 emit_label (done_label);
9240 else
9242 expand_start_bindings (2);
9243 target_temp_slot_level = temp_slot_level;
9245 expand_decl_cleanup (NULL_TREE, finally_block);
9246 op0 = expand_expr (try_block, target, tmode, modifier);
9248 preserve_temp_slots (op0);
9249 expand_end_bindings (NULL_TREE, 0, 0);
9252 return op0;
9255 case GOTO_SUBROUTINE_EXPR:
9257 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9258 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9259 rtx return_address = gen_label_rtx ();
9260 emit_move_insn (return_link,
9261 gen_rtx_LABEL_REF (Pmode, return_address));
9262 emit_jump (subr);
9263 emit_label (return_address);
9264 return const0_rtx;
9267 case VA_ARG_EXPR:
9268 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9270 case EXC_PTR_EXPR:
9271 return get_exception_pointer (cfun);
9273 case FDESC_EXPR:
 9274 /* Function descriptors are not valid except as
9275 initialization constants, and should not be expanded. */
9276 abort ();
9278 default:
9279 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9282 /* Here to do an ordinary binary operator, generating an instruction
9283 from the optab already placed in `this_optab'. */
9284 binop:
9285 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9286 subtarget = 0;
9287 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9288 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9289 binop2:
9290 temp = expand_binop (mode, this_optab, op0, op1, target,
9291 unsignedp, OPTAB_LIB_WIDEN);
9292 if (temp == 0)
9293 abort ();
9294 return temp;
 9297 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
 9298 when applied to the address of EXP, produces an address known to be
9299 aligned more than BIGGEST_ALIGNMENT. */
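 /* That is, OFFSET is accepted when it has the form (-ADDR) & MASK, where
    ADDR is the address of EXP (or of a PLACEHOLDER_EXPR of the same type)
    and MASK is a constant of the form 2**N - 1 with 2**N greater than
    BIGGEST_ALIGNMENT; adding such an offset to ADDR rounds it up to a
    2**N boundary.  */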
9301 static int
9302 is_aligning_offset (offset, exp)
9303 tree offset;
9304 tree exp;
9306 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9307 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9308 || TREE_CODE (offset) == NOP_EXPR
9309 || TREE_CODE (offset) == CONVERT_EXPR
9310 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9311 offset = TREE_OPERAND (offset, 0);
9313 /* We must now have a BIT_AND_EXPR with a constant that is one less than
 9314 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9315 if (TREE_CODE (offset) != BIT_AND_EXPR
9316 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9317 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
 9318 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9319 return 0;
9321 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9322 It must be NEGATE_EXPR. Then strip any more conversions. */
9323 offset = TREE_OPERAND (offset, 0);
9324 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9325 || TREE_CODE (offset) == NOP_EXPR
9326 || TREE_CODE (offset) == CONVERT_EXPR)
9327 offset = TREE_OPERAND (offset, 0);
9329 if (TREE_CODE (offset) != NEGATE_EXPR)
9330 return 0;
9332 offset = TREE_OPERAND (offset, 0);
9333 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9334 || TREE_CODE (offset) == NOP_EXPR
9335 || TREE_CODE (offset) == CONVERT_EXPR)
9336 offset = TREE_OPERAND (offset, 0);
9338 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9339 whose type is the same as EXP. */
9340 return (TREE_CODE (offset) == ADDR_EXPR
9341 && (TREE_OPERAND (offset, 0) == exp
9342 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9343 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9344 == TREE_TYPE (exp)))));
9347 /* Return the tree node if an ARG corresponds to a string constant or zero
9348 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9349 in bytes within the string that ARG is accessing. The type of the
9350 offset will be `sizetype'. */
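 /* For example, if ARG is the tree for "hello" + 2 (the ADDR_EXPR of the
    STRING_CST plus the constant 2, in either operand order), we return
    the STRING_CST and set *PTR_OFFSET to a sizetype 2.  */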
9352 tree
9353 string_constant (arg, ptr_offset)
9354 tree arg;
9355 tree *ptr_offset;
9357 STRIP_NOPS (arg);
9359 if (TREE_CODE (arg) == ADDR_EXPR
9360 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9362 *ptr_offset = size_zero_node;
9363 return TREE_OPERAND (arg, 0);
9365 else if (TREE_CODE (arg) == PLUS_EXPR)
9367 tree arg0 = TREE_OPERAND (arg, 0);
9368 tree arg1 = TREE_OPERAND (arg, 1);
9370 STRIP_NOPS (arg0);
9371 STRIP_NOPS (arg1);
9373 if (TREE_CODE (arg0) == ADDR_EXPR
9374 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9376 *ptr_offset = convert (sizetype, arg1);
9377 return TREE_OPERAND (arg0, 0);
9379 else if (TREE_CODE (arg1) == ADDR_EXPR
9380 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9382 *ptr_offset = convert (sizetype, arg0);
9383 return TREE_OPERAND (arg1, 0);
9387 return 0;
9390 /* Expand code for a post- or pre- increment or decrement
9391 and return the RTX for the result.
9392 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9394 static rtx
9395 expand_increment (exp, post, ignore)
9396 tree exp;
9397 int post, ignore;
9399 rtx op0, op1;
9400 rtx temp, value;
9401 tree incremented = TREE_OPERAND (exp, 0);
9402 optab this_optab = add_optab;
9403 int icode;
9404 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9405 int op0_is_copy = 0;
9406 int single_insn = 0;
9407 /* 1 means we can't store into OP0 directly,
9408 because it is a subreg narrower than a word,
9409 and we don't dare clobber the rest of the word. */
9410 int bad_subreg = 0;
9412 /* Stabilize any component ref that might need to be
9413 evaluated more than once below. */
9414 if (!post
9415 || TREE_CODE (incremented) == BIT_FIELD_REF
9416 || (TREE_CODE (incremented) == COMPONENT_REF
9417 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9418 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9419 incremented = stabilize_reference (incremented);
9420 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9421 ones into save exprs so that they don't accidentally get evaluated
9422 more than once by the code below. */
9423 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9424 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9425 incremented = save_expr (incremented);
9427 /* Compute the operands as RTX.
9428 Note whether OP0 is the actual lvalue or a copy of it:
9429 I believe it is a copy iff it is a register or subreg
9430 and insns were generated in computing it. */
9432 temp = get_last_insn ();
9433 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9435 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9436 in place but instead must do sign- or zero-extension during assignment,
9437 so we copy it into a new register and let the code below use it as
9438 a copy.
 9440 Note that we can safely modify this SUBREG since it is known not to be
9441 shared (it was made by the expand_expr call above). */
9443 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9445 if (post)
9446 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9447 else
9448 bad_subreg = 1;
9450 else if (GET_CODE (op0) == SUBREG
9451 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9453 /* We cannot increment this SUBREG in place. If we are
9454 post-incrementing, get a copy of the old value. Otherwise,
9455 just mark that we cannot increment in place. */
9456 if (post)
9457 op0 = copy_to_reg (op0);
9458 else
9459 bad_subreg = 1;
9462 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9463 && temp != get_last_insn ());
9464 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9466 /* Decide whether incrementing or decrementing. */
9467 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9468 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9469 this_optab = sub_optab;
9471 /* Convert decrement by a constant into a negative increment. */
9472 if (this_optab == sub_optab
9473 && GET_CODE (op1) == CONST_INT)
9475 op1 = GEN_INT (-INTVAL (op1));
9476 this_optab = add_optab;
9479 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9480 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9482 /* For a preincrement, see if we can do this with a single instruction. */
9483 if (!post)
9485 icode = (int) this_optab->handlers[(int) mode].insn_code;
9486 if (icode != (int) CODE_FOR_nothing
9487 /* Make sure that OP0 is valid for operands 0 and 1
9488 of the insn we want to queue. */
9489 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9490 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9491 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9492 single_insn = 1;
9495 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9496 then we cannot just increment OP0. We must therefore contrive to
9497 increment the original value. Then, for postincrement, we can return
9498 OP0 since it is a copy of the old value. For preincrement, expand here
9499 unless we can do it with a single insn.
9501 Likewise if storing directly into OP0 would clobber high bits
9502 we need to preserve (bad_subreg). */
9503 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9505 /* This is the easiest way to increment the value wherever it is.
9506 Problems with multiple evaluation of INCREMENTED are prevented
9507 because either (1) it is a component_ref or preincrement,
9508 in which case it was stabilized above, or (2) it is an array_ref
9509 with constant index in an array in a register, which is
9510 safe to reevaluate. */
9511 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9512 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9513 ? MINUS_EXPR : PLUS_EXPR),
9514 TREE_TYPE (exp),
9515 incremented,
9516 TREE_OPERAND (exp, 1));
9518 while (TREE_CODE (incremented) == NOP_EXPR
9519 || TREE_CODE (incremented) == CONVERT_EXPR)
9521 newexp = convert (TREE_TYPE (incremented), newexp);
9522 incremented = TREE_OPERAND (incremented, 0);
 9525 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9526 return post ? op0 : temp;
9529 if (post)
9531 /* We have a true reference to the value in OP0.
9532 If there is an insn to add or subtract in this mode, queue it.
9533 Queueing the increment insn avoids the register shuffling
9534 that often results if we must increment now and first save
9535 the old value for subsequent use. */
9537 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9538 op0 = stabilize (op0);
9539 #endif
9541 icode = (int) this_optab->handlers[(int) mode].insn_code;
9542 if (icode != (int) CODE_FOR_nothing
9543 /* Make sure that OP0 is valid for operands 0 and 1
9544 of the insn we want to queue. */
9545 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9546 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9548 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9549 op1 = force_reg (mode, op1);
9551 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9553 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9555 rtx addr = (general_operand (XEXP (op0, 0), mode)
9556 ? force_reg (Pmode, XEXP (op0, 0))
9557 : copy_to_reg (XEXP (op0, 0)));
9558 rtx temp, result;
9560 op0 = replace_equiv_address (op0, addr);
9561 temp = force_reg (GET_MODE (op0), op0);
9562 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9563 op1 = force_reg (mode, op1);
9565 /* The increment queue is LIFO, thus we have to `queue'
9566 the instructions in reverse order. */
9567 enqueue_insn (op0, gen_move_insn (op0, temp));
9568 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9569 return result;
9573 /* Preincrement, or we can't increment with one simple insn. */
9574 if (post)
9575 /* Save a copy of the value before inc or dec, to return it later. */
9576 temp = value = copy_to_reg (op0);
9577 else
9578 /* Arrange to return the incremented value. */
9579 /* Copy the rtx because expand_binop will protect from the queue,
9580 and the results of that would be invalid for us to return
9581 if our caller does emit_queue before using our result. */
9582 temp = copy_rtx (value = op0);
9584 /* Increment however we can. */
9585 op1 = expand_binop (mode, this_optab, value, op1, op0,
9586 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9588 /* Make sure the value is stored into OP0. */
9589 if (op1 != op0)
9590 emit_move_insn (op0, op1);
9592 return temp;
9595 /* At the start of a function, record that we have no previously-pushed
9596 arguments waiting to be popped. */
9598 void
9599 init_pending_stack_adjust ()
9601 pending_stack_adjust = 0;
 9604 /* When exiting from a function, if safe, clear out any pending stack adjust
9605 so the adjustment won't get done.
9607 Note, if the current function calls alloca, then it must have a
9608 frame pointer regardless of the value of flag_omit_frame_pointer. */
9610 void
9611 clear_pending_stack_adjust ()
9613 #ifdef EXIT_IGNORE_STACK
9614 if (optimize > 0
9615 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9616 && EXIT_IGNORE_STACK
9617 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9618 && ! flag_inline_functions)
9620 stack_pointer_delta -= pending_stack_adjust,
9621 pending_stack_adjust = 0;
9623 #endif
9626 /* Pop any previously-pushed arguments that have not been popped yet. */
9628 void
9629 do_pending_stack_adjust ()
9631 if (inhibit_defer_pop == 0)
9633 if (pending_stack_adjust != 0)
9634 adjust_stack (GEN_INT (pending_stack_adjust));
9635 pending_stack_adjust = 0;
9639 /* Expand conditional expressions. */
9641 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9642 LABEL is an rtx of code CODE_LABEL, in this function and all the
9643 functions here. */
9645 void
9646 jumpifnot (exp, label)
9647 tree exp;
9648 rtx label;
9650 do_jump (exp, label, NULL_RTX);
9653 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9655 void
9656 jumpif (exp, label)
9657 tree exp;
9658 rtx label;
9660 do_jump (exp, NULL_RTX, label);
9663 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9664 the result is zero, or IF_TRUE_LABEL if the result is one.
9665 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9666 meaning fall through in that case.
9668 do_jump always does any pending stack adjust except when it does not
9669 actually perform a jump. An example where there is no jump
9670 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9672 This function is responsible for optimizing cases such as
9673 &&, || and comparison operators in EXP. */
9675 void
9676 do_jump (exp, if_false_label, if_true_label)
9677 tree exp;
9678 rtx if_false_label, if_true_label;
9680 enum tree_code code = TREE_CODE (exp);
9681 /* Some cases need to create a label to jump to
9682 in order to properly fall through.
9683 These cases set DROP_THROUGH_LABEL nonzero. */
9684 rtx drop_through_label = 0;
9685 rtx temp;
9686 int i;
9687 tree type;
9688 enum machine_mode mode;
9690 #ifdef MAX_INTEGER_COMPUTATION_MODE
9691 check_max_integer_computation_mode (exp);
9692 #endif
9694 emit_queue ();
9696 switch (code)
9698 case ERROR_MARK:
9699 break;
9701 case INTEGER_CST:
9702 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9703 if (temp)
9704 emit_jump (temp);
9705 break;
9707 #if 0
9708 /* This is not true with #pragma weak */
9709 case ADDR_EXPR:
9710 /* The address of something can never be zero. */
9711 if (if_true_label)
9712 emit_jump (if_true_label);
9713 break;
9714 #endif
9716 case NOP_EXPR:
9717 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9718 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9719 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9720 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9721 goto normal;
9722 case CONVERT_EXPR:
9723 /* If we are narrowing the operand, we have to do the compare in the
9724 narrower mode. */
9725 if ((TYPE_PRECISION (TREE_TYPE (exp))
9726 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9727 goto normal;
9728 case NON_LVALUE_EXPR:
9729 case REFERENCE_EXPR:
9730 case ABS_EXPR:
9731 case NEGATE_EXPR:
9732 case LROTATE_EXPR:
9733 case RROTATE_EXPR:
9734 /* These cannot change zero->nonzero or vice versa. */
9735 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9736 break;
9738 case WITH_RECORD_EXPR:
9739 /* Put the object on the placeholder list, recurse through our first
9740 operand, and pop the list. */
9741 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9742 placeholder_list);
9743 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9744 placeholder_list = TREE_CHAIN (placeholder_list);
9745 break;
9747 #if 0
 9748 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
 9749 a test, and can take more if the test is eliminated. */
9750 case PLUS_EXPR:
9751 /* Reduce to minus. */
9752 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9753 TREE_OPERAND (exp, 0),
9754 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9755 TREE_OPERAND (exp, 1))));
9756 /* Process as MINUS. */
9757 #endif
9759 case MINUS_EXPR:
9760 /* Nonzero iff operands of minus differ. */
9761 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9762 TREE_OPERAND (exp, 0),
9763 TREE_OPERAND (exp, 1)),
9764 NE, NE, if_false_label, if_true_label);
9765 break;
9767 case BIT_AND_EXPR:
9768 /* If we are AND'ing with a small constant, do this comparison in the
9769 smallest type that fits. If the machine doesn't have comparisons
9770 that small, it will be converted back to the wider comparison.
9771 This helps if we are testing the sign bit of a narrower object.
9772 combine can't do this for us because it can't know whether a
9773 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
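 /* For example, a jump on (x & 0x80) where x has SImode can instead be
    done as a QImode test when the target has QImode compare insns.  */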
9775 if (! SLOW_BYTE_ACCESS
9776 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9777 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9778 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9779 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9780 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9781 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9782 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9783 != CODE_FOR_nothing))
9785 do_jump (convert (type, exp), if_false_label, if_true_label);
9786 break;
9788 goto normal;
9790 case TRUTH_NOT_EXPR:
9791 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9792 break;
9794 case TRUTH_ANDIF_EXPR:
9795 if (if_false_label == 0)
9796 if_false_label = drop_through_label = gen_label_rtx ();
9797 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9798 start_cleanup_deferral ();
9799 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9800 end_cleanup_deferral ();
9801 break;
9803 case TRUTH_ORIF_EXPR:
9804 if (if_true_label == 0)
9805 if_true_label = drop_through_label = gen_label_rtx ();
9806 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9807 start_cleanup_deferral ();
9808 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9809 end_cleanup_deferral ();
9810 break;
9812 case COMPOUND_EXPR:
9813 push_temp_slots ();
9814 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9815 preserve_temp_slots (NULL_RTX);
9816 free_temp_slots ();
9817 pop_temp_slots ();
9818 emit_queue ();
9819 do_pending_stack_adjust ();
9820 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9821 break;
9823 case COMPONENT_REF:
9824 case BIT_FIELD_REF:
9825 case ARRAY_REF:
9826 case ARRAY_RANGE_REF:
9828 HOST_WIDE_INT bitsize, bitpos;
9829 int unsignedp;
9830 enum machine_mode mode;
9831 tree type;
9832 tree offset;
9833 int volatilep = 0;
9835 /* Get description of this reference. We don't actually care
9836 about the underlying object here. */
9837 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9838 &unsignedp, &volatilep);
9840 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9841 if (! SLOW_BYTE_ACCESS
9842 && type != 0 && bitsize >= 0
9843 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9844 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9845 != CODE_FOR_nothing))
9847 do_jump (convert (type, exp), if_false_label, if_true_label);
9848 break;
9850 goto normal;
9853 case COND_EXPR:
9854 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9855 if (integer_onep (TREE_OPERAND (exp, 1))
9856 && integer_zerop (TREE_OPERAND (exp, 2)))
9857 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9859 else if (integer_zerop (TREE_OPERAND (exp, 1))
9860 && integer_onep (TREE_OPERAND (exp, 2)))
9861 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9863 else
9865 rtx label1 = gen_label_rtx ();
9866 drop_through_label = gen_label_rtx ();
9868 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9870 start_cleanup_deferral ();
9871 /* Now the THEN-expression. */
9872 do_jump (TREE_OPERAND (exp, 1),
9873 if_false_label ? if_false_label : drop_through_label,
9874 if_true_label ? if_true_label : drop_through_label);
9875 /* In case the do_jump just above never jumps. */
9876 do_pending_stack_adjust ();
9877 emit_label (label1);
9879 /* Now the ELSE-expression. */
9880 do_jump (TREE_OPERAND (exp, 2),
9881 if_false_label ? if_false_label : drop_through_label,
9882 if_true_label ? if_true_label : drop_through_label);
9883 end_cleanup_deferral ();
9885 break;
9887 case EQ_EXPR:
9889 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9891 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9892 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9894 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9895 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9896 do_jump
9897 (fold
9898 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9899 fold (build (EQ_EXPR, TREE_TYPE (exp),
9900 fold (build1 (REALPART_EXPR,
9901 TREE_TYPE (inner_type),
9902 exp0)),
9903 fold (build1 (REALPART_EXPR,
9904 TREE_TYPE (inner_type),
9905 exp1)))),
9906 fold (build (EQ_EXPR, TREE_TYPE (exp),
9907 fold (build1 (IMAGPART_EXPR,
9908 TREE_TYPE (inner_type),
9909 exp0)),
9910 fold (build1 (IMAGPART_EXPR,
9911 TREE_TYPE (inner_type),
9912 exp1)))))),
9913 if_false_label, if_true_label);
9916 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9917 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9919 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9920 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9921 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9922 else
9923 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9924 break;
9927 case NE_EXPR:
9929 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9931 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9932 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9934 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9935 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9936 do_jump
9937 (fold
9938 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9939 fold (build (NE_EXPR, TREE_TYPE (exp),
9940 fold (build1 (REALPART_EXPR,
9941 TREE_TYPE (inner_type),
9942 exp0)),
9943 fold (build1 (REALPART_EXPR,
9944 TREE_TYPE (inner_type),
9945 exp1)))),
9946 fold (build (NE_EXPR, TREE_TYPE (exp),
9947 fold (build1 (IMAGPART_EXPR,
9948 TREE_TYPE (inner_type),
9949 exp0)),
9950 fold (build1 (IMAGPART_EXPR,
9951 TREE_TYPE (inner_type),
9952 exp1)))))),
9953 if_false_label, if_true_label);
9956 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9957 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9959 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9960 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9961 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9962 else
9963 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9964 break;
9967 case LT_EXPR:
9968 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9969 if (GET_MODE_CLASS (mode) == MODE_INT
9970 && ! can_compare_p (LT, mode, ccp_jump))
9971 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9972 else
9973 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9974 break;
9976 case LE_EXPR:
9977 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9978 if (GET_MODE_CLASS (mode) == MODE_INT
9979 && ! can_compare_p (LE, mode, ccp_jump))
9980 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9981 else
9982 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9983 break;
9985 case GT_EXPR:
9986 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9987 if (GET_MODE_CLASS (mode) == MODE_INT
9988 && ! can_compare_p (GT, mode, ccp_jump))
9989 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9990 else
9991 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9992 break;
9994 case GE_EXPR:
9995 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9996 if (GET_MODE_CLASS (mode) == MODE_INT
9997 && ! can_compare_p (GE, mode, ccp_jump))
9998 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9999 else
10000 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
10001 break;
10003 case UNORDERED_EXPR:
10004 case ORDERED_EXPR:
10006 enum rtx_code cmp, rcmp;
10007 int do_rev;
10009 if (code == UNORDERED_EXPR)
10010 cmp = UNORDERED, rcmp = ORDERED;
10011 else
10012 cmp = ORDERED, rcmp = UNORDERED;
10013 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10015 do_rev = 0;
10016 if (! can_compare_p (cmp, mode, ccp_jump)
10017 && (can_compare_p (rcmp, mode, ccp_jump)
10018 /* If the target doesn't provide either UNORDERED or ORDERED
10019 comparisons, canonicalize on UNORDERED for the library. */
10020 || rcmp == UNORDERED))
10021 do_rev = 1;
10023 if (! do_rev)
10024 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10025 else
10026 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10028 break;
10031 enum rtx_code rcode1;
10032 enum tree_code tcode2;
10034 case UNLT_EXPR:
10035 rcode1 = UNLT;
10036 tcode2 = LT_EXPR;
10037 goto unordered_bcc;
10038 case UNLE_EXPR:
10039 rcode1 = UNLE;
10040 tcode2 = LE_EXPR;
10041 goto unordered_bcc;
10042 case UNGT_EXPR:
10043 rcode1 = UNGT;
10044 tcode2 = GT_EXPR;
10045 goto unordered_bcc;
10046 case UNGE_EXPR:
10047 rcode1 = UNGE;
10048 tcode2 = GE_EXPR;
10049 goto unordered_bcc;
10050 case UNEQ_EXPR:
10051 rcode1 = UNEQ;
10052 tcode2 = EQ_EXPR;
10053 goto unordered_bcc;
10055 unordered_bcc:
10056 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10057 if (can_compare_p (rcode1, mode, ccp_jump))
10058 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10059 if_true_label);
10060 else
10062 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10063 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10064 tree cmp0, cmp1;
10066 /* If the target doesn't support combined unordered
10067 compares, decompose into UNORDERED + comparison. */
10068 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10069 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10070 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10071 do_jump (exp, if_false_label, if_true_label);
10074 break;
10076 /* Special case:
10077 __builtin_expect (<test>, 0) and
10078 __builtin_expect (<test>, 1)
10080 We need to do this here, so that <test> is not converted to a SCC
10081 operation on machines that use condition code registers and COMPARE
10082 like the PowerPC, and then the jump is done based on whether the SCC
10083 operation produced a 1 or 0. */
10084 case CALL_EXPR:
10085 /* Check for a built-in function. */
10086 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10088 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10089 tree arglist = TREE_OPERAND (exp, 1);
10091 if (TREE_CODE (fndecl) == FUNCTION_DECL
10092 && DECL_BUILT_IN (fndecl)
10093 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10094 && arglist != NULL_TREE
10095 && TREE_CHAIN (arglist) != NULL_TREE)
10097 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10098 if_true_label);
10100 if (seq != NULL_RTX)
10102 emit_insn (seq);
10103 return;
10107 /* fall through and generate the normal code. */
10109 default:
10110 normal:
10111 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10112 #if 0
10113 /* This is not needed any more and causes poor code since it causes
10114 comparisons and tests from non-SI objects to have different code
10115 sequences. */
10116 /* Copy to register to avoid generating bad insns by cse
10117 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10118 if (!cse_not_expected && GET_CODE (temp) == MEM)
10119 temp = copy_to_reg (temp);
10120 #endif
10121 do_pending_stack_adjust ();
10122 /* Do any postincrements in the expression that was tested. */
10123 emit_queue ();
10125 if (GET_CODE (temp) == CONST_INT
10126 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10127 || GET_CODE (temp) == LABEL_REF)
10129 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10130 if (target)
10131 emit_jump (target);
10133 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10134 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10135 /* Note swapping the labels gives us not-equal. */
10136 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10137 else if (GET_MODE (temp) != VOIDmode)
10138 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10139 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10140 GET_MODE (temp), NULL_RTX,
10141 if_false_label, if_true_label);
10142 else
10143 abort ();
10146 if (drop_through_label)
10148 /* If do_jump produces code that might be jumped around,
10149 do any stack adjusts from that code, before the place
10150 where control merges in. */
10151 do_pending_stack_adjust ();
10152 emit_label (drop_through_label);
10156 /* Given a comparison expression EXP for values too wide to be compared
10157 with one insn, test the comparison and jump to the appropriate label.
10158 The code of EXP is ignored; we always test GT if SWAP is 0,
10159 and LT if SWAP is 1. */
10161 static void
10162 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10163 tree exp;
10164 int swap;
10165 rtx if_false_label, if_true_label;
10167 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10168 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10169 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10170 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10172 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10175 /* Compare OP0 with OP1, word at a time, in mode MODE.
10176 UNSIGNEDP says to do unsigned comparison.
10177 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10179 void
10180 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10181 enum machine_mode mode;
10182 int unsignedp;
10183 rtx op0, op1;
10184 rtx if_false_label, if_true_label;
10186 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10187 rtx drop_through_label = 0;
10188 int i;
10190 if (! if_true_label || ! if_false_label)
10191 drop_through_label = gen_label_rtx ();
10192 if (! if_true_label)
10193 if_true_label = drop_through_label;
10194 if (! if_false_label)
10195 if_false_label = drop_through_label;
10197 /* Compare a word at a time, high order first. */
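 /* E.g., for a two-word comparison this emits, for each word starting
    with the most significant: "greater -> IF_TRUE_LABEL, not-equal ->
    IF_FALSE_LABEL" (unsigned except possibly for the high-order word),
    followed by a final jump to IF_FALSE_LABEL.  */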
10198 for (i = 0; i < nwords; i++)
10200 rtx op0_word, op1_word;
10202 if (WORDS_BIG_ENDIAN)
10204 op0_word = operand_subword_force (op0, i, mode);
10205 op1_word = operand_subword_force (op1, i, mode);
10207 else
10209 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10210 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
 10213 /* All but the high-order word must be compared as unsigned. */
10214 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10215 (unsignedp || i > 0), word_mode, NULL_RTX,
10216 NULL_RTX, if_true_label);
10218 /* Consider lower words only if these are equal. */
10219 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10220 NULL_RTX, NULL_RTX, if_false_label);
10223 if (if_false_label)
10224 emit_jump (if_false_label);
10225 if (drop_through_label)
10226 emit_label (drop_through_label);
10229 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10230 with one insn, test the comparison and jump to the appropriate label. */
10232 static void
10233 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10234 tree exp;
10235 rtx if_false_label, if_true_label;
10237 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10238 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10239 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10240 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10241 int i;
10242 rtx drop_through_label = 0;
10244 if (! if_false_label)
10245 drop_through_label = if_false_label = gen_label_rtx ();
10247 for (i = 0; i < nwords; i++)
10248 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10249 operand_subword_force (op1, i, mode),
10250 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10251 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10253 if (if_true_label)
10254 emit_jump (if_true_label);
10255 if (drop_through_label)
10256 emit_label (drop_through_label);
10259 /* Jump according to whether OP0 is 0.
10260 We assume that OP0 has an integer mode that is too wide
10261 for the available compare insns. */
10263 void
10264 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10265 rtx op0;
10266 rtx if_false_label, if_true_label;
10268 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10269 rtx part;
10270 int i;
10271 rtx drop_through_label = 0;
10273 /* The fastest way of doing this comparison on almost any machine is to
10274 "or" all the words and compare the result. If all have to be loaded
10275 from memory and this is a very wide item, it's possible this may
10276 be slower, but that's highly unlikely. */
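 /* E.g., for a two-word OP0 this computes PART = word0 | word1 and then
    needs only one compare of PART against zero.  */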
10278 part = gen_reg_rtx (word_mode);
10279 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10280 for (i = 1; i < nwords && part != 0; i++)
10281 part = expand_binop (word_mode, ior_optab, part,
10282 operand_subword_force (op0, i, GET_MODE (op0)),
10283 part, 1, OPTAB_WIDEN);
10285 if (part != 0)
10287 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10288 NULL_RTX, if_false_label, if_true_label);
10290 return;
10293 /* If we couldn't do the "or" simply, do this with a series of compares. */
10294 if (! if_false_label)
10295 drop_through_label = if_false_label = gen_label_rtx ();
10297 for (i = 0; i < nwords; i++)
10298 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10299 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10300 if_false_label, NULL_RTX);
10302 if (if_true_label)
10303 emit_jump (if_true_label);
10305 if (drop_through_label)
10306 emit_label (drop_through_label);
 10309 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10310 (including code to compute the values to be compared)
10311 and set (CC0) according to the result.
10312 The decision as to signed or unsigned comparison must be made by the caller.
10314 We force a stack adjustment unless there are currently
10315 things pushed on the stack that aren't yet used.
10317 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10318 compared. */
 10320 rtx
 10321 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10322 rtx op0, op1;
10323 enum rtx_code code;
10324 int unsignedp;
10325 enum machine_mode mode;
10326 rtx size;
10328 enum rtx_code ucode;
10329 rtx tem;
10331 /* If one operand is constant, make it the second one. Only do this
10332 if the other operand is not constant as well. */
10334 if (swap_commutative_operands_p (op0, op1))
10336 tem = op0;
10337 op0 = op1;
10338 op1 = tem;
10339 code = swap_condition (code);
10342 if (flag_force_mem)
10344 op0 = force_not_mem (op0);
10345 op1 = force_not_mem (op1);
10348 do_pending_stack_adjust ();
10350 ucode = unsignedp ? unsigned_condition (code) : code;
10351 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10352 return tem;
10354 #if 0
10355 /* There's no need to do this now that combine.c can eliminate lots of
10356 sign extensions. This can be less efficient in certain cases on other
10357 machines. */
10359 /* If this is a signed equality comparison, we can do it as an
10360 unsigned comparison since zero-extension is cheaper than sign
10361 extension and comparisons with zero are done as unsigned. This is
10362 the case even on machines that can do fast sign extension, since
10363 zero-extension is easier to combine with other operations than
10364 sign-extension is. If we are comparing against a constant, we must
10365 convert it to what it would look like unsigned. */
10366 if ((code == EQ || code == NE) && ! unsignedp
10367 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10369 if (GET_CODE (op1) == CONST_INT
10370 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10371 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10372 unsignedp = 1;
10374 #endif
10376 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10378 #if HAVE_cc0
10379 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10380 #else
10381 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10382 #endif
10385 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10386 The decision as to signed or unsigned comparison must be made by the caller.
10388 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10389 compared. */
10391 void
10392 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10393 if_false_label, if_true_label)
10394 rtx op0, op1;
10395 enum rtx_code code;
10396 int unsignedp;
10397 enum machine_mode mode;
10398 rtx size;
10399 rtx if_false_label, if_true_label;
10401 enum rtx_code ucode;
10402 rtx tem;
10403 int dummy_true_label = 0;
10405 /* Reverse the comparison if that is safe and we want to jump if it is
10406 false. */
10407 if (! if_true_label && ! FLOAT_MODE_P (mode))
10409 if_true_label = if_false_label;
10410 if_false_label = 0;
10411 code = reverse_condition (code);
10414 /* If one operand is constant, make it the second one. Only do this
10415 if the other operand is not constant as well. */
10417 if (swap_commutative_operands_p (op0, op1))
10419 tem = op0;
10420 op0 = op1;
10421 op1 = tem;
10422 code = swap_condition (code);
10425 if (flag_force_mem)
10427 op0 = force_not_mem (op0);
10428 op1 = force_not_mem (op1);
10431 do_pending_stack_adjust ();
10433 ucode = unsignedp ? unsigned_condition (code) : code;
10434 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10436 if (tem == const_true_rtx)
10438 if (if_true_label)
10439 emit_jump (if_true_label);
10441 else
10443 if (if_false_label)
10444 emit_jump (if_false_label);
10446 return;
10449 #if 0
10450 /* There's no need to do this now that combine.c can eliminate lots of
10451 sign extensions. This can be less efficient in certain cases on other
10452 machines. */
10454 /* If this is a signed equality comparison, we can do it as an
10455 unsigned comparison since zero-extension is cheaper than sign
10456 extension and comparisons with zero are done as unsigned. This is
10457 the case even on machines that can do fast sign extension, since
10458 zero-extension is easier to combine with other operations than
10459 sign-extension is. If we are comparing against a constant, we must
10460 convert it to what it would look like unsigned. */
10461 if ((code == EQ || code == NE) && ! unsignedp
10462 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10464 if (GET_CODE (op1) == CONST_INT
10465 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10466 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10467 unsignedp = 1;
10469 #endif
10471 if (! if_true_label)
10473 dummy_true_label = 1;
10474 if_true_label = gen_label_rtx ();
10477 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10478 if_true_label);
10480 if (if_false_label)
10481 emit_jump (if_false_label);
10482 if (dummy_true_label)
10483 emit_label (if_true_label);
10486 /* Generate code for a comparison expression EXP (including code to compute
10487 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10488 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10489 generated code will drop through.
10490 SIGNED_CODE should be the rtx operation for this comparison for
10491 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10493 We force a stack adjustment unless there are currently
10494 things pushed on the stack that aren't yet used. */
10496 static void
10497 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10498 if_true_label)
10499 tree exp;
10500 enum rtx_code signed_code, unsigned_code;
10501 rtx if_false_label, if_true_label;
10503 rtx op0, op1;
10504 tree type;
10505 enum machine_mode mode;
10506 int unsignedp;
10507 enum rtx_code code;
10509 /* Don't crash if the comparison was erroneous. */
10510 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10511 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10512 return;
10514 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10515 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10516 return;
10518 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10519 mode = TYPE_MODE (type);
10520 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10521 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10522 || (GET_MODE_BITSIZE (mode)
10523 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10524 1)))))))
 10526 /* op0 might have been replaced by a promoted constant, in which
 10527 case the type of the second argument should be used. */
10528 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10529 mode = TYPE_MODE (type);
10531 unsignedp = TREE_UNSIGNED (type);
10532 code = unsignedp ? unsigned_code : signed_code;
10534 #ifdef HAVE_canonicalize_funcptr_for_compare
10535 /* If function pointers need to be "canonicalized" before they can
10536 be reliably compared, then canonicalize them. */
10537 if (HAVE_canonicalize_funcptr_for_compare
10538 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10539 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10540 == FUNCTION_TYPE))
10542 rtx new_op0 = gen_reg_rtx (mode);
10544 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10545 op0 = new_op0;
10548 if (HAVE_canonicalize_funcptr_for_compare
10549 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10550 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10551 == FUNCTION_TYPE))
10553 rtx new_op1 = gen_reg_rtx (mode);
10555 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10556 op1 = new_op1;
10558 #endif
10560 /* Do any postincrements in the expression that was tested. */
10561 emit_queue ();
10563 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10564 ((mode == BLKmode)
10565 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10566 if_false_label, if_true_label);
10569 /* Generate code to calculate EXP using a store-flag instruction
10570 and return an rtx for the result. EXP is either a comparison
10571 or a TRUTH_NOT_EXPR whose operand is a comparison.
10573 If TARGET is nonzero, store the result there if convenient.
10575 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10576 cheap.
10578 Return zero if there is no suitable set-flag instruction
10579 available on this machine.
10581 Once expand_expr has been called on the arguments of the comparison,
10582 we are committed to doing the store flag, since it is not safe to
10583 re-evaluate the expression. We emit the store-flag insn by calling
10584 emit_store_flag, but only expand the arguments if we have a reason
10585 to believe that emit_store_flag will be successful. If we think that
10586 it will, but it isn't, we have to simulate the store-flag with a
10587 set/jump/set sequence. */
10589 static rtx
10590 do_store_flag (exp, target, mode, only_cheap)
10591 tree exp;
10592 rtx target;
10593 enum machine_mode mode;
10594 int only_cheap;
10596 enum rtx_code code;
10597 tree arg0, arg1, type;
10598 tree tem;
10599 enum machine_mode operand_mode;
10600 int invert = 0;
10601 int unsignedp;
10602 rtx op0, op1;
10603 enum insn_code icode;
10604 rtx subtarget = target;
10605 rtx result, label;
10607 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10608 result at the end. We can't simply invert the test since it would
10609 have already been inverted if it were valid. This case occurs for
10610 some floating-point comparisons. */
10612 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10613 invert = 1, exp = TREE_OPERAND (exp, 0);
10615 arg0 = TREE_OPERAND (exp, 0);
10616 arg1 = TREE_OPERAND (exp, 1);
10618 /* Don't crash if the comparison was erroneous. */
10619 if (arg0 == error_mark_node || arg1 == error_mark_node)
10620 return const0_rtx;
10622 type = TREE_TYPE (arg0);
10623 operand_mode = TYPE_MODE (type);
10624 unsignedp = TREE_UNSIGNED (type);
10626 /* We won't bother with BLKmode store-flag operations because it would mean
10627 passing a lot of information to emit_store_flag. */
10628 if (operand_mode == BLKmode)
10629 return 0;
10631 /* We won't bother with store-flag operations involving function pointers
10632 when function pointers must be canonicalized before comparisons. */
10633 #ifdef HAVE_canonicalize_funcptr_for_compare
10634 if (HAVE_canonicalize_funcptr_for_compare
10635 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10636 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10637 == FUNCTION_TYPE))
10638 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10639 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10640 == FUNCTION_TYPE))))
10641 return 0;
10642 #endif
10644 STRIP_NOPS (arg0);
10645 STRIP_NOPS (arg1);
10647 /* Get the rtx comparison code to use. We know that EXP is a comparison
10648 operation of some type. Some comparisons against 1 and -1 can be
10649 converted to comparisons with zero. Do so here so that the tests
10650 below will be aware that we have a comparison with zero. These
10651 tests will not catch constants in the first operand, but constants
10652 are rarely passed as the first operand. */
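 /* For instance, the switch below turns "x < 1" into a <= 0 test and a
    signed "x <= -1" into a < 0 test, so later code need only recognize
    comparisons against zero.  */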
10654 switch (TREE_CODE (exp))
10656 case EQ_EXPR:
10657 code = EQ;
10658 break;
10659 case NE_EXPR:
10660 code = NE;
10661 break;
10662 case LT_EXPR:
10663 if (integer_onep (arg1))
10664 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10665 else
10666 code = unsignedp ? LTU : LT;
10667 break;
10668 case LE_EXPR:
10669 if (! unsignedp && integer_all_onesp (arg1))
10670 arg1 = integer_zero_node, code = LT;
10671 else
10672 code = unsignedp ? LEU : LE;
10673 break;
10674 case GT_EXPR:
10675 if (! unsignedp && integer_all_onesp (arg1))
10676 arg1 = integer_zero_node, code = GE;
10677 else
10678 code = unsignedp ? GTU : GT;
10679 break;
10680 case GE_EXPR:
10681 if (integer_onep (arg1))
10682 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10683 else
10684 code = unsignedp ? GEU : GE;
10685 break;
10687 case UNORDERED_EXPR:
10688 code = UNORDERED;
10689 break;
10690 case ORDERED_EXPR:
10691 code = ORDERED;
10692 break;
10693 case UNLT_EXPR:
10694 code = UNLT;
10695 break;
10696 case UNLE_EXPR:
10697 code = UNLE;
10698 break;
10699 case UNGT_EXPR:
10700 code = UNGT;
10701 break;
10702 case UNGE_EXPR:
10703 code = UNGE;
10704 break;
10705 case UNEQ_EXPR:
10706 code = UNEQ;
10707 break;
10709 default:
10710 abort ();
10713 /* Put a constant second. */
10714 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10716 tem = arg0; arg0 = arg1; arg1 = tem;
10717 code = swap_condition (code);
10720 /* If this is an equality or inequality test of a single bit, we can
10721 do this by shifting the bit being tested to the low-order bit and
10722 masking the result with the constant 1. If the condition was EQ,
10723 we xor it with 1. This does not require an scc insn and is faster
10724 than an scc insn even if we have it. */
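 /* For example, "(x & 0x08) != 0" becomes "(x >> 3) & 1", and the EQ
    form additionally XORs that result with 1.  */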
10726 if ((code == NE || code == EQ)
10727 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10728 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10730 tree inner = TREE_OPERAND (arg0, 0);
10731 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10732 int ops_unsignedp;
10734 /* If INNER is a right shift of a constant and it plus BITNUM does
10735 not overflow, adjust BITNUM and INNER. */
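 /* E.g., testing bit 2 of (x >> 3) becomes testing bit 5 of x, as long
    as 3 + 2 stays within the precision of the type.  */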
10737 if (TREE_CODE (inner) == RSHIFT_EXPR
10738 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10739 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10740 && bitnum < TYPE_PRECISION (type)
10741 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10742 bitnum - TYPE_PRECISION (type)))
10744 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10745 inner = TREE_OPERAND (inner, 0);
10748 /* If we are going to be able to omit the AND below, we must do our
10749 operations as unsigned. If we must use the AND, we have a choice.
10750 Normally unsigned is faster, but for some machines signed is. */
10751 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10752 #ifdef LOAD_EXTEND_OP
10753 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
 10754 #else
 10755 : 1
 10756 #endif
 10757 );
10759 if (! get_subtarget (subtarget)
10760 || GET_MODE (subtarget) != operand_mode
10761 || ! safe_from_p (subtarget, inner, 1))
10762 subtarget = 0;
10764 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10766 if (bitnum != 0)
10767 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10768 size_int (bitnum), subtarget, ops_unsignedp);
10770 if (GET_MODE (op0) != mode)
10771 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10773 if ((code == EQ && ! invert) || (code == NE && invert))
10774 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10775 ops_unsignedp, OPTAB_LIB_WIDEN);
10777 /* Put the AND last so it can combine with more things. */
10778 if (bitnum != TYPE_PRECISION (type) - 1)
10779 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10781 return op0;
10784 /* Now see if we are likely to be able to do this. Return if not. */
10785 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10786 return 0;
10788 icode = setcc_gen_code[(int) code];
10789 if (icode == CODE_FOR_nothing
10790 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10792 /* We can only do this if it is one of the special cases that
10793 can be handled without an scc insn. */
10794 if ((code == LT && integer_zerop (arg1))
10795 || (! only_cheap && code == GE && integer_zerop (arg1)))
 10796 ;
 10797 else if (BRANCH_COST >= 0
10798 && ! only_cheap && (code == NE || code == EQ)
10799 && TREE_CODE (type) != REAL_TYPE
10800 && ((abs_optab->handlers[(int) operand_mode].insn_code
10801 != CODE_FOR_nothing)
10802 || (ffs_optab->handlers[(int) operand_mode].insn_code
10803 != CODE_FOR_nothing)))
 10804 ;
 10805 else
10806 return 0;
10809 if (! get_subtarget (target)
10810 || GET_MODE (subtarget) != operand_mode
10811 || ! safe_from_p (subtarget, arg1, 1))
10812 subtarget = 0;
10814 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10815 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10817 if (target == 0)
10818 target = gen_reg_rtx (mode);
10820 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
 10821 because, if emit_store_flag does anything, it will succeed and
10822 OP0 and OP1 will not be used subsequently. */
10824 result = emit_store_flag (target, code,
10825 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10826 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10827 operand_mode, unsignedp, 1);
10829 if (result)
10831 if (invert)
10832 result = expand_binop (mode, xor_optab, result, const1_rtx,
10833 result, 0, OPTAB_LIB_WIDEN);
10834 return result;
10837 /* If this failed, we have to do this with set/compare/jump/set code. */
10838 if (GET_CODE (target) != REG
10839 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10840 target = gen_reg_rtx (GET_MODE (target));
10842 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10843 result = compare_from_rtx (op0, op1, code, unsignedp,
10844 operand_mode, NULL_RTX);
10845 if (GET_CODE (result) == CONST_INT)
10846 return (((result == const0_rtx && ! invert)
10847 || (result != const0_rtx && invert))
10848 ? const0_rtx : const1_rtx);
10850 /* The code of RESULT may not match CODE if compare_from_rtx
10851 decided to swap its operands and reverse the original code.
10853 We know that compare_from_rtx returns either a CONST_INT or
10854 a new comparison code, so it is safe to just extract the
10855 code from RESULT. */
10856 code = GET_CODE (result);
10858 label = gen_label_rtx ();
10859 if (bcc_gen_fctn[(int) code] == 0)
10860 abort ();
10862 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10863 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10864 emit_label (label);
10866 return target;
10867 }
10870 /* Stubs in case we haven't got a casesi insn. */
10871 #ifndef HAVE_casesi
10872 # define HAVE_casesi 0
10873 # define gen_casesi(a, b, c, d, e) (0)
10874 # define CODE_FOR_casesi CODE_FOR_nothing
10875 #endif
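/* With these stubs a target that lacks a casesi pattern still compiles
   this file: HAVE_casesi folds to 0, so try_casesi below returns before
   the dummy gen_casesi could ever be reached. */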
10877 /* If the machine does not have a case insn that compares the bounds,
10878 this means extra overhead for dispatch tables, which raises the
10879 threshold for using them. */
10880 #ifndef CASE_VALUES_THRESHOLD
10881 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10882 #endif /* CASE_VALUES_THRESHOLD */
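/* Illustration: with a casesi pattern a switch needs at least 4 case
   values before the expander will consider a dispatch table; without one
   the cut-off rises to 5, since the bounds comparison must then be
   emitted separately. */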
10884 unsigned int
10885 case_values_threshold ()
10886 {
10887 return CASE_VALUES_THRESHOLD;
10888 }
10890 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10891 0 otherwise (i.e. if there is no casesi instruction). */
10892 int
10893 try_casesi (index_type, index_expr, minval, range,
10894 table_label, default_label)
10895 tree index_type, index_expr, minval, range;
10896 rtx table_label ATTRIBUTE_UNUSED;
10897 rtx default_label;
10898 {
10899 enum machine_mode index_mode = SImode;
10900 int index_bits = GET_MODE_BITSIZE (index_mode);
10901 rtx op1, op2, index;
10902 enum machine_mode op_mode;
10904 if (! HAVE_casesi)
10905 return 0;
10907 /* Convert the index to SImode. */
10908 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10909 {
10910 enum machine_mode omode = TYPE_MODE (index_type);
10911 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10913 /* We must handle the endpoints in the original mode. */
10914 index_expr = build (MINUS_EXPR, index_type,
10915 index_expr, minval);
10916 minval = integer_zero_node;
10917 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10918 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10919 omode, 1, default_label);
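/* Any index that survives the comparison above is known, in the original
   wider mode, to be no greater than RANGE, so the truncation to SImode
   below cannot discard significant bits for any table the caller would
   actually emit. */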
10920 /* Now we can safely truncate. */
10921 index = convert_to_mode (index_mode, index, 0);
10922 }
10923 else
10924 {
10925 if (TYPE_MODE (index_type) != index_mode)
10926 {
10927 index_expr = convert ((*lang_hooks.types.type_for_size)
10928 (index_bits, 0), index_expr);
10929 index_type = TREE_TYPE (index_expr);
10930 }
10932 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10933 }
10934 emit_queue ();
10935 index = protect_from_queue (index, 0);
10936 do_pending_stack_adjust ();
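/* Each casesi operand must satisfy the predicate recorded for it in
   insn_data; whenever the expanded rtx does not, force it into a register
   of the mode the pattern expects.  The same check is repeated for
   operands 1 and 2 below. */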
10938 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10939 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10940 (index, op_mode))
10941 index = copy_to_mode_reg (op_mode, index);
10943 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10945 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10946 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10947 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10948 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10949 (op1, op_mode))
10950 op1 = copy_to_mode_reg (op_mode, op1);
10952 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10954 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10955 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10956 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10957 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10958 (op2, op_mode))
10959 op2 = copy_to_mode_reg (op_mode, op2);
10961 emit_jump_insn (gen_casesi (index, op1, op2,
10962 table_label, default_label));
10963 return 1;
10964 }
10966 /* Attempt to generate a tablejump instruction; same concept. */
10967 #ifndef HAVE_tablejump
10968 #define HAVE_tablejump 0
10969 #define gen_tablejump(x, y) (0)
10970 #endif
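/* As with the casesi stubs above: on a target without a tablejump
   pattern, HAVE_tablejump is 0 and try_tablejump bails out before the
   dummy gen_tablejump could be used. */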
10972 /* Subroutine of the next function.
10974 INDEX is the value being switched on, with the lowest value
10975 in the table already subtracted.
10976 MODE is its expected mode (needed if INDEX is constant).
10977 RANGE is the length of the jump table.
10978 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10980 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10981 index value is out of range. */
10983 static void
10984 do_tablejump (index, mode, range, table_label, default_label)
10985 rtx index, range, table_label, default_label;
10986 enum machine_mode mode;
10987 {
10988 rtx temp, vector;
10990 if (INTVAL (range) > cfun->max_jumptable_ents)
10991 cfun->max_jumptable_ents = INTVAL (range);
10993 /* Do an unsigned comparison (in the proper mode) between the index
10994 expression and the value which represents the length of the range.
10995 Since we just finished subtracting the lower bound of the range
10996 from the index expression, this comparison allows us to simultaneously
10997 check that the original index expression value is both greater than
10998 or equal to the minimum value of the range and less than or equal to
10999 the maximum value of the range. */
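/* Worked example: for a switch whose case values run from 3 to 7 the
   caller passes INDEX = x - 3 and RANGE = 4; the single unsigned test
   "(unsigned) (x - 3) > 4" then rejects both x < 3 (the subtraction
   wraps around to a huge value) and x > 7. */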
11001 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11002 default_label);
11004 /* If index is in range, it must fit in Pmode.
11005 Convert to Pmode so we can index with it. */
11006 if (mode != Pmode)
11007 index = convert_to_mode (Pmode, index, 1);
11009 /* Don't let a MEM slip thru, because then INDEX that comes
11010 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11011 and break_out_memory_refs will go to work on it and mess it up. */
11012 #ifdef PIC_CASE_VECTOR_ADDRESS
11013 if (flag_pic && GET_CODE (index) != REG)
11014 index = copy_to_mode_reg (Pmode, index);
11015 #endif
11017 /* If flag_force_addr were to affect this address
11018 it could interfere with the tricky assumptions made
11019 about addresses that contain label-refs,
11020 which may be valid only very near the tablejump itself. */
11021 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11022 GET_MODE_SIZE, because this indicates how large insns are. The other
11023 uses should all be Pmode, because they are addresses. This code
11024 could fail if addresses and insns are not the same size. */
11025 index = gen_rtx_PLUS (Pmode,
11026 gen_rtx_MULT (Pmode, index,
11027 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11028 gen_rtx_LABEL_REF (Pmode, table_label));
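/* In the non-PIC case INDEX is now the address
   table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE), i.e. the slot
   holding the selected table entry; the MEM built below loads that entry
   and gen_tablejump jumps through it. */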
11029 #ifdef PIC_CASE_VECTOR_ADDRESS
11030 if (flag_pic)
11031 index = PIC_CASE_VECTOR_ADDRESS (index);
11032 else
11033 #endif
11034 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11035 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11036 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11037 RTX_UNCHANGING_P (vector) = 1;
11038 convert_move (temp, vector, 0);
11040 emit_jump_insn (gen_tablejump (temp, table_label));
11042 /* If we are generating PIC code or if the table is PC-relative, the
11043 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11044 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11045 emit_barrier ();
11046 }
11048 int
11049 try_tablejump (index_type, index_expr, minval, range,
11050 table_label, default_label)
11051 tree index_type, index_expr, minval, range;
11052 rtx table_label, default_label;
11053 {
11054 rtx index;
11056 if (! HAVE_tablejump)
11057 return 0;
11059 index_expr = fold (build (MINUS_EXPR, index_type,
11060 convert (index_type, index_expr),
11061 convert (index_type, minval)));
11062 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11063 emit_queue ();
11064 index = protect_from_queue (index, 0);
11065 do_pending_stack_adjust ();
11067 do_tablejump (index, TYPE_MODE (index_type),
11068 convert_modes (TYPE_MODE (index_type),
11069 TYPE_MODE (TREE_TYPE (range)),
11070 expand_expr (range, NULL_RTX,
11071 VOIDmode, 0),
11072 TREE_UNSIGNED (TREE_TYPE (range))),
11073 table_label, default_label);
11074 return 1;
11075 }
11077 /* Nonzero if the mode is a valid vector mode for this architecture.
11078 This returns nonzero even if there is no hardware support for the
11079 vector mode, but we can emulate with narrower modes. */
11081 int
11082 vector_mode_valid_p (mode)
11083 enum machine_mode mode;
11084 {
11085 enum mode_class class = GET_MODE_CLASS (mode);
11086 enum machine_mode innermode;
11088 /* Doh! What's going on? */
11089 if (class != MODE_VECTOR_INT
11090 && class != MODE_VECTOR_FLOAT)
11091 return 0;
11093 /* Hardware support. Woo hoo! */
11094 if (VECTOR_MODE_SUPPORTED_P (mode))
11095 return 1;
11097 innermode = GET_MODE_INNER (mode);
11099 /* We should probably return 1 if requesting V4DI and we have no DI
11100 but do have V2DI; such a case is probably very unlikely, though. */
11102 /* If we have support for the inner mode, we can safely emulate it.
11103 We may not have V2DI, but we can emulate with a pair of DIs. */
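/* E.g. V2DImode is accepted here even when the target has no V2DImode
   registers, provided plain DImode has a move pattern; generic vector
   lowering can then operate on the vector one DImode word at a time. */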
11104 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11105 }
11107 #include "gt-expr.h"