2003-06-19 Aldy Hernandez <aldyh@redhat.com>
[official-gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
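/* Illustrative sketch (not part of the original source): because the
   macro is now always 0 or 1, later code can test it at run time
   instead of hiding whole branches behind #ifdef, e.g.

     if (TARGET_MEM_FUNCTIONS)
       size_mode = TYPE_MODE (sizetype);
     else
       size_mode = TYPE_MODE (unsigned_type_node);

   as emit_block_move_via_libcall does below when choosing between the
   memcpy and bcopy calling conventions.  */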
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
129 void *constfundata;
130 int reverse;
131 };
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((void *, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
170 static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
171 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
172 tree));
174 static int is_aligning_offset PARAMS ((tree, tree));
175 static rtx expand_increment PARAMS ((tree, int, int));
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
177 #ifdef PUSH_ROUNDING
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
179 #endif
180 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
181 static rtx const_vector_from_tree PARAMS ((tree));
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
190 /* Record for each mode whether we can float-extend from memory. */
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 #endif
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
216 #ifndef CLEAR_RATIO
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
219 #else
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 #endif
223 #endif
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 #endif
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
237 #endif
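/* A minimal sketch of how these predicates are consulted (illustrative
   only; the real dispatch is in emit_block_move further below):

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);
     else
       -- fall back to a movstr pattern, a libcall, or an explicit loop

   so raising MOVE_RATIO biases block copies toward inline move insns.  */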
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
249 #endif
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
254 void
255 init_expr_once ()
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 rtx reg;
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
280 int regno;
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
293 regno++)
295 if (! HARD_REGNO_MODE_OK (regno, mode))
296 continue;
298 REGNO (reg) = regno;
300 SET_SRC (pat) = mem;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
315 SET_SRC (pat) = reg;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
331 enum insn_code ic;
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
335 continue;
337 PUT_MODE (mem, srcmode);
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
345 /* This is run at the start of compiling a function. */
347 void
348 init_expr ()
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
352 pending_chain = 0;
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
356 saveregs_value = 0;
357 apply_args_value = 0;
358 forced_labels = 0;
361 /* Small sanity check that the queue is empty at the end of a function. */
363 void
364 finish_expr_for_function ()
366 if (pending_chain)
367 abort ();
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
380 static rtx
381 enqueue_insn (var, body)
382 rtx var, body;
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
404 rtx
405 protect_from_queue (x, modify)
406 rtx x;
407 int modify;
409 RTX_CODE code = GET_CODE (x);
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
414 return x;
415 #endif
417 if (code != QUEUED)
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
423 shared. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
427 rtx y = XEXP (x, 0);
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
430 if (QUEUED_INSN (y))
432 rtx temp = gen_reg_rtx (GET_MODE (x));
434 emit_insn_before (gen_move_insn (temp, new),
435 QUEUED_INSN (y));
436 return temp;
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
446 if (code == MEM)
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
451 x = copy_rtx (x);
452 XEXP (x, 0) = tem;
455 else if (code == PLUS || code == MULT)
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
461 x = copy_rtx (x);
462 XEXP (x, 0) = new0;
463 XEXP (x, 1) = new1;
466 return x;
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
470 emit_queue. */
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
474 use that copy. */
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
481 QUEUED_INSN (x));
482 return QUEUED_COPY (x);
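/* Usage sketch (illustrative, mirroring the rules stated above):
   callers protect each operand just before emitting the insn that
   uses it, as emit_block_move does:

     x = protect_from_queue (x, 1);        -- will be modified
     y = protect_from_queue (y, 0);        -- read only
     size = protect_from_queue (size, 0);

   and must not hold the result across a call to emit_queue.  */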
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
490 int
491 queued_subexp_p (x)
492 rtx x;
494 enum rtx_code code = GET_CODE (x);
495 switch (code)
497 case QUEUED:
498 return 1;
499 case MEM:
500 return queued_subexp_p (XEXP (x, 0));
501 case MULT:
502 case PLUS:
503 case MINUS:
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
506 default:
507 return 0;
511 /* Perform all the pending incrementations. */
513 void
514 emit_queue ()
516 rtx p;
517 while ((p = pending_chain))
519 rtx body = QUEUED_BODY (p);
521 switch (GET_CODE (body))
523 case INSN:
524 case JUMP_INSN:
525 case CALL_INSN:
526 case CODE_LABEL:
527 case BARRIER:
528 case NOTE:
529 QUEUED_INSN (p) = body;
530 emit_insn (body);
531 break;
533 #ifdef ENABLE_CHECKING
534 case SEQUENCE:
535 abort ();
536 break;
537 #endif
539 default:
540 QUEUED_INSN (p) = emit_insn (body);
541 break;
544 pending_chain = QUEUED_NEXT (p);
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
553 void
554 convert_move (to, from, unsignedp)
555 rtx to, from;
556 int unsignedp;
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 enum insn_code code;
563 rtx libcall;
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
572 if (to_real != from_real)
573 abort ();
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
577 TO here. */
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
586 abort ();
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
591 emit_move_insn (to, from);
592 return;
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
598 abort ();
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
602 else
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
605 emit_move_insn (to, from);
606 return;
609 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
611 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
612 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
613 return;
616 if (to_real != from_real)
617 abort ();
619 if (to_real)
621 rtx value, insns;
623 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
625 /* Try converting directly if the insn is supported. */
626 if ((code = can_extend_p (to_mode, from_mode, 0))
627 != CODE_FOR_nothing)
629 emit_unop_insn (code, to, from, UNKNOWN);
630 return;
634 #ifdef HAVE_trunchfqf2
635 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_trunctqfqf2
642 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
645 return;
647 #endif
648 #ifdef HAVE_truncsfqf2
649 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncdfqf2
656 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
658 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_truncxfqf2
663 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
665 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
666 return;
668 #endif
669 #ifdef HAVE_trunctfqf2
670 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
672 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
673 return;
675 #endif
677 #ifdef HAVE_trunctqfhf2
678 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
680 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncsfhf2
685 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
687 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_truncdfhf2
692 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
694 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_truncxfhf2
699 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
701 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
702 return;
704 #endif
705 #ifdef HAVE_trunctfhf2
706 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
708 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
709 return;
711 #endif
713 #ifdef HAVE_truncsftqf2
714 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
716 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
717 return;
719 #endif
720 #ifdef HAVE_truncdftqf2
721 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
723 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
724 return;
726 #endif
727 #ifdef HAVE_truncxftqf2
728 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
730 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
731 return;
733 #endif
734 #ifdef HAVE_trunctftqf2
735 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
737 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
738 return;
740 #endif
742 #ifdef HAVE_truncdfsf2
743 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
746 return;
748 #endif
749 #ifdef HAVE_truncxfsf2
750 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
753 return;
755 #endif
756 #ifdef HAVE_trunctfsf2
757 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
759 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
760 return;
762 #endif
763 #ifdef HAVE_truncxfdf2
764 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
767 return;
769 #endif
770 #ifdef HAVE_trunctfdf2
771 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
773 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
774 return;
776 #endif
778 libcall = (rtx) 0;
779 switch (from_mode)
781 case SFmode:
782 switch (to_mode)
784 case DFmode:
785 libcall = extendsfdf2_libfunc;
786 break;
788 case XFmode:
789 libcall = extendsfxf2_libfunc;
790 break;
792 case TFmode:
793 libcall = extendsftf2_libfunc;
794 break;
796 default:
797 break;
799 break;
801 case DFmode:
802 switch (to_mode)
804 case SFmode:
805 libcall = truncdfsf2_libfunc;
806 break;
808 case XFmode:
809 libcall = extenddfxf2_libfunc;
810 break;
812 case TFmode:
813 libcall = extenddftf2_libfunc;
814 break;
816 default:
817 break;
819 break;
821 case XFmode:
822 switch (to_mode)
824 case SFmode:
825 libcall = truncxfsf2_libfunc;
826 break;
828 case DFmode:
829 libcall = truncxfdf2_libfunc;
830 break;
832 default:
833 break;
835 break;
837 case TFmode:
838 switch (to_mode)
840 case SFmode:
841 libcall = trunctfsf2_libfunc;
842 break;
844 case DFmode:
845 libcall = trunctfdf2_libfunc;
846 break;
848 default:
849 break;
851 break;
853 default:
854 break;
857 if (libcall == (rtx) 0)
858 /* This conversion is not implemented yet. */
859 abort ();
861 start_sequence ();
862 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
863 1, from, from_mode);
864 insns = get_insns ();
865 end_sequence ();
866 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
867 from));
868 return;
871 /* Now both modes are integers. */
873 /* Handle expanding beyond a word. */
874 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
875 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
877 rtx insns;
878 rtx lowpart;
879 rtx fill_value;
880 rtx lowfrom;
881 int i;
882 enum machine_mode lowpart_mode;
883 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
885 /* Try converting directly if the insn is supported. */
886 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
887 != CODE_FOR_nothing)
889 /* If FROM is a SUBREG, put it into a register. Do this
890 so that we always generate the same set of insns for
891 better cse'ing; if an intermediate assignment occurred,
892 we won't be doing the operation directly on the SUBREG. */
893 if (optimize > 0 && GET_CODE (from) == SUBREG)
894 from = force_reg (from_mode, from);
895 emit_unop_insn (code, to, from, equiv_code);
896 return;
898 /* Next, try converting via full word. */
899 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
900 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
901 != CODE_FOR_nothing))
903 if (GET_CODE (to) == REG)
904 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
905 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
906 emit_unop_insn (code, to,
907 gen_lowpart (word_mode, to), equiv_code);
908 return;
911 /* No special multiword conversion insn; do it by hand. */
912 start_sequence ();
914 /* Since we will turn this into a no conflict block, we must ensure
915 that the source does not overlap the target. */
917 if (reg_overlap_mentioned_p (to, from))
918 from = force_reg (from_mode, from);
920 /* Get a copy of FROM widened to a word, if necessary. */
921 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
922 lowpart_mode = word_mode;
923 else
924 lowpart_mode = from_mode;
926 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
928 lowpart = gen_lowpart (lowpart_mode, to);
929 emit_move_insn (lowpart, lowfrom);
931 /* Compute the value to put in each remaining word. */
932 if (unsignedp)
933 fill_value = const0_rtx;
934 else
936 #ifdef HAVE_slt
937 if (HAVE_slt
938 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
939 && STORE_FLAG_VALUE == -1)
941 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
942 lowpart_mode, 0);
943 fill_value = gen_reg_rtx (word_mode);
944 emit_insn (gen_slt (fill_value));
946 else
947 #endif
949 fill_value
950 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
951 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
952 NULL_RTX, 0);
953 fill_value = convert_to_mode (word_mode, fill_value, 1);
957 /* Fill the remaining words. */
958 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
960 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
961 rtx subword = operand_subword (to, index, 1, to_mode);
963 if (subword == 0)
964 abort ();
966 if (fill_value != subword)
967 emit_move_insn (subword, fill_value);
970 insns = get_insns ();
971 end_sequence ();
973 emit_no_conflict_block (insns, to, from, NULL_RTX,
974 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
975 return;
978 /* Truncating multi-word to a word or less. */
979 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
980 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
982 if (!((GET_CODE (from) == MEM
983 && ! MEM_VOLATILE_P (from)
984 && direct_load[(int) to_mode]
985 && ! mode_dependent_address_p (XEXP (from, 0)))
986 || GET_CODE (from) == REG
987 || GET_CODE (from) == SUBREG))
988 from = force_reg (from_mode, from);
989 convert_move (to, gen_lowpart (word_mode, from), 0);
990 return;
993 /* Handle pointer conversion. */ /* SPEE 900220. */
994 if (to_mode == PQImode)
996 if (from_mode != QImode)
997 from = convert_to_mode (QImode, from, unsignedp);
999 #ifdef HAVE_truncqipqi2
1000 if (HAVE_truncqipqi2)
1002 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
1003 return;
1005 #endif /* HAVE_truncqipqi2 */
1006 abort ();
1009 if (from_mode == PQImode)
1011 if (to_mode != QImode)
1013 from = convert_to_mode (QImode, from, unsignedp);
1014 from_mode = QImode;
1016 else
1018 #ifdef HAVE_extendpqiqi2
1019 if (HAVE_extendpqiqi2)
1021 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1022 return;
1024 #endif /* HAVE_extendpqiqi2 */
1025 abort ();
1029 if (to_mode == PSImode)
1031 if (from_mode != SImode)
1032 from = convert_to_mode (SImode, from, unsignedp);
1034 #ifdef HAVE_truncsipsi2
1035 if (HAVE_truncsipsi2)
1037 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1038 return;
1040 #endif /* HAVE_truncsipsi2 */
1041 abort ();
1044 if (from_mode == PSImode)
1046 if (to_mode != SImode)
1048 from = convert_to_mode (SImode, from, unsignedp);
1049 from_mode = SImode;
1051 else
1053 #ifdef HAVE_extendpsisi2
1054 if (! unsignedp && HAVE_extendpsisi2)
1056 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1057 return;
1059 #endif /* HAVE_extendpsisi2 */
1060 #ifdef HAVE_zero_extendpsisi2
1061 if (unsignedp && HAVE_zero_extendpsisi2)
1063 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1064 return;
1066 #endif /* HAVE_zero_extendpsisi2 */
1067 abort ();
1071 if (to_mode == PDImode)
1073 if (from_mode != DImode)
1074 from = convert_to_mode (DImode, from, unsignedp);
1076 #ifdef HAVE_truncdipdi2
1077 if (HAVE_truncdipdi2)
1079 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1080 return;
1082 #endif /* HAVE_truncdipdi2 */
1083 abort ();
1086 if (from_mode == PDImode)
1088 if (to_mode != DImode)
1090 from = convert_to_mode (DImode, from, unsignedp);
1091 from_mode = DImode;
1093 else
1095 #ifdef HAVE_extendpdidi2
1096 if (HAVE_extendpdidi2)
1098 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1099 return;
1101 #endif /* HAVE_extendpdidi2 */
1102 abort ();
1106 /* Now follow all the conversions between integers
1107 no more than a word long. */
1109 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1110 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1111 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1112 GET_MODE_BITSIZE (from_mode)))
1114 if (!((GET_CODE (from) == MEM
1115 && ! MEM_VOLATILE_P (from)
1116 && direct_load[(int) to_mode]
1117 && ! mode_dependent_address_p (XEXP (from, 0)))
1118 || GET_CODE (from) == REG
1119 || GET_CODE (from) == SUBREG))
1120 from = force_reg (from_mode, from);
1121 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1122 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1123 from = copy_to_reg (from);
1124 emit_move_insn (to, gen_lowpart (to_mode, from));
1125 return;
1128 /* Handle extension. */
1129 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1131 /* Convert directly if that works. */
1132 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1133 != CODE_FOR_nothing)
1135 if (flag_force_mem)
1136 from = force_not_mem (from);
1138 emit_unop_insn (code, to, from, equiv_code);
1139 return;
1141 else
1143 enum machine_mode intermediate;
1144 rtx tmp;
1145 tree shift_amount;
1147 /* Search for a mode to convert via. */
1148 for (intermediate = from_mode; intermediate != VOIDmode;
1149 intermediate = GET_MODE_WIDER_MODE (intermediate))
1150 if (((can_extend_p (to_mode, intermediate, unsignedp)
1151 != CODE_FOR_nothing)
1152 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1153 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1154 GET_MODE_BITSIZE (intermediate))))
1155 && (can_extend_p (intermediate, from_mode, unsignedp)
1156 != CODE_FOR_nothing))
1158 convert_move (to, convert_to_mode (intermediate, from,
1159 unsignedp), unsignedp);
1160 return;
1163 /* No suitable intermediate mode.
1164 Generate what we need with shifts. */
1165 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1166 - GET_MODE_BITSIZE (from_mode), 0);
1167 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1168 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1169 to, unsignedp);
1170 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1171 to, unsignedp);
1172 if (tmp != to)
1173 emit_move_insn (to, tmp);
1174 return;
1178 /* Support special truncate insns for certain modes. */
1180 if (from_mode == DImode && to_mode == SImode)
1182 #ifdef HAVE_truncdisi2
1183 if (HAVE_truncdisi2)
1185 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1186 return;
1188 #endif
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1190 return;
1193 if (from_mode == DImode && to_mode == HImode)
1195 #ifdef HAVE_truncdihi2
1196 if (HAVE_truncdihi2)
1198 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1199 return;
1201 #endif
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1203 return;
1206 if (from_mode == DImode && to_mode == QImode)
1208 #ifdef HAVE_truncdiqi2
1209 if (HAVE_truncdiqi2)
1211 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1212 return;
1214 #endif
1215 convert_move (to, force_reg (from_mode, from), unsignedp);
1216 return;
1219 if (from_mode == SImode && to_mode == HImode)
1221 #ifdef HAVE_truncsihi2
1222 if (HAVE_truncsihi2)
1224 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1225 return;
1227 #endif
1228 convert_move (to, force_reg (from_mode, from), unsignedp);
1229 return;
1232 if (from_mode == SImode && to_mode == QImode)
1234 #ifdef HAVE_truncsiqi2
1235 if (HAVE_truncsiqi2)
1237 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1238 return;
1240 #endif
1241 convert_move (to, force_reg (from_mode, from), unsignedp);
1242 return;
1245 if (from_mode == HImode && to_mode == QImode)
1247 #ifdef HAVE_trunchiqi2
1248 if (HAVE_trunchiqi2)
1250 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1251 return;
1253 #endif
1254 convert_move (to, force_reg (from_mode, from), unsignedp);
1255 return;
1258 if (from_mode == TImode && to_mode == DImode)
1260 #ifdef HAVE_trunctidi2
1261 if (HAVE_trunctidi2)
1263 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1264 return;
1266 #endif
1267 convert_move (to, force_reg (from_mode, from), unsignedp);
1268 return;
1271 if (from_mode == TImode && to_mode == SImode)
1273 #ifdef HAVE_trunctisi2
1274 if (HAVE_trunctisi2)
1276 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1277 return;
1279 #endif
1280 convert_move (to, force_reg (from_mode, from), unsignedp);
1281 return;
1284 if (from_mode == TImode && to_mode == HImode)
1286 #ifdef HAVE_trunctihi2
1287 if (HAVE_trunctihi2)
1289 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1290 return;
1292 #endif
1293 convert_move (to, force_reg (from_mode, from), unsignedp);
1294 return;
1297 if (from_mode == TImode && to_mode == QImode)
1299 #ifdef HAVE_trunctiqi2
1300 if (HAVE_trunctiqi2)
1302 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1303 return;
1305 #endif
1306 convert_move (to, force_reg (from_mode, from), unsignedp);
1307 return;
1310 /* Handle truncation of volatile memrefs, and so on;
1311 the things that couldn't be truncated directly,
1312 and for which there was no special instruction. */
1313 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1315 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1316 emit_move_insn (to, temp);
1317 return;
1320 /* Mode combination is not recognized. */
1321 abort ();
1324 /* Return an rtx for a value that would result
1325 from converting X to mode MODE.
1326 Both X and MODE may be floating, or both integer.
1327 UNSIGNEDP is nonzero if X is an unsigned value.
1328 This can be done by referring to a part of X in place
1329 or by copying to a new temporary with conversion.
1331 This function *must not* call protect_from_queue
1332 except when putting X into an insn (in which case convert_move does it). */
1334 rtx
1335 convert_to_mode (mode, x, unsignedp)
1336 enum machine_mode mode;
1337 rtx x;
1338 int unsignedp;
1340 return convert_modes (mode, VOIDmode, x, unsignedp);
1343 /* Return an rtx for a value that would result
1344 from converting X from mode OLDMODE to mode MODE.
1345 Both modes may be floating, or both integer.
1346 UNSIGNEDP is nonzero if X is an unsigned value.
1348 This can be done by referring to a part of X in place
1349 or by copying to a new temporary with conversion.
1351 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1353 This function *must not* call protect_from_queue
1354 except when putting X into an insn (in which case convert_move does it). */
1356 rtx
1357 convert_modes (mode, oldmode, x, unsignedp)
1358 enum machine_mode mode, oldmode;
1359 rtx x;
1360 int unsignedp;
1362 rtx temp;
1364 /* If FROM is a SUBREG that indicates that we have already done at least
1365 the required extension, strip it. */
1367 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1368 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1369 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1370 x = gen_lowpart (mode, x);
1372 if (GET_MODE (x) != VOIDmode)
1373 oldmode = GET_MODE (x);
1375 if (mode == oldmode)
1376 return x;
1378 /* There is one case that we must handle specially: If we are converting
1379 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1380 we are to interpret the constant as unsigned, gen_lowpart will do
1381 the wrong thing if the constant appears negative. What we want to do is
1382 make the high-order word of the constant zero, not all ones. */
1384 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1385 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1386 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1388 HOST_WIDE_INT val = INTVAL (x);
1390 if (oldmode != VOIDmode
1391 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1393 int width = GET_MODE_BITSIZE (oldmode);
1395 /* We need to zero extend VAL. */
1396 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1399 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1402 /* We can do this with a gen_lowpart if both desired and current modes
1403 are integer, and this is either a constant integer, a register, or a
1404 non-volatile MEM. Except for the constant case where MODE is no
1405 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1407 if ((GET_CODE (x) == CONST_INT
1408 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1409 || (GET_MODE_CLASS (mode) == MODE_INT
1410 && GET_MODE_CLASS (oldmode) == MODE_INT
1411 && (GET_CODE (x) == CONST_DOUBLE
1412 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1413 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1414 && direct_load[(int) mode])
1415 || (GET_CODE (x) == REG
1416 && (! HARD_REGISTER_P (x)
1417 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1418 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1419 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1421 /* ?? If we don't know OLDMODE, we have to assume here that
1422 X does not need sign- or zero-extension. This may not be
1423 the case, but it's the best we can do. */
1424 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1425 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1427 HOST_WIDE_INT val = INTVAL (x);
1428 int width = GET_MODE_BITSIZE (oldmode);
1430 /* We must sign or zero-extend in this case. Start by
1431 zero-extending, then sign extend if we need to. */
1432 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1433 if (! unsignedp
1434 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1435 val |= (HOST_WIDE_INT) (-1) << width;
1437 return gen_int_mode (val, mode);
1440 return gen_lowpart (mode, x);
1443 temp = gen_reg_rtx (mode);
1444 convert_move (temp, x, unsignedp);
1445 return temp;
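/* Illustrative example (not from the original file): a typical caller
   widens or narrows an operand before using it, e.g.

     op2 = convert_to_mode (mode, size, 1);

   which is equivalent to convert_modes (mode, VOIDmode, size, 1); the
   final argument says the value is to be treated as unsigned.  */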
1448 /* This macro is used to determine what the largest unit size that
1449 move_by_pieces can use is. */
1451 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1452 move efficiently, as opposed to MOVE_MAX which is the maximum
1453 number of bytes we can move with a single instruction. */
1455 #ifndef MOVE_MAX_PIECES
1456 #define MOVE_MAX_PIECES MOVE_MAX
1457 #endif
1459 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1460 store efficiently. Due to internal GCC limitations, this is
1461 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1462 for an immediate constant. */
1464 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1466 /* Determine whether the LEN bytes can be moved by using several move
1467 instructions. Return nonzero if a call to move_by_pieces should
1468 succeed. */
1470 int
1471 can_move_by_pieces (len, align)
1472 unsigned HOST_WIDE_INT len;
1473 unsigned int align ATTRIBUTE_UNUSED;
1475 return MOVE_BY_PIECES_P (len, align);
1478 /* Generate several move instructions to copy LEN bytes from block FROM to
1479 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1480 and TO through protect_from_queue before calling.
1482 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1483 used to push FROM to the stack.
1485 ALIGN is maximum stack alignment we can assume.
1487 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1488 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1489 stpcpy. */
1491 rtx
1492 move_by_pieces (to, from, len, align, endp)
1493 rtx to, from;
1494 unsigned HOST_WIDE_INT len;
1495 unsigned int align;
1496 int endp;
1498 struct move_by_pieces data;
1499 rtx to_addr, from_addr = XEXP (from, 0);
1500 unsigned int max_size = MOVE_MAX_PIECES + 1;
1501 enum machine_mode mode = VOIDmode, tmode;
1502 enum insn_code icode;
1504 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1506 data.offset = 0;
1507 data.from_addr = from_addr;
1508 if (to)
1510 to_addr = XEXP (to, 0);
1511 data.to = to;
1512 data.autinc_to
1513 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1514 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1515 data.reverse
1516 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1518 else
1520 to_addr = NULL_RTX;
1521 data.to = NULL_RTX;
1522 data.autinc_to = 1;
1523 #ifdef STACK_GROWS_DOWNWARD
1524 data.reverse = 1;
1525 #else
1526 data.reverse = 0;
1527 #endif
1529 data.to_addr = to_addr;
1530 data.from = from;
1531 data.autinc_from
1532 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1533 || GET_CODE (from_addr) == POST_INC
1534 || GET_CODE (from_addr) == POST_DEC);
1536 data.explicit_inc_from = 0;
1537 data.explicit_inc_to = 0;
1538 if (data.reverse) data.offset = len;
1539 data.len = len;
1541 /* If copying requires more than two move insns,
1542 copy addresses to registers (to make displacements shorter)
1543 and use post-increment if available. */
1544 if (!(data.autinc_from && data.autinc_to)
1545 && move_by_pieces_ninsns (len, align) > 2)
1547 /* Find the mode of the largest move... */
1548 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1549 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1550 if (GET_MODE_SIZE (tmode) < max_size)
1551 mode = tmode;
1553 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1555 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1556 data.autinc_from = 1;
1557 data.explicit_inc_from = -1;
1559 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1561 data.from_addr = copy_addr_to_reg (from_addr);
1562 data.autinc_from = 1;
1563 data.explicit_inc_from = 1;
1565 if (!data.autinc_from && CONSTANT_P (from_addr))
1566 data.from_addr = copy_addr_to_reg (from_addr);
1567 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1569 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1570 data.autinc_to = 1;
1571 data.explicit_inc_to = -1;
1573 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1575 data.to_addr = copy_addr_to_reg (to_addr);
1576 data.autinc_to = 1;
1577 data.explicit_inc_to = 1;
1579 if (!data.autinc_to && CONSTANT_P (to_addr))
1580 data.to_addr = copy_addr_to_reg (to_addr);
1583 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1584 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1585 align = MOVE_MAX * BITS_PER_UNIT;
1587 /* First move what we can in the largest integer mode, then go to
1588 successively smaller modes. */
1590 while (max_size > 1)
1592 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1593 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1594 if (GET_MODE_SIZE (tmode) < max_size)
1595 mode = tmode;
1597 if (mode == VOIDmode)
1598 break;
1600 icode = mov_optab->handlers[(int) mode].insn_code;
1601 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1602 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1604 max_size = GET_MODE_SIZE (mode);
1607 /* The code above should have handled everything. */
1608 if (data.len > 0)
1609 abort ();
1611 if (endp)
1613 rtx to1;
1615 if (data.reverse)
1616 abort ();
1617 if (data.autinc_to)
1619 if (endp == 2)
1621 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1622 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1623 else
1624 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1625 -1));
1627 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1628 data.offset);
1630 else
1632 if (endp == 2)
1633 --data.offset;
1634 to1 = adjust_address (data.to, QImode, data.offset);
1636 return to1;
1638 else
1639 return data.to;
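/* Illustrative note (not from the original source): with ENDP == 0 the
   caller simply gets TO back, e.g.

     move_by_pieces (x, y, INTVAL (size), align, 0);

   while ENDP == 1 or 2 returns an address at, or one byte before, the
   end of the copied block, matching mempcpy and stpcpy respectively.  */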
1642 /* Return number of insns required to move L bytes by pieces.
1643 ALIGN (in bits) is maximum alignment we can assume. */
1645 static unsigned HOST_WIDE_INT
1646 move_by_pieces_ninsns (l, align)
1647 unsigned HOST_WIDE_INT l;
1648 unsigned int align;
1650 unsigned HOST_WIDE_INT n_insns = 0;
1651 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1653 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1654 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1655 align = MOVE_MAX * BITS_PER_UNIT;
1657 while (max_size > 1)
1659 enum machine_mode mode = VOIDmode, tmode;
1660 enum insn_code icode;
1662 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1663 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1664 if (GET_MODE_SIZE (tmode) < max_size)
1665 mode = tmode;
1667 if (mode == VOIDmode)
1668 break;
1670 icode = mov_optab->handlers[(int) mode].insn_code;
1671 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1672 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1674 max_size = GET_MODE_SIZE (mode);
1677 if (l)
1678 abort ();
1679 return n_insns;
1682 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1683 with move instructions for mode MODE. GENFUN is the gen_... function
1684 to make a move insn for that mode. DATA has all the other info. */
1686 static void
1687 move_by_pieces_1 (genfun, mode, data)
1688 rtx (*genfun) PARAMS ((rtx, ...));
1689 enum machine_mode mode;
1690 struct move_by_pieces *data;
1692 unsigned int size = GET_MODE_SIZE (mode);
1693 rtx to1 = NULL_RTX, from1;
1695 while (data->len >= size)
1697 if (data->reverse)
1698 data->offset -= size;
1700 if (data->to)
1702 if (data->autinc_to)
1703 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1704 data->offset);
1705 else
1706 to1 = adjust_address (data->to, mode, data->offset);
1709 if (data->autinc_from)
1710 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1711 data->offset);
1712 else
1713 from1 = adjust_address (data->from, mode, data->offset);
1715 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1716 emit_insn (gen_add2_insn (data->to_addr,
1717 GEN_INT (-(HOST_WIDE_INT)size)));
1718 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1719 emit_insn (gen_add2_insn (data->from_addr,
1720 GEN_INT (-(HOST_WIDE_INT)size)));
1722 if (data->to)
1723 emit_insn ((*genfun) (to1, from1));
1724 else
1726 #ifdef PUSH_ROUNDING
1727 emit_single_push_insn (mode, from1, NULL);
1728 #else
1729 abort ();
1730 #endif
1733 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1734 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1735 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1736 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1738 if (! data->reverse)
1739 data->offset += size;
1741 data->len -= size;
1745 /* Emit code to move a block Y to a block X. This may be done with
1746 string-move instructions, with multiple scalar move instructions,
1747 or with a library call.
1749 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1750 SIZE is an rtx that says how long they are.
1751 ALIGN is the maximum alignment we can assume they have.
1752 METHOD describes what kind of copy this is, and what mechanisms may be used.
1754 Return the address of the new block, if memcpy is called and returns it,
1755 0 otherwise. */
1757 rtx
1758 emit_block_move (x, y, size, method)
1759 rtx x, y, size;
1760 enum block_op_methods method;
1762 bool may_use_call;
1763 rtx retval = 0;
1764 unsigned int align;
1766 switch (method)
1768 case BLOCK_OP_NORMAL:
1769 may_use_call = true;
1770 break;
1772 case BLOCK_OP_CALL_PARM:
1773 may_use_call = block_move_libcall_safe_for_call_parm ();
1775 /* Make inhibit_defer_pop nonzero around the library call
1776 to force it to pop the arguments right away. */
1777 NO_DEFER_POP;
1778 break;
1780 case BLOCK_OP_NO_LIBCALL:
1781 may_use_call = false;
1782 break;
1784 default:
1785 abort ();
1788 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1790 if (GET_MODE (x) != BLKmode)
1791 abort ();
1792 if (GET_MODE (y) != BLKmode)
1793 abort ();
1795 x = protect_from_queue (x, 1);
1796 y = protect_from_queue (y, 0);
1797 size = protect_from_queue (size, 0);
1799 if (GET_CODE (x) != MEM)
1800 abort ();
1801 if (GET_CODE (y) != MEM)
1802 abort ();
1803 if (size == 0)
1804 abort ();
1806 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1807 can be incorrect is coming from __builtin_memcpy. */
1808 if (GET_CODE (size) == CONST_INT)
1810 x = shallow_copy_rtx (x);
1811 y = shallow_copy_rtx (y);
1812 set_mem_size (x, size);
1813 set_mem_size (y, size);
1816 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1817 move_by_pieces (x, y, INTVAL (size), align, 0);
1818 else if (emit_block_move_via_movstr (x, y, size, align))
1819 ;
1820 else if (may_use_call)
1821 retval = emit_block_move_via_libcall (x, y, size);
1822 else
1823 emit_block_move_via_loop (x, y, size, align);
1825 if (method == BLOCK_OP_CALL_PARM)
1826 OK_DEFER_POP;
1828 return retval;
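/* Minimal usage sketch (illustrative only; operand names are made up):
   expanders typically call

     retval = emit_block_move (target, source, size_rtx, BLOCK_OP_NORMAL);

   where TARGET and SOURCE are BLKmode MEMs and SIZE_RTX gives the byte
   count; per the comment above, RETVAL is nonzero only when the copy
   was done via memcpy and memcpy's return value is available.  */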
1831 /* A subroutine of emit_block_move. Returns true if calling the
1832 block move libcall will not clobber any parameters which may have
1833 already been placed on the stack. */
1835 static bool
1836 block_move_libcall_safe_for_call_parm ()
1838 if (PUSH_ARGS)
1839 return true;
1840 else
1842 /* Check to see whether memcpy takes all register arguments. */
1843 static enum {
1844 takes_regs_uninit, takes_regs_no, takes_regs_yes
1845 } takes_regs = takes_regs_uninit;
1847 switch (takes_regs)
1849 case takes_regs_uninit:
1851 CUMULATIVE_ARGS args_so_far;
1852 tree fn, arg;
1854 fn = emit_block_move_libcall_fn (false);
1855 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1857 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1858 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1860 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1861 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1862 if (!tmp || !REG_P (tmp))
1863 goto fail_takes_regs;
1864 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1865 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1866 NULL_TREE, 1))
1867 goto fail_takes_regs;
1868 #endif
1869 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1872 takes_regs = takes_regs_yes;
1873 /* FALLTHRU */
1875 case takes_regs_yes:
1876 return true;
1878 fail_takes_regs:
1879 takes_regs = takes_regs_no;
1880 /* FALLTHRU */
1881 case takes_regs_no:
1882 return false;
1884 default:
1885 abort ();
1890 /* A subroutine of emit_block_move. Expand a movstr pattern;
1891 return true if successful. */
1893 static bool
1894 emit_block_move_via_movstr (x, y, size, align)
1895 rtx x, y, size;
1896 unsigned int align;
1898 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1899 enum machine_mode mode;
1901 /* Since this is a move insn, we don't care about volatility. */
1902 volatile_ok = 1;
1904 /* Try the most limited insn first, because there's no point
1905 including more than one in the machine description unless
1906 the more limited one has some advantage. */
1908 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1909 mode = GET_MODE_WIDER_MODE (mode))
1911 enum insn_code code = movstr_optab[(int) mode];
1912 insn_operand_predicate_fn pred;
1914 if (code != CODE_FOR_nothing
1915 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1916 here because if SIZE is less than the mode mask, as it is
1917 returned by the macro, it will definitely be less than the
1918 actual mode mask. */
1919 && ((GET_CODE (size) == CONST_INT
1920 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1921 <= (GET_MODE_MASK (mode) >> 1)))
1922 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1923 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1924 || (*pred) (x, BLKmode))
1925 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1926 || (*pred) (y, BLKmode))
1927 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1928 || (*pred) (opalign, VOIDmode)))
1930 rtx op2;
1931 rtx last = get_last_insn ();
1932 rtx pat;
1934 op2 = convert_to_mode (mode, size, 1);
1935 pred = insn_data[(int) code].operand[2].predicate;
1936 if (pred != 0 && ! (*pred) (op2, mode))
1937 op2 = copy_to_mode_reg (mode, op2);
1939 /* ??? When called via emit_block_move_for_call, it'd be
1940 nice if there were some way to inform the backend, so
1941 that it doesn't fail the expansion because it thinks
1942 emitting the libcall would be more efficient. */
1944 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1945 if (pat)
1947 emit_insn (pat);
1948 volatile_ok = 0;
1949 return true;
1951 else
1952 delete_insns_since (last);
1956 volatile_ok = 0;
1957 return false;
1960 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1961 Return the return value from memcpy, 0 otherwise. */
1963 static rtx
1964 emit_block_move_via_libcall (dst, src, size)
1965 rtx dst, src, size;
1967 rtx dst_addr, src_addr;
1968 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1969 enum machine_mode size_mode;
1970 rtx retval;
1972 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1974 It is unsafe to save the value generated by protect_from_queue and reuse
1975 it later. Consider what happens if emit_queue is called before the
1976 return value from protect_from_queue is used.
1978 Expansion of the CALL_EXPR below will call emit_queue before we are
1979 finished emitting RTL for argument setup. So if we are not careful we
1980 could get the wrong value for an argument.
1982 To avoid this problem we go ahead and emit code to copy the addresses of
1983 DST and SRC and SIZE into new pseudos. We can then place those new
1984 pseudos into an RTL_EXPR and use them later, even after a call to
1985 emit_queue.
1987 Note this is not strictly needed for library calls since they do not call
1988 emit_queue before loading their arguments. However, we may need to have
1989 library calls call emit_queue in the future since failing to do so could
1990 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1991 arguments in registers. */
1993 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1994 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1996 #ifdef POINTERS_EXTEND_UNSIGNED
1997 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1998 src_addr = convert_memory_address (ptr_mode, src_addr);
1999 #endif
2001 dst_tree = make_tree (ptr_type_node, dst_addr);
2002 src_tree = make_tree (ptr_type_node, src_addr);
2004 if (TARGET_MEM_FUNCTIONS)
2005 size_mode = TYPE_MODE (sizetype);
2006 else
2007 size_mode = TYPE_MODE (unsigned_type_node);
2009 size = convert_to_mode (size_mode, size, 1);
2010 size = copy_to_mode_reg (size_mode, size);
2012 /* It is incorrect to use the libcall calling conventions to call
2013 memcpy in this context. This could be a user call to memcpy and
2014 the user may wish to examine the return value from memcpy. For
2015 targets where libcalls and normal calls have different conventions
2016 for returning pointers, we could end up generating incorrect code.
2018 For convenience, we generate the call to bcopy this way as well. */
2020 if (TARGET_MEM_FUNCTIONS)
2021 size_tree = make_tree (sizetype, size);
2022 else
2023 size_tree = make_tree (unsigned_type_node, size);
2025 fn = emit_block_move_libcall_fn (true);
2026 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2027 if (TARGET_MEM_FUNCTIONS)
2029 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2030 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2032 else
2034 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2035 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2038 /* Now we have to build up the CALL_EXPR itself. */
2039 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2040 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2041 call_expr, arg_list, NULL_TREE);
2042 TREE_SIDE_EFFECTS (call_expr) = 1;
2044 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2046 /* If we are initializing a readonly value, show the above call clobbered
2047 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
2048 the delay slot scheduler might overlook conflicts and take nasty
2049 decisions. */
2050 if (RTX_UNCHANGING_P (dst))
2051 add_function_usage_to
2052 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
2053 gen_rtx_CLOBBER (VOIDmode, dst),
2054 NULL_RTX));
2056 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
2059 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2060 for the function we use for block copies. The first time FOR_CALL
2061 is true, we call assemble_external. */
2063 static GTY(()) tree block_move_fn;
2065 void
2066 init_block_move_fn (asmspec)
2067 const char *asmspec;
2069 if (!block_move_fn)
2071 tree args, fn;
2073 if (TARGET_MEM_FUNCTIONS)
2075 fn = get_identifier ("memcpy");
2076 args = build_function_type_list (ptr_type_node, ptr_type_node,
2077 const_ptr_type_node, sizetype,
2078 NULL_TREE);
2080 else
2082 fn = get_identifier ("bcopy");
2083 args = build_function_type_list (void_type_node, const_ptr_type_node,
2084 ptr_type_node, unsigned_type_node,
2085 NULL_TREE);
2088 fn = build_decl (FUNCTION_DECL, fn, args);
2089 DECL_EXTERNAL (fn) = 1;
2090 TREE_PUBLIC (fn) = 1;
2091 DECL_ARTIFICIAL (fn) = 1;
2092 TREE_NOTHROW (fn) = 1;
2094 block_move_fn = fn;
2097 if (asmspec)
2099 SET_DECL_RTL (block_move_fn, NULL_RTX);
2100 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2104 static tree
2105 emit_block_move_libcall_fn (for_call)
2106 int for_call;
2108 static bool emitted_extern;
2110 if (!block_move_fn)
2111 init_block_move_fn (NULL);
2113 if (for_call && !emitted_extern)
2115 emitted_extern = true;
2116 make_decl_rtl (block_move_fn, NULL);
2117 assemble_external (block_move_fn);
2120 return block_move_fn;
2123 /* A subroutine of emit_block_move. Copy the data via an explicit
2124 loop. This is used only when libcalls are forbidden. */
2125 /* ??? It'd be nice to copy in hunks larger than QImode. */
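/* A sketch of the loop emitted below (a byte-at-a-time copy; illustrative
   pseudo code, not the exact RTL):

       iter = 0;
       goto cmp;
     top:
       *(x_addr + iter) = *(y_addr + iter);
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;  */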
2127 static void
2128 emit_block_move_via_loop (x, y, size, align)
2129 rtx x, y, size;
2130 unsigned int align ATTRIBUTE_UNUSED;
2132 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2133 enum machine_mode iter_mode;
2135 iter_mode = GET_MODE (size);
2136 if (iter_mode == VOIDmode)
2137 iter_mode = word_mode;
2139 top_label = gen_label_rtx ();
2140 cmp_label = gen_label_rtx ();
2141 iter = gen_reg_rtx (iter_mode);
2143 emit_move_insn (iter, const0_rtx);
2145 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2146 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2147 do_pending_stack_adjust ();
2149 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2151 emit_jump (cmp_label);
2152 emit_label (top_label);
2154 tmp = convert_modes (Pmode, iter_mode, iter, true);
2155 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2156 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2157 x = change_address (x, QImode, x_addr);
2158 y = change_address (y, QImode, y_addr);
2160 emit_move_insn (x, y);
2162 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2163 true, OPTAB_LIB_WIDEN);
2164 if (tmp != iter)
2165 emit_move_insn (iter, tmp);
2167 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2168 emit_label (cmp_label);
2170 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2171 true, top_label);
2173 emit_note (NULL, NOTE_INSN_LOOP_END);
2176 /* Copy all or part of a value X into registers starting at REGNO.
2177 The number of registers to be filled is NREGS. */
2179 void
2180 move_block_to_reg (regno, x, nregs, mode)
2181 int regno;
2182 rtx x;
2183 int nregs;
2184 enum machine_mode mode;
2186 int i;
2187 #ifdef HAVE_load_multiple
2188 rtx pat;
2189 rtx last;
2190 #endif
2192 if (nregs == 0)
2193 return;
2195 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2196 x = validize_mem (force_const_mem (mode, x));
2198 /* See if the machine can do this with a load multiple insn. */
2199 #ifdef HAVE_load_multiple
2200 if (HAVE_load_multiple)
2202 last = get_last_insn ();
2203 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2204 GEN_INT (nregs));
2205 if (pat)
2207 emit_insn (pat);
2208 return;
2210 else
2211 delete_insns_since (last);
2213 #endif
2215 for (i = 0; i < nregs; i++)
2216 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2217 operand_subword_force (x, i, mode));
2220 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2221 The number of registers to be filled is NREGS. */
2223 void
2224 move_block_from_reg (regno, x, nregs)
2225 int regno;
2226 rtx x;
2227 int nregs;
2229 int i;
2231 if (nregs == 0)
2232 return;
2234 /* See if the machine can do this with a store multiple insn. */
2235 #ifdef HAVE_store_multiple
2236 if (HAVE_store_multiple)
2238 rtx last = get_last_insn ();
2239 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2240 GEN_INT (nregs));
2241 if (pat)
2243 emit_insn (pat);
2244 return;
2246 else
2247 delete_insns_since (last);
2249 #endif
2251 for (i = 0; i < nregs; i++)
2253 rtx tem = operand_subword (x, i, 1, BLKmode);
2255 if (tem == 0)
2256 abort ();
2258 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2262 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2263 ORIG, where ORIG is a non-consecutive group of registers represented by
2264 a PARALLEL. The clone is identical to the original except in that the
2265 original set of registers is replaced by a new set of pseudo registers.
2266 The new set has the same modes as the original set. */
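/* For illustration, a two-register group might be represented as

       (parallel [(expr_list (reg:DI 10) (const_int 0))
                  (expr_list (reg:DI 11) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   block; the clone keeps the modes and offsets but substitutes fresh
   pseudos (the register numbers above are made up).  */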
2268 rtx
2269 gen_group_rtx (orig)
2270 rtx orig;
2272 int i, length;
2273 rtx *tmps;
2275 if (GET_CODE (orig) != PARALLEL)
2276 abort ();
2278 length = XVECLEN (orig, 0);
2279 tmps = (rtx *) alloca (sizeof (rtx) * length);
2281 /* Skip a NULL entry in first slot. */
2282 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2284 if (i)
2285 tmps[0] = 0;
2287 for (; i < length; i++)
2289 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2290 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2292 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2295 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2298 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2299 registers represented by a PARALLEL. SSIZE represents the total size of
2300 block SRC in bytes, or -1 if not known. */
2301 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2302 the balance will be in what would be the low-order memory addresses, i.e.
2303 left justified for big endian, right justified for little endian. This
2304 happens to be true for the targets currently using this support. If this
2305 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2306 would be needed. */
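/* For example (illustrative numbers only): with SSIZE == 6 and a final
   SImode piece at byte offset 4, only 2 bytes of data remain, so on a
   big-endian target the piece is shifted left by 16 bits so that the
   valid bytes end up in what would be the low-order memory addresses.  */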
2308 void
2309 emit_group_load (dst, orig_src, ssize)
2310 rtx dst, orig_src;
2311 int ssize;
2313 rtx *tmps, src;
2314 int start, i;
2316 if (GET_CODE (dst) != PARALLEL)
2317 abort ();
2319 /* Check for a NULL entry, used to indicate that the parameter goes
2320 both on the stack and in registers. */
2321 if (XEXP (XVECEXP (dst, 0, 0), 0))
2322 start = 0;
2323 else
2324 start = 1;
2326 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2328 /* Process the pieces. */
2329 for (i = start; i < XVECLEN (dst, 0); i++)
2331 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2332 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2333 unsigned int bytelen = GET_MODE_SIZE (mode);
2334 int shift = 0;
2336 /* Handle trailing fragments that run over the size of the struct. */
2337 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2339 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2340 bytelen = ssize - bytepos;
2341 if (bytelen <= 0)
2342 abort ();
2345 /* If we won't be loading directly from memory, protect the real source
2346 from strange tricks we might play; but make sure that the source can
2347 be loaded directly into the destination. */
2348 src = orig_src;
2349 if (GET_CODE (orig_src) != MEM
2350 && (!CONSTANT_P (orig_src)
2351 || (GET_MODE (orig_src) != mode
2352 && GET_MODE (orig_src) != VOIDmode)))
2354 if (GET_MODE (orig_src) == VOIDmode)
2355 src = gen_reg_rtx (mode);
2356 else
2357 src = gen_reg_rtx (GET_MODE (orig_src));
2359 emit_move_insn (src, orig_src);
2362 /* Optimize the access just a bit. */
2363 if (GET_CODE (src) == MEM
2364 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2365 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2366 && bytelen == GET_MODE_SIZE (mode))
2368 tmps[i] = gen_reg_rtx (mode);
2369 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2371 else if (GET_CODE (src) == CONCAT)
2373 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2374 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2376 if ((bytepos == 0 && bytelen == slen0)
2377 || (bytepos != 0 && bytepos + bytelen <= slen))
2379 /* The following assumes that the concatenated objects all
2380 have the same size. In this case, a simple calculation
2381 can be used to determine the object and the bit field
2382 to be extracted. */
2383 tmps[i] = XEXP (src, bytepos / slen0);
2384 if (! CONSTANT_P (tmps[i])
2385 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2386 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2387 (bytepos % slen0) * BITS_PER_UNIT,
2388 1, NULL_RTX, mode, mode, ssize);
2390 else if (bytepos == 0)
2392 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2393 emit_move_insn (mem, src);
2394 tmps[i] = adjust_address (mem, mode, 0);
2396 else
2397 abort ();
2399 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2400 SIMD register, which is currently broken. While we get GCC
2401 to emit proper RTL for these cases, let's dump to memory. */
2402 else if (VECTOR_MODE_P (GET_MODE (dst))
2403 && GET_CODE (src) == REG)
2405 int slen = GET_MODE_SIZE (GET_MODE (src));
2406 rtx mem;
2408 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2409 emit_move_insn (mem, src);
2410 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2412 else if (CONSTANT_P (src)
2413 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2414 tmps[i] = src;
2415 else
2416 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2417 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2418 mode, mode, ssize);
2420 if (BYTES_BIG_ENDIAN && shift)
2421 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2422 tmps[i], 0, OPTAB_WIDEN);
2425 emit_queue ();
2427 /* Copy the extracted pieces into the proper (probable) hard regs. */
2428 for (i = start; i < XVECLEN (dst, 0); i++)
2429 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2432 /* Emit code to move a block SRC to block DST, where SRC and DST are
2433 non-consecutive groups of registers, each represented by a PARALLEL. */
2435 void
2436 emit_group_move (dst, src)
2437 rtx dst, src;
2439 int i;
2441 if (GET_CODE (src) != PARALLEL
2442 || GET_CODE (dst) != PARALLEL
2443 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2444 abort ();
2446 /* Skip first entry if NULL. */
2447 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2448 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2449 XEXP (XVECEXP (src, 0, i), 0));
2452 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2453 registers represented by a PARALLEL. SSIZE represents the total size of
2454 block DST, or -1 if not known. */
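/* As in emit_group_load, a trailing fragment is handled specially.  With
   SSIZE == 6 and a final 4-byte piece at byte offset 4 (illustrative
   numbers), a big-endian target first shifts the piece right by 16 bits
   and then stores only the 2 bytes that remain.  */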
2456 void
2457 emit_group_store (orig_dst, src, ssize)
2458 rtx orig_dst, src;
2459 int ssize;
2461 rtx *tmps, dst;
2462 int start, i;
2464 if (GET_CODE (src) != PARALLEL)
2465 abort ();
2467 /* Check for a NULL entry, used to indicate that the parameter goes
2468 both on the stack and in registers. */
2469 if (XEXP (XVECEXP (src, 0, 0), 0))
2470 start = 0;
2471 else
2472 start = 1;
2474 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2476 /* Copy the (probable) hard regs into pseudos. */
2477 for (i = start; i < XVECLEN (src, 0); i++)
2479 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2480 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2481 emit_move_insn (tmps[i], reg);
2483 emit_queue ();
2485 /* If we won't be storing directly into memory, protect the real destination
2486 from strange tricks we might play. */
2487 dst = orig_dst;
2488 if (GET_CODE (dst) == PARALLEL)
2490 rtx temp;
2492 /* We can get a PARALLEL dst if there is a conditional expression in
2493 a return statement. In that case, the dst and src are the same,
2494 so no action is necessary. */
2495 if (rtx_equal_p (dst, src))
2496 return;
2498 /* It is unclear if we can ever reach here, but we may as well handle
2499 it. Allocate a temporary, and split this into a store/load to/from
2500 the temporary. */
2502 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2503 emit_group_store (temp, src, ssize);
2504 emit_group_load (dst, temp, ssize);
2505 return;
2507 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2509 dst = gen_reg_rtx (GET_MODE (orig_dst));
2510 /* Make life a bit easier for combine. */
2511 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2514 /* Process the pieces. */
2515 for (i = start; i < XVECLEN (src, 0); i++)
2517 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2518 enum machine_mode mode = GET_MODE (tmps[i]);
2519 unsigned int bytelen = GET_MODE_SIZE (mode);
2520 rtx dest = dst;
2522 /* Handle trailing fragments that run over the size of the struct. */
2523 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2525 if (BYTES_BIG_ENDIAN)
2527 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2528 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2529 tmps[i], 0, OPTAB_WIDEN);
2531 bytelen = ssize - bytepos;
2534 if (GET_CODE (dst) == CONCAT)
2536 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2537 dest = XEXP (dst, 0);
2538 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2540 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2541 dest = XEXP (dst, 1);
2543 else if (bytepos == 0 && XVECLEN (src, 0))
2545 dest = assign_stack_temp (GET_MODE (dest),
2546 GET_MODE_SIZE (GET_MODE (dest)), 0);
2547 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2548 tmps[i]);
2549 dst = dest;
2550 break;
2552 else
2553 abort ();
2556 /* Optimize the access just a bit. */
2557 if (GET_CODE (dest) == MEM
2558 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2559 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2560 && bytelen == GET_MODE_SIZE (mode))
2561 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2562 else
2563 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2564 mode, tmps[i], ssize);
2567 emit_queue ();
2569 /* Copy from the pseudo into the (probable) hard reg. */
2570 if (orig_dst != dst)
2571 emit_move_insn (orig_dst, dst);
2574 /* Generate code to copy a BLKmode object of TYPE out of a
2575 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2576 is null, a stack temporary is created. TGTBLK is returned.
2578 The primary purpose of this routine is to handle functions
2579 that return BLKmode structures in registers. Some machines
2580 (the PA for example) want to return all small structures
2581 in registers regardless of the structure's alignment. */
2583 rtx
2584 copy_blkmode_from_reg (tgtblk, srcreg, type)
2585 rtx tgtblk;
2586 rtx srcreg;
2587 tree type;
2589 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2590 rtx src = NULL, dst = NULL;
2591 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2592 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2594 if (tgtblk == 0)
2596 tgtblk = assign_temp (build_qualified_type (type,
2597 (TYPE_QUALS (type)
2598 | TYPE_QUAL_CONST)),
2599 0, 1, 1);
2600 preserve_temp_slots (tgtblk);
2603 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2604 into a new pseudo which is a full word. */
2606 if (GET_MODE (srcreg) != BLKmode
2607 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2608 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2610 /* Structures whose size is not a multiple of a word are aligned
2611 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2612 machine, this means we must skip the empty high order bytes when
2613 calculating the bit offset. */
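/* For example, with BITS_PER_WORD == 32 a 6-byte structure has
   bytes % UNITS_PER_WORD == 2, giving a correction of 32 - 2 * 8 == 16
   bits (illustrative numbers only).  */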
2614 if (BYTES_BIG_ENDIAN
2615 && bytes % UNITS_PER_WORD)
2616 big_endian_correction
2617 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2619 /* Copy the structure BITSIZE bits at a time.
2621 We could probably emit more efficient code for machines which do not use
2622 strict alignment, but it doesn't seem worth the effort at the current
2623 time. */
2624 for (bitpos = 0, xbitpos = big_endian_correction;
2625 bitpos < bytes * BITS_PER_UNIT;
2626 bitpos += bitsize, xbitpos += bitsize)
2628 /* We need a new source operand each time xbitpos is on a
2629 word boundary and when xbitpos == big_endian_correction
2630 (the first time through). */
2631 if (xbitpos % BITS_PER_WORD == 0
2632 || xbitpos == big_endian_correction)
2633 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2634 GET_MODE (srcreg));
2636 /* We need a new destination operand each time bitpos is on
2637 a word boundary. */
2638 if (bitpos % BITS_PER_WORD == 0)
2639 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2641 /* Use xbitpos for the source extraction (right justified) and
2642 bitpos for the destination store (left justified). */
2643 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2644 extract_bit_field (src, bitsize,
2645 xbitpos % BITS_PER_WORD, 1,
2646 NULL_RTX, word_mode, word_mode,
2647 BITS_PER_WORD),
2648 BITS_PER_WORD);
2651 return tgtblk;
2654 /* Add a USE expression for REG to the (possibly empty) list pointed
2655 to by CALL_FUSAGE. REG must denote a hard register. */
2657 void
2658 use_reg (call_fusage, reg)
2659 rtx *call_fusage, reg;
2661 if (GET_CODE (reg) != REG
2662 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2663 abort ();
2665 *call_fusage
2666 = gen_rtx_EXPR_LIST (VOIDmode,
2667 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2670 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2671 starting at REGNO. All of these registers must be hard registers. */
2673 void
2674 use_regs (call_fusage, regno, nregs)
2675 rtx *call_fusage;
2676 int regno;
2677 int nregs;
2679 int i;
2681 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2682 abort ();
2684 for (i = 0; i < nregs; i++)
2685 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2688 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2689 PARALLEL REGS. This is for calls that pass values in multiple
2690 non-contiguous locations. The Irix 6 ABI has examples of this. */
2692 void
2693 use_group_regs (call_fusage, regs)
2694 rtx *call_fusage;
2695 rtx regs;
2697 int i;
2699 for (i = 0; i < XVECLEN (regs, 0); i++)
2701 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2703 /* A NULL entry means the parameter goes both on the stack and in
2704 registers. This can also be a MEM for targets that pass values
2705 partially on the stack and partially in registers. */
2706 if (reg != 0 && GET_CODE (reg) == REG)
2707 use_reg (call_fusage, reg);
2712 /* Determine whether the LEN bytes generated by CONSTFUN can be
2713 stored to memory using several move instructions. CONSTFUNDATA is
2714 a pointer which will be passed as argument in every CONSTFUN call.
2715 ALIGN is maximum alignment we can assume. Return nonzero if a
2716 call to store_by_pieces should succeed. */
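/* A minimal sketch of a CONSTFUN (hypothetical; real callers such as the
   string builtins provide their own):

       static rtx
       example_constfun (data, offset, mode)
            void *data;
            HOST_WIDE_INT offset;
            enum machine_mode mode;
       {
         return c_readstr ((const char *) data + offset, mode);
       }

   i.e. it must return, as an rtx of mode MODE, the bytes that belong at
   OFFSET within the block being stored.  */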
2718 int
2719 can_store_by_pieces (len, constfun, constfundata, align)
2720 unsigned HOST_WIDE_INT len;
2721 rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
2722 void *constfundata;
2723 unsigned int align;
2725 unsigned HOST_WIDE_INT max_size, l;
2726 HOST_WIDE_INT offset = 0;
2727 enum machine_mode mode, tmode;
2728 enum insn_code icode;
2729 int reverse;
2730 rtx cst;
2732 if (! STORE_BY_PIECES_P (len, align))
2733 return 0;
2735 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2736 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2737 align = MOVE_MAX * BITS_PER_UNIT;
2739 /* We would first store what we can in the largest integer mode, then go to
2740 successively smaller modes. */
2742 for (reverse = 0;
2743 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2744 reverse++)
2746 l = len;
2747 mode = VOIDmode;
2748 max_size = STORE_MAX_PIECES + 1;
2749 while (max_size > 1)
2751 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2752 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2753 if (GET_MODE_SIZE (tmode) < max_size)
2754 mode = tmode;
2756 if (mode == VOIDmode)
2757 break;
2759 icode = mov_optab->handlers[(int) mode].insn_code;
2760 if (icode != CODE_FOR_nothing
2761 && align >= GET_MODE_ALIGNMENT (mode))
2763 unsigned int size = GET_MODE_SIZE (mode);
2765 while (l >= size)
2767 if (reverse)
2768 offset -= size;
2770 cst = (*constfun) (constfundata, offset, mode);
2771 if (!LEGITIMATE_CONSTANT_P (cst))
2772 return 0;
2774 if (!reverse)
2775 offset += size;
2777 l -= size;
2781 max_size = GET_MODE_SIZE (mode);
2784 /* The code above should have handled everything. */
2785 if (l != 0)
2786 abort ();
2789 return 1;
2792 /* Generate several move instructions to store LEN bytes generated by
2793 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2794 pointer which will be passed as argument in every CONSTFUN call.
2795 ALIGN is maximum alignment we can assume.
2796 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2797 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2798 stpcpy. */
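/* For example, when storing 5 bytes the function returns TO itself for
   ENDP == 0, memory at TO plus 5 for ENDP == 1 (mempcpy style), and
   memory at TO plus 4 for ENDP == 2 (stpcpy style).  */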
2800 rtx
2801 store_by_pieces (to, len, constfun, constfundata, align, endp)
2802 rtx to;
2803 unsigned HOST_WIDE_INT len;
2804 rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
2805 void *constfundata;
2806 unsigned int align;
2807 int endp;
2809 struct store_by_pieces data;
2811 if (! STORE_BY_PIECES_P (len, align))
2812 abort ();
2813 to = protect_from_queue (to, 1);
2814 data.constfun = constfun;
2815 data.constfundata = constfundata;
2816 data.len = len;
2817 data.to = to;
2818 store_by_pieces_1 (&data, align);
2819 if (endp)
2821 rtx to1;
2823 if (data.reverse)
2824 abort ();
2825 if (data.autinc_to)
2827 if (endp == 2)
2829 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2830 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2831 else
2832 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2833 -1));
2835 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2836 data.offset);
2838 else
2840 if (endp == 2)
2841 --data.offset;
2842 to1 = adjust_address (data.to, QImode, data.offset);
2844 return to1;
2846 else
2847 return data.to;
2850 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2851 rtx with BLKmode). The caller must pass TO through protect_from_queue
2852 before calling. ALIGN is maximum alignment we can assume. */
2854 static void
2855 clear_by_pieces (to, len, align)
2856 rtx to;
2857 unsigned HOST_WIDE_INT len;
2858 unsigned int align;
2860 struct store_by_pieces data;
2862 data.constfun = clear_by_pieces_1;
2863 data.constfundata = NULL;
2864 data.len = len;
2865 data.to = to;
2866 store_by_pieces_1 (&data, align);
2869 /* Callback routine for clear_by_pieces.
2870 Return const0_rtx unconditionally. */
2872 static rtx
2873 clear_by_pieces_1 (data, offset, mode)
2874 void *data ATTRIBUTE_UNUSED;
2875 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2876 enum machine_mode mode ATTRIBUTE_UNUSED;
2878 return const0_rtx;
2881 /* Subroutine of clear_by_pieces and store_by_pieces.
2882 Generate several move instructions to store LEN bytes of block TO. (A MEM
2883 rtx with BLKmode). The caller must pass TO through protect_from_queue
2884 before calling. ALIGN is maximum alignment we can assume. */
2886 static void
2887 store_by_pieces_1 (data, align)
2888 struct store_by_pieces *data;
2889 unsigned int align;
2891 rtx to_addr = XEXP (data->to, 0);
2892 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2893 enum machine_mode mode = VOIDmode, tmode;
2894 enum insn_code icode;
2896 data->offset = 0;
2897 data->to_addr = to_addr;
2898 data->autinc_to
2899 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2900 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2902 data->explicit_inc_to = 0;
2903 data->reverse
2904 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2905 if (data->reverse)
2906 data->offset = data->len;
2908 /* If storing requires more than two move insns,
2909 copy addresses to registers (to make displacements shorter)
2910 and use post-increment if available. */
2911 if (!data->autinc_to
2912 && move_by_pieces_ninsns (data->len, align) > 2)
2914 /* Determine the main mode we'll be using. */
2915 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2916 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2917 if (GET_MODE_SIZE (tmode) < max_size)
2918 mode = tmode;
2920 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2922 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2923 data->autinc_to = 1;
2924 data->explicit_inc_to = -1;
2927 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2928 && ! data->autinc_to)
2930 data->to_addr = copy_addr_to_reg (to_addr);
2931 data->autinc_to = 1;
2932 data->explicit_inc_to = 1;
2935 if ( !data->autinc_to && CONSTANT_P (to_addr))
2936 data->to_addr = copy_addr_to_reg (to_addr);
2939 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2940 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2941 align = MOVE_MAX * BITS_PER_UNIT;
2943 /* First store what we can in the largest integer mode, then go to
2944 successively smaller modes. */
2946 while (max_size > 1)
2948 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2949 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2950 if (GET_MODE_SIZE (tmode) < max_size)
2951 mode = tmode;
2953 if (mode == VOIDmode)
2954 break;
2956 icode = mov_optab->handlers[(int) mode].insn_code;
2957 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2958 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2960 max_size = GET_MODE_SIZE (mode);
2963 /* The code above should have handled everything. */
2964 if (data->len != 0)
2965 abort ();
2968 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2969 with move instructions for mode MODE. GENFUN is the gen_... function
2970 to make a move insn for that mode. DATA has all the other info. */
2972 static void
2973 store_by_pieces_2 (genfun, mode, data)
2974 rtx (*genfun) PARAMS ((rtx, ...));
2975 enum machine_mode mode;
2976 struct store_by_pieces *data;
2978 unsigned int size = GET_MODE_SIZE (mode);
2979 rtx to1, cst;
2981 while (data->len >= size)
2983 if (data->reverse)
2984 data->offset -= size;
2986 if (data->autinc_to)
2987 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2988 data->offset);
2989 else
2990 to1 = adjust_address (data->to, mode, data->offset);
2992 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2993 emit_insn (gen_add2_insn (data->to_addr,
2994 GEN_INT (-(HOST_WIDE_INT) size)));
2996 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2997 emit_insn ((*genfun) (to1, cst));
2999 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
3000 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
3002 if (! data->reverse)
3003 data->offset += size;
3005 data->len -= size;
3009 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
3010 its length in bytes. */
3012 rtx
3013 clear_storage (object, size)
3014 rtx object;
3015 rtx size;
3017 rtx retval = 0;
3018 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
3019 : GET_MODE_ALIGNMENT (GET_MODE (object)));
3021 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
3022 just move a zero. Otherwise, do this a piece at a time. */
3023 if (GET_MODE (object) != BLKmode
3024 && GET_CODE (size) == CONST_INT
3025 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
3026 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
3027 else
3029 object = protect_from_queue (object, 1);
3030 size = protect_from_queue (size, 0);
3032 if (GET_CODE (size) == CONST_INT
3033 && CLEAR_BY_PIECES_P (INTVAL (size), align))
3034 clear_by_pieces (object, INTVAL (size), align);
3035 else if (clear_storage_via_clrstr (object, size, align))
3037 else
3038 retval = clear_storage_via_libcall (object, size);
3041 return retval;
3044 /* A subroutine of clear_storage. Expand a clrstr pattern;
3045 return true if successful. */
3047 static bool
3048 clear_storage_via_clrstr (object, size, align)
3049 rtx object, size;
3050 unsigned int align;
3052 /* Try the most limited insn first, because there's no point
3053 including more than one in the machine description unless
3054 the more limited one has some advantage. */
3056 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3057 enum machine_mode mode;
3059 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3060 mode = GET_MODE_WIDER_MODE (mode))
3062 enum insn_code code = clrstr_optab[(int) mode];
3063 insn_operand_predicate_fn pred;
3065 if (code != CODE_FOR_nothing
3066 /* We don't need MODE to be narrower than
3067 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3068 the mode mask, as it is returned by the macro, it will
3069 definitely be less than the actual mode mask. */
3070 && ((GET_CODE (size) == CONST_INT
3071 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3072 <= (GET_MODE_MASK (mode) >> 1)))
3073 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3074 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3075 || (*pred) (object, BLKmode))
3076 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3077 || (*pred) (opalign, VOIDmode)))
3079 rtx op1;
3080 rtx last = get_last_insn ();
3081 rtx pat;
3083 op1 = convert_to_mode (mode, size, 1);
3084 pred = insn_data[(int) code].operand[1].predicate;
3085 if (pred != 0 && ! (*pred) (op1, mode))
3086 op1 = copy_to_mode_reg (mode, op1);
3088 pat = GEN_FCN ((int) code) (object, op1, opalign);
3089 if (pat)
3091 emit_insn (pat);
3092 return true;
3094 else
3095 delete_insns_since (last);
3099 return false;
3102 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3103 Return the return value of memset, or 0 if bzero was used. */
3105 static rtx
3106 clear_storage_via_libcall (object, size)
3107 rtx object, size;
3109 tree call_expr, arg_list, fn, object_tree, size_tree;
3110 enum machine_mode size_mode;
3111 rtx retval;
3113 /* OBJECT or SIZE may have been passed through protect_from_queue.
3115 It is unsafe to save the value generated by protect_from_queue
3116 and reuse it later. Consider what happens if emit_queue is
3117 called before the return value from protect_from_queue is used.
3119 Expansion of the CALL_EXPR below will call emit_queue before
3120 we are finished emitting RTL for argument setup. So if we are
3121 not careful we could get the wrong value for an argument.
3123 To avoid this problem we go ahead and emit code to copy OBJECT
3124 and SIZE into new pseudos. We can then place those new pseudos
3125 into an RTL_EXPR and use them later, even after a call to
3126 emit_queue.
3128 Note this is not strictly needed for library calls since they
3129 do not call emit_queue before loading their arguments. However,
3130 we may need to have library calls call emit_queue in the future
3131 since failing to do so could cause problems for targets which
3132 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3134 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3136 if (TARGET_MEM_FUNCTIONS)
3137 size_mode = TYPE_MODE (sizetype);
3138 else
3139 size_mode = TYPE_MODE (unsigned_type_node);
3140 size = convert_to_mode (size_mode, size, 1);
3141 size = copy_to_mode_reg (size_mode, size);
3143 /* It is incorrect to use the libcall calling conventions to call
3144 memset in this context. This could be a user call to memset and
3145 the user may wish to examine the return value from memset. For
3146 targets where libcalls and normal calls have different conventions
3147 for returning pointers, we could end up generating incorrect code.
3149 For convenience, we generate the call to bzero this way as well. */
3151 object_tree = make_tree (ptr_type_node, object);
3152 if (TARGET_MEM_FUNCTIONS)
3153 size_tree = make_tree (sizetype, size);
3154 else
3155 size_tree = make_tree (unsigned_type_node, size);
3157 fn = clear_storage_libcall_fn (true);
3158 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3159 if (TARGET_MEM_FUNCTIONS)
3160 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3161 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3163 /* Now we have to build up the CALL_EXPR itself. */
3164 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3165 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3166 call_expr, arg_list, NULL_TREE);
3167 TREE_SIDE_EFFECTS (call_expr) = 1;
3169 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3171 /* If we are initializing a readonly value, show the above call
3172 clobbered it. Otherwise, a load from it may erroneously be
3173 hoisted from a loop. */
3174 if (RTX_UNCHANGING_P (object))
3175 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3177 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3180 /* A subroutine of clear_storage_via_libcall. Create the tree node
3181 for the function we use for block clears. The first time FOR_CALL
3182 is true, we call assemble_external. */
3184 static GTY(()) tree block_clear_fn;
3186 void
3187 init_block_clear_fn (asmspec)
3188 const char *asmspec;
3190 if (!block_clear_fn)
3192 tree fn, args;
3194 if (TARGET_MEM_FUNCTIONS)
3196 fn = get_identifier ("memset");
3197 args = build_function_type_list (ptr_type_node, ptr_type_node,
3198 integer_type_node, sizetype,
3199 NULL_TREE);
3201 else
3203 fn = get_identifier ("bzero");
3204 args = build_function_type_list (void_type_node, ptr_type_node,
3205 unsigned_type_node, NULL_TREE);
3208 fn = build_decl (FUNCTION_DECL, fn, args);
3209 DECL_EXTERNAL (fn) = 1;
3210 TREE_PUBLIC (fn) = 1;
3211 DECL_ARTIFICIAL (fn) = 1;
3212 TREE_NOTHROW (fn) = 1;
3214 block_clear_fn = fn;
3217 if (asmspec)
3219 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3220 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3224 static tree
3225 clear_storage_libcall_fn (for_call)
3226 int for_call;
3228 static bool emitted_extern;
3230 if (!block_clear_fn)
3231 init_block_clear_fn (NULL);
3233 if (for_call && !emitted_extern)
3235 emitted_extern = true;
3236 make_decl_rtl (block_clear_fn, NULL);
3237 assemble_external (block_clear_fn);
3240 return block_clear_fn;
3243 /* Generate code to copy Y into X.
3244 Both Y and X must have the same mode, except that
3245 Y can be a constant with VOIDmode.
3246 This mode cannot be BLKmode; use emit_block_move for that.
3248 Return the last instruction emitted. */
3250 rtx
3251 emit_move_insn (x, y)
3252 rtx x, y;
3254 enum machine_mode mode = GET_MODE (x);
3255 rtx y_cst = NULL_RTX;
3256 rtx last_insn, set;
3258 x = protect_from_queue (x, 1);
3259 y = protect_from_queue (y, 0);
3261 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3262 abort ();
3264 /* Never force constant_p_rtx to memory. */
3265 if (GET_CODE (y) == CONSTANT_P_RTX)
3267 else if (CONSTANT_P (y))
3269 if (optimize
3270 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3271 && (last_insn = compress_float_constant (x, y)))
3272 return last_insn;
3274 y_cst = y;
3276 if (!LEGITIMATE_CONSTANT_P (y))
3278 y = force_const_mem (mode, y);
3280 /* If the target's cannot_force_const_mem prevented the spill,
3281 assume that the target's move expanders will also take care
3282 of the non-legitimate constant. */
3283 if (!y)
3284 y = y_cst;
3288 /* If X or Y are memory references, verify that their addresses are valid
3289 for the machine. */
3290 if (GET_CODE (x) == MEM
3291 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3292 && ! push_operand (x, GET_MODE (x)))
3293 || (flag_force_addr
3294 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3295 x = validize_mem (x);
3297 if (GET_CODE (y) == MEM
3298 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3299 || (flag_force_addr
3300 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3301 y = validize_mem (y);
3303 if (mode == BLKmode)
3304 abort ();
3306 last_insn = emit_move_insn_1 (x, y);
3308 if (y_cst && GET_CODE (x) == REG
3309 && (set = single_set (last_insn)) != NULL_RTX
3310 && SET_DEST (set) == x
3311 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3312 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3314 return last_insn;
3317 /* Low level part of emit_move_insn.
3318 Called just like emit_move_insn, but assumes X and Y
3319 are basically valid. */
3321 rtx
3322 emit_move_insn_1 (x, y)
3323 rtx x, y;
3325 enum machine_mode mode = GET_MODE (x);
3326 enum machine_mode submode;
3327 enum mode_class class = GET_MODE_CLASS (mode);
3329 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3330 abort ();
3332 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3333 return
3334 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3336 /* Expand complex moves by moving real part and imag part, if possible. */
3337 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3338 && BLKmode != (submode = GET_MODE_INNER (mode))
3339 && (mov_optab->handlers[(int) submode].insn_code
3340 != CODE_FOR_nothing))
3342 /* Don't split destination if it is a stack push. */
3343 int stack = push_operand (x, GET_MODE (x));
3345 #ifdef PUSH_ROUNDING
3346 /* In case we output to the stack, but the size is smaller than the machine
3347 can push exactly, we need to use move instructions. */
3348 if (stack
3349 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3350 != GET_MODE_SIZE (submode)))
3352 rtx temp;
3353 HOST_WIDE_INT offset1, offset2;
3355 /* Do not use anti_adjust_stack, since we don't want to update
3356 stack_pointer_delta. */
3357 temp = expand_binop (Pmode,
3358 #ifdef STACK_GROWS_DOWNWARD
3359 sub_optab,
3360 #else
3361 add_optab,
3362 #endif
3363 stack_pointer_rtx,
3364 GEN_INT
3365 (PUSH_ROUNDING
3366 (GET_MODE_SIZE (GET_MODE (x)))),
3367 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3369 if (temp != stack_pointer_rtx)
3370 emit_move_insn (stack_pointer_rtx, temp);
3372 #ifdef STACK_GROWS_DOWNWARD
3373 offset1 = 0;
3374 offset2 = GET_MODE_SIZE (submode);
3375 #else
3376 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3377 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3378 + GET_MODE_SIZE (submode));
3379 #endif
3381 emit_move_insn (change_address (x, submode,
3382 gen_rtx_PLUS (Pmode,
3383 stack_pointer_rtx,
3384 GEN_INT (offset1))),
3385 gen_realpart (submode, y));
3386 emit_move_insn (change_address (x, submode,
3387 gen_rtx_PLUS (Pmode,
3388 stack_pointer_rtx,
3389 GEN_INT (offset2))),
3390 gen_imagpart (submode, y));
3392 else
3393 #endif
3394 /* If this is a stack push, push the highpart first, so it
3395 will be in the argument order.
3397 In that case, change_address is used only to convert
3398 the mode, not to change the address. */
3399 if (stack)
3401 /* Note that the real part always precedes the imag part in memory
3402 regardless of machine's endianness. */
3403 #ifdef STACK_GROWS_DOWNWARD
3404 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3405 gen_imagpart (submode, y));
3406 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3407 gen_realpart (submode, y));
3408 #else
3409 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3410 gen_realpart (submode, y));
3411 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3412 gen_imagpart (submode, y));
3413 #endif
3415 else
3417 rtx realpart_x, realpart_y;
3418 rtx imagpart_x, imagpart_y;
3420 /* If this is a complex value with each part being smaller than a
3421 word, the usual calling sequence will likely pack the pieces into
3422 a single register. Unfortunately, SUBREG of hard registers only
3423 deals in terms of words, so we have a problem converting input
3424 arguments to the CONCAT of two registers that is used elsewhere
3425 for complex values. If this is before reload, we can copy it into
3426 memory and reload. FIXME, we should see about using extract and
3427 insert on integer registers, but complex short and complex char
3428 variables should be rarely used. */
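/* For instance, a __complex__ char value (two QImode parts) may arrive
   packed in a single word-sized hard register; bouncing it through a
   stack temporary avoids taking a SUBREG of that hard register.  */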
3429 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3430 && (reload_in_progress | reload_completed) == 0)
3432 int packed_dest_p
3433 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3434 int packed_src_p
3435 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3437 if (packed_dest_p || packed_src_p)
3439 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3440 ? MODE_FLOAT : MODE_INT);
3442 enum machine_mode reg_mode
3443 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3445 if (reg_mode != BLKmode)
3447 rtx mem = assign_stack_temp (reg_mode,
3448 GET_MODE_SIZE (mode), 0);
3449 rtx cmem = adjust_address (mem, mode, 0);
3451 cfun->cannot_inline
3452 = N_("function using short complex types cannot be inline");
3454 if (packed_dest_p)
3456 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3458 emit_move_insn_1 (cmem, y);
3459 return emit_move_insn_1 (sreg, mem);
3461 else
3463 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3465 emit_move_insn_1 (mem, sreg);
3466 return emit_move_insn_1 (x, cmem);
3472 realpart_x = gen_realpart (submode, x);
3473 realpart_y = gen_realpart (submode, y);
3474 imagpart_x = gen_imagpart (submode, x);
3475 imagpart_y = gen_imagpart (submode, y);
3477 /* Show the output dies here. This is necessary for SUBREGs
3478 of pseudos since we cannot track their lifetimes correctly;
3479 hard regs shouldn't appear here except as return values.
3480 We never want to emit such a clobber after reload. */
3481 if (x != y
3482 && ! (reload_in_progress || reload_completed)
3483 && (GET_CODE (realpart_x) == SUBREG
3484 || GET_CODE (imagpart_x) == SUBREG))
3485 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3487 emit_move_insn (realpart_x, realpart_y);
3488 emit_move_insn (imagpart_x, imagpart_y);
3491 return get_last_insn ();
3494 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3495 find a mode to do it in. If we have a movcc, use it. Otherwise,
3496 find the MODE_INT mode of the same width. */
3497 else if (GET_MODE_CLASS (mode) == MODE_CC
3498 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3500 enum insn_code insn_code;
3501 enum machine_mode tmode = VOIDmode;
3502 rtx x1 = x, y1 = y;
3504 if (mode != CCmode
3505 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3506 tmode = CCmode;
3507 else
3508 for (tmode = QImode; tmode != VOIDmode;
3509 tmode = GET_MODE_WIDER_MODE (tmode))
3510 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3511 break;
3513 if (tmode == VOIDmode)
3514 abort ();
3516 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3517 may call change_address which is not appropriate if we were
3518 called when a reload was in progress. We don't have to worry
3519 about changing the address since the size in bytes is supposed to
3520 be the same. Copy the MEM to change the mode and move any
3521 substitutions from the old MEM to the new one. */
3523 if (reload_in_progress)
3525 x = gen_lowpart_common (tmode, x1);
3526 if (x == 0 && GET_CODE (x1) == MEM)
3528 x = adjust_address_nv (x1, tmode, 0);
3529 copy_replacements (x1, x);
3532 y = gen_lowpart_common (tmode, y1);
3533 if (y == 0 && GET_CODE (y1) == MEM)
3535 y = adjust_address_nv (y1, tmode, 0);
3536 copy_replacements (y1, y);
3539 else
3541 x = gen_lowpart (tmode, x);
3542 y = gen_lowpart (tmode, y);
3545 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3546 return emit_insn (GEN_FCN (insn_code) (x, y));
3549 /* This will handle any multi-word or full-word mode that lacks a move_insn
3550 pattern. However, you will get better code if you define such patterns,
3551 even if they must turn into multiple assembler instructions. */
3552 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3554 rtx last_insn = 0;
3555 rtx seq, inner;
3556 int need_clobber;
3557 int i;
3559 #ifdef PUSH_ROUNDING
3561 /* If X is a push on the stack, do the push now and replace
3562 X with a reference to the stack pointer. */
3563 if (push_operand (x, GET_MODE (x)))
3565 rtx temp;
3566 enum rtx_code code;
3568 /* Do not use anti_adjust_stack, since we don't want to update
3569 stack_pointer_delta. */
3570 temp = expand_binop (Pmode,
3571 #ifdef STACK_GROWS_DOWNWARD
3572 sub_optab,
3573 #else
3574 add_optab,
3575 #endif
3576 stack_pointer_rtx,
3577 GEN_INT
3578 (PUSH_ROUNDING
3579 (GET_MODE_SIZE (GET_MODE (x)))),
3580 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3582 if (temp != stack_pointer_rtx)
3583 emit_move_insn (stack_pointer_rtx, temp);
3585 code = GET_CODE (XEXP (x, 0));
3587 /* Just hope that small offsets off SP are OK. */
3588 if (code == POST_INC)
3589 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3590 GEN_INT (-((HOST_WIDE_INT)
3591 GET_MODE_SIZE (GET_MODE (x)))));
3592 else if (code == POST_DEC)
3593 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3594 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3595 else
3596 temp = stack_pointer_rtx;
3598 x = change_address (x, VOIDmode, temp);
3600 #endif
3602 /* If we are in reload, see if either operand is a MEM whose address
3603 is scheduled for replacement. */
3604 if (reload_in_progress && GET_CODE (x) == MEM
3605 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3606 x = replace_equiv_address_nv (x, inner);
3607 if (reload_in_progress && GET_CODE (y) == MEM
3608 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3609 y = replace_equiv_address_nv (y, inner);
3611 start_sequence ();
3613 need_clobber = 0;
3614 for (i = 0;
3615 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3616 i++)
3618 rtx xpart = operand_subword (x, i, 1, mode);
3619 rtx ypart = operand_subword (y, i, 1, mode);
3621 /* If we can't get a part of Y, put Y into memory if it is a
3622 constant. Otherwise, force it into a register. If we still
3623 can't get a part of Y, abort. */
3624 if (ypart == 0 && CONSTANT_P (y))
3626 y = force_const_mem (mode, y);
3627 ypart = operand_subword (y, i, 1, mode);
3629 else if (ypart == 0)
3630 ypart = operand_subword_force (y, i, mode);
3632 if (xpart == 0 || ypart == 0)
3633 abort ();
3635 need_clobber |= (GET_CODE (xpart) == SUBREG);
3637 last_insn = emit_move_insn (xpart, ypart);
3640 seq = get_insns ();
3641 end_sequence ();
3643 /* Show the output dies here. This is necessary for SUBREGs
3644 of pseudos since we cannot track their lifetimes correctly;
3645 hard regs shouldn't appear here except as return values.
3646 We never want to emit such a clobber after reload. */
3647 if (x != y
3648 && ! (reload_in_progress || reload_completed)
3649 && need_clobber != 0)
3650 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3652 emit_insn (seq);
3654 return last_insn;
3656 else
3657 abort ();
3660 /* If Y is representable exactly in a narrower mode, and the target can
3661 perform the extension directly from constant or memory, then emit the
3662 move as an extension. */
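/* For example, on a target that can extend SFmode constants directly, a
   DFmode move of the constant 1.0 may be emitted as a float_extend of
   the SFmode constant 1.0, since truncating 1.0 to SFmode is exact.  */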
3664 static rtx
3665 compress_float_constant (x, y)
3666 rtx x, y;
3668 enum machine_mode dstmode = GET_MODE (x);
3669 enum machine_mode orig_srcmode = GET_MODE (y);
3670 enum machine_mode srcmode;
3671 REAL_VALUE_TYPE r;
3673 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3675 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3676 srcmode != orig_srcmode;
3677 srcmode = GET_MODE_WIDER_MODE (srcmode))
3679 enum insn_code ic;
3680 rtx trunc_y, last_insn;
3682 /* Skip if the target can't extend this way. */
3683 ic = can_extend_p (dstmode, srcmode, 0);
3684 if (ic == CODE_FOR_nothing)
3685 continue;
3687 /* Skip if the narrowed value isn't exact. */
3688 if (! exact_real_truncate (srcmode, &r))
3689 continue;
3691 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3693 if (LEGITIMATE_CONSTANT_P (trunc_y))
3695 /* Skip if the target needs extra instructions to perform
3696 the extension. */
3697 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3698 continue;
3700 else if (float_extend_from_mem[dstmode][srcmode])
3701 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3702 else
3703 continue;
3705 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3706 last_insn = get_last_insn ();
3708 if (GET_CODE (x) == REG)
3709 set_unique_reg_note (last_insn, REG_EQUAL, y);
3711 return last_insn;
3714 return NULL_RTX;
3717 /* Pushing data onto the stack. */
3719 /* Push a block of length SIZE (perhaps variable)
3720 and return an rtx to address the beginning of the block.
3721 Note that it is not possible for the value returned to be a QUEUED.
3722 The value may be virtual_outgoing_args_rtx.
3724 EXTRA is the number of bytes of padding to push in addition to SIZE.
3725 BELOW nonzero means this padding comes at low addresses;
3726 otherwise, the padding comes at high addresses. */
3728 rtx
3729 push_block (size, extra, below)
3730 rtx size;
3731 int extra, below;
3733 rtx temp;
3735 size = convert_modes (Pmode, ptr_mode, size, 1);
3736 if (CONSTANT_P (size))
3737 anti_adjust_stack (plus_constant (size, extra));
3738 else if (GET_CODE (size) == REG && extra == 0)
3739 anti_adjust_stack (size);
3740 else
3742 temp = copy_to_mode_reg (Pmode, size);
3743 if (extra != 0)
3744 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3745 temp, 0, OPTAB_LIB_WIDEN);
3746 anti_adjust_stack (temp);
3749 #ifndef STACK_GROWS_DOWNWARD
3750 if (0)
3751 #else
3752 if (1)
3753 #endif
3755 temp = virtual_outgoing_args_rtx;
3756 if (extra != 0 && below)
3757 temp = plus_constant (temp, extra);
3759 else
3761 if (GET_CODE (size) == CONST_INT)
3762 temp = plus_constant (virtual_outgoing_args_rtx,
3763 -INTVAL (size) - (below ? 0 : extra));
3764 else if (extra != 0 && !below)
3765 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3766 negate_rtx (Pmode, plus_constant (size, extra)));
3767 else
3768 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3769 negate_rtx (Pmode, size));
3772 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3775 #ifdef PUSH_ROUNDING
3777 /* Emit single push insn. */
3779 static void
3780 emit_single_push_insn (mode, x, type)
3781 rtx x;
3782 enum machine_mode mode;
3783 tree type;
3785 rtx dest_addr;
3786 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3787 rtx dest;
3788 enum insn_code icode;
3789 insn_operand_predicate_fn pred;
3791 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3792 /* If there is a push pattern, use it. Otherwise fall back to the old way
3793 of throwing a MEM representing the push operation at the move expander. */
3794 icode = push_optab->handlers[(int) mode].insn_code;
3795 if (icode != CODE_FOR_nothing)
3797 if (((pred = insn_data[(int) icode].operand[0].predicate)
3798 && !((*pred) (x, mode))))
3799 x = force_reg (mode, x);
3800 emit_insn (GEN_FCN (icode) (x));
3801 return;
3803 if (GET_MODE_SIZE (mode) == rounded_size)
3804 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3805 else
3807 #ifdef STACK_GROWS_DOWNWARD
3808 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3809 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3810 #else
3811 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3812 GEN_INT (rounded_size));
3813 #endif
3814 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3817 dest = gen_rtx_MEM (mode, dest_addr);
3819 if (type != 0)
3821 set_mem_attributes (dest, type, 1);
3823 if (flag_optimize_sibling_calls)
3824 /* Function incoming arguments may overlap with sibling call
3825 outgoing arguments and we cannot allow reordering of reads
3826 from function arguments with stores to outgoing arguments
3827 of sibling calls. */
3828 set_mem_alias_set (dest, 0);
3830 emit_move_insn (dest, x);
3832 #endif
3834 /* Generate code to push X onto the stack, assuming it has mode MODE and
3835 type TYPE.
3836 MODE is redundant except when X is a CONST_INT (since they don't
3837 carry mode info).
3838 SIZE is an rtx for the size of data to be copied (in bytes),
3839 needed only if X is BLKmode.
3841 ALIGN (in bits) is maximum alignment we can assume.
3843 If PARTIAL and REG are both nonzero, then copy that many of the first
3844 words of X into registers starting with REG, and push the rest of X.
3845 The amount of space pushed is decreased by PARTIAL words,
3846 rounded *down* to a multiple of PARM_BOUNDARY.
3847 REG must be a hard register in this case.
3848 If REG is zero but PARTIAL is not, take all other actions for an
3849 argument partially in registers, but do not actually load any
3850 registers.
3852 EXTRA is the amount in bytes of extra space to leave next to this arg.
3853 This is ignored if an argument block has already been allocated.
3855 On a machine that lacks real push insns, ARGS_ADDR is the address of
3856 the bottom of the argument block for this call. We use indexing off there
3857 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3858 argument block has not been preallocated.
3860 ARGS_SO_FAR is the size of args previously pushed for this call.
3862 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3863 for arguments passed in registers. If nonzero, it will be the number
3864 of bytes required. */
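/* For example (illustrative only): with UNITS_PER_WORD == 4, PARTIAL == 2
   and a nonzero REG, the first 8 bytes of X are loaded into REG and the
   following register, only the remainder of X is pushed, and the space
   pushed is reduced by those two words, rounded down to a multiple of
   PARM_BOUNDARY.  */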
3866 void
3867 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3868 args_addr, args_so_far, reg_parm_stack_space,
3869 alignment_pad)
3870 rtx x;
3871 enum machine_mode mode;
3872 tree type;
3873 rtx size;
3874 unsigned int align;
3875 int partial;
3876 rtx reg;
3877 int extra;
3878 rtx args_addr;
3879 rtx args_so_far;
3880 int reg_parm_stack_space;
3881 rtx alignment_pad;
3883 rtx xinner;
3884 enum direction stack_direction
3885 #ifdef STACK_GROWS_DOWNWARD
3886 = downward;
3887 #else
3888 = upward;
3889 #endif
3891 /* Decide where to pad the argument: `downward' for below,
3892 `upward' for above, or `none' for don't pad it.
3893 Default is below for small data on big-endian machines; else above. */
3894 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3896 /* Invert direction if stack is post-decrement.
3897 FIXME: why? */
3898 if (STACK_PUSH_CODE == POST_DEC)
3899 if (where_pad != none)
3900 where_pad = (where_pad == downward ? upward : downward);
3902 xinner = x = protect_from_queue (x, 0);
3904 if (mode == BLKmode)
3906 /* Copy a block into the stack, entirely or partially. */
3908 rtx temp;
3909 int used = partial * UNITS_PER_WORD;
3910 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3911 int skip;
3913 if (size == 0)
3914 abort ();
3916 used -= offset;
3918 /* USED is now the # of bytes we need not copy to the stack
3919 because registers will take care of them. */
3921 if (partial != 0)
3922 xinner = adjust_address (xinner, BLKmode, used);
3924 /* If the partial register-part of the arg counts in its stack size,
3925 skip the part of stack space corresponding to the registers.
3926 Otherwise, start copying to the beginning of the stack space,
3927 by setting SKIP to 0. */
3928 skip = (reg_parm_stack_space == 0) ? 0 : used;
3930 #ifdef PUSH_ROUNDING
3931 /* Do it with several push insns if that doesn't take lots of insns
3932 and if there is no difficulty with push insns that skip bytes
3933 on the stack for alignment purposes. */
3934 if (args_addr == 0
3935 && PUSH_ARGS
3936 && GET_CODE (size) == CONST_INT
3937 && skip == 0
3938 && MEM_ALIGN (xinner) >= align
3939 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3940 /* Here we avoid the case of a structure whose weak alignment
3941 forces many pushes of a small amount of data,
3942 and such small pushes do rounding that causes trouble. */
3943 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3944 || align >= BIGGEST_ALIGNMENT
3945 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3946 == (align / BITS_PER_UNIT)))
3947 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3949 /* Push padding now if padding above and stack grows down,
3950 or if padding below and stack grows up.
3951 But if space already allocated, this has already been done. */
3952 if (extra && args_addr == 0
3953 && where_pad != none && where_pad != stack_direction)
3954 anti_adjust_stack (GEN_INT (extra));
3956 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3958 else
3959 #endif /* PUSH_ROUNDING */
3961 rtx target;
3963 /* Otherwise make space on the stack and copy the data
3964 to the address of that space. */
3966 /* Deduct words put into registers from the size we must copy. */
3967 if (partial != 0)
3969 if (GET_CODE (size) == CONST_INT)
3970 size = GEN_INT (INTVAL (size) - used);
3971 else
3972 size = expand_binop (GET_MODE (size), sub_optab, size,
3973 GEN_INT (used), NULL_RTX, 0,
3974 OPTAB_LIB_WIDEN);
3977 /* Get the address of the stack space.
3978 In this case, we do not deal with EXTRA separately.
3979 A single stack adjust will do. */
3980 if (! args_addr)
3982 temp = push_block (size, extra, where_pad == downward);
3983 extra = 0;
3985 else if (GET_CODE (args_so_far) == CONST_INT)
3986 temp = memory_address (BLKmode,
3987 plus_constant (args_addr,
3988 skip + INTVAL (args_so_far)));
3989 else
3990 temp = memory_address (BLKmode,
3991 plus_constant (gen_rtx_PLUS (Pmode,
3992 args_addr,
3993 args_so_far),
3994 skip));
3996 if (!ACCUMULATE_OUTGOING_ARGS)
3998 /* If the source is referenced relative to the stack pointer,
3999 copy it to another register to stabilize it. We do not need
4000 to do this if we know that we won't be changing sp. */
4002 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4003 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4004 temp = copy_to_reg (temp);
4007 target = gen_rtx_MEM (BLKmode, temp);
4009 if (type != 0)
4011 set_mem_attributes (target, type, 1);
4012 /* Function incoming arguments may overlap with sibling call
4013 outgoing arguments and we cannot allow reordering of reads
4014 from function arguments with stores to outgoing arguments
4015 of sibling calls. */
4016 set_mem_alias_set (target, 0);
4019 /* ALIGN may well be better aligned than TYPE, e.g. due to
4020 PARM_BOUNDARY. Assume the caller isn't lying. */
4021 set_mem_align (target, align);
4023 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4026 else if (partial > 0)
4028 /* Scalar partly in registers. */
4030 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4031 int i;
4032 int not_stack;
4033 /* # words of start of argument
4034 that we must make space for but need not store. */
4035 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
4036 int args_offset = INTVAL (args_so_far);
4037 int skip;
4039 /* Push padding now if padding above and stack grows down,
4040 or if padding below and stack grows up.
4041 But if space already allocated, this has already been done. */
4042 if (extra && args_addr == 0
4043 && where_pad != none && where_pad != stack_direction)
4044 anti_adjust_stack (GEN_INT (extra));
4046 /* If we make space by pushing it, we might as well push
4047 the real data. Otherwise, we can leave OFFSET nonzero
4048 and leave the space uninitialized. */
4049 if (args_addr == 0)
4050 offset = 0;
4052 /* Now NOT_STACK gets the number of words that we don't need to
4053 allocate on the stack. */
4054 not_stack = partial - offset;
4056 /* If the partial register-part of the arg counts in its stack size,
4057 skip the part of stack space corresponding to the registers.
4058 Otherwise, start copying to the beginning of the stack space,
4059 by setting SKIP to 0. */
4060 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4062 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4063 x = validize_mem (force_const_mem (mode, x));
4065 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4066 SUBREGs of such registers are not allowed. */
4067 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4068 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4069 x = copy_to_reg (x);
4071 /* Loop over all the words allocated on the stack for this arg. */
4072 /* We can do it by words, because any scalar bigger than a word
4073 has a size a multiple of a word. */
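      /* For instance (hypothetical 32-bit target): a DImode scalar with
	 PARTIAL == 1 and ARGS_ADDR == 0 gives SIZE == 2, OFFSET == 0 and
	 NOT_STACK == 1, so word 0 travels in the register and only word 1
	 is pushed by the loop below.  */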
4074 #ifndef PUSH_ARGS_REVERSED
4075 for (i = not_stack; i < size; i++)
4076 #else
4077 for (i = size - 1; i >= not_stack; i--)
4078 #endif
4079 if (i >= not_stack + offset)
4080 emit_push_insn (operand_subword_force (x, i, mode),
4081 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4082 0, args_addr,
4083 GEN_INT (args_offset + ((i - not_stack + skip)
4084 * UNITS_PER_WORD)),
4085 reg_parm_stack_space, alignment_pad);
4087 else
4089 rtx addr;
4090 rtx dest;
4092 /* Push padding now if padding above and stack grows down,
4093 or if padding below and stack grows up.
4094 But if space already allocated, this has already been done. */
4095 if (extra && args_addr == 0
4096 && where_pad != none && where_pad != stack_direction)
4097 anti_adjust_stack (GEN_INT (extra));
4099 #ifdef PUSH_ROUNDING
4100 if (args_addr == 0 && PUSH_ARGS)
4101 emit_single_push_insn (mode, x, type);
4102 else
4103 #endif
4105 if (GET_CODE (args_so_far) == CONST_INT)
4106 addr
4107 = memory_address (mode,
4108 plus_constant (args_addr,
4109 INTVAL (args_so_far)));
4110 else
4111 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4112 args_so_far));
4113 dest = gen_rtx_MEM (mode, addr);
4114 if (type != 0)
4116 set_mem_attributes (dest, type, 1);
4117 /* Function incoming arguments may overlap with sibling call
4118 outgoing arguments and we cannot allow reordering of reads
4119 from function arguments with stores to outgoing arguments
4120 of sibling calls. */
4121 set_mem_alias_set (dest, 0);
4124 emit_move_insn (dest, x);
4128 /* If part should go in registers, copy that part
4129 into the appropriate registers. Do this now, at the end,
4130 since mem-to-mem copies above may do function calls. */
4131 if (partial > 0 && reg != 0)
4133 /* Handle calls that pass values in multiple non-contiguous locations.
4134 The Irix 6 ABI has examples of this. */
4135 if (GET_CODE (reg) == PARALLEL)
4136 emit_group_load (reg, x, -1); /* ??? size? */
4137 else
4138 move_block_to_reg (REGNO (reg), x, partial, mode);
4141 if (extra && args_addr == 0 && where_pad == stack_direction)
4142 anti_adjust_stack (GEN_INT (extra));
4144 if (alignment_pad && args_addr == 0)
4145 anti_adjust_stack (alignment_pad);
4148 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4149 operations. */
4151 static rtx
4152 get_subtarget (x)
4153 rtx x;
4155 return ((x == 0
4156 /* Only registers can be subtargets. */
4157 || GET_CODE (x) != REG
4158 /* If the register is readonly, it can't be set more than once. */
4159 || RTX_UNCHANGING_P (x)
4160 /* Don't use hard regs to avoid extending their life. */
4161 || REGNO (x) < FIRST_PSEUDO_REGISTER
4162 /* Avoid subtargets inside loops,
4163 since they hide some invariant expressions. */
4164 || preserve_subexpressions_p ())
4165 ? 0 : x);
4168 /* Expand an assignment that stores the value of FROM into TO.
4169 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4170 (This may contain a QUEUED rtx;
4171 if the value is constant, this rtx is a constant.)
4172 Otherwise, the returned value is NULL_RTX.
4174 SUGGEST_REG is no longer actually used.
4175 It used to mean, copy the value through a register
4176 and return that register, if that is possible.
4177 We now use WANT_VALUE to decide whether to do this. */
4180 expand_assignment (to, from, want_value, suggest_reg)
4181 tree to, from;
4182 int want_value;
4183 int suggest_reg ATTRIBUTE_UNUSED;
4185 rtx to_rtx = 0;
4186 rtx result;
4188 /* Don't crash if the lhs of the assignment was erroneous. */
4190 if (TREE_CODE (to) == ERROR_MARK)
4192 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4193 return want_value ? result : NULL_RTX;
4196 /* Assignment of a structure component needs special treatment
4197 if the structure component's rtx is not simply a MEM.
4198 Assignment of an array element at a constant index, and assignment of
4199 an array element in an unaligned packed structure field, has the same
4200 problem. */
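  /* For example, an assignment such as `s.b = v' where b is a bit-field,
     or `a[2] = v' for an array element, takes the path below: the object
     is decomposed with get_inner_reference and the value is stored via
     store_field.  */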
4202 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4203 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4204 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4206 enum machine_mode mode1;
4207 HOST_WIDE_INT bitsize, bitpos;
4208 rtx orig_to_rtx;
4209 tree offset;
4210 int unsignedp;
4211 int volatilep = 0;
4212 tree tem;
4214 push_temp_slots ();
4215 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4216 &unsignedp, &volatilep);
4218 /* If we are going to use store_bit_field and extract_bit_field,
4219 make sure to_rtx will be safe for multiple use. */
4221 if (mode1 == VOIDmode && want_value)
4222 tem = stabilize_reference (tem);
4224 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4226 if (offset != 0)
4228 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4230 if (GET_CODE (to_rtx) != MEM)
4231 abort ();
4233 #ifdef POINTERS_EXTEND_UNSIGNED
4234 if (GET_MODE (offset_rtx) != Pmode)
4235 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4236 #else
4237 if (GET_MODE (offset_rtx) != ptr_mode)
4238 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4239 #endif
4241 /* A constant address in TO_RTX can have VOIDmode; we must not try
4242 to call force_reg for that case. Avoid that case. */
4243 if (GET_CODE (to_rtx) == MEM
4244 && GET_MODE (to_rtx) == BLKmode
4245 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4246 && bitsize > 0
4247 && (bitpos % bitsize) == 0
4248 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4249 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4251 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4252 bitpos = 0;
4255 to_rtx = offset_address (to_rtx, offset_rtx,
4256 highest_pow2_factor_for_type (TREE_TYPE (to),
4257 offset));
4260 if (GET_CODE (to_rtx) == MEM)
4262 /* If the field is at offset zero, we could have been given the
4263 DECL_RTX of the parent struct. Don't munge it. */
4264 to_rtx = shallow_copy_rtx (to_rtx);
4266 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4269 /* Deal with volatile and readonly fields. The former is only done
4270 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4271 if (volatilep && GET_CODE (to_rtx) == MEM)
4273 if (to_rtx == orig_to_rtx)
4274 to_rtx = copy_rtx (to_rtx);
4275 MEM_VOLATILE_P (to_rtx) = 1;
4278 if (TREE_CODE (to) == COMPONENT_REF
4279 && TREE_READONLY (TREE_OPERAND (to, 1)))
4281 if (to_rtx == orig_to_rtx)
4282 to_rtx = copy_rtx (to_rtx);
4283 RTX_UNCHANGING_P (to_rtx) = 1;
4286 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4288 if (to_rtx == orig_to_rtx)
4289 to_rtx = copy_rtx (to_rtx);
4290 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4293 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4294 (want_value
4295 /* Spurious cast for HPUX compiler. */
4296 ? ((enum machine_mode)
4297 TYPE_MODE (TREE_TYPE (to)))
4298 : VOIDmode),
4299 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4301 preserve_temp_slots (result);
4302 free_temp_slots ();
4303 pop_temp_slots ();
4305 /* If the value is meaningful, convert RESULT to the proper mode.
4306 Otherwise, return nothing. */
4307 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4308 TYPE_MODE (TREE_TYPE (from)),
4309 result,
4310 TREE_UNSIGNED (TREE_TYPE (to)))
4311 : NULL_RTX);
4314 /* If the rhs is a function call and its value is not an aggregate,
4315 call the function before we start to compute the lhs.
4316 This is needed for correct code for cases such as
4317 val = setjmp (buf) on machines where reference to val
4318 requires loading up part of an address in a separate insn.
4320 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4321 since it might be a promoted variable where the zero- or sign-extension
4322 needs to be done. Handling this in the normal way is safe because no
4323 computation is done before the call. */
4324 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4325 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4326 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4327 && GET_CODE (DECL_RTL (to)) == REG))
4329 rtx value;
4331 push_temp_slots ();
4332 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4333 if (to_rtx == 0)
4334 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4336 /* Handle calls that return values in multiple non-contiguous locations.
4337 The Irix 6 ABI has examples of this. */
4338 if (GET_CODE (to_rtx) == PARALLEL)
4339 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4340 else if (GET_MODE (to_rtx) == BLKmode)
4341 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4342 else
4344 #ifdef POINTERS_EXTEND_UNSIGNED
4345 if (POINTER_TYPE_P (TREE_TYPE (to))
4346 && GET_MODE (to_rtx) != GET_MODE (value))
4347 value = convert_memory_address (GET_MODE (to_rtx), value);
4348 #endif
4349 emit_move_insn (to_rtx, value);
4351 preserve_temp_slots (to_rtx);
4352 free_temp_slots ();
4353 pop_temp_slots ();
4354 return want_value ? to_rtx : NULL_RTX;
4357 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4358 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4360 if (to_rtx == 0)
4361 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4363 /* Don't move directly into a return register. */
4364 if (TREE_CODE (to) == RESULT_DECL
4365 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4367 rtx temp;
4369 push_temp_slots ();
4370 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4372 if (GET_CODE (to_rtx) == PARALLEL)
4373 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4374 else
4375 emit_move_insn (to_rtx, temp);
4377 preserve_temp_slots (to_rtx);
4378 free_temp_slots ();
4379 pop_temp_slots ();
4380 return want_value ? to_rtx : NULL_RTX;
4383 /* In case we are returning the contents of an object which overlaps
4384 the place the value is being stored, use a safe function when copying
4385 a value through a pointer into a structure value return block. */
4386 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4387 && current_function_returns_struct
4388 && !current_function_returns_pcc_struct)
4390 rtx from_rtx, size;
4392 push_temp_slots ();
4393 size = expr_size (from);
4394 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4396 if (TARGET_MEM_FUNCTIONS)
4397 emit_library_call (memmove_libfunc, LCT_NORMAL,
4398 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4399 XEXP (from_rtx, 0), Pmode,
4400 convert_to_mode (TYPE_MODE (sizetype),
4401 size, TREE_UNSIGNED (sizetype)),
4402 TYPE_MODE (sizetype));
4403 else
4404 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4405 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4406 XEXP (to_rtx, 0), Pmode,
4407 convert_to_mode (TYPE_MODE (integer_type_node),
4408 size,
4409 TREE_UNSIGNED (integer_type_node)),
4410 TYPE_MODE (integer_type_node));
4412 preserve_temp_slots (to_rtx);
4413 free_temp_slots ();
4414 pop_temp_slots ();
4415 return want_value ? to_rtx : NULL_RTX;
4418 /* Compute FROM and store the value in the rtx we got. */
4420 push_temp_slots ();
4421 result = store_expr (from, to_rtx, want_value);
4422 preserve_temp_slots (result);
4423 free_temp_slots ();
4424 pop_temp_slots ();
4425 return want_value ? result : NULL_RTX;
4428 /* Generate code for computing expression EXP,
4429 and storing the value into TARGET.
4430 TARGET may contain a QUEUED rtx.
4432 If WANT_VALUE & 1 is nonzero, return a copy of the value
4433 not in TARGET, so that we can be sure to use the proper
4434 value in a containing expression even if TARGET has something
4435 else stored in it. If possible, we copy the value through a pseudo
4436 and return that pseudo. Or, if the value is constant, we try to
4437 return the constant. In some cases, we return a pseudo
4438 copied *from* TARGET.
4440 If the mode is BLKmode then we may return TARGET itself.
4441 It turns out that in BLKmode it doesn't cause a problem,
4442 because C has no operators that could combine two different
4443 assignments into the same BLKmode object with different values
4444 with no sequence point. Will other languages need this to
4445 be more thorough?
4447 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4448 to catch quickly any cases where the caller uses the value
4449 and fails to set WANT_VALUE.
4451 If WANT_VALUE & 2 is set, this is a store into a call param on the
4452 stack, and block moves may need to be treated specially. */
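/* Illustrative (hypothetical) uses of the WANT_VALUE bits described above:
   store_expr (rhs, target, 0) just stores and returns NULL_RTX;
   store_expr (rhs, target, 1) also returns an rtx usable as the value of
   the enclosing expression; or-ing in 2 marks a store into a stack call
   parameter, so block moves are emitted with BLOCK_OP_CALL_PARM.  */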
4455 store_expr (exp, target, want_value)
4456 tree exp;
4457 rtx target;
4458 int want_value;
4460 rtx temp;
4461 int dont_return_target = 0;
4462 int dont_store_target = 0;
4464 if (VOID_TYPE_P (TREE_TYPE (exp)))
4466 /* C++ can generate ?: expressions with a throw expression in one
4467 branch and an rvalue in the other. Here, we resolve attempts to
4468 store the throw expression's nonexistent result.
4469 if (want_value)
4470 abort ();
4471 expand_expr (exp, const0_rtx, VOIDmode, 0);
4472 return NULL_RTX;
4474 if (TREE_CODE (exp) == COMPOUND_EXPR)
4476 /* Perform first part of compound expression, then assign from second
4477 part. */
4478 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4479 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4480 emit_queue ();
4481 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4483 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4485 /* For conditional expression, get safe form of the target. Then
4486 test the condition, doing the appropriate assignment on either
4487 side. This avoids the creation of unnecessary temporaries.
4488 For non-BLKmode, it is more efficient not to do this. */
4490 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4492 emit_queue ();
4493 target = protect_from_queue (target, 1);
4495 do_pending_stack_adjust ();
4496 NO_DEFER_POP;
4497 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4498 start_cleanup_deferral ();
4499 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4500 end_cleanup_deferral ();
4501 emit_queue ();
4502 emit_jump_insn (gen_jump (lab2));
4503 emit_barrier ();
4504 emit_label (lab1);
4505 start_cleanup_deferral ();
4506 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4507 end_cleanup_deferral ();
4508 emit_queue ();
4509 emit_label (lab2);
4510 OK_DEFER_POP;
4512 return want_value & 1 ? target : NULL_RTX;
4514 else if (queued_subexp_p (target))
4515 /* If target contains a postincrement, let's not risk
4516 using it as the place to generate the rhs. */
4518 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4520 /* Expand EXP into a new pseudo. */
4521 temp = gen_reg_rtx (GET_MODE (target));
4522 temp = expand_expr (exp, temp, GET_MODE (target),
4523 (want_value & 2
4524 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4526 else
4527 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4528 (want_value & 2
4529 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4531 /* If target is volatile, ANSI requires accessing the value
4532 *from* the target, if it is accessed. So make that happen.
4533 In no case return the target itself. */
4534 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4535 dont_return_target = 1;
4537 else if ((want_value & 1) != 0
4538 && GET_CODE (target) == MEM
4539 && ! MEM_VOLATILE_P (target)
4540 && GET_MODE (target) != BLKmode)
4541 /* If target is in memory and caller wants value in a register instead,
4542 arrange that. Pass TARGET as target for expand_expr so that,
4543 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4544 We know expand_expr will not use the target in that case.
4545 Don't do this if TARGET is volatile because we are supposed
4546 to write it and then read it. */
4548 temp = expand_expr (exp, target, GET_MODE (target),
4549 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4550 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4552 /* If TEMP is already in the desired TARGET, only copy it from
4553 memory and don't store it there again. */
4554 if (temp == target
4555 || (rtx_equal_p (temp, target)
4556 && ! side_effects_p (temp) && ! side_effects_p (target)))
4557 dont_store_target = 1;
4558 temp = copy_to_reg (temp);
4560 dont_return_target = 1;
4562 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4563 /* If this is a scalar in a register that is stored in a wider mode
4564 than the declared mode, compute the result into its declared mode
4565 and then convert to the wider mode. Our value is the computed
4566 expression. */
4568 rtx inner_target = 0;
4570 /* If we don't want a value, we can do the conversion inside EXP,
4571 which will often result in some optimizations. Do the conversion
4572 in two steps: first change the signedness, if needed, then
4573 the extend. But don't do this if the type of EXP is a subtype
4574 of something else since then the conversion might involve
4575 more than just converting modes. */
4576 if ((want_value & 1) == 0
4577 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4578 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4580 if (TREE_UNSIGNED (TREE_TYPE (exp))
4581 != SUBREG_PROMOTED_UNSIGNED_P (target))
4582 exp = convert
4583 ((*lang_hooks.types.signed_or_unsigned_type)
4584 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4586 exp = convert ((*lang_hooks.types.type_for_mode)
4587 (GET_MODE (SUBREG_REG (target)),
4588 SUBREG_PROMOTED_UNSIGNED_P (target)),
4589 exp);
4591 inner_target = SUBREG_REG (target);
4594 temp = expand_expr (exp, inner_target, VOIDmode,
4595 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4597 /* If TEMP is a MEM and we want a result value, make the access
4598 now so it gets done only once. Strictly speaking, this is
4599 only necessary if the MEM is volatile, or if the address
4600 overlaps TARGET. But not performing the load twice also
4601 reduces the amount of rtl we generate and then have to CSE. */
4602 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4603 temp = copy_to_reg (temp);
4605 /* If TEMP is a VOIDmode constant, use convert_modes to make
4606 sure that we properly convert it. */
4607 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4609 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4610 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4611 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4612 GET_MODE (target), temp,
4613 SUBREG_PROMOTED_UNSIGNED_P (target));
4616 convert_move (SUBREG_REG (target), temp,
4617 SUBREG_PROMOTED_UNSIGNED_P (target));
4619 /* If we promoted a constant, change the mode back down to match
4620 target. Otherwise, the caller might get confused by a result whose
4621 mode is larger than expected. */
4623 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4625 if (GET_MODE (temp) != VOIDmode)
4627 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4628 SUBREG_PROMOTED_VAR_P (temp) = 1;
4629 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4630 SUBREG_PROMOTED_UNSIGNED_P (target));
4632 else
4633 temp = convert_modes (GET_MODE (target),
4634 GET_MODE (SUBREG_REG (target)),
4635 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4638 return want_value & 1 ? temp : NULL_RTX;
4640 else
4642 temp = expand_expr (exp, target, GET_MODE (target),
4643 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4644 /* Return TARGET if it's a specified hardware register.
4645 If TARGET is a volatile mem ref, either return TARGET
4646 or return a reg copied *from* TARGET; ANSI requires this.
4648 Otherwise, if TEMP is not TARGET, return TEMP
4649 if it is constant (for efficiency),
4650 or if we really want the correct value. */
4651 if (!(target && GET_CODE (target) == REG
4652 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4653 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4654 && ! rtx_equal_p (temp, target)
4655 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4656 dont_return_target = 1;
4659 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4660 the same as that of TARGET, adjust the constant. This is needed, for
4661 example, in case it is a CONST_DOUBLE and we want only a word-sized
4662 value. */
4663 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4664 && TREE_CODE (exp) != ERROR_MARK
4665 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4666 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4667 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4669 /* If value was not generated in the target, store it there.
4670 Convert the value to TARGET's type first if necessary.
4671 If TEMP and TARGET compare equal according to rtx_equal_p, but
4672 one or both of them are volatile memory refs, we have to distinguish
4673 two cases:
4674 - expand_expr has used TARGET. In this case, we must not generate
4675 another copy. This can be detected by TARGET being equal according
4676 to == .
4677 - expand_expr has not used TARGET - that means that the source just
4678 happens to have the same RTX form. Since temp will have been created
4679 by expand_expr, it will compare unequal according to == .
4680 We must generate a copy in this case, to reach the correct number
4681 of volatile memory references. */
4683 if ((! rtx_equal_p (temp, target)
4684 || (temp != target && (side_effects_p (temp)
4685 || side_effects_p (target))))
4686 && TREE_CODE (exp) != ERROR_MARK
4687 && ! dont_store_target
4688 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4689 but TARGET is not valid memory reference, TEMP will differ
4690 from TARGET although it is really the same location. */
4691 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4692 || target != DECL_RTL_IF_SET (exp))
4693 /* If there's nothing to copy, don't bother. Don't call expr_size
4694 unless necessary, because some front ends' expr_size hook (e.g. C++'s)
4695 aborts on objects that are not supposed to be bit-copied or
4696 bit-initialized. */
4697 && expr_size (exp) != const0_rtx)
4699 target = protect_from_queue (target, 1);
4700 if (GET_MODE (temp) != GET_MODE (target)
4701 && GET_MODE (temp) != VOIDmode)
4703 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4704 if (dont_return_target)
4706 /* In this case, we will return TEMP,
4707 so make sure it has the proper mode.
4708 But don't forget to store the value into TARGET. */
4709 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4710 emit_move_insn (target, temp);
4712 else
4713 convert_move (target, temp, unsignedp);
4716 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4718 /* Handle copying a string constant into an array. The string
4719 constant may be shorter than the array. So copy just the string's
4720 actual length, and clear the rest. First get the size of the data
4721 type of the string, which is actually the size of the target. */
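	  /* For instance, initializing a hypothetical `char buf[8]' from
	     the string constant "hi" copies TREE_STRING_LENGTH == 3 bytes
	     (the characters plus the terminating NUL) and clears the
	     remaining 5 bytes of the target.  */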
4722 rtx size = expr_size (exp);
4724 if (GET_CODE (size) == CONST_INT
4725 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4726 emit_block_move (target, temp, size,
4727 (want_value & 2
4728 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4729 else
4731 /* Compute the size of the data to copy from the string. */
4732 tree copy_size
4733 = size_binop (MIN_EXPR,
4734 make_tree (sizetype, size),
4735 size_int (TREE_STRING_LENGTH (exp)));
4736 rtx copy_size_rtx
4737 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4738 (want_value & 2
4739 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4740 rtx label = 0;
4742 /* Copy that much. */
4743 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4744 TREE_UNSIGNED (sizetype));
4745 emit_block_move (target, temp, copy_size_rtx,
4746 (want_value & 2
4747 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4749 /* Figure out how much is left in TARGET that we have to clear.
4750 Do all calculations in ptr_mode. */
4751 if (GET_CODE (copy_size_rtx) == CONST_INT)
4753 size = plus_constant (size, -INTVAL (copy_size_rtx));
4754 target = adjust_address (target, BLKmode,
4755 INTVAL (copy_size_rtx));
4757 else
4759 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4760 copy_size_rtx, NULL_RTX, 0,
4761 OPTAB_LIB_WIDEN);
4763 #ifdef POINTERS_EXTEND_UNSIGNED
4764 if (GET_MODE (copy_size_rtx) != Pmode)
4765 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4766 TREE_UNSIGNED (sizetype));
4767 #endif
4769 target = offset_address (target, copy_size_rtx,
4770 highest_pow2_factor (copy_size));
4771 label = gen_label_rtx ();
4772 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4773 GET_MODE (size), 0, label);
4776 if (size != const0_rtx)
4777 clear_storage (target, size);
4779 if (label)
4780 emit_label (label);
4783 /* Handle calls that return values in multiple non-contiguous locations.
4784 The Irix 6 ABI has examples of this. */
4785 else if (GET_CODE (target) == PARALLEL)
4786 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4787 else if (GET_MODE (temp) == BLKmode)
4788 emit_block_move (target, temp, expr_size (exp),
4789 (want_value & 2
4790 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4791 else
4792 emit_move_insn (target, temp);
4795 /* If we don't want a value, return NULL_RTX. */
4796 if ((want_value & 1) == 0)
4797 return NULL_RTX;
4799 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4800 ??? The latter test doesn't seem to make sense. */
4801 else if (dont_return_target && GET_CODE (temp) != MEM)
4802 return temp;
4804 /* Return TARGET itself if it is a hard register. */
4805 else if ((want_value & 1) != 0
4806 && GET_MODE (target) != BLKmode
4807 && ! (GET_CODE (target) == REG
4808 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4809 return copy_to_reg (target);
4811 else
4812 return target;
4815 /* Return 1 if EXP just contains zeros. */
4817 static int
4818 is_zeros_p (exp)
4819 tree exp;
4821 tree elt;
4823 switch (TREE_CODE (exp))
4825 case CONVERT_EXPR:
4826 case NOP_EXPR:
4827 case NON_LVALUE_EXPR:
4828 case VIEW_CONVERT_EXPR:
4829 return is_zeros_p (TREE_OPERAND (exp, 0));
4831 case INTEGER_CST:
4832 return integer_zerop (exp);
4834 case COMPLEX_CST:
4835 return
4836 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4838 case REAL_CST:
4839 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4841 case VECTOR_CST:
4842 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4843 elt = TREE_CHAIN (elt))
4844 if (!is_zeros_p (TREE_VALUE (elt)))
4845 return 0;
4847 return 1;
4849 case CONSTRUCTOR:
4850 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4851 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4852 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4853 if (! is_zeros_p (TREE_VALUE (elt)))
4854 return 0;
4856 return 1;
4858 default:
4859 return 0;
4863 /* Return 1 if EXP contains mostly (at least 3/4) zeros, e.g. 8 zero elements out of 10. */
4865 static int
4866 mostly_zeros_p (exp)
4867 tree exp;
4869 if (TREE_CODE (exp) == CONSTRUCTOR)
4871 int elts = 0, zeros = 0;
4872 tree elt = CONSTRUCTOR_ELTS (exp);
4873 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4875 /* If there are no ranges of true bits, it is all zero. */
4876 return elt == NULL_TREE;
4878 for (; elt; elt = TREE_CHAIN (elt))
4880 /* We do not handle the case where the index is a RANGE_EXPR,
4881 so the statistic will be somewhat inaccurate.
4882 We do make a more accurate count in store_constructor itself,
4883 so since this function is only used for nested array elements,
4884 this should be close enough. */
4885 if (mostly_zeros_p (TREE_VALUE (elt)))
4886 zeros++;
4887 elts++;
4890 return 4 * zeros >= 3 * elts;
4893 return is_zeros_p (exp);
4896 /* Helper function for store_constructor.
4897 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4898 TYPE is the type of the CONSTRUCTOR, not the element type.
4899 CLEARED is as for store_constructor.
4900 ALIAS_SET is the alias set to use for any stores.
4902 This provides a recursive shortcut back to store_constructor when it isn't
4903 necessary to go through store_field. This is so that we can pass through
4904 the cleared field to let store_constructor know that we may not have to
4905 clear a substructure if the outer structure has already been cleared. */
4907 static void
4908 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4909 alias_set)
4910 rtx target;
4911 unsigned HOST_WIDE_INT bitsize;
4912 HOST_WIDE_INT bitpos;
4913 enum machine_mode mode;
4914 tree exp, type;
4915 int cleared;
4916 int alias_set;
4918 if (TREE_CODE (exp) == CONSTRUCTOR
4919 && bitpos % BITS_PER_UNIT == 0
4920 /* If we have a nonzero bitpos for a register target, then we just
4921 let store_field do the bitfield handling. This is unlikely to
4922 generate unnecessary clear instructions anyway. */
4923 && (bitpos == 0 || GET_CODE (target) == MEM))
4925 if (GET_CODE (target) == MEM)
4926 target
4927 = adjust_address (target,
4928 GET_MODE (target) == BLKmode
4929 || 0 != (bitpos
4930 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4931 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4934 /* Update the alias set, if required. */
4935 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4936 && MEM_ALIAS_SET (target) != 0)
4938 target = copy_rtx (target);
4939 set_mem_alias_set (target, alias_set);
4942 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4944 else
4945 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4946 alias_set);
4949 /* Store the value of constructor EXP into the rtx TARGET.
4950 TARGET is either a REG or a MEM; we know it cannot conflict, since
4951 safe_from_p has been called.
4952 CLEARED is true if TARGET is known to have been zero'd.
4953 SIZE is the number of bytes of TARGET we are allowed to modify: this
4954 may not be the same as the size of EXP if we are assigning to a field
4955 which has been packed to exclude padding bits. */
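/* As a small illustration (hypothetical types, not from any test case):
   for `struct { int a, b, c; } s = { 7 };' the RECORD_TYPE branch below
   notices that the constructor has fewer elements than the structure,
   clears the whole object first, and then stores 7 into field A via
   store_constructor_field.  */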
4957 static void
4958 store_constructor (exp, target, cleared, size)
4959 tree exp;
4960 rtx target;
4961 int cleared;
4962 HOST_WIDE_INT size;
4964 tree type = TREE_TYPE (exp);
4965 #ifdef WORD_REGISTER_OPERATIONS
4966 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4967 #endif
4969 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4970 || TREE_CODE (type) == QUAL_UNION_TYPE)
4972 tree elt;
4974 /* We either clear the aggregate or indicate the value is dead. */
4975 if ((TREE_CODE (type) == UNION_TYPE
4976 || TREE_CODE (type) == QUAL_UNION_TYPE)
4977 && ! cleared
4978 && ! CONSTRUCTOR_ELTS (exp))
4979 /* If the constructor is empty, clear the union. */
4981 clear_storage (target, expr_size (exp));
4982 cleared = 1;
4985 /* If we are building a static constructor into a register,
4986 set the initial value as zero so we can fold the value into
4987 a constant. But if more than one register is involved,
4988 this probably loses. */
4989 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4990 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4992 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4993 cleared = 1;
4996 /* If the constructor has fewer fields than the structure
4997 or if we are initializing the structure to mostly zeros,
4998 clear the whole structure first. Don't do this if TARGET is a
4999 register whose mode size isn't equal to SIZE since clear_storage
5000 can't handle this case. */
5001 else if (! cleared && size > 0
5002 && ((list_length (CONSTRUCTOR_ELTS (exp))
5003 != fields_length (type))
5004 || mostly_zeros_p (exp))
5005 && (GET_CODE (target) != REG
5006 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5007 == size)))
5009 rtx xtarget = target;
5011 if (readonly_fields_p (type))
5013 xtarget = copy_rtx (xtarget);
5014 RTX_UNCHANGING_P (xtarget) = 1;
5017 clear_storage (xtarget, GEN_INT (size));
5018 cleared = 1;
5021 if (! cleared)
5022 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5024 /* Store each element of the constructor into
5025 the corresponding field of TARGET. */
5027 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5029 tree field = TREE_PURPOSE (elt);
5030 tree value = TREE_VALUE (elt);
5031 enum machine_mode mode;
5032 HOST_WIDE_INT bitsize;
5033 HOST_WIDE_INT bitpos = 0;
5034 tree offset;
5035 rtx to_rtx = target;
5037 /* Just ignore missing fields.
5038 We cleared the whole structure, above,
5039 if any fields are missing. */
5040 if (field == 0)
5041 continue;
5043 if (cleared && is_zeros_p (value))
5044 continue;
5046 if (host_integerp (DECL_SIZE (field), 1))
5047 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5048 else
5049 bitsize = -1;
5051 mode = DECL_MODE (field);
5052 if (DECL_BIT_FIELD (field))
5053 mode = VOIDmode;
5055 offset = DECL_FIELD_OFFSET (field);
5056 if (host_integerp (offset, 0)
5057 && host_integerp (bit_position (field), 0))
5059 bitpos = int_bit_position (field);
5060 offset = 0;
5062 else
5063 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5065 if (offset)
5067 rtx offset_rtx;
5069 if (CONTAINS_PLACEHOLDER_P (offset))
5070 offset = build (WITH_RECORD_EXPR, sizetype,
5071 offset, make_tree (TREE_TYPE (exp), target));
5073 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5074 if (GET_CODE (to_rtx) != MEM)
5075 abort ();
5077 #ifdef POINTERS_EXTEND_UNSIGNED
5078 if (GET_MODE (offset_rtx) != Pmode)
5079 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5080 #else
5081 if (GET_MODE (offset_rtx) != ptr_mode)
5082 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5083 #endif
5085 to_rtx = offset_address (to_rtx, offset_rtx,
5086 highest_pow2_factor (offset));
5089 if (TREE_READONLY (field))
5091 if (GET_CODE (to_rtx) == MEM)
5092 to_rtx = copy_rtx (to_rtx);
5094 RTX_UNCHANGING_P (to_rtx) = 1;
5097 #ifdef WORD_REGISTER_OPERATIONS
5098 /* If this initializes a field that is smaller than a word, at the
5099 start of a word, try to widen it to a full word.
5100 This special case allows us to output C++ member function
5101 initializations in a form that the optimizers can understand. */
5102 if (GET_CODE (target) == REG
5103 && bitsize < BITS_PER_WORD
5104 && bitpos % BITS_PER_WORD == 0
5105 && GET_MODE_CLASS (mode) == MODE_INT
5106 && TREE_CODE (value) == INTEGER_CST
5107 && exp_size >= 0
5108 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5110 tree type = TREE_TYPE (value);
5112 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5114 type = (*lang_hooks.types.type_for_size)
5115 (BITS_PER_WORD, TREE_UNSIGNED (type));
5116 value = convert (type, value);
5119 if (BYTES_BIG_ENDIAN)
5120 value
5121 = fold (build (LSHIFT_EXPR, type, value,
5122 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5123 bitsize = BITS_PER_WORD;
5124 mode = word_mode;
5126 #endif
5128 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5129 && DECL_NONADDRESSABLE_P (field))
5131 to_rtx = copy_rtx (to_rtx);
5132 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5135 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5136 value, type, cleared,
5137 get_alias_set (TREE_TYPE (field)));
5140 else if (TREE_CODE (type) == ARRAY_TYPE
5141 || TREE_CODE (type) == VECTOR_TYPE)
5143 tree elt;
5144 int i;
5145 int need_to_clear;
5146 tree domain = TYPE_DOMAIN (type);
5147 tree elttype = TREE_TYPE (type);
5148 int const_bounds_p;
5149 HOST_WIDE_INT minelt = 0;
5150 HOST_WIDE_INT maxelt = 0;
5152 /* Vectors are like arrays, but the domain is stored via an array
5153 type indirectly. */
5154 if (TREE_CODE (type) == VECTOR_TYPE)
5156 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5157 the same field as TYPE_DOMAIN, we are not guaranteed that
5158 it always will. */
5159 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5160 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5163 const_bounds_p = (TYPE_MIN_VALUE (domain)
5164 && TYPE_MAX_VALUE (domain)
5165 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5166 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5168 /* If we have constant bounds for the range of the type, get them. */
5169 if (const_bounds_p)
5171 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5172 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5175 /* If the constructor has fewer elements than the array,
5176 clear the whole array first. Similarly if this is
5177 a static constructor of a non-BLKmode object. */
5178 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5179 need_to_clear = 1;
5180 else
5182 HOST_WIDE_INT count = 0, zero_count = 0;
5183 need_to_clear = ! const_bounds_p;
5185 /* This loop is a more accurate version of the loop in
5186 mostly_zeros_p (it handles RANGE_EXPR in an index).
5187 It is also needed to check for missing elements. */
5188 for (elt = CONSTRUCTOR_ELTS (exp);
5189 elt != NULL_TREE && ! need_to_clear;
5190 elt = TREE_CHAIN (elt))
5192 tree index = TREE_PURPOSE (elt);
5193 HOST_WIDE_INT this_node_count;
5195 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5197 tree lo_index = TREE_OPERAND (index, 0);
5198 tree hi_index = TREE_OPERAND (index, 1);
5200 if (! host_integerp (lo_index, 1)
5201 || ! host_integerp (hi_index, 1))
5203 need_to_clear = 1;
5204 break;
5207 this_node_count = (tree_low_cst (hi_index, 1)
5208 - tree_low_cst (lo_index, 1) + 1);
5210 else
5211 this_node_count = 1;
5213 count += this_node_count;
5214 if (mostly_zeros_p (TREE_VALUE (elt)))
5215 zero_count += this_node_count;
5218 /* Clear the entire array first if there are any missing elements,
5219 or if the incidence of zero elements is >= 75%. */
5220 if (! need_to_clear
5221 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5222 need_to_clear = 1;
5225 if (need_to_clear && size > 0)
5227 if (! cleared)
5229 if (REG_P (target))
5230 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5231 else
5232 clear_storage (target, GEN_INT (size));
5234 cleared = 1;
5236 else if (REG_P (target))
5237 /* Inform later passes that the old value is dead. */
5238 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5240 /* Store each element of the constructor into
5241 the corresponding element of TARGET, determined
5242 by counting the elements. */
5243 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5244 elt;
5245 elt = TREE_CHAIN (elt), i++)
5247 enum machine_mode mode;
5248 HOST_WIDE_INT bitsize;
5249 HOST_WIDE_INT bitpos;
5250 int unsignedp;
5251 tree value = TREE_VALUE (elt);
5252 tree index = TREE_PURPOSE (elt);
5253 rtx xtarget = target;
5255 if (cleared && is_zeros_p (value))
5256 continue;
5258 unsignedp = TREE_UNSIGNED (elttype);
5259 mode = TYPE_MODE (elttype);
5260 if (mode == BLKmode)
5261 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5262 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5263 : -1);
5264 else
5265 bitsize = GET_MODE_BITSIZE (mode);
5267 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5269 tree lo_index = TREE_OPERAND (index, 0);
5270 tree hi_index = TREE_OPERAND (index, 1);
5271 rtx index_r, pos_rtx, loop_end;
5272 struct nesting *loop;
5273 HOST_WIDE_INT lo, hi, count;
5274 tree position;
5276 /* If the range is constant and "small", unroll the loop. */
5277 if (const_bounds_p
5278 && host_integerp (lo_index, 0)
5279 && host_integerp (hi_index, 0)
5280 && (lo = tree_low_cst (lo_index, 0),
5281 hi = tree_low_cst (hi_index, 0),
5282 count = hi - lo + 1,
5283 (GET_CODE (target) != MEM
5284 || count <= 2
5285 || (host_integerp (TYPE_SIZE (elttype), 1)
5286 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5287 <= 40 * 8)))))
5289 lo -= minelt; hi -= minelt;
5290 for (; lo <= hi; lo++)
5292 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5294 if (GET_CODE (target) == MEM
5295 && !MEM_KEEP_ALIAS_SET_P (target)
5296 && TREE_CODE (type) == ARRAY_TYPE
5297 && TYPE_NONALIASED_COMPONENT (type))
5299 target = copy_rtx (target);
5300 MEM_KEEP_ALIAS_SET_P (target) = 1;
5303 store_constructor_field
5304 (target, bitsize, bitpos, mode, value, type, cleared,
5305 get_alias_set (elttype));
5308 else
5310 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5311 loop_end = gen_label_rtx ();
5313 unsignedp = TREE_UNSIGNED (domain);
5315 index = build_decl (VAR_DECL, NULL_TREE, domain);
5317 index_r
5318 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5319 &unsignedp, 0));
5320 SET_DECL_RTL (index, index_r);
5321 if (TREE_CODE (value) == SAVE_EXPR
5322 && SAVE_EXPR_RTL (value) == 0)
5324 /* Make sure value gets expanded once before the
5325 loop. */
5326 expand_expr (value, const0_rtx, VOIDmode, 0);
5327 emit_queue ();
5329 store_expr (lo_index, index_r, 0);
5330 loop = expand_start_loop (0);
5332 /* Assign value to element index. */
5333 position
5334 = convert (ssizetype,
5335 fold (build (MINUS_EXPR, TREE_TYPE (index),
5336 index, TYPE_MIN_VALUE (domain))));
5337 position = size_binop (MULT_EXPR, position,
5338 convert (ssizetype,
5339 TYPE_SIZE_UNIT (elttype)));
5341 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5342 xtarget = offset_address (target, pos_rtx,
5343 highest_pow2_factor (position));
5344 xtarget = adjust_address (xtarget, mode, 0);
5345 if (TREE_CODE (value) == CONSTRUCTOR)
5346 store_constructor (value, xtarget, cleared,
5347 bitsize / BITS_PER_UNIT);
5348 else
5349 store_expr (value, xtarget, 0);
5351 expand_exit_loop_if_false (loop,
5352 build (LT_EXPR, integer_type_node,
5353 index, hi_index));
5355 expand_increment (build (PREINCREMENT_EXPR,
5356 TREE_TYPE (index),
5357 index, integer_one_node), 0, 0);
5358 expand_end_loop ();
5359 emit_label (loop_end);
5362 else if ((index != 0 && ! host_integerp (index, 0))
5363 || ! host_integerp (TYPE_SIZE (elttype), 1))
5365 tree position;
5367 if (index == 0)
5368 index = ssize_int (1);
5370 if (minelt)
5371 index = convert (ssizetype,
5372 fold (build (MINUS_EXPR, index,
5373 TYPE_MIN_VALUE (domain))));
5375 position = size_binop (MULT_EXPR, index,
5376 convert (ssizetype,
5377 TYPE_SIZE_UNIT (elttype)));
5378 xtarget = offset_address (target,
5379 expand_expr (position, 0, VOIDmode, 0),
5380 highest_pow2_factor (position));
5381 xtarget = adjust_address (xtarget, mode, 0);
5382 store_expr (value, xtarget, 0);
5384 else
5386 if (index != 0)
5387 bitpos = ((tree_low_cst (index, 0) - minelt)
5388 * tree_low_cst (TYPE_SIZE (elttype), 1));
5389 else
5390 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5392 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5393 && TREE_CODE (type) == ARRAY_TYPE
5394 && TYPE_NONALIASED_COMPONENT (type))
5396 target = copy_rtx (target);
5397 MEM_KEEP_ALIAS_SET_P (target) = 1;
5400 store_constructor_field (target, bitsize, bitpos, mode, value,
5401 type, cleared, get_alias_set (elttype));
5407 /* Set constructor assignments. */
5408 else if (TREE_CODE (type) == SET_TYPE)
5410 tree elt = CONSTRUCTOR_ELTS (exp);
5411 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5412 tree domain = TYPE_DOMAIN (type);
5413 tree domain_min, domain_max, bitlength;
5415 /* The default implementation strategy is to extract the constant
5416 parts of the constructor, use that to initialize the target,
5417 and then "or" in whatever non-constant ranges we need in addition.
5419 If a large set is all zero or all ones, it is
5420 probably better to set it using memset (if available) or bzero.
5421 Also, if a large set has just a single range, it may also be
5422 better to first clear the whole set (using
5423 bzero/memset), and then set the bits we want. */
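      /* A tiny worked example of the constant-part packing done below
	 (assuming a little-endian target and a hypothetical 8-bit set
	 word): a constructor whose only members are bits 1 and 3 yields
	 bit_buffer entries of 1 at positions 1 and 3, so a single word
	 0x0a is built and stored into the target directly.  */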
5425 /* Check for all zeros. */
5426 if (elt == NULL_TREE && size > 0)
5428 if (!cleared)
5429 clear_storage (target, GEN_INT (size));
5430 return;
5433 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5434 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5435 bitlength = size_binop (PLUS_EXPR,
5436 size_diffop (domain_max, domain_min),
5437 ssize_int (1));
5439 nbits = tree_low_cst (bitlength, 1);
5441 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5442 are "complicated" (more than one range), initialize (the
5443 constant parts) by copying from a constant. */
5444 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5445 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5447 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5448 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5449 char *bit_buffer = (char *) alloca (nbits);
5450 HOST_WIDE_INT word = 0;
5451 unsigned int bit_pos = 0;
5452 unsigned int ibit = 0;
5453 unsigned int offset = 0; /* In bytes from beginning of set. */
5455 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5456 for (;;)
5458 if (bit_buffer[ibit])
5460 if (BYTES_BIG_ENDIAN)
5461 word |= (1 << (set_word_size - 1 - bit_pos));
5462 else
5463 word |= 1 << bit_pos;
5466 bit_pos++; ibit++;
5467 if (bit_pos >= set_word_size || ibit == nbits)
5469 if (word != 0 || ! cleared)
5471 rtx datum = GEN_INT (word);
5472 rtx to_rtx;
5474 /* The assumption here is that it is safe to use
5475 XEXP if the set is multi-word, but not if
5476 it's single-word. */
5477 if (GET_CODE (target) == MEM)
5478 to_rtx = adjust_address (target, mode, offset);
5479 else if (offset == 0)
5480 to_rtx = target;
5481 else
5482 abort ();
5483 emit_move_insn (to_rtx, datum);
5486 if (ibit == nbits)
5487 break;
5488 word = 0;
5489 bit_pos = 0;
5490 offset += set_word_size / BITS_PER_UNIT;
5494 else if (!cleared)
5495 /* Don't bother clearing storage if the set is all ones. */
5496 if (TREE_CHAIN (elt) != NULL_TREE
5497 || (TREE_PURPOSE (elt) == NULL_TREE
5498 ? nbits != 1
5499 : ( ! host_integerp (TREE_VALUE (elt), 0)
5500 || ! host_integerp (TREE_PURPOSE (elt), 0)
5501 || (tree_low_cst (TREE_VALUE (elt), 0)
5502 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5503 != (HOST_WIDE_INT) nbits))))
5504 clear_storage (target, expr_size (exp));
5506 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5508 /* Start of range of element or NULL. */
5509 tree startbit = TREE_PURPOSE (elt);
5510 /* End of range of element, or element value. */
5511 tree endbit = TREE_VALUE (elt);
5512 HOST_WIDE_INT startb, endb;
5513 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5515 bitlength_rtx = expand_expr (bitlength,
5516 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5518 /* Handle non-range tuple element like [ expr ]. */
5519 if (startbit == NULL_TREE)
5521 startbit = save_expr (endbit);
5522 endbit = startbit;
5525 startbit = convert (sizetype, startbit);
5526 endbit = convert (sizetype, endbit);
5527 if (! integer_zerop (domain_min))
5529 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5530 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5532 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5533 EXPAND_CONST_ADDRESS);
5534 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5535 EXPAND_CONST_ADDRESS);
5537 if (REG_P (target))
5539 targetx
5540 = assign_temp
5541 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5542 (GET_MODE (target), 0),
5543 TYPE_QUAL_CONST)),
5544 0, 1, 1);
5545 emit_move_insn (targetx, target);
5548 else if (GET_CODE (target) == MEM)
5549 targetx = target;
5550 else
5551 abort ();
5553 /* Optimization: If startbit and endbit are constants divisible
5554 by BITS_PER_UNIT, call memset instead. */
5555 if (TARGET_MEM_FUNCTIONS
5556 && TREE_CODE (startbit) == INTEGER_CST
5557 && TREE_CODE (endbit) == INTEGER_CST
5558 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5559 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5561 emit_library_call (memset_libfunc, LCT_NORMAL,
5562 VOIDmode, 3,
5563 plus_constant (XEXP (targetx, 0),
5564 startb / BITS_PER_UNIT),
5565 Pmode,
5566 constm1_rtx, TYPE_MODE (integer_type_node),
5567 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5568 TYPE_MODE (sizetype));
5570 else
5571 emit_library_call (setbits_libfunc, LCT_NORMAL,
5572 VOIDmode, 4, XEXP (targetx, 0),
5573 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5574 startbit_rtx, TYPE_MODE (sizetype),
5575 endbit_rtx, TYPE_MODE (sizetype));
5577 if (REG_P (target))
5578 emit_move_insn (target, targetx);
5582 else
5583 abort ();
5586 /* Store the value of EXP (an expression tree)
5587 into a subfield of TARGET which has mode MODE and occupies
5588 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5589 If MODE is VOIDmode, it means that we are storing into a bit-field.
5591 If VALUE_MODE is VOIDmode, return nothing in particular.
5592 UNSIGNEDP is not used in this case.
5594 Otherwise, return an rtx for the value stored. This rtx
5595 has mode VALUE_MODE if that is convenient to do.
5596 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5598 TYPE is the type of the underlying object.
5600 ALIAS_SET is the alias set for the destination. This value will
5601 (in general) be different from that for TARGET, since TARGET is a
5602 reference to the containing structure. */
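/* For instance (hypothetical field layout): storing into a 5-bit
   bit-field that starts 3 bits into its containing word arrives here with
   MODE == VOIDmode, BITSIZE == 5 and BITPOS == 3; the value is written
   with store_bit_field, and WIDTH_MASK == 0x1f is then used below to
   re-extract the stored bits when the caller wants the value back.  */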
5604 static rtx
5605 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5606 alias_set)
5607 rtx target;
5608 HOST_WIDE_INT bitsize;
5609 HOST_WIDE_INT bitpos;
5610 enum machine_mode mode;
5611 tree exp;
5612 enum machine_mode value_mode;
5613 int unsignedp;
5614 tree type;
5615 int alias_set;
5617 HOST_WIDE_INT width_mask = 0;
5619 if (TREE_CODE (exp) == ERROR_MARK)
5620 return const0_rtx;
5622 /* If we have nothing to store, do nothing unless the expression has
5623 side-effects. */
5624 if (bitsize == 0)
5625 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5626 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5627 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5629 /* If we are storing into an unaligned field of an aligned union that is
5630 in a register, we may have the mode of TARGET being an integer mode but
5631 MODE == BLKmode. In that case, get an aligned object whose size and
5632 alignment are the same as TARGET and store TARGET into it (we can avoid
5633 the store if the field being stored is the entire width of TARGET). Then
5634 call ourselves recursively to store the field into a BLKmode version of
5635 that object. Finally, load from the object into TARGET. This is not
5636 very efficient in general, but should only be slightly more expensive
5637 than the otherwise-required unaligned accesses. Perhaps this can be
5638 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5639 twice, once with emit_move_insn and once via store_field. */
5641 if (mode == BLKmode
5642 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5644 rtx object = assign_temp (type, 0, 1, 1);
5645 rtx blk_object = adjust_address (object, BLKmode, 0);
5647 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5648 emit_move_insn (object, target);
5650 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5651 alias_set);
5653 emit_move_insn (target, object);
5655 /* We want to return the BLKmode version of the data. */
5656 return blk_object;
5659 if (GET_CODE (target) == CONCAT)
5661 /* We're storing into a struct containing a single __complex. */
5663 if (bitpos != 0)
5664 abort ();
5665 return store_expr (exp, target, 0);
5668 /* If the structure is in a register or if the component
5669 is a bit field, we cannot use addressing to access it.
5670 Use bit-field techniques or SUBREG to store in it. */
5672 if (mode == VOIDmode
5673 || (mode != BLKmode && ! direct_store[(int) mode]
5674 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5675 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5676 || GET_CODE (target) == REG
5677 || GET_CODE (target) == SUBREG
5678 /* If the field isn't aligned enough to store as an ordinary memref,
5679 store it as a bit field. */
5680 || (mode != BLKmode
5681 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5682 || bitpos % GET_MODE_ALIGNMENT (mode))
5683 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5684 || (bitpos % BITS_PER_UNIT != 0)))
5685 /* If the RHS and field are a constant size and the size of the
5686 RHS isn't the same size as the bitfield, we must use bitfield
5687 operations. */
5688 || (bitsize >= 0
5689 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5690 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5692 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5694 /* If BITSIZE is narrower than the size of the type of EXP
5695 we will be narrowing TEMP. Normally, what's wanted are the
5696 low-order bits. However, if EXP's type is a record and this is
5697 a big-endian machine, we want the upper BITSIZE bits.
5698 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5699 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5700 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5701 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5702 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5703 - bitsize),
5704 NULL_RTX, 1);
5706 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5707 MODE. */
5708 if (mode != VOIDmode && mode != BLKmode
5709 && mode != TYPE_MODE (TREE_TYPE (exp)))
5710 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5712 /* If the modes of TARGET and TEMP are both BLKmode, both
5713 must be in memory and BITPOS must be aligned on a byte
5714 boundary. If so, we simply do a block copy. */
5715 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5717 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5718 || bitpos % BITS_PER_UNIT != 0)
5719 abort ();
5721 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5722 emit_block_move (target, temp,
5723 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5724 / BITS_PER_UNIT),
5725 BLOCK_OP_NORMAL);
5727 return value_mode == VOIDmode ? const0_rtx : target;
5730 /* Store the value in the bitfield. */
5731 store_bit_field (target, bitsize, bitpos, mode, temp,
5732 int_size_in_bytes (type));
5734 if (value_mode != VOIDmode)
5736 /* The caller wants an rtx for the value.
5737 If possible, avoid refetching from the bitfield itself. */
5738 if (width_mask != 0
5739 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5741 tree count;
5742 enum machine_mode tmode;
5744 tmode = GET_MODE (temp);
5745 if (tmode == VOIDmode)
5746 tmode = value_mode;
5748 if (unsignedp)
5749 return expand_and (tmode, temp,
5750 gen_int_mode (width_mask, tmode),
5751 NULL_RTX);
5753 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5754 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5755 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5758 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5759 NULL_RTX, value_mode, VOIDmode,
5760 int_size_in_bytes (type));
5762 return const0_rtx;
5764 else
5766 rtx addr = XEXP (target, 0);
5767 rtx to_rtx = target;
5769 /* If a value is wanted, it must be the lhs;
5770 so make the address stable for multiple use. */
5772 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5773 && ! CONSTANT_ADDRESS_P (addr)
5774 /* A frame-pointer reference is already stable. */
5775 && ! (GET_CODE (addr) == PLUS
5776 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5777 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5778 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5779 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5781 /* Now build a reference to just the desired component. */
5783 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5785 if (to_rtx == target)
5786 to_rtx = copy_rtx (to_rtx);
5788 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5789 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5790 set_mem_alias_set (to_rtx, alias_set);
5792 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5796 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5797 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5798 codes and find the ultimate containing object, which we return.
5800 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5801 bit position, and *PUNSIGNEDP to the signedness of the field.
5802 If the position of the field is variable, we store a tree
5803 giving the variable offset (in units) in *POFFSET.
5804 This offset is in addition to the bit position.
5805 If the position is not variable, we store 0 in *POFFSET.
5807 If any of the extraction expressions is volatile,
5808 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5810 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5811 is a mode that can be used to access the field. In that case, *PBITSIZE
5812 is redundant.
5814 If the field describes a variable-sized object, *PMODE is set to
5815 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5816 this case, but the address of the object can be found. */
5818 tree
5819 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5820 punsignedp, pvolatilep)
5821 tree exp;
5822 HOST_WIDE_INT *pbitsize;
5823 HOST_WIDE_INT *pbitpos;
5824 tree *poffset;
5825 enum machine_mode *pmode;
5826 int *punsignedp;
5827 int *pvolatilep;
5829 tree size_tree = 0;
5830 enum machine_mode mode = VOIDmode;
5831 tree offset = size_zero_node;
5832 tree bit_offset = bitsize_zero_node;
5833 tree placeholder_ptr = 0;
5834 tree tem;
5836 /* First get the mode, signedness, and size. We do this from just the
5837 outermost expression. */
5838 if (TREE_CODE (exp) == COMPONENT_REF)
5840 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5841 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5842 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5844 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5846 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5848 size_tree = TREE_OPERAND (exp, 1);
5849 *punsignedp = TREE_UNSIGNED (exp);
5851 else
5853 mode = TYPE_MODE (TREE_TYPE (exp));
5854 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5856 if (mode == BLKmode)
5857 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5858 else
5859 *pbitsize = GET_MODE_BITSIZE (mode);
5862 if (size_tree != 0)
5864 if (! host_integerp (size_tree, 1))
5865 mode = BLKmode, *pbitsize = -1;
5866 else
5867 *pbitsize = tree_low_cst (size_tree, 1);
5870 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5871 and find the ultimate containing object. */
5872 while (1)
5874 if (TREE_CODE (exp) == BIT_FIELD_REF)
5875 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5876 else if (TREE_CODE (exp) == COMPONENT_REF)
5878 tree field = TREE_OPERAND (exp, 1);
5879 tree this_offset = DECL_FIELD_OFFSET (field);
5881 /* If this field hasn't been filled in yet, don't go
5882 past it. This should only happen when folding expressions
5883 made during type construction. */
5884 if (this_offset == 0)
5885 break;
5886 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5887 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5889 offset = size_binop (PLUS_EXPR, offset, this_offset);
5890 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5891 DECL_FIELD_BIT_OFFSET (field));
5893 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5896 else if (TREE_CODE (exp) == ARRAY_REF
5897 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5899 tree index = TREE_OPERAND (exp, 1);
5900 tree array = TREE_OPERAND (exp, 0);
5901 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5902 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5903 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5905 /* We assume all arrays have sizes that are a multiple of a byte.
5906 First subtract the lower bound, if any, in the type of the
5907 index, then convert to sizetype and multiply by the size of the
5908 array element. */
5909 if (low_bound != 0 && ! integer_zerop (low_bound))
5910 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5911 index, low_bound));
5913 /* If the index has a self-referential type, pass it to a
5914 WITH_RECORD_EXPR; if the component size does, pass our
5915 component to one. */
5916 if (CONTAINS_PLACEHOLDER_P (index))
5917 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5918 if (CONTAINS_PLACEHOLDER_P (unit_size))
5919 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5921 offset = size_binop (PLUS_EXPR, offset,
5922 size_binop (MULT_EXPR,
5923 convert (sizetype, index),
5924 unit_size));
5927 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5929 tree new = find_placeholder (exp, &placeholder_ptr);
5931 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5932 We might have been called from tree optimization where we
5933 haven't set up an object yet. */
5934 if (new == 0)
5935 break;
5936 else
5937 exp = new;
5939 continue;
5942 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5943 conversions that don't change the mode, and all view conversions
5944 except those that need to "step up" the alignment. */
5945 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5946 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5947 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5948 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5949 && STRICT_ALIGNMENT
5950 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5951 < BIGGEST_ALIGNMENT)
5952 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5953 || TYPE_ALIGN_OK (TREE_TYPE
5954 (TREE_OPERAND (exp, 0))))))
5955 && ! ((TREE_CODE (exp) == NOP_EXPR
5956 || TREE_CODE (exp) == CONVERT_EXPR)
5957 && (TYPE_MODE (TREE_TYPE (exp))
5958 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5959 break;
5961 /* If any reference in the chain is volatile, the effect is volatile. */
5962 if (TREE_THIS_VOLATILE (exp))
5963 *pvolatilep = 1;
5965 exp = TREE_OPERAND (exp, 0);
5968 /* If OFFSET is constant, see if we can return the whole thing as a
5969 constant bit position. Otherwise, split it up. */
5970 if (host_integerp (offset, 0)
5971 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5972 bitsize_unit_node))
5973 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5974 && host_integerp (tem, 0))
5975 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5976 else
5977 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5979 *pmode = mode;
5980 return exp;
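/* Editor's note: the sketch below is illustrative only and not part of the
   original source.  It shows how a hypothetical caller might use the
   get_inner_reference interface documented above; REF stands for some
   COMPONENT_REF or ARRAY_REF supplied by the caller.  */
#if 0
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp;
  int volatilep = 0;

  /* INNER is the ultimate containing object; the field occupies BITSIZE
     bits at bit position BITPOS (plus OFFSET units when the position is
     variable).  */
  tree inner = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                    &mode1, &unsignedp, &volatilep);
}
#endif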
5983 /* Return 1 if T is an expression that get_inner_reference handles. */
5986 handled_component_p (t)
5987 tree t;
5989 switch (TREE_CODE (t))
5991 case BIT_FIELD_REF:
5992 case COMPONENT_REF:
5993 case ARRAY_REF:
5994 case ARRAY_RANGE_REF:
5995 case NON_LVALUE_EXPR:
5996 case VIEW_CONVERT_EXPR:
5997 return 1;
5999 case NOP_EXPR:
6000 case CONVERT_EXPR:
6001 return (TYPE_MODE (TREE_TYPE (t))
6002 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
6004 default:
6005 return 0;
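/* Editor's note: illustrative sketch, not in the original source.  A
   hypothetical caller can use handled_component_p to strip references
   down to the underlying base object:  */
#if 0
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
#endif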
6009 /* Given an rtx VALUE that may contain additions and multiplications, return
6010 an equivalent value that just refers to a register, memory, or constant.
6011 This is done by generating instructions to perform the arithmetic and
6012 returning a pseudo-register containing the value.
6014 The returned value may be a REG, SUBREG, MEM or constant. */
6017 force_operand (value, target)
6018 rtx value, target;
6020 rtx op1, op2;
6021 /* Use subtarget as the target for operand 0 of a binary operation. */
6022 rtx subtarget = get_subtarget (target);
6023 enum rtx_code code = GET_CODE (value);
6025 /* Check for a PIC address load. */
6026 if ((code == PLUS || code == MINUS)
6027 && XEXP (value, 0) == pic_offset_table_rtx
6028 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6029 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6030 || GET_CODE (XEXP (value, 1)) == CONST))
6032 if (!subtarget)
6033 subtarget = gen_reg_rtx (GET_MODE (value));
6034 emit_move_insn (subtarget, value);
6035 return subtarget;
6038 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
6040 if (!target)
6041 target = gen_reg_rtx (GET_MODE (value));
6042 convert_move (target, force_operand (XEXP (value, 0), NULL),
6043 code == ZERO_EXTEND);
6044 return target;
6047 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
6049 op2 = XEXP (value, 1);
6050 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
6051 subtarget = 0;
6052 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6054 code = PLUS;
6055 op2 = negate_rtx (GET_MODE (value), op2);
6058 /* Check for an addition with OP2 a constant integer and our first
6059 operand a PLUS of a virtual register and something else. In that
6060 case, we want to emit the sum of the virtual register and the
6061 constant first and then add the other value. This allows virtual
6062 register instantiation to simply modify the constant rather than
6063 creating another one around this addition. */
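/* Editor's illustration (not in the original source): given, say,
   (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 8)),
   we first emit  t = virtual-stack-vars + 8  and then add (reg 70),
   so instantiation can later fold the 8 into the frame offset instead
   of materializing a separate constant.  */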
6064 if (code == PLUS && GET_CODE (op2) == CONST_INT
6065 && GET_CODE (XEXP (value, 0)) == PLUS
6066 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
6067 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6068 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6070 rtx temp = expand_simple_binop (GET_MODE (value), code,
6071 XEXP (XEXP (value, 0), 0), op2,
6072 subtarget, 0, OPTAB_LIB_WIDEN);
6073 return expand_simple_binop (GET_MODE (value), code, temp,
6074 force_operand (XEXP (XEXP (value,
6075 0), 1), 0),
6076 target, 0, OPTAB_LIB_WIDEN);
6079 op1 = force_operand (XEXP (value, 0), subtarget);
6080 op2 = force_operand (op2, NULL_RTX);
6081 switch (code)
6083 case MULT:
6084 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6085 case DIV:
6086 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6087 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6088 target, 1, OPTAB_LIB_WIDEN);
6089 else
6090 return expand_divmod (0,
6091 FLOAT_MODE_P (GET_MODE (value))
6092 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6093 GET_MODE (value), op1, op2, target, 0);
6094 break;
6095 case MOD:
6096 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6097 target, 0);
6098 break;
6099 case UDIV:
6100 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6101 target, 1);
6102 break;
6103 case UMOD:
6104 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6105 target, 1);
6106 break;
6107 case ASHIFTRT:
6108 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6109 target, 0, OPTAB_LIB_WIDEN);
6110 break;
6111 default:
6112 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6113 target, 1, OPTAB_LIB_WIDEN);
6116 if (GET_RTX_CLASS (code) == '1')
6118 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6119 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6122 #ifdef INSN_SCHEDULING
6123 /* On machines that have insn scheduling, we want all memory references to be
6124 explicit, so we need to deal with such paradoxical SUBREGs. */
6125 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6126 && (GET_MODE_SIZE (GET_MODE (value))
6127 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6128 value
6129 = simplify_gen_subreg (GET_MODE (value),
6130 force_reg (GET_MODE (SUBREG_REG (value)),
6131 force_operand (SUBREG_REG (value),
6132 NULL_RTX)),
6133 GET_MODE (SUBREG_REG (value)),
6134 SUBREG_BYTE (value));
6135 #endif
6137 return value;
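/* Editor's note: a minimal, hypothetical usage sketch, not part of the
   original source; SOME_REG stands for any pseudo already in hand.  */
#if 0
{
  rtx sum = gen_rtx_PLUS (SImode, some_reg, GEN_INT (12));

  /* force_operand emits the addition and returns something directly
     usable as an operand (here, a new pseudo register).  */
  rtx op = force_operand (sum, NULL_RTX);
}
#endif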
6140 /* Subroutine of expand_expr: return nonzero iff there is no way that
6141 EXP can reference X, which is being modified. TOP_P is nonzero if this
6142 call is going to be used to determine whether we need a temporary
6143 for EXP, as opposed to a recursive call to this function.
6145 It is always safe for this routine to return zero since it merely
6146 searches for optimization opportunities. */
6149 safe_from_p (x, exp, top_p)
6150 rtx x;
6151 tree exp;
6152 int top_p;
6154 rtx exp_rtl = 0;
6155 int i, nops;
6156 static tree save_expr_list;
6158 if (x == 0
6159 /* If EXP has varying size, we MUST use a target since we currently
6160 have no way of allocating temporaries of variable size
6161 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6162 So we assume here that something at a higher level has prevented a
6163 clash. This is somewhat bogus, but the best we can do. Only
6164 do this when X is BLKmode and when we are at the top level. */
6165 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6166 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6167 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6168 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6169 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6170 != INTEGER_CST)
6171 && GET_MODE (x) == BLKmode)
6172 /* If X is in the outgoing argument area, it is always safe. */
6173 || (GET_CODE (x) == MEM
6174 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6175 || (GET_CODE (XEXP (x, 0)) == PLUS
6176 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6177 return 1;
6179 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6180 find the underlying pseudo. */
6181 if (GET_CODE (x) == SUBREG)
6183 x = SUBREG_REG (x);
6184 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6185 return 0;
6188 /* A SAVE_EXPR might appear many times in the expression passed to the
6189 top-level safe_from_p call, and if it has a complex subexpression,
6190 examining it multiple times could result in a combinatorial explosion.
6191 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6192 with optimization took about 28 minutes to compile -- even though it was
6193 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6194 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6195 we have processed. Note that the only test of top_p was above. */
6197 if (top_p)
6199 int rtn;
6200 tree t;
6202 save_expr_list = 0;
6204 rtn = safe_from_p (x, exp, 0);
6206 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6207 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6209 return rtn;
6212 /* Now look at our tree code and possibly recurse. */
6213 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6215 case 'd':
6216 exp_rtl = DECL_RTL_IF_SET (exp);
6217 break;
6219 case 'c':
6220 return 1;
6222 case 'x':
6223 if (TREE_CODE (exp) == TREE_LIST)
6225 while (1)
6227 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6228 return 0;
6229 exp = TREE_CHAIN (exp);
6230 if (!exp)
6231 return 1;
6232 if (TREE_CODE (exp) != TREE_LIST)
6233 return safe_from_p (x, exp, 0);
6236 else if (TREE_CODE (exp) == ERROR_MARK)
6237 return 1; /* An already-visited SAVE_EXPR? */
6238 else
6239 return 0;
6241 case '2':
6242 case '<':
6243 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6244 return 0;
6245 /* FALLTHRU */
6247 case '1':
6248 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6250 case 'e':
6251 case 'r':
6252 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6253 the expression. If it is set, we conflict iff we are that rtx or
6254 both are in memory. Otherwise, we check all operands of the
6255 expression recursively. */
6257 switch (TREE_CODE (exp))
6259 case ADDR_EXPR:
6260 /* If the operand is static or we are static, we can't conflict.
6261 Likewise if we don't conflict with the operand at all. */
6262 if (staticp (TREE_OPERAND (exp, 0))
6263 || TREE_STATIC (exp)
6264 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6265 return 1;
6267 /* Otherwise, the only way this can conflict is if we are taking
6268 the address of a DECL whose address is part of X, which is
6269 very rare. */
6270 exp = TREE_OPERAND (exp, 0);
6271 if (DECL_P (exp))
6273 if (!DECL_RTL_SET_P (exp)
6274 || GET_CODE (DECL_RTL (exp)) != MEM)
6275 return 0;
6276 else
6277 exp_rtl = XEXP (DECL_RTL (exp), 0);
6279 break;
6281 case INDIRECT_REF:
6282 if (GET_CODE (x) == MEM
6283 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6284 get_alias_set (exp)))
6285 return 0;
6286 break;
6288 case CALL_EXPR:
6289 /* Assume that the call will clobber all hard registers and
6290 all of memory. */
6291 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6292 || GET_CODE (x) == MEM)
6293 return 0;
6294 break;
6296 case RTL_EXPR:
6297 /* If a sequence exists, we would have to scan every instruction
6298 in the sequence to see if it was safe. This is probably not
6299 worthwhile. */
6300 if (RTL_EXPR_SEQUENCE (exp))
6301 return 0;
6303 exp_rtl = RTL_EXPR_RTL (exp);
6304 break;
6306 case WITH_CLEANUP_EXPR:
6307 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6308 break;
6310 case CLEANUP_POINT_EXPR:
6311 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6313 case SAVE_EXPR:
6314 exp_rtl = SAVE_EXPR_RTL (exp);
6315 if (exp_rtl)
6316 break;
6318 /* If we've already scanned this, don't do it again. Otherwise,
6319 show we've scanned it and record for clearing the flag if we're
6320 going on. */
6321 if (TREE_PRIVATE (exp))
6322 return 1;
6324 TREE_PRIVATE (exp) = 1;
6325 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6327 TREE_PRIVATE (exp) = 0;
6328 return 0;
6331 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6332 return 1;
6334 case BIND_EXPR:
6335 /* The only operand we look at is operand 1. The rest aren't
6336 part of the expression. */
6337 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6339 case METHOD_CALL_EXPR:
6340 /* This takes an rtx argument, but shouldn't appear here. */
6341 abort ();
6343 default:
6344 break;
6347 /* If we have an rtx, we do not need to scan our operands. */
6348 if (exp_rtl)
6349 break;
6351 nops = first_rtl_op (TREE_CODE (exp));
6352 for (i = 0; i < nops; i++)
6353 if (TREE_OPERAND (exp, i) != 0
6354 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6355 return 0;
6357 /* If this is a language-specific tree code, it may require
6358 special handling. */
6359 if ((unsigned int) TREE_CODE (exp)
6360 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6361 && !(*lang_hooks.safe_from_p) (x, exp))
6362 return 0;
6365 /* If we have an rtl, find any enclosed object. Then see if we conflict
6366 with it. */
6367 if (exp_rtl)
6369 if (GET_CODE (exp_rtl) == SUBREG)
6371 exp_rtl = SUBREG_REG (exp_rtl);
6372 if (GET_CODE (exp_rtl) == REG
6373 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6374 return 0;
6377 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6378 are memory and they conflict. */
6379 return ! (rtx_equal_p (x, exp_rtl)
6380 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6381 && true_dependence (exp_rtl, VOIDmode, x,
6382 rtx_addr_varies_p)));
6385 /* If we reach here, it is safe. */
6386 return 1;
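/* Editor's note: illustrative sketch, not part of the original source;
   it mirrors how callers later in this file use safe_from_p.  Reuse
   TARGET only when EXP cannot reference it; otherwise fall back to a
   fresh temporary so the store cannot clobber one of its own inputs.  */
#if 0
  if (target == 0 || ! safe_from_p (target, exp, 1))
    target = assign_temp (type, 0, 1, 1);
#endif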
6389 /* Subroutine of expand_expr: return rtx if EXP is a
6390 variable or parameter; else return 0. */
6392 static rtx
6393 var_rtx (exp)
6394 tree exp;
6396 STRIP_NOPS (exp);
6397 switch (TREE_CODE (exp))
6399 case PARM_DECL:
6400 case VAR_DECL:
6401 return DECL_RTL (exp);
6402 default:
6403 return 0;
6407 #ifdef MAX_INTEGER_COMPUTATION_MODE
6409 void
6410 check_max_integer_computation_mode (exp)
6411 tree exp;
6413 enum tree_code code;
6414 enum machine_mode mode;
6416 /* Strip any NOPs that don't change the mode. */
6417 STRIP_NOPS (exp);
6418 code = TREE_CODE (exp);
6420 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6421 if (code == NOP_EXPR
6422 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6423 return;
6425 /* First check the type of the overall operation. We need only look at
6426 unary, binary and relational operations. */
6427 if (TREE_CODE_CLASS (code) == '1'
6428 || TREE_CODE_CLASS (code) == '2'
6429 || TREE_CODE_CLASS (code) == '<')
6431 mode = TYPE_MODE (TREE_TYPE (exp));
6432 if (GET_MODE_CLASS (mode) == MODE_INT
6433 && mode > MAX_INTEGER_COMPUTATION_MODE)
6434 internal_error ("unsupported wide integer operation");
6437 /* Check operand of a unary op. */
6438 if (TREE_CODE_CLASS (code) == '1')
6440 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6441 if (GET_MODE_CLASS (mode) == MODE_INT
6442 && mode > MAX_INTEGER_COMPUTATION_MODE)
6443 internal_error ("unsupported wide integer operation");
6446 /* Check operands of a binary/comparison op. */
6447 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6449 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6450 if (GET_MODE_CLASS (mode) == MODE_INT
6451 && mode > MAX_INTEGER_COMPUTATION_MODE)
6452 internal_error ("unsupported wide integer operation");
6454 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6455 if (GET_MODE_CLASS (mode) == MODE_INT
6456 && mode > MAX_INTEGER_COMPUTATION_MODE)
6457 internal_error ("unsupported wide integer operation");
6460 #endif
6462 /* Return the highest power of two that EXP is known to be a multiple of.
6463 This is used in updating alignment of MEMs in array references. */
6465 static unsigned HOST_WIDE_INT
6466 highest_pow2_factor (exp)
6467 tree exp;
6469 unsigned HOST_WIDE_INT c0, c1;
6471 switch (TREE_CODE (exp))
6473 case INTEGER_CST:
6474 /* We can find the lowest bit that's a one. If the low
6475 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6476 We need to handle this case since we can find it in a COND_EXPR,
6477 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6478 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6479 later ICE. */
6480 if (TREE_CONSTANT_OVERFLOW (exp))
6481 return BIGGEST_ALIGNMENT;
6482 else
6484 /* Note: tree_low_cst is intentionally not used here,
6485 we don't care about the upper bits. */
6486 c0 = TREE_INT_CST_LOW (exp);
6487 c0 &= -c0;
6488 return c0 ? c0 : BIGGEST_ALIGNMENT;
6490 break;
6492 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6493 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6494 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6495 return MIN (c0, c1);
6497 case MULT_EXPR:
6498 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6499 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6500 return c0 * c1;
6502 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6503 case CEIL_DIV_EXPR:
6504 if (integer_pow2p (TREE_OPERAND (exp, 1))
6505 && host_integerp (TREE_OPERAND (exp, 1), 1))
6507 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6508 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6509 return MAX (1, c0 / c1);
6511 break;
6513 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6514 case SAVE_EXPR: case WITH_RECORD_EXPR:
6515 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6517 case COMPOUND_EXPR:
6518 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6520 case COND_EXPR:
6521 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6522 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6523 return MIN (c0, c1);
6525 default:
6526 break;
6529 return 1;
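/* Editor's note (illustrative, not in the original source): for an
   expression such as  i * 12 + 32  the recursion above gives
     highest_pow2_factor (i)  = 1            (default case)
     highest_pow2_factor (12) = 4            (lowest set bit, 12 & -12)
     MULT -> 1 * 4 = 4,  PLUS -> MIN (4, 32) = 4
   so the whole expression is known to be a multiple of 4.  */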
6532 /* Similar, except that it is known that the expression must be a multiple
6533 of the alignment of TYPE. */
6535 static unsigned HOST_WIDE_INT
6536 highest_pow2_factor_for_type (type, exp)
6537 tree type;
6538 tree exp;
6540 unsigned HOST_WIDE_INT type_align, factor;
6542 factor = highest_pow2_factor (exp);
6543 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6544 return MAX (factor, type_align);
6547 /* Return an object on the placeholder list that matches EXP, a
6548 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6549 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6550 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6551 points to a location that initially holds the place in the placeholder
6552 list at which to start searching (zero means the start of the list) and
6553 into which a pointer to the entry where the object is found is stored. */
6555 tree
6556 find_placeholder (exp, plist)
6557 tree exp;
6558 tree *plist;
6560 tree type = TREE_TYPE (exp);
6561 tree placeholder_expr;
6563 for (placeholder_expr
6564 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6565 placeholder_expr != 0;
6566 placeholder_expr = TREE_CHAIN (placeholder_expr))
6568 tree need_type = TYPE_MAIN_VARIANT (type);
6569 tree elt;
6571 /* Find the outermost reference that is of the type we want. If none,
6572 see if any object has a type that is a pointer to the type we
6573 want. */
6574 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6575 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6576 || TREE_CODE (elt) == COND_EXPR)
6577 ? TREE_OPERAND (elt, 1)
6578 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6579 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6580 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6581 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6582 ? TREE_OPERAND (elt, 0) : 0))
6583 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6585 if (plist)
6586 *plist = placeholder_expr;
6587 return elt;
6590 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6591 elt
6592 = ((TREE_CODE (elt) == COMPOUND_EXPR
6593 || TREE_CODE (elt) == COND_EXPR)
6594 ? TREE_OPERAND (elt, 1)
6595 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6596 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6597 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6598 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6599 ? TREE_OPERAND (elt, 0) : 0))
6600 if (POINTER_TYPE_P (TREE_TYPE (elt))
6601 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6602 == need_type))
6604 if (plist)
6605 *plist = placeholder_expr;
6606 return build1 (INDIRECT_REF, need_type, elt);
6610 return 0;
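/* Editor's note: hypothetical sketch, not in the original source, of the
   usual pairing -- expanding a WITH_RECORD_EXPR pushes its object onto
   placeholder_list so that nested PLACEHOLDER_EXPRs can be resolved by
   find_placeholder while the first operand is expanded.  OBJECT, INNER
   and RESULT are stand-ins.  */
#if 0
  placeholder_list = tree_cons (object, NULL_TREE, placeholder_list);
  result = expand_expr (inner, target, tmode, modifier);
  placeholder_list = TREE_CHAIN (placeholder_list);
#endif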
6613 /* expand_expr: generate code for computing expression EXP.
6614 An rtx for the computed value is returned. The value is never null.
6615 In the case of a void EXP, const0_rtx is returned.
6617 The value may be stored in TARGET if TARGET is nonzero.
6618 TARGET is just a suggestion; callers must assume that
6619 the rtx returned may not be the same as TARGET.
6621 If TARGET is CONST0_RTX, it means that the value will be ignored.
6623 If TMODE is not VOIDmode, it suggests generating the
6624 result in mode TMODE. But this is done only when convenient.
6625 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6626 TMODE is just a suggestion; callers must assume that
6627 the rtx returned may not have mode TMODE.
6629 Note that TARGET may have neither TMODE nor MODE. In that case, it
6630 probably will not be used.
6632 If MODIFIER is EXPAND_SUM then when EXP is an addition
6633 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6634 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6635 products as above, or REG or MEM, or constant.
6636 Ordinarily in such cases we would output mul or add instructions
6637 and then return a pseudo reg containing the sum.
6639 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6640 it also marks a label as absolutely required (it can't be dead).
6641 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6642 This is used for outputting expressions used in initializers.
6644 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6645 with a constant address even if that address is not normally legitimate.
6646 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6648 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6649 a call parameter. Such targets require special care as we haven't yet
6650 marked TARGET so that it's safe from being trashed by libcalls. We
6651 don't want to use TARGET for anything but the final result;
6652 Intermediate values must go elsewhere. Additionally, calls to
6653 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
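/* Editor's note: illustrative only, not part of the original source.
   Because TARGET is merely a hint, a hypothetical caller must cope with
   getting a different rtx back:  */
#if 0
  rtx val = expand_expr (exp, target, GET_MODE (target), EXPAND_NORMAL);
  if (val != target)
    emit_move_insn (target, val);
#endif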
6656 expand_expr (exp, target, tmode, modifier)
6657 tree exp;
6658 rtx target;
6659 enum machine_mode tmode;
6660 enum expand_modifier modifier;
6662 rtx op0, op1, temp;
6663 tree type = TREE_TYPE (exp);
6664 int unsignedp = TREE_UNSIGNED (type);
6665 enum machine_mode mode;
6666 enum tree_code code = TREE_CODE (exp);
6667 optab this_optab;
6668 rtx subtarget, original_target;
6669 int ignore;
6670 tree context;
6672 /* Handle ERROR_MARK before anybody tries to access its type. */
6673 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6675 op0 = CONST0_RTX (tmode);
6676 if (op0 != 0)
6677 return op0;
6678 return const0_rtx;
6681 mode = TYPE_MODE (type);
6682 /* Use subtarget as the target for operand 0 of a binary operation. */
6683 subtarget = get_subtarget (target);
6684 original_target = target;
6685 ignore = (target == const0_rtx
6686 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6687 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6688 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6689 && TREE_CODE (type) == VOID_TYPE));
6691 /* If we are going to ignore this result, we need only do something
6692 if there is a side-effect somewhere in the expression. If there
6693 is, short-circuit the most common cases here. Note that we must
6694 not call expand_expr with anything but const0_rtx in case this
6695 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6697 if (ignore)
6699 if (! TREE_SIDE_EFFECTS (exp))
6700 return const0_rtx;
6702 /* Ensure we reference a volatile object even if value is ignored, but
6703 don't do this if all we are doing is taking its address. */
6704 if (TREE_THIS_VOLATILE (exp)
6705 && TREE_CODE (exp) != FUNCTION_DECL
6706 && mode != VOIDmode && mode != BLKmode
6707 && modifier != EXPAND_CONST_ADDRESS)
6709 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6710 if (GET_CODE (temp) == MEM)
6711 temp = copy_to_reg (temp);
6712 return const0_rtx;
6715 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6716 || code == INDIRECT_REF || code == BUFFER_REF)
6717 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6718 modifier);
6720 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6721 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6723 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6724 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6725 return const0_rtx;
6727 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6728 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6729 /* If the second operand has no side effects, just evaluate
6730 the first. */
6731 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6732 modifier);
6733 else if (code == BIT_FIELD_REF)
6735 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6736 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6737 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6738 return const0_rtx;
6741 target = 0;
6744 #ifdef MAX_INTEGER_COMPUTATION_MODE
6745 /* Only check stuff here if the mode we want is different from the mode
6746 of the expression; if it's the same, check_max_integer_computation_mode
6747 will handle it. Do we really need to check this stuff at all? */
6749 if (target
6750 && GET_MODE (target) != mode
6751 && TREE_CODE (exp) != INTEGER_CST
6752 && TREE_CODE (exp) != PARM_DECL
6753 && TREE_CODE (exp) != ARRAY_REF
6754 && TREE_CODE (exp) != ARRAY_RANGE_REF
6755 && TREE_CODE (exp) != COMPONENT_REF
6756 && TREE_CODE (exp) != BIT_FIELD_REF
6757 && TREE_CODE (exp) != INDIRECT_REF
6758 && TREE_CODE (exp) != CALL_EXPR
6759 && TREE_CODE (exp) != VAR_DECL
6760 && TREE_CODE (exp) != RTL_EXPR)
6762 enum machine_mode mode = GET_MODE (target);
6764 if (GET_MODE_CLASS (mode) == MODE_INT
6765 && mode > MAX_INTEGER_COMPUTATION_MODE)
6766 internal_error ("unsupported wide integer operation");
6769 if (tmode != mode
6770 && TREE_CODE (exp) != INTEGER_CST
6771 && TREE_CODE (exp) != PARM_DECL
6772 && TREE_CODE (exp) != ARRAY_REF
6773 && TREE_CODE (exp) != ARRAY_RANGE_REF
6774 && TREE_CODE (exp) != COMPONENT_REF
6775 && TREE_CODE (exp) != BIT_FIELD_REF
6776 && TREE_CODE (exp) != INDIRECT_REF
6777 && TREE_CODE (exp) != VAR_DECL
6778 && TREE_CODE (exp) != CALL_EXPR
6779 && TREE_CODE (exp) != RTL_EXPR
6780 && GET_MODE_CLASS (tmode) == MODE_INT
6781 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6782 internal_error ("unsupported wide integer operation");
6784 check_max_integer_computation_mode (exp);
6785 #endif
6787 /* If will do cse, generate all results into pseudo registers
6788 since 1) that allows cse to find more things
6789 and 2) otherwise cse could produce an insn the machine
6790 cannot support. An exception is a CONSTRUCTOR into a multi-word
6791 MEM: that's much more likely to be most efficient into the MEM.
6792 Another is a CALL_EXPR which must return in memory. */
6794 if (! cse_not_expected && mode != BLKmode && target
6795 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6796 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6797 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6798 target = 0;
6800 switch (code)
6802 case LABEL_DECL:
6804 tree function = decl_function_context (exp);
6805 /* Labels in containing functions, or labels used from initializers,
6806 must be forced. */
6807 if (modifier == EXPAND_INITIALIZER
6808 || (function != current_function_decl
6809 && function != inline_function_decl
6810 && function != 0))
6811 temp = force_label_rtx (exp);
6812 else
6813 temp = label_rtx (exp);
6815 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6816 if (function != current_function_decl
6817 && function != inline_function_decl && function != 0)
6818 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6819 return temp;
6822 case PARM_DECL:
6823 if (!DECL_RTL_SET_P (exp))
6825 error_with_decl (exp, "prior parameter's size depends on `%s'");
6826 return CONST0_RTX (mode);
6829 /* ... fall through ... */
6831 case VAR_DECL:
6832 /* If a static var's type was incomplete when the decl was written,
6833 but the type is complete now, lay out the decl now. */
6834 if (DECL_SIZE (exp) == 0
6835 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6836 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6837 layout_decl (exp, 0);
6839 /* ... fall through ... */
6841 case FUNCTION_DECL:
6842 case RESULT_DECL:
6843 if (DECL_RTL (exp) == 0)
6844 abort ();
6846 /* Ensure the variable is marked as used even if it doesn't go through
6847 a parser. If it hasn't been used yet, write out an external
6848 definition. */
6849 if (! TREE_USED (exp))
6851 assemble_external (exp);
6852 TREE_USED (exp) = 1;
6855 /* Show we haven't gotten RTL for this yet. */
6856 temp = 0;
6858 /* Handle variables inherited from containing functions. */
6859 context = decl_function_context (exp);
6861 /* We treat inline_function_decl as an alias for the current function
6862 because that is the inline function whose vars, types, etc.
6863 are being merged into the current function.
6864 See expand_inline_function. */
6866 if (context != 0 && context != current_function_decl
6867 && context != inline_function_decl
6868 /* If var is static, we don't need a static chain to access it. */
6869 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6870 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6872 rtx addr;
6874 /* Mark as non-local and addressable. */
6875 DECL_NONLOCAL (exp) = 1;
6876 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6877 abort ();
6878 (*lang_hooks.mark_addressable) (exp);
6879 if (GET_CODE (DECL_RTL (exp)) != MEM)
6880 abort ();
6881 addr = XEXP (DECL_RTL (exp), 0);
6882 if (GET_CODE (addr) == MEM)
6883 addr
6884 = replace_equiv_address (addr,
6885 fix_lexical_addr (XEXP (addr, 0), exp));
6886 else
6887 addr = fix_lexical_addr (addr, exp);
6889 temp = replace_equiv_address (DECL_RTL (exp), addr);
6892 /* This is the case of an array whose size is to be determined
6893 from its initializer, while the initializer is still being parsed.
6894 See expand_decl. */
6896 else if (GET_CODE (DECL_RTL (exp)) == MEM
6897 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6898 temp = validize_mem (DECL_RTL (exp));
6900 /* If DECL_RTL is memory, we are in the normal case and either
6901 the address is not valid or it is not a register and -fforce-addr
6902 is specified, get the address into a register. */
6904 else if (GET_CODE (DECL_RTL (exp)) == MEM
6905 && modifier != EXPAND_CONST_ADDRESS
6906 && modifier != EXPAND_SUM
6907 && modifier != EXPAND_INITIALIZER
6908 && (! memory_address_p (DECL_MODE (exp),
6909 XEXP (DECL_RTL (exp), 0))
6910 || (flag_force_addr
6911 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6912 temp = replace_equiv_address (DECL_RTL (exp),
6913 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6915 /* If we got something, return it. But first, set the alignment
6916 if the address is a register. */
6917 if (temp != 0)
6919 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6920 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6922 return temp;
6925 /* If the mode of DECL_RTL does not match that of the decl, it
6926 must be a promoted value. We return a SUBREG of the wanted mode,
6927 but mark it so that we know that it was already extended. */
6929 if (GET_CODE (DECL_RTL (exp)) == REG
6930 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6932 /* Get the signedness used for this variable. Ensure we get the
6933 same mode we got when the variable was declared. */
6934 if (GET_MODE (DECL_RTL (exp))
6935 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6936 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6937 abort ();
6939 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6940 SUBREG_PROMOTED_VAR_P (temp) = 1;
6941 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6942 return temp;
6945 return DECL_RTL (exp);
6947 case INTEGER_CST:
6948 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6949 TREE_INT_CST_HIGH (exp), mode);
6951 /* ??? If overflow is set, fold will have done an incomplete job,
6952 which can result in (plus xx (const_int 0)), which can get
6953 simplified by validate_replace_rtx during virtual register
6954 instantiation, which can result in unrecognizable insns.
6955 Avoid this by forcing all overflows into registers. */
6956 if (TREE_CONSTANT_OVERFLOW (exp)
6957 && modifier != EXPAND_INITIALIZER)
6958 temp = force_reg (mode, temp);
6960 return temp;
6962 case VECTOR_CST:
6963 return const_vector_from_tree (exp);
6965 case CONST_DECL:
6966 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6968 case REAL_CST:
6969 /* If optimized, generate immediate CONST_DOUBLE
6970 which will be turned into memory by reload if necessary.
6972 We used to force a register so that loop.c could see it. But
6973 this does not allow gen_* patterns to perform optimizations with
6974 the constants. It also produces two insns in cases like "x = 1.0;".
6975 On most machines, floating-point constants are not permitted in
6976 many insns, so we'd end up copying it to a register in any case.
6978 Now, we do the copying in expand_binop, if appropriate. */
6979 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6980 TYPE_MODE (TREE_TYPE (exp)));
6982 case COMPLEX_CST:
6983 /* Handle evaluating a complex constant in a CONCAT target. */
6984 if (original_target && GET_CODE (original_target) == CONCAT)
6986 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6987 rtx rtarg, itarg;
6989 rtarg = XEXP (original_target, 0);
6990 itarg = XEXP (original_target, 1);
6992 /* Move the real and imaginary parts separately. */
6993 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6994 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6996 if (op0 != rtarg)
6997 emit_move_insn (rtarg, op0);
6998 if (op1 != itarg)
6999 emit_move_insn (itarg, op1);
7001 return original_target;
7004 /* ... fall through ... */
7006 case STRING_CST:
7007 temp = output_constant_def (exp, 1);
7009 /* temp contains a constant address.
7010 On RISC machines where a constant address isn't valid,
7011 make some insns to get that address into a register. */
7012 if (modifier != EXPAND_CONST_ADDRESS
7013 && modifier != EXPAND_INITIALIZER
7014 && modifier != EXPAND_SUM
7015 && (! memory_address_p (mode, XEXP (temp, 0))
7016 || flag_force_addr))
7017 return replace_equiv_address (temp,
7018 copy_rtx (XEXP (temp, 0)));
7019 return temp;
7021 case EXPR_WITH_FILE_LOCATION:
7023 rtx to_return;
7024 location_t saved_loc = input_location;
7025 input_filename = EXPR_WFL_FILENAME (exp);
7026 input_line = EXPR_WFL_LINENO (exp);
7027 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
7028 emit_line_note (input_filename, input_line);
7029 /* Possibly avoid switching back and forth here. */
7030 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
7031 input_location = saved_loc;
7032 return to_return;
7035 case SAVE_EXPR:
7036 context = decl_function_context (exp);
7038 /* If this SAVE_EXPR was at global context, assume we are an
7039 initialization function and move it into our context. */
7040 if (context == 0)
7041 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
7043 /* We treat inline_function_decl as an alias for the current function
7044 because that is the inline function whose vars, types, etc.
7045 are being merged into the current function.
7046 See expand_inline_function. */
7047 if (context == current_function_decl || context == inline_function_decl)
7048 context = 0;
7050 /* If this is non-local, handle it. */
7051 if (context)
7053 /* The following call just exists to abort if the context is
7054 not of a containing function. */
7055 find_function_data (context);
7057 temp = SAVE_EXPR_RTL (exp);
7058 if (temp && GET_CODE (temp) == REG)
7060 put_var_into_stack (exp, /*rescan=*/true);
7061 temp = SAVE_EXPR_RTL (exp);
7063 if (temp == 0 || GET_CODE (temp) != MEM)
7064 abort ();
7065 return
7066 replace_equiv_address (temp,
7067 fix_lexical_addr (XEXP (temp, 0), exp));
7069 if (SAVE_EXPR_RTL (exp) == 0)
7071 if (mode == VOIDmode)
7072 temp = const0_rtx;
7073 else
7074 temp = assign_temp (build_qualified_type (type,
7075 (TYPE_QUALS (type)
7076 | TYPE_QUAL_CONST)),
7077 3, 0, 0);
7079 SAVE_EXPR_RTL (exp) = temp;
7080 if (!optimize && GET_CODE (temp) == REG)
7081 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7082 save_expr_regs);
7084 /* If the mode of TEMP does not match that of the expression, it
7085 must be a promoted value. We pass store_expr a SUBREG of the
7086 wanted mode but mark it so that we know that it was already
7087 extended. */
7089 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7091 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7092 promote_mode (type, mode, &unsignedp, 0);
7093 SUBREG_PROMOTED_VAR_P (temp) = 1;
7094 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7097 if (temp == const0_rtx)
7098 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7099 else
7100 store_expr (TREE_OPERAND (exp, 0), temp,
7101 modifier == EXPAND_STACK_PARM ? 2 : 0);
7103 TREE_USED (exp) = 1;
7106 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7107 must be a promoted value. We return a SUBREG of the wanted mode,
7108 but mark it so that we know that it was already extended. */
7110 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7111 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7113 /* Compute the signedness and make the proper SUBREG. */
7114 promote_mode (type, mode, &unsignedp, 0);
7115 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7116 SUBREG_PROMOTED_VAR_P (temp) = 1;
7117 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7118 return temp;
7121 return SAVE_EXPR_RTL (exp);
7123 case UNSAVE_EXPR:
7125 rtx temp;
7126 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7127 TREE_OPERAND (exp, 0)
7128 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7129 return temp;
7132 case PLACEHOLDER_EXPR:
7134 tree old_list = placeholder_list;
7135 tree placeholder_expr = 0;
7137 exp = find_placeholder (exp, &placeholder_expr);
7138 if (exp == 0)
7139 abort ();
7141 placeholder_list = TREE_CHAIN (placeholder_expr);
7142 temp = expand_expr (exp, original_target, tmode, modifier);
7143 placeholder_list = old_list;
7144 return temp;
7147 case WITH_RECORD_EXPR:
7148 /* Put the object on the placeholder list, expand our first operand,
7149 and pop the list. */
7150 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7151 placeholder_list);
7152 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7153 modifier);
7154 placeholder_list = TREE_CHAIN (placeholder_list);
7155 return target;
7157 case GOTO_EXPR:
7158 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7159 expand_goto (TREE_OPERAND (exp, 0));
7160 else
7161 expand_computed_goto (TREE_OPERAND (exp, 0));
7162 return const0_rtx;
7164 case EXIT_EXPR:
7165 expand_exit_loop_if_false (NULL,
7166 invert_truthvalue (TREE_OPERAND (exp, 0)));
7167 return const0_rtx;
7169 case LABELED_BLOCK_EXPR:
7170 if (LABELED_BLOCK_BODY (exp))
7171 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7172 /* Should perhaps use expand_label, but this is simpler and safer. */
7173 do_pending_stack_adjust ();
7174 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7175 return const0_rtx;
7177 case EXIT_BLOCK_EXPR:
7178 if (EXIT_BLOCK_RETURN (exp))
7179 sorry ("returned value in block_exit_expr");
7180 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7181 return const0_rtx;
7183 case LOOP_EXPR:
7184 push_temp_slots ();
7185 expand_start_loop (1);
7186 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7187 expand_end_loop ();
7188 pop_temp_slots ();
7190 return const0_rtx;
7192 case BIND_EXPR:
7194 tree vars = TREE_OPERAND (exp, 0);
7196 /* Need to open a binding contour here because
7197 if there are any cleanups they must be contained here. */
7198 expand_start_bindings (2);
7200 /* Mark the corresponding BLOCK for output in its proper place. */
7201 if (TREE_OPERAND (exp, 2) != 0
7202 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7203 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7205 /* If VARS have not yet been expanded, expand them now. */
7206 while (vars)
7208 if (!DECL_RTL_SET_P (vars))
7209 expand_decl (vars);
7210 expand_decl_init (vars);
7211 vars = TREE_CHAIN (vars);
7214 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7216 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7218 return temp;
7221 case RTL_EXPR:
7222 if (RTL_EXPR_SEQUENCE (exp))
7224 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7225 abort ();
7226 emit_insn (RTL_EXPR_SEQUENCE (exp));
7227 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7229 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7230 free_temps_for_rtl_expr (exp);
7231 return RTL_EXPR_RTL (exp);
7233 case CONSTRUCTOR:
7234 /* If we don't need the result, just ensure we evaluate any
7235 subexpressions. */
7236 if (ignore)
7238 tree elt;
7240 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7241 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7243 return const0_rtx;
7246 /* All elts simple constants => refer to a constant in memory. But
7247 if this is a non-BLKmode mode, let it store a field at a time
7248 since that should make a CONST_INT or CONST_DOUBLE when we
7249 fold. Likewise, if we have a target we can use, it is best to
7250 store directly into the target unless the type is large enough
7251 that memcpy will be used. If we are making an initializer and
7252 all operands are constant, put it in memory as well.
7254 FIXME: Avoid trying to fill vector constructors piece-meal.
7255 Output them with output_constant_def below unless we're sure
7256 they're zeros. This should go away when vector initializers
7257 are treated like VECTOR_CST instead of arrays. */
7259 else if ((TREE_STATIC (exp)
7260 && ((mode == BLKmode
7261 && ! (target != 0 && safe_from_p (target, exp, 1)))
7262 || TREE_ADDRESSABLE (exp)
7263 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7264 && (! MOVE_BY_PIECES_P
7265 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7266 TYPE_ALIGN (type)))
7267 && ((TREE_CODE (type) == VECTOR_TYPE
7268 && !is_zeros_p (exp))
7269 || ! mostly_zeros_p (exp)))))
7270 || ((modifier == EXPAND_INITIALIZER
7271 || modifier == EXPAND_CONST_ADDRESS)
7272 && TREE_CONSTANT (exp)))
7274 rtx constructor = output_constant_def (exp, 1);
7276 if (modifier != EXPAND_CONST_ADDRESS
7277 && modifier != EXPAND_INITIALIZER
7278 && modifier != EXPAND_SUM)
7279 constructor = validize_mem (constructor);
7281 return constructor;
7283 else
7285 /* Handle calls that pass values in multiple non-contiguous
7286 locations. The Irix 6 ABI has examples of this. */
7287 if (target == 0 || ! safe_from_p (target, exp, 1)
7288 || GET_CODE (target) == PARALLEL
7289 || modifier == EXPAND_STACK_PARM)
7290 target
7291 = assign_temp (build_qualified_type (type,
7292 (TYPE_QUALS (type)
7293 | (TREE_READONLY (exp)
7294 * TYPE_QUAL_CONST))),
7295 0, TREE_ADDRESSABLE (exp), 1);
7297 store_constructor (exp, target, 0, int_expr_size (exp));
7298 return target;
7301 case INDIRECT_REF:
7303 tree exp1 = TREE_OPERAND (exp, 0);
7304 tree index;
7305 tree string = string_constant (exp1, &index);
7307 /* Try to optimize reads from const strings. */
7308 if (string
7309 && TREE_CODE (string) == STRING_CST
7310 && TREE_CODE (index) == INTEGER_CST
7311 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7312 && GET_MODE_CLASS (mode) == MODE_INT
7313 && GET_MODE_SIZE (mode) == 1
7314 && modifier != EXPAND_WRITE)
7315 return gen_int_mode (TREE_STRING_POINTER (string)
7316 [TREE_INT_CST_LOW (index)], mode);
7318 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7319 op0 = memory_address (mode, op0);
7320 temp = gen_rtx_MEM (mode, op0);
7321 set_mem_attributes (temp, exp, 0);
7323 /* If we are writing to this object and its type is a record with
7324 readonly fields, we must mark it as readonly so it will
7325 conflict with readonly references to those fields. */
7326 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7327 RTX_UNCHANGING_P (temp) = 1;
7329 return temp;
7332 case ARRAY_REF:
7333 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7334 abort ();
7337 tree array = TREE_OPERAND (exp, 0);
7338 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7339 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7340 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7341 HOST_WIDE_INT i;
7343 /* Optimize the special-case of a zero lower bound.
7345 We convert the low_bound to sizetype to avoid some problems
7346 with constant folding. (E.g. suppose the lower bound is 1,
7347 and its mode is QI. Without the conversion, (ARRAY
7348 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7349 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7351 if (! integer_zerop (low_bound))
7352 index = size_diffop (index, convert (sizetype, low_bound));
7354 /* Fold an expression like: "foo"[2].
7355 This is not done in fold so it won't happen inside &.
7356 Don't fold if this is for wide characters since it's too
7357 difficult to do correctly and this is a very rare case. */
7359 if (modifier != EXPAND_CONST_ADDRESS
7360 && modifier != EXPAND_INITIALIZER
7361 && modifier != EXPAND_MEMORY
7362 && TREE_CODE (array) == STRING_CST
7363 && TREE_CODE (index) == INTEGER_CST
7364 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7365 && GET_MODE_CLASS (mode) == MODE_INT
7366 && GET_MODE_SIZE (mode) == 1)
7367 return gen_int_mode (TREE_STRING_POINTER (array)
7368 [TREE_INT_CST_LOW (index)], mode);
7370 /* If this is a constant index into a constant array,
7371 just get the value from the array. Handle both the cases when
7372 we have an explicit constructor and when our operand is a variable
7373 that was declared const. */
7375 if (modifier != EXPAND_CONST_ADDRESS
7376 && modifier != EXPAND_INITIALIZER
7377 && modifier != EXPAND_MEMORY
7378 && TREE_CODE (array) == CONSTRUCTOR
7379 && ! TREE_SIDE_EFFECTS (array)
7380 && TREE_CODE (index) == INTEGER_CST
7381 && 0 > compare_tree_int (index,
7382 list_length (CONSTRUCTOR_ELTS
7383 (TREE_OPERAND (exp, 0)))))
7385 tree elem;
7387 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7388 i = TREE_INT_CST_LOW (index);
7389 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7392 if (elem)
7393 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7394 modifier);
7397 else if (optimize >= 1
7398 && modifier != EXPAND_CONST_ADDRESS
7399 && modifier != EXPAND_INITIALIZER
7400 && modifier != EXPAND_MEMORY
7401 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7402 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7403 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7405 if (TREE_CODE (index) == INTEGER_CST)
7407 tree init = DECL_INITIAL (array);
7409 if (TREE_CODE (init) == CONSTRUCTOR)
7411 tree elem;
7413 for (elem = CONSTRUCTOR_ELTS (init);
7414 (elem
7415 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7416 elem = TREE_CHAIN (elem))
7419 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7420 return expand_expr (fold (TREE_VALUE (elem)), target,
7421 tmode, modifier);
7423 else if (TREE_CODE (init) == STRING_CST
7424 && 0 > compare_tree_int (index,
7425 TREE_STRING_LENGTH (init)))
7427 tree type = TREE_TYPE (TREE_TYPE (init));
7428 enum machine_mode mode = TYPE_MODE (type);
7430 if (GET_MODE_CLASS (mode) == MODE_INT
7431 && GET_MODE_SIZE (mode) == 1)
7432 return gen_int_mode (TREE_STRING_POINTER (init)
7433 [TREE_INT_CST_LOW (index)], mode);
7438 goto normal_inner_ref;
7440 case COMPONENT_REF:
7441 /* If the operand is a CONSTRUCTOR, we can just extract the
7442 appropriate field if it is present. */
7443 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7445 tree elt;
7447 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7448 elt = TREE_CHAIN (elt))
7449 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7450 /* We can normally use the value of the field in the
7451 CONSTRUCTOR. However, if this is a bitfield in
7452 an integral mode that we can fit in a HOST_WIDE_INT,
7453 we must mask only the number of bits in the bitfield,
7454 since this is done implicitly by the constructor. If
7455 the bitfield does not meet either of those conditions,
7456 we can't do this optimization. */
7457 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7458 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7459 == MODE_INT)
7460 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7461 <= HOST_BITS_PER_WIDE_INT))))
7463 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7464 && modifier == EXPAND_STACK_PARM)
7465 target = 0;
7466 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7467 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7469 HOST_WIDE_INT bitsize
7470 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7471 enum machine_mode imode
7472 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7474 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7476 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7477 op0 = expand_and (imode, op0, op1, target);
7479 else
7481 tree count
7482 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7483 0);
7485 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7486 target, 0);
7487 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7488 target, 0);
7492 return op0;
7495 goto normal_inner_ref;
7497 case BIT_FIELD_REF:
7498 case ARRAY_RANGE_REF:
7499 normal_inner_ref:
7501 enum machine_mode mode1;
7502 HOST_WIDE_INT bitsize, bitpos;
7503 tree offset;
7504 int volatilep = 0;
7505 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7506 &mode1, &unsignedp, &volatilep);
7507 rtx orig_op0;
7509 /* If we got back the original object, something is wrong. Perhaps
7510 we are evaluating an expression too early. In any event, don't
7511 infinitely recurse. */
7512 if (tem == exp)
7513 abort ();
7515 /* If TEM's type is a union of variable size, pass TARGET to the inner
7516 computation, since it will need a temporary and TARGET is known
7517 to suffice. This occurs in unchecked conversion in Ada. */
7519 orig_op0 = op0
7520 = expand_expr (tem,
7521 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7522 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7523 != INTEGER_CST)
7524 && modifier != EXPAND_STACK_PARM
7525 ? target : NULL_RTX),
7526 VOIDmode,
7527 (modifier == EXPAND_INITIALIZER
7528 || modifier == EXPAND_CONST_ADDRESS
7529 || modifier == EXPAND_STACK_PARM)
7530 ? modifier : EXPAND_NORMAL);
7532 /* If this is a constant, put it into a register if it is a
7533 legitimate constant and OFFSET is 0 and memory if it isn't. */
7534 if (CONSTANT_P (op0))
7536 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7537 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7538 && offset == 0)
7539 op0 = force_reg (mode, op0);
7540 else
7541 op0 = validize_mem (force_const_mem (mode, op0));
7544 if (offset != 0)
7546 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7547 EXPAND_SUM);
7549 /* If this object is in a register, put it into memory.
7550 This case can't occur in C, but can in Ada if we have
7551 unchecked conversion of an expression from a scalar type to
7552 an array or record type. */
7553 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7554 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7556 /* If the operand is a SAVE_EXPR, we can deal with this by
7557 forcing the SAVE_EXPR into memory. */
7558 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7560 put_var_into_stack (TREE_OPERAND (exp, 0),
7561 /*rescan=*/true);
7562 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7564 else
7566 tree nt
7567 = build_qualified_type (TREE_TYPE (tem),
7568 (TYPE_QUALS (TREE_TYPE (tem))
7569 | TYPE_QUAL_CONST));
7570 rtx memloc = assign_temp (nt, 1, 1, 1);
7572 emit_move_insn (memloc, op0);
7573 op0 = memloc;
7577 if (GET_CODE (op0) != MEM)
7578 abort ();
7580 #ifdef POINTERS_EXTEND_UNSIGNED
7581 if (GET_MODE (offset_rtx) != Pmode)
7582 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7583 #else
7584 if (GET_MODE (offset_rtx) != ptr_mode)
7585 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7586 #endif
7588 /* A constant address in OP0 can have VOIDmode; we must not try
7589 to call force_reg in that case, so avoid it. */
7590 if (GET_CODE (op0) == MEM
7591 && GET_MODE (op0) == BLKmode
7592 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7593 && bitsize != 0
7594 && (bitpos % bitsize) == 0
7595 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7596 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7598 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7599 bitpos = 0;
7602 op0 = offset_address (op0, offset_rtx,
7603 highest_pow2_factor (offset));
7606 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7607 record its alignment as BIGGEST_ALIGNMENT. */
7608 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7609 && is_aligning_offset (offset, tem))
7610 set_mem_align (op0, BIGGEST_ALIGNMENT);
7612 /* Don't forget about volatility even if this is a bitfield. */
7613 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7615 if (op0 == orig_op0)
7616 op0 = copy_rtx (op0);
7618 MEM_VOLATILE_P (op0) = 1;
7621 /* The following code doesn't handle CONCAT.
7622 Assume only bitpos == 0 can be used for CONCAT, due to
7623 one-element arrays having the same mode as their element. */
7624 if (GET_CODE (op0) == CONCAT)
7626 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7627 abort ();
7628 return op0;
7631 /* In cases where an aligned union has an unaligned object
7632 as a field, we might be extracting a BLKmode value from
7633 an integer-mode (e.g., SImode) object. Handle this case
7634 by doing the extract into an object as wide as the field
7635 (which we know to be the width of a basic mode), then
7636 storing into memory, and changing the mode to BLKmode. */
7637 if (mode1 == VOIDmode
7638 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7639 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7640 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7641 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7642 && modifier != EXPAND_CONST_ADDRESS
7643 && modifier != EXPAND_INITIALIZER)
7644 /* If the field isn't aligned enough to fetch as a memref,
7645 fetch it as a bit field. */
7646 || (mode1 != BLKmode
7647 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7648 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7649 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7650 || (bitpos % BITS_PER_UNIT != 0)))
7651 /* If the type and the field are a constant size and the
7652 size of the type isn't the same size as the bitfield,
7653 we must use bitfield operations. */
7654 || (bitsize >= 0
7655 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7656 == INTEGER_CST)
7657 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7658 bitsize)))
7660 enum machine_mode ext_mode = mode;
7662 if (ext_mode == BLKmode
7663 && ! (target != 0 && GET_CODE (op0) == MEM
7664 && GET_CODE (target) == MEM
7665 && bitpos % BITS_PER_UNIT == 0))
7666 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7668 if (ext_mode == BLKmode)
7670 /* In this case, BITPOS must start at a byte boundary and
7671 TARGET, if specified, must be a MEM. */
7672 if (GET_CODE (op0) != MEM
7673 || (target != 0 && GET_CODE (target) != MEM)
7674 || bitpos % BITS_PER_UNIT != 0)
7675 abort ();
7677 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7678 if (target == 0)
7679 target = assign_temp (type, 0, 1, 1);
7681 emit_block_move (target, op0,
7682 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7683 / BITS_PER_UNIT),
7684 (modifier == EXPAND_STACK_PARM
7685 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7687 return target;
7690 op0 = validize_mem (op0);
7692 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7693 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7695 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7696 (modifier == EXPAND_STACK_PARM
7697 ? NULL_RTX : target),
7698 ext_mode, ext_mode,
7699 int_size_in_bytes (TREE_TYPE (tem)));
7701 /* If the result is a record type and BITSIZE is narrower than
7702 the mode of OP0, an integral mode, and this is a big endian
7703 machine, we must put the field into the high-order bits. */
7704 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7705 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7706 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7707 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7708 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7709 - bitsize),
7710 op0, 1);
7712 if (mode == BLKmode)
7714 rtx new = assign_temp (build_qualified_type
7715 ((*lang_hooks.types.type_for_mode)
7716 (ext_mode, 0),
7717 TYPE_QUAL_CONST), 0, 1, 1);
7719 emit_move_insn (new, op0);
7720 op0 = copy_rtx (new);
7721 PUT_MODE (op0, BLKmode);
7722 set_mem_attributes (op0, exp, 1);
7725 return op0;
7728 /* If the result is BLKmode, use that to access the object
7729 now as well. */
7730 if (mode == BLKmode)
7731 mode1 = BLKmode;
7733 /* Get a reference to just this component. */
7734 if (modifier == EXPAND_CONST_ADDRESS
7735 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7736 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7737 else
7738 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7740 if (op0 == orig_op0)
7741 op0 = copy_rtx (op0);
7743 set_mem_attributes (op0, exp, 0);
7744 if (GET_CODE (XEXP (op0, 0)) == REG)
7745 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7747 MEM_VOLATILE_P (op0) |= volatilep;
7748 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7749 || modifier == EXPAND_CONST_ADDRESS
7750 || modifier == EXPAND_INITIALIZER)
7751 return op0;
7752 else if (target == 0)
7753 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7755 convert_move (target, op0, unsignedp);
7756 return target;
7759 case VTABLE_REF:
7761 rtx insn, before = get_last_insn (), vtbl_ref;
7763 /* Evaluate the interior expression. */
7764 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7765 tmode, modifier);
7767 /* Get or create an instruction off which to hang a note. */
7768 if (REG_P (subtarget))
7770 target = subtarget;
7771 insn = get_last_insn ();
7772 if (insn == before)
7773 abort ();
7774 if (! INSN_P (insn))
7775 insn = prev_nonnote_insn (insn);
7777 else
7779 target = gen_reg_rtx (GET_MODE (subtarget));
7780 insn = emit_move_insn (target, subtarget);
7783 /* Collect the data for the note. */
7784 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7785 vtbl_ref = plus_constant (vtbl_ref,
7786 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7787 /* Discard the initial CONST that was added. */
7788 vtbl_ref = XEXP (vtbl_ref, 0);
7790 REG_NOTES (insn)
7791 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7793 return target;
7796 /* Intended for a reference to a buffer of a file-object in Pascal.
7797 But it's not certain that a special tree code will really be
7798 necessary for these. INDIRECT_REF might work for them. */
7799 case BUFFER_REF:
7800 abort ();
7802 case IN_EXPR:
7804 /* Pascal set IN expression.
7806 Algorithm:
7807 rlo = set_low - (set_low%bits_per_word);
7808 the_word = set [ (index - rlo)/bits_per_word ];
7809 bit_index = index % bits_per_word;
7810 bitmask = 1 << bit_index;
7811 return !!(the_word & bitmask); */
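/* Worked example of the algorithm above (hypothetical values): with
   set_low = 0, index = 11 and bits_per_word = 8 we get rlo = 0,
   the_word = set[1], bit_index = 3 and bitmask = 0x08, so the result
   is bit 3 of the second byte of the set.  */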
7813 tree set = TREE_OPERAND (exp, 0);
7814 tree index = TREE_OPERAND (exp, 1);
7815 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7816 tree set_type = TREE_TYPE (set);
7817 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7818 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7819 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7820 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7821 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7822 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7823 rtx setaddr = XEXP (setval, 0);
7824 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7825 rtx rlow;
7826 rtx diff, quo, rem, addr, bit, result;
7828 /* If the domain is empty, the answer is no. Likewise if the index is
7829 constant and out of bounds. */
7830 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7831 && TREE_CODE (set_low_bound) == INTEGER_CST
7832 && tree_int_cst_lt (set_high_bound, set_low_bound))
7833 || (TREE_CODE (index) == INTEGER_CST
7834 && TREE_CODE (set_low_bound) == INTEGER_CST
7835 && tree_int_cst_lt (index, set_low_bound))
7836 || (TREE_CODE (set_high_bound) == INTEGER_CST
7837 && TREE_CODE (index) == INTEGER_CST
7838 && tree_int_cst_lt (set_high_bound, index))))
7839 return const0_rtx;
7841 if (target == 0)
7842 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7844 /* If we get here, we have to generate the code for both cases
7845 (in range and out of range). */
7847 op0 = gen_label_rtx ();
7848 op1 = gen_label_rtx ();
7850 if (! (GET_CODE (index_val) == CONST_INT
7851 && GET_CODE (lo_r) == CONST_INT))
7852 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7853 GET_MODE (index_val), iunsignedp, op1);
7855 if (! (GET_CODE (index_val) == CONST_INT
7856 && GET_CODE (hi_r) == CONST_INT))
7857 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7858 GET_MODE (index_val), iunsignedp, op1);
7860 /* Calculate the element number of bit zero in the first word
7861 of the set. */
7862 if (GET_CODE (lo_r) == CONST_INT)
7863 rlow = GEN_INT (INTVAL (lo_r)
7864 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7865 else
7866 rlow = expand_binop (index_mode, and_optab, lo_r,
7867 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7868 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7870 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7871 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7873 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7874 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7875 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7876 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7878 addr = memory_address (byte_mode,
7879 expand_binop (index_mode, add_optab, diff,
7880 setaddr, NULL_RTX, iunsignedp,
7881 OPTAB_LIB_WIDEN));
7883 /* Extract the bit we want to examine. */
7884 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7885 gen_rtx_MEM (byte_mode, addr),
7886 make_tree (TREE_TYPE (index), rem),
7887 NULL_RTX, 1);
7888 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7889 GET_MODE (target) == byte_mode ? target : 0,
7890 1, OPTAB_LIB_WIDEN);
7892 if (result != target)
7893 convert_move (target, result, 1);
7895 /* Output the code to handle the out-of-range case. */
7896 emit_jump (op0);
7897 emit_label (op1);
7898 emit_move_insn (target, const0_rtx);
7899 emit_label (op0);
7900 return target;
7903 case WITH_CLEANUP_EXPR:
7904 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7906 WITH_CLEANUP_EXPR_RTL (exp)
7907 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7908 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7909 CLEANUP_EH_ONLY (exp));
7911 /* That's it for this cleanup. */
7912 TREE_OPERAND (exp, 1) = 0;
7914 return WITH_CLEANUP_EXPR_RTL (exp);
7916 case CLEANUP_POINT_EXPR:
7918 /* Start a new binding layer that will keep track of all cleanup
7919 actions to be performed. */
7920 expand_start_bindings (2);
7922 target_temp_slot_level = temp_slot_level;
7924 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7925 /* If we're going to use this value, load it up now. */
7926 if (! ignore)
7927 op0 = force_not_mem (op0);
7928 preserve_temp_slots (op0);
7929 expand_end_bindings (NULL_TREE, 0, 0);
7931 return op0;
7933 case CALL_EXPR:
7934 /* Check for a built-in function. */
7935 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7936 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7937 == FUNCTION_DECL)
7938 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7940 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7941 == BUILT_IN_FRONTEND)
7942 return (*lang_hooks.expand_expr) (exp, original_target,
7943 tmode, modifier);
7944 else
7945 return expand_builtin (exp, target, subtarget, tmode, ignore);
7948 return expand_call (exp, target, ignore);
7950 case NON_LVALUE_EXPR:
7951 case NOP_EXPR:
7952 case CONVERT_EXPR:
7953 case REFERENCE_EXPR:
7954 if (TREE_OPERAND (exp, 0) == error_mark_node)
7955 return const0_rtx;
7957 if (TREE_CODE (type) == UNION_TYPE)
7959 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7961 /* If both input and output are BLKmode, this conversion isn't doing
7962 anything except possibly changing memory attributes. */
7963 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7965 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7966 modifier);
7968 result = copy_rtx (result);
7969 set_mem_attributes (result, exp, 0);
7970 return result;
7973 if (target == 0)
7974 target = assign_temp (type, 0, 1, 1);
7976 if (GET_CODE (target) == MEM)
7977 /* Store data into beginning of memory target. */
7978 store_expr (TREE_OPERAND (exp, 0),
7979 adjust_address (target, TYPE_MODE (valtype), 0),
7980 modifier == EXPAND_STACK_PARM ? 2 : 0);
7982 else if (GET_CODE (target) == REG)
7983 /* Store this field into a union of the proper type. */
7984 store_field (target,
7985 MIN ((int_size_in_bytes (TREE_TYPE
7986 (TREE_OPERAND (exp, 0)))
7987 * BITS_PER_UNIT),
7988 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7989 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7990 VOIDmode, 0, type, 0);
7991 else
7992 abort ();
7994 /* Return the entire union. */
7995 return target;
7998 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8000 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8001 modifier);
8003 /* If the signedness of the conversion differs and OP0 is
8004 a promoted SUBREG, clear that indication since we now
8005 have to do the proper extension. */
8006 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8007 && GET_CODE (op0) == SUBREG)
8008 SUBREG_PROMOTED_VAR_P (op0) = 0;
8010 return op0;
8013 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8014 if (GET_MODE (op0) == mode)
8015 return op0;
8017 /* If OP0 is a constant, just convert it into the proper mode. */
8018 if (CONSTANT_P (op0))
8020 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8021 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8023 if (modifier == EXPAND_INITIALIZER)
8024 return simplify_gen_subreg (mode, op0, inner_mode,
8025 subreg_lowpart_offset (mode,
8026 inner_mode));
8027 else
8028 return convert_modes (mode, inner_mode, op0,
8029 TREE_UNSIGNED (inner_type));
8032 if (modifier == EXPAND_INITIALIZER)
8033 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8035 if (target == 0)
8036 return
8037 convert_to_mode (mode, op0,
8038 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8039 else
8040 convert_move (target, op0,
8041 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8042 return target;
8044 case VIEW_CONVERT_EXPR:
8045 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8047 /* If the input and output modes are both the same, we are done.
8048 Otherwise, if neither mode is BLKmode and both are integral and within
8049 a word, we can use gen_lowpart. If neither is true, make sure the
8050 operand is in memory and convert the MEM to the new mode. */
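/* Illustrative examples (hypothetical types): viewing an "unsigned int"
   as another same-sized integral type can use the gen_lowpart path
   below, while viewing a "float" as an "unsigned int" falls through to
   the memory path, since SFmode is not an integral mode.  */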
8051 if (TYPE_MODE (type) == GET_MODE (op0))
8053 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8054 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8055 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
8056 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
8057 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
8058 op0 = gen_lowpart (TYPE_MODE (type), op0);
8059 else if (GET_CODE (op0) != MEM)
8061 /* If the operand is not a MEM, force it into memory. Since we
8062 are going to be changing the mode of the MEM, don't call
8063 force_const_mem for constants because we don't allow pool
8064 constants to change mode. */
8065 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8067 if (TREE_ADDRESSABLE (exp))
8068 abort ();
8070 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8071 target
8072 = assign_stack_temp_for_type
8073 (TYPE_MODE (inner_type),
8074 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8076 emit_move_insn (target, op0);
8077 op0 = target;
8080 /* At this point, OP0 is in the correct mode. If the output type is such
8081 that the operand is known to be aligned, indicate that it is.
8082 Otherwise, we need only be concerned about alignment for non-BLKmode
8083 results. */
8084 if (GET_CODE (op0) == MEM)
8086 op0 = copy_rtx (op0);
8088 if (TYPE_ALIGN_OK (type))
8089 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8090 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8091 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8093 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8094 HOST_WIDE_INT temp_size
8095 = MAX (int_size_in_bytes (inner_type),
8096 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8097 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8098 temp_size, 0, type);
8099 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8101 if (TREE_ADDRESSABLE (exp))
8102 abort ();
8104 if (GET_MODE (op0) == BLKmode)
8105 emit_block_move (new_with_op0_mode, op0,
8106 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8107 (modifier == EXPAND_STACK_PARM
8108 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8109 else
8110 emit_move_insn (new_with_op0_mode, op0);
8112 op0 = new;
8115 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8118 return op0;
8120 case PLUS_EXPR:
8121 this_optab = ! unsignedp && flag_trapv
8122 && (GET_MODE_CLASS (mode) == MODE_INT)
8123 ? addv_optab : add_optab;
8125 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8126 something else, make sure we add the register to the constant and
8127 then to the other thing. This case can occur during strength
8128 reduction and doing it this way will produce better code if the
8129 frame pointer or argument pointer is eliminated.
8131 fold-const.c will ensure that the constant is always in the inner
8132 PLUS_EXPR, so the only case we need to do anything about is if
8133 sp, ap, or fp is our second argument, in which case we must swap
8134 the innermost first argument and our second argument. */
8136 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8137 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8138 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8139 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8140 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8141 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8143 tree t = TREE_OPERAND (exp, 1);
8145 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8146 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8149 /* If the result is to be ptr_mode and we are adding an integer to
8150 something, we might be forming a constant. So try to use
8151 plus_constant. If it produces a sum and we can't accept it,
8152 use force_operand. This allows P = &ARR[const] to generate
8153 efficient code on machines where a SYMBOL_REF is not a valid
8154 address.
8156 If this is an EXPAND_SUM call, always return the sum. */
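/* Illustrative example (hypothetical array "arr" of 4-byte ints):
   expanding "&arr[3]" here can yield the SYMBOL_REF for "arr" with the
   byte offset 12 folded in by plus_constant, instead of a runtime add.  */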
8157 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8158 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8160 if (modifier == EXPAND_STACK_PARM)
8161 target = 0;
8162 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8163 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8164 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8166 rtx constant_part;
8168 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8169 EXPAND_SUM);
8170 /* Use immed_double_const to ensure that the constant is
8171 truncated according to the mode of OP1, then sign extended
8172 to a HOST_WIDE_INT. Using the constant directly can result
8173 in non-canonical RTL in a 64x32 cross compile. */
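/* Example of the canonicalization described above (assuming a 64-bit
   HOST_WIDE_INT and 32-bit SImode): the SImode constant 0xffffffff must
   be represented as the CONST_INT -1, which is what the call below
   ensures.  */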
8174 constant_part
8175 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8176 (HOST_WIDE_INT) 0,
8177 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8178 op1 = plus_constant (op1, INTVAL (constant_part));
8179 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8180 op1 = force_operand (op1, target);
8181 return op1;
8184 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8185 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8186 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8188 rtx constant_part;
8190 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8191 (modifier == EXPAND_INITIALIZER
8192 ? EXPAND_INITIALIZER : EXPAND_SUM));
8193 if (! CONSTANT_P (op0))
8195 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8196 VOIDmode, modifier);
8197 /* Don't go to both_summands if modifier
8198 says it's not right to return a PLUS. */
8199 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8200 goto binop2;
8201 goto both_summands;
8203 /* Use immed_double_const to ensure that the constant is
8204 truncated according to the mode of OP1, then sign extended
8205 to a HOST_WIDE_INT. Using the constant directly can result
8206 in non-canonical RTL in a 64x32 cross compile. */
8207 constant_part
8208 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8209 (HOST_WIDE_INT) 0,
8210 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8211 op0 = plus_constant (op0, INTVAL (constant_part));
8212 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8213 op0 = force_operand (op0, target);
8214 return op0;
8218 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8219 subtarget = 0;
8221 /* No sense saving up arithmetic to be done
8222 if it's all in the wrong mode to form part of an address.
8223 And force_operand won't know whether to sign-extend or
8224 zero-extend. */
8225 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8226 || mode != ptr_mode)
8228 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8229 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8230 TREE_OPERAND (exp, 1), 0))
8231 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8232 else
8233 op1 = op0;
8234 if (op0 == const0_rtx)
8235 return op1;
8236 if (op1 == const0_rtx)
8237 return op0;
8238 goto binop2;
8241 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8242 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8243 TREE_OPERAND (exp, 1), 0))
8244 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8245 VOIDmode, modifier);
8246 else
8247 op1 = op0;
8249 /* We come here from MINUS_EXPR when the second operand is a
8250 constant. */
8251 both_summands:
8252 /* Make sure any term that's a sum with a constant comes last. */
8253 if (GET_CODE (op0) == PLUS
8254 && CONSTANT_P (XEXP (op0, 1)))
8256 temp = op0;
8257 op0 = op1;
8258 op1 = temp;
8260 /* If adding to a sum including a constant,
8261 associate it to put the constant outside. */
8262 if (GET_CODE (op1) == PLUS
8263 && CONSTANT_P (XEXP (op1, 1)))
8265 rtx constant_term = const0_rtx;
8267 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8268 if (temp != 0)
8269 op0 = temp;
8270 /* Ensure that MULT comes first if there is one. */
8271 else if (GET_CODE (op0) == MULT)
8272 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8273 else
8274 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8276 /* Let's also eliminate constants from op0 if possible. */
8277 op0 = eliminate_constant_term (op0, &constant_term);
8279 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8280 their sum should be a constant. Form it into OP1, since the
8281 result we want will then be OP0 + OP1. */
8283 temp = simplify_binary_operation (PLUS, mode, constant_term,
8284 XEXP (op1, 1));
8285 if (temp != 0)
8286 op1 = temp;
8287 else
8288 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8291 /* Put a constant term last and put a multiplication first. */
8292 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8293 temp = op1, op1 = op0, op0 = temp;
8295 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8296 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8298 case MINUS_EXPR:
8299 /* For initializers, we are allowed to return a MINUS of two
8300 symbolic constants. Here we handle all cases when both operands
8301 are constant. */
8302 /* Handle difference of two symbolic constants,
8303 for the sake of an initializer. */
8304 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8305 && really_constant_p (TREE_OPERAND (exp, 0))
8306 && really_constant_p (TREE_OPERAND (exp, 1)))
8308 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8309 modifier);
8310 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8311 modifier);
8313 /* If the last operand is a CONST_INT, use plus_constant of
8314 the negated constant. Else make the MINUS. */
8315 if (GET_CODE (op1) == CONST_INT)
8316 return plus_constant (op0, - INTVAL (op1));
8317 else
8318 return gen_rtx_MINUS (mode, op0, op1);
8321 this_optab = ! unsignedp && flag_trapv
8322 && (GET_MODE_CLASS(mode) == MODE_INT)
8323 ? subv_optab : sub_optab;
8325 /* No sense saving up arithmetic to be done
8326 if it's all in the wrong mode to form part of an address.
8327 And force_operand won't know whether to sign-extend or
8328 zero-extend. */
8329 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8330 || mode != ptr_mode)
8331 goto binop;
8333 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8334 subtarget = 0;
8336 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8337 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8339 /* Convert A - const to A + (-const). */
8340 if (GET_CODE (op1) == CONST_INT)
8342 op1 = negate_rtx (mode, op1);
8343 goto both_summands;
8346 goto binop2;
8348 case MULT_EXPR:
8349 /* If first operand is constant, swap them.
8350 Thus the following special case checks need only
8351 check the second operand. */
8352 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8354 tree t1 = TREE_OPERAND (exp, 0);
8355 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8356 TREE_OPERAND (exp, 1) = t1;
8359 /* Attempt to return something suitable for generating an
8360 indexed address, for machines that support that. */
8362 if (modifier == EXPAND_SUM && mode == ptr_mode
8363 && host_integerp (TREE_OPERAND (exp, 1), 0))
8365 tree exp1 = TREE_OPERAND (exp, 1);
8367 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8368 EXPAND_SUM);
8370 /* If we knew for certain that this is arithmetic for an array
8371 reference, and we knew the bounds of the array, then we could
8372 apply the distributive law across (PLUS X C) for constant C.
8373 Without such knowledge, we risk overflowing the computation
8374 when both X and C are large, but X+C isn't. */
8375 /* ??? Could perhaps special-case EXP being unsigned and C being
8376 positive. In that case we are certain that X+C is no smaller
8377 than X and so the transformed expression will overflow iff the
8378 original would have. */
8380 if (GET_CODE (op0) != REG)
8381 op0 = force_operand (op0, NULL_RTX);
8382 if (GET_CODE (op0) != REG)
8383 op0 = copy_to_mode_reg (mode, op0);
8385 return gen_rtx_MULT (mode, op0,
8386 gen_int_mode (tree_low_cst (exp1, 0),
8387 TYPE_MODE (TREE_TYPE (exp1))));
8390 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8391 subtarget = 0;
8393 if (modifier == EXPAND_STACK_PARM)
8394 target = 0;
8396 /* Check for multiplying things that have been extended
8397 from a narrower type. If this machine supports multiplying
8398 in that narrower type with a result in the desired type,
8399 do it that way, and avoid the explicit type-conversion. */
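/* Illustrative example (hypothetical target with a 16x16->32 widening
   multiply pattern): for "short a, b; ... (int) a * (int) b" the code
   below can use that pattern directly instead of extending both
   operands to SImode first.  */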
8400 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8401 && TREE_CODE (type) == INTEGER_TYPE
8402 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8403 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8404 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8405 && int_fits_type_p (TREE_OPERAND (exp, 1),
8406 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8407 /* Don't use a widening multiply if a shift will do. */
8408 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8409 > HOST_BITS_PER_WIDE_INT)
8410 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8411 ||
8412 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8413 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8414 ==
8415 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8416 /* If both operands are extended, they must either both
8417 be zero-extended or both be sign-extended. */
8418 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8419 ==
8420 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8422 enum machine_mode innermode
8423 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8424 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8425 ? smul_widen_optab : umul_widen_optab);
8426 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8427 ? umul_widen_optab : smul_widen_optab);
8428 if (mode == GET_MODE_WIDER_MODE (innermode))
8430 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8432 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8433 NULL_RTX, VOIDmode, 0);
8434 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8435 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8436 VOIDmode, 0);
8437 else
8438 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8439 NULL_RTX, VOIDmode, 0);
8440 goto binop2;
8442 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8443 && innermode == word_mode)
8445 rtx htem;
8446 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8447 NULL_RTX, VOIDmode, 0);
8448 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8449 op1 = convert_modes (innermode, mode,
8450 expand_expr (TREE_OPERAND (exp, 1),
8451 NULL_RTX, VOIDmode, 0),
8452 unsignedp);
8453 else
8454 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8455 NULL_RTX, VOIDmode, 0);
8456 temp = expand_binop (mode, other_optab, op0, op1, target,
8457 unsignedp, OPTAB_LIB_WIDEN);
8458 htem = expand_mult_highpart_adjust (innermode,
8459 gen_highpart (innermode, temp),
8460 op0, op1,
8461 gen_highpart (innermode, temp),
8462 unsignedp);
8463 emit_move_insn (gen_highpart (innermode, temp), htem);
8464 return temp;
8468 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8469 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8470 TREE_OPERAND (exp, 1), 0))
8471 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8472 else
8473 op1 = op0;
8474 return expand_mult (mode, op0, op1, target, unsignedp);
8476 case TRUNC_DIV_EXPR:
8477 case FLOOR_DIV_EXPR:
8478 case CEIL_DIV_EXPR:
8479 case ROUND_DIV_EXPR:
8480 case EXACT_DIV_EXPR:
8481 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8482 subtarget = 0;
8483 if (modifier == EXPAND_STACK_PARM)
8484 target = 0;
8485 /* Possible optimization: compute the dividend with EXPAND_SUM;
8486 then, if the divisor is constant, we can optimize the case
8487 where some terms of the dividend have coefficients divisible by it. */
8488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8489 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8490 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8492 case RDIV_EXPR:
8493 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8494 saving an expensive divide. If not, combine will rebuild the original
8495 computation. */
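/* Illustrative example: with -funsafe-math-optimizations, "x / y" can be
   rewritten below as "x * (1.0 / y)", so several divisions by the same
   "y" may end up sharing one reciprocal after CSE.  */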
8496 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8497 && TREE_CODE (type) == REAL_TYPE
8498 && !real_onep (TREE_OPERAND (exp, 0)))
8499 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8500 build (RDIV_EXPR, type,
8501 build_real (type, dconst1),
8502 TREE_OPERAND (exp, 1))),
8503 target, tmode, modifier);
8504 this_optab = sdiv_optab;
8505 goto binop;
8507 case TRUNC_MOD_EXPR:
8508 case FLOOR_MOD_EXPR:
8509 case CEIL_MOD_EXPR:
8510 case ROUND_MOD_EXPR:
8511 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8512 subtarget = 0;
8513 if (modifier == EXPAND_STACK_PARM)
8514 target = 0;
8515 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8516 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8517 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8519 case FIX_ROUND_EXPR:
8520 case FIX_FLOOR_EXPR:
8521 case FIX_CEIL_EXPR:
8522 abort (); /* Not used for C. */
8524 case FIX_TRUNC_EXPR:
8525 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8526 if (target == 0 || modifier == EXPAND_STACK_PARM)
8527 target = gen_reg_rtx (mode);
8528 expand_fix (target, op0, unsignedp);
8529 return target;
8531 case FLOAT_EXPR:
8532 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8533 if (target == 0 || modifier == EXPAND_STACK_PARM)
8534 target = gen_reg_rtx (mode);
8535 /* expand_float can't figure out what to do if FROM has VOIDmode.
8536 So give it the correct mode. With -O, cse will optimize this. */
8537 if (GET_MODE (op0) == VOIDmode)
8538 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8539 op0);
8540 expand_float (target, op0,
8541 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8542 return target;
8544 case NEGATE_EXPR:
8545 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8546 if (modifier == EXPAND_STACK_PARM)
8547 target = 0;
8548 temp = expand_unop (mode,
8549 ! unsignedp && flag_trapv
8550 && (GET_MODE_CLASS(mode) == MODE_INT)
8551 ? negv_optab : neg_optab, op0, target, 0);
8552 if (temp == 0)
8553 abort ();
8554 return temp;
8556 case ABS_EXPR:
8557 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8558 if (modifier == EXPAND_STACK_PARM)
8559 target = 0;
8561 /* Handle complex values specially. */
8562 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8563 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8564 return expand_complex_abs (mode, op0, target, unsignedp);
8566 /* Unsigned abs is simply the operand. Testing here means we don't
8567 risk generating incorrect code below. */
8568 if (TREE_UNSIGNED (type))
8569 return op0;
8571 return expand_abs (mode, op0, target, unsignedp,
8572 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8574 case MAX_EXPR:
8575 case MIN_EXPR:
8576 target = original_target;
8577 if (target == 0
8578 || modifier == EXPAND_STACK_PARM
8579 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8580 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8581 || GET_MODE (target) != mode
8582 || (GET_CODE (target) == REG
8583 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8584 target = gen_reg_rtx (mode);
8585 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8586 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8588 /* First try to do it with a special MIN or MAX instruction.
8589 If that does not win, use a conditional jump to select the proper
8590 value. */
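/* Illustrative sketch of the fallback (hypothetical variables): for
   "MAX (a, b)" the jump sequence below behaves roughly like
   "t = a; if (!(t >= b)) t = b;", with the comparison signedness taken
   from the operand type.  */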
8591 this_optab = (TREE_UNSIGNED (type)
8592 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8593 : (code == MIN_EXPR ? smin_optab : smax_optab));
8595 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8596 OPTAB_WIDEN);
8597 if (temp != 0)
8598 return temp;
8600 /* At this point, a MEM target is no longer useful; we will get better
8601 code without it. */
8603 if (GET_CODE (target) == MEM)
8604 target = gen_reg_rtx (mode);
8606 if (target != op0)
8607 emit_move_insn (target, op0);
8609 op0 = gen_label_rtx ();
8611 /* If this mode is an integer too wide to compare properly,
8612 compare word by word. Rely on cse to optimize constant cases. */
8613 if (GET_MODE_CLASS (mode) == MODE_INT
8614 && ! can_compare_p (GE, mode, ccp_jump))
8616 if (code == MAX_EXPR)
8617 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8618 target, op1, NULL_RTX, op0);
8619 else
8620 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8621 op1, target, NULL_RTX, op0);
8623 else
8625 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8626 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8627 unsignedp, mode, NULL_RTX, NULL_RTX,
8628 op0);
8630 emit_move_insn (target, op1);
8631 emit_label (op0);
8632 return target;
8634 case BIT_NOT_EXPR:
8635 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8636 if (modifier == EXPAND_STACK_PARM)
8637 target = 0;
8638 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8639 if (temp == 0)
8640 abort ();
8641 return temp;
8643 case FFS_EXPR:
8644 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8645 if (modifier == EXPAND_STACK_PARM)
8646 target = 0;
8647 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8648 if (temp == 0)
8649 abort ();
8650 return temp;
8652 case CLZ_EXPR:
8653 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8654 temp = expand_unop (mode, clz_optab, op0, target, 1);
8655 if (temp == 0)
8656 abort ();
8657 return temp;
8659 case CTZ_EXPR:
8660 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8661 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8662 if (temp == 0)
8663 abort ();
8664 return temp;
8666 case POPCOUNT_EXPR:
8667 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8668 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8669 if (temp == 0)
8670 abort ();
8671 return temp;
8673 case PARITY_EXPR:
8674 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8675 temp = expand_unop (mode, parity_optab, op0, target, 1);
8676 if (temp == 0)
8677 abort ();
8678 return temp;
8680 /* ??? Can optimize bitwise operations with one arg constant.
8681 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8682 and (a bitwise1 b) bitwise2 b (etc)
8683 but that is probably not worthwhile. */
8685 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8686 boolean values when we want in all cases to compute both of them. In
8687 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8688 as actual zero-or-1 values and then bitwise anding. In cases where
8689 there cannot be any side effects, better code would be made by
8690 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8691 how to recognize those cases. */
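/* Illustrative example (hypothetical calls): expanding "f () && g ()" as
   TRUTH_AND_EXPR evaluates both calls and ANDs the 0/1 results, while
   TRUTH_ANDIF_EXPR would skip "g ()" whenever "f ()" yields 0.  */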
8693 case TRUTH_AND_EXPR:
8694 case BIT_AND_EXPR:
8695 this_optab = and_optab;
8696 goto binop;
8698 case TRUTH_OR_EXPR:
8699 case BIT_IOR_EXPR:
8700 this_optab = ior_optab;
8701 goto binop;
8703 case TRUTH_XOR_EXPR:
8704 case BIT_XOR_EXPR:
8705 this_optab = xor_optab;
8706 goto binop;
8708 case LSHIFT_EXPR:
8709 case RSHIFT_EXPR:
8710 case LROTATE_EXPR:
8711 case RROTATE_EXPR:
8712 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8713 subtarget = 0;
8714 if (modifier == EXPAND_STACK_PARM)
8715 target = 0;
8716 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8717 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8718 unsignedp);
8720 /* Could determine the answer when only additive constants differ. Also,
8721 the addition of one can be handled by changing the condition. */
8722 case LT_EXPR:
8723 case LE_EXPR:
8724 case GT_EXPR:
8725 case GE_EXPR:
8726 case EQ_EXPR:
8727 case NE_EXPR:
8728 case UNORDERED_EXPR:
8729 case ORDERED_EXPR:
8730 case UNLT_EXPR:
8731 case UNLE_EXPR:
8732 case UNGT_EXPR:
8733 case UNGE_EXPR:
8734 case UNEQ_EXPR:
8735 temp = do_store_flag (exp,
8736 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8737 tmode != VOIDmode ? tmode : mode, 0);
8738 if (temp != 0)
8739 return temp;
8741 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8742 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8743 && original_target
8744 && GET_CODE (original_target) == REG
8745 && (GET_MODE (original_target)
8746 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8748 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8749 VOIDmode, 0);
8751 /* If temp is constant, we can just compute the result. */
8752 if (GET_CODE (temp) == CONST_INT)
8754 if (INTVAL (temp) != 0)
8755 emit_move_insn (target, const1_rtx);
8756 else
8757 emit_move_insn (target, const0_rtx);
8759 return target;
8762 if (temp != original_target)
8764 enum machine_mode mode1 = GET_MODE (temp);
8765 if (mode1 == VOIDmode)
8766 mode1 = tmode != VOIDmode ? tmode : mode;
8768 temp = copy_to_mode_reg (mode1, temp);
8771 op1 = gen_label_rtx ();
8772 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8773 GET_MODE (temp), unsignedp, op1);
8774 emit_move_insn (temp, const1_rtx);
8775 emit_label (op1);
8776 return temp;
8779 /* If no set-flag instruction, must generate a conditional
8780 store into a temporary variable. Drop through
8781 and handle this like && and ||. */
8783 case TRUTH_ANDIF_EXPR:
8784 case TRUTH_ORIF_EXPR:
8785 if (! ignore
8786 && (target == 0
8787 || modifier == EXPAND_STACK_PARM
8788 || ! safe_from_p (target, exp, 1)
8789 /* Make sure we don't have a hard reg (such as the function's return
8790 value) live across basic blocks, if not optimizing. */
8791 || (!optimize && GET_CODE (target) == REG
8792 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8793 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8795 if (target)
8796 emit_clr_insn (target);
8798 op1 = gen_label_rtx ();
8799 jumpifnot (exp, op1);
8801 if (target)
8802 emit_0_to_1_insn (target);
8804 emit_label (op1);
8805 return ignore ? const0_rtx : target;
8807 case TRUTH_NOT_EXPR:
8808 if (modifier == EXPAND_STACK_PARM)
8809 target = 0;
8810 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8811 /* The parser is careful to generate TRUTH_NOT_EXPR
8812 only with operands that are always zero or one. */
8813 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8814 target, 1, OPTAB_LIB_WIDEN);
8815 if (temp == 0)
8816 abort ();
8817 return temp;
8819 case COMPOUND_EXPR:
8820 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8821 emit_queue ();
8822 return expand_expr (TREE_OPERAND (exp, 1),
8823 (ignore ? const0_rtx : target),
8824 VOIDmode, modifier);
8826 case COND_EXPR:
8827 /* If we would have a "singleton" (see below) were it not for a
8828 conversion in each arm, bring that conversion back out. */
8829 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8830 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8831 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8832 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8834 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8835 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8837 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8838 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8839 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8840 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8841 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8842 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8843 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8844 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8845 return expand_expr (build1 (NOP_EXPR, type,
8846 build (COND_EXPR, TREE_TYPE (iftrue),
8847 TREE_OPERAND (exp, 0),
8848 iftrue, iffalse)),
8849 target, tmode, modifier);
8853 /* Note that COND_EXPRs whose type is a structure or union
8854 are required to be constructed to contain assignments of
8855 a temporary variable, so that we can evaluate them here
8856 for side effect only. If type is void, we must do likewise. */
8858 /* If an arm of the branch requires a cleanup,
8859 only that cleanup is performed. */
8861 tree singleton = 0;
8862 tree binary_op = 0, unary_op = 0;
8864 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8865 convert it to our mode, if necessary. */
8866 if (integer_onep (TREE_OPERAND (exp, 1))
8867 && integer_zerop (TREE_OPERAND (exp, 2))
8868 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8870 if (ignore)
8872 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8873 modifier);
8874 return const0_rtx;
8877 if (modifier == EXPAND_STACK_PARM)
8878 target = 0;
8879 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8880 if (GET_MODE (op0) == mode)
8881 return op0;
8883 if (target == 0)
8884 target = gen_reg_rtx (mode);
8885 convert_move (target, op0, unsignedp);
8886 return target;
8889 /* Check for X ? A + B : A. If we have this, we can copy A to the
8890 output and conditionally add B. Similarly for unary operations.
8891 Don't do this if X has side-effects because those side effects
8892 might affect A or B and the "?" operation is a sequence point in
8893 ANSI. (operand_equal_p tests for side effects.) */
8895 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8896 && operand_equal_p (TREE_OPERAND (exp, 2),
8897 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8898 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8899 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8900 && operand_equal_p (TREE_OPERAND (exp, 1),
8901 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8902 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8903 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8904 && operand_equal_p (TREE_OPERAND (exp, 2),
8905 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8906 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8907 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8908 && operand_equal_p (TREE_OPERAND (exp, 1),
8909 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8910 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8912 /* If we are not to produce a result, we have no target. Otherwise,
8913 if a target was specified use it; it will not be used as an
8914 intermediate target unless it is safe. If no target, use a
8915 temporary. */
8917 if (ignore)
8918 temp = 0;
8919 else if (modifier == EXPAND_STACK_PARM)
8920 temp = assign_temp (type, 0, 0, 1);
8921 else if (original_target
8922 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8923 || (singleton && GET_CODE (original_target) == REG
8924 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8925 && original_target == var_rtx (singleton)))
8926 && GET_MODE (original_target) == mode
8927 #ifdef HAVE_conditional_move
8928 && (! can_conditionally_move_p (mode)
8929 || GET_CODE (original_target) == REG
8930 || TREE_ADDRESSABLE (type))
8931 #endif
8932 && (GET_CODE (original_target) != MEM
8933 || TREE_ADDRESSABLE (type)))
8934 temp = original_target;
8935 else if (TREE_ADDRESSABLE (type))
8936 abort ();
8937 else
8938 temp = assign_temp (type, 0, 0, 1);
8940 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8941 do the test of X as a store-flag operation, do this as
8942 A + ((X != 0) << log C). Similarly for other simple binary
8943 operators. Only do for C == 1 if BRANCH_COST is low. */
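/* Worked example of the transformation above (hypothetical variables,
   on a target whose BRANCH_COST makes it worthwhile): "x ? a + 4 : a"
   with a store-flag target becomes "a + ((x != 0) << 2)", since 4 is
   1 << 2.  */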
8944 if (temp && singleton && binary_op
8945 && (TREE_CODE (binary_op) == PLUS_EXPR
8946 || TREE_CODE (binary_op) == MINUS_EXPR
8947 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8948 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8949 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8950 : integer_onep (TREE_OPERAND (binary_op, 1)))
8951 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8953 rtx result;
8954 tree cond;
8955 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8956 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8957 ? addv_optab : add_optab)
8958 : TREE_CODE (binary_op) == MINUS_EXPR
8959 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8960 ? subv_optab : sub_optab)
8961 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8962 : xor_optab);
8964 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8965 if (singleton == TREE_OPERAND (exp, 1))
8966 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8967 else
8968 cond = TREE_OPERAND (exp, 0);
8970 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8971 ? temp : NULL_RTX),
8972 mode, BRANCH_COST <= 1);
8974 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8975 result = expand_shift (LSHIFT_EXPR, mode, result,
8976 build_int_2 (tree_log2
8977 (TREE_OPERAND
8978 (binary_op, 1)),
8979 0),
8980 (safe_from_p (temp, singleton, 1)
8981 ? temp : NULL_RTX), 0);
8983 if (result)
8985 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8986 return expand_binop (mode, boptab, op1, result, temp,
8987 unsignedp, OPTAB_LIB_WIDEN);
8991 do_pending_stack_adjust ();
8992 NO_DEFER_POP;
8993 op0 = gen_label_rtx ();
8995 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8997 if (temp != 0)
8999 /* If the target conflicts with the other operand of the
9000 binary op, we can't use it. Also, we can't use the target
9001 if it is a hard register, because evaluating the condition
9002 might clobber it. */
9003 if ((binary_op
9004 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
9005 || (GET_CODE (temp) == REG
9006 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
9007 temp = gen_reg_rtx (mode);
9008 store_expr (singleton, temp,
9009 modifier == EXPAND_STACK_PARM ? 2 : 0);
9011 else
9012 expand_expr (singleton,
9013 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9014 if (singleton == TREE_OPERAND (exp, 1))
9015 jumpif (TREE_OPERAND (exp, 0), op0);
9016 else
9017 jumpifnot (TREE_OPERAND (exp, 0), op0);
9019 start_cleanup_deferral ();
9020 if (binary_op && temp == 0)
9021 /* Just touch the other operand. */
9022 expand_expr (TREE_OPERAND (binary_op, 1),
9023 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9024 else if (binary_op)
9025 store_expr (build (TREE_CODE (binary_op), type,
9026 make_tree (type, temp),
9027 TREE_OPERAND (binary_op, 1)),
9028 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
9029 else
9030 store_expr (build1 (TREE_CODE (unary_op), type,
9031 make_tree (type, temp)),
9032 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
9033 op1 = op0;
9035 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
9036 comparison operator. If we have one of these cases, set the
9037 output to A, branch on A (cse will merge these two references),
9038 then set the output to FOO. */
9039 else if (temp
9040 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
9041 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
9042 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
9043 TREE_OPERAND (exp, 1), 0)
9044 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
9045 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
9046 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
9048 if (GET_CODE (temp) == REG
9049 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
9050 temp = gen_reg_rtx (mode);
9051 store_expr (TREE_OPERAND (exp, 1), temp,
9052 modifier == EXPAND_STACK_PARM ? 2 : 0);
9053 jumpif (TREE_OPERAND (exp, 0), op0);
9055 start_cleanup_deferral ();
9056 store_expr (TREE_OPERAND (exp, 2), temp,
9057 modifier == EXPAND_STACK_PARM ? 2 : 0);
9058 op1 = op0;
9060 else if (temp
9061 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
9062 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
9063 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
9064 TREE_OPERAND (exp, 2), 0)
9065 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
9066 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
9067 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
9069 if (GET_CODE (temp) == REG
9070 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
9071 temp = gen_reg_rtx (mode);
9072 store_expr (TREE_OPERAND (exp, 2), temp,
9073 modifier == EXPAND_STACK_PARM ? 2 : 0);
9074 jumpifnot (TREE_OPERAND (exp, 0), op0);
9076 start_cleanup_deferral ();
9077 store_expr (TREE_OPERAND (exp, 1), temp,
9078 modifier == EXPAND_STACK_PARM ? 2 : 0);
9079 op1 = op0;
9081 else
9083 op1 = gen_label_rtx ();
9084 jumpifnot (TREE_OPERAND (exp, 0), op0);
9086 start_cleanup_deferral ();
9088 /* One branch of the cond can be void, if it never returns. For
9089 example, A ? throw : E. */
9090 if (temp != 0
9091 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
9092 store_expr (TREE_OPERAND (exp, 1), temp,
9093 modifier == EXPAND_STACK_PARM ? 2 : 0);
9094 else
9095 expand_expr (TREE_OPERAND (exp, 1),
9096 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9097 end_cleanup_deferral ();
9098 emit_queue ();
9099 emit_jump_insn (gen_jump (op1));
9100 emit_barrier ();
9101 emit_label (op0);
9102 start_cleanup_deferral ();
9103 if (temp != 0
9104 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
9105 store_expr (TREE_OPERAND (exp, 2), temp,
9106 modifier == EXPAND_STACK_PARM ? 2 : 0);
9107 else
9108 expand_expr (TREE_OPERAND (exp, 2),
9109 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9112 end_cleanup_deferral ();
9114 emit_queue ();
9115 emit_label (op1);
9116 OK_DEFER_POP;
9118 return temp;
9121 case TARGET_EXPR:
9123 /* Something needs to be initialized, but we didn't know
9124 where that thing was when building the tree. For example,
9125 it could be the return value of a function, or a parameter
9126 to a function which is laid out on the stack, or a temporary
9127 variable which must be passed by reference.
9129 We guarantee that the expression will either be constructed
9130 or copied into our original target. */
9132 tree slot = TREE_OPERAND (exp, 0);
9133 tree cleanups = NULL_TREE;
9134 tree exp1;
9136 if (TREE_CODE (slot) != VAR_DECL)
9137 abort ();
9139 if (! ignore)
9140 target = original_target;
9142 /* Set this here so that if we get a target that refers to a
9143 register variable that's already been used, put_reg_into_stack
9144 knows that it should fix up those uses. */
9145 TREE_USED (slot) = 1;
9147 if (target == 0)
9149 if (DECL_RTL_SET_P (slot))
9151 target = DECL_RTL (slot);
9152 /* If we have already expanded the slot, don't do
9153 it again. (mrs) */
9154 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9155 return target;
9157 else
9159 target = assign_temp (type, 2, 0, 1);
9160 /* All temp slots at this level must not conflict. */
9161 preserve_temp_slots (target);
9162 SET_DECL_RTL (slot, target);
9163 if (TREE_ADDRESSABLE (slot))
9164 put_var_into_stack (slot, /*rescan=*/false);
9166 /* Since SLOT is not known to the called function
9167 to belong to its stack frame, we must build an explicit
9168 cleanup. This case occurs when we must build up a reference
9169 to pass the reference as an argument. In this case,
9170 it is very likely that such a reference need not be
9171 built here. */
9173 if (TREE_OPERAND (exp, 2) == 0)
9174 TREE_OPERAND (exp, 2)
9175 = (*lang_hooks.maybe_build_cleanup) (slot);
9176 cleanups = TREE_OPERAND (exp, 2);
9179 else
9181 /* This case does occur when expanding a parameter which
9182 needs to be constructed on the stack. The target
9183 is the actual stack address that we want to initialize.
9184 The function we call will perform the cleanup in this case. */
9186 /* If we have already assigned it space, use that space,
9187 not target that we were passed in, as our target
9188 parameter is only a hint. */
9189 if (DECL_RTL_SET_P (slot))
9191 target = DECL_RTL (slot);
9192 /* If we have already expanded the slot, don't do
9193 it again. (mrs) */
9194 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9195 return target;
9197 else
9199 SET_DECL_RTL (slot, target);
9200 /* If we must have an addressable slot, then make sure that
9201 the RTL that we just stored in slot is OK. */
9202 if (TREE_ADDRESSABLE (slot))
9203 put_var_into_stack (slot, /*rescan=*/true);
9207 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9208 /* Mark it as expanded. */
9209 TREE_OPERAND (exp, 1) = NULL_TREE;
9211 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9213 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9215 return target;
9218 case INIT_EXPR:
9220 tree lhs = TREE_OPERAND (exp, 0);
9221 tree rhs = TREE_OPERAND (exp, 1);
9223 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9224 return temp;
9227 case MODIFY_EXPR:
9229 /* If lhs is complex, expand calls in rhs before computing it.
9230 That's so we don't compute a pointer and save it over a
9231 call. If lhs is simple, compute it first so we can give it
9232 as a target if the rhs is just a call. This avoids an
9233 extra temp and copy and that prevents a partial-subsumption
9234 which makes bad code. Actually we could treat
9235 component_ref's of vars like vars. */
9237 tree lhs = TREE_OPERAND (exp, 0);
9238 tree rhs = TREE_OPERAND (exp, 1);
9240 temp = 0;
9242 /* Check for |= or &= of a bitfield of size one into another bitfield
9243 of size 1. In this case, (unless we need the result of the
9244 assignment) we can do this more efficiently with a
9245 test followed by an assignment, if necessary.
9247 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9248 things change so we do, this code should be enhanced to
9249 support it. */
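/* A worked example (illustrative only, not taken from the surrounding
   code): given single-bit fields in

       struct s { unsigned a : 1, b : 1; } x;

   the statement "x.a |= x.b;" whose value is unused can be expanded as
   "if (x.b) x.a = 1;", and "x.a &= x.b;" as "if (!x.b) x.a = 0;",
   avoiding a read-modify-write of x.a.  */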
9250 if (ignore
9251 && TREE_CODE (lhs) == COMPONENT_REF
9252 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9253 || TREE_CODE (rhs) == BIT_AND_EXPR)
9254 && TREE_OPERAND (rhs, 0) == lhs
9255 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9256 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9257 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9259 rtx label = gen_label_rtx ();
9261 do_jump (TREE_OPERAND (rhs, 1),
9262 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9263 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9264 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9265 (TREE_CODE (rhs) == BIT_IOR_EXPR
9266 ? integer_one_node
9267 : integer_zero_node)),
9268 0, 0);
9269 do_pending_stack_adjust ();
9270 emit_label (label);
9271 return const0_rtx;
9274 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9276 return temp;
9279 case RETURN_EXPR:
9280 if (!TREE_OPERAND (exp, 0))
9281 expand_null_return ();
9282 else
9283 expand_return (TREE_OPERAND (exp, 0));
9284 return const0_rtx;
9286 case PREINCREMENT_EXPR:
9287 case PREDECREMENT_EXPR:
9288 return expand_increment (exp, 0, ignore);
9290 case POSTINCREMENT_EXPR:
9291 case POSTDECREMENT_EXPR:
9292 /* Faster to treat as pre-increment if result is not used. */
9293 return expand_increment (exp, ! ignore, ignore);
9295 case ADDR_EXPR:
9296 if (modifier == EXPAND_STACK_PARM)
9297 target = 0;
9298 /* Are we taking the address of a nested function? */
9299 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9300 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9301 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9302 && ! TREE_STATIC (exp))
9304 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9305 op0 = force_operand (op0, target);
9307 /* If we are taking the address of something erroneous, just
9308 return a zero. */
9309 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9310 return const0_rtx;
9311 /* If we are taking the address of a constant and are at the
9312 top level, we have to use output_constant_def since we can't
9313 call force_const_mem at top level. */
9314 else if (cfun == 0
9315 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9316 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9317 == 'c')))
9318 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9319 else
9321 /* We make sure to pass const0_rtx down if we came in with
9322 ignore set, to avoid doing the cleanups twice for something. */
9323 op0 = expand_expr (TREE_OPERAND (exp, 0),
9324 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9325 (modifier == EXPAND_INITIALIZER
9326 ? modifier : EXPAND_CONST_ADDRESS));
9328 /* If we are going to ignore the result, OP0 will have been set
9329 to const0_rtx, so just return it. Don't get confused and
9330 think we are taking the address of the constant. */
9331 if (ignore)
9332 return op0;
9334 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9335 clever and returns a REG when given a MEM. */
9336 op0 = protect_from_queue (op0, 1);
9338 /* We would like the object in memory. If it is a constant, we can
9339 have it be statically allocated into memory. For a non-constant,
9340 we need to allocate some memory and store the value into it. */
9342 if (CONSTANT_P (op0))
9343 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9344 op0);
9345 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9346 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9347 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9349 /* If the operand is a SAVE_EXPR, we can deal with this by
9350 forcing the SAVE_EXPR into memory. */
9351 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9353 put_var_into_stack (TREE_OPERAND (exp, 0),
9354 /*rescan=*/true);
9355 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9357 else
9359 /* If this object is in a register, it can't be BLKmode. */
9360 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9361 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9363 if (GET_CODE (op0) == PARALLEL)
9364 /* Handle calls that pass values in multiple
9365 non-contiguous locations. The Irix 6 ABI has examples
9366 of this. */
9367 emit_group_store (memloc, op0,
9368 int_size_in_bytes (inner_type));
9369 else
9370 emit_move_insn (memloc, op0);
9372 op0 = memloc;
9376 if (GET_CODE (op0) != MEM)
9377 abort ();
9379 mark_temp_addr_taken (op0);
9380 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9382 op0 = XEXP (op0, 0);
9383 #ifdef POINTERS_EXTEND_UNSIGNED
9384 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9385 && mode == ptr_mode)
9386 op0 = convert_memory_address (ptr_mode, op0);
9387 #endif
9388 return op0;
9391 /* If OP0 is not aligned as least as much as the type requires, we
9392 need to make a temporary, copy OP0 to it, and take the address of
9393 the temporary. We want to use the alignment of the type, not of
9394 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9395 the test for BLKmode means that can't happen. The test for
9396 BLKmode is because we never make mis-aligned MEMs with
9397 non-BLKmode.
9399 We don't need to do this at all if the machine doesn't have
9400 strict alignment. */
9401 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9402 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9403 > MEM_ALIGN (op0))
9404 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9406 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9407 rtx new;
9409 if (TYPE_ALIGN_OK (inner_type))
9410 abort ();
9412 if (TREE_ADDRESSABLE (inner_type))
9414 /* We can't make a bitwise copy of this object, so fail. */
9415 error ("cannot take the address of an unaligned member");
9416 return const0_rtx;
9419 new = assign_stack_temp_for_type
9420 (TYPE_MODE (inner_type),
9421 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9422 : int_size_in_bytes (inner_type),
9423 1, build_qualified_type (inner_type,
9424 (TYPE_QUALS (inner_type)
9425 | TYPE_QUAL_CONST)));
9427 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9428 (modifier == EXPAND_STACK_PARM
9429 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9431 op0 = new;
9434 op0 = force_operand (XEXP (op0, 0), target);
9437 if (flag_force_addr
9438 && GET_CODE (op0) != REG
9439 && modifier != EXPAND_CONST_ADDRESS
9440 && modifier != EXPAND_INITIALIZER
9441 && modifier != EXPAND_SUM)
9442 op0 = force_reg (Pmode, op0);
9444 if (GET_CODE (op0) == REG
9445 && ! REG_USERVAR_P (op0))
9446 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9448 #ifdef POINTERS_EXTEND_UNSIGNED
9449 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9450 && mode == ptr_mode)
9451 op0 = convert_memory_address (ptr_mode, op0);
9452 #endif
9454 return op0;
9456 case ENTRY_VALUE_EXPR:
9457 abort ();
9459 /* COMPLEX type for Extended Pascal & Fortran */
9460 case COMPLEX_EXPR:
9462 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9463 rtx insns;
9465 /* Get the rtx code of the operands. */
9466 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9467 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9469 if (! target)
9470 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9472 start_sequence ();
9474 /* Move the real (op0) and imaginary (op1) parts to their location. */
9475 emit_move_insn (gen_realpart (mode, target), op0);
9476 emit_move_insn (gen_imagpart (mode, target), op1);
9478 insns = get_insns ();
9479 end_sequence ();
9481 /* Complex construction should appear as a single unit. */
9482 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9483 each with a separate pseudo as destination.
9484 It's not correct for flow to treat them as a unit. */
9485 if (GET_CODE (target) != CONCAT)
9486 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9487 else
9488 emit_insn (insns);
9490 return target;
9493 case REALPART_EXPR:
9494 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9495 return gen_realpart (mode, op0);
9497 case IMAGPART_EXPR:
9498 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9499 return gen_imagpart (mode, op0);
9501 case CONJ_EXPR:
9503 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9504 rtx imag_t;
9505 rtx insns;
9507 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9509 if (! target)
9510 target = gen_reg_rtx (mode);
9512 start_sequence ();
9514 /* Store the realpart and the negated imagpart to target. */
9515 emit_move_insn (gen_realpart (partmode, target),
9516 gen_realpart (partmode, op0));
9518 imag_t = gen_imagpart (partmode, target);
9519 temp = expand_unop (partmode,
9520 ! unsignedp && flag_trapv
9521 && (GET_MODE_CLASS(partmode) == MODE_INT)
9522 ? negv_optab : neg_optab,
9523 gen_imagpart (partmode, op0), imag_t, 0);
9524 if (temp != imag_t)
9525 emit_move_insn (imag_t, temp);
9527 insns = get_insns ();
9528 end_sequence ();
9530 /* Conjugate should appear as a single unit.
9531 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9532 each with a separate pseudo as destination.
9533 It's not correct for flow to treat them as a unit. */
9534 if (GET_CODE (target) != CONCAT)
9535 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9536 else
9537 emit_insn (insns);
9539 return target;
9542 case TRY_CATCH_EXPR:
9544 tree handler = TREE_OPERAND (exp, 1);
9546 expand_eh_region_start ();
9548 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9550 expand_eh_region_end_cleanup (handler);
9552 return op0;
9555 case TRY_FINALLY_EXPR:
9557 tree try_block = TREE_OPERAND (exp, 0);
9558 tree finally_block = TREE_OPERAND (exp, 1);
9560 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9562 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9563 is not sufficient, so we cannot expand the block twice.
9564 So we play games with GOTO_SUBROUTINE_EXPR to let us
9565 expand the thing only once. */
9566 /* When not optimizing, we go ahead with this form since
9567 (1) user breakpoints operate more predictably without
9568 code duplication, and
9569 (2) we're not running any of the global optimizers
9570 that would explode in time/space with the highly
9571 connected CFG created by the indirect branching. */
9573 rtx finally_label = gen_label_rtx ();
9574 rtx done_label = gen_label_rtx ();
9575 rtx return_link = gen_reg_rtx (Pmode);
9576 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9577 (tree) finally_label, (tree) return_link);
9578 TREE_SIDE_EFFECTS (cleanup) = 1;
9580 /* Start a new binding layer that will keep track of all cleanup
9581 actions to be performed. */
9582 expand_start_bindings (2);
9583 target_temp_slot_level = temp_slot_level;
9585 expand_decl_cleanup (NULL_TREE, cleanup);
9586 op0 = expand_expr (try_block, target, tmode, modifier);
9588 preserve_temp_slots (op0);
9589 expand_end_bindings (NULL_TREE, 0, 0);
9590 emit_jump (done_label);
9591 emit_label (finally_label);
9592 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9593 emit_indirect_jump (return_link);
9594 emit_label (done_label);
9596 else
9598 expand_start_bindings (2);
9599 target_temp_slot_level = temp_slot_level;
9601 expand_decl_cleanup (NULL_TREE, finally_block);
9602 op0 = expand_expr (try_block, target, tmode, modifier);
9604 preserve_temp_slots (op0);
9605 expand_end_bindings (NULL_TREE, 0, 0);
9608 return op0;
9611 case GOTO_SUBROUTINE_EXPR:
9613 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9614 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9615 rtx return_address = gen_label_rtx ();
9616 emit_move_insn (return_link,
9617 gen_rtx_LABEL_REF (Pmode, return_address));
9618 emit_jump (subr);
9619 emit_label (return_address);
9620 return const0_rtx;
9623 case VA_ARG_EXPR:
9624 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9626 case EXC_PTR_EXPR:
9627 return get_exception_pointer (cfun);
9629 case FDESC_EXPR:
9630 /* Function descriptors are not valid except for as
9631 initialization constants, and should not be expanded. */
9632 abort ();
9634 default:
9635 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9638 /* Here to do an ordinary binary operator, generating an instruction
9639 from the optab already placed in `this_optab'. */
9640 binop:
9641 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9642 subtarget = 0;
9643 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9644 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9645 binop2:
9646 if (modifier == EXPAND_STACK_PARM)
9647 target = 0;
9648 temp = expand_binop (mode, this_optab, op0, op1, target,
9649 unsignedp, OPTAB_LIB_WIDEN);
9650 if (temp == 0)
9651 abort ();
9652 return temp;
9655 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9656 when applied to the address of EXP produces an address known to be
9657 aligned more than BIGGEST_ALIGNMENT. */
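/* An illustrative sketch of an offset this recognizes (ALIGN is a name
   used only in this example): (- (size_t) &exp) & (ALIGN - 1), where
   ALIGN is a power of 2 and ALIGN - 1 exceeds BIGGEST_ALIGNMENT; adding
   such an offset to the address of EXP rounds that address up to a
   multiple of ALIGN.  */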
9659 static int
9660 is_aligning_offset (offset, exp)
9661 tree offset;
9662 tree exp;
9664 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9665 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9666 || TREE_CODE (offset) == NOP_EXPR
9667 || TREE_CODE (offset) == CONVERT_EXPR
9668 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9669 offset = TREE_OPERAND (offset, 0);
9671 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9672 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9673 if (TREE_CODE (offset) != BIT_AND_EXPR
9674 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9675 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9676 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9677 return 0;
9679 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9680 It must be NEGATE_EXPR. Then strip any more conversions. */
9681 offset = TREE_OPERAND (offset, 0);
9682 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9683 || TREE_CODE (offset) == NOP_EXPR
9684 || TREE_CODE (offset) == CONVERT_EXPR)
9685 offset = TREE_OPERAND (offset, 0);
9687 if (TREE_CODE (offset) != NEGATE_EXPR)
9688 return 0;
9690 offset = TREE_OPERAND (offset, 0);
9691 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9692 || TREE_CODE (offset) == NOP_EXPR
9693 || TREE_CODE (offset) == CONVERT_EXPR)
9694 offset = TREE_OPERAND (offset, 0);
9696 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9697 whose type is the same as EXP. */
9698 return (TREE_CODE (offset) == ADDR_EXPR
9699 && (TREE_OPERAND (offset, 0) == exp
9700 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9701 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9702 == TREE_TYPE (exp)))));
9705 /* Return the tree node if an ARG corresponds to a string constant or zero
9706 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9707 in bytes within the string that ARG is accessing. The type of the
9708 offset will be `sizetype'. */
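/* For example (illustrative): given ARG for "hello" + 2, i.e. a
   PLUS_EXPR of an ADDR_EXPR of the STRING_CST "hello" and the constant
   2, the STRING_CST is returned and *PTR_OFFSET is set to 2; for
   "hello" by itself the offset is size_zero_node.  */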
9710 tree
9711 string_constant (arg, ptr_offset)
9712 tree arg;
9713 tree *ptr_offset;
9715 STRIP_NOPS (arg);
9717 if (TREE_CODE (arg) == ADDR_EXPR
9718 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9720 *ptr_offset = size_zero_node;
9721 return TREE_OPERAND (arg, 0);
9723 else if (TREE_CODE (arg) == PLUS_EXPR)
9725 tree arg0 = TREE_OPERAND (arg, 0);
9726 tree arg1 = TREE_OPERAND (arg, 1);
9728 STRIP_NOPS (arg0);
9729 STRIP_NOPS (arg1);
9731 if (TREE_CODE (arg0) == ADDR_EXPR
9732 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9734 *ptr_offset = convert (sizetype, arg1);
9735 return TREE_OPERAND (arg0, 0);
9737 else if (TREE_CODE (arg1) == ADDR_EXPR
9738 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9740 *ptr_offset = convert (sizetype, arg0);
9741 return TREE_OPERAND (arg1, 0);
9745 return 0;
9748 /* Expand code for a post- or pre- increment or decrement
9749 and return the RTX for the result.
9750 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
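/* For example (illustrative): for "y = x++" POST is 1 and the rtx
   returned holds the old value of x, while for "y = ++x" POST is 0 and
   the rtx returned holds the incremented value.  When the value of the
   expression is unused, IGNORE is nonzero and the POSTINCREMENT_EXPR
   case above passes POST as 0, treating it like a pre-increment.  */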
9752 static rtx
9753 expand_increment (exp, post, ignore)
9754 tree exp;
9755 int post, ignore;
9757 rtx op0, op1;
9758 rtx temp, value;
9759 tree incremented = TREE_OPERAND (exp, 0);
9760 optab this_optab = add_optab;
9761 int icode;
9762 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9763 int op0_is_copy = 0;
9764 int single_insn = 0;
9765 /* 1 means we can't store into OP0 directly,
9766 because it is a subreg narrower than a word,
9767 and we don't dare clobber the rest of the word. */
9768 int bad_subreg = 0;
9770 /* Stabilize any component ref that might need to be
9771 evaluated more than once below. */
9772 if (!post
9773 || TREE_CODE (incremented) == BIT_FIELD_REF
9774 || (TREE_CODE (incremented) == COMPONENT_REF
9775 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9776 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9777 incremented = stabilize_reference (incremented);
9778 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9779 ones into save exprs so that they don't accidentally get evaluated
9780 more than once by the code below. */
9781 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9782 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9783 incremented = save_expr (incremented);
9785 /* Compute the operands as RTX.
9786 Note whether OP0 is the actual lvalue or a copy of it:
9787 I believe it is a copy iff it is a register or subreg
9788 and insns were generated in computing it. */
9790 temp = get_last_insn ();
9791 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9793 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9794 in place but instead must do sign- or zero-extension during assignment,
9795 so we copy it into a new register and let the code below use it as
9796 a copy.
9798 Note that we can safely modify this SUBREG since it is known not to be
9799 shared (it was made by the expand_expr call above). */
9801 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9803 if (post)
9804 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9805 else
9806 bad_subreg = 1;
9808 else if (GET_CODE (op0) == SUBREG
9809 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9811 /* We cannot increment this SUBREG in place. If we are
9812 post-incrementing, get a copy of the old value. Otherwise,
9813 just mark that we cannot increment in place. */
9814 if (post)
9815 op0 = copy_to_reg (op0);
9816 else
9817 bad_subreg = 1;
9820 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9821 && temp != get_last_insn ());
9822 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9824 /* Decide whether incrementing or decrementing. */
9825 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9826 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9827 this_optab = sub_optab;
9829 /* Convert decrement by a constant into a negative increment. */
9830 if (this_optab == sub_optab
9831 && GET_CODE (op1) == CONST_INT)
9833 op1 = GEN_INT (-INTVAL (op1));
9834 this_optab = add_optab;
9837 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9838 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9840 /* For a preincrement, see if we can do this with a single instruction. */
9841 if (!post)
9843 icode = (int) this_optab->handlers[(int) mode].insn_code;
9844 if (icode != (int) CODE_FOR_nothing
9845 /* Make sure that OP0 is valid for operands 0 and 1
9846 of the insn we want to queue. */
9847 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9848 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9849 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9850 single_insn = 1;
9853 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9854 then we cannot just increment OP0. We must therefore contrive to
9855 increment the original value. Then, for postincrement, we can return
9856 OP0 since it is a copy of the old value. For preincrement, expand here
9857 unless we can do it with a single insn.
9859 Likewise if storing directly into OP0 would clobber high bits
9860 we need to preserve (bad_subreg). */
9861 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9863 /* This is the easiest way to increment the value wherever it is.
9864 Problems with multiple evaluation of INCREMENTED are prevented
9865 because either (1) it is a component_ref or preincrement,
9866 in which case it was stabilized above, or (2) it is an array_ref
9867 with constant index in an array in a register, which is
9868 safe to reevaluate. */
9869 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9870 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9871 ? MINUS_EXPR : PLUS_EXPR),
9872 TREE_TYPE (exp),
9873 incremented,
9874 TREE_OPERAND (exp, 1));
9876 while (TREE_CODE (incremented) == NOP_EXPR
9877 || TREE_CODE (incremented) == CONVERT_EXPR)
9879 newexp = convert (TREE_TYPE (incremented), newexp);
9880 incremented = TREE_OPERAND (incremented, 0);
9883 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9884 return post ? op0 : temp;
9887 if (post)
9889 /* We have a true reference to the value in OP0.
9890 If there is an insn to add or subtract in this mode, queue it.
9891 Queueing the increment insn avoids the register shuffling
9892 that often results if we must increment now and first save
9893 the old value for subsequent use. */
9895 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9896 op0 = stabilize (op0);
9897 #endif
9899 icode = (int) this_optab->handlers[(int) mode].insn_code;
9900 if (icode != (int) CODE_FOR_nothing
9901 /* Make sure that OP0 is valid for operands 0 and 1
9902 of the insn we want to queue. */
9903 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9904 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9906 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9907 op1 = force_reg (mode, op1);
9909 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9911 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9913 rtx addr = (general_operand (XEXP (op0, 0), mode)
9914 ? force_reg (Pmode, XEXP (op0, 0))
9915 : copy_to_reg (XEXP (op0, 0)));
9916 rtx temp, result;
9918 op0 = replace_equiv_address (op0, addr);
9919 temp = force_reg (GET_MODE (op0), op0);
9920 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9921 op1 = force_reg (mode, op1);
9923 /* The increment queue is LIFO, thus we have to `queue'
9924 the instructions in reverse order. */
9925 enqueue_insn (op0, gen_move_insn (op0, temp));
9926 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9927 return result;
9931 /* Preincrement, or we can't increment with one simple insn. */
9932 if (post)
9933 /* Save a copy of the value before inc or dec, to return it later. */
9934 temp = value = copy_to_reg (op0);
9935 else
9936 /* Arrange to return the incremented value. */
9937 /* Copy the rtx because expand_binop will protect from the queue,
9938 and the results of that would be invalid for us to return
9939 if our caller does emit_queue before using our result. */
9940 temp = copy_rtx (value = op0);
9942 /* Increment however we can. */
9943 op1 = expand_binop (mode, this_optab, value, op1, op0,
9944 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9946 /* Make sure the value is stored into OP0. */
9947 if (op1 != op0)
9948 emit_move_insn (op0, op1);
9950 return temp;
9953 /* Generate code to calculate EXP using a store-flag instruction
9954 and return an rtx for the result. EXP is either a comparison
9955 or a TRUTH_NOT_EXPR whose operand is a comparison.
9957 If TARGET is nonzero, store the result there if convenient.
9959 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9960 cheap.
9962 Return zero if there is no suitable set-flag instruction
9963 available on this machine.
9965 Once expand_expr has been called on the arguments of the comparison,
9966 we are committed to doing the store flag, since it is not safe to
9967 re-evaluate the expression. We emit the store-flag insn by calling
9968 emit_store_flag, but only expand the arguments if we have a reason
9969 to believe that emit_store_flag will be successful. If we think that
9970 it will, but it isn't, we have to simulate the store-flag with a
9971 set/jump/set sequence. */
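/* An illustrative sketch of the set/compare/jump/set fallback emitted at
   the end of this function for "target = (a < b)" when the store-flag
   attempt fails:

       target = 1;
       if (a < b) goto L;
       target = 0;
     L:

   with the two constants swapped when INVERT is set.  */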
9973 static rtx
9974 do_store_flag (exp, target, mode, only_cheap)
9975 tree exp;
9976 rtx target;
9977 enum machine_mode mode;
9978 int only_cheap;
9980 enum rtx_code code;
9981 tree arg0, arg1, type;
9982 tree tem;
9983 enum machine_mode operand_mode;
9984 int invert = 0;
9985 int unsignedp;
9986 rtx op0, op1;
9987 enum insn_code icode;
9988 rtx subtarget = target;
9989 rtx result, label;
9991 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9992 result at the end. We can't simply invert the test since it would
9993 have already been inverted if it were valid. This case occurs for
9994 some floating-point comparisons. */
9996 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9997 invert = 1, exp = TREE_OPERAND (exp, 0);
9999 arg0 = TREE_OPERAND (exp, 0);
10000 arg1 = TREE_OPERAND (exp, 1);
10002 /* Don't crash if the comparison was erroneous. */
10003 if (arg0 == error_mark_node || arg1 == error_mark_node)
10004 return const0_rtx;
10006 type = TREE_TYPE (arg0);
10007 operand_mode = TYPE_MODE (type);
10008 unsignedp = TREE_UNSIGNED (type);
10010 /* We won't bother with BLKmode store-flag operations because it would mean
10011 passing a lot of information to emit_store_flag. */
10012 if (operand_mode == BLKmode)
10013 return 0;
10015 /* We won't bother with store-flag operations involving function pointers
10016 when function pointers must be canonicalized before comparisons. */
10017 #ifdef HAVE_canonicalize_funcptr_for_compare
10018 if (HAVE_canonicalize_funcptr_for_compare
10019 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10020 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10021 == FUNCTION_TYPE))
10022 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10023 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10024 == FUNCTION_TYPE))))
10025 return 0;
10026 #endif
10028 STRIP_NOPS (arg0);
10029 STRIP_NOPS (arg1);
10031 /* Get the rtx comparison code to use. We know that EXP is a comparison
10032 operation of some type. Some comparisons against 1 and -1 can be
10033 converted to comparisons with zero. Do so here so that the tests
10034 below will be aware that we have a comparison with zero. These
10035 tests will not catch constants in the first operand, but constants
10036 are rarely passed as the first operand. */
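/* For example (illustrative): a signed "x <= -1" becomes "x < 0", and
   "x >= 1" becomes "x > 0", so that the tests below see a comparison
   against zero.  */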
10038 switch (TREE_CODE (exp))
10040 case EQ_EXPR:
10041 code = EQ;
10042 break;
10043 case NE_EXPR:
10044 code = NE;
10045 break;
10046 case LT_EXPR:
10047 if (integer_onep (arg1))
10048 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10049 else
10050 code = unsignedp ? LTU : LT;
10051 break;
10052 case LE_EXPR:
10053 if (! unsignedp && integer_all_onesp (arg1))
10054 arg1 = integer_zero_node, code = LT;
10055 else
10056 code = unsignedp ? LEU : LE;
10057 break;
10058 case GT_EXPR:
10059 if (! unsignedp && integer_all_onesp (arg1))
10060 arg1 = integer_zero_node, code = GE;
10061 else
10062 code = unsignedp ? GTU : GT;
10063 break;
10064 case GE_EXPR:
10065 if (integer_onep (arg1))
10066 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10067 else
10068 code = unsignedp ? GEU : GE;
10069 break;
10071 case UNORDERED_EXPR:
10072 code = UNORDERED;
10073 break;
10074 case ORDERED_EXPR:
10075 code = ORDERED;
10076 break;
10077 case UNLT_EXPR:
10078 code = UNLT;
10079 break;
10080 case UNLE_EXPR:
10081 code = UNLE;
10082 break;
10083 case UNGT_EXPR:
10084 code = UNGT;
10085 break;
10086 case UNGE_EXPR:
10087 code = UNGE;
10088 break;
10089 case UNEQ_EXPR:
10090 code = UNEQ;
10091 break;
10093 default:
10094 abort ();
10097 /* Put a constant second. */
10098 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10100 tem = arg0; arg0 = arg1; arg1 = tem;
10101 code = swap_condition (code);
10104 /* If this is an equality or inequality test of a single bit, we can
10105 do this by shifting the bit being tested to the low-order bit and
10106 masking the result with the constant 1. If the condition was EQ,
10107 we xor it with 1. This does not require an scc insn and is faster
10108 than an scc insn even if we have it. */
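/* As an illustrative example: "(x & 8) != 0" can be computed as
   "(x >> 3) & 1", and "(x & 8) == 0" as "((x >> 3) & 1) ^ 1".  Below,
   the shift is omitted when BITNUM is zero, the XOR is only emitted for
   the EQ case, and the trailing AND is omitted when the tested bit is
   the most significant bit of the type.  */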
10110 if ((code == NE || code == EQ)
10111 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10112 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10114 tree inner = TREE_OPERAND (arg0, 0);
10115 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10116 int ops_unsignedp;
10118 /* If INNER is a right shift of a constant and it plus BITNUM does
10119 not overflow, adjust BITNUM and INNER. */
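/* For example (illustrative): in "((x >> 2) & 4) != 0" INNER is "x >> 2"
   and BITNUM is 2, so BITNUM becomes 4 and INNER becomes plain x; the
   test is really of bit 4 of x.  */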
10121 if (TREE_CODE (inner) == RSHIFT_EXPR
10122 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10123 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10124 && bitnum < TYPE_PRECISION (type)
10125 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10126 bitnum - TYPE_PRECISION (type)))
10128 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10129 inner = TREE_OPERAND (inner, 0);
10132 /* If we are going to be able to omit the AND below, we must do our
10133 operations as unsigned. If we must use the AND, we have a choice.
10134 Normally unsigned is faster, but for some machines signed is. */
10135 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10136 #ifdef LOAD_EXTEND_OP
10137 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10138 #else
10140 #endif
10143 if (! get_subtarget (subtarget)
10144 || GET_MODE (subtarget) != operand_mode
10145 || ! safe_from_p (subtarget, inner, 1))
10146 subtarget = 0;
10148 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10150 if (bitnum != 0)
10151 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10152 size_int (bitnum), subtarget, ops_unsignedp);
10154 if (GET_MODE (op0) != mode)
10155 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10157 if ((code == EQ && ! invert) || (code == NE && invert))
10158 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10159 ops_unsignedp, OPTAB_LIB_WIDEN);
10161 /* Put the AND last so it can combine with more things. */
10162 if (bitnum != TYPE_PRECISION (type) - 1)
10163 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10165 return op0;
10168 /* Now see if we are likely to be able to do this. Return if not. */
10169 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10170 return 0;
10172 icode = setcc_gen_code[(int) code];
10173 if (icode == CODE_FOR_nothing
10174 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10176 /* We can only do this if it is one of the special cases that
10177 can be handled without an scc insn. */
10178 if ((code == LT && integer_zerop (arg1))
10179 || (! only_cheap && code == GE && integer_zerop (arg1)))
10181 else if (BRANCH_COST >= 0
10182 && ! only_cheap && (code == NE || code == EQ)
10183 && TREE_CODE (type) != REAL_TYPE
10184 && ((abs_optab->handlers[(int) operand_mode].insn_code
10185 != CODE_FOR_nothing)
10186 || (ffs_optab->handlers[(int) operand_mode].insn_code
10187 != CODE_FOR_nothing)))
10189 else
10190 return 0;
10193 if (! get_subtarget (target)
10194 || GET_MODE (subtarget) != operand_mode
10195 || ! safe_from_p (subtarget, arg1, 1))
10196 subtarget = 0;
10198 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10199 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10201 if (target == 0)
10202 target = gen_reg_rtx (mode);
10204 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10205 because, if the emit_store_flag does anything it will succeed and
10206 OP0 and OP1 will not be used subsequently. */
10208 result = emit_store_flag (target, code,
10209 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10210 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10211 operand_mode, unsignedp, 1);
10213 if (result)
10215 if (invert)
10216 result = expand_binop (mode, xor_optab, result, const1_rtx,
10217 result, 0, OPTAB_LIB_WIDEN);
10218 return result;
10221 /* If this failed, we have to do this with set/compare/jump/set code. */
10222 if (GET_CODE (target) != REG
10223 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10224 target = gen_reg_rtx (GET_MODE (target));
10226 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10227 result = compare_from_rtx (op0, op1, code, unsignedp,
10228 operand_mode, NULL_RTX);
10229 if (GET_CODE (result) == CONST_INT)
10230 return (((result == const0_rtx && ! invert)
10231 || (result != const0_rtx && invert))
10232 ? const0_rtx : const1_rtx);
10234 /* The code of RESULT may not match CODE if compare_from_rtx
10235 decided to swap its operands and reverse the original code.
10237 We know that compare_from_rtx returns either a CONST_INT or
10238 a new comparison code, so it is safe to just extract the
10239 code from RESULT. */
10240 code = GET_CODE (result);
10242 label = gen_label_rtx ();
10243 if (bcc_gen_fctn[(int) code] == 0)
10244 abort ();
10246 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10247 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10248 emit_label (label);
10250 return target;
10254 /* Stubs in case we haven't got a casesi insn. */
10255 #ifndef HAVE_casesi
10256 # define HAVE_casesi 0
10257 # define gen_casesi(a, b, c, d, e) (0)
10258 # define CODE_FOR_casesi CODE_FOR_nothing
10259 #endif
10261 /* If the machine does not have a case insn that compares the bounds,
10262 this means extra overhead for dispatch tables, which raises the
10263 threshold for using them. */
10264 #ifndef CASE_VALUES_THRESHOLD
10265 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10266 #endif /* CASE_VALUES_THRESHOLD */
10268 unsigned int
10269 case_values_threshold ()
10271 return CASE_VALUES_THRESHOLD;
10274 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10275 0 otherwise (i.e. if there is no casesi instruction). */
10277 try_casesi (index_type, index_expr, minval, range,
10278 table_label, default_label)
10279 tree index_type, index_expr, minval, range;
10280 rtx table_label ATTRIBUTE_UNUSED;
10281 rtx default_label;
10283 enum machine_mode index_mode = SImode;
10284 int index_bits = GET_MODE_BITSIZE (index_mode);
10285 rtx op1, op2, index;
10286 enum machine_mode op_mode;
10288 if (! HAVE_casesi)
10289 return 0;
10291 /* Convert the index to SImode. */
10292 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10294 enum machine_mode omode = TYPE_MODE (index_type);
10295 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10297 /* We must handle the endpoints in the original mode. */
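   /* An illustrative sketch of why: with a DImode index, a value such as
      0x100000003 (after MINVAL has been subtracted) must be caught by this
      DImode range check; truncating it to SImode first would turn it into
      3 and could wrongly select a case label.  */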
10298 index_expr = build (MINUS_EXPR, index_type,
10299 index_expr, minval);
10300 minval = integer_zero_node;
10301 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10302 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10303 omode, 1, default_label);
10304 /* Now we can safely truncate. */
10305 index = convert_to_mode (index_mode, index, 0);
10307 else
10309 if (TYPE_MODE (index_type) != index_mode)
10311 index_expr = convert ((*lang_hooks.types.type_for_size)
10312 (index_bits, 0), index_expr);
10313 index_type = TREE_TYPE (index_expr);
10316 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10318 emit_queue ();
10319 index = protect_from_queue (index, 0);
10320 do_pending_stack_adjust ();
10322 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10323 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10324 (index, op_mode))
10325 index = copy_to_mode_reg (op_mode, index);
10327 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10329 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10330 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10331 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10332 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10333 (op1, op_mode))
10334 op1 = copy_to_mode_reg (op_mode, op1);
10336 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10338 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10339 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10340 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10341 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10342 (op2, op_mode))
10343 op2 = copy_to_mode_reg (op_mode, op2);
10345 emit_jump_insn (gen_casesi (index, op1, op2,
10346 table_label, default_label));
10347 return 1;
10350 /* Attempt to generate a tablejump instruction; same concept. */
10351 #ifndef HAVE_tablejump
10352 #define HAVE_tablejump 0
10353 #define gen_tablejump(x, y) (0)
10354 #endif
10356 /* Subroutine of the next function.
10358 INDEX is the value being switched on, with the lowest value
10359 in the table already subtracted.
10360 MODE is its expected mode (needed if INDEX is constant).
10361 RANGE is the length of the jump table.
10362 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10364 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10365 index value is out of range. */
10367 static void
10368 do_tablejump (index, mode, range, table_label, default_label)
10369 rtx index, range, table_label, default_label;
10370 enum machine_mode mode;
10372 rtx temp, vector;
10374 if (INTVAL (range) > cfun->max_jumptable_ents)
10375 cfun->max_jumptable_ents = INTVAL (range);
10377 /* Do an unsigned comparison (in the proper mode) between the index
10378 expression and the value which represents the length of the range.
10379 Since we just finished subtracting the lower bound of the range
10380 from the index expression, this comparison allows us to simultaneously
10381 check that the original index expression value is both greater than
10382 or equal to the minimum value of the range and less than or equal to
10383 the maximum value of the range. */
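   /* A worked example (illustrative): for a switch over the values 5 .. 10,
      RANGE is 5 and 5 has already been subtracted from INDEX.  An original
      value of 12 yields 7 and an original value of 3 yields (unsigned) -2,
      both of which compare above 5 and branch to DEFAULT_LABEL; only the
      original values 5 .. 10 yield 0 .. 5 and fall through to the table
      jump.  */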
10385 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10386 default_label);
10388 /* If index is in range, it must fit in Pmode.
10389 Convert to Pmode so we can index with it. */
10390 if (mode != Pmode)
10391 index = convert_to_mode (Pmode, index, 1);
10393 /* Don't let a MEM slip thru, because then INDEX that comes
10394 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10395 and break_out_memory_refs will go to work on it and mess it up. */
10396 #ifdef PIC_CASE_VECTOR_ADDRESS
10397 if (flag_pic && GET_CODE (index) != REG)
10398 index = copy_to_mode_reg (Pmode, index);
10399 #endif
10401 /* If flag_force_addr were to affect this address
10402 it could interfere with the tricky assumptions made
10403 about addresses that contain label-refs,
10404 which may be valid only very near the tablejump itself. */
10405 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10406 GET_MODE_SIZE, because this indicates how large insns are. The other
10407 uses should all be Pmode, because they are addresses. This code
10408 could fail if addresses and insns are not the same size. */
10409 index = gen_rtx_PLUS (Pmode,
10410 gen_rtx_MULT (Pmode, index,
10411 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10412 gen_rtx_LABEL_REF (Pmode, table_label));
10413 #ifdef PIC_CASE_VECTOR_ADDRESS
10414 if (flag_pic)
10415 index = PIC_CASE_VECTOR_ADDRESS (index);
10416 else
10417 #endif
10418 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10419 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10420 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10421 RTX_UNCHANGING_P (vector) = 1;
10422 MEM_NOTRAP_P (vector) = 1;
10423 convert_move (temp, vector, 0);
10425 emit_jump_insn (gen_tablejump (temp, table_label));
10427 /* If we are generating PIC code or if the table is PC-relative, the
10428 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10429 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10430 emit_barrier ();
10434 try_tablejump (index_type, index_expr, minval, range,
10435 table_label, default_label)
10436 tree index_type, index_expr, minval, range;
10437 rtx table_label, default_label;
10439 rtx index;
10441 if (! HAVE_tablejump)
10442 return 0;
10444 index_expr = fold (build (MINUS_EXPR, index_type,
10445 convert (index_type, index_expr),
10446 convert (index_type, minval)));
10447 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10448 emit_queue ();
10449 index = protect_from_queue (index, 0);
10450 do_pending_stack_adjust ();
10452 do_tablejump (index, TYPE_MODE (index_type),
10453 convert_modes (TYPE_MODE (index_type),
10454 TYPE_MODE (TREE_TYPE (range)),
10455 expand_expr (range, NULL_RTX,
10456 VOIDmode, 0),
10457 TREE_UNSIGNED (TREE_TYPE (range))),
10458 table_label, default_label);
10459 return 1;
10462 /* Nonzero if the mode is a valid vector mode for this architecture.
10463 This returns nonzero even if there is no hardware support for the
10464 vector mode, but we can emulate with narrower modes. */
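/* For example (illustrative): on a target with no native V2DI support,
   this returns nonzero for V2DImode as long as DImode moves exist, since
   a V2DI value can then be handled as a pair of DIs.  */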
10467 vector_mode_valid_p (mode)
10468 enum machine_mode mode;
10470 enum mode_class class = GET_MODE_CLASS (mode);
10471 enum machine_mode innermode;
10473 /* Doh! What's going on? */
10474 if (class != MODE_VECTOR_INT
10475 && class != MODE_VECTOR_FLOAT)
10476 return 0;
10478 /* Hardware support. Woo hoo! */
10479 if (VECTOR_MODE_SUPPORTED_P (mode))
10480 return 1;
10482 innermode = GET_MODE_INNER (mode);
10484 /* We should probably return 1 if requesting V4DI and we have no DI,
10485 but do have V2DI; this case is probably very unlikely, though. */
10487 /* If we have support for the inner mode, we can safely emulate it.
10488 We may not have V2DI, but we can emulate with a pair of DIs. */
10489 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10492 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10493 static rtx
10494 const_vector_from_tree (exp)
10495 tree exp;
10497 rtvec v;
10498 int units, i;
10499 tree link, elt;
10500 enum machine_mode inner, mode;
10502 mode = TYPE_MODE (TREE_TYPE (exp));
10504 if (is_zeros_p (exp))
10505 return CONST0_RTX (mode);
10507 units = GET_MODE_NUNITS (mode);
10508 inner = GET_MODE_INNER (mode);
10510 v = rtvec_alloc (units);
10512 link = TREE_VECTOR_CST_ELTS (exp);
10513 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10515 elt = TREE_VALUE (link);
10517 if (TREE_CODE (elt) == REAL_CST)
10518 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10519 inner);
10520 else
10521 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10522 TREE_INT_CST_HIGH (elt),
10523 inner);
10526 /* Initialize remaining elements to 0. */
10527 for (; i < units; ++i)
10528 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10530 return gen_rtx_raw_CONST_VECTOR (mode, v);
10533 #include "gt-expr.h"