1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
50 /* Decide whether a function's arguments should be processed
51 from first to last or from last to first.
53 They should if the stack and args grow in opposite directions, but
54 only if we have push insns. */
56 #ifdef PUSH_ROUNDING
58 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59 #define PUSH_ARGS_REVERSED /* If it's last to first. */
60 #endif
62 #endif
64 #ifndef STACK_PUSH_CODE
65 #ifdef STACK_GROWS_DOWNWARD
66 #define STACK_PUSH_CODE PRE_DEC
67 #else
68 #define STACK_PUSH_CODE PRE_INC
69 #endif
70 #endif
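/* Illustrative note (added for exposition; assumes a downward-growing
   stack):  with STACK_GROWS_DOWNWARD defined, STACK_PUSH_CODE is PRE_DEC,
   so a push of an SImode value expands to something like
	(set (mem:SI (pre_dec (reg sp))) (reg:SI n))
   whereas an upward-growing stack would use a pre_inc address instead.  */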
72 /* Assume that case vectors are not pc-relative. */
73 #ifndef CASE_VECTOR_PC_RELATIVE
74 #define CASE_VECTOR_PC_RELATIVE 0
75 #endif
77 /* If this is nonzero, we do not bother generating VOLATILE
78 around volatile memory references, and we are willing to
79 output indirect addresses. If cse is to follow, we reject
80 indirect addresses so a useful potential cse is generated;
81 if it is used only once, instruction combination will produce
82 the same indirect address eventually. */
83 int cse_not_expected;
85 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
86 static tree placeholder_list = 0;
88 /* This structure is used by move_by_pieces to describe the move to
89 be performed. */
90 struct move_by_pieces
92 rtx to;
93 rtx to_addr;
94 int autinc_to;
95 int explicit_inc_to;
96 rtx from;
97 rtx from_addr;
98 int autinc_from;
99 int explicit_inc_from;
100 unsigned HOST_WIDE_INT len;
101 HOST_WIDE_INT offset;
102 int reverse;
105 /* This structure is used by store_by_pieces to describe the clear to
106 be performed. */
108 struct store_by_pieces
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 unsigned HOST_WIDE_INT len;
115 HOST_WIDE_INT offset;
116 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
117 PTR constfundata;
118 int reverse;
121 extern struct obstack permanent_obstack;
123 static rtx enqueue_insn PARAMS ((rtx, rtx));
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
125 PARAMS ((unsigned HOST_WIDE_INT,
126 unsigned int));
127 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces *));
129 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
130 enum machine_mode));
131 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
132 unsigned int));
133 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
134 unsigned int));
135 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
136 enum machine_mode,
137 struct store_by_pieces *));
138 static rtx compress_float_constant PARAMS ((rtx, rtx));
139 static rtx get_subtarget PARAMS ((rtx));
140 static int is_zeros_p PARAMS ((tree));
141 static int mostly_zeros_p PARAMS ((tree));
142 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int));
145 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
146 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
147 HOST_WIDE_INT, enum machine_mode,
148 tree, enum machine_mode, int, tree,
149 int));
150 static rtx var_rtx PARAMS ((tree));
151 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
152 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
153 static int is_aligning_offset PARAMS ((tree, tree));
154 static rtx expand_increment PARAMS ((tree, int, int));
155 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
156 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
157 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
158 rtx, rtx));
159 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
160 #ifdef PUSH_ROUNDING
161 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
162 #endif
163 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
165 /* Record for each mode whether we can move a register directly to or
166 from an object of that mode in memory. If we can't, we won't try
167 to use that mode directly when accessing a field of that mode. */
169 static char direct_load[NUM_MACHINE_MODES];
170 static char direct_store[NUM_MACHINE_MODES];
172 /* Record for each mode whether we can float-extend from memory. */
174 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
176 /* If a memory-to-memory move would take MOVE_RATIO or more simple
177 move-instruction sequences, we will do a movstr or libcall instead. */
179 #ifndef MOVE_RATIO
180 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
181 #define MOVE_RATIO 2
182 #else
183 /* If we are optimizing for space (-Os), cut down the default move ratio. */
184 #define MOVE_RATIO (optimize_size ? 3 : 15)
185 #endif
186 #endif
188 /* This macro is used to determine whether move_by_pieces should be called
189 to perform a structure copy. */
190 #ifndef MOVE_BY_PIECES_P
191 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
193 #endif
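/* Worked example (added for exposition; assumes a 32-bit word target with
   MOVE_MAX of 4, the default MOVE_RATIO of 15, and word-aligned operands):
   a 16-byte structure copy costs move_by_pieces_ninsns == 4 SImode moves,
   so MOVE_BY_PIECES_P is true and the copy is expanded inline rather than
   through a movstr pattern or a memcpy call.  */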
195 /* This array records the insn_code of insns to perform block moves. */
196 enum insn_code movstr_optab[NUM_MACHINE_MODES];
198 /* This array records the insn_code of insns to perform block clears. */
199 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
201 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
203 #ifndef SLOW_UNALIGNED_ACCESS
204 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
205 #endif
207 /* This is run once per compilation to set up which modes can be used
208 directly in memory and to initialize the block move optab. */
210 void
211 init_expr_once ()
213 rtx insn, pat;
214 enum machine_mode mode;
215 int num_clobbers;
216 rtx mem, mem1;
218 /* Try indexing by frame ptr and try by stack ptr.
219 It is known that on the Convex the stack ptr isn't a valid index.
220 With luck, one or the other is valid on any machine. */
221 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
222 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
224 insn = rtx_alloc (INSN);
225 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
226 PATTERN (insn) = pat;
228 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
229 mode = (enum machine_mode) ((int) mode + 1))
231 int regno;
232 rtx reg;
234 direct_load[(int) mode] = direct_store[(int) mode] = 0;
235 PUT_MODE (mem, mode);
236 PUT_MODE (mem1, mode);
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
241 if (mode != VOIDmode && mode != BLKmode)
242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
243 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
244 regno++)
246 if (! HARD_REGNO_MODE_OK (regno, mode))
247 continue;
249 reg = gen_rtx_REG (mode, regno);
251 SET_SRC (pat) = mem;
252 SET_DEST (pat) = reg;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_load[(int) mode] = 1;
256 SET_SRC (pat) = mem1;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
261 SET_SRC (pat) = reg;
262 SET_DEST (pat) = mem;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_store[(int) mode] = 1;
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem1;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
273 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
276 mode = GET_MODE_WIDER_MODE (mode))
278 enum machine_mode srcmode;
279 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
280 srcmode = GET_MODE_WIDER_MODE (srcmode))
282 enum insn_code ic;
284 ic = can_extend_p (mode, srcmode, 0);
285 if (ic == CODE_FOR_nothing)
286 continue;
288 PUT_MODE (mem, srcmode);
290 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
291 float_extend_from_mem[mode][srcmode] = true;
296 /* This is run at the start of compiling a function. */
298 void
299 init_expr ()
301 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
303 pending_chain = 0;
304 pending_stack_adjust = 0;
305 stack_pointer_delta = 0;
306 inhibit_defer_pop = 0;
307 saveregs_value = 0;
308 apply_args_value = 0;
309 forced_labels = 0;
312 void
313 mark_expr_status (p)
314 struct expr_status *p;
316 if (p == NULL)
317 return;
319 ggc_mark_rtx (p->x_saveregs_value);
320 ggc_mark_rtx (p->x_apply_args_value);
321 ggc_mark_rtx (p->x_forced_labels);
324 void
325 free_expr_status (f)
326 struct function *f;
328 free (f->expr);
329 f->expr = NULL;
332 /* Small sanity check that the queue is empty at the end of a function. */
334 void
335 finish_expr_for_function ()
337 if (pending_chain)
338 abort ();
341 /* Manage the queue of increment instructions to be output
342 for POSTINCREMENT_EXPR expressions, etc. */
344 /* Queue up to increment (or change) VAR later. BODY says how:
345 BODY should be the same thing you would pass to emit_insn
346 to increment right away. It will go to emit_insn later on.
348 The value is a QUEUED expression to be used in place of VAR
349 where you want to guarantee the pre-incrementation value of VAR. */
351 static rtx
352 enqueue_insn (var, body)
353 rtx var, body;
355 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
356 body, pending_chain);
357 return pending_chain;
360 /* Use protect_from_queue to convert a QUEUED expression
361 into something that you can put immediately into an instruction.
362 If the queued incrementation has not happened yet,
363 protect_from_queue returns the variable itself.
364 If the incrementation has happened, protect_from_queue returns a temp
365 that contains a copy of the old value of the variable.
367 Any time an rtx which might possibly be a QUEUED is to be put
368 into an instruction, it must be passed through protect_from_queue first.
369 QUEUED expressions are not meaningful in instructions.
371 Do not pass a value through protect_from_queue and then hold
372 on to it for a while before putting it in an instruction!
373 If the queue is flushed in between, incorrect code will result. */
 375 rtx
 376 protect_from_queue (x, modify)
377 rtx x;
378 int modify;
380 RTX_CODE code = GET_CODE (x);
382 #if 0 /* A QUEUED can hang around after the queue is forced out. */
383 /* Shortcut for most common case. */
384 if (pending_chain == 0)
385 return x;
386 #endif
388 if (code != QUEUED)
390 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
391 use of autoincrement. Make a copy of the contents of the memory
392 location rather than a copy of the address, but not if the value is
393 of mode BLKmode. Don't modify X in place since it might be
394 shared. */
395 if (code == MEM && GET_MODE (x) != BLKmode
396 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
398 rtx y = XEXP (x, 0);
399 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
401 if (QUEUED_INSN (y))
403 rtx temp = gen_reg_rtx (GET_MODE (x));
405 emit_insn_before (gen_move_insn (temp, new),
406 QUEUED_INSN (y));
407 return temp;
410 /* Copy the address into a pseudo, so that the returned value
411 remains correct across calls to emit_queue. */
412 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
415 /* Otherwise, recursively protect the subexpressions of all
416 the kinds of rtx's that can contain a QUEUED. */
417 if (code == MEM)
419 rtx tem = protect_from_queue (XEXP (x, 0), 0);
420 if (tem != XEXP (x, 0))
422 x = copy_rtx (x);
423 XEXP (x, 0) = tem;
426 else if (code == PLUS || code == MULT)
428 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
429 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
430 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
432 x = copy_rtx (x);
433 XEXP (x, 0) = new0;
434 XEXP (x, 1) = new1;
437 return x;
439 /* If the increment has not happened, use the variable itself. Copy it
440 into a new pseudo so that the value remains correct across calls to
441 emit_queue. */
442 if (QUEUED_INSN (x) == 0)
443 return copy_to_reg (QUEUED_VAR (x));
444 /* If the increment has happened and a pre-increment copy exists,
445 use that copy. */
446 if (QUEUED_COPY (x) != 0)
447 return QUEUED_COPY (x);
448 /* The increment has happened but we haven't set up a pre-increment copy.
449 Set one up now, and use it. */
450 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
451 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
452 QUEUED_INSN (x));
453 return QUEUED_COPY (x);
456 /* Return nonzero if X contains a QUEUED expression:
457 if it contains anything that will be altered by a queued increment.
458 We handle only combinations of MEM, PLUS, MINUS and MULT operators
459 since memory addresses generally contain only those. */
 461 int
 462 queued_subexp_p (x)
463 rtx x;
465 enum rtx_code code = GET_CODE (x);
466 switch (code)
468 case QUEUED:
469 return 1;
470 case MEM:
471 return queued_subexp_p (XEXP (x, 0));
472 case MULT:
473 case PLUS:
474 case MINUS:
475 return (queued_subexp_p (XEXP (x, 0))
476 || queued_subexp_p (XEXP (x, 1)));
477 default:
478 return 0;
482 /* Perform all the pending incrementations. */
484 void
485 emit_queue ()
487 rtx p;
488 while ((p = pending_chain))
490 rtx body = QUEUED_BODY (p);
492 if (GET_CODE (body) == SEQUENCE)
494 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
495 emit_insn (QUEUED_BODY (p));
497 else
498 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
499 pending_chain = QUEUED_NEXT (p);
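/* Illustrative sketch (added for exposition) of the usual flow for an
   expression such as "x = *p++":  expansion calls enqueue_insn to record
   the increment of P, passes any rtx that mentions P through
   protect_from_queue so the pre-increment value is read, and emit_queue
   later flushes the queued increment once the containing expression has
   been expanded.  */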
503 /* Copy data from FROM to TO, where the machine modes are not the same.
504 Both modes may be integer, or both may be floating.
505 UNSIGNEDP should be nonzero if FROM is an unsigned type.
506 This causes zero-extension instead of sign-extension. */
508 void
509 convert_move (to, from, unsignedp)
510 rtx to, from;
511 int unsignedp;
513 enum machine_mode to_mode = GET_MODE (to);
514 enum machine_mode from_mode = GET_MODE (from);
515 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
516 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
517 enum insn_code code;
518 rtx libcall;
520 /* rtx code for making an equivalent value. */
521 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
523 to = protect_from_queue (to, 1);
524 from = protect_from_queue (from, 0);
526 if (to_real != from_real)
527 abort ();
529 /* If FROM is a SUBREG that indicates that we have already done at least
530 the required extension, strip it. We don't handle such SUBREGs as
531 TO here. */
533 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
534 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
535 >= GET_MODE_SIZE (to_mode))
536 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
537 from = gen_lowpart (to_mode, from), from_mode = to_mode;
539 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
540 abort ();
542 if (to_mode == from_mode
543 || (from_mode == VOIDmode && CONSTANT_P (from)))
545 emit_move_insn (to, from);
546 return;
549 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
551 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
552 abort ();
554 if (VECTOR_MODE_P (to_mode))
555 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
556 else
557 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
559 emit_move_insn (to, from);
560 return;
563 if (to_real != from_real)
564 abort ();
566 if (to_real)
568 rtx value, insns;
570 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
572 /* Try converting directly if the insn is supported. */
573 if ((code = can_extend_p (to_mode, from_mode, 0))
574 != CODE_FOR_nothing)
576 emit_unop_insn (code, to, from, UNKNOWN);
577 return;
581 #ifdef HAVE_trunchfqf2
582 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
584 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
585 return;
587 #endif
588 #ifdef HAVE_trunctqfqf2
589 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
591 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
592 return;
594 #endif
595 #ifdef HAVE_truncsfqf2
596 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
598 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
599 return;
601 #endif
602 #ifdef HAVE_truncdfqf2
603 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
605 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
606 return;
608 #endif
609 #ifdef HAVE_truncxfqf2
610 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
612 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
613 return;
615 #endif
616 #ifdef HAVE_trunctfqf2
617 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
619 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
620 return;
622 #endif
624 #ifdef HAVE_trunctqfhf2
625 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
627 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
628 return;
630 #endif
631 #ifdef HAVE_truncsfhf2
632 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
634 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
635 return;
637 #endif
638 #ifdef HAVE_truncdfhf2
639 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
641 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
642 return;
644 #endif
645 #ifdef HAVE_truncxfhf2
646 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
648 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
649 return;
651 #endif
652 #ifdef HAVE_trunctfhf2
653 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
655 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
656 return;
658 #endif
660 #ifdef HAVE_truncsftqf2
661 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
663 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
664 return;
666 #endif
667 #ifdef HAVE_truncdftqf2
668 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
670 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
671 return;
673 #endif
674 #ifdef HAVE_truncxftqf2
675 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
677 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
678 return;
680 #endif
681 #ifdef HAVE_trunctftqf2
682 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
684 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
685 return;
687 #endif
689 #ifdef HAVE_truncdfsf2
690 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
692 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
693 return;
695 #endif
696 #ifdef HAVE_truncxfsf2
697 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
699 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
700 return;
702 #endif
703 #ifdef HAVE_trunctfsf2
704 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
706 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
707 return;
709 #endif
710 #ifdef HAVE_truncxfdf2
711 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
713 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
714 return;
716 #endif
717 #ifdef HAVE_trunctfdf2
718 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
720 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
721 return;
723 #endif
725 libcall = (rtx) 0;
726 switch (from_mode)
728 case SFmode:
729 switch (to_mode)
731 case DFmode:
732 libcall = extendsfdf2_libfunc;
733 break;
735 case XFmode:
736 libcall = extendsfxf2_libfunc;
737 break;
739 case TFmode:
740 libcall = extendsftf2_libfunc;
741 break;
743 default:
744 break;
746 break;
748 case DFmode:
749 switch (to_mode)
751 case SFmode:
752 libcall = truncdfsf2_libfunc;
753 break;
755 case XFmode:
756 libcall = extenddfxf2_libfunc;
757 break;
759 case TFmode:
760 libcall = extenddftf2_libfunc;
761 break;
763 default:
764 break;
766 break;
768 case XFmode:
769 switch (to_mode)
771 case SFmode:
772 libcall = truncxfsf2_libfunc;
773 break;
775 case DFmode:
776 libcall = truncxfdf2_libfunc;
777 break;
779 default:
780 break;
782 break;
784 case TFmode:
785 switch (to_mode)
787 case SFmode:
788 libcall = trunctfsf2_libfunc;
789 break;
791 case DFmode:
792 libcall = trunctfdf2_libfunc;
793 break;
795 default:
796 break;
798 break;
800 default:
801 break;
804 if (libcall == (rtx) 0)
805 /* This conversion is not implemented yet. */
806 abort ();
808 start_sequence ();
809 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
810 1, from, from_mode);
811 insns = get_insns ();
812 end_sequence ();
813 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
814 from));
815 return;
818 /* Now both modes are integers. */
820 /* Handle expanding beyond a word. */
821 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
822 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
824 rtx insns;
825 rtx lowpart;
826 rtx fill_value;
827 rtx lowfrom;
828 int i;
829 enum machine_mode lowpart_mode;
830 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
832 /* Try converting directly if the insn is supported. */
833 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
834 != CODE_FOR_nothing)
836 /* If FROM is a SUBREG, put it into a register. Do this
837 so that we always generate the same set of insns for
838 better cse'ing; if an intermediate assignment occurred,
839 we won't be doing the operation directly on the SUBREG. */
840 if (optimize > 0 && GET_CODE (from) == SUBREG)
841 from = force_reg (from_mode, from);
842 emit_unop_insn (code, to, from, equiv_code);
843 return;
845 /* Next, try converting via full word. */
846 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
847 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
848 != CODE_FOR_nothing))
850 if (GET_CODE (to) == REG)
851 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
852 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
853 emit_unop_insn (code, to,
854 gen_lowpart (word_mode, to), equiv_code);
855 return;
858 /* No special multiword conversion insn; do it by hand. */
859 start_sequence ();
861 /* Since we will turn this into a no conflict block, we must ensure
862 that the source does not overlap the target. */
864 if (reg_overlap_mentioned_p (to, from))
865 from = force_reg (from_mode, from);
867 /* Get a copy of FROM widened to a word, if necessary. */
868 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
869 lowpart_mode = word_mode;
870 else
871 lowpart_mode = from_mode;
873 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
875 lowpart = gen_lowpart (lowpart_mode, to);
876 emit_move_insn (lowpart, lowfrom);
878 /* Compute the value to put in each remaining word. */
879 if (unsignedp)
880 fill_value = const0_rtx;
881 else
883 #ifdef HAVE_slt
884 if (HAVE_slt
885 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
886 && STORE_FLAG_VALUE == -1)
888 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
889 lowpart_mode, 0);
890 fill_value = gen_reg_rtx (word_mode);
891 emit_insn (gen_slt (fill_value));
893 else
894 #endif
896 fill_value
897 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
898 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
899 NULL_RTX, 0);
900 fill_value = convert_to_mode (word_mode, fill_value, 1);
904 /* Fill the remaining words. */
905 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
907 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
908 rtx subword = operand_subword (to, index, 1, to_mode);
910 if (subword == 0)
911 abort ();
913 if (fill_value != subword)
914 emit_move_insn (subword, fill_value);
917 insns = get_insns ();
918 end_sequence ();
920 emit_no_conflict_block (insns, to, from, NULL_RTX,
921 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
922 return;
925 /* Truncating multi-word to a word or less. */
926 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
927 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
929 if (!((GET_CODE (from) == MEM
930 && ! MEM_VOLATILE_P (from)
931 && direct_load[(int) to_mode]
932 && ! mode_dependent_address_p (XEXP (from, 0)))
933 || GET_CODE (from) == REG
934 || GET_CODE (from) == SUBREG))
935 from = force_reg (from_mode, from);
936 convert_move (to, gen_lowpart (word_mode, from), 0);
937 return;
940 /* Handle pointer conversion. */ /* SPEE 900220. */
941 if (to_mode == PQImode)
943 if (from_mode != QImode)
944 from = convert_to_mode (QImode, from, unsignedp);
946 #ifdef HAVE_truncqipqi2
947 if (HAVE_truncqipqi2)
949 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
950 return;
952 #endif /* HAVE_truncqipqi2 */
953 abort ();
956 if (from_mode == PQImode)
958 if (to_mode != QImode)
960 from = convert_to_mode (QImode, from, unsignedp);
961 from_mode = QImode;
963 else
965 #ifdef HAVE_extendpqiqi2
966 if (HAVE_extendpqiqi2)
968 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
969 return;
971 #endif /* HAVE_extendpqiqi2 */
972 abort ();
976 if (to_mode == PSImode)
978 if (from_mode != SImode)
979 from = convert_to_mode (SImode, from, unsignedp);
981 #ifdef HAVE_truncsipsi2
982 if (HAVE_truncsipsi2)
984 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
985 return;
987 #endif /* HAVE_truncsipsi2 */
988 abort ();
991 if (from_mode == PSImode)
993 if (to_mode != SImode)
995 from = convert_to_mode (SImode, from, unsignedp);
996 from_mode = SImode;
998 else
1000 #ifdef HAVE_extendpsisi2
1001 if (! unsignedp && HAVE_extendpsisi2)
1003 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1004 return;
1006 #endif /* HAVE_extendpsisi2 */
1007 #ifdef HAVE_zero_extendpsisi2
1008 if (unsignedp && HAVE_zero_extendpsisi2)
1010 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1011 return;
1013 #endif /* HAVE_zero_extendpsisi2 */
1014 abort ();
1018 if (to_mode == PDImode)
1020 if (from_mode != DImode)
1021 from = convert_to_mode (DImode, from, unsignedp);
1023 #ifdef HAVE_truncdipdi2
1024 if (HAVE_truncdipdi2)
1026 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1027 return;
1029 #endif /* HAVE_truncdipdi2 */
1030 abort ();
1033 if (from_mode == PDImode)
1035 if (to_mode != DImode)
1037 from = convert_to_mode (DImode, from, unsignedp);
1038 from_mode = DImode;
1040 else
1042 #ifdef HAVE_extendpdidi2
1043 if (HAVE_extendpdidi2)
1045 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1046 return;
1048 #endif /* HAVE_extendpdidi2 */
1049 abort ();
1053 /* Now follow all the conversions between integers
1054 no more than a word long. */
1056 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1057 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1058 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1059 GET_MODE_BITSIZE (from_mode)))
1061 if (!((GET_CODE (from) == MEM
1062 && ! MEM_VOLATILE_P (from)
1063 && direct_load[(int) to_mode]
1064 && ! mode_dependent_address_p (XEXP (from, 0)))
1065 || GET_CODE (from) == REG
1066 || GET_CODE (from) == SUBREG))
1067 from = force_reg (from_mode, from);
1068 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1069 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1070 from = copy_to_reg (from);
1071 emit_move_insn (to, gen_lowpart (to_mode, from));
1072 return;
1075 /* Handle extension. */
1076 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1078 /* Convert directly if that works. */
1079 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1080 != CODE_FOR_nothing)
1082 if (flag_force_mem)
1083 from = force_not_mem (from);
1085 emit_unop_insn (code, to, from, equiv_code);
1086 return;
1088 else
1090 enum machine_mode intermediate;
1091 rtx tmp;
1092 tree shift_amount;
1094 /* Search for a mode to convert via. */
1095 for (intermediate = from_mode; intermediate != VOIDmode;
1096 intermediate = GET_MODE_WIDER_MODE (intermediate))
1097 if (((can_extend_p (to_mode, intermediate, unsignedp)
1098 != CODE_FOR_nothing)
1099 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1100 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1101 GET_MODE_BITSIZE (intermediate))))
1102 && (can_extend_p (intermediate, from_mode, unsignedp)
1103 != CODE_FOR_nothing))
1105 convert_move (to, convert_to_mode (intermediate, from,
1106 unsignedp), unsignedp);
1107 return;
1110 /* No suitable intermediate mode.
1111 Generate what we need with shifts. */
1112 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1113 - GET_MODE_BITSIZE (from_mode), 0);
1114 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1115 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1116 to, unsignedp);
1117 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1118 to, unsignedp);
1119 if (tmp != to)
1120 emit_move_insn (to, tmp);
1121 return;
1125 /* Support special truncate insns for certain modes. */
1127 if (from_mode == DImode && to_mode == SImode)
1129 #ifdef HAVE_truncdisi2
1130 if (HAVE_truncdisi2)
1132 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1133 return;
1135 #endif
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1137 return;
1140 if (from_mode == DImode && to_mode == HImode)
1142 #ifdef HAVE_truncdihi2
1143 if (HAVE_truncdihi2)
1145 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1146 return;
1148 #endif
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1150 return;
1153 if (from_mode == DImode && to_mode == QImode)
1155 #ifdef HAVE_truncdiqi2
1156 if (HAVE_truncdiqi2)
1158 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1159 return;
1161 #endif
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1163 return;
1166 if (from_mode == SImode && to_mode == HImode)
1168 #ifdef HAVE_truncsihi2
1169 if (HAVE_truncsihi2)
1171 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1172 return;
1174 #endif
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1176 return;
1179 if (from_mode == SImode && to_mode == QImode)
1181 #ifdef HAVE_truncsiqi2
1182 if (HAVE_truncsiqi2)
1184 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1185 return;
1187 #endif
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1189 return;
1192 if (from_mode == HImode && to_mode == QImode)
1194 #ifdef HAVE_trunchiqi2
1195 if (HAVE_trunchiqi2)
1197 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1198 return;
1200 #endif
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1202 return;
1205 if (from_mode == TImode && to_mode == DImode)
1207 #ifdef HAVE_trunctidi2
1208 if (HAVE_trunctidi2)
1210 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1211 return;
1213 #endif
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1215 return;
1218 if (from_mode == TImode && to_mode == SImode)
1220 #ifdef HAVE_trunctisi2
1221 if (HAVE_trunctisi2)
1223 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1224 return;
1226 #endif
1227 convert_move (to, force_reg (from_mode, from), unsignedp);
1228 return;
1231 if (from_mode == TImode && to_mode == HImode)
1233 #ifdef HAVE_trunctihi2
1234 if (HAVE_trunctihi2)
1236 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1237 return;
1239 #endif
1240 convert_move (to, force_reg (from_mode, from), unsignedp);
1241 return;
1244 if (from_mode == TImode && to_mode == QImode)
1246 #ifdef HAVE_trunctiqi2
1247 if (HAVE_trunctiqi2)
1249 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1250 return;
1252 #endif
1253 convert_move (to, force_reg (from_mode, from), unsignedp);
1254 return;
1257 /* Handle truncation of volatile memrefs, and so on;
1258 the things that couldn't be truncated directly,
1259 and for which there was no special instruction. */
1260 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1262 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1263 emit_move_insn (to, temp);
1264 return;
1267 /* Mode combination is not recognized. */
1268 abort ();
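/* Illustrative example (added for exposition):  convert_move from a QImode
   register to an SImode register with UNSIGNEDP set uses a
   zero_extendqisi2 pattern when can_extend_p reports one; otherwise it
   converts via word_mode or a suitable intermediate mode, and as a last
   resort synthesizes the extension with the shift-left/shift-right
   sequence above.  */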
1271 /* Return an rtx for a value that would result
1272 from converting X to mode MODE.
1273 Both X and MODE may be floating, or both integer.
1274 UNSIGNEDP is nonzero if X is an unsigned value.
1275 This can be done by referring to a part of X in place
1276 or by copying to a new temporary with conversion.
1278 This function *must not* call protect_from_queue
1279 except when putting X into an insn (in which case convert_move does it). */
 1281 rtx
 1282 convert_to_mode (mode, x, unsignedp)
1283 enum machine_mode mode;
1284 rtx x;
1285 int unsignedp;
1287 return convert_modes (mode, VOIDmode, x, unsignedp);
1290 /* Return an rtx for a value that would result
1291 from converting X from mode OLDMODE to mode MODE.
1292 Both modes may be floating, or both integer.
1293 UNSIGNEDP is nonzero if X is an unsigned value.
1295 This can be done by referring to a part of X in place
1296 or by copying to a new temporary with conversion.
1298 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1300 This function *must not* call protect_from_queue
1301 except when putting X into an insn (in which case convert_move does it). */
 1303 rtx
 1304 convert_modes (mode, oldmode, x, unsignedp)
1305 enum machine_mode mode, oldmode;
1306 rtx x;
1307 int unsignedp;
1309 rtx temp;
1311 /* If FROM is a SUBREG that indicates that we have already done at least
1312 the required extension, strip it. */
1314 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1315 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1316 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1317 x = gen_lowpart (mode, x);
1319 if (GET_MODE (x) != VOIDmode)
1320 oldmode = GET_MODE (x);
1322 if (mode == oldmode)
1323 return x;
1325 /* There is one case that we must handle specially: If we are converting
1326 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1327 we are to interpret the constant as unsigned, gen_lowpart will do
 1328      the wrong thing if the constant appears negative.  What we want to do is
1329 make the high-order word of the constant zero, not all ones. */
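/* Illustrative example (added for exposition; assumes a 32-bit
   HOST_WIDE_INT):  converting the CONST_INT -1 to an unsigned 64-bit
   integer mode should produce the double-word constant
   0x00000000ffffffff, i.e. a zero high-order word; the code below
   arranges that through immed_double_const.  */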
1331 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1332 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1333 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1335 HOST_WIDE_INT val = INTVAL (x);
1337 if (oldmode != VOIDmode
1338 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1340 int width = GET_MODE_BITSIZE (oldmode);
1342 /* We need to zero extend VAL. */
1343 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1346 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1349 /* We can do this with a gen_lowpart if both desired and current modes
1350 are integer, and this is either a constant integer, a register, or a
1351 non-volatile MEM. Except for the constant case where MODE is no
1352 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1354 if ((GET_CODE (x) == CONST_INT
1355 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1356 || (GET_MODE_CLASS (mode) == MODE_INT
1357 && GET_MODE_CLASS (oldmode) == MODE_INT
1358 && (GET_CODE (x) == CONST_DOUBLE
1359 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1360 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1361 && direct_load[(int) mode])
1362 || (GET_CODE (x) == REG
1363 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1364 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1366 /* ?? If we don't know OLDMODE, we have to assume here that
1367 X does not need sign- or zero-extension. This may not be
1368 the case, but it's the best we can do. */
1369 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1370 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1372 HOST_WIDE_INT val = INTVAL (x);
1373 int width = GET_MODE_BITSIZE (oldmode);
1375 /* We must sign or zero-extend in this case. Start by
1376 zero-extending, then sign extend if we need to. */
1377 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1378 if (! unsignedp
1379 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1380 val |= (HOST_WIDE_INT) (-1) << width;
1382 return gen_int_mode (val, mode);
1385 return gen_lowpart (mode, x);
1388 temp = gen_reg_rtx (mode);
1389 convert_move (temp, x, unsignedp);
1390 return temp;
1393 /* This macro is used to determine what the largest unit size that
1394 move_by_pieces can use is. */
1396 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1397 move efficiently, as opposed to MOVE_MAX which is the maximum
1398 number of bytes we can move with a single instruction. */
1400 #ifndef MOVE_MAX_PIECES
1401 #define MOVE_MAX_PIECES MOVE_MAX
1402 #endif
1404 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1405 store efficiently. Due to internal GCC limitations, this is
1406 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1407 for an immediate constant. */
1409 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1411 /* Generate several move instructions to copy LEN bytes from block FROM to
1412 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1413 and TO through protect_from_queue before calling.
1415 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1416 used to push FROM to the stack.
1418 ALIGN is maximum alignment we can assume. */
1420 void
1421 move_by_pieces (to, from, len, align)
1422 rtx to, from;
1423 unsigned HOST_WIDE_INT len;
1424 unsigned int align;
1426 struct move_by_pieces data;
1427 rtx to_addr, from_addr = XEXP (from, 0);
1428 unsigned int max_size = MOVE_MAX_PIECES + 1;
1429 enum machine_mode mode = VOIDmode, tmode;
1430 enum insn_code icode;
1432 data.offset = 0;
1433 data.from_addr = from_addr;
1434 if (to)
1436 to_addr = XEXP (to, 0);
1437 data.to = to;
1438 data.autinc_to
1439 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1440 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1441 data.reverse
1442 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1444 else
1446 to_addr = NULL_RTX;
1447 data.to = NULL_RTX;
1448 data.autinc_to = 1;
1449 #ifdef STACK_GROWS_DOWNWARD
1450 data.reverse = 1;
1451 #else
1452 data.reverse = 0;
1453 #endif
1455 data.to_addr = to_addr;
1456 data.from = from;
1457 data.autinc_from
1458 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1459 || GET_CODE (from_addr) == POST_INC
1460 || GET_CODE (from_addr) == POST_DEC);
1462 data.explicit_inc_from = 0;
1463 data.explicit_inc_to = 0;
1464 if (data.reverse) data.offset = len;
1465 data.len = len;
1467 /* If copying requires more than two move insns,
1468 copy addresses to registers (to make displacements shorter)
1469 and use post-increment if available. */
1470 if (!(data.autinc_from && data.autinc_to)
1471 && move_by_pieces_ninsns (len, align) > 2)
1473 /* Find the mode of the largest move... */
1474 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1475 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1476 if (GET_MODE_SIZE (tmode) < max_size)
1477 mode = tmode;
1479 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1481 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1482 data.autinc_from = 1;
1483 data.explicit_inc_from = -1;
1485 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1487 data.from_addr = copy_addr_to_reg (from_addr);
1488 data.autinc_from = 1;
1489 data.explicit_inc_from = 1;
1491 if (!data.autinc_from && CONSTANT_P (from_addr))
1492 data.from_addr = copy_addr_to_reg (from_addr);
1493 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1495 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1496 data.autinc_to = 1;
1497 data.explicit_inc_to = -1;
1499 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1501 data.to_addr = copy_addr_to_reg (to_addr);
1502 data.autinc_to = 1;
1503 data.explicit_inc_to = 1;
1505 if (!data.autinc_to && CONSTANT_P (to_addr))
1506 data.to_addr = copy_addr_to_reg (to_addr);
1509 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1510 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1511 align = MOVE_MAX * BITS_PER_UNIT;
1513 /* First move what we can in the largest integer mode, then go to
1514 successively smaller modes. */
1516 while (max_size > 1)
1518 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1519 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1520 if (GET_MODE_SIZE (tmode) < max_size)
1521 mode = tmode;
1523 if (mode == VOIDmode)
1524 break;
1526 icode = mov_optab->handlers[(int) mode].insn_code;
1527 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1528 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1530 max_size = GET_MODE_SIZE (mode);
1533 /* The code above should have handled everything. */
1534 if (data.len > 0)
1535 abort ();
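/* Illustrative example (added for exposition; assumes MOVE_MAX_PIECES of 4
   and word alignment):  copying 7 bytes is emitted as one SImode move, one
   HImode move and one QImode move, visiting the integer modes from widest
   to narrowest until LEN is exhausted.  */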
1538 /* Return number of insns required to move L bytes by pieces.
1539 ALIGN (in bits) is maximum alignment we can assume. */
1541 static unsigned HOST_WIDE_INT
1542 move_by_pieces_ninsns (l, align)
1543 unsigned HOST_WIDE_INT l;
1544 unsigned int align;
1546 unsigned HOST_WIDE_INT n_insns = 0;
1547 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1549 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1550 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1551 align = MOVE_MAX * BITS_PER_UNIT;
1553 while (max_size > 1)
1555 enum machine_mode mode = VOIDmode, tmode;
1556 enum insn_code icode;
1558 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1559 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1560 if (GET_MODE_SIZE (tmode) < max_size)
1561 mode = tmode;
1563 if (mode == VOIDmode)
1564 break;
1566 icode = mov_optab->handlers[(int) mode].insn_code;
1567 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1568 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1570 max_size = GET_MODE_SIZE (mode);
1573 if (l)
1574 abort ();
1575 return n_insns;
1578 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1579 with move instructions for mode MODE. GENFUN is the gen_... function
1580 to make a move insn for that mode. DATA has all the other info. */
1582 static void
1583 move_by_pieces_1 (genfun, mode, data)
1584 rtx (*genfun) PARAMS ((rtx, ...));
1585 enum machine_mode mode;
1586 struct move_by_pieces *data;
1588 unsigned int size = GET_MODE_SIZE (mode);
1589 rtx to1 = NULL_RTX, from1;
1591 while (data->len >= size)
1593 if (data->reverse)
1594 data->offset -= size;
1596 if (data->to)
1598 if (data->autinc_to)
1599 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1600 data->offset);
1601 else
1602 to1 = adjust_address (data->to, mode, data->offset);
1605 if (data->autinc_from)
1606 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1607 data->offset);
1608 else
1609 from1 = adjust_address (data->from, mode, data->offset);
1611 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1612 emit_insn (gen_add2_insn (data->to_addr,
1613 GEN_INT (-(HOST_WIDE_INT)size)));
1614 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1615 emit_insn (gen_add2_insn (data->from_addr,
1616 GEN_INT (-(HOST_WIDE_INT)size)));
1618 if (data->to)
1619 emit_insn ((*genfun) (to1, from1));
1620 else
1622 #ifdef PUSH_ROUNDING
1623 emit_single_push_insn (mode, from1, NULL);
1624 #else
1625 abort ();
1626 #endif
1629 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1630 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1631 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1632 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1634 if (! data->reverse)
1635 data->offset += size;
1637 data->len -= size;
1641 /* Emit code to move a block Y to a block X.
1642 This may be done with string-move instructions,
1643 with multiple scalar move instructions, or with a library call.
1645 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1646 with mode BLKmode.
1647 SIZE is an rtx that says how long they are.
1648 ALIGN is the maximum alignment we can assume they have.
1650 Return the address of the new block, if memcpy is called and returns it,
1651 0 otherwise. */
 1653 rtx
 1654 emit_block_move (x, y, size)
1655 rtx x, y;
1656 rtx size;
1658 rtx retval = 0;
1659 #ifdef TARGET_MEM_FUNCTIONS
1660 static tree fn;
1661 tree call_expr, arg_list;
1662 #endif
1663 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1665 if (GET_MODE (x) != BLKmode)
1666 abort ();
1668 if (GET_MODE (y) != BLKmode)
1669 abort ();
1671 x = protect_from_queue (x, 1);
1672 y = protect_from_queue (y, 0);
1673 size = protect_from_queue (size, 0);
1675 if (GET_CODE (x) != MEM)
1676 abort ();
1677 if (GET_CODE (y) != MEM)
1678 abort ();
1679 if (size == 0)
1680 abort ();
1682 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1683 move_by_pieces (x, y, INTVAL (size), align);
1684 else
1686 /* Try the most limited insn first, because there's no point
1687 including more than one in the machine description unless
1688 the more limited one has some advantage. */
1690 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1691 enum machine_mode mode;
1693 /* Since this is a move insn, we don't care about volatility. */
1694 volatile_ok = 1;
1696 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1697 mode = GET_MODE_WIDER_MODE (mode))
1699 enum insn_code code = movstr_optab[(int) mode];
1700 insn_operand_predicate_fn pred;
1702 if (code != CODE_FOR_nothing
1703 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1704 here because if SIZE is less than the mode mask, as it is
1705 returned by the macro, it will definitely be less than the
1706 actual mode mask. */
1707 && ((GET_CODE (size) == CONST_INT
1708 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1709 <= (GET_MODE_MASK (mode) >> 1)))
1710 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1711 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1712 || (*pred) (x, BLKmode))
1713 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1714 || (*pred) (y, BLKmode))
1715 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1716 || (*pred) (opalign, VOIDmode)))
1718 rtx op2;
1719 rtx last = get_last_insn ();
1720 rtx pat;
1722 op2 = convert_to_mode (mode, size, 1);
1723 pred = insn_data[(int) code].operand[2].predicate;
1724 if (pred != 0 && ! (*pred) (op2, mode))
1725 op2 = copy_to_mode_reg (mode, op2);
1727 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1728 if (pat)
1730 emit_insn (pat);
1731 volatile_ok = 0;
1732 return 0;
1734 else
1735 delete_insns_since (last);
1739 volatile_ok = 0;
1741 /* X, Y, or SIZE may have been passed through protect_from_queue.
1743 It is unsafe to save the value generated by protect_from_queue
1744 and reuse it later. Consider what happens if emit_queue is
1745 called before the return value from protect_from_queue is used.
1747 Expansion of the CALL_EXPR below will call emit_queue before
1748 we are finished emitting RTL for argument setup. So if we are
1749 not careful we could get the wrong value for an argument.
1751 To avoid this problem we go ahead and emit code to copy X, Y &
1752 SIZE into new pseudos. We can then place those new pseudos
1753 into an RTL_EXPR and use them later, even after a call to
1754 emit_queue.
1756 Note this is not strictly needed for library calls since they
1757 do not call emit_queue before loading their arguments. However,
1758 we may need to have library calls call emit_queue in the future
1759 since failing to do so could cause problems for targets which
1760 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1761 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1762 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1764 #ifdef TARGET_MEM_FUNCTIONS
1765 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1766 #else
1767 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1768 TREE_UNSIGNED (integer_type_node));
1769 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1770 #endif
1772 #ifdef TARGET_MEM_FUNCTIONS
1773 /* It is incorrect to use the libcall calling conventions to call
1774 memcpy in this context.
1776 This could be a user call to memcpy and the user may wish to
1777 examine the return value from memcpy.
1779 For targets where libcalls and normal calls have different conventions
1780 for returning pointers, we could end up generating incorrect code.
1782 So instead of using a libcall sequence we build up a suitable
1783 CALL_EXPR and expand the call in the normal fashion. */
1784 if (fn == NULL_TREE)
1786 tree fntype;
1788 /* This was copied from except.c, I don't know if all this is
1789 necessary in this context or not. */
1790 fn = get_identifier ("memcpy");
1791 fntype = build_pointer_type (void_type_node);
1792 fntype = build_function_type (fntype, NULL_TREE);
1793 fn = build_decl (FUNCTION_DECL, fn, fntype);
1794 ggc_add_tree_root (&fn, 1);
1795 DECL_EXTERNAL (fn) = 1;
1796 TREE_PUBLIC (fn) = 1;
1797 DECL_ARTIFICIAL (fn) = 1;
1798 TREE_NOTHROW (fn) = 1;
1799 make_decl_rtl (fn, NULL);
1800 assemble_external (fn);
1803 /* We need to make an argument list for the function call.
1805 memcpy has three arguments, the first two are void * addresses and
1806 the last is a size_t byte count for the copy. */
1807 arg_list
1808 = build_tree_list (NULL_TREE,
1809 make_tree (build_pointer_type (void_type_node), x));
1810 TREE_CHAIN (arg_list)
1811 = build_tree_list (NULL_TREE,
1812 make_tree (build_pointer_type (void_type_node), y));
1813 TREE_CHAIN (TREE_CHAIN (arg_list))
1814 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1815 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1817 /* Now we have to build up the CALL_EXPR itself. */
1818 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1819 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1820 call_expr, arg_list, NULL_TREE);
1821 TREE_SIDE_EFFECTS (call_expr) = 1;
1823 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1824 #else
1825 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1826 VOIDmode, 3, y, Pmode, x, Pmode,
1827 convert_to_mode (TYPE_MODE (integer_type_node), size,
1828 TREE_UNSIGNED (integer_type_node)),
1829 TYPE_MODE (integer_type_node));
1830 #endif
1832 /* If we are initializing a readonly value, show the above call
1833 clobbered it. Otherwise, a load from it may erroneously be hoisted
1834 from a loop. */
1835 if (RTX_UNCHANGING_P (x))
1836 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1839 return retval;
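/* Summary note (added for exposition):  the strategies above are tried in
   order -- an inline move_by_pieces expansion for small constant sizes, a
   movstrMM pattern for each integer mode, and finally a call to memcpy
   (or bcopy when TARGET_MEM_FUNCTIONS is not defined).  */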
1842 /* Copy all or part of a value X into registers starting at REGNO.
1843 The number of registers to be filled is NREGS. */
1845 void
1846 move_block_to_reg (regno, x, nregs, mode)
1847 int regno;
1848 rtx x;
1849 int nregs;
1850 enum machine_mode mode;
1852 int i;
1853 #ifdef HAVE_load_multiple
1854 rtx pat;
1855 rtx last;
1856 #endif
1858 if (nregs == 0)
1859 return;
1861 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1862 x = validize_mem (force_const_mem (mode, x));
1864 /* See if the machine can do this with a load multiple insn. */
1865 #ifdef HAVE_load_multiple
1866 if (HAVE_load_multiple)
1868 last = get_last_insn ();
1869 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1870 GEN_INT (nregs));
1871 if (pat)
1873 emit_insn (pat);
1874 return;
1876 else
1877 delete_insns_since (last);
1879 #endif
1881 for (i = 0; i < nregs; i++)
1882 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1883 operand_subword_force (x, i, mode));
1886 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1887 The number of registers to be filled is NREGS. SIZE indicates the number
1888 of bytes in the object X. */
1890 void
1891 move_block_from_reg (regno, x, nregs, size)
1892 int regno;
1893 rtx x;
1894 int nregs;
1895 int size;
1897 int i;
1898 #ifdef HAVE_store_multiple
1899 rtx pat;
1900 rtx last;
1901 #endif
1902 enum machine_mode mode;
1904 if (nregs == 0)
1905 return;
1907 /* If SIZE is that of a mode no bigger than a word, just use that
1908 mode's store operation. */
1909 if (size <= UNITS_PER_WORD
1910 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1911 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1913 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1914 return;
1917 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1918 to the left before storing to memory. Note that the previous test
1919 doesn't handle all cases (e.g. SIZE == 3). */
1920 if (size < UNITS_PER_WORD
1921 && BYTES_BIG_ENDIAN
1922 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1924 rtx tem = operand_subword (x, 0, 1, BLKmode);
1925 rtx shift;
1927 if (tem == 0)
1928 abort ();
1930 shift = expand_shift (LSHIFT_EXPR, word_mode,
1931 gen_rtx_REG (word_mode, regno),
1932 build_int_2 ((UNITS_PER_WORD - size)
1933 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1934 emit_move_insn (tem, shift);
1935 return;
1938 /* See if the machine can do this with a store multiple insn. */
1939 #ifdef HAVE_store_multiple
1940 if (HAVE_store_multiple)
1942 last = get_last_insn ();
1943 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1944 GEN_INT (nregs));
1945 if (pat)
1947 emit_insn (pat);
1948 return;
1950 else
1951 delete_insns_since (last);
1953 #endif
1955 for (i = 0; i < nregs; i++)
1957 rtx tem = operand_subword (x, i, 1, BLKmode);
1959 if (tem == 0)
1960 abort ();
1962 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1966 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1967 registers represented by a PARALLEL. SSIZE represents the total size of
1968 block SRC in bytes, or -1 if not known. */
1969 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1970 the balance will be in what would be the low-order memory addresses, i.e.
1971 left justified for big endian, right justified for little endian. This
1972 happens to be true for the targets currently using this support. If this
1973 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1974 would be needed. */
1976 void
1977 emit_group_load (dst, orig_src, ssize)
1978 rtx dst, orig_src;
1979 int ssize;
1981 rtx *tmps, src;
1982 int start, i;
1984 if (GET_CODE (dst) != PARALLEL)
1985 abort ();
1987 /* Check for a NULL entry, used to indicate that the parameter goes
1988 both on the stack and in registers. */
1989 if (XEXP (XVECEXP (dst, 0, 0), 0))
1990 start = 0;
1991 else
1992 start = 1;
1994 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1996 /* Process the pieces. */
1997 for (i = start; i < XVECLEN (dst, 0); i++)
1999 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2000 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2001 unsigned int bytelen = GET_MODE_SIZE (mode);
2002 int shift = 0;
2004 /* Handle trailing fragments that run over the size of the struct. */
2005 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2007 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2008 bytelen = ssize - bytepos;
2009 if (bytelen <= 0)
2010 abort ();
2013 /* If we won't be loading directly from memory, protect the real source
2014 from strange tricks we might play; but make sure that the source can
2015 be loaded directly into the destination. */
2016 src = orig_src;
2017 if (GET_CODE (orig_src) != MEM
2018 && (!CONSTANT_P (orig_src)
2019 || (GET_MODE (orig_src) != mode
2020 && GET_MODE (orig_src) != VOIDmode)))
2022 if (GET_MODE (orig_src) == VOIDmode)
2023 src = gen_reg_rtx (mode);
2024 else
2025 src = gen_reg_rtx (GET_MODE (orig_src));
2027 emit_move_insn (src, orig_src);
2030 /* Optimize the access just a bit. */
2031 if (GET_CODE (src) == MEM
2032 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2033 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2034 && bytelen == GET_MODE_SIZE (mode))
2036 tmps[i] = gen_reg_rtx (mode);
2037 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2039 else if (GET_CODE (src) == CONCAT)
2041 if ((bytepos == 0
2042 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2043 || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2044 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2046 tmps[i] = XEXP (src, bytepos != 0);
2047 if (! CONSTANT_P (tmps[i])
2048 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2049 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2050 0, 1, NULL_RTX, mode, mode, ssize);
2052 else if (bytepos == 0)
2054 rtx mem = assign_stack_temp (GET_MODE (src),
2055 GET_MODE_SIZE (GET_MODE (src)), 0);
2056 emit_move_insn (mem, src);
2057 tmps[i] = adjust_address (mem, mode, 0);
2059 else
2060 abort ();
2062 else if (CONSTANT_P (src)
2063 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2064 tmps[i] = src;
2065 else
2066 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2067 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2068 mode, mode, ssize);
2070 if (BYTES_BIG_ENDIAN && shift)
2071 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2072 tmps[i], 0, OPTAB_WIDEN);
2075 emit_queue ();
2077 /* Copy the extracted pieces into the proper (probable) hard regs. */
2078 for (i = start; i < XVECLEN (dst, 0); i++)
2079 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2082 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2083 registers represented by a PARALLEL. SSIZE represents the total size of
2084 block DST, or -1 if not known. */
2086 void
2087 emit_group_store (orig_dst, src, ssize)
2088 rtx orig_dst, src;
2089 int ssize;
2091 rtx *tmps, dst;
2092 int start, i;
2094 if (GET_CODE (src) != PARALLEL)
2095 abort ();
2097 /* Check for a NULL entry, used to indicate that the parameter goes
2098 both on the stack and in registers. */
2099 if (XEXP (XVECEXP (src, 0, 0), 0))
2100 start = 0;
2101 else
2102 start = 1;
2104 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2106 /* Copy the (probable) hard regs into pseudos. */
2107 for (i = start; i < XVECLEN (src, 0); i++)
2109 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2110 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2111 emit_move_insn (tmps[i], reg);
2113 emit_queue ();
2115 /* If we won't be storing directly into memory, protect the real destination
2116 from strange tricks we might play. */
2117 dst = orig_dst;
2118 if (GET_CODE (dst) == PARALLEL)
2120 rtx temp;
2122 /* We can get a PARALLEL dst if there is a conditional expression in
2123 a return statement. In that case, the dst and src are the same,
2124 so no action is necessary. */
2125 if (rtx_equal_p (dst, src))
2126 return;
2128 /* It is unclear if we can ever reach here, but we may as well handle
2129 it. Allocate a temporary, and split this into a store/load to/from
2130 the temporary. */
2132 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2133 emit_group_store (temp, src, ssize);
2134 emit_group_load (dst, temp, ssize);
2135 return;
2137 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2139 dst = gen_reg_rtx (GET_MODE (orig_dst));
2140 /* Make life a bit easier for combine. */
2141 emit_move_insn (dst, const0_rtx);
2144 /* Process the pieces. */
2145 for (i = start; i < XVECLEN (src, 0); i++)
2147 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2148 enum machine_mode mode = GET_MODE (tmps[i]);
2149 unsigned int bytelen = GET_MODE_SIZE (mode);
2150 rtx dest = dst;
2152 /* Handle trailing fragments that run over the size of the struct. */
2153 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2155 if (BYTES_BIG_ENDIAN)
2157 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2158 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2159 tmps[i], 0, OPTAB_WIDEN);
2161 bytelen = ssize - bytepos;
2164 if (GET_CODE (dst) == CONCAT)
2166 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2167 dest = XEXP (dst, 0);
2168 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2170 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2171 dest = XEXP (dst, 1);
2173 else
2174 abort ();
2177 /* Optimize the access just a bit. */
2178 if (GET_CODE (dest) == MEM
2179 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2180 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2181 && bytelen == GET_MODE_SIZE (mode))
2182 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2183 else
2184 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2185 mode, tmps[i], ssize);
2188 emit_queue ();
2190 /* Copy from the pseudo into the (probable) hard reg. */
2191 if (GET_CODE (dst) == REG)
2192 emit_move_insn (orig_dst, dst);
2195 /* Generate code to copy a BLKmode object of TYPE out of a
2196 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2197 is null, a stack temporary is created. TGTBLK is returned.
2199 The primary purpose of this routine is to handle functions
2200 that return BLKmode structures in registers. Some machines
2201 (the PA for example) want to return all small structures
2202 in registers regardless of the structure's alignment. */
2204 rtx
2205 copy_blkmode_from_reg (tgtblk, srcreg, type)
2206 rtx tgtblk;
2207 rtx srcreg;
2208 tree type;
2210 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2211 rtx src = NULL, dst = NULL;
2212 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2213 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2215 if (tgtblk == 0)
2217 tgtblk = assign_temp (build_qualified_type (type,
2218 (TYPE_QUALS (type)
2219 | TYPE_QUAL_CONST)),
2220 0, 1, 1);
2221 preserve_temp_slots (tgtblk);
2224 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2225 into a new pseudo which is a full word.
2227 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2228 the wrong part of the register gets copied so we fake a type conversion
2229 in place. */
2230 if (GET_MODE (srcreg) != BLKmode
2231 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2233 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2234 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2235 else
2236 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2239 /* Structures whose size is not a multiple of a word are aligned
2240 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2241 machine, this means we must skip the empty high order bytes when
2242 calculating the bit offset. */
2243 if (BYTES_BIG_ENDIAN
2244 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2245 && bytes % UNITS_PER_WORD)
2246 big_endian_correction
2247 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
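/* Worked example with illustrative numbers: with BITS_PER_WORD == 32,
   UNITS_PER_WORD == 4 and BITS_PER_UNIT == 8, a 6-byte structure gives
   bytes % UNITS_PER_WORD == 2, so big_endian_correction == 32 - 2 * 8 == 16.  */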
2249 /* Copy the structure BITSIZE bits at a time.
2251 We could probably emit more efficient code for machines which do not use
2252 strict alignment, but it doesn't seem worth the effort at the current
2253 time. */
2254 for (bitpos = 0, xbitpos = big_endian_correction;
2255 bitpos < bytes * BITS_PER_UNIT;
2256 bitpos += bitsize, xbitpos += bitsize)
2258 /* We need a new source operand each time xbitpos is on a
2259 word boundary and when xbitpos == big_endian_correction
2260 (the first time through). */
2261 if (xbitpos % BITS_PER_WORD == 0
2262 || xbitpos == big_endian_correction)
2263 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2264 GET_MODE (srcreg));
2266 /* We need a new destination operand each time bitpos is on
2267 a word boundary. */
2268 if (bitpos % BITS_PER_WORD == 0)
2269 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2271 /* Use xbitpos for the source extraction (right justified) and
2272 bitpos for the destination store (left justified). */
2273 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2274 extract_bit_field (src, bitsize,
2275 xbitpos % BITS_PER_WORD, 1,
2276 NULL_RTX, word_mode, word_mode,
2277 BITS_PER_WORD),
2278 BITS_PER_WORD);
2281 return tgtblk;
2284 /* Add a USE expression for REG to the (possibly empty) list pointed
2285 to by CALL_FUSAGE. REG must denote a hard register. */
2287 void
2288 use_reg (call_fusage, reg)
2289 rtx *call_fusage, reg;
2291 if (GET_CODE (reg) != REG
2292 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2293 abort ();
2295 *call_fusage
2296 = gen_rtx_EXPR_LIST (VOIDmode,
2297 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2300 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2301 starting at REGNO. All of these registers must be hard registers. */
2303 void
2304 use_regs (call_fusage, regno, nregs)
2305 rtx *call_fusage;
2306 int regno;
2307 int nregs;
2309 int i;
2311 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2312 abort ();
2314 for (i = 0; i < nregs; i++)
2315 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2318 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2319 PARALLEL REGS. This is for calls that pass values in multiple
2320 non-contiguous locations. The Irix 6 ABI has examples of this. */
2322 void
2323 use_group_regs (call_fusage, regs)
2324 rtx *call_fusage;
2325 rtx regs;
2327 int i;
2329 for (i = 0; i < XVECLEN (regs, 0); i++)
2331 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2333 /* A NULL entry means the parameter goes both on the stack and in
2334 registers. This can also be a MEM for targets that pass values
2335 partially on the stack and partially in registers. */
2336 if (reg != 0 && GET_CODE (reg) == REG)
2337 use_reg (call_fusage, reg);
2342 /* Determine whether the LEN bytes generated by CONSTFUN can be
2343 stored to memory using several move instructions. CONSTFUNDATA is
2344 a pointer which will be passed as argument in every CONSTFUN call.
2345 ALIGN is maximum alignment we can assume. Return nonzero if a
2346 call to store_by_pieces should succeed. */
2348 int
2349 can_store_by_pieces (len, constfun, constfundata, align)
2350 unsigned HOST_WIDE_INT len;
2351 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2352 PTR constfundata;
2353 unsigned int align;
2355 unsigned HOST_WIDE_INT max_size, l;
2356 HOST_WIDE_INT offset = 0;
2357 enum machine_mode mode, tmode;
2358 enum insn_code icode;
2359 int reverse;
2360 rtx cst;
2362 if (! MOVE_BY_PIECES_P (len, align))
2363 return 0;
2365 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2366 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2367 align = MOVE_MAX * BITS_PER_UNIT;
2369 /* We would first store what we can in the largest integer mode, then go to
2370 successively smaller modes. */
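/* Illustration under assumed target parameters: with STORE_MAX_PIECES == 4
   and QImode, HImode and SImode moves available at sufficient alignment,
   len == 7 is accounted for as one SImode piece (4 bytes), one HImode piece
   (2 bytes) and one QImode piece (1 byte), querying CONSTFUN at each offset
   to check that the generated constant is legitimate.  */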
2372 for (reverse = 0;
2373 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2374 reverse++)
2376 l = len;
2377 mode = VOIDmode;
2378 max_size = STORE_MAX_PIECES + 1;
2379 while (max_size > 1)
2381 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2382 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2383 if (GET_MODE_SIZE (tmode) < max_size)
2384 mode = tmode;
2386 if (mode == VOIDmode)
2387 break;
2389 icode = mov_optab->handlers[(int) mode].insn_code;
2390 if (icode != CODE_FOR_nothing
2391 && align >= GET_MODE_ALIGNMENT (mode))
2393 unsigned int size = GET_MODE_SIZE (mode);
2395 while (l >= size)
2397 if (reverse)
2398 offset -= size;
2400 cst = (*constfun) (constfundata, offset, mode);
2401 if (!LEGITIMATE_CONSTANT_P (cst))
2402 return 0;
2404 if (!reverse)
2405 offset += size;
2407 l -= size;
2411 max_size = GET_MODE_SIZE (mode);
2414 /* The code above should have handled everything. */
2415 if (l != 0)
2416 abort ();
2419 return 1;
2422 /* Generate several move instructions to store LEN bytes generated by
2423 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2424 pointer which will be passed as argument in every CONSTFUN call.
2425 ALIGN is maximum alignment we can assume. */
2427 void
2428 store_by_pieces (to, len, constfun, constfundata, align)
2429 rtx to;
2430 unsigned HOST_WIDE_INT len;
2431 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2432 PTR constfundata;
2433 unsigned int align;
2435 struct store_by_pieces data;
2437 if (! MOVE_BY_PIECES_P (len, align))
2438 abort ();
2439 to = protect_from_queue (to, 1);
2440 data.constfun = constfun;
2441 data.constfundata = constfundata;
2442 data.len = len;
2443 data.to = to;
2444 store_by_pieces_1 (&data, align);
2447 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2448 rtx with BLKmode). The caller must pass TO through protect_from_queue
2449 before calling. ALIGN is maximum alignment we can assume. */
2451 static void
2452 clear_by_pieces (to, len, align)
2453 rtx to;
2454 unsigned HOST_WIDE_INT len;
2455 unsigned int align;
2457 struct store_by_pieces data;
2459 data.constfun = clear_by_pieces_1;
2460 data.constfundata = NULL;
2461 data.len = len;
2462 data.to = to;
2463 store_by_pieces_1 (&data, align);
2466 /* Callback routine for clear_by_pieces.
2467 Return const0_rtx unconditionally. */
2469 static rtx
2470 clear_by_pieces_1 (data, offset, mode)
2471 PTR data ATTRIBUTE_UNUSED;
2472 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2473 enum machine_mode mode ATTRIBUTE_UNUSED;
2475 return const0_rtx;
2478 /* Subroutine of clear_by_pieces and store_by_pieces.
2479 Generate several move instructions to store LEN bytes of block TO. (A MEM
2480 rtx with BLKmode). The caller must pass TO through protect_from_queue
2481 before calling. ALIGN is maximum alignment we can assume. */
2483 static void
2484 store_by_pieces_1 (data, align)
2485 struct store_by_pieces *data;
2486 unsigned int align;
2488 rtx to_addr = XEXP (data->to, 0);
2489 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2490 enum machine_mode mode = VOIDmode, tmode;
2491 enum insn_code icode;
2493 data->offset = 0;
2494 data->to_addr = to_addr;
2495 data->autinc_to
2496 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2497 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2499 data->explicit_inc_to = 0;
2500 data->reverse
2501 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2502 if (data->reverse)
2503 data->offset = data->len;
2505 /* If storing requires more than two move insns,
2506 copy addresses to registers (to make displacements shorter)
2507 and use post-increment if available. */
2508 if (!data->autinc_to
2509 && move_by_pieces_ninsns (data->len, align) > 2)
2511 /* Determine the main mode we'll be using. */
2512 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2513 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2514 if (GET_MODE_SIZE (tmode) < max_size)
2515 mode = tmode;
2517 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2519 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2520 data->autinc_to = 1;
2521 data->explicit_inc_to = -1;
2524 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2525 && ! data->autinc_to)
2527 data->to_addr = copy_addr_to_reg (to_addr);
2528 data->autinc_to = 1;
2529 data->explicit_inc_to = 1;
2532 if ( !data->autinc_to && CONSTANT_P (to_addr))
2533 data->to_addr = copy_addr_to_reg (to_addr);
2536 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2537 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2538 align = MOVE_MAX * BITS_PER_UNIT;
2540 /* First store what we can in the largest integer mode, then go to
2541 successively smaller modes. */
2543 while (max_size > 1)
2545 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2546 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2547 if (GET_MODE_SIZE (tmode) < max_size)
2548 mode = tmode;
2550 if (mode == VOIDmode)
2551 break;
2553 icode = mov_optab->handlers[(int) mode].insn_code;
2554 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2555 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2557 max_size = GET_MODE_SIZE (mode);
2560 /* The code above should have handled everything. */
2561 if (data->len != 0)
2562 abort ();
2565 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2566 with move instructions for mode MODE. GENFUN is the gen_... function
2567 to make a move insn for that mode. DATA has all the other info. */
2569 static void
2570 store_by_pieces_2 (genfun, mode, data)
2571 rtx (*genfun) PARAMS ((rtx, ...));
2572 enum machine_mode mode;
2573 struct store_by_pieces *data;
2575 unsigned int size = GET_MODE_SIZE (mode);
2576 rtx to1, cst;
2578 while (data->len >= size)
2580 if (data->reverse)
2581 data->offset -= size;
2583 if (data->autinc_to)
2584 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2585 data->offset);
2586 else
2587 to1 = adjust_address (data->to, mode, data->offset);
2589 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2590 emit_insn (gen_add2_insn (data->to_addr,
2591 GEN_INT (-(HOST_WIDE_INT) size)));
2593 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2594 emit_insn ((*genfun) (to1, cst));
2596 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2597 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2599 if (! data->reverse)
2600 data->offset += size;
2602 data->len -= size;
2606 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2607 its length in bytes. */
2609 rtx
2610 clear_storage (object, size)
2611 rtx object;
2612 rtx size;
2614 #ifdef TARGET_MEM_FUNCTIONS
2615 static tree fn;
2616 tree call_expr, arg_list;
2617 #endif
2618 rtx retval = 0;
2619 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2620 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2622 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2623 just move a zero. Otherwise, do this a piece at a time. */
2624 if (GET_MODE (object) != BLKmode
2625 && GET_CODE (size) == CONST_INT
2626 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2627 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2628 else
2630 object = protect_from_queue (object, 1);
2631 size = protect_from_queue (size, 0);
2633 if (GET_CODE (size) == CONST_INT
2634 && MOVE_BY_PIECES_P (INTVAL (size), align))
2635 clear_by_pieces (object, INTVAL (size), align);
2636 else
2638 /* Try the most limited insn first, because there's no point
2639 including more than one in the machine description unless
2640 the more limited one has some advantage. */
2642 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2643 enum machine_mode mode;
2645 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2646 mode = GET_MODE_WIDER_MODE (mode))
2648 enum insn_code code = clrstr_optab[(int) mode];
2649 insn_operand_predicate_fn pred;
2651 if (code != CODE_FOR_nothing
2652 /* We don't need MODE to be narrower than
2653 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2654 the mode mask, as it is returned by the macro, it will
2655 definitely be less than the actual mode mask. */
2656 && ((GET_CODE (size) == CONST_INT
2657 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2658 <= (GET_MODE_MASK (mode) >> 1)))
2659 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2660 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2661 || (*pred) (object, BLKmode))
2662 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2663 || (*pred) (opalign, VOIDmode)))
2665 rtx op1;
2666 rtx last = get_last_insn ();
2667 rtx pat;
2669 op1 = convert_to_mode (mode, size, 1);
2670 pred = insn_data[(int) code].operand[1].predicate;
2671 if (pred != 0 && ! (*pred) (op1, mode))
2672 op1 = copy_to_mode_reg (mode, op1);
2674 pat = GEN_FCN ((int) code) (object, op1, opalign);
2675 if (pat)
2677 emit_insn (pat);
2678 return 0;
2680 else
2681 delete_insns_since (last);
2685 /* OBJECT or SIZE may have been passed through protect_from_queue.
2687 It is unsafe to save the value generated by protect_from_queue
2688 and reuse it later. Consider what happens if emit_queue is
2689 called before the return value from protect_from_queue is used.
2691 Expansion of the CALL_EXPR below will call emit_queue before
2692 we are finished emitting RTL for argument setup. So if we are
2693 not careful we could get the wrong value for an argument.
2695 To avoid this problem we go ahead and emit code to copy OBJECT
2696 and SIZE into new pseudos. We can then place those new pseudos
2697 into an RTL_EXPR and use them later, even after a call to
2698 emit_queue.
2700 Note this is not strictly needed for library calls since they
2701 do not call emit_queue before loading their arguments. However,
2702 we may need to have library calls call emit_queue in the future
2703 since failing to do so could cause problems for targets which
2704 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2705 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2707 #ifdef TARGET_MEM_FUNCTIONS
2708 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2709 #else
2710 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2711 TREE_UNSIGNED (integer_type_node));
2712 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2713 #endif
2715 #ifdef TARGET_MEM_FUNCTIONS
2716 /* It is incorrect to use the libcall calling conventions to call
2717 memset in this context.
2719 This could be a user call to memset and the user may wish to
2720 examine the return value from memset.
2722 For targets where libcalls and normal calls have different
2723 conventions for returning pointers, we could end up generating
2724 incorrect code.
2726 So instead of using a libcall sequence we build up a suitable
2727 CALL_EXPR and expand the call in the normal fashion. */
2728 if (fn == NULL_TREE)
2730 tree fntype;
2732 /* This was copied from except.c; I don't know whether all of this is
2733 necessary in this context. */
2734 fn = get_identifier ("memset");
2735 fntype = build_pointer_type (void_type_node);
2736 fntype = build_function_type (fntype, NULL_TREE);
2737 fn = build_decl (FUNCTION_DECL, fn, fntype);
2738 ggc_add_tree_root (&fn, 1);
2739 DECL_EXTERNAL (fn) = 1;
2740 TREE_PUBLIC (fn) = 1;
2741 DECL_ARTIFICIAL (fn) = 1;
2742 TREE_NOTHROW (fn) = 1;
2743 make_decl_rtl (fn, NULL);
2744 assemble_external (fn);
2747 /* We need to make an argument list for the function call.
2749 memset has three arguments: the first is a void * address, the
2750 second an integer with the initialization value, and the last is a
2751 size_t byte count. */
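/* The list built below therefore amounts to the call memset (OBJECT, 0, SIZE),
   the zero being supplied as const0_rtx.  */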
2752 arg_list
2753 = build_tree_list (NULL_TREE,
2754 make_tree (build_pointer_type (void_type_node),
2755 object));
2756 TREE_CHAIN (arg_list)
2757 = build_tree_list (NULL_TREE,
2758 make_tree (integer_type_node, const0_rtx));
2759 TREE_CHAIN (TREE_CHAIN (arg_list))
2760 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2761 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2763 /* Now we have to build up the CALL_EXPR itself. */
2764 call_expr = build1 (ADDR_EXPR,
2765 build_pointer_type (TREE_TYPE (fn)), fn);
2766 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2767 call_expr, arg_list, NULL_TREE);
2768 TREE_SIDE_EFFECTS (call_expr) = 1;
2770 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2771 #else
2772 emit_library_call (bzero_libfunc, LCT_NORMAL,
2773 VOIDmode, 2, object, Pmode, size,
2774 TYPE_MODE (integer_type_node));
2775 #endif
2777 /* If we are initializing a readonly value, show the above call
2778 clobbered it. Otherwise, a load from it may erroneously be
2779 hoisted from a loop. */
2780 if (RTX_UNCHANGING_P (object))
2781 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2785 return retval;
2788 /* Generate code to copy Y into X.
2789 Both Y and X must have the same mode, except that
2790 Y can be a constant with VOIDmode.
2791 This mode cannot be BLKmode; use emit_block_move for that.
2793 Return the last instruction emitted. */
2795 rtx
2796 emit_move_insn (x, y)
2797 rtx x, y;
2799 enum machine_mode mode = GET_MODE (x);
2800 rtx y_cst = NULL_RTX;
2801 rtx last_insn;
2803 x = protect_from_queue (x, 1);
2804 y = protect_from_queue (y, 0);
2806 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2807 abort ();
2809 /* Never force constant_p_rtx to memory. */
2810 if (GET_CODE (y) == CONSTANT_P_RTX)
2812 else if (CONSTANT_P (y))
2814 if (optimize
2815 && FLOAT_MODE_P (GET_MODE (x))
2816 && (last_insn = compress_float_constant (x, y)))
2817 return last_insn;
2819 if (!LEGITIMATE_CONSTANT_P (y))
2821 y_cst = y;
2822 y = force_const_mem (mode, y);
2826 /* If X or Y are memory references, verify that their addresses are valid
2827 for the machine. */
2828 if (GET_CODE (x) == MEM
2829 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2830 && ! push_operand (x, GET_MODE (x)))
2831 || (flag_force_addr
2832 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2833 x = validize_mem (x);
2835 if (GET_CODE (y) == MEM
2836 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 || (flag_force_addr
2838 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2839 y = validize_mem (y);
2841 if (mode == BLKmode)
2842 abort ();
2844 last_insn = emit_move_insn_1 (x, y);
2846 if (y_cst && GET_CODE (x) == REG)
2847 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2849 return last_insn;
2852 /* Low level part of emit_move_insn.
2853 Called just like emit_move_insn, but assumes X and Y
2854 are basically valid. */
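/* Order of preference below: use the mov pattern for MODE when one exists;
   otherwise split complex modes into their real and imaginary parts;
   otherwise fall back to a word-by-word copy for multi-word modes.  */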
2856 rtx
2857 emit_move_insn_1 (x, y)
2858 rtx x, y;
2860 enum machine_mode mode = GET_MODE (x);
2861 enum machine_mode submode;
2862 enum mode_class class = GET_MODE_CLASS (mode);
2864 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2865 abort ();
2867 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2868 return
2869 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2871 /* Expand complex moves by moving real part and imag part, if possible. */
2872 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2873 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2874 * BITS_PER_UNIT),
2875 (class == MODE_COMPLEX_INT
2876 ? MODE_INT : MODE_FLOAT),
2878 && (mov_optab->handlers[(int) submode].insn_code
2879 != CODE_FOR_nothing))
2881 /* Don't split destination if it is a stack push. */
2882 int stack = push_operand (x, GET_MODE (x));
2884 #ifdef PUSH_ROUNDING
2885 /* In case we output to the stack, but the size is smaller than the machine
2886 can push exactly, we need to use move instructions. */
2887 if (stack
2888 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2889 != GET_MODE_SIZE (submode)))
2891 rtx temp;
2892 HOST_WIDE_INT offset1, offset2;
2894 /* Do not use anti_adjust_stack, since we don't want to update
2895 stack_pointer_delta. */
2896 temp = expand_binop (Pmode,
2897 #ifdef STACK_GROWS_DOWNWARD
2898 sub_optab,
2899 #else
2900 add_optab,
2901 #endif
2902 stack_pointer_rtx,
2903 GEN_INT
2904 (PUSH_ROUNDING
2905 (GET_MODE_SIZE (GET_MODE (x)))),
2906 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2908 if (temp != stack_pointer_rtx)
2909 emit_move_insn (stack_pointer_rtx, temp);
2911 #ifdef STACK_GROWS_DOWNWARD
2912 offset1 = 0;
2913 offset2 = GET_MODE_SIZE (submode);
2914 #else
2915 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2916 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2917 + GET_MODE_SIZE (submode));
2918 #endif
2920 emit_move_insn (change_address (x, submode,
2921 gen_rtx_PLUS (Pmode,
2922 stack_pointer_rtx,
2923 GEN_INT (offset1))),
2924 gen_realpart (submode, y));
2925 emit_move_insn (change_address (x, submode,
2926 gen_rtx_PLUS (Pmode,
2927 stack_pointer_rtx,
2928 GEN_INT (offset2))),
2929 gen_imagpart (submode, y));
2931 else
2932 #endif
2933 /* If this is a stack push, push the highpart first, so it
2934 will be in the argument order.
2936 In that case, change_address is used only to convert
2937 the mode, not to change the address. */
2938 if (stack)
2940 /* Note that the real part always precedes the imag part in memory
2941 regardless of machine's endianness. */
2942 #ifdef STACK_GROWS_DOWNWARD
2943 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2944 (gen_rtx_MEM (submode, XEXP (x, 0)),
2945 gen_imagpart (submode, y)));
2946 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2947 (gen_rtx_MEM (submode, XEXP (x, 0)),
2948 gen_realpart (submode, y)));
2949 #else
2950 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2951 (gen_rtx_MEM (submode, XEXP (x, 0)),
2952 gen_realpart (submode, y)));
2953 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2954 (gen_rtx_MEM (submode, XEXP (x, 0)),
2955 gen_imagpart (submode, y)));
2956 #endif
2958 else
2960 rtx realpart_x, realpart_y;
2961 rtx imagpart_x, imagpart_y;
2963 /* If this is a complex value with each part being smaller than a
2964 word, the usual calling sequence will likely pack the pieces into
2965 a single register. Unfortunately, SUBREG of hard registers only
2966 deals in terms of words, so we have a problem converting input
2967 arguments to the CONCAT of two registers that is used elsewhere
2968 for complex values. If this is before reload, we can copy it into
2969 memory and reload. FIXME, we should see about using extract and
2970 insert on integer registers, but complex short and complex char
2971 variables should be rarely used. */
2972 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2973 && (reload_in_progress | reload_completed) == 0)
2975 int packed_dest_p
2976 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2977 int packed_src_p
2978 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2980 if (packed_dest_p || packed_src_p)
2982 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2983 ? MODE_FLOAT : MODE_INT);
2985 enum machine_mode reg_mode
2986 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2988 if (reg_mode != BLKmode)
2990 rtx mem = assign_stack_temp (reg_mode,
2991 GET_MODE_SIZE (mode), 0);
2992 rtx cmem = adjust_address (mem, mode, 0);
2994 cfun->cannot_inline
2995 = N_("function using short complex types cannot be inline");
2997 if (packed_dest_p)
2999 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3001 emit_move_insn_1 (cmem, y);
3002 return emit_move_insn_1 (sreg, mem);
3004 else
3006 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3008 emit_move_insn_1 (mem, sreg);
3009 return emit_move_insn_1 (x, cmem);
3015 realpart_x = gen_realpart (submode, x);
3016 realpart_y = gen_realpart (submode, y);
3017 imagpart_x = gen_imagpart (submode, x);
3018 imagpart_y = gen_imagpart (submode, y);
3020 /* Show the output dies here. This is necessary for SUBREGs
3021 of pseudos since we cannot track their lifetimes correctly;
3022 hard regs shouldn't appear here except as return values.
3023 We never want to emit such a clobber after reload. */
3024 if (x != y
3025 && ! (reload_in_progress || reload_completed)
3026 && (GET_CODE (realpart_x) == SUBREG
3027 || GET_CODE (imagpart_x) == SUBREG))
3028 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3030 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3031 (realpart_x, realpart_y));
3032 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3033 (imagpart_x, imagpart_y));
3036 return get_last_insn ();
3039 /* This will handle any multi-word mode that lacks a move_insn pattern.
3040 However, you will get better code if you define such patterns,
3041 even if they must turn into multiple assembler instructions. */
3042 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3044 rtx last_insn = 0;
3045 rtx seq, inner;
3046 int need_clobber;
3047 int i;
3049 #ifdef PUSH_ROUNDING
3051 /* If X is a push on the stack, do the push now and replace
3052 X with a reference to the stack pointer. */
3053 if (push_operand (x, GET_MODE (x)))
3055 rtx temp;
3056 enum rtx_code code;
3058 /* Do not use anti_adjust_stack, since we don't want to update
3059 stack_pointer_delta. */
3060 temp = expand_binop (Pmode,
3061 #ifdef STACK_GROWS_DOWNWARD
3062 sub_optab,
3063 #else
3064 add_optab,
3065 #endif
3066 stack_pointer_rtx,
3067 GEN_INT
3068 (PUSH_ROUNDING
3069 (GET_MODE_SIZE (GET_MODE (x)))),
3070 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3072 if (temp != stack_pointer_rtx)
3073 emit_move_insn (stack_pointer_rtx, temp);
3075 code = GET_CODE (XEXP (x, 0));
3077 /* Just hope that small offsets off SP are OK. */
3078 if (code == POST_INC)
3079 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3080 GEN_INT (-((HOST_WIDE_INT)
3081 GET_MODE_SIZE (GET_MODE (x)))));
3082 else if (code == POST_DEC)
3083 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3084 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3085 else
3086 temp = stack_pointer_rtx;
3088 x = change_address (x, VOIDmode, temp);
3090 #endif
3092 /* If we are in reload, see if either operand is a MEM whose address
3093 is scheduled for replacement. */
3094 if (reload_in_progress && GET_CODE (x) == MEM
3095 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3096 x = replace_equiv_address_nv (x, inner);
3097 if (reload_in_progress && GET_CODE (y) == MEM
3098 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3099 y = replace_equiv_address_nv (y, inner);
3101 start_sequence ();
3103 need_clobber = 0;
3104 for (i = 0;
3105 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3106 i++)
3108 rtx xpart = operand_subword (x, i, 1, mode);
3109 rtx ypart = operand_subword (y, i, 1, mode);
3111 /* If we can't get a part of Y, put Y into memory if it is a
3112 constant. Otherwise, force it into a register. If we still
3113 can't get a part of Y, abort. */
3114 if (ypart == 0 && CONSTANT_P (y))
3116 y = force_const_mem (mode, y);
3117 ypart = operand_subword (y, i, 1, mode);
3119 else if (ypart == 0)
3120 ypart = operand_subword_force (y, i, mode);
3122 if (xpart == 0 || ypart == 0)
3123 abort ();
3125 need_clobber |= (GET_CODE (xpart) == SUBREG);
3127 last_insn = emit_move_insn (xpart, ypart);
3130 seq = gen_sequence ();
3131 end_sequence ();
3133 /* Show the output dies here. This is necessary for SUBREGs
3134 of pseudos since we cannot track their lifetimes correctly;
3135 hard regs shouldn't appear here except as return values.
3136 We never want to emit such a clobber after reload. */
3137 if (x != y
3138 && ! (reload_in_progress || reload_completed)
3139 && need_clobber != 0)
3140 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3142 emit_insn (seq);
3144 return last_insn;
3146 else
3147 abort ();
3150 /* If Y is representable exactly in a narrower mode, and the target can
3151 perform the extension directly from constant or memory, then emit the
3152 move as an extension. */
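/* For example (illustrative, target permitting): a DFmode move of the
   constant 1.0 can be emitted as an extension of the SFmode constant 1.0,
   since that truncation is exact, provided the target can extend SFmode to
   DFmode directly from a constant or from memory.  */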
3154 static rtx
3155 compress_float_constant (x, y)
3156 rtx x, y;
3158 enum machine_mode dstmode = GET_MODE (x);
3159 enum machine_mode orig_srcmode = GET_MODE (y);
3160 enum machine_mode srcmode;
3161 REAL_VALUE_TYPE r;
3163 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3165 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3166 srcmode != orig_srcmode;
3167 srcmode = GET_MODE_WIDER_MODE (srcmode))
3169 enum insn_code ic;
3170 rtx trunc_y, last_insn;
3172 /* Skip if the target can't extend this way. */
3173 ic = can_extend_p (dstmode, srcmode, 0);
3174 if (ic == CODE_FOR_nothing)
3175 continue;
3177 /* Skip if the narrowed value isn't exact. */
3178 if (! exact_real_truncate (srcmode, &r))
3179 continue;
3181 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3183 if (LEGITIMATE_CONSTANT_P (trunc_y))
3185 /* Skip if the target needs extra instructions to perform
3186 the extension. */
3187 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3188 continue;
3190 else if (float_extend_from_mem[dstmode][srcmode])
3191 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3192 else
3193 continue;
3195 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3196 last_insn = get_last_insn ();
3198 if (GET_CODE (x) == REG)
3199 REG_NOTES (last_insn)
3200 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3202 return last_insn;
3205 return NULL_RTX;
3208 /* Pushing data onto the stack. */
3210 /* Push a block of length SIZE (perhaps variable)
3211 and return an rtx to address the beginning of the block.
3212 Note that it is not possible for the value returned to be a QUEUED.
3213 The value may be virtual_outgoing_args_rtx.
3215 EXTRA is the number of bytes of padding to push in addition to SIZE.
3216 BELOW nonzero means this padding comes at low addresses;
3217 otherwise, the padding comes at high addresses. */
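/* In outline: grow the stack by SIZE (plus EXTRA), then form the block's
   address from virtual_outgoing_args_rtx, biased by SIZE and EXTRA as the
   stack direction and the placement of the padding require.  */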
3219 rtx
3220 push_block (size, extra, below)
3221 rtx size;
3222 int extra, below;
3224 rtx temp;
3226 size = convert_modes (Pmode, ptr_mode, size, 1);
3227 if (CONSTANT_P (size))
3228 anti_adjust_stack (plus_constant (size, extra));
3229 else if (GET_CODE (size) == REG && extra == 0)
3230 anti_adjust_stack (size);
3231 else
3233 temp = copy_to_mode_reg (Pmode, size);
3234 if (extra != 0)
3235 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3236 temp, 0, OPTAB_LIB_WIDEN);
3237 anti_adjust_stack (temp);
3240 #ifndef STACK_GROWS_DOWNWARD
3241 if (0)
3242 #else
3243 if (1)
3244 #endif
3246 temp = virtual_outgoing_args_rtx;
3247 if (extra != 0 && below)
3248 temp = plus_constant (temp, extra);
3250 else
3252 if (GET_CODE (size) == CONST_INT)
3253 temp = plus_constant (virtual_outgoing_args_rtx,
3254 -INTVAL (size) - (below ? 0 : extra));
3255 else if (extra != 0 && !below)
3256 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3257 negate_rtx (Pmode, plus_constant (size, extra)));
3258 else
3259 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3260 negate_rtx (Pmode, size));
3263 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3266 #ifdef PUSH_ROUNDING
3268 /* Emit single push insn. */
3270 static void
3271 emit_single_push_insn (mode, x, type)
3272 rtx x;
3273 enum machine_mode mode;
3274 tree type;
3276 rtx dest_addr;
3277 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3278 rtx dest;
3279 enum insn_code icode;
3280 insn_operand_predicate_fn pred;
3282 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3283 /* If there is a push pattern, use it. Otherwise try the old way of
3284 throwing a MEM representing the push operation at the move expander. */
3285 icode = push_optab->handlers[(int) mode].insn_code;
3286 if (icode != CODE_FOR_nothing)
3288 if (((pred = insn_data[(int) icode].operand[0].predicate)
3289 && !((*pred) (x, mode))))
3290 x = force_reg (mode, x);
3291 emit_insn (GEN_FCN (icode) (x));
3292 return;
3294 if (GET_MODE_SIZE (mode) == rounded_size)
3295 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3296 else
3298 #ifdef STACK_GROWS_DOWNWARD
3299 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3300 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3301 #else
3302 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3303 GEN_INT (rounded_size));
3304 #endif
3305 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3308 dest = gen_rtx_MEM (mode, dest_addr);
3310 if (type != 0)
3312 set_mem_attributes (dest, type, 1);
3314 if (flag_optimize_sibling_calls)
3315 /* Function incoming arguments may overlap with sibling call
3316 outgoing arguments and we cannot allow reordering of reads
3317 from function arguments with stores to outgoing arguments
3318 of sibling calls. */
3319 set_mem_alias_set (dest, 0);
3321 emit_move_insn (dest, x);
3323 #endif
3325 /* Generate code to push X onto the stack, assuming it has mode MODE and
3326 type TYPE.
3327 MODE is redundant except when X is a CONST_INT (since they don't
3328 carry mode info).
3329 SIZE is an rtx for the size of data to be copied (in bytes),
3330 needed only if X is BLKmode.
3332 ALIGN (in bits) is maximum alignment we can assume.
3334 If PARTIAL and REG are both nonzero, then copy that many of the first
3335 words of X into registers starting with REG, and push the rest of X.
3336 The amount of space pushed is decreased by PARTIAL words,
3337 rounded *down* to a multiple of PARM_BOUNDARY.
3338 REG must be a hard register in this case.
3339 If REG is zero but PARTIAL is not, take all other actions for an
3340 argument partially in registers, but do not actually load any
3341 registers.
3343 EXTRA is the amount in bytes of extra space to leave next to this arg.
3344 This is ignored if an argument block has already been allocated.
3346 On a machine that lacks real push insns, ARGS_ADDR is the address of
3347 the bottom of the argument block for this call. We use indexing off there
3348 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3349 argument block has not been preallocated.
3351 ARGS_SO_FAR is the size of args previously pushed for this call.
3353 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3354 for arguments passed in registers. If nonzero, it will be the number
3355 of bytes required. */
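/* The body below distinguishes three cases: a BLKmode block (pushed with
   push insns, move_by_pieces, a movstr pattern or a block-copy library call),
   a scalar passed partly in registers, and an ordinary scalar push.  */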
3357 void
3358 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3359 args_addr, args_so_far, reg_parm_stack_space,
3360 alignment_pad)
3361 rtx x;
3362 enum machine_mode mode;
3363 tree type;
3364 rtx size;
3365 unsigned int align;
3366 int partial;
3367 rtx reg;
3368 int extra;
3369 rtx args_addr;
3370 rtx args_so_far;
3371 int reg_parm_stack_space;
3372 rtx alignment_pad;
3374 rtx xinner;
3375 enum direction stack_direction
3376 #ifdef STACK_GROWS_DOWNWARD
3377 = downward;
3378 #else
3379 = upward;
3380 #endif
3382 /* Decide where to pad the argument: `downward' for below,
3383 `upward' for above, or `none' for don't pad it.
3384 Default is below for small data on big-endian machines; else above. */
3385 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3387 /* Invert direction if stack is post-decrement.
3388 FIXME: why? */
3389 if (STACK_PUSH_CODE == POST_DEC)
3390 if (where_pad != none)
3391 where_pad = (where_pad == downward ? upward : downward);
3393 xinner = x = protect_from_queue (x, 0);
3395 if (mode == BLKmode)
3397 /* Copy a block into the stack, entirely or partially. */
3399 rtx temp;
3400 int used = partial * UNITS_PER_WORD;
3401 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3402 int skip;
3404 if (size == 0)
3405 abort ();
3407 used -= offset;
3409 /* USED is now the # of bytes we need not copy to the stack
3410 because registers will take care of them. */
3412 if (partial != 0)
3413 xinner = adjust_address (xinner, BLKmode, used);
3415 /* If the partial register-part of the arg counts in its stack size,
3416 skip the part of stack space corresponding to the registers.
3417 Otherwise, start copying to the beginning of the stack space,
3418 by setting SKIP to 0. */
3419 skip = (reg_parm_stack_space == 0) ? 0 : used;
3421 #ifdef PUSH_ROUNDING
3422 /* Do it with several push insns if that doesn't take lots of insns
3423 and if there is no difficulty with push insns that skip bytes
3424 on the stack for alignment purposes. */
3425 if (args_addr == 0
3426 && PUSH_ARGS
3427 && GET_CODE (size) == CONST_INT
3428 && skip == 0
3429 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3430 /* Here we avoid the case of a structure whose weak alignment
3431 forces many pushes of a small amount of data,
3432 since such small pushes do rounding that causes trouble. */
3433 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3434 || align >= BIGGEST_ALIGNMENT
3435 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3436 == (align / BITS_PER_UNIT)))
3437 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3439 /* Push padding now if padding above and stack grows down,
3440 or if padding below and stack grows up.
3441 But if space already allocated, this has already been done. */
3442 if (extra && args_addr == 0
3443 && where_pad != none && where_pad != stack_direction)
3444 anti_adjust_stack (GEN_INT (extra));
3446 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3448 else
3449 #endif /* PUSH_ROUNDING */
3451 rtx target;
3453 /* Otherwise make space on the stack and copy the data
3454 to the address of that space. */
3456 /* Deduct words put into registers from the size we must copy. */
3457 if (partial != 0)
3459 if (GET_CODE (size) == CONST_INT)
3460 size = GEN_INT (INTVAL (size) - used);
3461 else
3462 size = expand_binop (GET_MODE (size), sub_optab, size,
3463 GEN_INT (used), NULL_RTX, 0,
3464 OPTAB_LIB_WIDEN);
3467 /* Get the address of the stack space.
3468 In this case, we do not deal with EXTRA separately.
3469 A single stack adjust will do. */
3470 if (! args_addr)
3472 temp = push_block (size, extra, where_pad == downward);
3473 extra = 0;
3475 else if (GET_CODE (args_so_far) == CONST_INT)
3476 temp = memory_address (BLKmode,
3477 plus_constant (args_addr,
3478 skip + INTVAL (args_so_far)));
3479 else
3480 temp = memory_address (BLKmode,
3481 plus_constant (gen_rtx_PLUS (Pmode,
3482 args_addr,
3483 args_so_far),
3484 skip));
3485 target = gen_rtx_MEM (BLKmode, temp);
3487 if (type != 0)
3489 set_mem_attributes (target, type, 1);
3490 /* Function incoming arguments may overlap with sibling call
3491 outgoing arguments and we cannot allow reordering of reads
3492 from function arguments with stores to outgoing arguments
3493 of sibling calls. */
3494 set_mem_alias_set (target, 0);
3496 else
3497 set_mem_align (target, align);
3499 /* TEMP is the address of the block. Copy the data there. */
3500 if (GET_CODE (size) == CONST_INT
3501 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3503 move_by_pieces (target, xinner, INTVAL (size), align);
3504 goto ret;
3506 else
3508 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3509 enum machine_mode mode;
3511 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3512 mode != VOIDmode;
3513 mode = GET_MODE_WIDER_MODE (mode))
3515 enum insn_code code = movstr_optab[(int) mode];
3516 insn_operand_predicate_fn pred;
3518 if (code != CODE_FOR_nothing
3519 && ((GET_CODE (size) == CONST_INT
3520 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3521 <= (GET_MODE_MASK (mode) >> 1)))
3522 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3523 && (!(pred = insn_data[(int) code].operand[0].predicate)
3524 || ((*pred) (target, BLKmode)))
3525 && (!(pred = insn_data[(int) code].operand[1].predicate)
3526 || ((*pred) (xinner, BLKmode)))
3527 && (!(pred = insn_data[(int) code].operand[3].predicate)
3528 || ((*pred) (opalign, VOIDmode))))
3530 rtx op2 = convert_to_mode (mode, size, 1);
3531 rtx last = get_last_insn ();
3532 rtx pat;
3534 pred = insn_data[(int) code].operand[2].predicate;
3535 if (pred != 0 && ! (*pred) (op2, mode))
3536 op2 = copy_to_mode_reg (mode, op2);
3538 pat = GEN_FCN ((int) code) (target, xinner,
3539 op2, opalign);
3540 if (pat)
3542 emit_insn (pat);
3543 goto ret;
3545 else
3546 delete_insns_since (last);
3551 if (!ACCUMULATE_OUTGOING_ARGS)
3553 /* If the source is referenced relative to the stack pointer,
3554 copy it to another register to stabilize it. We do not need
3555 to do this if we know that we won't be changing sp. */
3557 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3558 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3559 temp = copy_to_reg (temp);
3562 /* Make inhibit_defer_pop nonzero around the library call
3563 to force it to pop the bcopy-arguments right away. */
3564 NO_DEFER_POP;
3565 #ifdef TARGET_MEM_FUNCTIONS
3566 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3567 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3568 convert_to_mode (TYPE_MODE (sizetype),
3569 size, TREE_UNSIGNED (sizetype)),
3570 TYPE_MODE (sizetype));
3571 #else
3572 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3573 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3574 convert_to_mode (TYPE_MODE (integer_type_node),
3575 size,
3576 TREE_UNSIGNED (integer_type_node)),
3577 TYPE_MODE (integer_type_node));
3578 #endif
3579 OK_DEFER_POP;
3582 else if (partial > 0)
3584 /* Scalar partly in registers. */
3586 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3587 int i;
3588 int not_stack;
3589 /* # words of start of argument
3590 that we must make space for but need not store. */
3591 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3592 int args_offset = INTVAL (args_so_far);
3593 int skip;
3595 /* Push padding now if padding above and stack grows down,
3596 or if padding below and stack grows up.
3597 But if space already allocated, this has already been done. */
3598 if (extra && args_addr == 0
3599 && where_pad != none && where_pad != stack_direction)
3600 anti_adjust_stack (GEN_INT (extra));
3602 /* If we make space by pushing it, we might as well push
3603 the real data. Otherwise, we can leave OFFSET nonzero
3604 and leave the space uninitialized. */
3605 if (args_addr == 0)
3606 offset = 0;
3608 /* Now NOT_STACK gets the number of words that we don't need to
3609 allocate on the stack. */
3610 not_stack = partial - offset;
3612 /* If the partial register-part of the arg counts in its stack size,
3613 skip the part of stack space corresponding to the registers.
3614 Otherwise, start copying to the beginning of the stack space,
3615 by setting SKIP to 0. */
3616 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3618 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3619 x = validize_mem (force_const_mem (mode, x));
3621 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3622 SUBREGs of such registers are not allowed. */
3623 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3624 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3625 x = copy_to_reg (x);
3627 /* Loop over all the words allocated on the stack for this arg. */
3628 /* We can do it by words, because any scalar bigger than a word
3629 has a size a multiple of a word. */
3630 #ifndef PUSH_ARGS_REVERSED
3631 for (i = not_stack; i < size; i++)
3632 #else
3633 for (i = size - 1; i >= not_stack; i--)
3634 #endif
3635 if (i >= not_stack + offset)
3636 emit_push_insn (operand_subword_force (x, i, mode),
3637 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3638 0, args_addr,
3639 GEN_INT (args_offset + ((i - not_stack + skip)
3640 * UNITS_PER_WORD)),
3641 reg_parm_stack_space, alignment_pad);
3643 else
3645 rtx addr;
3646 rtx target = NULL_RTX;
3647 rtx dest;
3649 /* Push padding now if padding above and stack grows down,
3650 or if padding below and stack grows up.
3651 But if space already allocated, this has already been done. */
3652 if (extra && args_addr == 0
3653 && where_pad != none && where_pad != stack_direction)
3654 anti_adjust_stack (GEN_INT (extra));
3656 #ifdef PUSH_ROUNDING
3657 if (args_addr == 0 && PUSH_ARGS)
3658 emit_single_push_insn (mode, x, type);
3659 else
3660 #endif
3662 if (GET_CODE (args_so_far) == CONST_INT)
3663 addr
3664 = memory_address (mode,
3665 plus_constant (args_addr,
3666 INTVAL (args_so_far)));
3667 else
3668 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3669 args_so_far));
3670 target = addr;
3671 dest = gen_rtx_MEM (mode, addr);
3672 if (type != 0)
3674 set_mem_attributes (dest, type, 1);
3675 /* Function incoming arguments may overlap with sibling call
3676 outgoing arguments and we cannot allow reordering of reads
3677 from function arguments with stores to outgoing arguments
3678 of sibling calls. */
3679 set_mem_alias_set (dest, 0);
3682 emit_move_insn (dest, x);
3687 ret:
3688 /* If part should go in registers, copy that part
3689 into the appropriate registers. Do this now, at the end,
3690 since mem-to-mem copies above may do function calls. */
3691 if (partial > 0 && reg != 0)
3693 /* Handle calls that pass values in multiple non-contiguous locations.
3694 The Irix 6 ABI has examples of this. */
3695 if (GET_CODE (reg) == PARALLEL)
3696 emit_group_load (reg, x, -1); /* ??? size? */
3697 else
3698 move_block_to_reg (REGNO (reg), x, partial, mode);
3701 if (extra && args_addr == 0 && where_pad == stack_direction)
3702 anti_adjust_stack (GEN_INT (extra));
3704 if (alignment_pad && args_addr == 0)
3705 anti_adjust_stack (alignment_pad);
3708 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3709 operations. */
3711 static rtx
3712 get_subtarget (x)
3713 rtx x;
3715 return ((x == 0
3716 /* Only registers can be subtargets. */
3717 || GET_CODE (x) != REG
3718 /* If the register is readonly, it can't be set more than once. */
3719 || RTX_UNCHANGING_P (x)
3720 /* Don't use hard regs to avoid extending their life. */
3721 || REGNO (x) < FIRST_PSEUDO_REGISTER
3722 /* Avoid subtargets inside loops,
3723 since they hide some invariant expressions. */
3724 || preserve_subexpressions_p ())
3725 ? 0 : x);
3728 /* Expand an assignment that stores the value of FROM into TO.
3729 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3730 (This may contain a QUEUED rtx;
3731 if the value is constant, this rtx is a constant.)
3732 Otherwise, the returned value is NULL_RTX.
3734 SUGGEST_REG is no longer actually used.
3735 It used to mean, copy the value through a register
3736 and return that register, if that is possible.
3737 We now use WANT_VALUE to decide whether to do this. */
3739 rtx
3740 expand_assignment (to, from, want_value, suggest_reg)
3741 tree to, from;
3742 int want_value;
3743 int suggest_reg ATTRIBUTE_UNUSED;
3745 rtx to_rtx = 0;
3746 rtx result;
3748 /* Don't crash if the lhs of the assignment was erroneous. */
3750 if (TREE_CODE (to) == ERROR_MARK)
3752 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3753 return want_value ? result : NULL_RTX;
3756 /* Assignment of a structure component needs special treatment
3757 if the structure component's rtx is not simply a MEM.
3758 Assignment of an array element at a constant index, and assignment of
3759 an array element in an unaligned packed structure field, has the same
3760 problem. */
3762 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3763 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3765 enum machine_mode mode1;
3766 HOST_WIDE_INT bitsize, bitpos;
3767 rtx orig_to_rtx;
3768 tree offset;
3769 int unsignedp;
3770 int volatilep = 0;
3771 tree tem;
3773 push_temp_slots ();
3774 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3775 &unsignedp, &volatilep);
3777 /* If we are going to use store_bit_field and extract_bit_field,
3778 make sure to_rtx will be safe for multiple use. */
3780 if (mode1 == VOIDmode && want_value)
3781 tem = stabilize_reference (tem);
3783 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3785 if (offset != 0)
3787 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3789 if (GET_CODE (to_rtx) != MEM)
3790 abort ();
3792 #ifdef POINTERS_EXTEND_UNSIGNED
3793 if (GET_MODE (offset_rtx) != Pmode)
3794 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3795 #else
3796 if (GET_MODE (offset_rtx) != ptr_mode)
3797 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3798 #endif
3800 /* A constant address in TO_RTX can have VOIDmode; we must not try
3801 to call force_reg in that case, so avoid it. */
3802 if (GET_CODE (to_rtx) == MEM
3803 && GET_MODE (to_rtx) == BLKmode
3804 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3805 && bitsize > 0
3806 && (bitpos % bitsize) == 0
3807 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3808 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3810 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3811 bitpos = 0;
3814 to_rtx = offset_address (to_rtx, offset_rtx,
3815 highest_pow2_factor_for_type (TREE_TYPE (to),
3816 offset));
3819 if (GET_CODE (to_rtx) == MEM)
3821 tree old_expr = MEM_EXPR (to_rtx);
3823 /* If the field is at offset zero, we could have been given the
3824 DECL_RTX of the parent struct. Don't munge it. */
3825 to_rtx = shallow_copy_rtx (to_rtx);
3827 set_mem_attributes (to_rtx, to, 0);
3829 /* If we changed MEM_EXPR, that means we're now referencing
3830 the COMPONENT_REF, which means that MEM_OFFSET must be
3831 relative to that field. But we've not yet reflected BITPOS
3832 in TO_RTX. This will be done in store_field. Adjust for
3833 that by biasing MEM_OFFSET by -bitpos. */
3834 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3835 && (bitpos / BITS_PER_UNIT) != 0)
3836 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3837 - (bitpos / BITS_PER_UNIT)));
3840 /* Deal with volatile and readonly fields. The former is only done
3841 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3842 if (volatilep && GET_CODE (to_rtx) == MEM)
3844 if (to_rtx == orig_to_rtx)
3845 to_rtx = copy_rtx (to_rtx);
3846 MEM_VOLATILE_P (to_rtx) = 1;
3849 if (TREE_CODE (to) == COMPONENT_REF
3850 && TREE_READONLY (TREE_OPERAND (to, 1)))
3852 if (to_rtx == orig_to_rtx)
3853 to_rtx = copy_rtx (to_rtx);
3854 RTX_UNCHANGING_P (to_rtx) = 1;
3857 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3859 if (to_rtx == orig_to_rtx)
3860 to_rtx = copy_rtx (to_rtx);
3861 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3864 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3865 (want_value
3866 /* Spurious cast for HPUX compiler. */
3867 ? ((enum machine_mode)
3868 TYPE_MODE (TREE_TYPE (to)))
3869 : VOIDmode),
3870 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3872 preserve_temp_slots (result);
3873 free_temp_slots ();
3874 pop_temp_slots ();
3876 /* If the value is meaningful, convert RESULT to the proper mode.
3877 Otherwise, return nothing. */
3878 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3879 TYPE_MODE (TREE_TYPE (from)),
3880 result,
3881 TREE_UNSIGNED (TREE_TYPE (to)))
3882 : NULL_RTX);
3885 /* If the rhs is a function call and its value is not an aggregate,
3886 call the function before we start to compute the lhs.
3887 This is needed for correct code for cases such as
3888 val = setjmp (buf) on machines where reference to val
3889 requires loading up part of an address in a separate insn.
3891 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3892 since it might be a promoted variable where the zero- or sign-extension
3893 needs to be done. Handling this in the normal way is safe because no
3894 computation is done before the call. */
3895 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3896 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3897 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3898 && GET_CODE (DECL_RTL (to)) == REG))
3900 rtx value;
3902 push_temp_slots ();
3903 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3904 if (to_rtx == 0)
3905 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3907 /* Handle calls that return values in multiple non-contiguous locations.
3908 The Irix 6 ABI has examples of this. */
3909 if (GET_CODE (to_rtx) == PARALLEL)
3910 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3911 else if (GET_MODE (to_rtx) == BLKmode)
3912 emit_block_move (to_rtx, value, expr_size (from));
3913 else
3915 #ifdef POINTERS_EXTEND_UNSIGNED
3916 if (POINTER_TYPE_P (TREE_TYPE (to))
3917 && GET_MODE (to_rtx) != GET_MODE (value))
3918 value = convert_memory_address (GET_MODE (to_rtx), value);
3919 #endif
3920 emit_move_insn (to_rtx, value);
3922 preserve_temp_slots (to_rtx);
3923 free_temp_slots ();
3924 pop_temp_slots ();
3925 return want_value ? to_rtx : NULL_RTX;
3928 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3929 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3931 if (to_rtx == 0)
3932 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3934 /* Don't move directly into a return register. */
3935 if (TREE_CODE (to) == RESULT_DECL
3936 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3938 rtx temp;
3940 push_temp_slots ();
3941 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3943 if (GET_CODE (to_rtx) == PARALLEL)
3944 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3945 else
3946 emit_move_insn (to_rtx, temp);
3948 preserve_temp_slots (to_rtx);
3949 free_temp_slots ();
3950 pop_temp_slots ();
3951 return want_value ? to_rtx : NULL_RTX;
3954 /* In case we are returning the contents of an object which overlaps
3955 the place the value is being stored, use a safe function when copying
3956 a value through a pointer into a structure value return block. */
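/* For instance (illustrative only): in a function that returns a struct
   through a hidden return slot,

	struct S f (struct S *p) { return *p; }

   *P may overlap that return block, so the copy below goes through
   memmove/bcopy rather than a plain block move.  */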
3957 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3958 && current_function_returns_struct
3959 && !current_function_returns_pcc_struct)
3961 rtx from_rtx, size;
3963 push_temp_slots ();
3964 size = expr_size (from);
3965 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3967 #ifdef TARGET_MEM_FUNCTIONS
3968 emit_library_call (memmove_libfunc, LCT_NORMAL,
3969 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3970 XEXP (from_rtx, 0), Pmode,
3971 convert_to_mode (TYPE_MODE (sizetype),
3972 size, TREE_UNSIGNED (sizetype)),
3973 TYPE_MODE (sizetype));
3974 #else
3975 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3976 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3977 XEXP (to_rtx, 0), Pmode,
3978 convert_to_mode (TYPE_MODE (integer_type_node),
3979 size, TREE_UNSIGNED (integer_type_node)),
3980 TYPE_MODE (integer_type_node));
3981 #endif
3983 preserve_temp_slots (to_rtx);
3984 free_temp_slots ();
3985 pop_temp_slots ();
3986 return want_value ? to_rtx : NULL_RTX;
3989 /* Compute FROM and store the value in the rtx we got. */
3991 push_temp_slots ();
3992 result = store_expr (from, to_rtx, want_value);
3993 preserve_temp_slots (result);
3994 free_temp_slots ();
3995 pop_temp_slots ();
3996 return want_value ? result : NULL_RTX;
3999 /* Generate code for computing expression EXP,
4000 and storing the value into TARGET.
4001 TARGET may contain a QUEUED rtx.
4003 If WANT_VALUE is nonzero, return a copy of the value
4004 not in TARGET, so that we can be sure to use the proper
4005 value in a containing expression even if TARGET has something
4006 else stored in it. If possible, we copy the value through a pseudo
4007 and return that pseudo. Or, if the value is constant, we try to
4008 return the constant. In some cases, we return a pseudo
4009 copied *from* TARGET.
4011 If the mode is BLKmode then we may return TARGET itself.
4012 It turns out that in BLKmode it doesn't cause a problem,
4013 because C has no operators that could combine two different
4014 assignments into the same BLKmode object with different values
4015 with no sequence point. Will other languages need this to
4016 be more thorough?
4018 If WANT_VALUE is 0, we return NULL, to make sure
4019 to catch quickly any cases where the caller uses the value
4020 and fails to set WANT_VALUE. */
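/* Illustration (hedged): in C source like  x = (y = f ());  the inner
   assignment is expanded with WANT_VALUE nonzero because the enclosing
   assignment still needs the value; the copy typically comes back in a
   fresh pseudo rather than in Y's own slot.  */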
4023 store_expr (exp, target, want_value)
4024 tree exp;
4025 rtx target;
4026 int want_value;
4028 rtx temp;
4029 int dont_return_target = 0;
4030 int dont_store_target = 0;
4032 if (TREE_CODE (exp) == COMPOUND_EXPR)
4034 /* Perform first part of compound expression, then assign from second
4035 part. */
4036 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4037 emit_queue ();
4038 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4040 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4042 /* For conditional expression, get safe form of the target. Then
4043 test the condition, doing the appropriate assignment on either
4044 side. This avoids the creation of unnecessary temporaries.
4045 For non-BLKmode, it is more efficient not to do this. */
4047 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4049 emit_queue ();
4050 target = protect_from_queue (target, 1);
4052 do_pending_stack_adjust ();
4053 NO_DEFER_POP;
4054 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4055 start_cleanup_deferral ();
4056 store_expr (TREE_OPERAND (exp, 1), target, 0);
4057 end_cleanup_deferral ();
4058 emit_queue ();
4059 emit_jump_insn (gen_jump (lab2));
4060 emit_barrier ();
4061 emit_label (lab1);
4062 start_cleanup_deferral ();
4063 store_expr (TREE_OPERAND (exp, 2), target, 0);
4064 end_cleanup_deferral ();
4065 emit_queue ();
4066 emit_label (lab2);
4067 OK_DEFER_POP;
4069 return want_value ? target : NULL_RTX;
4071 else if (queued_subexp_p (target))
4072 /* If target contains a postincrement, let's not risk
4073 using it as the place to generate the rhs. */
4075 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4077 /* Expand EXP into a new pseudo. */
4078 temp = gen_reg_rtx (GET_MODE (target));
4079 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4081 else
4082 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4084 /* If target is volatile, ANSI requires accessing the value
4085 *from* the target, if it is accessed. So make that happen.
4086 In no case return the target itself. */
4087 if (! MEM_VOLATILE_P (target) && want_value)
4088 dont_return_target = 1;
4090 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4091 && GET_MODE (target) != BLKmode)
4092 /* If target is in memory and caller wants value in a register instead,
4093 arrange that. Pass TARGET as target for expand_expr so that,
4094 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4095 We know expand_expr will not use the target in that case.
4096 Don't do this if TARGET is volatile because we are supposed
4097 to write it and then read it. */
4099 temp = expand_expr (exp, target, GET_MODE (target), 0);
4100 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4102 /* If TEMP is already in the desired TARGET, only copy it from
4103 memory and don't store it there again. */
4104 if (temp == target
4105 || (rtx_equal_p (temp, target)
4106 && ! side_effects_p (temp) && ! side_effects_p (target)))
4107 dont_store_target = 1;
4108 temp = copy_to_reg (temp);
4110 dont_return_target = 1;
4112 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4113 /* If this is a scalar in a register that is stored in a wider mode
4114 than the declared mode, compute the result into its declared mode
4115 and then convert to the wider mode. Our value is the computed
4116 expression. */
4118 rtx inner_target = 0;
4120 /* If we don't want a value, we can do the conversion inside EXP,
4121 which will often result in some optimizations. Do the conversion
4122 in two steps: first change the signedness, if needed, then
4123 the extend. But don't do this if the type of EXP is a subtype
4124 of something else since then the conversion might involve
4125 more than just converting modes. */
4126 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4127 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4129 if (TREE_UNSIGNED (TREE_TYPE (exp))
4130 != SUBREG_PROMOTED_UNSIGNED_P (target))
4131 exp = convert
4132 ((*lang_hooks.types.signed_or_unsigned_type)
4133 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4135 exp = convert ((*lang_hooks.types.type_for_mode)
4136 (GET_MODE (SUBREG_REG (target)),
4137 SUBREG_PROMOTED_UNSIGNED_P (target)),
4138 exp);
4140 inner_target = SUBREG_REG (target);
4143 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4145 /* If TEMP is a volatile MEM and we want a result value, make
4146 the access now so it gets done only once. Likewise if
4147 it contains TARGET. */
4148 if (GET_CODE (temp) == MEM && want_value
4149 && (MEM_VOLATILE_P (temp)
4150 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4151 temp = copy_to_reg (temp);
4153 /* If TEMP is a VOIDmode constant, use convert_modes to make
4154 sure that we properly convert it. */
4155 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4157 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4158 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4159 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4160 GET_MODE (target), temp,
4161 SUBREG_PROMOTED_UNSIGNED_P (target));
4164 convert_move (SUBREG_REG (target), temp,
4165 SUBREG_PROMOTED_UNSIGNED_P (target));
4167 /* If we promoted a constant, change the mode back down to match
4168 target. Otherwise, the caller might get confused by a result whose
4169 mode is larger than expected. */
4171 if (want_value && GET_MODE (temp) != GET_MODE (target))
4173 if (GET_MODE (temp) != VOIDmode)
4175 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4176 SUBREG_PROMOTED_VAR_P (temp) = 1;
4177 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4178 SUBREG_PROMOTED_UNSIGNED_P (target));
4180 else
4181 temp = convert_modes (GET_MODE (target),
4182 GET_MODE (SUBREG_REG (target)),
4183 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4186 return want_value ? temp : NULL_RTX;
4188 else
4190 temp = expand_expr (exp, target, GET_MODE (target), 0);
4191 /* Return TARGET if it's a specified hardware register.
4192 If TARGET is a volatile mem ref, either return TARGET
4193 or return a reg copied *from* TARGET; ANSI requires this.
4195 Otherwise, if TEMP is not TARGET, return TEMP
4196 if it is constant (for efficiency),
4197 or if we really want the correct value. */
4198 if (!(target && GET_CODE (target) == REG
4199 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4200 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4201 && ! rtx_equal_p (temp, target)
4202 && (CONSTANT_P (temp) || want_value))
4203 dont_return_target = 1;
4206 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4207 the same as that of TARGET, adjust the constant. This is needed, for
4208 example, in case it is a CONST_DOUBLE and we want only a word-sized
4209 value. */
4210 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4211 && TREE_CODE (exp) != ERROR_MARK
4212 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4213 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4214 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4216 /* If value was not generated in the target, store it there.
4217 Convert the value to TARGET's type first if necessary.
4218 If TEMP and TARGET compare equal according to rtx_equal_p, but
4219 one or both of them are volatile memory refs, we have to distinguish
4220 two cases:
4221 - expand_expr has used TARGET. In this case, we must not generate
4222 another copy. This can be detected by TARGET being equal according
4223 to == .
4224 - expand_expr has not used TARGET - that means that the source just
4225 happens to have the same RTX form. Since temp will have been created
4226 by expand_expr, it will compare unequal according to == .
4227 We must generate a copy in this case, to reach the correct number
4228 of volatile memory references. */
4230 if ((! rtx_equal_p (temp, target)
4231 || (temp != target && (side_effects_p (temp)
4232 || side_effects_p (target))))
4233 && TREE_CODE (exp) != ERROR_MARK
4234 && ! dont_store_target
4235 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4236 but TARGET is not a valid memory reference, TEMP will differ
4237 from TARGET although it is really the same location. */
4238 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4239 || target != DECL_RTL_IF_SET (exp)))
4241 target = protect_from_queue (target, 1);
4242 if (GET_MODE (temp) != GET_MODE (target)
4243 && GET_MODE (temp) != VOIDmode)
4245 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4246 if (dont_return_target)
4248 /* In this case, we will return TEMP,
4249 so make sure it has the proper mode.
4250 But don't forget to store the value into TARGET. */
4251 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4252 emit_move_insn (target, temp);
4254 else
4255 convert_move (target, temp, unsignedp);
4258 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4260 /* Handle copying a string constant into an array. The string
4261 constant may be shorter than the array. So copy just the string's
4262 actual length, and clear the rest. First get the size of the data
4263 type of the string, which is actually the size of the target. */
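/* For example (illustrative):  char buf[8] = "abc";  the string constant
   occupies 4 bytes (including the trailing '\0') while the target is
   8 bytes, so 4 bytes are block-copied and the remaining 4 are cleared
   below.  */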
4264 rtx size = expr_size (exp);
4266 if (GET_CODE (size) == CONST_INT
4267 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4268 emit_block_move (target, temp, size);
4269 else
4271 /* Compute the size of the data to copy from the string. */
4272 tree copy_size
4273 = size_binop (MIN_EXPR,
4274 make_tree (sizetype, size),
4275 size_int (TREE_STRING_LENGTH (exp)));
4276 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4277 VOIDmode, 0);
4278 rtx label = 0;
4280 /* Copy that much. */
4281 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4282 emit_block_move (target, temp, copy_size_rtx);
4284 /* Figure out how much is left in TARGET that we have to clear.
4285 Do all calculations in ptr_mode. */
4286 if (GET_CODE (copy_size_rtx) == CONST_INT)
4288 size = plus_constant (size, -INTVAL (copy_size_rtx));
4289 target = adjust_address (target, BLKmode,
4290 INTVAL (copy_size_rtx));
4292 else
4294 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4295 copy_size_rtx, NULL_RTX, 0,
4296 OPTAB_LIB_WIDEN);
4298 #ifdef POINTERS_EXTEND_UNSIGNED
4299 if (GET_MODE (copy_size_rtx) != Pmode)
4300 copy_size_rtx = convert_memory_address (Pmode,
4301 copy_size_rtx);
4302 #endif
4304 target = offset_address (target, copy_size_rtx,
4305 highest_pow2_factor (copy_size));
4306 label = gen_label_rtx ();
4307 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4308 GET_MODE (size), 0, label);
4311 if (size != const0_rtx)
4312 clear_storage (target, size);
4314 if (label)
4315 emit_label (label);
4318 /* Handle calls that return values in multiple non-contiguous locations.
4319 The Irix 6 ABI has examples of this. */
4320 else if (GET_CODE (target) == PARALLEL)
4321 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4322 else if (GET_MODE (temp) == BLKmode)
4323 emit_block_move (target, temp, expr_size (exp));
4324 else
4325 emit_move_insn (target, temp);
4328 /* If we don't want a value, return NULL_RTX. */
4329 if (! want_value)
4330 return NULL_RTX;
4332 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4333 ??? The latter test doesn't seem to make sense. */
4334 else if (dont_return_target && GET_CODE (temp) != MEM)
4335 return temp;
4337 /* Return TARGET itself if it is a hard register. */
4338 else if (want_value && GET_MODE (target) != BLKmode
4339 && ! (GET_CODE (target) == REG
4340 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4341 return copy_to_reg (target);
4343 else
4344 return target;
4347 /* Return 1 if EXP just contains zeros. */
4349 static int
4350 is_zeros_p (exp)
4351 tree exp;
4353 tree elt;
4355 switch (TREE_CODE (exp))
4357 case CONVERT_EXPR:
4358 case NOP_EXPR:
4359 case NON_LVALUE_EXPR:
4360 case VIEW_CONVERT_EXPR:
4361 return is_zeros_p (TREE_OPERAND (exp, 0));
4363 case INTEGER_CST:
4364 return integer_zerop (exp);
4366 case COMPLEX_CST:
4367 return
4368 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4370 case REAL_CST:
4371 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4373 case VECTOR_CST:
4374 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4375 elt = TREE_CHAIN (elt))
4376 if (!is_zeros_p (TREE_VALUE (elt)))
4377 return 0;
4379 return 1;
4381 case CONSTRUCTOR:
4382 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4383 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4384 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4385 if (! is_zeros_p (TREE_VALUE (elt)))
4386 return 0;
4388 return 1;
4390 default:
4391 return 0;
4395 /* Return 1 if EXP contains mostly (3/4) zeros. */
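/* E.g. (illustration): a CONSTRUCTOR like {0, 0, 0, 5} has three zero
   elements out of four, so 4 * zeros >= 3 * elts holds and we return 1;
   {0, 1, 2, 3} would return 0.  */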
4397 static int
4398 mostly_zeros_p (exp)
4399 tree exp;
4401 if (TREE_CODE (exp) == CONSTRUCTOR)
4403 int elts = 0, zeros = 0;
4404 tree elt = CONSTRUCTOR_ELTS (exp);
4405 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4407 /* If there are no ranges of true bits, it is all zero. */
4408 return elt == NULL_TREE;
4410 for (; elt; elt = TREE_CHAIN (elt))
4412 /* We do not handle the case where the index is a RANGE_EXPR,
4413 so the statistic will be somewhat inaccurate.
4414 We do make a more accurate count in store_constructor itself,
4415 and since this function is only used for nested array elements,
4416 this should be close enough. */
4417 if (mostly_zeros_p (TREE_VALUE (elt)))
4418 zeros++;
4419 elts++;
4422 return 4 * zeros >= 3 * elts;
4425 return is_zeros_p (exp);
4428 /* Helper function for store_constructor.
4429 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4430 TYPE is the type of the CONSTRUCTOR, not the element type.
4431 CLEARED is as for store_constructor.
4432 ALIAS_SET is the alias set to use for any stores.
4434 This provides a recursive shortcut back to store_constructor when it isn't
4435 necessary to go through store_field. This is so that we can pass through
4436 the cleared field to let store_constructor know that we may not have to
4437 clear a substructure if the outer structure has already been cleared. */
4439 static void
4440 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4441 alias_set)
4442 rtx target;
4443 unsigned HOST_WIDE_INT bitsize;
4444 HOST_WIDE_INT bitpos;
4445 enum machine_mode mode;
4446 tree exp, type;
4447 int cleared;
4448 int alias_set;
4450 if (TREE_CODE (exp) == CONSTRUCTOR
4451 && bitpos % BITS_PER_UNIT == 0
4452 /* If we have a non-zero bitpos for a register target, then we just
4453 let store_field do the bitfield handling. This is unlikely to
4454 generate unnecessary clear instructions anyway. */
4455 && (bitpos == 0 || GET_CODE (target) == MEM))
4457 if (GET_CODE (target) == MEM)
4458 target
4459 = adjust_address (target,
4460 GET_MODE (target) == BLKmode
4461 || 0 != (bitpos
4462 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4463 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4466 /* Update the alias set, if required. */
4467 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4468 && MEM_ALIAS_SET (target) != 0)
4470 target = copy_rtx (target);
4471 set_mem_alias_set (target, alias_set);
4474 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4476 else
4477 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4478 alias_set);
4481 /* Store the value of constructor EXP into the rtx TARGET.
4482 TARGET is either a REG or a MEM; we know it cannot conflict, since
4483 safe_from_p has been called.
4484 CLEARED is true if TARGET is known to have been zero'd.
4485 SIZE is the number of bytes of TARGET we are allowed to modify: this
4486 may not be the same as the size of EXP if we are assigning to a field
4487 which has been packed to exclude padding bits. */
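/* Rough illustration (hypothetical initializer): for

	struct { int a, b, c, d; } p = { 0, 0, 0, 9 };

   mostly_zeros_p is true, so the target is cleared in one shot and only
   the store of 9 into the last field is emitted; the zero fields are
   skipped because CLEARED is set.  */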
4489 static void
4490 store_constructor (exp, target, cleared, size)
4491 tree exp;
4492 rtx target;
4493 int cleared;
4494 HOST_WIDE_INT size;
4496 tree type = TREE_TYPE (exp);
4497 #ifdef WORD_REGISTER_OPERATIONS
4498 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4499 #endif
4501 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4502 || TREE_CODE (type) == QUAL_UNION_TYPE)
4504 tree elt;
4506 /* We either clear the aggregate or indicate the value is dead. */
4507 if ((TREE_CODE (type) == UNION_TYPE
4508 || TREE_CODE (type) == QUAL_UNION_TYPE)
4509 && ! cleared
4510 && ! CONSTRUCTOR_ELTS (exp))
4511 /* If the constructor is empty, clear the union. */
4513 clear_storage (target, expr_size (exp));
4514 cleared = 1;
4517 /* If we are building a static constructor into a register,
4518 set the initial value as zero so we can fold the value into
4519 a constant. But if more than one register is involved,
4520 this probably loses. */
4521 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4522 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4524 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4525 cleared = 1;
4528 /* If the constructor has fewer fields than the structure
4529 or if we are initializing the structure to mostly zeros,
4530 clear the whole structure first. Don't do this if TARGET is a
4531 register whose mode size isn't equal to SIZE since clear_storage
4532 can't handle this case. */
4533 else if (! cleared && size > 0
4534 && ((list_length (CONSTRUCTOR_ELTS (exp))
4535 != fields_length (type))
4536 || mostly_zeros_p (exp))
4537 && (GET_CODE (target) != REG
4538 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4539 == size)))
4541 clear_storage (target, GEN_INT (size));
4542 cleared = 1;
4545 if (! cleared)
4546 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4548 /* Store each element of the constructor into
4549 the corresponding field of TARGET. */
4551 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4553 tree field = TREE_PURPOSE (elt);
4554 tree value = TREE_VALUE (elt);
4555 enum machine_mode mode;
4556 HOST_WIDE_INT bitsize;
4557 HOST_WIDE_INT bitpos = 0;
4558 int unsignedp;
4559 tree offset;
4560 rtx to_rtx = target;
4562 /* Just ignore missing fields.
4563 We cleared the whole structure, above,
4564 if any fields are missing. */
4565 if (field == 0)
4566 continue;
4568 if (cleared && is_zeros_p (value))
4569 continue;
4571 if (host_integerp (DECL_SIZE (field), 1))
4572 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4573 else
4574 bitsize = -1;
4576 unsignedp = TREE_UNSIGNED (field);
4577 mode = DECL_MODE (field);
4578 if (DECL_BIT_FIELD (field))
4579 mode = VOIDmode;
4581 offset = DECL_FIELD_OFFSET (field);
4582 if (host_integerp (offset, 0)
4583 && host_integerp (bit_position (field), 0))
4585 bitpos = int_bit_position (field);
4586 offset = 0;
4588 else
4589 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4591 if (offset)
4593 rtx offset_rtx;
4595 if (contains_placeholder_p (offset))
4596 offset = build (WITH_RECORD_EXPR, sizetype,
4597 offset, make_tree (TREE_TYPE (exp), target));
4599 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4600 if (GET_CODE (to_rtx) != MEM)
4601 abort ();
4603 #ifdef POINTERS_EXTEND_UNSIGNED
4604 if (GET_MODE (offset_rtx) != Pmode)
4605 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4606 #else
4607 if (GET_MODE (offset_rtx) != ptr_mode)
4608 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4609 #endif
4611 to_rtx = offset_address (to_rtx, offset_rtx,
4612 highest_pow2_factor (offset));
4615 if (TREE_READONLY (field))
4617 if (GET_CODE (to_rtx) == MEM)
4618 to_rtx = copy_rtx (to_rtx);
4620 RTX_UNCHANGING_P (to_rtx) = 1;
4623 #ifdef WORD_REGISTER_OPERATIONS
4624 /* If this initializes a field that is smaller than a word, at the
4625 start of a word, try to widen it to a full word.
4626 This special case allows us to output C++ member function
4627 initializations in a form that the optimizers can understand. */
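/* E.g. (illustration, assuming a 32-bit word): storing the constant 5
   into a 16-bit integer field at bit 0 of a REG target becomes a
   full-word store; on a big-endian machine the value is first shifted
   left by BITS_PER_WORD - bitsize = 16 so it lands in the right half. */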
4628 if (GET_CODE (target) == REG
4629 && bitsize < BITS_PER_WORD
4630 && bitpos % BITS_PER_WORD == 0
4631 && GET_MODE_CLASS (mode) == MODE_INT
4632 && TREE_CODE (value) == INTEGER_CST
4633 && exp_size >= 0
4634 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4636 tree type = TREE_TYPE (value);
4638 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4640 type = (*lang_hooks.types.type_for_size)
4641 (BITS_PER_WORD, TREE_UNSIGNED (type));
4642 value = convert (type, value);
4645 if (BYTES_BIG_ENDIAN)
4646 value
4647 = fold (build (LSHIFT_EXPR, type, value,
4648 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4649 bitsize = BITS_PER_WORD;
4650 mode = word_mode;
4652 #endif
4654 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4655 && DECL_NONADDRESSABLE_P (field))
4657 to_rtx = copy_rtx (to_rtx);
4658 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4661 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4662 value, type, cleared,
4663 get_alias_set (TREE_TYPE (field)));
4666 else if (TREE_CODE (type) == ARRAY_TYPE
4667 || TREE_CODE (type) == VECTOR_TYPE)
4669 tree elt;
4670 int i;
4671 int need_to_clear;
4672 tree domain = TYPE_DOMAIN (type);
4673 tree elttype = TREE_TYPE (type);
4674 int const_bounds_p;
4675 HOST_WIDE_INT minelt = 0;
4676 HOST_WIDE_INT maxelt = 0;
4678 /* Vectors are like arrays, but the domain is stored via an array
4679 type indirectly. */
4680 if (TREE_CODE (type) == VECTOR_TYPE)
4682 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4683 the same field as TYPE_DOMAIN, we are not guaranteed that
4684 it always will. */
4685 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4686 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4689 const_bounds_p = (TYPE_MIN_VALUE (domain)
4690 && TYPE_MAX_VALUE (domain)
4691 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4692 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4694 /* If we have constant bounds for the range of the type, get them. */
4695 if (const_bounds_p)
4697 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4698 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4701 /* If the constructor has fewer elements than the array,
4702 clear the whole array first. Similarly if this is
4703 a static constructor of a non-BLKmode object. */
4704 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4705 need_to_clear = 1;
4706 else
4708 HOST_WIDE_INT count = 0, zero_count = 0;
4709 need_to_clear = ! const_bounds_p;
4711 /* This loop is a more accurate version of the loop in
4712 mostly_zeros_p (it handles RANGE_EXPR in an index).
4713 It is also needed to check for missing elements. */
4714 for (elt = CONSTRUCTOR_ELTS (exp);
4715 elt != NULL_TREE && ! need_to_clear;
4716 elt = TREE_CHAIN (elt))
4718 tree index = TREE_PURPOSE (elt);
4719 HOST_WIDE_INT this_node_count;
4721 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4723 tree lo_index = TREE_OPERAND (index, 0);
4724 tree hi_index = TREE_OPERAND (index, 1);
4726 if (! host_integerp (lo_index, 1)
4727 || ! host_integerp (hi_index, 1))
4729 need_to_clear = 1;
4730 break;
4733 this_node_count = (tree_low_cst (hi_index, 1)
4734 - tree_low_cst (lo_index, 1) + 1);
4736 else
4737 this_node_count = 1;
4739 count += this_node_count;
4740 if (mostly_zeros_p (TREE_VALUE (elt)))
4741 zero_count += this_node_count;
4744 /* Clear the entire array first if there are any missing elements,
4745 or if the incidence of zero elements is >= 75%. */
4746 if (! need_to_clear
4747 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4748 need_to_clear = 1;
4751 if (need_to_clear && size > 0)
4753 if (! cleared)
4755 if (REG_P (target))
4756 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4757 else
4758 clear_storage (target, GEN_INT (size));
4760 cleared = 1;
4762 else if (REG_P (target))
4763 /* Inform later passes that the old value is dead. */
4764 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4766 /* Store each element of the constructor into
4767 the corresponding element of TARGET, determined
4768 by counting the elements. */
4769 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4770 elt;
4771 elt = TREE_CHAIN (elt), i++)
4773 enum machine_mode mode;
4774 HOST_WIDE_INT bitsize;
4775 HOST_WIDE_INT bitpos;
4776 int unsignedp;
4777 tree value = TREE_VALUE (elt);
4778 tree index = TREE_PURPOSE (elt);
4779 rtx xtarget = target;
4781 if (cleared && is_zeros_p (value))
4782 continue;
4784 unsignedp = TREE_UNSIGNED (elttype);
4785 mode = TYPE_MODE (elttype);
4786 if (mode == BLKmode)
4787 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4788 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4789 : -1);
4790 else
4791 bitsize = GET_MODE_BITSIZE (mode);
4793 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4795 tree lo_index = TREE_OPERAND (index, 0);
4796 tree hi_index = TREE_OPERAND (index, 1);
4797 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4798 struct nesting *loop;
4799 HOST_WIDE_INT lo, hi, count;
4800 tree position;
4802 /* If the range is constant and "small", unroll the loop. */
4803 if (const_bounds_p
4804 && host_integerp (lo_index, 0)
4805 && host_integerp (hi_index, 0)
4806 && (lo = tree_low_cst (lo_index, 0),
4807 hi = tree_low_cst (hi_index, 0),
4808 count = hi - lo + 1,
4809 (GET_CODE (target) != MEM
4810 || count <= 2
4811 || (host_integerp (TYPE_SIZE (elttype), 1)
4812 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4813 <= 40 * 8)))))
4815 lo -= minelt; hi -= minelt;
4816 for (; lo <= hi; lo++)
4818 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4820 if (GET_CODE (target) == MEM
4821 && !MEM_KEEP_ALIAS_SET_P (target)
4822 && TREE_CODE (type) == ARRAY_TYPE
4823 && TYPE_NONALIASED_COMPONENT (type))
4825 target = copy_rtx (target);
4826 MEM_KEEP_ALIAS_SET_P (target) = 1;
4829 store_constructor_field
4830 (target, bitsize, bitpos, mode, value, type, cleared,
4831 get_alias_set (elttype));
4834 else
4836 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4837 loop_top = gen_label_rtx ();
4838 loop_end = gen_label_rtx ();
4840 unsignedp = TREE_UNSIGNED (domain);
4842 index = build_decl (VAR_DECL, NULL_TREE, domain);
4844 index_r
4845 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4846 &unsignedp, 0));
4847 SET_DECL_RTL (index, index_r);
4848 if (TREE_CODE (value) == SAVE_EXPR
4849 && SAVE_EXPR_RTL (value) == 0)
4851 /* Make sure value gets expanded once before the
4852 loop. */
4853 expand_expr (value, const0_rtx, VOIDmode, 0);
4854 emit_queue ();
4856 store_expr (lo_index, index_r, 0);
4857 loop = expand_start_loop (0);
4859 /* Assign value to element index. */
4860 position
4861 = convert (ssizetype,
4862 fold (build (MINUS_EXPR, TREE_TYPE (index),
4863 index, TYPE_MIN_VALUE (domain))));
4864 position = size_binop (MULT_EXPR, position,
4865 convert (ssizetype,
4866 TYPE_SIZE_UNIT (elttype)));
4868 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4869 xtarget = offset_address (target, pos_rtx,
4870 highest_pow2_factor (position));
4871 xtarget = adjust_address (xtarget, mode, 0);
4872 if (TREE_CODE (value) == CONSTRUCTOR)
4873 store_constructor (value, xtarget, cleared,
4874 bitsize / BITS_PER_UNIT);
4875 else
4876 store_expr (value, xtarget, 0);
4878 expand_exit_loop_if_false (loop,
4879 build (LT_EXPR, integer_type_node,
4880 index, hi_index));
4882 expand_increment (build (PREINCREMENT_EXPR,
4883 TREE_TYPE (index),
4884 index, integer_one_node), 0, 0);
4885 expand_end_loop ();
4886 emit_label (loop_end);
4889 else if ((index != 0 && ! host_integerp (index, 0))
4890 || ! host_integerp (TYPE_SIZE (elttype), 1))
4892 tree position;
4894 if (index == 0)
4895 index = ssize_int (1);
4897 if (minelt)
4898 index = convert (ssizetype,
4899 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4900 TYPE_MIN_VALUE (domain))));
4902 position = size_binop (MULT_EXPR, index,
4903 convert (ssizetype,
4904 TYPE_SIZE_UNIT (elttype)));
4905 xtarget = offset_address (target,
4906 expand_expr (position, 0, VOIDmode, 0),
4907 highest_pow2_factor (position));
4908 xtarget = adjust_address (xtarget, mode, 0);
4909 store_expr (value, xtarget, 0);
4911 else
4913 if (index != 0)
4914 bitpos = ((tree_low_cst (index, 0) - minelt)
4915 * tree_low_cst (TYPE_SIZE (elttype), 1));
4916 else
4917 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4919 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4920 && TREE_CODE (type) == ARRAY_TYPE
4921 && TYPE_NONALIASED_COMPONENT (type))
4923 target = copy_rtx (target);
4924 MEM_KEEP_ALIAS_SET_P (target) = 1;
4927 store_constructor_field (target, bitsize, bitpos, mode, value,
4928 type, cleared, get_alias_set (elttype));
4934 /* Set constructor assignments. */
4935 else if (TREE_CODE (type) == SET_TYPE)
4937 tree elt = CONSTRUCTOR_ELTS (exp);
4938 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4939 tree domain = TYPE_DOMAIN (type);
4940 tree domain_min, domain_max, bitlength;
4942 /* The default implementation strategy is to extract the constant
4943 parts of the constructor, use that to initialize the target,
4944 and then "or" in whatever non-constant ranges we need in addition.
4946 If a large set is all zero or all ones, it is
4947 probably better to set it using memset (if available) or bzero.
4948 Also, if a large set has just a single range, it may also be
4949 better to first clear the whole set (using
4950 bzero/memset) and then set the bits we want. */
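/* Illustrative only: for a set type (e.g. from the CHILL or Pascal front
   ends) with domain 0..31 initialized to the constant elements { 2, 5 },
   nbits is 32, the bits are collected into one host word below and
   stored with a single emit_move_insn; a non-constant range [lo..hi]
   would instead go through the __setbits/memset path further down.  */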
4952 /* Check for all zeros. */
4953 if (elt == NULL_TREE && size > 0)
4955 if (!cleared)
4956 clear_storage (target, GEN_INT (size));
4957 return;
4960 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4961 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4962 bitlength = size_binop (PLUS_EXPR,
4963 size_diffop (domain_max, domain_min),
4964 ssize_int (1));
4966 nbits = tree_low_cst (bitlength, 1);
4968 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4969 are "complicated" (more than one range), initialize (the
4970 constant parts) by copying from a constant. */
4971 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4972 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4974 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4975 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4976 char *bit_buffer = (char *) alloca (nbits);
4977 HOST_WIDE_INT word = 0;
4978 unsigned int bit_pos = 0;
4979 unsigned int ibit = 0;
4980 unsigned int offset = 0; /* In bytes from beginning of set. */
4982 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4983 for (;;)
4985 if (bit_buffer[ibit])
4987 if (BYTES_BIG_ENDIAN)
4988 word |= (1 << (set_word_size - 1 - bit_pos));
4989 else
4990 word |= 1 << bit_pos;
4993 bit_pos++; ibit++;
4994 if (bit_pos >= set_word_size || ibit == nbits)
4996 if (word != 0 || ! cleared)
4998 rtx datum = GEN_INT (word);
4999 rtx to_rtx;
5001 /* The assumption here is that it is safe to use
5002 XEXP if the set is multi-word, but not if
5003 it's single-word. */
5004 if (GET_CODE (target) == MEM)
5005 to_rtx = adjust_address (target, mode, offset);
5006 else if (offset == 0)
5007 to_rtx = target;
5008 else
5009 abort ();
5010 emit_move_insn (to_rtx, datum);
5013 if (ibit == nbits)
5014 break;
5015 word = 0;
5016 bit_pos = 0;
5017 offset += set_word_size / BITS_PER_UNIT;
5021 else if (!cleared)
5022 /* Don't bother clearing storage if the set is all ones. */
5023 if (TREE_CHAIN (elt) != NULL_TREE
5024 || (TREE_PURPOSE (elt) == NULL_TREE
5025 ? nbits != 1
5026 : ( ! host_integerp (TREE_VALUE (elt), 0)
5027 || ! host_integerp (TREE_PURPOSE (elt), 0)
5028 || (tree_low_cst (TREE_VALUE (elt), 0)
5029 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5030 != (HOST_WIDE_INT) nbits))))
5031 clear_storage (target, expr_size (exp));
5033 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5035 /* Start of range of element or NULL. */
5036 tree startbit = TREE_PURPOSE (elt);
5037 /* End of range of element, or element value. */
5038 tree endbit = TREE_VALUE (elt);
5039 #ifdef TARGET_MEM_FUNCTIONS
5040 HOST_WIDE_INT startb, endb;
5041 #endif
5042 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5044 bitlength_rtx = expand_expr (bitlength,
5045 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5047 /* Handle non-range tuple element like [ expr ]. */
5048 if (startbit == NULL_TREE)
5050 startbit = save_expr (endbit);
5051 endbit = startbit;
5054 startbit = convert (sizetype, startbit);
5055 endbit = convert (sizetype, endbit);
5056 if (! integer_zerop (domain_min))
5058 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5059 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5061 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5062 EXPAND_CONST_ADDRESS);
5063 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5064 EXPAND_CONST_ADDRESS);
5066 if (REG_P (target))
5068 targetx
5069 = assign_temp
5070 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5071 (GET_MODE (target), 0),
5072 TYPE_QUAL_CONST)),
5073 0, 1, 1);
5074 emit_move_insn (targetx, target);
5077 else if (GET_CODE (target) == MEM)
5078 targetx = target;
5079 else
5080 abort ();
5082 #ifdef TARGET_MEM_FUNCTIONS
5083 /* Optimization: If startbit and endbit are
5084 constants divisible by BITS_PER_UNIT,
5085 call memset instead. */
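/* E.g. (illustration): a constant range covering bits 8..23 gives
   startb = 8 and endb = 24, both multiples of BITS_PER_UNIT, so the two
   bytes at offset 1 are simply set to all-ones with memset.  */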
5086 if (TREE_CODE (startbit) == INTEGER_CST
5087 && TREE_CODE (endbit) == INTEGER_CST
5088 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5089 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5091 emit_library_call (memset_libfunc, LCT_NORMAL,
5092 VOIDmode, 3,
5093 plus_constant (XEXP (targetx, 0),
5094 startb / BITS_PER_UNIT),
5095 Pmode,
5096 constm1_rtx, TYPE_MODE (integer_type_node),
5097 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5098 TYPE_MODE (sizetype));
5100 else
5101 #endif
5102 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5103 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5104 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5105 startbit_rtx, TYPE_MODE (sizetype),
5106 endbit_rtx, TYPE_MODE (sizetype));
5108 if (REG_P (target))
5109 emit_move_insn (target, targetx);
5113 else
5114 abort ();
5117 /* Store the value of EXP (an expression tree)
5118 into a subfield of TARGET which has mode MODE and occupies
5119 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5120 If MODE is VOIDmode, it means that we are storing into a bit-field.
5122 If VALUE_MODE is VOIDmode, return nothing in particular.
5123 UNSIGNEDP is not used in this case.
5125 Otherwise, return an rtx for the value stored. This rtx
5126 has mode VALUE_MODE if that is convenient to do.
5127 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5129 TYPE is the type of the underlying object.
5131 ALIAS_SET is the alias set for the destination. This value will
5132 (in general) be different from that for TARGET, since TARGET is a
5133 reference to the containing structure. */
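/* Sketch of a typical call (illustrative, arguments abbreviated):
   storing EXP into a 5-bit field that starts 3 bits into TARGET would
   look like

	store_field (target, 5, 3, VOIDmode, exp, ...);

   MODE == VOIDmode marks it as a bit-field store, so store_bit_field is
   used rather than an ordinary memory reference.  */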
5135 static rtx
5136 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5137 alias_set)
5138 rtx target;
5139 HOST_WIDE_INT bitsize;
5140 HOST_WIDE_INT bitpos;
5141 enum machine_mode mode;
5142 tree exp;
5143 enum machine_mode value_mode;
5144 int unsignedp;
5145 tree type;
5146 int alias_set;
5148 HOST_WIDE_INT width_mask = 0;
5150 if (TREE_CODE (exp) == ERROR_MARK)
5151 return const0_rtx;
5153 /* If we have nothing to store, do nothing unless the expression has
5154 side-effects. */
5155 if (bitsize == 0)
5156 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5157 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5158 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5160 /* If we are storing into an unaligned field of an aligned union that is
5161 in a register, we may have the mode of TARGET being an integer mode but
5162 MODE == BLKmode. In that case, get an aligned object whose size and
5163 alignment are the same as TARGET and store TARGET into it (we can avoid
5164 the store if the field being stored is the entire width of TARGET). Then
5165 call ourselves recursively to store the field into a BLKmode version of
5166 that object. Finally, load from the object into TARGET. This is not
5167 very efficient in general, but should only be slightly more expensive
5168 than the otherwise-required unaligned accesses. Perhaps this can be
5169 cleaned up later. */
5171 if (mode == BLKmode
5172 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5174 rtx object
5175 = assign_temp
5176 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5177 0, 1, 1);
5178 rtx blk_object = adjust_address (object, BLKmode, 0);
5180 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5181 emit_move_insn (object, target);
5183 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5184 alias_set);
5186 emit_move_insn (target, object);
5188 /* We want to return the BLKmode version of the data. */
5189 return blk_object;
5192 if (GET_CODE (target) == CONCAT)
5194 /* We're storing into a struct containing a single __complex. */
5196 if (bitpos != 0)
5197 abort ();
5198 return store_expr (exp, target, 0);
5201 /* If the structure is in a register or if the component
5202 is a bit field, we cannot use addressing to access it.
5203 Use bit-field techniques or SUBREG to store in it. */
5205 if (mode == VOIDmode
5206 || (mode != BLKmode && ! direct_store[(int) mode]
5207 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5208 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5209 || GET_CODE (target) == REG
5210 || GET_CODE (target) == SUBREG
5211 /* If the field isn't aligned enough to store as an ordinary memref,
5212 store it as a bit field. */
5213 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5214 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5215 || bitpos % GET_MODE_ALIGNMENT (mode)))
5216 /* If the RHS and field are a constant size and the size of the
5217 RHS isn't the same size as the bitfield, we must use bitfield
5218 operations. */
5219 || (bitsize >= 0
5220 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5221 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5223 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5225 /* If BITSIZE is narrower than the size of the type of EXP
5226 we will be narrowing TEMP. Normally, what's wanted are the
5227 low-order bits. However, if EXP's type is a record and this is
5228 a big-endian machine, we want the upper BITSIZE bits. */
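/* For instance (illustration): narrowing a 32-bit record value to a
   16-bit field on a big-endian target shifts TEMP right by
   32 - 16 = 16 bits so that the upper half, not the lower, is kept.  */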
5229 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5230 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5231 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5232 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5233 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5234 - bitsize),
5235 temp, 1);
5237 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5238 MODE. */
5239 if (mode != VOIDmode && mode != BLKmode
5240 && mode != TYPE_MODE (TREE_TYPE (exp)))
5241 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5243 /* If the modes of TARGET and TEMP are both BLKmode, both
5244 must be in memory and BITPOS must be aligned on a byte
5245 boundary. If so, we simply do a block copy. */
5246 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5248 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5249 || bitpos % BITS_PER_UNIT != 0)
5250 abort ();
5252 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5253 emit_block_move (target, temp,
5254 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5255 / BITS_PER_UNIT));
5257 return value_mode == VOIDmode ? const0_rtx : target;
5260 /* Store the value in the bitfield. */
5261 store_bit_field (target, bitsize, bitpos, mode, temp,
5262 int_size_in_bytes (type));
5264 if (value_mode != VOIDmode)
5266 /* The caller wants an rtx for the value.
5267 If possible, avoid refetching from the bitfield itself. */
5268 if (width_mask != 0
5269 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5271 tree count;
5272 enum machine_mode tmode;
5274 tmode = GET_MODE (temp);
5275 if (tmode == VOIDmode)
5276 tmode = value_mode;
5278 if (unsignedp)
5279 return expand_and (tmode, temp,
5280 gen_int_mode (width_mask, tmode),
5281 NULL_RTX);
5283 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5284 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5285 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5288 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5289 NULL_RTX, value_mode, VOIDmode,
5290 int_size_in_bytes (type));
5292 return const0_rtx;
5294 else
5296 rtx addr = XEXP (target, 0);
5297 rtx to_rtx = target;
5299 /* If a value is wanted, it must be the lhs;
5300 so make the address stable for multiple use. */
5302 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5303 && ! CONSTANT_ADDRESS_P (addr)
5304 /* A frame-pointer reference is already stable. */
5305 && ! (GET_CODE (addr) == PLUS
5306 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5307 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5308 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5309 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5311 /* Now build a reference to just the desired component. */
5313 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5315 if (to_rtx == target)
5316 to_rtx = copy_rtx (to_rtx);
5318 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5319 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5320 set_mem_alias_set (to_rtx, alias_set);
5322 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5326 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5327 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5328 codes and find the ultimate containing object, which we return.
5330 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5331 bit position, and *PUNSIGNEDP to the signedness of the field.
5332 If the position of the field is variable, we store a tree
5333 giving the variable offset (in units) in *POFFSET.
5334 This offset is in addition to the bit position.
5335 If the position is not variable, we store 0 in *POFFSET.
5337 If any of the extraction expressions is volatile,
5338 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5340 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5341 is a mode that can be used to access the field. In that case, *PBITSIZE
5342 is redundant.
5344 If the field describes a variable-sized object, *PMODE is set to
5345 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5346 this case, but the address of the object can be found. */
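/* Worked example (hypothetical declarations): for the reference R.F,
   where F is a bit-field of width 9 that starts 35 bits into R,
   get_inner_reference returns R with *PBITSIZE = 9, *PBITPOS = 35,
   *POFFSET = 0 and *PMODE = VOIDmode (a bit-field access).  */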
5348 tree
5349 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5350 punsignedp, pvolatilep)
5351 tree exp;
5352 HOST_WIDE_INT *pbitsize;
5353 HOST_WIDE_INT *pbitpos;
5354 tree *poffset;
5355 enum machine_mode *pmode;
5356 int *punsignedp;
5357 int *pvolatilep;
5359 tree size_tree = 0;
5360 enum machine_mode mode = VOIDmode;
5361 tree offset = size_zero_node;
5362 tree bit_offset = bitsize_zero_node;
5363 tree placeholder_ptr = 0;
5364 tree tem;
5366 /* First get the mode, signedness, and size. We do this from just the
5367 outermost expression. */
5368 if (TREE_CODE (exp) == COMPONENT_REF)
5370 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5371 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5372 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5374 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5376 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5378 size_tree = TREE_OPERAND (exp, 1);
5379 *punsignedp = TREE_UNSIGNED (exp);
5381 else
5383 mode = TYPE_MODE (TREE_TYPE (exp));
5384 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5386 if (mode == BLKmode)
5387 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5388 else
5389 *pbitsize = GET_MODE_BITSIZE (mode);
5392 if (size_tree != 0)
5394 if (! host_integerp (size_tree, 1))
5395 mode = BLKmode, *pbitsize = -1;
5396 else
5397 *pbitsize = tree_low_cst (size_tree, 1);
5400 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5401 and find the ultimate containing object. */
5402 while (1)
5404 if (TREE_CODE (exp) == BIT_FIELD_REF)
5405 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5406 else if (TREE_CODE (exp) == COMPONENT_REF)
5408 tree field = TREE_OPERAND (exp, 1);
5409 tree this_offset = DECL_FIELD_OFFSET (field);
5411 /* If this field hasn't been filled in yet, don't go
5412 past it. This should only happen when folding expressions
5413 made during type construction. */
5414 if (this_offset == 0)
5415 break;
5416 else if (! TREE_CONSTANT (this_offset)
5417 && contains_placeholder_p (this_offset))
5418 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5420 offset = size_binop (PLUS_EXPR, offset, this_offset);
5421 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5422 DECL_FIELD_BIT_OFFSET (field));
5424 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5427 else if (TREE_CODE (exp) == ARRAY_REF
5428 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5430 tree index = TREE_OPERAND (exp, 1);
5431 tree array = TREE_OPERAND (exp, 0);
5432 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5433 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5434 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5436 /* We assume all arrays have sizes that are a multiple of a byte.
5437 First subtract the lower bound, if any, in the type of the
5438 index, then convert to sizetype and multiply by the size of the
5439 array element. */
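/* E.g. (illustration): for an array declared with bounds 3..10 and
   4-byte elements, the reference A[I] contributes (I - 3) * 4 bytes to
   OFFSET, or folds into *PBITPOS when I is constant.  */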
5440 if (low_bound != 0 && ! integer_zerop (low_bound))
5441 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5442 index, low_bound));
5444 /* If the index has a self-referential type, pass it to a
5445 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5446 component to one. */
5447 if (! TREE_CONSTANT (index)
5448 && contains_placeholder_p (index))
5449 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5450 if (! TREE_CONSTANT (unit_size)
5451 && contains_placeholder_p (unit_size))
5452 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5454 offset = size_binop (PLUS_EXPR, offset,
5455 size_binop (MULT_EXPR,
5456 convert (sizetype, index),
5457 unit_size));
5460 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5462 tree new = find_placeholder (exp, &placeholder_ptr);
5464 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5465 We might have been called from tree optimization where we
5466 haven't set up an object yet. */
5467 if (new == 0)
5468 break;
5469 else
5470 exp = new;
5472 continue;
5474 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5475 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5476 && ! ((TREE_CODE (exp) == NOP_EXPR
5477 || TREE_CODE (exp) == CONVERT_EXPR)
5478 && (TYPE_MODE (TREE_TYPE (exp))
5479 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5480 break;
5482 /* If any reference in the chain is volatile, the effect is volatile. */
5483 if (TREE_THIS_VOLATILE (exp))
5484 *pvolatilep = 1;
5486 exp = TREE_OPERAND (exp, 0);
5489 /* If OFFSET is constant, see if we can return the whole thing as a
5490 constant bit position. Otherwise, split it up. */
5491 if (host_integerp (offset, 0)
5492 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5493 bitsize_unit_node))
5494 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5495 && host_integerp (tem, 0))
5496 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5497 else
5498 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5500 *pmode = mode;
5501 return exp;
5504 /* Return 1 if T is an expression that get_inner_reference handles. */
5507 handled_component_p (t)
5508 tree t;
5510 switch (TREE_CODE (t))
5512 case BIT_FIELD_REF:
5513 case COMPONENT_REF:
5514 case ARRAY_REF:
5515 case ARRAY_RANGE_REF:
5516 case NON_LVALUE_EXPR:
5517 case VIEW_CONVERT_EXPR:
5518 return 1;
5520 case NOP_EXPR:
5521 case CONVERT_EXPR:
5522 return (TYPE_MODE (TREE_TYPE (t))
5523 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5525 default:
5526 return 0;
5530 /* Given an rtx VALUE that may contain additions and multiplications, return
5531 an equivalent value that just refers to a register, memory, or constant.
5532 This is done by generating instructions to perform the arithmetic and
5533 returning a pseudo-register containing the value.
5535 The returned value may be a REG, SUBREG, MEM or constant. */
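/* Illustration (RTL shown informally): given

	(plus (reg 60) (const_int 4))

   force_operand emits an add insn and returns a pseudo (or TARGET)
   holding the sum, so callers always see a "simple" operand.  */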
5538 force_operand (value, target)
5539 rtx value, target;
5541 rtx op1, op2;
5542 /* Use subtarget as the target for operand 0 of a binary operation. */
5543 rtx subtarget = get_subtarget (target);
5544 enum rtx_code code = GET_CODE (value);
5546 /* Check for a PIC address load. */
5547 if ((code == PLUS || code == MINUS)
5548 && XEXP (value, 0) == pic_offset_table_rtx
5549 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5550 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5551 || GET_CODE (XEXP (value, 1)) == CONST))
5553 if (!subtarget)
5554 subtarget = gen_reg_rtx (GET_MODE (value));
5555 emit_move_insn (subtarget, value);
5556 return subtarget;
5559 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5561 if (!target)
5562 target = gen_reg_rtx (GET_MODE (value));
5563 convert_move (target, force_operand (XEXP (value, 0), NULL),
5564 code == ZERO_EXTEND);
5565 return target;
5568 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5570 op2 = XEXP (value, 1);
5571 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5572 subtarget = 0;
5573 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5575 code = PLUS;
5576 op2 = negate_rtx (GET_MODE (value), op2);
5579 /* Check for an addition with OP2 a constant integer and our first
5580 operand a PLUS of a virtual register and something else. In that
5581 case, we want to emit the sum of the virtual register and the
5582 constant first and then add the other value. This allows virtual
5583 register instantiation to simply modify the constant rather than
5584 creating another one around this addition. */
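/* E.g. (illustration): for

	(plus (plus (reg virtual-stack-vars) (reg 66)) (const_int 8))

   the sum virtual-stack-vars + 8 is formed first, so instantiation can
   later rewrite it as a single frame-pointer offset, and (reg 66) is
   added afterwards.  */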
5585 if (code == PLUS && GET_CODE (op2) == CONST_INT
5586 && GET_CODE (XEXP (value, 0)) == PLUS
5587 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5588 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5589 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5591 rtx temp = expand_simple_binop (GET_MODE (value), code,
5592 XEXP (XEXP (value, 0), 0), op2,
5593 subtarget, 0, OPTAB_LIB_WIDEN);
5594 return expand_simple_binop (GET_MODE (value), code, temp,
5595 force_operand (XEXP (XEXP (value,
5596 0), 1), 0),
5597 target, 0, OPTAB_LIB_WIDEN);
5600 op1 = force_operand (XEXP (value, 0), subtarget);
5601 op2 = force_operand (op2, NULL_RTX);
5602 switch (code)
5604 case MULT:
5605 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5606 case DIV:
5607 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5608 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5609 target, 1, OPTAB_LIB_WIDEN);
5610 else
5611 return expand_divmod (0,
5612 FLOAT_MODE_P (GET_MODE (value))
5613 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5614 GET_MODE (value), op1, op2, target, 0);
5615 break;
5616 case MOD:
5617 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5618 target, 0);
5619 break;
5620 case UDIV:
5621 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5622 target, 1);
5623 break;
5624 case UMOD:
5625 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5626 target, 1);
5627 break;
5628 case ASHIFTRT:
5629 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5630 target, 0, OPTAB_LIB_WIDEN);
5631 break;
5632 default:
5633 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5634 target, 1, OPTAB_LIB_WIDEN);
5637 if (GET_RTX_CLASS (code) == '1')
5639 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5640 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5643 #ifdef INSN_SCHEDULING
5644 /* On machines that have insn scheduling, we want all memory references to be
5645 explicit, so we need to deal with such paradoxical SUBREGs. */
5646 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5647 && (GET_MODE_SIZE (GET_MODE (value))
5648 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5649 value
5650 = simplify_gen_subreg (GET_MODE (value),
5651 force_reg (GET_MODE (SUBREG_REG (value)),
5652 force_operand (SUBREG_REG (value),
5653 NULL_RTX)),
5654 GET_MODE (SUBREG_REG (value)),
5655 SUBREG_BYTE (value));
5656 #endif
5658 return value;
5661 /* Subroutine of expand_expr: return nonzero iff there is no way that
5662 EXP can reference X, which is being modified. TOP_P is nonzero if this
5663 call is going to be used to determine whether we need a temporary
5664 for EXP, as opposed to a recursive call to this function.
5666 It is always safe for this routine to return zero since it merely
5667 searches for optimization opportunities. */
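/* A typical use, shown only as a sketch: before reusing a suggested
   TARGET, callers ask something like

     if (target == 0 || ! safe_from_p (target, exp, 1))
       target = gen_reg_rtx (mode);

   so a conservative zero answer merely costs an extra temporary, never a
   wrong result.  */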
5669 static int
5670 safe_from_p (x, exp, top_p)
5671 rtx x;
5672 tree exp;
5673 int top_p;
5675 rtx exp_rtl = 0;
5676 int i, nops;
5677 static tree save_expr_list;
5679 if (x == 0
5680 /* If EXP has varying size, we MUST use a target since we currently
5681 have no way of allocating temporaries of variable size
5682 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5683 So we assume here that something at a higher level has prevented a
5684 clash. This is somewhat bogus, but the best we can do. Only
5685 do this when X is BLKmode and when we are at the top level. */
5686 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5687 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5688 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5689 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5690 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5691 != INTEGER_CST)
5692 && GET_MODE (x) == BLKmode)
5693 /* If X is in the outgoing argument area, it is always safe. */
5694 || (GET_CODE (x) == MEM
5695 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5696 || (GET_CODE (XEXP (x, 0)) == PLUS
5697 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5698 return 1;
5700 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5701 find the underlying pseudo. */
5702 if (GET_CODE (x) == SUBREG)
5704 x = SUBREG_REG (x);
5705 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5706 return 0;
5709 /* A SAVE_EXPR might appear many times in the expression passed to the
5710 top-level safe_from_p call, and if it has a complex subexpression,
5711 examining it multiple times could result in a combinatorial explosion.
5712 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5713 with optimization took about 28 minutes to compile -- even though it was
5714 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5715 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5716 we have processed. Note that the only test of top_p was above. */
5718 if (top_p)
5720 int rtn;
5721 tree t;
5723 save_expr_list = 0;
5725 rtn = safe_from_p (x, exp, 0);
5727 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5728 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5730 return rtn;
5733 /* Now look at our tree code and possibly recurse. */
5734 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5736 case 'd':
5737 exp_rtl = DECL_RTL_IF_SET (exp);
5738 break;
5740 case 'c':
5741 return 1;
5743 case 'x':
5744 if (TREE_CODE (exp) == TREE_LIST)
5745 return ((TREE_VALUE (exp) == 0
5746 || safe_from_p (x, TREE_VALUE (exp), 0))
5747 && (TREE_CHAIN (exp) == 0
5748 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5749 else if (TREE_CODE (exp) == ERROR_MARK)
5750 return 1; /* An already-visited SAVE_EXPR? */
5751 else
5752 return 0;
5754 case '1':
5755 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5757 case '2':
5758 case '<':
5759 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5760 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5762 case 'e':
5763 case 'r':
5764 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5765 the expression. If it is set, we conflict iff we are that rtx or
5766 both are in memory. Otherwise, we check all operands of the
5767 expression recursively. */
5769 switch (TREE_CODE (exp))
5771 case ADDR_EXPR:
5772 /* If the operand is static or we are static, we can't conflict.
5773 Likewise if we don't conflict with the operand at all. */
5774 if (staticp (TREE_OPERAND (exp, 0))
5775 || TREE_STATIC (exp)
5776 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5777 return 1;
5779 /* Otherwise, the only way this can conflict is if we are taking
5780 the address of a DECL and that address is part of X, which is
5781 very rare. */
5782 exp = TREE_OPERAND (exp, 0);
5783 if (DECL_P (exp))
5785 if (!DECL_RTL_SET_P (exp)
5786 || GET_CODE (DECL_RTL (exp)) != MEM)
5787 return 0;
5788 else
5789 exp_rtl = XEXP (DECL_RTL (exp), 0);
5791 break;
5793 case INDIRECT_REF:
5794 if (GET_CODE (x) == MEM
5795 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5796 get_alias_set (exp)))
5797 return 0;
5798 break;
5800 case CALL_EXPR:
5801 /* Assume that the call will clobber all hard registers and
5802 all of memory. */
5803 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5804 || GET_CODE (x) == MEM)
5805 return 0;
5806 break;
5808 case RTL_EXPR:
5809 /* If a sequence exists, we would have to scan every instruction
5810 in the sequence to see if it was safe. This is probably not
5811 worthwhile. */
5812 if (RTL_EXPR_SEQUENCE (exp))
5813 return 0;
5815 exp_rtl = RTL_EXPR_RTL (exp);
5816 break;
5818 case WITH_CLEANUP_EXPR:
5819 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5820 break;
5822 case CLEANUP_POINT_EXPR:
5823 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5825 case SAVE_EXPR:
5826 exp_rtl = SAVE_EXPR_RTL (exp);
5827 if (exp_rtl)
5828 break;
5830 /* If we've already scanned this, don't do it again. Otherwise,
5831 mark it as scanned and record it so we can clear the flag when
5832 we're done. */
5833 if (TREE_PRIVATE (exp))
5834 return 1;
5836 TREE_PRIVATE (exp) = 1;
5837 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5839 TREE_PRIVATE (exp) = 0;
5840 return 0;
5843 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5844 return 1;
5846 case BIND_EXPR:
5847 /* The only operand we look at is operand 1. The rest aren't
5848 part of the expression. */
5849 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5851 case METHOD_CALL_EXPR:
5852 /* This takes an rtx argument, but shouldn't appear here. */
5853 abort ();
5855 default:
5856 break;
5859 /* If we have an rtx, we do not need to scan our operands. */
5860 if (exp_rtl)
5861 break;
5863 nops = first_rtl_op (TREE_CODE (exp));
5864 for (i = 0; i < nops; i++)
5865 if (TREE_OPERAND (exp, i) != 0
5866 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5867 return 0;
5869 /* If this is a language-specific tree code, it may require
5870 special handling. */
5871 if ((unsigned int) TREE_CODE (exp)
5872 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5873 && !(*lang_hooks.safe_from_p) (x, exp))
5874 return 0;
5877 /* If we have an rtl, find any enclosed object. Then see if we conflict
5878 with it. */
5879 if (exp_rtl)
5881 if (GET_CODE (exp_rtl) == SUBREG)
5883 exp_rtl = SUBREG_REG (exp_rtl);
5884 if (GET_CODE (exp_rtl) == REG
5885 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5886 return 0;
5889 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5890 are memory and they conflict. */
5891 return ! (rtx_equal_p (x, exp_rtl)
5892 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5893 && true_dependence (exp_rtl, VOIDmode, x,
5894 rtx_addr_varies_p)));
5897 /* If we reach here, it is safe. */
5898 return 1;
5901 /* Subroutine of expand_expr: return rtx if EXP is a
5902 variable or parameter; else return 0. */
5904 static rtx
5905 var_rtx (exp)
5906 tree exp;
5908 STRIP_NOPS (exp);
5909 switch (TREE_CODE (exp))
5911 case PARM_DECL:
5912 case VAR_DECL:
5913 return DECL_RTL (exp);
5914 default:
5915 return 0;
5919 #ifdef MAX_INTEGER_COMPUTATION_MODE
5921 void
5922 check_max_integer_computation_mode (exp)
5923 tree exp;
5925 enum tree_code code;
5926 enum machine_mode mode;
5928 /* Strip any NOPs that don't change the mode. */
5929 STRIP_NOPS (exp);
5930 code = TREE_CODE (exp);
5932 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5933 if (code == NOP_EXPR
5934 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5935 return;
5937 /* First check the type of the overall operation. We need only look at
5938 unary, binary and relational operations. */
5939 if (TREE_CODE_CLASS (code) == '1'
5940 || TREE_CODE_CLASS (code) == '2'
5941 || TREE_CODE_CLASS (code) == '<')
5943 mode = TYPE_MODE (TREE_TYPE (exp));
5944 if (GET_MODE_CLASS (mode) == MODE_INT
5945 && mode > MAX_INTEGER_COMPUTATION_MODE)
5946 internal_error ("unsupported wide integer operation");
5949 /* Check operand of a unary op. */
5950 if (TREE_CODE_CLASS (code) == '1')
5952 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5953 if (GET_MODE_CLASS (mode) == MODE_INT
5954 && mode > MAX_INTEGER_COMPUTATION_MODE)
5955 internal_error ("unsupported wide integer operation");
5958 /* Check operands of a binary/comparison op. */
5959 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5961 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5962 if (GET_MODE_CLASS (mode) == MODE_INT
5963 && mode > MAX_INTEGER_COMPUTATION_MODE)
5964 internal_error ("unsupported wide integer operation");
5966 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5967 if (GET_MODE_CLASS (mode) == MODE_INT
5968 && mode > MAX_INTEGER_COMPUTATION_MODE)
5969 internal_error ("unsupported wide integer operation");
5972 #endif
5974 /* Return the highest power of two that EXP is known to be a multiple of.
5975 This is used in updating alignment of MEMs in array references. */
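/* A small worked example, with values chosen purely for illustration:
   for the tree (i * 12) + 8, the variable i contributes 1, the constant
   12 contributes 4, so the MULT_EXPR gives 1 * 4 = 4; the constant 8
   gives 8; and the PLUS_EXPR takes MIN (4, 8) = 4, so a MEM addressed by
   this expression may be assumed 4-byte aligned.  */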
5977 static HOST_WIDE_INT
5978 highest_pow2_factor (exp)
5979 tree exp;
5981 HOST_WIDE_INT c0, c1;
5983 switch (TREE_CODE (exp))
5985 case INTEGER_CST:
5986 /* We can find the lowest bit that's a one. If the low
5987 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5988 We need to handle this case since we can find it in a COND_EXPR,
5989 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5990 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5991 later ICE. */
5992 if (TREE_CONSTANT_OVERFLOW (exp))
5993 return BIGGEST_ALIGNMENT;
5994 else
5996 /* Note: tree_low_cst is intentionally not used here,
5997 since we don't care about the upper bits. */
5998 c0 = TREE_INT_CST_LOW (exp);
5999 c0 &= -c0;
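/* E.g. 24 & -24 == 8: this isolates the lowest set bit, i.e. the largest
   power of two dividing the (nonzero) constant.  */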
6000 return c0 ? c0 : BIGGEST_ALIGNMENT;
6002 break;
6004 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6005 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6006 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6007 return MIN (c0, c1);
6009 case MULT_EXPR:
6010 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6011 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6012 return c0 * c1;
6014 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6015 case CEIL_DIV_EXPR:
6016 if (integer_pow2p (TREE_OPERAND (exp, 1))
6017 && host_integerp (TREE_OPERAND (exp, 1), 1))
6019 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6020 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6021 return MAX (1, c0 / c1);
6023 break;
6025 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6026 case SAVE_EXPR: case WITH_RECORD_EXPR:
6027 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6029 case COMPOUND_EXPR:
6030 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6032 case COND_EXPR:
6033 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6034 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6035 return MIN (c0, c1);
6037 default:
6038 break;
6041 return 1;
6044 /* Similar, except that it is known that the expression must be a multiple
6045 of the alignment of TYPE. */
6047 static HOST_WIDE_INT
6048 highest_pow2_factor_for_type (type, exp)
6049 tree type;
6050 tree exp;
6052 HOST_WIDE_INT type_align, factor;
6054 factor = highest_pow2_factor (exp);
6055 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6056 return MAX (factor, type_align);
6059 /* Return an object on the placeholder list that matches EXP, a
6060 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6061 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6062 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6063 is a location which initially points to a starting location in the
6064 placeholder list (zero means start of the list) and where a pointer into
6065 the placeholder list at which the object is found is placed. */
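/* An illustrative sketch (the type name is hypothetical): if a size
   expression contains (PLACEHOLDER_EXPR struct_type) and a
   WITH_RECORD_EXPR has put an object REC of struct_type on
   placeholder_list, this returns REC; if only a pointer P to struct_type
   is found, it returns the tree *P built with INDIRECT_REF.  */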
6067 tree
6068 find_placeholder (exp, plist)
6069 tree exp;
6070 tree *plist;
6072 tree type = TREE_TYPE (exp);
6073 tree placeholder_expr;
6075 for (placeholder_expr
6076 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6077 placeholder_expr != 0;
6078 placeholder_expr = TREE_CHAIN (placeholder_expr))
6080 tree need_type = TYPE_MAIN_VARIANT (type);
6081 tree elt;
6083 /* Find the outermost reference that is of the type we want. If none,
6084 see if any object has a type that is a pointer to the type we
6085 want. */
6086 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6087 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6088 || TREE_CODE (elt) == COND_EXPR)
6089 ? TREE_OPERAND (elt, 1)
6090 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6091 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6092 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6093 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6094 ? TREE_OPERAND (elt, 0) : 0))
6095 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6097 if (plist)
6098 *plist = placeholder_expr;
6099 return elt;
6102 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6103 elt
6104 = ((TREE_CODE (elt) == COMPOUND_EXPR
6105 || TREE_CODE (elt) == COND_EXPR)
6106 ? TREE_OPERAND (elt, 1)
6107 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6108 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6109 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6110 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6111 ? TREE_OPERAND (elt, 0) : 0))
6112 if (POINTER_TYPE_P (TREE_TYPE (elt))
6113 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6114 == need_type))
6116 if (plist)
6117 *plist = placeholder_expr;
6118 return build1 (INDIRECT_REF, need_type, elt);
6122 return 0;
6125 /* expand_expr: generate code for computing expression EXP.
6126 An rtx for the computed value is returned. The value is never null.
6127 In the case of a void EXP, const0_rtx is returned.
6129 The value may be stored in TARGET if TARGET is nonzero.
6130 TARGET is just a suggestion; callers must assume that
6131 the rtx returned may not be the same as TARGET.
6133 If TARGET is CONST0_RTX, it means that the value will be ignored.
6135 If TMODE is not VOIDmode, it suggests generating the
6136 result in mode TMODE. But this is done only when convenient.
6137 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6138 TMODE is just a suggestion; callers must assume that
6139 the rtx returned may not have mode TMODE.
6141 Note that TARGET may have neither TMODE nor MODE. In that case, it
6142 probably will not be used.
6144 If MODIFIER is EXPAND_SUM then when EXP is an addition
6145 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6146 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6147 products as above, or REG or MEM, or constant.
6148 Ordinarily in such cases we would output mul or add instructions
6149 and then return a pseudo reg containing the sum.
6151 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6152 it also marks a label as absolutely required (it can't be dead).
6153 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6154 This is used for outputting expressions used in initializers.
6156 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6157 with a constant address even if that address is not normally legitimate.
6158 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
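/* A minimal caller-side sketch, not taken from the original sources:
   most code expands a tree with something like

     rtx op = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   and then copies or converts OP itself rather than assuming the
   suggested TARGET or TMODE was honored; passing const0_rtx as TARGET,
   as described above for ignored values, only guarantees that side
   effects are emitted.  */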
6160 rtx
6161 expand_expr (exp, target, tmode, modifier)
6162 tree exp;
6163 rtx target;
6164 enum machine_mode tmode;
6165 enum expand_modifier modifier;
6167 rtx op0, op1, temp;
6168 tree type = TREE_TYPE (exp);
6169 int unsignedp = TREE_UNSIGNED (type);
6170 enum machine_mode mode;
6171 enum tree_code code = TREE_CODE (exp);
6172 optab this_optab;
6173 rtx subtarget, original_target;
6174 int ignore;
6175 tree context;
6177 /* Handle ERROR_MARK before anybody tries to access its type. */
6178 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6180 op0 = CONST0_RTX (tmode);
6181 if (op0 != 0)
6182 return op0;
6183 return const0_rtx;
6186 mode = TYPE_MODE (type);
6187 /* Use subtarget as the target for operand 0 of a binary operation. */
6188 subtarget = get_subtarget (target);
6189 original_target = target;
6190 ignore = (target == const0_rtx
6191 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6192 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6193 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6194 && TREE_CODE (type) == VOID_TYPE));
6196 /* If we are going to ignore this result, we need only do something
6197 if there is a side-effect somewhere in the expression. If there
6198 is, short-circuit the most common cases here. Note that we must
6199 not call expand_expr with anything but const0_rtx in case this
6200 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6202 if (ignore)
6204 if (! TREE_SIDE_EFFECTS (exp))
6205 return const0_rtx;
6207 /* Ensure we reference a volatile object even if value is ignored, but
6208 don't do this if all we are doing is taking its address. */
6209 if (TREE_THIS_VOLATILE (exp)
6210 && TREE_CODE (exp) != FUNCTION_DECL
6211 && mode != VOIDmode && mode != BLKmode
6212 && modifier != EXPAND_CONST_ADDRESS)
6214 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6215 if (GET_CODE (temp) == MEM)
6216 temp = copy_to_reg (temp);
6217 return const0_rtx;
6220 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6221 || code == INDIRECT_REF || code == BUFFER_REF)
6222 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6223 modifier);
6225 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6226 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6228 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6229 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6230 return const0_rtx;
6232 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6233 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6234 /* If the second operand has no side effects, just evaluate
6235 the first. */
6236 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6237 modifier);
6238 else if (code == BIT_FIELD_REF)
6240 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6241 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6242 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6243 return const0_rtx;
6246 target = 0;
6249 #ifdef MAX_INTEGER_COMPUTATION_MODE
6250 /* Only check stuff here if the mode we want is different from the mode
6251 of the expression; if it's the same, check_max_integer_computation_mode
6252 will handle it. Do we really need to check this stuff at all? */
6254 if (target
6255 && GET_MODE (target) != mode
6256 && TREE_CODE (exp) != INTEGER_CST
6257 && TREE_CODE (exp) != PARM_DECL
6258 && TREE_CODE (exp) != ARRAY_REF
6259 && TREE_CODE (exp) != ARRAY_RANGE_REF
6260 && TREE_CODE (exp) != COMPONENT_REF
6261 && TREE_CODE (exp) != BIT_FIELD_REF
6262 && TREE_CODE (exp) != INDIRECT_REF
6263 && TREE_CODE (exp) != CALL_EXPR
6264 && TREE_CODE (exp) != VAR_DECL
6265 && TREE_CODE (exp) != RTL_EXPR)
6267 enum machine_mode mode = GET_MODE (target);
6269 if (GET_MODE_CLASS (mode) == MODE_INT
6270 && mode > MAX_INTEGER_COMPUTATION_MODE)
6271 internal_error ("unsupported wide integer operation");
6274 if (tmode != mode
6275 && TREE_CODE (exp) != INTEGER_CST
6276 && TREE_CODE (exp) != PARM_DECL
6277 && TREE_CODE (exp) != ARRAY_REF
6278 && TREE_CODE (exp) != ARRAY_RANGE_REF
6279 && TREE_CODE (exp) != COMPONENT_REF
6280 && TREE_CODE (exp) != BIT_FIELD_REF
6281 && TREE_CODE (exp) != INDIRECT_REF
6282 && TREE_CODE (exp) != VAR_DECL
6283 && TREE_CODE (exp) != CALL_EXPR
6284 && TREE_CODE (exp) != RTL_EXPR
6285 && GET_MODE_CLASS (tmode) == MODE_INT
6286 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6287 internal_error ("unsupported wide integer operation");
6289 check_max_integer_computation_mode (exp);
6290 #endif
6292 /* If we will do cse, generate all results into pseudo registers
6293 since 1) that allows cse to find more things
6294 and 2) otherwise cse could produce an insn the machine
6295 cannot support. An exception is a CONSTRUCTOR into a multi-word
6296 MEM: that's much more likely to be most efficient into the MEM. */
6298 if (! cse_not_expected && mode != BLKmode && target
6299 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6300 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6301 target = subtarget;
6303 switch (code)
6305 case LABEL_DECL:
6307 tree function = decl_function_context (exp);
6308 /* Handle using a label in a containing function. */
6309 if (function != current_function_decl
6310 && function != inline_function_decl && function != 0)
6312 struct function *p = find_function_data (function);
6313 p->expr->x_forced_labels
6314 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6315 p->expr->x_forced_labels);
6317 else
6319 if (modifier == EXPAND_INITIALIZER)
6320 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6321 label_rtx (exp),
6322 forced_labels);
6325 temp = gen_rtx_MEM (FUNCTION_MODE,
6326 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6327 if (function != current_function_decl
6328 && function != inline_function_decl && function != 0)
6329 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6330 return temp;
6333 case PARM_DECL:
6334 if (DECL_RTL (exp) == 0)
6336 error_with_decl (exp, "prior parameter's size depends on `%s'");
6337 return CONST0_RTX (mode);
6340 /* ... fall through ... */
6342 case VAR_DECL:
6343 /* If a static var's type was incomplete when the decl was written,
6344 but the type is complete now, lay out the decl now. */
6345 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6346 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6348 rtx value = DECL_RTL_IF_SET (exp);
6350 layout_decl (exp, 0);
6352 /* If the RTL was already set, update its mode and memory
6353 attributes. */
6354 if (value != 0)
6356 PUT_MODE (value, DECL_MODE (exp));
6357 SET_DECL_RTL (exp, 0);
6358 set_mem_attributes (value, exp, 1);
6359 SET_DECL_RTL (exp, value);
6363 /* ... fall through ... */
6365 case FUNCTION_DECL:
6366 case RESULT_DECL:
6367 if (DECL_RTL (exp) == 0)
6368 abort ();
6370 /* Ensure the variable is marked as used even if it doesn't go through
6371 a parser. If it hasn't been used yet, write out an external
6372 definition. */
6373 if (! TREE_USED (exp))
6375 assemble_external (exp);
6376 TREE_USED (exp) = 1;
6379 /* Show we haven't gotten RTL for this yet. */
6380 temp = 0;
6382 /* Handle variables inherited from containing functions. */
6383 context = decl_function_context (exp);
6385 /* We treat inline_function_decl as an alias for the current function
6386 because that is the inline function whose vars, types, etc.
6387 are being merged into the current function.
6388 See expand_inline_function. */
6390 if (context != 0 && context != current_function_decl
6391 && context != inline_function_decl
6392 /* If var is static, we don't need a static chain to access it. */
6393 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6394 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6396 rtx addr;
6398 /* Mark as non-local and addressable. */
6399 DECL_NONLOCAL (exp) = 1;
6400 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6401 abort ();
6402 (*lang_hooks.mark_addressable) (exp);
6403 if (GET_CODE (DECL_RTL (exp)) != MEM)
6404 abort ();
6405 addr = XEXP (DECL_RTL (exp), 0);
6406 if (GET_CODE (addr) == MEM)
6407 addr
6408 = replace_equiv_address (addr,
6409 fix_lexical_addr (XEXP (addr, 0), exp));
6410 else
6411 addr = fix_lexical_addr (addr, exp);
6413 temp = replace_equiv_address (DECL_RTL (exp), addr);
6416 /* This is the case of an array whose size is to be determined
6417 from its initializer, while the initializer is still being parsed.
6418 See expand_decl. */
6420 else if (GET_CODE (DECL_RTL (exp)) == MEM
6421 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6422 temp = validize_mem (DECL_RTL (exp));
6424 /* If DECL_RTL is memory, we are in the normal case; if either
6425 the address is not valid, or it is not a register and -fforce-addr
6426 is specified, get the address into a register. */
6428 else if (GET_CODE (DECL_RTL (exp)) == MEM
6429 && modifier != EXPAND_CONST_ADDRESS
6430 && modifier != EXPAND_SUM
6431 && modifier != EXPAND_INITIALIZER
6432 && (! memory_address_p (DECL_MODE (exp),
6433 XEXP (DECL_RTL (exp), 0))
6434 || (flag_force_addr
6435 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6436 temp = replace_equiv_address (DECL_RTL (exp),
6437 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6439 /* If we got something, return it. But first, set the alignment
6440 if the address is a register. */
6441 if (temp != 0)
6443 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6444 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6446 return temp;
6449 /* If the mode of DECL_RTL does not match that of the decl, it
6450 must be a promoted value. We return a SUBREG of the wanted mode,
6451 but mark it so that we know that it was already extended. */
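/* For instance, with an illustrative register number: a QImode variable
   promoted to SImode lives in (reg:SI 58); the code below hands back the
   lowpart (subreg:QI (reg:SI 58) 0) (offset shown for a little-endian
   target) with SUBREG_PROMOTED_VAR_P set, so later sign or zero
   extensions of it can be omitted.  */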
6453 if (GET_CODE (DECL_RTL (exp)) == REG
6454 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6456 /* Get the signedness used for this variable. Ensure we get the
6457 same mode we got when the variable was declared. */
6458 if (GET_MODE (DECL_RTL (exp))
6459 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6460 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6461 abort ();
6463 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6464 SUBREG_PROMOTED_VAR_P (temp) = 1;
6465 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6466 return temp;
6469 return DECL_RTL (exp);
6471 case INTEGER_CST:
6472 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6473 TREE_INT_CST_HIGH (exp), mode);
6475 /* ??? If overflow is set, fold will have done an incomplete job,
6476 which can result in (plus xx (const_int 0)), which can get
6477 simplified by validate_replace_rtx during virtual register
6478 instantiation, which can result in unrecognizable insns.
6479 Avoid this by forcing all overflows into registers. */
6480 if (TREE_CONSTANT_OVERFLOW (exp)
6481 && modifier != EXPAND_INITIALIZER)
6482 temp = force_reg (mode, temp);
6484 return temp;
6486 case CONST_DECL:
6487 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6489 case REAL_CST:
6490 /* If optimized, generate immediate CONST_DOUBLE
6491 which will be turned into memory by reload if necessary.
6493 We used to force a register so that loop.c could see it. But
6494 this does not allow gen_* patterns to perform optimizations with
6495 the constants. It also produces two insns in cases like "x = 1.0;".
6496 On most machines, floating-point constants are not permitted in
6497 many insns, so we'd end up copying it to a register in any case.
6499 Now, we do the copying in expand_binop, if appropriate. */
6500 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6501 TYPE_MODE (TREE_TYPE (exp)));
6503 case COMPLEX_CST:
6504 case STRING_CST:
6505 if (! TREE_CST_RTL (exp))
6506 output_constant_def (exp, 1);
6508 /* TREE_CST_RTL probably contains a constant address.
6509 On RISC machines where a constant address isn't valid,
6510 make some insns to get that address into a register. */
6511 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6512 && modifier != EXPAND_CONST_ADDRESS
6513 && modifier != EXPAND_INITIALIZER
6514 && modifier != EXPAND_SUM
6515 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6516 || (flag_force_addr
6517 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6518 return replace_equiv_address (TREE_CST_RTL (exp),
6519 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6520 return TREE_CST_RTL (exp);
6522 case EXPR_WITH_FILE_LOCATION:
6524 rtx to_return;
6525 const char *saved_input_filename = input_filename;
6526 int saved_lineno = lineno;
6527 input_filename = EXPR_WFL_FILENAME (exp);
6528 lineno = EXPR_WFL_LINENO (exp);
6529 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6530 emit_line_note (input_filename, lineno);
6531 /* Possibly avoid switching back and forth here. */
6532 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6533 input_filename = saved_input_filename;
6534 lineno = saved_lineno;
6535 return to_return;
6538 case SAVE_EXPR:
6539 context = decl_function_context (exp);
6541 /* If this SAVE_EXPR was at global context, assume we are an
6542 initialization function and move it into our context. */
6543 if (context == 0)
6544 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6546 /* We treat inline_function_decl as an alias for the current function
6547 because that is the inline function whose vars, types, etc.
6548 are being merged into the current function.
6549 See expand_inline_function. */
6550 if (context == current_function_decl || context == inline_function_decl)
6551 context = 0;
6553 /* If this is non-local, handle it. */
6554 if (context)
6556 /* The following call just exists to abort if the context is
6557 not of a containing function. */
6558 find_function_data (context);
6560 temp = SAVE_EXPR_RTL (exp);
6561 if (temp && GET_CODE (temp) == REG)
6563 put_var_into_stack (exp);
6564 temp = SAVE_EXPR_RTL (exp);
6566 if (temp == 0 || GET_CODE (temp) != MEM)
6567 abort ();
6568 return
6569 replace_equiv_address (temp,
6570 fix_lexical_addr (XEXP (temp, 0), exp));
6572 if (SAVE_EXPR_RTL (exp) == 0)
6574 if (mode == VOIDmode)
6575 temp = const0_rtx;
6576 else
6577 temp = assign_temp (build_qualified_type (type,
6578 (TYPE_QUALS (type)
6579 | TYPE_QUAL_CONST)),
6580 3, 0, 0);
6582 SAVE_EXPR_RTL (exp) = temp;
6583 if (!optimize && GET_CODE (temp) == REG)
6584 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6585 save_expr_regs);
6587 /* If the mode of TEMP does not match that of the expression, it
6588 must be a promoted value. We pass store_expr a SUBREG of the
6589 wanted mode but mark it so that we know that it was already
6590 extended. Note that `unsignedp' was modified above in
6591 this case. */
6593 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6595 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6596 SUBREG_PROMOTED_VAR_P (temp) = 1;
6597 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6600 if (temp == const0_rtx)
6601 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6602 else
6603 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6605 TREE_USED (exp) = 1;
6608 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6609 must be a promoted value. We return a SUBREG of the wanted mode,
6610 but mark it so that we know that it was already extended. */
6612 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6613 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6615 /* Compute the signedness and make the proper SUBREG. */
6616 promote_mode (type, mode, &unsignedp, 0);
6617 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6618 SUBREG_PROMOTED_VAR_P (temp) = 1;
6619 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6620 return temp;
6623 return SAVE_EXPR_RTL (exp);
6625 case UNSAVE_EXPR:
6627 rtx temp;
6628 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6629 TREE_OPERAND (exp, 0)
6630 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6631 return temp;
6634 case PLACEHOLDER_EXPR:
6636 tree old_list = placeholder_list;
6637 tree placeholder_expr = 0;
6639 exp = find_placeholder (exp, &placeholder_expr);
6640 if (exp == 0)
6641 abort ();
6643 placeholder_list = TREE_CHAIN (placeholder_expr);
6644 temp = expand_expr (exp, original_target, tmode, modifier);
6645 placeholder_list = old_list;
6646 return temp;
6649 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6650 abort ();
6652 case WITH_RECORD_EXPR:
6653 /* Put the object on the placeholder list, expand our first operand,
6654 and pop the list. */
6655 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6656 placeholder_list);
6657 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6658 modifier);
6659 placeholder_list = TREE_CHAIN (placeholder_list);
6660 return target;
6662 case GOTO_EXPR:
6663 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6664 expand_goto (TREE_OPERAND (exp, 0));
6665 else
6666 expand_computed_goto (TREE_OPERAND (exp, 0));
6667 return const0_rtx;
6669 case EXIT_EXPR:
6670 expand_exit_loop_if_false (NULL,
6671 invert_truthvalue (TREE_OPERAND (exp, 0)));
6672 return const0_rtx;
6674 case LABELED_BLOCK_EXPR:
6675 if (LABELED_BLOCK_BODY (exp))
6676 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6677 /* Should perhaps use expand_label, but this is simpler and safer. */
6678 do_pending_stack_adjust ();
6679 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6680 return const0_rtx;
6682 case EXIT_BLOCK_EXPR:
6683 if (EXIT_BLOCK_RETURN (exp))
6684 sorry ("returned value in block_exit_expr");
6685 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6686 return const0_rtx;
6688 case LOOP_EXPR:
6689 push_temp_slots ();
6690 expand_start_loop (1);
6691 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6692 expand_end_loop ();
6693 pop_temp_slots ();
6695 return const0_rtx;
6697 case BIND_EXPR:
6699 tree vars = TREE_OPERAND (exp, 0);
6700 int vars_need_expansion = 0;
6702 /* Need to open a binding contour here because
6703 if there are any cleanups they must be contained here. */
6704 expand_start_bindings (2);
6706 /* Mark the corresponding BLOCK for output in its proper place. */
6707 if (TREE_OPERAND (exp, 2) != 0
6708 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6709 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6711 /* If VARS have not yet been expanded, expand them now. */
6712 while (vars)
6714 if (!DECL_RTL_SET_P (vars))
6716 vars_need_expansion = 1;
6717 expand_decl (vars);
6719 expand_decl_init (vars);
6720 vars = TREE_CHAIN (vars);
6723 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6725 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6727 return temp;
6730 case RTL_EXPR:
6731 if (RTL_EXPR_SEQUENCE (exp))
6733 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6734 abort ();
6735 emit_insns (RTL_EXPR_SEQUENCE (exp));
6736 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6738 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6739 free_temps_for_rtl_expr (exp);
6740 return RTL_EXPR_RTL (exp);
6742 case CONSTRUCTOR:
6743 /* If we don't need the result, just ensure we evaluate any
6744 subexpressions. */
6745 if (ignore)
6747 tree elt;
6749 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6750 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6752 return const0_rtx;
6755 /* All elts simple constants => refer to a constant in memory. But
6756 if this is a non-BLKmode mode, let it store a field at a time
6757 since that should make a CONST_INT or CONST_DOUBLE when we
6758 fold. Likewise, if we have a target we can use, it is best to
6759 store directly into the target unless the type is large enough
6760 that memcpy will be used. If we are making an initializer and
6761 all operands are constant, put it in memory as well.
6763 FIXME: Avoid trying to fill vector constructors piece-meal.
6764 Output them with output_constant_def below unless we're sure
6765 they're zeros. This should go away when vector initializers
6766 are treated like VECTOR_CST instead of arrays.
6767 */
6768 else if ((TREE_STATIC (exp)
6769 && ((mode == BLKmode
6770 && ! (target != 0 && safe_from_p (target, exp, 1)))
6771 || TREE_ADDRESSABLE (exp)
6772 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6773 && (! MOVE_BY_PIECES_P
6774 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6775 TYPE_ALIGN (type)))
6776 && ((TREE_CODE (type) == VECTOR_TYPE
6777 && !is_zeros_p (exp))
6778 || ! mostly_zeros_p (exp)))))
6779 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6781 rtx constructor = output_constant_def (exp, 1);
6783 if (modifier != EXPAND_CONST_ADDRESS
6784 && modifier != EXPAND_INITIALIZER
6785 && modifier != EXPAND_SUM)
6786 constructor = validize_mem (constructor);
6788 return constructor;
6790 else
6792 /* Handle calls that pass values in multiple non-contiguous
6793 locations. The Irix 6 ABI has examples of this. */
6794 if (target == 0 || ! safe_from_p (target, exp, 1)
6795 || GET_CODE (target) == PARALLEL)
6796 target
6797 = assign_temp (build_qualified_type (type,
6798 (TYPE_QUALS (type)
6799 | (TREE_READONLY (exp)
6800 * TYPE_QUAL_CONST))),
6801 0, TREE_ADDRESSABLE (exp), 1);
6803 store_constructor (exp, target, 0,
6804 int_size_in_bytes (TREE_TYPE (exp)));
6805 return target;
6808 case INDIRECT_REF:
6810 tree exp1 = TREE_OPERAND (exp, 0);
6811 tree index;
6812 tree string = string_constant (exp1, &index);
6814 /* Try to optimize reads from const strings. */
6815 if (string
6816 && TREE_CODE (string) == STRING_CST
6817 && TREE_CODE (index) == INTEGER_CST
6818 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6819 && GET_MODE_CLASS (mode) == MODE_INT
6820 && GET_MODE_SIZE (mode) == 1
6821 && modifier != EXPAND_WRITE)
6822 return gen_int_mode (TREE_STRING_POINTER (string)
6823 [TREE_INT_CST_LOW (index)], mode);
6825 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6826 op0 = memory_address (mode, op0);
6827 temp = gen_rtx_MEM (mode, op0);
6828 set_mem_attributes (temp, exp, 0);
6830 /* If we are writing to this object and its type is a record with
6831 readonly fields, we must mark it as readonly so it will
6832 conflict with readonly references to those fields. */
6833 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6834 RTX_UNCHANGING_P (temp) = 1;
6836 return temp;
6839 case ARRAY_REF:
6840 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6841 abort ();
6844 tree array = TREE_OPERAND (exp, 0);
6845 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6846 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6847 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6848 HOST_WIDE_INT i;
6850 /* Optimize the special case of a zero lower bound.
6852 We convert the low_bound to sizetype to avoid some problems
6853 with constant folding. (E.g. suppose the lower bound is 1,
6854 and its mode is QI. Without the conversion, (ARRAY
6855 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6856 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6858 if (! integer_zerop (low_bound))
6859 index = size_diffop (index, convert (sizetype, low_bound));
6861 /* Fold an expression like: "foo"[2].
6862 This is not done in fold so it won't happen inside &.
6863 Don't fold if this is for wide characters since it's too
6864 difficult to do correctly and this is a very rare case. */
6866 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6867 && TREE_CODE (array) == STRING_CST
6868 && TREE_CODE (index) == INTEGER_CST
6869 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6870 && GET_MODE_CLASS (mode) == MODE_INT
6871 && GET_MODE_SIZE (mode) == 1)
6872 return gen_int_mode (TREE_STRING_POINTER (array)
6873 [TREE_INT_CST_LOW (index)], mode);
6875 /* If this is a constant index into a constant array,
6876 just get the value from the array. Handle both the cases when
6877 we have an explicit constructor and when our operand is a variable
6878 that was declared const. */
6880 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6881 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6882 && TREE_CODE (index) == INTEGER_CST
6883 && 0 > compare_tree_int (index,
6884 list_length (CONSTRUCTOR_ELTS
6885 (TREE_OPERAND (exp, 0)))))
6887 tree elem;
6889 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6890 i = TREE_INT_CST_LOW (index);
6891 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6892 ;
6894 if (elem)
6895 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6896 modifier);
6899 else if (optimize >= 1
6900 && modifier != EXPAND_CONST_ADDRESS
6901 && modifier != EXPAND_INITIALIZER
6902 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6903 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6904 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6906 if (TREE_CODE (index) == INTEGER_CST)
6908 tree init = DECL_INITIAL (array);
6910 if (TREE_CODE (init) == CONSTRUCTOR)
6912 tree elem;
6914 for (elem = CONSTRUCTOR_ELTS (init);
6915 (elem
6916 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6917 elem = TREE_CHAIN (elem))
6918 ;
6920 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6921 return expand_expr (fold (TREE_VALUE (elem)), target,
6922 tmode, modifier);
6924 else if (TREE_CODE (init) == STRING_CST
6925 && 0 > compare_tree_int (index,
6926 TREE_STRING_LENGTH (init)))
6928 tree type = TREE_TYPE (TREE_TYPE (init));
6929 enum machine_mode mode = TYPE_MODE (type);
6931 if (GET_MODE_CLASS (mode) == MODE_INT
6932 && GET_MODE_SIZE (mode) == 1)
6933 return gen_int_mode (TREE_STRING_POINTER (init)
6934 [TREE_INT_CST_LOW (index)], mode);
6939 /* Fall through. */
6941 case COMPONENT_REF:
6942 case BIT_FIELD_REF:
6943 case ARRAY_RANGE_REF:
6944 /* If the operand is a CONSTRUCTOR, we can just extract the
6945 appropriate field if it is present. Don't do this if we have
6946 already written the data since we want to refer to that copy
6947 and varasm.c assumes that's what we'll do. */
6948 if (code == COMPONENT_REF
6949 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6950 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6952 tree elt;
6954 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6955 elt = TREE_CHAIN (elt))
6956 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6957 /* We can normally use the value of the field in the
6958 CONSTRUCTOR. However, if this is a bitfield in
6959 an integral mode that we can fit in a HOST_WIDE_INT,
6960 we must mask only the number of bits in the bitfield,
6961 since this is done implicitly by the constructor. If
6962 the bitfield does not meet either of those conditions,
6963 we can't do this optimization. */
6964 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6965 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6966 == MODE_INT)
6967 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6968 <= HOST_BITS_PER_WIDE_INT))))
6970 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6971 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6973 HOST_WIDE_INT bitsize
6974 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6975 enum machine_mode imode
6976 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6978 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6980 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6981 op0 = expand_and (imode, op0, op1, target);
6983 else
6985 tree count
6986 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6987 0);
6989 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6990 target, 0);
6991 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6992 target, 0);
6996 return op0;
7001 enum machine_mode mode1;
7002 HOST_WIDE_INT bitsize, bitpos;
7003 tree offset;
7004 int volatilep = 0;
7005 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7006 &mode1, &unsignedp, &volatilep);
7007 rtx orig_op0;
7009 /* If we got back the original object, something is wrong. Perhaps
7010 we are evaluating an expression too early. In any event, don't
7011 infinitely recurse. */
7012 if (tem == exp)
7013 abort ();
7015 /* If TEM's type is a union of variable size, pass TARGET to the inner
7016 computation, since it will need a temporary and TARGET is known
7017 to be usable for that. This occurs in unchecked conversion in Ada. */
7019 orig_op0 = op0
7020 = expand_expr (tem,
7021 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7022 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7023 != INTEGER_CST)
7024 ? target : NULL_RTX),
7025 VOIDmode,
7026 (modifier == EXPAND_INITIALIZER
7027 || modifier == EXPAND_CONST_ADDRESS)
7028 ? modifier : EXPAND_NORMAL);
7030 /* If this is a constant, put it into a register if it is a
7031 legitimate constant and OFFSET is 0; otherwise put it into memory. */
7032 if (CONSTANT_P (op0))
7034 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7035 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7036 && offset == 0)
7037 op0 = force_reg (mode, op0);
7038 else
7039 op0 = validize_mem (force_const_mem (mode, op0));
7042 if (offset != 0)
7044 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7046 /* If this object is in a register, put it into memory.
7047 This case can't occur in C, but can in Ada if we have
7048 unchecked conversion of an expression from a scalar type to
7049 an array or record type. */
7050 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7051 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7053 /* If the operand is a SAVE_EXPR, we can deal with this by
7054 forcing the SAVE_EXPR into memory. */
7055 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7057 put_var_into_stack (TREE_OPERAND (exp, 0));
7058 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7060 else
7062 tree nt
7063 = build_qualified_type (TREE_TYPE (tem),
7064 (TYPE_QUALS (TREE_TYPE (tem))
7065 | TYPE_QUAL_CONST));
7066 rtx memloc = assign_temp (nt, 1, 1, 1);
7068 emit_move_insn (memloc, op0);
7069 op0 = memloc;
7073 if (GET_CODE (op0) != MEM)
7074 abort ();
7076 #ifdef POINTERS_EXTEND_UNSIGNED
7077 if (GET_MODE (offset_rtx) != Pmode)
7078 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7079 #else
7080 if (GET_MODE (offset_rtx) != ptr_mode)
7081 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7082 #endif
7084 /* A constant address in OP0 can have VOIDmode; we must not try
7085 to call force_reg in that case, so avoid it. */
7086 if (GET_CODE (op0) == MEM
7087 && GET_MODE (op0) == BLKmode
7088 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7089 && bitsize != 0
7090 && (bitpos % bitsize) == 0
7091 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7092 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7094 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7095 bitpos = 0;
7098 op0 = offset_address (op0, offset_rtx,
7099 highest_pow2_factor (offset));
7102 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7103 record its alignment as BIGGEST_ALIGNMENT. */
7104 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7105 && is_aligning_offset (offset, tem))
7106 set_mem_align (op0, BIGGEST_ALIGNMENT);
7108 /* Don't forget about volatility even if this is a bitfield. */
7109 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7111 if (op0 == orig_op0)
7112 op0 = copy_rtx (op0);
7114 MEM_VOLATILE_P (op0) = 1;
7117 /* The following code doesn't handle CONCAT.
7118 Assume only bitpos == 0 can be used for CONCAT, due to
7119 one-element arrays having the same mode as their element. */
7120 if (GET_CODE (op0) == CONCAT)
7122 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7123 abort ();
7124 return op0;
7127 /* In cases where an aligned union has an unaligned object
7128 as a field, we might be extracting a BLKmode value from
7129 an integer-mode (e.g., SImode) object. Handle this case
7130 by doing the extract into an object as wide as the field
7131 (which we know to be the width of a basic mode), then
7132 storing into memory, and changing the mode to BLKmode. */
7133 if (mode1 == VOIDmode
7134 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7135 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7136 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7137 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7138 && modifier != EXPAND_CONST_ADDRESS
7139 && modifier != EXPAND_INITIALIZER)
7140 /* If the field isn't aligned enough to fetch as a memref,
7141 fetch it as a bit field. */
7142 || (mode1 != BLKmode
7143 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7144 && ((TYPE_ALIGN (TREE_TYPE (tem))
7145 < GET_MODE_ALIGNMENT (mode))
7146 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7147 /* If the type and the field are a constant size and the
7148 size of the type isn't the same size as the bitfield,
7149 we must use bitfield operations. */
7150 || (bitsize >= 0
7151 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7152 == INTEGER_CST)
7153 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7154 bitsize)))
7156 enum machine_mode ext_mode = mode;
7158 if (ext_mode == BLKmode
7159 && ! (target != 0 && GET_CODE (op0) == MEM
7160 && GET_CODE (target) == MEM
7161 && bitpos % BITS_PER_UNIT == 0))
7162 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7164 if (ext_mode == BLKmode)
7166 /* In this case, BITPOS must start at a byte boundary and
7167 TARGET, if specified, must be a MEM. */
7168 if (GET_CODE (op0) != MEM
7169 || (target != 0 && GET_CODE (target) != MEM)
7170 || bitpos % BITS_PER_UNIT != 0)
7171 abort ();
7173 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7174 if (target == 0)
7175 target = assign_temp (type, 0, 1, 1);
7177 emit_block_move (target, op0,
7178 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7179 / BITS_PER_UNIT));
7181 return target;
7184 op0 = validize_mem (op0);
7186 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7187 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7189 op0 = extract_bit_field (op0, bitsize, bitpos,
7190 unsignedp, target, ext_mode, ext_mode,
7191 int_size_in_bytes (TREE_TYPE (tem)));
7193 /* If the result is a record type and BITSIZE is narrower than
7194 the mode of OP0, an integral mode, and this is a big endian
7195 machine, we must put the field into the high-order bits. */
7196 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7197 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7198 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7199 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7200 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7201 - bitsize),
7202 op0, 1);
7204 if (mode == BLKmode)
7206 rtx new = assign_temp (build_qualified_type
7207 ((*lang_hooks.types.type_for_mode)
7208 (ext_mode, 0),
7209 TYPE_QUAL_CONST), 0, 1, 1);
7211 emit_move_insn (new, op0);
7212 op0 = copy_rtx (new);
7213 PUT_MODE (op0, BLKmode);
7214 set_mem_attributes (op0, exp, 1);
7217 return op0;
7220 /* If the result is BLKmode, use that to access the object
7221 now as well. */
7222 if (mode == BLKmode)
7223 mode1 = BLKmode;
7225 /* Get a reference to just this component. */
7226 if (modifier == EXPAND_CONST_ADDRESS
7227 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7228 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7229 else
7230 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7232 if (op0 == orig_op0)
7233 op0 = copy_rtx (op0);
7235 set_mem_attributes (op0, exp, 0);
7236 if (GET_CODE (XEXP (op0, 0)) == REG)
7237 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7239 MEM_VOLATILE_P (op0) |= volatilep;
7240 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7241 || modifier == EXPAND_CONST_ADDRESS
7242 || modifier == EXPAND_INITIALIZER)
7243 return op0;
7244 else if (target == 0)
7245 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7247 convert_move (target, op0, unsignedp);
7248 return target;
7251 case VTABLE_REF:
7253 rtx insn, before = get_last_insn (), vtbl_ref;
7255 /* Evaluate the interior expression. */
7256 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7257 tmode, modifier);
7259 /* Get or create an instruction off which to hang a note. */
7260 if (REG_P (subtarget))
7262 target = subtarget;
7263 insn = get_last_insn ();
7264 if (insn == before)
7265 abort ();
7266 if (! INSN_P (insn))
7267 insn = prev_nonnote_insn (insn);
7269 else
7271 target = gen_reg_rtx (GET_MODE (subtarget));
7272 insn = emit_move_insn (target, subtarget);
7275 /* Collect the data for the note. */
7276 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7277 vtbl_ref = plus_constant (vtbl_ref,
7278 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7279 /* Discard the initial CONST that was added. */
7280 vtbl_ref = XEXP (vtbl_ref, 0);
7282 REG_NOTES (insn)
7283 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7285 return target;
7288 /* Intended for a reference to a buffer of a file-object in Pascal.
7289 But it's not certain that a special tree code will really be
7290 necessary for these. INDIRECT_REF might work for them. */
7291 case BUFFER_REF:
7292 abort ();
7294 case IN_EXPR:
7296 /* Pascal set IN expression.
7298 Algorithm:
7299 rlo = set_low - (set_low%bits_per_word);
7300 the_word = set [ (index - rlo)/bits_per_word ];
7301 bit_index = index % bits_per_word;
7302 bitmask = 1 << bit_index;
7303 return !!(the_word & bitmask); */
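/* A worked instance of the algorithm above, purely for illustration,
   taking bits_per_word = 8: with set_low = 10 and index = 21,
   rlo = 10 - (10 % 8) = 8, the_word = set[(21 - 8) / 8] = set[1],
   bit_index = 21 % 8 = 5, bitmask = 1 << 5 = 0x20, and the result is
   !!(set[1] & 0x20).  */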
7305 tree set = TREE_OPERAND (exp, 0);
7306 tree index = TREE_OPERAND (exp, 1);
7307 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308 tree set_type = TREE_TYPE (set);
7309 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7310 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7311 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7312 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7313 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7314 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7315 rtx setaddr = XEXP (setval, 0);
7316 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7317 rtx rlow;
7318 rtx diff, quo, rem, addr, bit, result;
7320 /* If domain is empty, answer is no. Likewise if index is constant
7321 and out of bounds. */
7322 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7323 && TREE_CODE (set_low_bound) == INTEGER_CST
7324 && tree_int_cst_lt (set_high_bound, set_low_bound))
7325 || (TREE_CODE (index) == INTEGER_CST
7326 && TREE_CODE (set_low_bound) == INTEGER_CST
7327 && tree_int_cst_lt (index, set_low_bound))
7328 || (TREE_CODE (set_high_bound) == INTEGER_CST
7329 && TREE_CODE (index) == INTEGER_CST
7330 && tree_int_cst_lt (set_high_bound, index))))
7331 return const0_rtx;
7333 if (target == 0)
7334 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7336 /* If we get here, we have to generate the code for both cases
7337 (in range and out of range). */
7339 op0 = gen_label_rtx ();
7340 op1 = gen_label_rtx ();
7342 if (! (GET_CODE (index_val) == CONST_INT
7343 && GET_CODE (lo_r) == CONST_INT))
7344 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7345 GET_MODE (index_val), iunsignedp, op1);
7347 if (! (GET_CODE (index_val) == CONST_INT
7348 && GET_CODE (hi_r) == CONST_INT))
7349 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7350 GET_MODE (index_val), iunsignedp, op1);
7352 /* Calculate the element number of bit zero in the first word
7353 of the set. */
7354 if (GET_CODE (lo_r) == CONST_INT)
7355 rlow = GEN_INT (INTVAL (lo_r)
7356 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7357 else
7358 rlow = expand_binop (index_mode, and_optab, lo_r,
7359 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7360 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7362 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7363 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7365 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7366 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7367 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7368 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7370 addr = memory_address (byte_mode,
7371 expand_binop (index_mode, add_optab, diff,
7372 setaddr, NULL_RTX, iunsignedp,
7373 OPTAB_LIB_WIDEN));
7375 /* Extract the bit we want to examine. */
7376 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7377 gen_rtx_MEM (byte_mode, addr),
7378 make_tree (TREE_TYPE (index), rem),
7379 NULL_RTX, 1);
7380 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7381 GET_MODE (target) == byte_mode ? target : 0,
7382 1, OPTAB_LIB_WIDEN);
7384 if (result != target)
7385 convert_move (target, result, 1);
7387 /* Output the code to handle the out-of-range case. */
7388 emit_jump (op0);
7389 emit_label (op1);
7390 emit_move_insn (target, const0_rtx);
7391 emit_label (op0);
7392 return target;
7395 case WITH_CLEANUP_EXPR:
7396 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7398 WITH_CLEANUP_EXPR_RTL (exp)
7399 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7400 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7401 CLEANUP_EH_ONLY (exp));
7403 /* That's it for this cleanup. */
7404 TREE_OPERAND (exp, 1) = 0;
7406 return WITH_CLEANUP_EXPR_RTL (exp);
7408 case CLEANUP_POINT_EXPR:
7410 /* Start a new binding layer that will keep track of all cleanup
7411 actions to be performed. */
7412 expand_start_bindings (2);
7414 target_temp_slot_level = temp_slot_level;
7416 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7417 /* If we're going to use this value, load it up now. */
7418 if (! ignore)
7419 op0 = force_not_mem (op0);
7420 preserve_temp_slots (op0);
7421 expand_end_bindings (NULL_TREE, 0, 0);
7423 return op0;
7425 case CALL_EXPR:
7426 /* Check for a built-in function. */
7427 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7428 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7429 == FUNCTION_DECL)
7430 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7432 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7433 == BUILT_IN_FRONTEND)
7434 return (*lang_hooks.expand_expr)
7435 (exp, original_target, tmode, modifier);
7436 else
7437 return expand_builtin (exp, target, subtarget, tmode, ignore);
7440 return expand_call (exp, target, ignore);
7442 case NON_LVALUE_EXPR:
7443 case NOP_EXPR:
7444 case CONVERT_EXPR:
7445 case REFERENCE_EXPR:
7446 if (TREE_OPERAND (exp, 0) == error_mark_node)
7447 return const0_rtx;
7449 if (TREE_CODE (type) == UNION_TYPE)
7451 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7453 /* If both input and output are BLKmode, this conversion isn't doing
7454 anything except possibly changing the memory attributes. */
7455 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7457 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7458 modifier);
7460 result = copy_rtx (result);
7461 set_mem_attributes (result, exp, 0);
7462 return result;
7465 if (target == 0)
7466 target = assign_temp (type, 0, 1, 1);
7468 if (GET_CODE (target) == MEM)
7469 /* Store data into beginning of memory target. */
7470 store_expr (TREE_OPERAND (exp, 0),
7471 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7473 else if (GET_CODE (target) == REG)
7474 /* Store this field into a union of the proper type. */
7475 store_field (target,
7476 MIN ((int_size_in_bytes (TREE_TYPE
7477 (TREE_OPERAND (exp, 0)))
7478 * BITS_PER_UNIT),
7479 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7480 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7481 VOIDmode, 0, type, 0);
7482 else
7483 abort ();
7485 /* Return the entire union. */
7486 return target;
7489 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7491 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7492 modifier);
7494 /* If the signedness of the conversion differs and OP0 is
7495 a promoted SUBREG, clear that indication since we now
7496 have to do the proper extension. */
7497 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7498 && GET_CODE (op0) == SUBREG)
7499 SUBREG_PROMOTED_VAR_P (op0) = 0;
7501 return op0;
7504 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7505 if (GET_MODE (op0) == mode)
7506 return op0;
7508 /* If OP0 is a constant, just convert it into the proper mode. */
7509 if (CONSTANT_P (op0))
7511 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7512 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7514 if (modifier == EXPAND_INITIALIZER)
7515 return simplify_gen_subreg (mode, op0, inner_mode,
7516 subreg_lowpart_offset (mode,
7517 inner_mode));
7518 else
7519 return convert_modes (mode, inner_mode, op0,
7520 TREE_UNSIGNED (inner_type));
7523 if (modifier == EXPAND_INITIALIZER)
7524 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7526 if (target == 0)
7527 return
7528 convert_to_mode (mode, op0,
7529 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7530 else
7531 convert_move (target, op0,
7532 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7533 return target;
7535 case VIEW_CONVERT_EXPR:
7536 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7538 /* If the input and output modes are both the same, we are done.
7539 Otherwise, if neither mode is BLKmode and both are within a word, we
7540 can use gen_lowpart. If neither is true, make sure the operand is
7541 in memory and convert the MEM to the new mode. */
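/* For instance (illustration only): reinterpreting a 32-bit float as a
32-bit integer via VIEW_CONVERT_EXPR hits the second case, since SFmode
and SImode both fit in a word, so gen_lowpart is enough; reinterpreting a
BLKmode struct as an integer has to go through memory instead. */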
7542 if (TYPE_MODE (type) == GET_MODE (op0))
7544 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7545 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7546 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7547 op0 = gen_lowpart (TYPE_MODE (type), op0);
7548 else if (GET_CODE (op0) != MEM)
7550 /* If the operand is not a MEM, force it into memory. Since we
7551 are going to be changing the mode of the MEM, don't call
7552 force_const_mem for constants because we don't allow pool
7553 constants to change mode. */
7554 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7556 if (TREE_ADDRESSABLE (exp))
7557 abort ();
7559 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7560 target
7561 = assign_stack_temp_for_type
7562 (TYPE_MODE (inner_type),
7563 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7565 emit_move_insn (target, op0);
7566 op0 = target;
7569 /* At this point, OP0 is in the correct mode. If the output type is such
7570 that the operand is known to be aligned, indicate that it is.
7571 Otherwise, we need only be concerned about alignment for non-BLKmode
7572 results. */
7573 if (GET_CODE (op0) == MEM)
7575 op0 = copy_rtx (op0);
7577 if (TYPE_ALIGN_OK (type))
7578 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7579 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7580 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7582 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7583 HOST_WIDE_INT temp_size
7584 = MAX (int_size_in_bytes (inner_type),
7585 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7586 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7587 temp_size, 0, type);
7588 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7590 if (TREE_ADDRESSABLE (exp))
7591 abort ();
7593 if (GET_MODE (op0) == BLKmode)
7594 emit_block_move (new_with_op0_mode, op0,
7595 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7596 else
7597 emit_move_insn (new_with_op0_mode, op0);
7599 op0 = new;
7602 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7605 return op0;
7607 case PLUS_EXPR:
7608 /* We come here from MINUS_EXPR when the second operand is a
7609 constant. */
7610 plus_expr:
7611 this_optab = ! unsignedp && flag_trapv
7612 && (GET_MODE_CLASS (mode) == MODE_INT)
7613 ? addv_optab : add_optab;
7615 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7616 something else, make sure we add the register to the constant and
7617 then to the other thing. This case can occur during strength
7618 reduction and doing it this way will produce better code if the
7619 frame pointer or argument pointer is eliminated.
7621 fold-const.c will ensure that the constant is always in the inner
7622 PLUS_EXPR, so the only case we need to do anything about is if
7623 sp, ap, or fp is our second argument, in which case we must swap
7624 the innermost first argument and our second argument. */
7626 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7627 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7628 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7629 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7630 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7631 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7633 tree t = TREE_OPERAND (exp, 1);
7635 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7636 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7639 /* If the result is to be ptr_mode and we are adding an integer to
7640 something, we might be forming a constant. So try to use
7641 plus_constant. If it produces a sum and we can't accept it,
7642 use force_operand. This allows P = &ARR[const] to generate
7643 efficient code on machines where a SYMBOL_REF is not a valid
7644 address.
7646 If this is an EXPAND_SUM call, always return the sum. */
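/* For example (illustration only): for P = &ARR[5] the address of ARR is
a SYMBOL_REF and the element offset is folded in with plus_constant,
yielding something like (const (plus (symbol_ref "ARR") (const_int N)))
instead of an explicit add instruction; N here stands for 5 times the
element size. */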
7647 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7648 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7650 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7651 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7652 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7654 rtx constant_part;
7656 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7657 EXPAND_SUM);
7658 /* Use immed_double_const to ensure that the constant is
7659 truncated according to the mode of OP1, then sign extended
7660 to a HOST_WIDE_INT. Using the constant directly can result
7661 in non-canonical RTL in a 64x32 cross compile. */
7662 constant_part
7663 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7664 (HOST_WIDE_INT) 0,
7665 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7666 op1 = plus_constant (op1, INTVAL (constant_part));
7667 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7668 op1 = force_operand (op1, target);
7669 return op1;
7672 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7673 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7674 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7676 rtx constant_part;
7678 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7679 (modifier == EXPAND_INITIALIZER
7680 ? EXPAND_INITIALIZER : EXPAND_SUM));
7681 if (! CONSTANT_P (op0))
7683 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7684 VOIDmode, modifier);
7685 /* Don't go to both_summands if modifier
7686 says it's not right to return a PLUS. */
7687 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7688 goto binop2;
7689 goto both_summands;
7691 /* Use immed_double_const to ensure that the constant is
7692 truncated according to the mode of OP1, then sign extended
7693 to a HOST_WIDE_INT. Using the constant directly can result
7694 in non-canonical RTL in a 64x32 cross compile. */
7695 constant_part
7696 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7697 (HOST_WIDE_INT) 0,
7698 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7699 op0 = plus_constant (op0, INTVAL (constant_part));
7700 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7701 op0 = force_operand (op0, target);
7702 return op0;
7706 /* No sense saving up arithmetic to be done
7707 if it's all in the wrong mode to form part of an address.
7708 And force_operand won't know whether to sign-extend or
7709 zero-extend. */
7710 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7711 || mode != ptr_mode)
7712 goto binop;
7714 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7715 subtarget = 0;
7717 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7718 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7720 both_summands:
7721 /* Make sure any term that's a sum with a constant comes last. */
7722 if (GET_CODE (op0) == PLUS
7723 && CONSTANT_P (XEXP (op0, 1)))
7725 temp = op0;
7726 op0 = op1;
7727 op1 = temp;
7729 /* If adding to a sum including a constant,
7730 associate it to put the constant outside. */
7731 if (GET_CODE (op1) == PLUS
7732 && CONSTANT_P (XEXP (op1, 1)))
7734 rtx constant_term = const0_rtx;
7736 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7737 if (temp != 0)
7738 op0 = temp;
7739 /* Ensure that MULT comes first if there is one. */
7740 else if (GET_CODE (op0) == MULT)
7741 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7742 else
7743 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7745 /* Let's also eliminate constants from op0 if possible. */
7746 op0 = eliminate_constant_term (op0, &constant_term);
7748 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7749 their sum should be a constant. Form it into OP1, since the
7750 result we want will then be OP0 + OP1. */
7752 temp = simplify_binary_operation (PLUS, mode, constant_term,
7753 XEXP (op1, 1));
7754 if (temp != 0)
7755 op1 = temp;
7756 else
7757 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7760 /* Put a constant term last and put a multiplication first. */
7761 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7762 temp = op1, op1 = op0, op0 = temp;
7764 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7765 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7767 case MINUS_EXPR:
7768 /* For initializers, we are allowed to return a MINUS of two
7769 symbolic constants. Here we handle all cases when both operands
7770 are constant. */
7771 /* Handle difference of two symbolic constants,
7772 for the sake of an initializer. */
7773 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7774 && really_constant_p (TREE_OPERAND (exp, 0))
7775 && really_constant_p (TREE_OPERAND (exp, 1)))
7777 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7778 modifier);
7779 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7780 modifier);
7782 /* If the last operand is a CONST_INT, use plus_constant of
7783 the negated constant. Else make the MINUS. */
7784 if (GET_CODE (op1) == CONST_INT)
7785 return plus_constant (op0, - INTVAL (op1));
7786 else
7787 return gen_rtx_MINUS (mode, op0, op1);
7789 /* Convert A - const to A + (-const). */
7790 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7792 tree negated = fold (build1 (NEGATE_EXPR, type,
7793 TREE_OPERAND (exp, 1)));
7795 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7796 /* If we can't negate the constant in TYPE, leave it alone and
7797 expand_binop will negate it for us. We used to try to do it
7798 here in the signed version of TYPE, but that doesn't work
7799 on POINTER_TYPEs. */;
7800 else
7802 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7803 goto plus_expr;
7806 this_optab = ! unsignedp && flag_trapv
7807 && (GET_MODE_CLASS(mode) == MODE_INT)
7808 ? subv_optab : sub_optab;
7809 goto binop;
7811 case MULT_EXPR:
7812 /* If first operand is constant, swap them.
7813 Thus the following special case checks need only
7814 check the second operand. */
7815 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7817 tree t1 = TREE_OPERAND (exp, 0);
7818 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7819 TREE_OPERAND (exp, 1) = t1;
7822 /* Attempt to return something suitable for generating an
7823 indexed address, for machines that support that. */
7825 if (modifier == EXPAND_SUM && mode == ptr_mode
7826 && host_integerp (TREE_OPERAND (exp, 1), 0))
7828 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7829 EXPAND_SUM);
7831 /* If we knew for certain that this is arithmetic for an array
7832 reference, and we knew the bounds of the array, then we could
7833 apply the distributive law across (PLUS X C) for constant C.
7834 Without such knowledge, we risk overflowing the computation
7835 when both X and C are large, but X+C isn't. */
7836 /* ??? Could perhaps special-case EXP being unsigned and C being
7837 positive. In that case we are certain that X+C is no smaller
7838 than X and so the transformed expression will overflow iff the
7839 original would have. */
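/* Concretely (illustration only): for (i + 3) * 4 we return
(mult (op0) (const_int 4)) with op0 the expansion of i + 3, rather than
distributing to i*4 + 12, since the distributed form could overflow even
when the original index expression does not. */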
7841 if (GET_CODE (op0) != REG)
7842 op0 = force_operand (op0, NULL_RTX);
7843 if (GET_CODE (op0) != REG)
7844 op0 = copy_to_mode_reg (mode, op0);
7846 return
7847 gen_rtx_MULT (mode, op0,
7848 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7851 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7852 subtarget = 0;
7854 /* Check for multiplying things that have been extended
7855 from a narrower type. If this machine supports multiplying
7856 in that narrower type with a result in the desired type,
7857 do it that way, and avoid the explicit type-conversion. */
7858 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7859 && TREE_CODE (type) == INTEGER_TYPE
7860 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7861 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7862 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7863 && int_fits_type_p (TREE_OPERAND (exp, 1),
7864 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7865 /* Don't use a widening multiply if a shift will do. */
7866 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7867 > HOST_BITS_PER_WIDE_INT)
7868 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7869 ||
7870 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7871 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7872 ==
7873 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7874 /* If both operands are extended, they must either both
7875 be zero-extended or both be sign-extended. */
7876 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7877 ==
7878 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7880 enum machine_mode innermode
7881 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7882 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7883 ? smul_widen_optab : umul_widen_optab);
7884 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7885 ? umul_widen_optab : smul_widen_optab);
7886 if (mode == GET_MODE_WIDER_MODE (innermode))
7888 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7890 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7891 NULL_RTX, VOIDmode, 0);
7892 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7893 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7894 VOIDmode, 0);
7895 else
7896 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7897 NULL_RTX, VOIDmode, 0);
7898 goto binop2;
7900 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7901 && innermode == word_mode)
7903 rtx htem;
7904 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7905 NULL_RTX, VOIDmode, 0);
7906 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7907 op1 = convert_modes (innermode, mode,
7908 expand_expr (TREE_OPERAND (exp, 1),
7909 NULL_RTX, VOIDmode, 0),
7910 unsignedp);
7911 else
7912 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7913 NULL_RTX, VOIDmode, 0);
7914 temp = expand_binop (mode, other_optab, op0, op1, target,
7915 unsignedp, OPTAB_LIB_WIDEN);
7916 htem = expand_mult_highpart_adjust (innermode,
7917 gen_highpart (innermode, temp),
7918 op0, op1,
7919 gen_highpart (innermode, temp),
7920 unsignedp);
7921 emit_move_insn (gen_highpart (innermode, temp), htem);
7922 return temp;
7926 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7927 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7928 return expand_mult (mode, op0, op1, target, unsignedp);
7930 case TRUNC_DIV_EXPR:
7931 case FLOOR_DIV_EXPR:
7932 case CEIL_DIV_EXPR:
7933 case ROUND_DIV_EXPR:
7934 case EXACT_DIV_EXPR:
7935 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7936 subtarget = 0;
7937 /* Possible optimization: compute the dividend with EXPAND_SUM
7938 then, if the divisor is constant, optimize the case
7939 where some terms of the dividend have coefficients divisible by it. */
7940 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7941 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7942 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7944 case RDIV_EXPR:
7945 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7946 saving an expensive divide. If not, combine will rebuild the original
7947 computation. */
7948 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7949 && TREE_CODE (type) == REAL_TYPE
7950 && !real_onep (TREE_OPERAND (exp, 0)))
7951 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7952 build (RDIV_EXPR, type,
7953 build_real (type, dconst1),
7954 TREE_OPERAND (exp, 1))),
7955 target, tmode, unsignedp);
7956 this_optab = sdiv_optab;
7957 goto binop;
7959 case TRUNC_MOD_EXPR:
7960 case FLOOR_MOD_EXPR:
7961 case CEIL_MOD_EXPR:
7962 case ROUND_MOD_EXPR:
7963 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7964 subtarget = 0;
7965 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7966 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7967 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7969 case FIX_ROUND_EXPR:
7970 case FIX_FLOOR_EXPR:
7971 case FIX_CEIL_EXPR:
7972 abort (); /* Not used for C. */
7974 case FIX_TRUNC_EXPR:
7975 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7976 if (target == 0)
7977 target = gen_reg_rtx (mode);
7978 expand_fix (target, op0, unsignedp);
7979 return target;
7981 case FLOAT_EXPR:
7982 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7983 if (target == 0)
7984 target = gen_reg_rtx (mode);
7985 /* expand_float can't figure out what to do if FROM has VOIDmode.
7986 So give it the correct mode. With -O, cse will optimize this. */
7987 if (GET_MODE (op0) == VOIDmode)
7988 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7989 op0);
7990 expand_float (target, op0,
7991 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7992 return target;
7994 case NEGATE_EXPR:
7995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7996 temp = expand_unop (mode,
7997 ! unsignedp && flag_trapv
7998 && (GET_MODE_CLASS(mode) == MODE_INT)
7999 ? negv_optab : neg_optab, op0, target, 0);
8000 if (temp == 0)
8001 abort ();
8002 return temp;
8004 case ABS_EXPR:
8005 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8007 /* Handle complex values specially. */
8008 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8009 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8010 return expand_complex_abs (mode, op0, target, unsignedp);
8012 /* Unsigned abs is simply the operand. Testing here means we don't
8013 risk generating incorrect code below. */
8014 if (TREE_UNSIGNED (type))
8015 return op0;
8017 return expand_abs (mode, op0, target, unsignedp,
8018 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8020 case MAX_EXPR:
8021 case MIN_EXPR:
8022 target = original_target;
8023 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8024 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8025 || GET_MODE (target) != mode
8026 || (GET_CODE (target) == REG
8027 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8028 target = gen_reg_rtx (mode);
8029 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8030 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8032 /* First try to do it with a special MIN or MAX instruction.
8033 If that does not win, use a conditional jump to select the proper
8034 value. */
8035 this_optab = (TREE_UNSIGNED (type)
8036 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8037 : (code == MIN_EXPR ? smin_optab : smax_optab));
8039 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8040 OPTAB_WIDEN);
8041 if (temp != 0)
8042 return temp;
8044 /* At this point, a MEM target is no longer useful; we will get better
8045 code without it. */
8047 if (GET_CODE (target) == MEM)
8048 target = gen_reg_rtx (mode);
8050 if (target != op0)
8051 emit_move_insn (target, op0);
8053 op0 = gen_label_rtx ();
8055 /* If this mode is an integer too wide to compare properly,
8056 compare word by word. Rely on cse to optimize constant cases. */
8057 if (GET_MODE_CLASS (mode) == MODE_INT
8058 && ! can_compare_p (GE, mode, ccp_jump))
8060 if (code == MAX_EXPR)
8061 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8062 target, op1, NULL_RTX, op0);
8063 else
8064 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8065 op1, target, NULL_RTX, op0);
8067 else
8069 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8070 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8071 unsignedp, mode, NULL_RTX, NULL_RTX,
8072 op0);
8074 emit_move_insn (target, op1);
8075 emit_label (op0);
8076 return target;
8078 case BIT_NOT_EXPR:
8079 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8080 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8081 if (temp == 0)
8082 abort ();
8083 return temp;
8085 case FFS_EXPR:
8086 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8087 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8088 if (temp == 0)
8089 abort ();
8090 return temp;
8092 /* ??? Can optimize bitwise operations with one arg constant.
8093 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8094 and (a bitwise1 b) bitwise2 b (etc)
8095 but that is probably not worthwhile. */
8097 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8098 boolean values when we want in all cases to compute both of them. In
8099 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8100 as actual zero-or-1 values and then bitwise anding. In cases where
8101 there cannot be any side effects, better code would be made by
8102 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8103 how to recognize those cases. */
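/* Illustration only: TRUTH_ANDIF_EXPR ("a && b") must not evaluate b
when a is false, whereas TRUTH_AND_EXPR evaluates both operands and is
expanded here as a plain bitwise AND of their zero-or-one values. */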
8105 case TRUTH_AND_EXPR:
8106 case BIT_AND_EXPR:
8107 this_optab = and_optab;
8108 goto binop;
8110 case TRUTH_OR_EXPR:
8111 case BIT_IOR_EXPR:
8112 this_optab = ior_optab;
8113 goto binop;
8115 case TRUTH_XOR_EXPR:
8116 case BIT_XOR_EXPR:
8117 this_optab = xor_optab;
8118 goto binop;
8120 case LSHIFT_EXPR:
8121 case RSHIFT_EXPR:
8122 case LROTATE_EXPR:
8123 case RROTATE_EXPR:
8124 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8125 subtarget = 0;
8126 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8127 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8128 unsignedp);
8130 /* Could determine the answer when only additive constants differ. Also,
8131 the addition of one can be handled by changing the condition. */
8132 case LT_EXPR:
8133 case LE_EXPR:
8134 case GT_EXPR:
8135 case GE_EXPR:
8136 case EQ_EXPR:
8137 case NE_EXPR:
8138 case UNORDERED_EXPR:
8139 case ORDERED_EXPR:
8140 case UNLT_EXPR:
8141 case UNLE_EXPR:
8142 case UNGT_EXPR:
8143 case UNGE_EXPR:
8144 case UNEQ_EXPR:
8145 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8146 if (temp != 0)
8147 return temp;
8149 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8150 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8151 && original_target
8152 && GET_CODE (original_target) == REG
8153 && (GET_MODE (original_target)
8154 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8156 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8157 VOIDmode, 0);
8159 /* If temp is constant, we can just compute the result. */
8160 if (GET_CODE (temp) == CONST_INT)
8162 if (INTVAL (temp) != 0)
8163 emit_move_insn (target, const1_rtx);
8164 else
8165 emit_move_insn (target, const0_rtx);
8167 return target;
8170 if (temp != original_target)
8172 enum machine_mode mode1 = GET_MODE (temp);
8173 if (mode1 == VOIDmode)
8174 mode1 = tmode != VOIDmode ? tmode : mode;
8176 temp = copy_to_mode_reg (mode1, temp);
8179 op1 = gen_label_rtx ();
8180 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8181 GET_MODE (temp), unsignedp, op1);
8182 emit_move_insn (temp, const1_rtx);
8183 emit_label (op1);
8184 return temp;
8187 /* If no set-flag instruction, must generate a conditional
8188 store into a temporary variable. Drop through
8189 and handle this like && and ||. */
8191 case TRUTH_ANDIF_EXPR:
8192 case TRUTH_ORIF_EXPR:
8193 if (! ignore
8194 && (target == 0 || ! safe_from_p (target, exp, 1)
8195 /* Make sure we don't have a hard reg (such as the function's return
8196 value) live across basic blocks, if not optimizing. */
8197 || (!optimize && GET_CODE (target) == REG
8198 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8199 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8201 if (target)
8202 emit_clr_insn (target);
8204 op1 = gen_label_rtx ();
8205 jumpifnot (exp, op1);
8207 if (target)
8208 emit_0_to_1_insn (target);
8210 emit_label (op1);
8211 return ignore ? const0_rtx : target;
8213 case TRUTH_NOT_EXPR:
8214 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8215 /* The parser is careful to generate TRUTH_NOT_EXPR
8216 only with operands that are always zero or one. */
8217 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8218 target, 1, OPTAB_LIB_WIDEN);
8219 if (temp == 0)
8220 abort ();
8221 return temp;
8223 case COMPOUND_EXPR:
8224 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8225 emit_queue ();
8226 return expand_expr (TREE_OPERAND (exp, 1),
8227 (ignore ? const0_rtx : target),
8228 VOIDmode, 0);
8230 case COND_EXPR:
8231 /* If we would have a "singleton" (see below) were it not for a
8232 conversion in each arm, bring that conversion back out. */
8233 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8234 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8235 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8236 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8238 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8239 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8241 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8242 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8243 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8244 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8245 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8246 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8247 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8248 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8249 return expand_expr (build1 (NOP_EXPR, type,
8250 build (COND_EXPR, TREE_TYPE (iftrue),
8251 TREE_OPERAND (exp, 0),
8252 iftrue, iffalse)),
8253 target, tmode, modifier);
8257 /* Note that COND_EXPRs whose type is a structure or union
8258 are required to be constructed to contain assignments of
8259 a temporary variable, so that we can evaluate them here
8260 for side effect only. If type is void, we must do likewise. */
8262 /* If an arm of the branch requires a cleanup,
8263 only that cleanup is performed. */
8265 tree singleton = 0;
8266 tree binary_op = 0, unary_op = 0;
8268 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8269 convert it to our mode, if necessary. */
8270 if (integer_onep (TREE_OPERAND (exp, 1))
8271 && integer_zerop (TREE_OPERAND (exp, 2))
8272 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8274 if (ignore)
8276 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8277 modifier);
8278 return const0_rtx;
8281 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8282 if (GET_MODE (op0) == mode)
8283 return op0;
8285 if (target == 0)
8286 target = gen_reg_rtx (mode);
8287 convert_move (target, op0, unsignedp);
8288 return target;
8291 /* Check for X ? A + B : A. If we have this, we can copy A to the
8292 output and conditionally add B. Similarly for unary operations.
8293 Don't do this if X has side-effects because those side effects
8294 might affect A or B and the "?" operation is a sequence point in
8295 ANSI. (operand_equal_p tests for side effects.) */
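/* For example (illustration only): in "x ? a + 4 : a" the plain a is
the SINGLETON; we can store a into the target unconditionally and then
add 4 only when x is true, instead of evaluating a in both arms. */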
8297 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8298 && operand_equal_p (TREE_OPERAND (exp, 2),
8299 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8300 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8301 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8302 && operand_equal_p (TREE_OPERAND (exp, 1),
8303 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8304 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8305 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8306 && operand_equal_p (TREE_OPERAND (exp, 2),
8307 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8308 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8309 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8310 && operand_equal_p (TREE_OPERAND (exp, 1),
8311 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8312 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8314 /* If we are not to produce a result, we have no target. Otherwise,
8315 if a target was specified use it; it will not be used as an
8316 intermediate target unless it is safe. If no target, use a
8317 temporary. */
8319 if (ignore)
8320 temp = 0;
8321 else if (original_target
8322 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8323 || (singleton && GET_CODE (original_target) == REG
8324 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8325 && original_target == var_rtx (singleton)))
8326 && GET_MODE (original_target) == mode
8327 #ifdef HAVE_conditional_move
8328 && (! can_conditionally_move_p (mode)
8329 || GET_CODE (original_target) == REG
8330 || TREE_ADDRESSABLE (type))
8331 #endif
8332 && (GET_CODE (original_target) != MEM
8333 || TREE_ADDRESSABLE (type)))
8334 temp = original_target;
8335 else if (TREE_ADDRESSABLE (type))
8336 abort ();
8337 else
8338 temp = assign_temp (type, 0, 0, 1);
8340 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8341 do the test of X as a store-flag operation, do this as
8342 A + ((X != 0) << log C). Similarly for other simple binary
8343 operators. Only do for C == 1 if BRANCH_COST is low. */
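/* Worked example (illustration only): for X ? A + 8 : A, do_store_flag
computes (X != 0) as 0 or 1, the shift turns that into 0 or 8
(tree_log2 (8) == 3), and a single add finishes the job with no branch,
provided BRANCH_COST makes this profitable. */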
8344 if (temp && singleton && binary_op
8345 && (TREE_CODE (binary_op) == PLUS_EXPR
8346 || TREE_CODE (binary_op) == MINUS_EXPR
8347 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8348 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8349 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8350 : integer_onep (TREE_OPERAND (binary_op, 1)))
8351 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8353 rtx result;
8354 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8355 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8356 ? addv_optab : add_optab)
8357 : TREE_CODE (binary_op) == MINUS_EXPR
8358 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8359 ? subv_optab : sub_optab)
8360 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8361 : xor_optab);
8363 /* If we had X ? A : A + 1, do this as A + (X == 0).
8365 We have to invert the truth value here and then put it
8366 back later if do_store_flag fails. We cannot simply copy
8367 TREE_OPERAND (exp, 0) to another variable and modify that
8368 because invert_truthvalue can modify the tree pointed to
8369 by its argument. */
8370 if (singleton == TREE_OPERAND (exp, 1))
8371 TREE_OPERAND (exp, 0)
8372 = invert_truthvalue (TREE_OPERAND (exp, 0));
8374 result = do_store_flag (TREE_OPERAND (exp, 0),
8375 (safe_from_p (temp, singleton, 1)
8376 ? temp : NULL_RTX),
8377 mode, BRANCH_COST <= 1);
8379 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8380 result = expand_shift (LSHIFT_EXPR, mode, result,
8381 build_int_2 (tree_log2
8382 (TREE_OPERAND
8383 (binary_op, 1)),
8384 0),
8385 (safe_from_p (temp, singleton, 1)
8386 ? temp : NULL_RTX), 0);
8388 if (result)
8390 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8391 return expand_binop (mode, boptab, op1, result, temp,
8392 unsignedp, OPTAB_LIB_WIDEN);
8394 else if (singleton == TREE_OPERAND (exp, 1))
8395 TREE_OPERAND (exp, 0)
8396 = invert_truthvalue (TREE_OPERAND (exp, 0));
8399 do_pending_stack_adjust ();
8400 NO_DEFER_POP;
8401 op0 = gen_label_rtx ();
8403 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8405 if (temp != 0)
8407 /* If the target conflicts with the other operand of the
8408 binary op, we can't use it. Also, we can't use the target
8409 if it is a hard register, because evaluating the condition
8410 might clobber it. */
8411 if ((binary_op
8412 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8413 || (GET_CODE (temp) == REG
8414 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8415 temp = gen_reg_rtx (mode);
8416 store_expr (singleton, temp, 0);
8418 else
8419 expand_expr (singleton,
8420 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8421 if (singleton == TREE_OPERAND (exp, 1))
8422 jumpif (TREE_OPERAND (exp, 0), op0);
8423 else
8424 jumpifnot (TREE_OPERAND (exp, 0), op0);
8426 start_cleanup_deferral ();
8427 if (binary_op && temp == 0)
8428 /* Just touch the other operand. */
8429 expand_expr (TREE_OPERAND (binary_op, 1),
8430 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8431 else if (binary_op)
8432 store_expr (build (TREE_CODE (binary_op), type,
8433 make_tree (type, temp),
8434 TREE_OPERAND (binary_op, 1)),
8435 temp, 0);
8436 else
8437 store_expr (build1 (TREE_CODE (unary_op), type,
8438 make_tree (type, temp)),
8439 temp, 0);
8440 op1 = op0;
8442 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8443 comparison operator. If we have one of these cases, set the
8444 output to A, branch on A (cse will merge these two references),
8445 then set the output to FOO. */
8446 else if (temp
8447 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8448 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8449 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8450 TREE_OPERAND (exp, 1), 0)
8451 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8452 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8453 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8455 if (GET_CODE (temp) == REG
8456 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8457 temp = gen_reg_rtx (mode);
8458 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8459 jumpif (TREE_OPERAND (exp, 0), op0);
8461 start_cleanup_deferral ();
8462 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8463 op1 = op0;
8465 else if (temp
8466 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8467 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8468 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8469 TREE_OPERAND (exp, 2), 0)
8470 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8471 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8472 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8474 if (GET_CODE (temp) == REG
8475 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8476 temp = gen_reg_rtx (mode);
8477 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8478 jumpifnot (TREE_OPERAND (exp, 0), op0);
8480 start_cleanup_deferral ();
8481 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8482 op1 = op0;
8484 else
8486 op1 = gen_label_rtx ();
8487 jumpifnot (TREE_OPERAND (exp, 0), op0);
8489 start_cleanup_deferral ();
8491 /* One branch of the cond can be void, if it never returns. For
8492 example A ? throw : E. */
8493 if (temp != 0
8494 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8495 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8496 else
8497 expand_expr (TREE_OPERAND (exp, 1),
8498 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8499 end_cleanup_deferral ();
8500 emit_queue ();
8501 emit_jump_insn (gen_jump (op1));
8502 emit_barrier ();
8503 emit_label (op0);
8504 start_cleanup_deferral ();
8505 if (temp != 0
8506 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8507 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8508 else
8509 expand_expr (TREE_OPERAND (exp, 2),
8510 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8513 end_cleanup_deferral ();
8515 emit_queue ();
8516 emit_label (op1);
8517 OK_DEFER_POP;
8519 return temp;
8522 case TARGET_EXPR:
8524 /* Something needs to be initialized, but we didn't know
8525 where that thing was when building the tree. For example,
8526 it could be the return value of a function, or a parameter
8527 to a function which is laid out on the stack, or a temporary
8528 variable which must be passed by reference.
8530 We guarantee that the expression will either be constructed
8531 or copied into our original target. */
8533 tree slot = TREE_OPERAND (exp, 0);
8534 tree cleanups = NULL_TREE;
8535 tree exp1;
8537 if (TREE_CODE (slot) != VAR_DECL)
8538 abort ();
8540 if (! ignore)
8541 target = original_target;
8543 /* Set this here so that if we get a target that refers to a
8544 register variable that's already been used, put_reg_into_stack
8545 knows that it should fix up those uses. */
8546 TREE_USED (slot) = 1;
8548 if (target == 0)
8550 if (DECL_RTL_SET_P (slot))
8552 target = DECL_RTL (slot);
8553 /* If we have already expanded the slot, don't do
8554 it again. (mrs) */
8555 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8556 return target;
8558 else
8560 target = assign_temp (type, 2, 0, 1);
8561 /* All temp slots at this level must not conflict. */
8562 preserve_temp_slots (target);
8563 SET_DECL_RTL (slot, target);
8564 if (TREE_ADDRESSABLE (slot))
8565 put_var_into_stack (slot);
8567 /* Since SLOT is not known to the called function
8568 to belong to its stack frame, we must build an explicit
8569 cleanup. This case occurs when we must build up a reference
8570 to pass the reference as an argument. In this case,
8571 it is very likely that such a reference need not be
8572 built here. */
8574 if (TREE_OPERAND (exp, 2) == 0)
8575 TREE_OPERAND (exp, 2)
8576 = (*lang_hooks.maybe_build_cleanup) (slot);
8577 cleanups = TREE_OPERAND (exp, 2);
8580 else
8582 /* This case does occur when expanding a parameter which
8583 needs to be constructed on the stack. The target
8584 is the actual stack address that we want to initialize.
8585 The function we call will perform the cleanup in this case. */
8587 /* If we have already assigned it space, use that space,
8588 not the target that we were passed in, as our target
8589 parameter is only a hint. */
8590 if (DECL_RTL_SET_P (slot))
8592 target = DECL_RTL (slot);
8593 /* If we have already expanded the slot, don't do
8594 it again. (mrs) */
8595 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8596 return target;
8598 else
8600 SET_DECL_RTL (slot, target);
8601 /* If we must have an addressable slot, then make sure that
8602 the RTL that we just stored in slot is OK. */
8603 if (TREE_ADDRESSABLE (slot))
8604 put_var_into_stack (slot);
8608 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8609 /* Mark it as expanded. */
8610 TREE_OPERAND (exp, 1) = NULL_TREE;
8612 store_expr (exp1, target, 0);
8614 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8616 return target;
8619 case INIT_EXPR:
8621 tree lhs = TREE_OPERAND (exp, 0);
8622 tree rhs = TREE_OPERAND (exp, 1);
8624 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8625 return temp;
8628 case MODIFY_EXPR:
8630 /* If lhs is complex, expand calls in rhs before computing it.
8631 That's so we don't compute a pointer and save it over a
8632 call. If lhs is simple, compute it first so we can give it
8633 as a target if the rhs is just a call. This avoids an
8634 extra temp and copy and that prevents a partial-subsumption
8635 which makes bad code. Actually we could treat
8636 component_ref's of vars like vars. */
8638 tree lhs = TREE_OPERAND (exp, 0);
8639 tree rhs = TREE_OPERAND (exp, 1);
8641 temp = 0;
8643 /* Check for |= or &= of a bitfield of size one into another bitfield
8644 of size 1. In this case, (unless we need the result of the
8645 assignment) we can do this more efficiently with a
8646 test followed by an assignment, if necessary.
8648 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8649 things change so we do, this code should be enhanced to
8650 support it. */
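/* Illustration only: for "s.f |= t.g;" with f and g both one-bit fields
and the result unused, we merely test t.g and, only if it is set, store 1
into s.f, avoiding a read-modify-write of s.f. (For &= we store 0 when
t.g is clear.) */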
8651 if (ignore
8652 && TREE_CODE (lhs) == COMPONENT_REF
8653 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8654 || TREE_CODE (rhs) == BIT_AND_EXPR)
8655 && TREE_OPERAND (rhs, 0) == lhs
8656 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8657 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8658 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8660 rtx label = gen_label_rtx ();
8662 do_jump (TREE_OPERAND (rhs, 1),
8663 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8664 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8665 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8666 (TREE_CODE (rhs) == BIT_IOR_EXPR
8667 ? integer_one_node
8668 : integer_zero_node)),
8669 0, 0);
8670 do_pending_stack_adjust ();
8671 emit_label (label);
8672 return const0_rtx;
8675 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8677 return temp;
8680 case RETURN_EXPR:
8681 if (!TREE_OPERAND (exp, 0))
8682 expand_null_return ();
8683 else
8684 expand_return (TREE_OPERAND (exp, 0));
8685 return const0_rtx;
8687 case PREINCREMENT_EXPR:
8688 case PREDECREMENT_EXPR:
8689 return expand_increment (exp, 0, ignore);
8691 case POSTINCREMENT_EXPR:
8692 case POSTDECREMENT_EXPR:
8693 /* Faster to treat as pre-increment if result is not used. */
8694 return expand_increment (exp, ! ignore, ignore);
8696 case ADDR_EXPR:
8697 /* Are we taking the address of a nested function? */
8698 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8699 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8700 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8701 && ! TREE_STATIC (exp))
8703 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8704 op0 = force_operand (op0, target);
8706 /* If we are taking the address of something erroneous, just
8707 return a zero. */
8708 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8709 return const0_rtx;
8710 /* If we are taking the address of a constant and are at the
8711 top level, we have to use output_constant_def since we can't
8712 call force_const_mem at top level. */
8713 else if (cfun == 0
8714 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8715 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8716 == 'c')))
8717 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8718 else
8720 /* We make sure to pass const0_rtx down if we came in with
8721 ignore set, to avoid doing the cleanups twice for something. */
8722 op0 = expand_expr (TREE_OPERAND (exp, 0),
8723 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8724 (modifier == EXPAND_INITIALIZER
8725 ? modifier : EXPAND_CONST_ADDRESS));
8727 /* If we are going to ignore the result, OP0 will have been set
8728 to const0_rtx, so just return it. Don't get confused and
8729 think we are taking the address of the constant. */
8730 if (ignore)
8731 return op0;
8733 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8734 clever and return a REG when given a MEM. */
8735 op0 = protect_from_queue (op0, 1);
8737 /* We would like the object in memory. If it is a constant, we can
8738 have it be statically allocated into memory. For a non-constant,
8739 we need to allocate some memory and store the value into it. */
8741 if (CONSTANT_P (op0))
8742 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8743 op0);
8744 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8745 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8746 || GET_CODE (op0) == PARALLEL)
8748 /* If the operand is a SAVE_EXPR, we can deal with this by
8749 forcing the SAVE_EXPR into memory. */
8750 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8752 put_var_into_stack (TREE_OPERAND (exp, 0));
8753 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8755 else
8757 /* If this object is in a register, it can't be BLKmode. */
8758 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8759 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8761 if (GET_CODE (op0) == PARALLEL)
8762 /* Handle calls that pass values in multiple
8763 non-contiguous locations. The Irix 6 ABI has examples
8764 of this. */
8765 emit_group_store (memloc, op0,
8766 int_size_in_bytes (inner_type));
8767 else
8768 emit_move_insn (memloc, op0);
8770 op0 = memloc;
8774 if (GET_CODE (op0) != MEM)
8775 abort ();
8777 mark_temp_addr_taken (op0);
8778 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8780 op0 = XEXP (op0, 0);
8781 #ifdef POINTERS_EXTEND_UNSIGNED
8782 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8783 && mode == ptr_mode)
8784 op0 = convert_memory_address (ptr_mode, op0);
8785 #endif
8786 return op0;
8789 /* If OP0 is not aligned at least as much as the type requires, we
8790 need to make a temporary, copy OP0 to it, and take the address of
8791 the temporary. We want to use the alignment of the type, not of
8792 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8793 the test for BLKmode means that can't happen. The test for
8794 BLKmode is because we never make mis-aligned MEMs with
8795 non-BLKmode.
8797 We don't need to do this at all if the machine doesn't have
8798 strict alignment. */
8799 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8800 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8801 > MEM_ALIGN (op0))
8802 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8804 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8805 rtx new
8806 = assign_stack_temp_for_type
8807 (TYPE_MODE (inner_type),
8808 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8809 : int_size_in_bytes (inner_type),
8810 1, build_qualified_type (inner_type,
8811 (TYPE_QUALS (inner_type)
8812 | TYPE_QUAL_CONST)));
8814 if (TYPE_ALIGN_OK (inner_type))
8815 abort ();
8817 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8818 op0 = new;
8821 op0 = force_operand (XEXP (op0, 0), target);
8824 if (flag_force_addr
8825 && GET_CODE (op0) != REG
8826 && modifier != EXPAND_CONST_ADDRESS
8827 && modifier != EXPAND_INITIALIZER
8828 && modifier != EXPAND_SUM)
8829 op0 = force_reg (Pmode, op0);
8831 if (GET_CODE (op0) == REG
8832 && ! REG_USERVAR_P (op0))
8833 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8835 #ifdef POINTERS_EXTEND_UNSIGNED
8836 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8837 && mode == ptr_mode)
8838 op0 = convert_memory_address (ptr_mode, op0);
8839 #endif
8841 return op0;
8843 case ENTRY_VALUE_EXPR:
8844 abort ();
8846 /* COMPLEX type for Extended Pascal & Fortran */
8847 case COMPLEX_EXPR:
8849 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8850 rtx insns;
8852 /* Get the rtx code of the operands. */
8853 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8854 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8856 if (! target)
8857 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8859 start_sequence ();
8861 /* Move the real (op0) and imaginary (op1) parts to their location. */
8862 emit_move_insn (gen_realpart (mode, target), op0);
8863 emit_move_insn (gen_imagpart (mode, target), op1);
8865 insns = get_insns ();
8866 end_sequence ();
8868 /* Complex construction should appear as a single unit. */
8869 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8870 each with a separate pseudo as destination.
8871 It's not correct for flow to treat them as a unit. */
8872 if (GET_CODE (target) != CONCAT)
8873 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8874 else
8875 emit_insns (insns);
8877 return target;
8880 case REALPART_EXPR:
8881 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8882 return gen_realpart (mode, op0);
8884 case IMAGPART_EXPR:
8885 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8886 return gen_imagpart (mode, op0);
8888 case CONJ_EXPR:
8890 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8891 rtx imag_t;
8892 rtx insns;
8894 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8896 if (! target)
8897 target = gen_reg_rtx (mode);
8899 start_sequence ();
8901 /* Store the realpart and the negated imagpart to target. */
8902 emit_move_insn (gen_realpart (partmode, target),
8903 gen_realpart (partmode, op0));
8905 imag_t = gen_imagpart (partmode, target);
8906 temp = expand_unop (partmode,
8907 ! unsignedp && flag_trapv
8908 && (GET_MODE_CLASS(partmode) == MODE_INT)
8909 ? negv_optab : neg_optab,
8910 gen_imagpart (partmode, op0), imag_t, 0);
8911 if (temp != imag_t)
8912 emit_move_insn (imag_t, temp);
8914 insns = get_insns ();
8915 end_sequence ();
8917 /* Conjugate should appear as a single unit.
8918 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8919 each with a separate pseudo as destination.
8920 It's not correct for flow to treat them as a unit. */
8921 if (GET_CODE (target) != CONCAT)
8922 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8923 else
8924 emit_insns (insns);
8926 return target;
8929 case TRY_CATCH_EXPR:
8931 tree handler = TREE_OPERAND (exp, 1);
8933 expand_eh_region_start ();
8935 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8937 expand_eh_region_end_cleanup (handler);
8939 return op0;
8942 case TRY_FINALLY_EXPR:
8944 tree try_block = TREE_OPERAND (exp, 0);
8945 tree finally_block = TREE_OPERAND (exp, 1);
8946 rtx finally_label = gen_label_rtx ();
8947 rtx done_label = gen_label_rtx ();
8948 rtx return_link = gen_reg_rtx (Pmode);
8949 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8950 (tree) finally_label, (tree) return_link);
8951 TREE_SIDE_EFFECTS (cleanup) = 1;
8953 /* Start a new binding layer that will keep track of all cleanup
8954 actions to be performed. */
8955 expand_start_bindings (2);
8957 target_temp_slot_level = temp_slot_level;
8959 expand_decl_cleanup (NULL_TREE, cleanup);
8960 op0 = expand_expr (try_block, target, tmode, modifier);
8962 preserve_temp_slots (op0);
8963 expand_end_bindings (NULL_TREE, 0, 0);
8964 emit_jump (done_label);
8965 emit_label (finally_label);
8966 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8967 emit_indirect_jump (return_link);
8968 emit_label (done_label);
8969 return op0;
8972 case GOTO_SUBROUTINE_EXPR:
8974 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8975 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8976 rtx return_address = gen_label_rtx ();
8977 emit_move_insn (return_link,
8978 gen_rtx_LABEL_REF (Pmode, return_address));
8979 emit_jump (subr);
8980 emit_label (return_address);
8981 return const0_rtx;
8984 case VA_ARG_EXPR:
8985 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8987 case EXC_PTR_EXPR:
8988 return get_exception_pointer (cfun);
8990 case FDESC_EXPR:
8991 /* Function descriptors are not valid except as
8992 initialization constants, and should not be expanded. */
8993 abort ();
8995 default:
8996 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
8999 /* Here to do an ordinary binary operator, generating an instruction
9000 from the optab already placed in `this_optab'. */
9001 binop:
9002 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9003 subtarget = 0;
9004 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9005 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9006 binop2:
9007 temp = expand_binop (mode, this_optab, op0, op1, target,
9008 unsignedp, OPTAB_LIB_WIDEN);
9009 if (temp == 0)
9010 abort ();
9011 return temp;
9014 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9015 when applied to the address of EXP produces an address known to be
9016 aligned more than BIGGEST_ALIGNMENT. */
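/* In outline (illustration only): the offsets accepted here have the form
(- (T) &EXP) & MASK, where MASK is a constant one less than a power of
two and larger than BIGGEST_ALIGNMENT -- i.e. the amount that must be
added to &EXP to round it up to that alignment boundary. T stands for
whatever integer type the conversions stripped below used. */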
9018 static int
9019 is_aligning_offset (offset, exp)
9020 tree offset;
9021 tree exp;
9023 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9024 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9025 || TREE_CODE (offset) == NOP_EXPR
9026 || TREE_CODE (offset) == CONVERT_EXPR
9027 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9028 offset = TREE_OPERAND (offset, 0);
9030 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9031 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9032 if (TREE_CODE (offset) != BIT_AND_EXPR
9033 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9034 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9035 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9036 return 0;
9038 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9039 It must be NEGATE_EXPR. Then strip any more conversions. */
9040 offset = TREE_OPERAND (offset, 0);
9041 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9042 || TREE_CODE (offset) == NOP_EXPR
9043 || TREE_CODE (offset) == CONVERT_EXPR)
9044 offset = TREE_OPERAND (offset, 0);
9046 if (TREE_CODE (offset) != NEGATE_EXPR)
9047 return 0;
9049 offset = TREE_OPERAND (offset, 0);
9050 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9051 || TREE_CODE (offset) == NOP_EXPR
9052 || TREE_CODE (offset) == CONVERT_EXPR)
9053 offset = TREE_OPERAND (offset, 0);
9055 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9056 whose type is the same as EXP. */
9057 return (TREE_CODE (offset) == ADDR_EXPR
9058 && (TREE_OPERAND (offset, 0) == exp
9059 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9060 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9061 == TREE_TYPE (exp)))));
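/* Editor's note: a minimal standalone sketch (not part of the compiler) of
   the source-level idiom the pattern above recognizes.  The offset has the
   shape (-&EXP) & C, where C + 1 is a power of 2, which is the usual way of
   rounding an address up to a power-of-two boundary.  ALIGN is assumed to
   be a power of 2 and the helper name is invented for illustration; the
   block is disabled with #if 0, following this file's convention.  */
#if 0
static unsigned long
align_up (unsigned long addr, unsigned long align)
{
  /* (-addr) & (align - 1) is the number of bytes needed to reach the next
     multiple of ALIGN (zero if ADDR is already aligned), so the sum is
     aligned to ALIGN.  */
  return addr + ((- addr) & (align - 1));
}
#endif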
9064 /* Return the tree node if ARG corresponds to a string constant, or zero
9065 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9066 in bytes within the string that ARG is accessing. The type of the
9067 offset will be `sizetype'. */
9069 tree
9070 string_constant (arg, ptr_offset)
9071 tree arg;
9072 tree *ptr_offset;
9074 STRIP_NOPS (arg);
9076 if (TREE_CODE (arg) == ADDR_EXPR
9077 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9079 *ptr_offset = size_zero_node;
9080 return TREE_OPERAND (arg, 0);
9082 else if (TREE_CODE (arg) == PLUS_EXPR)
9084 tree arg0 = TREE_OPERAND (arg, 0);
9085 tree arg1 = TREE_OPERAND (arg, 1);
9087 STRIP_NOPS (arg0);
9088 STRIP_NOPS (arg1);
9090 if (TREE_CODE (arg0) == ADDR_EXPR
9091 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9093 *ptr_offset = convert (sizetype, arg1);
9094 return TREE_OPERAND (arg0, 0);
9096 else if (TREE_CODE (arg1) == ADDR_EXPR
9097 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9099 *ptr_offset = convert (sizetype, arg0);
9100 return TREE_OPERAND (arg1, 0);
9104 return 0;
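/* Editor's note: a hedged sketch of how a caller might use string_constant;
   it is not taken from this file.  The helper name is hypothetical, and the
   bounds check against TREE_STRING_LENGTH is an assumption about how a
   builtin expander would want to use the result.  Disabled with #if 0 as
   elsewhere in this file.  */
#if 0
static const char *
literal_bytes (tree arg)
{
  tree offset;
  tree str = string_constant (arg, &offset);

  if (str == 0
      || ! host_integerp (offset, 1)
      || compare_tree_int (offset, TREE_STRING_LENGTH (str)) >= 0)
    return 0;

  /* Point at the byte within the literal that ARG designates.  */
  return TREE_STRING_POINTER (str) + tree_low_cst (offset, 1);
}
#endif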
9107 /* Expand code for a post- or pre- increment or decrement
9108 and return the RTX for the result.
9109 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9111 static rtx
9112 expand_increment (exp, post, ignore)
9113 tree exp;
9114 int post, ignore;
9116 rtx op0, op1;
9117 rtx temp, value;
9118 tree incremented = TREE_OPERAND (exp, 0);
9119 optab this_optab = add_optab;
9120 int icode;
9121 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9122 int op0_is_copy = 0;
9123 int single_insn = 0;
9124 /* 1 means we can't store into OP0 directly,
9125 because it is a subreg narrower than a word,
9126 and we don't dare clobber the rest of the word. */
9127 int bad_subreg = 0;
9129 /* Stabilize any component ref that might need to be
9130 evaluated more than once below. */
9131 if (!post
9132 || TREE_CODE (incremented) == BIT_FIELD_REF
9133 || (TREE_CODE (incremented) == COMPONENT_REF
9134 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9135 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9136 incremented = stabilize_reference (incremented);
9137 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9138 ones into save exprs so that they don't accidentally get evaluated
9139 more than once by the code below. */
9140 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9141 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9142 incremented = save_expr (incremented);
9144 /* Compute the operands as RTX.
9145 Note whether OP0 is the actual lvalue or a copy of it:
9146 I believe it is a copy iff it is a register or subreg
9147 and insns were generated in computing it. */
9149 temp = get_last_insn ();
9150 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9152 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9153 in place but instead must do sign- or zero-extension during assignment,
9154 so we copy it into a new register and let the code below use it as
9155 a copy.
9157 Note that we can safely modify this SUBREG since it is known not to be
9158 shared (it was made by the expand_expr call above). */
9160 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9162 if (post)
9163 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9164 else
9165 bad_subreg = 1;
9167 else if (GET_CODE (op0) == SUBREG
9168 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9170 /* We cannot increment this SUBREG in place. If we are
9171 post-incrementing, get a copy of the old value. Otherwise,
9172 just mark that we cannot increment in place. */
9173 if (post)
9174 op0 = copy_to_reg (op0);
9175 else
9176 bad_subreg = 1;
9179 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9180 && temp != get_last_insn ());
9181 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9183 /* Decide whether incrementing or decrementing. */
9184 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9185 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9186 this_optab = sub_optab;
9188 /* Convert decrement by a constant into a negative increment. */
9189 if (this_optab == sub_optab
9190 && GET_CODE (op1) == CONST_INT)
9192 op1 = GEN_INT (-INTVAL (op1));
9193 this_optab = add_optab;
9196 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9197 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9199 /* For a preincrement, see if we can do this with a single instruction. */
9200 if (!post)
9202 icode = (int) this_optab->handlers[(int) mode].insn_code;
9203 if (icode != (int) CODE_FOR_nothing
9204 /* Make sure that OP0 is valid for operands 0 and 1
9205 of the insn we want to queue. */
9206 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9207 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9208 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9209 single_insn = 1;
9212 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9213 then we cannot just increment OP0. We must therefore contrive to
9214 increment the original value. Then, for postincrement, we can return
9215 OP0 since it is a copy of the old value. For preincrement, expand here
9216 unless we can do it with a single insn.
9218 Likewise if storing directly into OP0 would clobber high bits
9219 we need to preserve (bad_subreg). */
9220 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9222 /* This is the easiest way to increment the value wherever it is.
9223 Problems with multiple evaluation of INCREMENTED are prevented
9224 because either (1) it is a component_ref or preincrement,
9225 in which case it was stabilized above, or (2) it is an array_ref
9226 with constant index in an array in a register, which is
9227 safe to reevaluate. */
9228 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9229 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9230 ? MINUS_EXPR : PLUS_EXPR),
9231 TREE_TYPE (exp),
9232 incremented,
9233 TREE_OPERAND (exp, 1));
9235 while (TREE_CODE (incremented) == NOP_EXPR
9236 || TREE_CODE (incremented) == CONVERT_EXPR)
9238 newexp = convert (TREE_TYPE (incremented), newexp);
9239 incremented = TREE_OPERAND (incremented, 0);
9242 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9243 return post ? op0 : temp;
9246 if (post)
9248 /* We have a true reference to the value in OP0.
9249 If there is an insn to add or subtract in this mode, queue it.
9250 Queueing the increment insn avoids the register shuffling
9251 that often results if we must increment now and first save
9252 the old value for subsequent use. */
9254 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9255 op0 = stabilize (op0);
9256 #endif
9258 icode = (int) this_optab->handlers[(int) mode].insn_code;
9259 if (icode != (int) CODE_FOR_nothing
9260 /* Make sure that OP0 is valid for operands 0 and 1
9261 of the insn we want to queue. */
9262 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9263 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9265 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9266 op1 = force_reg (mode, op1);
9268 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9270 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9272 rtx addr = (general_operand (XEXP (op0, 0), mode)
9273 ? force_reg (Pmode, XEXP (op0, 0))
9274 : copy_to_reg (XEXP (op0, 0)));
9275 rtx temp, result;
9277 op0 = replace_equiv_address (op0, addr);
9278 temp = force_reg (GET_MODE (op0), op0);
9279 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9280 op1 = force_reg (mode, op1);
9282 /* The increment queue is LIFO, thus we have to `queue'
9283 the instructions in reverse order. */
9284 enqueue_insn (op0, gen_move_insn (op0, temp));
9285 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9286 return result;
9290 /* Preincrement, or we can't increment with one simple insn. */
9291 if (post)
9292 /* Save a copy of the value before inc or dec, to return it later. */
9293 temp = value = copy_to_reg (op0);
9294 else
9295 /* Arrange to return the incremented value. */
9296 /* Copy the rtx because expand_binop will protect from the queue,
9297 and the results of that would be invalid for us to return
9298 if our caller does emit_queue before using our result. */
9299 temp = copy_rtx (value = op0);
9301 /* Increment however we can. */
9302 op1 = expand_binop (mode, this_optab, value, op1, op0,
9303 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9305 /* Make sure the value is stored into OP0. */
9306 if (op1 != op0)
9307 emit_move_insn (op0, op1);
9309 return temp;
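/* Editor's note: a minimal sketch, in plain C, of the value contract
   implemented above: a post-increment hands the caller the old value (the
   copy made before the add), a pre-increment hands back the new one.  The
   function name and the use of an int lvalue are illustrative assumptions;
   the block is disabled with #if 0.  */
#if 0
static int
expand_post_increment_sketch (int *lvalue, int amount)
{
  int old = *lvalue;        /* temp = value = copy_to_reg (op0)   */
  *lvalue = old + amount;   /* expand_binop + emit_move_insn      */
  return old;               /* the "return temp" above, POST case */
}
#endif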
9312 /* At the start of a function, record that we have no previously-pushed
9313 arguments waiting to be popped. */
9315 void
9316 init_pending_stack_adjust ()
9318 pending_stack_adjust = 0;
9321 /* When exiting from function, if safe, clear out any pending stack adjust
9322 so the adjustment won't get done.
9324 Note, if the current function calls alloca, then it must have a
9325 frame pointer regardless of the value of flag_omit_frame_pointer. */
9327 void
9328 clear_pending_stack_adjust ()
9330 #ifdef EXIT_IGNORE_STACK
9331 if (optimize > 0
9332 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9333 && EXIT_IGNORE_STACK
9334 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9335 && ! flag_inline_functions)
9337 stack_pointer_delta -= pending_stack_adjust,
9338 pending_stack_adjust = 0;
9340 #endif
9343 /* Pop any previously-pushed arguments that have not been popped yet. */
9345 void
9346 do_pending_stack_adjust ()
9348 if (inhibit_defer_pop == 0)
9350 if (pending_stack_adjust != 0)
9351 adjust_stack (GEN_INT (pending_stack_adjust));
9352 pending_stack_adjust = 0;
9356 /* Expand conditional expressions. */
9358 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9359 LABEL is an rtx of code CODE_LABEL, in this function and all the
9360 functions here. */
9362 void
9363 jumpifnot (exp, label)
9364 tree exp;
9365 rtx label;
9367 do_jump (exp, label, NULL_RTX);
9370 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9372 void
9373 jumpif (exp, label)
9374 tree exp;
9375 rtx label;
9377 do_jump (exp, NULL_RTX, label);
9380 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9381 the result is zero, or IF_TRUE_LABEL if the result is one.
9382 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9383 meaning fall through in that case.
9385 do_jump always does any pending stack adjust except when it does not
9386 actually perform a jump. An example where there is no jump
9387 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9389 This function is responsible for optimizing cases such as
9390 &&, || and comparison operators in EXP. */
9392 void
9393 do_jump (exp, if_false_label, if_true_label)
9394 tree exp;
9395 rtx if_false_label, if_true_label;
9397 enum tree_code code = TREE_CODE (exp);
9398 /* Some cases need to create a label to jump to
9399 in order to properly fall through.
9400 These cases set DROP_THROUGH_LABEL nonzero. */
9401 rtx drop_through_label = 0;
9402 rtx temp;
9403 int i;
9404 tree type;
9405 enum machine_mode mode;
9407 #ifdef MAX_INTEGER_COMPUTATION_MODE
9408 check_max_integer_computation_mode (exp);
9409 #endif
9411 emit_queue ();
9413 switch (code)
9415 case ERROR_MARK:
9416 break;
9418 case INTEGER_CST:
9419 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9420 if (temp)
9421 emit_jump (temp);
9422 break;
9424 #if 0
9425 /* This is not true with #pragma weak */
9426 case ADDR_EXPR:
9427 /* The address of something can never be zero. */
9428 if (if_true_label)
9429 emit_jump (if_true_label);
9430 break;
9431 #endif
9433 case NOP_EXPR:
9434 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9435 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9436 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9437 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9438 goto normal;
9439 case CONVERT_EXPR:
9440 /* If we are narrowing the operand, we have to do the compare in the
9441 narrower mode. */
9442 if ((TYPE_PRECISION (TREE_TYPE (exp))
9443 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9444 goto normal;
9445 case NON_LVALUE_EXPR:
9446 case REFERENCE_EXPR:
9447 case ABS_EXPR:
9448 case NEGATE_EXPR:
9449 case LROTATE_EXPR:
9450 case RROTATE_EXPR:
9451 /* These cannot change zero->non-zero or vice versa. */
9452 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9453 break;
9455 case WITH_RECORD_EXPR:
9456 /* Put the object on the placeholder list, recurse through our first
9457 operand, and pop the list. */
9458 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9459 placeholder_list);
9460 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9461 placeholder_list = TREE_CHAIN (placeholder_list);
9462 break;
9464 #if 0
9465 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9466 a test and can be longer if the test is eliminated. */
9467 case PLUS_EXPR:
9468 /* Reduce to minus. */
9469 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9470 TREE_OPERAND (exp, 0),
9471 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9472 TREE_OPERAND (exp, 1))));
9473 /* Process as MINUS. */
9474 #endif
9476 case MINUS_EXPR:
9477 /* Non-zero iff operands of minus differ. */
9478 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9479 TREE_OPERAND (exp, 0),
9480 TREE_OPERAND (exp, 1)),
9481 NE, NE, if_false_label, if_true_label);
9482 break;
9484 case BIT_AND_EXPR:
9485 /* If we are AND'ing with a small constant, do this comparison in the
9486 smallest type that fits. If the machine doesn't have comparisons
9487 that small, it will be converted back to the wider comparison.
9488 This helps if we are testing the sign bit of a narrower object.
9489 combine can't do this for us because it can't know whether a
9490 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9492 if (! SLOW_BYTE_ACCESS
9493 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9494 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9495 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9496 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9497 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9498 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9499 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9500 != CODE_FOR_nothing))
9502 do_jump (convert (type, exp), if_false_label, if_true_label);
9503 break;
9505 goto normal;
9507 case TRUTH_NOT_EXPR:
9508 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9509 break;
9511 case TRUTH_ANDIF_EXPR:
9512 if (if_false_label == 0)
9513 if_false_label = drop_through_label = gen_label_rtx ();
9514 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9515 start_cleanup_deferral ();
9516 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9517 end_cleanup_deferral ();
9518 break;
9520 case TRUTH_ORIF_EXPR:
9521 if (if_true_label == 0)
9522 if_true_label = drop_through_label = gen_label_rtx ();
9523 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9524 start_cleanup_deferral ();
9525 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9526 end_cleanup_deferral ();
9527 break;
9529 case COMPOUND_EXPR:
9530 push_temp_slots ();
9531 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9532 preserve_temp_slots (NULL_RTX);
9533 free_temp_slots ();
9534 pop_temp_slots ();
9535 emit_queue ();
9536 do_pending_stack_adjust ();
9537 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9538 break;
9540 case COMPONENT_REF:
9541 case BIT_FIELD_REF:
9542 case ARRAY_REF:
9543 case ARRAY_RANGE_REF:
9545 HOST_WIDE_INT bitsize, bitpos;
9546 int unsignedp;
9547 enum machine_mode mode;
9548 tree type;
9549 tree offset;
9550 int volatilep = 0;
9552 /* Get description of this reference. We don't actually care
9553 about the underlying object here. */
9554 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9555 &unsignedp, &volatilep);
9557 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9558 if (! SLOW_BYTE_ACCESS
9559 && type != 0 && bitsize >= 0
9560 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9561 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9562 != CODE_FOR_nothing))
9564 do_jump (convert (type, exp), if_false_label, if_true_label);
9565 break;
9567 goto normal;
9570 case COND_EXPR:
9571 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9572 if (integer_onep (TREE_OPERAND (exp, 1))
9573 && integer_zerop (TREE_OPERAND (exp, 2)))
9574 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9576 else if (integer_zerop (TREE_OPERAND (exp, 1))
9577 && integer_onep (TREE_OPERAND (exp, 2)))
9578 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9580 else
9582 rtx label1 = gen_label_rtx ();
9583 drop_through_label = gen_label_rtx ();
9585 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9587 start_cleanup_deferral ();
9588 /* Now the THEN-expression. */
9589 do_jump (TREE_OPERAND (exp, 1),
9590 if_false_label ? if_false_label : drop_through_label,
9591 if_true_label ? if_true_label : drop_through_label);
9592 /* In case the do_jump just above never jumps. */
9593 do_pending_stack_adjust ();
9594 emit_label (label1);
9596 /* Now the ELSE-expression. */
9597 do_jump (TREE_OPERAND (exp, 2),
9598 if_false_label ? if_false_label : drop_through_label,
9599 if_true_label ? if_true_label : drop_through_label);
9600 end_cleanup_deferral ();
9602 break;
9604 case EQ_EXPR:
9606 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9608 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9609 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9611 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9612 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9613 do_jump
9614 (fold
9615 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9616 fold (build (EQ_EXPR, TREE_TYPE (exp),
9617 fold (build1 (REALPART_EXPR,
9618 TREE_TYPE (inner_type),
9619 exp0)),
9620 fold (build1 (REALPART_EXPR,
9621 TREE_TYPE (inner_type),
9622 exp1)))),
9623 fold (build (EQ_EXPR, TREE_TYPE (exp),
9624 fold (build1 (IMAGPART_EXPR,
9625 TREE_TYPE (inner_type),
9626 exp0)),
9627 fold (build1 (IMAGPART_EXPR,
9628 TREE_TYPE (inner_type),
9629 exp1)))))),
9630 if_false_label, if_true_label);
9633 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9634 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9636 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9637 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9638 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9639 else
9640 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9641 break;
9644 case NE_EXPR:
9646 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9648 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9649 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9651 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9652 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9653 do_jump
9654 (fold
9655 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9656 fold (build (NE_EXPR, TREE_TYPE (exp),
9657 fold (build1 (REALPART_EXPR,
9658 TREE_TYPE (inner_type),
9659 exp0)),
9660 fold (build1 (REALPART_EXPR,
9661 TREE_TYPE (inner_type),
9662 exp1)))),
9663 fold (build (NE_EXPR, TREE_TYPE (exp),
9664 fold (build1 (IMAGPART_EXPR,
9665 TREE_TYPE (inner_type),
9666 exp0)),
9667 fold (build1 (IMAGPART_EXPR,
9668 TREE_TYPE (inner_type),
9669 exp1)))))),
9670 if_false_label, if_true_label);
9673 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9674 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9676 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9677 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9678 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9679 else
9680 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9681 break;
9684 case LT_EXPR:
9685 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9686 if (GET_MODE_CLASS (mode) == MODE_INT
9687 && ! can_compare_p (LT, mode, ccp_jump))
9688 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9689 else
9690 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9691 break;
9693 case LE_EXPR:
9694 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9695 if (GET_MODE_CLASS (mode) == MODE_INT
9696 && ! can_compare_p (LE, mode, ccp_jump))
9697 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9698 else
9699 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9700 break;
9702 case GT_EXPR:
9703 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9704 if (GET_MODE_CLASS (mode) == MODE_INT
9705 && ! can_compare_p (GT, mode, ccp_jump))
9706 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9707 else
9708 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9709 break;
9711 case GE_EXPR:
9712 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9713 if (GET_MODE_CLASS (mode) == MODE_INT
9714 && ! can_compare_p (GE, mode, ccp_jump))
9715 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9716 else
9717 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9718 break;
9720 case UNORDERED_EXPR:
9721 case ORDERED_EXPR:
9723 enum rtx_code cmp, rcmp;
9724 int do_rev;
9726 if (code == UNORDERED_EXPR)
9727 cmp = UNORDERED, rcmp = ORDERED;
9728 else
9729 cmp = ORDERED, rcmp = UNORDERED;
9730 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9732 do_rev = 0;
9733 if (! can_compare_p (cmp, mode, ccp_jump)
9734 && (can_compare_p (rcmp, mode, ccp_jump)
9735 /* If the target doesn't provide either UNORDERED or ORDERED
9736 comparisons, canonicalize on UNORDERED for the library. */
9737 || rcmp == UNORDERED))
9738 do_rev = 1;
9740 if (! do_rev)
9741 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9742 else
9743 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9745 break;
9748 enum rtx_code rcode1;
9749 enum tree_code tcode2;
9751 case UNLT_EXPR:
9752 rcode1 = UNLT;
9753 tcode2 = LT_EXPR;
9754 goto unordered_bcc;
9755 case UNLE_EXPR:
9756 rcode1 = UNLE;
9757 tcode2 = LE_EXPR;
9758 goto unordered_bcc;
9759 case UNGT_EXPR:
9760 rcode1 = UNGT;
9761 tcode2 = GT_EXPR;
9762 goto unordered_bcc;
9763 case UNGE_EXPR:
9764 rcode1 = UNGE;
9765 tcode2 = GE_EXPR;
9766 goto unordered_bcc;
9767 case UNEQ_EXPR:
9768 rcode1 = UNEQ;
9769 tcode2 = EQ_EXPR;
9770 goto unordered_bcc;
9772 unordered_bcc:
9773 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9774 if (can_compare_p (rcode1, mode, ccp_jump))
9775 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9776 if_true_label);
9777 else
9779 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9780 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9781 tree cmp0, cmp1;
9783 /* If the target doesn't support combined unordered
9784 compares, decompose into UNORDERED + comparison. */
9785 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9786 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9787 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9788 do_jump (exp, if_false_label, if_true_label);
9791 break;
9793 /* Special case:
9794 __builtin_expect (<test>, 0) and
9795 __builtin_expect (<test>, 1)
9797 We need to do this here, so that <test> is not converted to a SCC
9798 operation on machines that use condition code registers and COMPARE
9799 like the PowerPC, and then the jump is done based on whether the SCC
9800 operation produced a 1 or 0. */
9801 case CALL_EXPR:
9802 /* Check for a built-in function. */
9803 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9805 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9806 tree arglist = TREE_OPERAND (exp, 1);
9808 if (TREE_CODE (fndecl) == FUNCTION_DECL
9809 && DECL_BUILT_IN (fndecl)
9810 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9811 && arglist != NULL_TREE
9812 && TREE_CHAIN (arglist) != NULL_TREE)
9814 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9815 if_true_label);
9817 if (seq != NULL_RTX)
9819 emit_insn (seq);
9820 return;
9824 /* fall through and generate the normal code. */
9826 default:
9827 normal:
9828 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9829 #if 0
9830 /* This is not needed any more and causes poor code since it causes
9831 comparisons and tests from non-SI objects to have different code
9832 sequences. */
9833 /* Copy to register to avoid generating bad insns by cse
9834 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9835 if (!cse_not_expected && GET_CODE (temp) == MEM)
9836 temp = copy_to_reg (temp);
9837 #endif
9838 do_pending_stack_adjust ();
9839 /* Do any postincrements in the expression that was tested. */
9840 emit_queue ();
9842 if (GET_CODE (temp) == CONST_INT
9843 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9844 || GET_CODE (temp) == LABEL_REF)
9846 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9847 if (target)
9848 emit_jump (target);
9850 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9851 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9852 /* Note swapping the labels gives us not-equal. */
9853 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9854 else if (GET_MODE (temp) != VOIDmode)
9855 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9856 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9857 GET_MODE (temp), NULL_RTX,
9858 if_false_label, if_true_label);
9859 else
9860 abort ();
9863 if (drop_through_label)
9865 /* If do_jump produces code that might be jumped around,
9866 do any stack adjusts from that code, before the place
9867 where control merges in. */
9868 do_pending_stack_adjust ();
9869 emit_label (drop_through_label);
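/* Editor's note: a small self-contained sketch of the label structure
   do_jump builds for TRUTH_ANDIF_EXPR when the caller supplies no false
   label; the function is invented for illustration and disabled with #if 0.
   Testing "a && b" for a jump becomes two conditional jumps sharing a
   drop-through label.  */
#if 0
static int
andif_as_jumps (int a, int b)
{
  if (! a)
    goto drop_through;      /* first do_jump: false goes to the drop-through label */
  if (b)
    goto if_true;           /* second do_jump: true goes to the caller's true label */
 drop_through:
  return 0;                 /* control merges here; no jump taken */
 if_true:
  return 1;
}
#endif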
9873 /* Given a comparison expression EXP for values too wide to be compared
9874 with one insn, test the comparison and jump to the appropriate label.
9875 The code of EXP is ignored; we always test GT if SWAP is 0,
9876 and LT if SWAP is 1. */
9878 static void
9879 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9880 tree exp;
9881 int swap;
9882 rtx if_false_label, if_true_label;
9884 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9885 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9886 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9887 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9889 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9892 /* Compare OP0 with OP1, word at a time, in mode MODE.
9893 UNSIGNEDP says to do unsigned comparison.
9894 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9896 void
9897 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9898 enum machine_mode mode;
9899 int unsignedp;
9900 rtx op0, op1;
9901 rtx if_false_label, if_true_label;
9903 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9904 rtx drop_through_label = 0;
9905 int i;
9907 if (! if_true_label || ! if_false_label)
9908 drop_through_label = gen_label_rtx ();
9909 if (! if_true_label)
9910 if_true_label = drop_through_label;
9911 if (! if_false_label)
9912 if_false_label = drop_through_label;
9914 /* Compare a word at a time, high order first. */
9915 for (i = 0; i < nwords; i++)
9917 rtx op0_word, op1_word;
9919 if (WORDS_BIG_ENDIAN)
9921 op0_word = operand_subword_force (op0, i, mode);
9922 op1_word = operand_subword_force (op1, i, mode);
9924 else
9926 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9927 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9930 /* All but high-order word must be compared as unsigned. */
9931 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9932 (unsignedp || i > 0), word_mode, NULL_RTX,
9933 NULL_RTX, if_true_label);
9935 /* Consider lower words only if these are equal. */
9936 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9937 NULL_RTX, NULL_RTX, if_false_label);
9940 if (if_false_label)
9941 emit_jump (if_false_label);
9942 if (drop_through_label)
9943 emit_label (drop_through_label);
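/* Editor's note: a standalone sketch of the jump structure emitted above
   for a word-at-a-time unsigned greater-than test, highest-order word
   first.  Words are assumed to be stored most-significant first; the helper
   is illustrative only and disabled with #if 0.  */
#if 0
static int
multiword_gtu (const unsigned long *op0, const unsigned long *op1, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (op0[i] > op1[i])      /* the GT branch to if_true_label */
        return 1;
      if (op0[i] != op1[i])     /* the NE branch to if_false_label */
        return 0;
      /* Words are equal: consider the next lower word.  */
    }
  return 0;                     /* all words equal: not greater */
}
#endif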
9946 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9947 with one insn, test the comparison and jump to the appropriate label. */
9949 static void
9950 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9951 tree exp;
9952 rtx if_false_label, if_true_label;
9954 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9955 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9956 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9957 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9958 int i;
9959 rtx drop_through_label = 0;
9961 if (! if_false_label)
9962 drop_through_label = if_false_label = gen_label_rtx ();
9964 for (i = 0; i < nwords; i++)
9965 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9966 operand_subword_force (op1, i, mode),
9967 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9968 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9970 if (if_true_label)
9971 emit_jump (if_true_label);
9972 if (drop_through_label)
9973 emit_label (drop_through_label);
9976 /* Jump according to whether OP0 is 0.
9977 We assume that OP0 has an integer mode that is too wide
9978 for the available compare insns. */
9980 void
9981 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9982 rtx op0;
9983 rtx if_false_label, if_true_label;
9985 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9986 rtx part;
9987 int i;
9988 rtx drop_through_label = 0;
9990 /* The fastest way of doing this comparison on almost any machine is to
9991 "or" all the words and compare the result. If all have to be loaded
9992 from memory and this is a very wide item, it's possible this may
9993 be slower, but that's highly unlikely. */
9995 part = gen_reg_rtx (word_mode);
9996 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9997 for (i = 1; i < nwords && part != 0; i++)
9998 part = expand_binop (word_mode, ior_optab, part,
9999 operand_subword_force (op0, i, GET_MODE (op0)),
10000 part, 1, OPTAB_WIDEN);
10002 if (part != 0)
10004 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10005 NULL_RTX, if_false_label, if_true_label);
10007 return;
10010 /* If we couldn't do the "or" simply, do this with a series of compares. */
10011 if (! if_false_label)
10012 drop_through_label = if_false_label = gen_label_rtx ();
10014 for (i = 0; i < nwords; i++)
10015 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10016 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10017 if_false_label, NULL_RTX);
10019 if (if_true_label)
10020 emit_jump (if_true_label);
10022 if (drop_through_label)
10023 emit_label (drop_through_label);
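/* Editor's note: a standalone sketch of the fast path above, which ORs all
   the words together and compares the single result with zero.  The helper
   and its word layout are assumptions for illustration, disabled with
   #if 0.  */
#if 0
static int
multiword_is_zero (const unsigned long *op0, int nwords)
{
  unsigned long part = op0[0];
  int i;

  for (i = 1; i < nwords; i++)
    part |= op0[i];             /* the ior_optab step */
  return part == 0;             /* one compare against const0_rtx */
}
#endif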
10026 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10027 (including code to compute the values to be compared)
10028 and set (CC0) according to the result.
10029 The decision as to signed or unsigned comparison must be made by the caller.
10031 We force a stack adjustment unless there are currently
10032 things pushed on the stack that aren't yet used.
10034 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10035 compared. */
10037 rtx
10038 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10039 rtx op0, op1;
10040 enum rtx_code code;
10041 int unsignedp;
10042 enum machine_mode mode;
10043 rtx size;
10045 rtx tem;
10047 /* If one operand is constant, make it the second one. Only do this
10048 if the other operand is not constant as well. */
10050 if (swap_commutative_operands_p (op0, op1))
10052 tem = op0;
10053 op0 = op1;
10054 op1 = tem;
10055 code = swap_condition (code);
10058 if (flag_force_mem)
10060 op0 = force_not_mem (op0);
10061 op1 = force_not_mem (op1);
10064 do_pending_stack_adjust ();
10066 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10067 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10068 return tem;
10070 #if 0
10071 /* There's no need to do this now that combine.c can eliminate lots of
10072 sign extensions. This can be less efficient in certain cases on other
10073 machines. */
10075 /* If this is a signed equality comparison, we can do it as an
10076 unsigned comparison since zero-extension is cheaper than sign
10077 extension and comparisons with zero are done as unsigned. This is
10078 the case even on machines that can do fast sign extension, since
10079 zero-extension is easier to combine with other operations than
10080 sign-extension is. If we are comparing against a constant, we must
10081 convert it to what it would look like unsigned. */
10082 if ((code == EQ || code == NE) && ! unsignedp
10083 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10085 if (GET_CODE (op1) == CONST_INT
10086 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10087 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10088 unsignedp = 1;
10090 #endif
10092 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10094 #if HAVE_cc0
10095 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10096 #else
10097 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10098 #endif
10101 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10102 The decision as to signed or unsigned comparison must be made by the caller.
10104 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10105 compared. */
10107 void
10108 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10109 if_false_label, if_true_label)
10110 rtx op0, op1;
10111 enum rtx_code code;
10112 int unsignedp;
10113 enum machine_mode mode;
10114 rtx size;
10115 rtx if_false_label, if_true_label;
10117 rtx tem;
10118 int dummy_true_label = 0;
10120 /* Reverse the comparison if that is safe and we want to jump if it is
10121 false. */
10122 if (! if_true_label && ! FLOAT_MODE_P (mode))
10124 if_true_label = if_false_label;
10125 if_false_label = 0;
10126 code = reverse_condition (code);
10129 /* If one operand is constant, make it the second one. Only do this
10130 if the other operand is not constant as well. */
10132 if (swap_commutative_operands_p (op0, op1))
10134 tem = op0;
10135 op0 = op1;
10136 op1 = tem;
10137 code = swap_condition (code);
10140 if (flag_force_mem)
10142 op0 = force_not_mem (op0);
10143 op1 = force_not_mem (op1);
10146 do_pending_stack_adjust ();
10148 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10149 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10151 if (tem == const_true_rtx)
10153 if (if_true_label)
10154 emit_jump (if_true_label);
10156 else
10158 if (if_false_label)
10159 emit_jump (if_false_label);
10161 return;
10164 #if 0
10165 /* There's no need to do this now that combine.c can eliminate lots of
10166 sign extensions. This can be less efficient in certain cases on other
10167 machines. */
10169 /* If this is a signed equality comparison, we can do it as an
10170 unsigned comparison since zero-extension is cheaper than sign
10171 extension and comparisons with zero are done as unsigned. This is
10172 the case even on machines that can do fast sign extension, since
10173 zero-extension is easier to combine with other operations than
10174 sign-extension is. If we are comparing against a constant, we must
10175 convert it to what it would look like unsigned. */
10176 if ((code == EQ || code == NE) && ! unsignedp
10177 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10179 if (GET_CODE (op1) == CONST_INT
10180 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10181 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10182 unsignedp = 1;
10184 #endif
10186 if (! if_true_label)
10188 dummy_true_label = 1;
10189 if_true_label = gen_label_rtx ();
10192 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10193 if_true_label);
10195 if (if_false_label)
10196 emit_jump (if_false_label);
10197 if (dummy_true_label)
10198 emit_label (if_true_label);
10201 /* Generate code for a comparison expression EXP (including code to compute
10202 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10203 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10204 generated code will drop through.
10205 SIGNED_CODE should be the rtx operation for this comparison for
10206 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10208 We force a stack adjustment unless there are currently
10209 things pushed on the stack that aren't yet used. */
10211 static void
10212 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10213 if_true_label)
10214 tree exp;
10215 enum rtx_code signed_code, unsigned_code;
10216 rtx if_false_label, if_true_label;
10218 rtx op0, op1;
10219 tree type;
10220 enum machine_mode mode;
10221 int unsignedp;
10222 enum rtx_code code;
10224 /* Don't crash if the comparison was erroneous. */
10225 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10226 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10227 return;
10229 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10230 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10231 return;
10233 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10234 mode = TYPE_MODE (type);
10235 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10236 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10237 || (GET_MODE_BITSIZE (mode)
10238 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10239 1)))))))
10241 /* op0 might have been replaced by promoted constant, in which
10242 case the type of second argument should be used. */
10243 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10244 mode = TYPE_MODE (type);
10246 unsignedp = TREE_UNSIGNED (type);
10247 code = unsignedp ? unsigned_code : signed_code;
10249 #ifdef HAVE_canonicalize_funcptr_for_compare
10250 /* If function pointers need to be "canonicalized" before they can
10251 be reliably compared, then canonicalize them. */
10252 if (HAVE_canonicalize_funcptr_for_compare
10253 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10254 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10255 == FUNCTION_TYPE))
10257 rtx new_op0 = gen_reg_rtx (mode);
10259 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10260 op0 = new_op0;
10263 if (HAVE_canonicalize_funcptr_for_compare
10264 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10265 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10266 == FUNCTION_TYPE))
10268 rtx new_op1 = gen_reg_rtx (mode);
10270 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10271 op1 = new_op1;
10273 #endif
10275 /* Do any postincrements in the expression that was tested. */
10276 emit_queue ();
10278 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10279 ((mode == BLKmode)
10280 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10281 if_false_label, if_true_label);
10284 /* Generate code to calculate EXP using a store-flag instruction
10285 and return an rtx for the result. EXP is either a comparison
10286 or a TRUTH_NOT_EXPR whose operand is a comparison.
10288 If TARGET is nonzero, store the result there if convenient.
10290 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10291 cheap.
10293 Return zero if there is no suitable set-flag instruction
10294 available on this machine.
10296 Once expand_expr has been called on the arguments of the comparison,
10297 we are committed to doing the store flag, since it is not safe to
10298 re-evaluate the expression. We emit the store-flag insn by calling
10299 emit_store_flag, but only expand the arguments if we have a reason
10300 to believe that emit_store_flag will be successful. If we think that
10301 it will, but it isn't, we have to simulate the store-flag with a
10302 set/jump/set sequence. */
10304 static rtx
10305 do_store_flag (exp, target, mode, only_cheap)
10306 tree exp;
10307 rtx target;
10308 enum machine_mode mode;
10309 int only_cheap;
10311 enum rtx_code code;
10312 tree arg0, arg1, type;
10313 tree tem;
10314 enum machine_mode operand_mode;
10315 int invert = 0;
10316 int unsignedp;
10317 rtx op0, op1;
10318 enum insn_code icode;
10319 rtx subtarget = target;
10320 rtx result, label;
10322 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10323 result at the end. We can't simply invert the test since it would
10324 have already been inverted if it were valid. This case occurs for
10325 some floating-point comparisons. */
10327 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10328 invert = 1, exp = TREE_OPERAND (exp, 0);
10330 arg0 = TREE_OPERAND (exp, 0);
10331 arg1 = TREE_OPERAND (exp, 1);
10333 /* Don't crash if the comparison was erroneous. */
10334 if (arg0 == error_mark_node || arg1 == error_mark_node)
10335 return const0_rtx;
10337 type = TREE_TYPE (arg0);
10338 operand_mode = TYPE_MODE (type);
10339 unsignedp = TREE_UNSIGNED (type);
10341 /* We won't bother with BLKmode store-flag operations because it would mean
10342 passing a lot of information to emit_store_flag. */
10343 if (operand_mode == BLKmode)
10344 return 0;
10346 /* We won't bother with store-flag operations involving function pointers
10347 when function pointers must be canonicalized before comparisons. */
10348 #ifdef HAVE_canonicalize_funcptr_for_compare
10349 if (HAVE_canonicalize_funcptr_for_compare
10350 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10351 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10352 == FUNCTION_TYPE))
10353 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10354 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10355 == FUNCTION_TYPE))))
10356 return 0;
10357 #endif
10359 STRIP_NOPS (arg0);
10360 STRIP_NOPS (arg1);
10362 /* Get the rtx comparison code to use. We know that EXP is a comparison
10363 operation of some type. Some comparisons against 1 and -1 can be
10364 converted to comparisons with zero. Do so here so that the tests
10365 below will be aware that we have a comparison with zero. These
10366 tests will not catch constants in the first operand, but constants
10367 are rarely passed as the first operand. */
10369 switch (TREE_CODE (exp))
10371 case EQ_EXPR:
10372 code = EQ;
10373 break;
10374 case NE_EXPR:
10375 code = NE;
10376 break;
10377 case LT_EXPR:
10378 if (integer_onep (arg1))
10379 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10380 else
10381 code = unsignedp ? LTU : LT;
10382 break;
10383 case LE_EXPR:
10384 if (! unsignedp && integer_all_onesp (arg1))
10385 arg1 = integer_zero_node, code = LT;
10386 else
10387 code = unsignedp ? LEU : LE;
10388 break;
10389 case GT_EXPR:
10390 if (! unsignedp && integer_all_onesp (arg1))
10391 arg1 = integer_zero_node, code = GE;
10392 else
10393 code = unsignedp ? GTU : GT;
10394 break;
10395 case GE_EXPR:
10396 if (integer_onep (arg1))
10397 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10398 else
10399 code = unsignedp ? GEU : GE;
10400 break;
10402 case UNORDERED_EXPR:
10403 code = UNORDERED;
10404 break;
10405 case ORDERED_EXPR:
10406 code = ORDERED;
10407 break;
10408 case UNLT_EXPR:
10409 code = UNLT;
10410 break;
10411 case UNLE_EXPR:
10412 code = UNLE;
10413 break;
10414 case UNGT_EXPR:
10415 code = UNGT;
10416 break;
10417 case UNGE_EXPR:
10418 code = UNGE;
10419 break;
10420 case UNEQ_EXPR:
10421 code = UNEQ;
10422 break;
10424 default:
10425 abort ();
10428 /* Put a constant second. */
10429 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10431 tem = arg0; arg0 = arg1; arg1 = tem;
10432 code = swap_condition (code);
10435 /* If this is an equality or inequality test of a single bit, we can
10436 do this by shifting the bit being tested to the low-order bit and
10437 masking the result with the constant 1. If the condition was EQ,
10438 we xor it with 1. This does not require an scc insn and is faster
10439 than an scc insn even if we have it. */
10441 if ((code == NE || code == EQ)
10442 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10443 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10445 tree inner = TREE_OPERAND (arg0, 0);
10446 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10447 int ops_unsignedp;
10449 /* If INNER is a right shift of a constant and it plus BITNUM does
10450 not overflow, adjust BITNUM and INNER. */
10452 if (TREE_CODE (inner) == RSHIFT_EXPR
10453 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10454 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10455 && bitnum < TYPE_PRECISION (type)
10456 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10457 bitnum - TYPE_PRECISION (type)))
10459 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10460 inner = TREE_OPERAND (inner, 0);
10463 /* If we are going to be able to omit the AND below, we must do our
10464 operations as unsigned. If we must use the AND, we have a choice.
10465 Normally unsigned is faster, but for some machines signed is. */
10466 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10467 #ifdef LOAD_EXTEND_OP
10468 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10469 #else
10471 #endif
10474 if (! get_subtarget (subtarget)
10475 || GET_MODE (subtarget) != operand_mode
10476 || ! safe_from_p (subtarget, inner, 1))
10477 subtarget = 0;
10479 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10481 if (bitnum != 0)
10482 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10483 size_int (bitnum), subtarget, ops_unsignedp);
10485 if (GET_MODE (op0) != mode)
10486 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10488 if ((code == EQ && ! invert) || (code == NE && invert))
10489 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10490 ops_unsignedp, OPTAB_LIB_WIDEN);
10492 /* Put the AND last so it can combine with more things. */
10493 if (bitnum != TYPE_PRECISION (type) - 1)
10494 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10496 return op0;
10499 /* Now see if we are likely to be able to do this. Return if not. */
10500 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10501 return 0;
10503 icode = setcc_gen_code[(int) code];
10504 if (icode == CODE_FOR_nothing
10505 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10507 /* We can only do this if it is one of the special cases that
10508 can be handled without an scc insn. */
10509 if ((code == LT && integer_zerop (arg1))
10510 || (! only_cheap && code == GE && integer_zerop (arg1)))
10512 else if (BRANCH_COST >= 0
10513 && ! only_cheap && (code == NE || code == EQ)
10514 && TREE_CODE (type) != REAL_TYPE
10515 && ((abs_optab->handlers[(int) operand_mode].insn_code
10516 != CODE_FOR_nothing)
10517 || (ffs_optab->handlers[(int) operand_mode].insn_code
10518 != CODE_FOR_nothing)))
10520 else
10521 return 0;
10524 if (! get_subtarget (target)
10525 || GET_MODE (subtarget) != operand_mode
10526 || ! safe_from_p (subtarget, arg1, 1))
10527 subtarget = 0;
10529 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10530 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10532 if (target == 0)
10533 target = gen_reg_rtx (mode);
10535 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10536 because, if the emit_store_flag does anything it will succeed and
10537 OP0 and OP1 will not be used subsequently. */
10539 result = emit_store_flag (target, code,
10540 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10541 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10542 operand_mode, unsignedp, 1);
10544 if (result)
10546 if (invert)
10547 result = expand_binop (mode, xor_optab, result, const1_rtx,
10548 result, 0, OPTAB_LIB_WIDEN);
10549 return result;
10552 /* If this failed, we have to do this with set/compare/jump/set code. */
10553 if (GET_CODE (target) != REG
10554 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10555 target = gen_reg_rtx (GET_MODE (target));
10557 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10558 result = compare_from_rtx (op0, op1, code, unsignedp,
10559 operand_mode, NULL_RTX);
10560 if (GET_CODE (result) == CONST_INT)
10561 return (((result == const0_rtx && ! invert)
10562 || (result != const0_rtx && invert))
10563 ? const0_rtx : const1_rtx);
10565 /* The code of RESULT may not match CODE if compare_from_rtx
10566 decided to swap its operands and reverse the original code.
10568 We know that compare_from_rtx returns either a CONST_INT or
10569 a new comparison code, so it is safe to just extract the
10570 code from RESULT. */
10571 code = GET_CODE (result);
10573 label = gen_label_rtx ();
10574 if (bcc_gen_fctn[(int) code] == 0)
10575 abort ();
10577 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10578 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10579 emit_label (label);
10581 return target;
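/* Editor's note: the single-bit store-flag trick used in do_store_flag,
   restated as plain C.  Assuming bit BITNUM of X is tested against zero,
   the NE result is (x >> bitnum) & 1 and the EQ result XORs that with 1;
   no scc instruction is needed.  The helper is invented for illustration
   and disabled with #if 0.  */
#if 0
static unsigned int
single_bit_flag (unsigned int x, int bitnum, int want_eq)
{
  unsigned int flag = (x >> bitnum) & 1;   /* shift, then mask with 1 */

  return want_eq ? flag ^ 1 : flag;        /* invert for the EQ form */
}
#endif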
10585 /* Stubs in case we haven't got a casesi insn. */
10586 #ifndef HAVE_casesi
10587 # define HAVE_casesi 0
10588 # define gen_casesi(a, b, c, d, e) (0)
10589 # define CODE_FOR_casesi CODE_FOR_nothing
10590 #endif
10592 /* If the machine does not have a case insn that compares the bounds,
10593 this means extra overhead for dispatch tables, which raises the
10594 threshold for using them. */
10595 #ifndef CASE_VALUES_THRESHOLD
10596 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10597 #endif /* CASE_VALUES_THRESHOLD */
10599 unsigned int
10600 case_values_threshold ()
10602 return CASE_VALUES_THRESHOLD;
10605 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10606 0 otherwise (i.e. if there is no casesi instruction). */
10607 int
10608 try_casesi (index_type, index_expr, minval, range,
10609 table_label, default_label)
10610 tree index_type, index_expr, minval, range;
10611 rtx table_label ATTRIBUTE_UNUSED;
10612 rtx default_label;
10614 enum machine_mode index_mode = SImode;
10615 int index_bits = GET_MODE_BITSIZE (index_mode);
10616 rtx op1, op2, index;
10617 enum machine_mode op_mode;
10619 if (! HAVE_casesi)
10620 return 0;
10622 /* Convert the index to SImode. */
10623 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10625 enum machine_mode omode = TYPE_MODE (index_type);
10626 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10628 /* We must handle the endpoints in the original mode. */
10629 index_expr = build (MINUS_EXPR, index_type,
10630 index_expr, minval);
10631 minval = integer_zero_node;
10632 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10633 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10634 omode, 1, default_label);
10635 /* Now we can safely truncate. */
10636 index = convert_to_mode (index_mode, index, 0);
10638 else
10640 if (TYPE_MODE (index_type) != index_mode)
10642 index_expr = convert ((*lang_hooks.types.type_for_size)
10643 (index_bits, 0), index_expr);
10644 index_type = TREE_TYPE (index_expr);
10647 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10649 emit_queue ();
10650 index = protect_from_queue (index, 0);
10651 do_pending_stack_adjust ();
10653 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10654 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10655 (index, op_mode))
10656 index = copy_to_mode_reg (op_mode, index);
10658 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10660 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10661 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10662 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10663 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10664 (op1, op_mode))
10665 op1 = copy_to_mode_reg (op_mode, op1);
10667 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10669 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10670 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10671 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10672 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10673 (op2, op_mode))
10674 op2 = copy_to_mode_reg (op_mode, op2);
10676 emit_jump_insn (gen_casesi (index, op1, op2,
10677 table_label, default_label));
10678 return 1;
10681 /* Attempt to generate a tablejump instruction; same concept. */
10682 #ifndef HAVE_tablejump
10683 #define HAVE_tablejump 0
10684 #define gen_tablejump(x, y) (0)
10685 #endif
10687 /* Subroutine of the next function.
10689 INDEX is the value being switched on, with the lowest value
10690 in the table already subtracted.
10691 MODE is its expected mode (needed if INDEX is constant).
10692 RANGE is the length of the jump table.
10693 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10695 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10696 index value is out of range. */
10698 static void
10699 do_tablejump (index, mode, range, table_label, default_label)
10700 rtx index, range, table_label, default_label;
10701 enum machine_mode mode;
10703 rtx temp, vector;
10705 /* Do an unsigned comparison (in the proper mode) between the index
10706 expression and the value which represents the length of the range.
10707 Since we just finished subtracting the lower bound of the range
10708 from the index expression, this comparison allows us to simultaneously
10709 check that the original index expression value is both greater than
10710 or equal to the minimum value of the range and less than or equal to
10711 the maximum value of the range. */
10713 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10714 default_label);
10716 /* If index is in range, it must fit in Pmode.
10717 Convert to Pmode so we can index with it. */
10718 if (mode != Pmode)
10719 index = convert_to_mode (Pmode, index, 1);
10721 /* Don't let a MEM slip through, because then INDEX that comes
10722 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10723 and break_out_memory_refs will go to work on it and mess it up. */
10724 #ifdef PIC_CASE_VECTOR_ADDRESS
10725 if (flag_pic && GET_CODE (index) != REG)
10726 index = copy_to_mode_reg (Pmode, index);
10727 #endif
10729 /* If flag_force_addr were to affect this address
10730 it could interfere with the tricky assumptions made
10731 about addresses that contain label-refs,
10732 which may be valid only very near the tablejump itself. */
10733 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10734 GET_MODE_SIZE, because this indicates how large insns are. The other
10735 uses should all be Pmode, because they are addresses. This code
10736 could fail if addresses and insns are not the same size. */
10737 index = gen_rtx_PLUS (Pmode,
10738 gen_rtx_MULT (Pmode, index,
10739 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10740 gen_rtx_LABEL_REF (Pmode, table_label));
10741 #ifdef PIC_CASE_VECTOR_ADDRESS
10742 if (flag_pic)
10743 index = PIC_CASE_VECTOR_ADDRESS (index);
10744 else
10745 #endif
10746 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10747 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10748 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10749 RTX_UNCHANGING_P (vector) = 1;
10750 convert_move (temp, vector, 0);
10752 emit_jump_insn (gen_tablejump (temp, table_label));
10754 /* If we are generating PIC code or if the table is PC-relative, the
10755 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10756 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10757 emit_barrier ();
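/* Editor's note: a sketch of the single bounds check emitted at the top of
   do_tablejump.  With the table minimum already subtracted, one unsigned
   comparison against the table length rejects values that were below the
   minimum (they wrap around to huge unsigned values) as well as values
   above the maximum.  The helper and its use of unsigned long arithmetic
   are illustrative assumptions; disabled with #if 0.  */
#if 0
static int
in_case_range (unsigned long value, unsigned long minval, unsigned long range)
{
  unsigned long index = value - minval;   /* modular if VALUE < MINVAL */

  return index <= range;                  /* GTU -> default_label otherwise */
}
#endif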
10760 int
10761 try_tablejump (index_type, index_expr, minval, range,
10762 table_label, default_label)
10763 tree index_type, index_expr, minval, range;
10764 rtx table_label, default_label;
10766 rtx index;
10768 if (! HAVE_tablejump)
10769 return 0;
10771 index_expr = fold (build (MINUS_EXPR, index_type,
10772 convert (index_type, index_expr),
10773 convert (index_type, minval)));
10774 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10775 emit_queue ();
10776 index = protect_from_queue (index, 0);
10777 do_pending_stack_adjust ();
10779 do_tablejump (index, TYPE_MODE (index_type),
10780 convert_modes (TYPE_MODE (index_type),
10781 TYPE_MODE (TREE_TYPE (range)),
10782 expand_expr (range, NULL_RTX,
10783 VOIDmode, 0),
10784 TREE_UNSIGNED (TREE_TYPE (range))),
10785 table_label, default_label);
10786 return 1;