1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
50 /* Decide whether a function's arguments should be processed
51 from first to last or from last to first.
53 They should if the stack and args grow in opposite directions, but
54 only if we have push insns. */
56 #ifdef PUSH_ROUNDING
58 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59 #define PUSH_ARGS_REVERSED /* If it's last to first. */
60 #endif
62 #endif
64 #ifndef STACK_PUSH_CODE
65 #ifdef STACK_GROWS_DOWNWARD
66 #define STACK_PUSH_CODE PRE_DEC
67 #else
68 #define STACK_PUSH_CODE PRE_INC
69 #endif
70 #endif
72 /* Assume that case vectors are not pc-relative. */
73 #ifndef CASE_VECTOR_PC_RELATIVE
74 #define CASE_VECTOR_PC_RELATIVE 0
75 #endif
77 /* If this is nonzero, we do not bother generating VOLATILE
78 around volatile memory references, and we are willing to
79 output indirect addresses. If cse is to follow, we reject
80 indirect addresses so a useful potential cse is generated;
81 if it is used only once, instruction combination will produce
82 the same indirect address eventually. */
83 int cse_not_expected;
85 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
86 static tree placeholder_list = 0;
88 /* This structure is used by move_by_pieces to describe the move to
89 be performed. */
90 struct move_by_pieces
91 {
92 rtx to;
93 rtx to_addr;
94 int autinc_to;
95 int explicit_inc_to;
96 rtx from;
97 rtx from_addr;
98 int autinc_from;
99 int explicit_inc_from;
100 unsigned HOST_WIDE_INT len;
101 HOST_WIDE_INT offset;
102 int reverse;
103 };
105 /* This structure is used by store_by_pieces to describe the clear to
106 be performed. */
108 struct store_by_pieces
109 {
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 unsigned HOST_WIDE_INT len;
115 HOST_WIDE_INT offset;
116 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
117 PTR constfundata;
118 int reverse;
119 };
121 extern struct obstack permanent_obstack;
123 static rtx enqueue_insn PARAMS ((rtx, rtx));
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
125 PARAMS ((unsigned HOST_WIDE_INT,
126 unsigned int));
127 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces *));
129 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
130 enum machine_mode));
131 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
132 unsigned int));
133 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
134 unsigned int));
135 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
136 enum machine_mode,
137 struct store_by_pieces *));
138 static rtx compress_float_constant PARAMS ((rtx, rtx));
139 static rtx get_subtarget PARAMS ((rtx));
140 static int is_zeros_p PARAMS ((tree));
141 static int mostly_zeros_p PARAMS ((tree));
142 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int));
145 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
146 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
147 HOST_WIDE_INT, enum machine_mode,
148 tree, enum machine_mode, int, tree,
149 int));
150 static rtx var_rtx PARAMS ((tree));
151 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
152 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
153 static int is_aligning_offset PARAMS ((tree, tree));
154 static rtx expand_increment PARAMS ((tree, int, int));
155 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
156 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
157 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
158 rtx, rtx));
159 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
160 #ifdef PUSH_ROUNDING
161 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
162 #endif
163 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
165 /* Record for each mode whether we can move a register directly to or
166 from an object of that mode in memory. If we can't, we won't try
167 to use that mode directly when accessing a field of that mode. */
169 static char direct_load[NUM_MACHINE_MODES];
170 static char direct_store[NUM_MACHINE_MODES];
172 /* Record for each mode whether we can float-extend from memory. */
174 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
176 /* If a memory-to-memory move would take MOVE_RATIO or more simple
177 move-instruction sequences, we will do a movstr or libcall instead. */
179 #ifndef MOVE_RATIO
180 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
181 #define MOVE_RATIO 2
182 #else
183 /* If we are optimizing for space (-Os), cut down the default move ratio. */
184 #define MOVE_RATIO (optimize_size ? 3 : 15)
185 #endif
186 #endif
188 /* This macro is used to determine whether move_by_pieces should be called
189 to perform a structure copy. */
190 #ifndef MOVE_BY_PIECES_P
191 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
193 #endif
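/* Editor's worked example (not part of the original source, assumes a
   32-bit target where MOVE_MAX == 4): a word-aligned 16-byte copy costs
   move_by_pieces_ninsns (16, 32) == 4 SImode moves, so with the default
   MOVE_RATIO of 15 MOVE_BY_PIECES_P is true and the copy is expanded
   inline; a 256-byte copy would need 64 moves and instead goes through a
   movstr pattern or a library call.  */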
195 /* This array records the insn_code of insns to perform block moves. */
196 enum insn_code movstr_optab[NUM_MACHINE_MODES];
198 /* This array records the insn_code of insns to perform block clears. */
199 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
201 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
203 #ifndef SLOW_UNALIGNED_ACCESS
204 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
205 #endif
207 /* This is run once per compilation to set up which modes can be used
208 directly in memory and to initialize the block move optab. */
210 void
211 init_expr_once ()
213 rtx insn, pat;
214 enum machine_mode mode;
215 int num_clobbers;
216 rtx mem, mem1;
217 rtx reg;
219 /* Try indexing by frame ptr and try by stack ptr.
220 It is known that on the Convex the stack ptr isn't a valid index.
221 With luck, one or the other is valid on any machine. */
222 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
223 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
225 /* A scratch register we can modify in-place below to avoid
226 useless RTL allocations. */
227 reg = gen_rtx_REG (VOIDmode, -1);
229 insn = rtx_alloc (INSN);
230 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
231 PATTERN (insn) = pat;
233 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
234 mode = (enum machine_mode) ((int) mode + 1))
236 int regno;
238 direct_load[(int) mode] = direct_store[(int) mode] = 0;
239 PUT_MODE (mem, mode);
240 PUT_MODE (mem1, mode);
241 PUT_MODE (reg, mode);
243 /* See if there is some register that can be used in this mode and
244 directly loaded or stored from memory. */
246 if (mode != VOIDmode && mode != BLKmode)
247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
248 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
249 regno++)
251 if (! HARD_REGNO_MODE_OK (regno, mode))
252 continue;
254 REGNO (reg) = regno;
256 SET_SRC (pat) = mem;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
261 SET_SRC (pat) = mem1;
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
271 SET_SRC (pat) = reg;
272 SET_DEST (pat) = mem1;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
278 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
280 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
281 mode = GET_MODE_WIDER_MODE (mode))
283 enum machine_mode srcmode;
284 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
285 srcmode = GET_MODE_WIDER_MODE (srcmode))
287 enum insn_code ic;
289 ic = can_extend_p (mode, srcmode, 0);
290 if (ic == CODE_FOR_nothing)
291 continue;
293 PUT_MODE (mem, srcmode);
295 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
296 float_extend_from_mem[mode][srcmode] = true;
301 /* This is run at the start of compiling a function. */
303 void
304 init_expr ()
305 {
306 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
308 pending_chain = 0;
309 pending_stack_adjust = 0;
310 stack_pointer_delta = 0;
311 inhibit_defer_pop = 0;
312 saveregs_value = 0;
313 apply_args_value = 0;
314 forced_labels = 0;
315 }
317 /* Small sanity check that the queue is empty at the end of a function. */
319 void
320 finish_expr_for_function ()
321 {
322 if (pending_chain)
323 abort ();
324 }
326 /* Manage the queue of increment instructions to be output
327 for POSTINCREMENT_EXPR expressions, etc. */
329 /* Queue up to increment (or change) VAR later. BODY says how:
330 BODY should be the same thing you would pass to emit_insn
331 to increment right away. It will go to emit_insn later on.
333 The value is a QUEUED expression to be used in place of VAR
334 where you want to guarantee the pre-incrementation value of VAR. */
336 static rtx
337 enqueue_insn (var, body)
338 rtx var, body;
339 {
340 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
341 body, pending_chain);
342 return pending_chain;
343 }
345 /* Use protect_from_queue to convert a QUEUED expression
346 into something that you can put immediately into an instruction.
347 If the queued incrementation has not happened yet,
348 protect_from_queue returns the variable itself.
349 If the incrementation has happened, protect_from_queue returns a temp
350 that contains a copy of the old value of the variable.
352 Any time an rtx which might possibly be a QUEUED is to be put
353 into an instruction, it must be passed through protect_from_queue first.
354 QUEUED expressions are not meaningful in instructions.
356 Do not pass a value through protect_from_queue and then hold
357 on to it for a while before putting it in an instruction!
358 If the queue is flushed in between, incorrect code will result. */
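/* Editor's sketch of the intended calling discipline (hypothetical names,
   not part of the original source):

       rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
       ...
       rtx pre = protect_from_queue (q, 0);   [now safe to put in an insn]
       ...
       emit_queue ();                         [the queued increment is emitted]

   VAR, Q and PRE are illustrative only.  */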
360 rtx
361 protect_from_queue (x, modify)
362 rtx x;
363 int modify;
365 RTX_CODE code = GET_CODE (x);
367 #if 0 /* A QUEUED can hang around after the queue is forced out. */
368 /* Shortcut for most common case. */
369 if (pending_chain == 0)
370 return x;
371 #endif
373 if (code != QUEUED)
375 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
376 use of autoincrement. Make a copy of the contents of the memory
377 location rather than a copy of the address, but not if the value is
378 of mode BLKmode. Don't modify X in place since it might be
379 shared. */
380 if (code == MEM && GET_MODE (x) != BLKmode
381 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
383 rtx y = XEXP (x, 0);
384 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
386 if (QUEUED_INSN (y))
388 rtx temp = gen_reg_rtx (GET_MODE (x));
390 emit_insn_before (gen_move_insn (temp, new),
391 QUEUED_INSN (y));
392 return temp;
395 /* Copy the address into a pseudo, so that the returned value
396 remains correct across calls to emit_queue. */
397 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
400 /* Otherwise, recursively protect the subexpressions of all
401 the kinds of rtx's that can contain a QUEUED. */
402 if (code == MEM)
404 rtx tem = protect_from_queue (XEXP (x, 0), 0);
405 if (tem != XEXP (x, 0))
407 x = copy_rtx (x);
408 XEXP (x, 0) = tem;
411 else if (code == PLUS || code == MULT)
413 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
414 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
415 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
417 x = copy_rtx (x);
418 XEXP (x, 0) = new0;
419 XEXP (x, 1) = new1;
422 return x;
424 /* If the increment has not happened, use the variable itself. Copy it
425 into a new pseudo so that the value remains correct across calls to
426 emit_queue. */
427 if (QUEUED_INSN (x) == 0)
428 return copy_to_reg (QUEUED_VAR (x));
429 /* If the increment has happened and a pre-increment copy exists,
430 use that copy. */
431 if (QUEUED_COPY (x) != 0)
432 return QUEUED_COPY (x);
433 /* The increment has happened but we haven't set up a pre-increment copy.
434 Set one up now, and use it. */
435 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
436 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
437 QUEUED_INSN (x));
438 return QUEUED_COPY (x);
441 /* Return nonzero if X contains a QUEUED expression:
442 if it contains anything that will be altered by a queued increment.
443 We handle only combinations of MEM, PLUS, MINUS and MULT operators
444 since memory addresses generally contain only those. */
446 int
447 queued_subexp_p (x)
448 rtx x;
449 {
450 enum rtx_code code = GET_CODE (x);
451 switch (code)
452 {
453 case QUEUED:
454 return 1;
455 case MEM:
456 return queued_subexp_p (XEXP (x, 0));
457 case MULT:
458 case PLUS:
459 case MINUS:
460 return (queued_subexp_p (XEXP (x, 0))
461 || queued_subexp_p (XEXP (x, 1)));
462 default:
463 return 0;
464 }
465 }
467 /* Perform all the pending incrementations. */
469 void
470 emit_queue ()
471 {
472 rtx p;
473 while ((p = pending_chain))
474 {
475 rtx body = QUEUED_BODY (p);
477 switch (GET_CODE (body))
478 {
479 case INSN:
480 case JUMP_INSN:
481 case CALL_INSN:
482 case CODE_LABEL:
483 case BARRIER:
484 case NOTE:
485 QUEUED_INSN (p) = body;
486 emit_insn (body);
487 break;
489 #ifdef ENABLE_CHECKING
490 case SEQUENCE:
491 abort ();
492 break;
493 #endif
495 default:
496 QUEUED_INSN (p) = emit_insn (body);
497 break;
498 }
500 pending_chain = QUEUED_NEXT (p);
501 }
502 }
504 /* Copy data from FROM to TO, where the machine modes are not the same.
505 Both modes may be integer, or both may be floating.
506 UNSIGNEDP should be nonzero if FROM is an unsigned type.
507 This causes zero-extension instead of sign-extension. */
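/* Editor's note (illustrative, not part of the original source): e.g.
   convert_move (si_reg, qi_reg, 1) on a target providing the standard
   zero_extendqisi2 pattern expands to that single insn, while
   unsignedp == 0 would use extendqisi2 instead.  */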
509 void
510 convert_move (to, from, unsignedp)
511 rtx to, from;
512 int unsignedp;
514 enum machine_mode to_mode = GET_MODE (to);
515 enum machine_mode from_mode = GET_MODE (from);
516 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
517 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
518 enum insn_code code;
519 rtx libcall;
521 /* rtx code for making an equivalent value. */
522 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
524 to = protect_from_queue (to, 1);
525 from = protect_from_queue (from, 0);
527 if (to_real != from_real)
528 abort ();
530 /* If FROM is a SUBREG that indicates that we have already done at least
531 the required extension, strip it. We don't handle such SUBREGs as
532 TO here. */
534 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
535 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
536 >= GET_MODE_SIZE (to_mode))
537 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
538 from = gen_lowpart (to_mode, from), from_mode = to_mode;
540 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
541 abort ();
543 if (to_mode == from_mode
544 || (from_mode == VOIDmode && CONSTANT_P (from)))
546 emit_move_insn (to, from);
547 return;
550 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
552 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
553 abort ();
555 if (VECTOR_MODE_P (to_mode))
556 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
557 else
558 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
560 emit_move_insn (to, from);
561 return;
564 if (to_real != from_real)
565 abort ();
567 if (to_real)
569 rtx value, insns;
571 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
573 /* Try converting directly if the insn is supported. */
574 if ((code = can_extend_p (to_mode, from_mode, 0))
575 != CODE_FOR_nothing)
577 emit_unop_insn (code, to, from, UNKNOWN);
578 return;
582 #ifdef HAVE_trunchfqf2
583 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
586 return;
588 #endif
589 #ifdef HAVE_trunctqfqf2
590 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
593 return;
595 #endif
596 #ifdef HAVE_truncsfqf2
597 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
599 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
600 return;
602 #endif
603 #ifdef HAVE_truncdfqf2
604 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
606 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
607 return;
609 #endif
610 #ifdef HAVE_truncxfqf2
611 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
613 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
614 return;
616 #endif
617 #ifdef HAVE_trunctfqf2
618 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
620 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
621 return;
623 #endif
625 #ifdef HAVE_trunctqfhf2
626 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
629 return;
631 #endif
632 #ifdef HAVE_truncsfhf2
633 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
635 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
636 return;
638 #endif
639 #ifdef HAVE_truncdfhf2
640 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
642 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
643 return;
645 #endif
646 #ifdef HAVE_truncxfhf2
647 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
649 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
650 return;
652 #endif
653 #ifdef HAVE_trunctfhf2
654 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
656 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
657 return;
659 #endif
661 #ifdef HAVE_truncsftqf2
662 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
664 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
665 return;
667 #endif
668 #ifdef HAVE_truncdftqf2
669 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
671 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
672 return;
674 #endif
675 #ifdef HAVE_truncxftqf2
676 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
678 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
679 return;
681 #endif
682 #ifdef HAVE_trunctftqf2
683 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
685 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
686 return;
688 #endif
690 #ifdef HAVE_truncdfsf2
691 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
693 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
694 return;
696 #endif
697 #ifdef HAVE_truncxfsf2
698 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
700 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
701 return;
703 #endif
704 #ifdef HAVE_trunctfsf2
705 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
707 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
708 return;
710 #endif
711 #ifdef HAVE_truncxfdf2
712 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
714 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
715 return;
717 #endif
718 #ifdef HAVE_trunctfdf2
719 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
721 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
722 return;
724 #endif
726 libcall = (rtx) 0;
727 switch (from_mode)
729 case SFmode:
730 switch (to_mode)
732 case DFmode:
733 libcall = extendsfdf2_libfunc;
734 break;
736 case XFmode:
737 libcall = extendsfxf2_libfunc;
738 break;
740 case TFmode:
741 libcall = extendsftf2_libfunc;
742 break;
744 default:
745 break;
747 break;
749 case DFmode:
750 switch (to_mode)
752 case SFmode:
753 libcall = truncdfsf2_libfunc;
754 break;
756 case XFmode:
757 libcall = extenddfxf2_libfunc;
758 break;
760 case TFmode:
761 libcall = extenddftf2_libfunc;
762 break;
764 default:
765 break;
767 break;
769 case XFmode:
770 switch (to_mode)
772 case SFmode:
773 libcall = truncxfsf2_libfunc;
774 break;
776 case DFmode:
777 libcall = truncxfdf2_libfunc;
778 break;
780 default:
781 break;
783 break;
785 case TFmode:
786 switch (to_mode)
788 case SFmode:
789 libcall = trunctfsf2_libfunc;
790 break;
792 case DFmode:
793 libcall = trunctfdf2_libfunc;
794 break;
796 default:
797 break;
799 break;
801 default:
802 break;
805 if (libcall == (rtx) 0)
806 /* This conversion is not implemented yet. */
807 abort ();
809 start_sequence ();
810 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
811 1, from, from_mode);
812 insns = get_insns ();
813 end_sequence ();
814 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
815 from));
816 return;
819 /* Now both modes are integers. */
821 /* Handle expanding beyond a word. */
822 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
823 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
825 rtx insns;
826 rtx lowpart;
827 rtx fill_value;
828 rtx lowfrom;
829 int i;
830 enum machine_mode lowpart_mode;
831 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
833 /* Try converting directly if the insn is supported. */
834 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
835 != CODE_FOR_nothing)
837 /* If FROM is a SUBREG, put it into a register. Do this
838 so that we always generate the same set of insns for
839 better cse'ing; if an intermediate assignment occurred,
840 we won't be doing the operation directly on the SUBREG. */
841 if (optimize > 0 && GET_CODE (from) == SUBREG)
842 from = force_reg (from_mode, from);
843 emit_unop_insn (code, to, from, equiv_code);
844 return;
846 /* Next, try converting via full word. */
847 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
848 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
849 != CODE_FOR_nothing))
851 if (GET_CODE (to) == REG)
852 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
853 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
854 emit_unop_insn (code, to,
855 gen_lowpart (word_mode, to), equiv_code);
856 return;
859 /* No special multiword conversion insn; do it by hand. */
860 start_sequence ();
862 /* Since we will turn this into a no conflict block, we must ensure
863 that the source does not overlap the target. */
865 if (reg_overlap_mentioned_p (to, from))
866 from = force_reg (from_mode, from);
868 /* Get a copy of FROM widened to a word, if necessary. */
869 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
870 lowpart_mode = word_mode;
871 else
872 lowpart_mode = from_mode;
874 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
876 lowpart = gen_lowpart (lowpart_mode, to);
877 emit_move_insn (lowpart, lowfrom);
879 /* Compute the value to put in each remaining word. */
880 if (unsignedp)
881 fill_value = const0_rtx;
882 else
884 #ifdef HAVE_slt
885 if (HAVE_slt
886 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
887 && STORE_FLAG_VALUE == -1)
889 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
890 lowpart_mode, 0);
891 fill_value = gen_reg_rtx (word_mode);
892 emit_insn (gen_slt (fill_value));
894 else
895 #endif
897 fill_value
898 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
899 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
900 NULL_RTX, 0);
901 fill_value = convert_to_mode (word_mode, fill_value, 1);
905 /* Fill the remaining words. */
906 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
908 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
909 rtx subword = operand_subword (to, index, 1, to_mode);
911 if (subword == 0)
912 abort ();
914 if (fill_value != subword)
915 emit_move_insn (subword, fill_value);
918 insns = get_insns ();
919 end_sequence ();
921 emit_no_conflict_block (insns, to, from, NULL_RTX,
922 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
923 return;
926 /* Truncating multi-word to a word or less. */
927 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
928 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
930 if (!((GET_CODE (from) == MEM
931 && ! MEM_VOLATILE_P (from)
932 && direct_load[(int) to_mode]
933 && ! mode_dependent_address_p (XEXP (from, 0)))
934 || GET_CODE (from) == REG
935 || GET_CODE (from) == SUBREG))
936 from = force_reg (from_mode, from);
937 convert_move (to, gen_lowpart (word_mode, from), 0);
938 return;
941 /* Handle pointer conversion. */ /* SPEE 900220. */
942 if (to_mode == PQImode)
944 if (from_mode != QImode)
945 from = convert_to_mode (QImode, from, unsignedp);
947 #ifdef HAVE_truncqipqi2
948 if (HAVE_truncqipqi2)
950 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
951 return;
953 #endif /* HAVE_truncqipqi2 */
954 abort ();
957 if (from_mode == PQImode)
959 if (to_mode != QImode)
961 from = convert_to_mode (QImode, from, unsignedp);
962 from_mode = QImode;
964 else
966 #ifdef HAVE_extendpqiqi2
967 if (HAVE_extendpqiqi2)
969 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
970 return;
972 #endif /* HAVE_extendpqiqi2 */
973 abort ();
977 if (to_mode == PSImode)
979 if (from_mode != SImode)
980 from = convert_to_mode (SImode, from, unsignedp);
982 #ifdef HAVE_truncsipsi2
983 if (HAVE_truncsipsi2)
985 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
986 return;
988 #endif /* HAVE_truncsipsi2 */
989 abort ();
992 if (from_mode == PSImode)
994 if (to_mode != SImode)
996 from = convert_to_mode (SImode, from, unsignedp);
997 from_mode = SImode;
999 else
1001 #ifdef HAVE_extendpsisi2
1002 if (! unsignedp && HAVE_extendpsisi2)
1004 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1005 return;
1007 #endif /* HAVE_extendpsisi2 */
1008 #ifdef HAVE_zero_extendpsisi2
1009 if (unsignedp && HAVE_zero_extendpsisi2)
1011 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1012 return;
1014 #endif /* HAVE_zero_extendpsisi2 */
1015 abort ();
1019 if (to_mode == PDImode)
1021 if (from_mode != DImode)
1022 from = convert_to_mode (DImode, from, unsignedp);
1024 #ifdef HAVE_truncdipdi2
1025 if (HAVE_truncdipdi2)
1027 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1028 return;
1030 #endif /* HAVE_truncdipdi2 */
1031 abort ();
1034 if (from_mode == PDImode)
1036 if (to_mode != DImode)
1038 from = convert_to_mode (DImode, from, unsignedp);
1039 from_mode = DImode;
1041 else
1043 #ifdef HAVE_extendpdidi2
1044 if (HAVE_extendpdidi2)
1046 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1047 return;
1049 #endif /* HAVE_extendpdidi2 */
1050 abort ();
1054 /* Now follow all the conversions between integers
1055 no more than a word long. */
1057 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1058 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1059 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1060 GET_MODE_BITSIZE (from_mode)))
1062 if (!((GET_CODE (from) == MEM
1063 && ! MEM_VOLATILE_P (from)
1064 && direct_load[(int) to_mode]
1065 && ! mode_dependent_address_p (XEXP (from, 0)))
1066 || GET_CODE (from) == REG
1067 || GET_CODE (from) == SUBREG))
1068 from = force_reg (from_mode, from);
1069 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1070 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1071 from = copy_to_reg (from);
1072 emit_move_insn (to, gen_lowpart (to_mode, from));
1073 return;
1076 /* Handle extension. */
1077 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1079 /* Convert directly if that works. */
1080 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1081 != CODE_FOR_nothing)
1083 if (flag_force_mem)
1084 from = force_not_mem (from);
1086 emit_unop_insn (code, to, from, equiv_code);
1087 return;
1089 else
1091 enum machine_mode intermediate;
1092 rtx tmp;
1093 tree shift_amount;
1095 /* Search for a mode to convert via. */
1096 for (intermediate = from_mode; intermediate != VOIDmode;
1097 intermediate = GET_MODE_WIDER_MODE (intermediate))
1098 if (((can_extend_p (to_mode, intermediate, unsignedp)
1099 != CODE_FOR_nothing)
1100 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1101 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1102 GET_MODE_BITSIZE (intermediate))))
1103 && (can_extend_p (intermediate, from_mode, unsignedp)
1104 != CODE_FOR_nothing))
1106 convert_move (to, convert_to_mode (intermediate, from,
1107 unsignedp), unsignedp);
1108 return;
1111 /* No suitable intermediate mode.
1112 Generate what we need with shifts. */
1113 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1114 - GET_MODE_BITSIZE (from_mode), 0);
1115 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1116 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1117 to, unsignedp);
1118 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1119 to, unsignedp);
1120 if (tmp != to)
1121 emit_move_insn (to, tmp);
1122 return;
1126 /* Support special truncate insns for certain modes. */
1128 if (from_mode == DImode && to_mode == SImode)
1130 #ifdef HAVE_truncdisi2
1131 if (HAVE_truncdisi2)
1133 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1134 return;
1136 #endif
1137 convert_move (to, force_reg (from_mode, from), unsignedp);
1138 return;
1141 if (from_mode == DImode && to_mode == HImode)
1143 #ifdef HAVE_truncdihi2
1144 if (HAVE_truncdihi2)
1146 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1147 return;
1149 #endif
1150 convert_move (to, force_reg (from_mode, from), unsignedp);
1151 return;
1154 if (from_mode == DImode && to_mode == QImode)
1156 #ifdef HAVE_truncdiqi2
1157 if (HAVE_truncdiqi2)
1159 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1160 return;
1162 #endif
1163 convert_move (to, force_reg (from_mode, from), unsignedp);
1164 return;
1167 if (from_mode == SImode && to_mode == HImode)
1169 #ifdef HAVE_truncsihi2
1170 if (HAVE_truncsihi2)
1172 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1173 return;
1175 #endif
1176 convert_move (to, force_reg (from_mode, from), unsignedp);
1177 return;
1180 if (from_mode == SImode && to_mode == QImode)
1182 #ifdef HAVE_truncsiqi2
1183 if (HAVE_truncsiqi2)
1185 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1186 return;
1188 #endif
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1190 return;
1193 if (from_mode == HImode && to_mode == QImode)
1195 #ifdef HAVE_trunchiqi2
1196 if (HAVE_trunchiqi2)
1198 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1199 return;
1201 #endif
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1203 return;
1206 if (from_mode == TImode && to_mode == DImode)
1208 #ifdef HAVE_trunctidi2
1209 if (HAVE_trunctidi2)
1211 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1212 return;
1214 #endif
1215 convert_move (to, force_reg (from_mode, from), unsignedp);
1216 return;
1219 if (from_mode == TImode && to_mode == SImode)
1221 #ifdef HAVE_trunctisi2
1222 if (HAVE_trunctisi2)
1224 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1225 return;
1227 #endif
1228 convert_move (to, force_reg (from_mode, from), unsignedp);
1229 return;
1232 if (from_mode == TImode && to_mode == HImode)
1234 #ifdef HAVE_trunctihi2
1235 if (HAVE_trunctihi2)
1237 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1238 return;
1240 #endif
1241 convert_move (to, force_reg (from_mode, from), unsignedp);
1242 return;
1245 if (from_mode == TImode && to_mode == QImode)
1247 #ifdef HAVE_trunctiqi2
1248 if (HAVE_trunctiqi2)
1250 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1251 return;
1253 #endif
1254 convert_move (to, force_reg (from_mode, from), unsignedp);
1255 return;
1258 /* Handle truncation of volatile memrefs, and so on;
1259 the things that couldn't be truncated directly,
1260 and for which there was no special instruction. */
1261 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1263 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1264 emit_move_insn (to, temp);
1265 return;
1268 /* Mode combination is not recognized. */
1269 abort ();
1272 /* Return an rtx for a value that would result
1273 from converting X to mode MODE.
1274 Both X and MODE may be floating, or both integer.
1275 UNSIGNEDP is nonzero if X is an unsigned value.
1276 This can be done by referring to a part of X in place
1277 or by copying to a new temporary with conversion.
1279 This function *must not* call protect_from_queue
1280 except when putting X into an insn (in which case convert_move does it). */
1282 rtx
1283 convert_to_mode (mode, x, unsignedp)
1284 enum machine_mode mode;
1285 rtx x;
1286 int unsignedp;
1288 return convert_modes (mode, VOIDmode, x, unsignedp);
1291 /* Return an rtx for a value that would result
1292 from converting X from mode OLDMODE to mode MODE.
1293 Both modes may be floating, or both integer.
1294 UNSIGNEDP is nonzero if X is an unsigned value.
1296 This can be done by referring to a part of X in place
1297 or by copying to a new temporary with conversion.
1299 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1301 This function *must not* call protect_from_queue
1302 except when putting X into an insn (in which case convert_move does it). */
1304 rtx
1305 convert_modes (mode, oldmode, x, unsignedp)
1306 enum machine_mode mode, oldmode;
1307 rtx x;
1308 int unsignedp;
1310 rtx temp;
1312 /* If FROM is a SUBREG that indicates that we have already done at least
1313 the required extension, strip it. */
1315 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1316 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1317 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1318 x = gen_lowpart (mode, x);
1320 if (GET_MODE (x) != VOIDmode)
1321 oldmode = GET_MODE (x);
1323 if (mode == oldmode)
1324 return x;
1326 /* There is one case that we must handle specially: If we are converting
1327 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1328 we are to interpret the constant as unsigned, gen_lowpart will do
1329 the wrong thing if the constant appears negative. What we want to do is
1330 make the high-order word of the constant zero, not all ones. */
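/* Editor's worked example (not part of the original source, assumes a
   32-bit HOST_WIDE_INT): converting (const_int -1) known to be SImode to
   unsigned DImode must produce the double-word constant with a zero high
   word, 0x00000000ffffffff; gen_lowpart would sign-extend and yield all
   ones, which is what the special case below avoids.  */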
1332 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1333 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1334 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1336 HOST_WIDE_INT val = INTVAL (x);
1338 if (oldmode != VOIDmode
1339 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1341 int width = GET_MODE_BITSIZE (oldmode);
1343 /* We need to zero extend VAL. */
1344 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1347 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1350 /* We can do this with a gen_lowpart if both desired and current modes
1351 are integer, and this is either a constant integer, a register, or a
1352 non-volatile MEM. Except for the constant case where MODE is no
1353 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1355 if ((GET_CODE (x) == CONST_INT
1356 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1357 || (GET_MODE_CLASS (mode) == MODE_INT
1358 && GET_MODE_CLASS (oldmode) == MODE_INT
1359 && (GET_CODE (x) == CONST_DOUBLE
1360 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1361 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1362 && direct_load[(int) mode])
1363 || (GET_CODE (x) == REG
1364 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1365 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1367 /* ?? If we don't know OLDMODE, we have to assume here that
1368 X does not need sign- or zero-extension. This may not be
1369 the case, but it's the best we can do. */
1370 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1371 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1373 HOST_WIDE_INT val = INTVAL (x);
1374 int width = GET_MODE_BITSIZE (oldmode);
1376 /* We must sign or zero-extend in this case. Start by
1377 zero-extending, then sign extend if we need to. */
1378 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1379 if (! unsignedp
1380 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1381 val |= (HOST_WIDE_INT) (-1) << width;
1383 return gen_int_mode (val, mode);
1386 return gen_lowpart (mode, x);
1389 temp = gen_reg_rtx (mode);
1390 convert_move (temp, x, unsignedp);
1391 return temp;
1394 /* This macro determines the largest unit size that
1395 move_by_pieces can use. */
1397 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1398 move efficiently, as opposed to MOVE_MAX which is the maximum
1399 number of bytes we can move with a single instruction. */
1401 #ifndef MOVE_MAX_PIECES
1402 #define MOVE_MAX_PIECES MOVE_MAX
1403 #endif
1405 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1406 store efficiently. Due to internal GCC limitations, this is
1407 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1408 for an immediate constant. */
1410 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
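/* Editor's note (illustrative, not part of the original source): e.g. with
   MOVE_MAX_PIECES == 8 and a 64-bit HOST_WIDE_INT this evaluates to
   MIN (8, 16) == 8, so store_by_pieces never uses a piece wider than
   8 bytes.  */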
1412 /* Generate several move instructions to copy LEN bytes from block FROM to
1413 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1414 and TO through protect_from_queue before calling.
1416 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1417 used to push FROM to the stack.
1419 ALIGN is maximum alignment we can assume. */
1421 void
1422 move_by_pieces (to, from, len, align)
1423 rtx to, from;
1424 unsigned HOST_WIDE_INT len;
1425 unsigned int align;
1427 struct move_by_pieces data;
1428 rtx to_addr, from_addr = XEXP (from, 0);
1429 unsigned int max_size = MOVE_MAX_PIECES + 1;
1430 enum machine_mode mode = VOIDmode, tmode;
1431 enum insn_code icode;
1433 data.offset = 0;
1434 data.from_addr = from_addr;
1435 if (to)
1437 to_addr = XEXP (to, 0);
1438 data.to = to;
1439 data.autinc_to
1440 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1441 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1442 data.reverse
1443 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1445 else
1447 to_addr = NULL_RTX;
1448 data.to = NULL_RTX;
1449 data.autinc_to = 1;
1450 #ifdef STACK_GROWS_DOWNWARD
1451 data.reverse = 1;
1452 #else
1453 data.reverse = 0;
1454 #endif
1456 data.to_addr = to_addr;
1457 data.from = from;
1458 data.autinc_from
1459 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1460 || GET_CODE (from_addr) == POST_INC
1461 || GET_CODE (from_addr) == POST_DEC);
1463 data.explicit_inc_from = 0;
1464 data.explicit_inc_to = 0;
1465 if (data.reverse) data.offset = len;
1466 data.len = len;
1468 /* If copying requires more than two move insns,
1469 copy addresses to registers (to make displacements shorter)
1470 and use post-increment if available. */
1471 if (!(data.autinc_from && data.autinc_to)
1472 && move_by_pieces_ninsns (len, align) > 2)
1474 /* Find the mode of the largest move... */
1475 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1476 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1477 if (GET_MODE_SIZE (tmode) < max_size)
1478 mode = tmode;
1480 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1482 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1483 data.autinc_from = 1;
1484 data.explicit_inc_from = -1;
1486 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1488 data.from_addr = copy_addr_to_reg (from_addr);
1489 data.autinc_from = 1;
1490 data.explicit_inc_from = 1;
1492 if (!data.autinc_from && CONSTANT_P (from_addr))
1493 data.from_addr = copy_addr_to_reg (from_addr);
1494 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1496 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1497 data.autinc_to = 1;
1498 data.explicit_inc_to = -1;
1500 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1502 data.to_addr = copy_addr_to_reg (to_addr);
1503 data.autinc_to = 1;
1504 data.explicit_inc_to = 1;
1506 if (!data.autinc_to && CONSTANT_P (to_addr))
1507 data.to_addr = copy_addr_to_reg (to_addr);
1510 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1511 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1512 align = MOVE_MAX * BITS_PER_UNIT;
1514 /* First move what we can in the largest integer mode, then go to
1515 successively smaller modes. */
1517 while (max_size > 1)
1519 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1520 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1521 if (GET_MODE_SIZE (tmode) < max_size)
1522 mode = tmode;
1524 if (mode == VOIDmode)
1525 break;
1527 icode = mov_optab->handlers[(int) mode].insn_code;
1528 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1529 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1531 max_size = GET_MODE_SIZE (mode);
1534 /* The code above should have handled everything. */
1535 if (data.len > 0)
1536 abort ();
1539 /* Return number of insns required to move L bytes by pieces.
1540 ALIGN (in bits) is maximum alignment we can assume. */
1542 static unsigned HOST_WIDE_INT
1543 move_by_pieces_ninsns (l, align)
1544 unsigned HOST_WIDE_INT l;
1545 unsigned int align;
1547 unsigned HOST_WIDE_INT n_insns = 0;
1548 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1550 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1551 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1552 align = MOVE_MAX * BITS_PER_UNIT;
1554 while (max_size > 1)
1556 enum machine_mode mode = VOIDmode, tmode;
1557 enum insn_code icode;
1559 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1560 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1561 if (GET_MODE_SIZE (tmode) < max_size)
1562 mode = tmode;
1564 if (mode == VOIDmode)
1565 break;
1567 icode = mov_optab->handlers[(int) mode].insn_code;
1568 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1569 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1571 max_size = GET_MODE_SIZE (mode);
1574 if (l)
1575 abort ();
1576 return n_insns;
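/* Editor's worked example (not part of the original source, assumes
   MOVE_MAX == 4 and word-aligned operands): move_by_pieces_ninsns (10, 32)
   counts 10/4 = 2 SImode moves leaving 2 bytes, then one HImode move,
   for a total of 3 insns.  */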
1579 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1580 with move instructions for mode MODE. GENFUN is the gen_... function
1581 to make a move insn for that mode. DATA has all the other info. */
1583 static void
1584 move_by_pieces_1 (genfun, mode, data)
1585 rtx (*genfun) PARAMS ((rtx, ...));
1586 enum machine_mode mode;
1587 struct move_by_pieces *data;
1589 unsigned int size = GET_MODE_SIZE (mode);
1590 rtx to1 = NULL_RTX, from1;
1592 while (data->len >= size)
1594 if (data->reverse)
1595 data->offset -= size;
1597 if (data->to)
1599 if (data->autinc_to)
1600 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1601 data->offset);
1602 else
1603 to1 = adjust_address (data->to, mode, data->offset);
1606 if (data->autinc_from)
1607 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1608 data->offset);
1609 else
1610 from1 = adjust_address (data->from, mode, data->offset);
1612 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1613 emit_insn (gen_add2_insn (data->to_addr,
1614 GEN_INT (-(HOST_WIDE_INT)size)));
1615 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1616 emit_insn (gen_add2_insn (data->from_addr,
1617 GEN_INT (-(HOST_WIDE_INT)size)));
1619 if (data->to)
1620 emit_insn ((*genfun) (to1, from1));
1621 else
1623 #ifdef PUSH_ROUNDING
1624 emit_single_push_insn (mode, from1, NULL);
1625 #else
1626 abort ();
1627 #endif
1630 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1631 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1632 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1633 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1635 if (! data->reverse)
1636 data->offset += size;
1638 data->len -= size;
1642 /* Emit code to move a block Y to a block X.
1643 This may be done with string-move instructions,
1644 with multiple scalar move instructions, or with a library call.
1646 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1647 with mode BLKmode.
1648 SIZE is an rtx that says how long they are.
1649 ALIGN is the maximum alignment we can assume they have.
1651 Return the address of the new block, if memcpy is called and returns it,
1652 0 otherwise. */
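/* Editor's sketch (hypothetical, not part of the original source): a typical
   call is emit_block_move (dst_mem, src_mem, GEN_INT (nbytes)) with both
   operands BLKmode MEMs; small constant sizes are expanded through
   move_by_pieces, larger ones through a movstrM pattern or the
   memcpy/bcopy call built below.  */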
1654 static GTY(()) tree block_move_fn;
1655 rtx
1656 emit_block_move (x, y, size)
1657 rtx x, y;
1658 rtx size;
1660 rtx retval = 0;
1661 #ifdef TARGET_MEM_FUNCTIONS
1662 tree call_expr, arg_list;
1663 #endif
1664 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1666 if (GET_MODE (x) != BLKmode)
1667 abort ();
1669 if (GET_MODE (y) != BLKmode)
1670 abort ();
1672 x = protect_from_queue (x, 1);
1673 y = protect_from_queue (y, 0);
1674 size = protect_from_queue (size, 0);
1676 if (GET_CODE (x) != MEM)
1677 abort ();
1678 if (GET_CODE (y) != MEM)
1679 abort ();
1680 if (size == 0)
1681 abort ();
1683 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1684 move_by_pieces (x, y, INTVAL (size), align);
1685 else
1687 /* Try the most limited insn first, because there's no point
1688 including more than one in the machine description unless
1689 the more limited one has some advantage. */
1691 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1692 enum machine_mode mode;
1694 /* Since this is a move insn, we don't care about volatility. */
1695 volatile_ok = 1;
1697 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1698 mode = GET_MODE_WIDER_MODE (mode))
1700 enum insn_code code = movstr_optab[(int) mode];
1701 insn_operand_predicate_fn pred;
1703 if (code != CODE_FOR_nothing
1704 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1705 here because if SIZE is less than the mode mask, as it is
1706 returned by the macro, it will definitely be less than the
1707 actual mode mask. */
1708 && ((GET_CODE (size) == CONST_INT
1709 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1710 <= (GET_MODE_MASK (mode) >> 1)))
1711 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1712 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1713 || (*pred) (x, BLKmode))
1714 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1715 || (*pred) (y, BLKmode))
1716 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1717 || (*pred) (opalign, VOIDmode)))
1719 rtx op2;
1720 rtx last = get_last_insn ();
1721 rtx pat;
1723 op2 = convert_to_mode (mode, size, 1);
1724 pred = insn_data[(int) code].operand[2].predicate;
1725 if (pred != 0 && ! (*pred) (op2, mode))
1726 op2 = copy_to_mode_reg (mode, op2);
1728 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1729 if (pat)
1731 emit_insn (pat);
1732 volatile_ok = 0;
1733 return 0;
1735 else
1736 delete_insns_since (last);
1740 volatile_ok = 0;
1742 /* X, Y, or SIZE may have been passed through protect_from_queue.
1744 It is unsafe to save the value generated by protect_from_queue
1745 and reuse it later. Consider what happens if emit_queue is
1746 called before the return value from protect_from_queue is used.
1748 Expansion of the CALL_EXPR below will call emit_queue before
1749 we are finished emitting RTL for argument setup. So if we are
1750 not careful we could get the wrong value for an argument.
1752 To avoid this problem we go ahead and emit code to copy X, Y &
1753 SIZE into new pseudos. We can then place those new pseudos
1754 into an RTL_EXPR and use them later, even after a call to
1755 emit_queue.
1757 Note this is not strictly needed for library calls since they
1758 do not call emit_queue before loading their arguments. However,
1759 we may need to have library calls call emit_queue in the future
1760 since failing to do so could cause problems for targets which
1761 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1762 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1763 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1765 #ifdef TARGET_MEM_FUNCTIONS
1766 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1767 #else
1768 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1769 TREE_UNSIGNED (integer_type_node));
1770 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1771 #endif
1773 #ifdef TARGET_MEM_FUNCTIONS
1774 /* It is incorrect to use the libcall calling conventions to call
1775 memcpy in this context.
1777 This could be a user call to memcpy and the user may wish to
1778 examine the return value from memcpy.
1780 For targets where libcalls and normal calls have different conventions
1781 for returning pointers, we could end up generating incorrect code.
1783 So instead of using a libcall sequence we build up a suitable
1784 CALL_EXPR and expand the call in the normal fashion. */
1785 if (block_move_fn == NULL_TREE)
1787 tree fntype;
1789 /* This was copied from except.c, I don't know if all this is
1790 necessary in this context or not. */
1791 block_move_fn = get_identifier ("memcpy");
1792 fntype = build_pointer_type (void_type_node);
1793 fntype = build_function_type (fntype, NULL_TREE);
1794 block_move_fn = build_decl (FUNCTION_DECL, block_move_fn, fntype);
1795 DECL_EXTERNAL (block_move_fn) = 1;
1796 TREE_PUBLIC (block_move_fn) = 1;
1797 DECL_ARTIFICIAL (block_move_fn) = 1;
1798 TREE_NOTHROW (block_move_fn) = 1;
1799 make_decl_rtl (block_move_fn, NULL);
1800 assemble_external (block_move_fn);
1803 /* We need to make an argument list for the function call.
1805 memcpy has three arguments, the first two are void * addresses and
1806 the last is a size_t byte count for the copy. */
1807 arg_list
1808 = build_tree_list (NULL_TREE,
1809 make_tree (build_pointer_type (void_type_node), x));
1810 TREE_CHAIN (arg_list)
1811 = build_tree_list (NULL_TREE,
1812 make_tree (build_pointer_type (void_type_node), y));
1813 TREE_CHAIN (TREE_CHAIN (arg_list))
1814 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1815 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1817 /* Now we have to build up the CALL_EXPR itself. */
1818 call_expr = build1 (ADDR_EXPR,
1819 build_pointer_type (TREE_TYPE (block_move_fn)),
1820 block_move_fn);
1821 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_move_fn)),
1822 call_expr, arg_list, NULL_TREE);
1823 TREE_SIDE_EFFECTS (call_expr) = 1;
1825 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1826 #else
1827 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1828 VOIDmode, 3, y, Pmode, x, Pmode,
1829 convert_to_mode (TYPE_MODE (integer_type_node), size,
1830 TREE_UNSIGNED (integer_type_node)),
1831 TYPE_MODE (integer_type_node));
1832 #endif
1834 /* If we are initializing a readonly value, show the above call
1835 clobbered it. Otherwise, a load from it may erroneously be hoisted
1836 from a loop. */
1837 if (RTX_UNCHANGING_P (x))
1838 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1841 return retval;
1844 /* Copy all or part of a value X into registers starting at REGNO.
1845 The number of registers to be filled is NREGS. */
1847 void
1848 move_block_to_reg (regno, x, nregs, mode)
1849 int regno;
1850 rtx x;
1851 int nregs;
1852 enum machine_mode mode;
1854 int i;
1855 #ifdef HAVE_load_multiple
1856 rtx pat;
1857 rtx last;
1858 #endif
1860 if (nregs == 0)
1861 return;
1863 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1864 x = validize_mem (force_const_mem (mode, x));
1866 /* See if the machine can do this with a load multiple insn. */
1867 #ifdef HAVE_load_multiple
1868 if (HAVE_load_multiple)
1870 last = get_last_insn ();
1871 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1872 GEN_INT (nregs));
1873 if (pat)
1875 emit_insn (pat);
1876 return;
1878 else
1879 delete_insns_since (last);
1881 #endif
1883 for (i = 0; i < nregs; i++)
1884 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1885 operand_subword_force (x, i, mode));
1888 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1889 The number of registers to be filled is NREGS. SIZE indicates the number
1890 of bytes in the object X. */
1892 void
1893 move_block_from_reg (regno, x, nregs, size)
1894 int regno;
1895 rtx x;
1896 int nregs;
1897 int size;
1899 int i;
1900 #ifdef HAVE_store_multiple
1901 rtx pat;
1902 rtx last;
1903 #endif
1904 enum machine_mode mode;
1906 if (nregs == 0)
1907 return;
1909 /* If SIZE is that of a mode no bigger than a word, just use that
1910 mode's store operation. */
1911 if (size <= UNITS_PER_WORD
1912 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1913 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1915 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1916 return;
1919 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1920 to the left before storing to memory. Note that the previous test
1921 doesn't handle all cases (e.g. SIZE == 3). */
1922 if (size < UNITS_PER_WORD
1923 && BYTES_BIG_ENDIAN
1924 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1926 rtx tem = operand_subword (x, 0, 1, BLKmode);
1927 rtx shift;
1929 if (tem == 0)
1930 abort ();
1932 shift = expand_shift (LSHIFT_EXPR, word_mode,
1933 gen_rtx_REG (word_mode, regno),
1934 build_int_2 ((UNITS_PER_WORD - size)
1935 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1936 emit_move_insn (tem, shift);
1937 return;
1940 /* See if the machine can do this with a store multiple insn. */
1941 #ifdef HAVE_store_multiple
1942 if (HAVE_store_multiple)
1944 last = get_last_insn ();
1945 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1946 GEN_INT (nregs));
1947 if (pat)
1949 emit_insn (pat);
1950 return;
1952 else
1953 delete_insns_since (last);
1955 #endif
1957 for (i = 0; i < nregs; i++)
1959 rtx tem = operand_subword (x, i, 1, BLKmode);
1961 if (tem == 0)
1962 abort ();
1964 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1968 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1969 registers represented by a PARALLEL. SSIZE represents the total size of
1970 block SRC in bytes, or -1 if not known. */
1971 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1972 the balance will be in what would be the low-order memory addresses, i.e.
1973 left justified for big endian, right justified for little endian. This
1974 happens to be true for the targets currently using this support. If this
1975 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1976 would be needed. */
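/* Editor's illustration (not part of the original source): DST is typically
   of the form

       (parallel [(expr_list (reg:DI 32) (const_int 0))
                  (expr_list (reg:DI 33) (const_int 8))])

   where each element pairs a destination register with its byte offset in
   the source block; the register numbers are illustrative only.  */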
1978 void
1979 emit_group_load (dst, orig_src, ssize)
1980 rtx dst, orig_src;
1981 int ssize;
1983 rtx *tmps, src;
1984 int start, i;
1986 if (GET_CODE (dst) != PARALLEL)
1987 abort ();
1989 /* Check for a NULL entry, used to indicate that the parameter goes
1990 both on the stack and in registers. */
1991 if (XEXP (XVECEXP (dst, 0, 0), 0))
1992 start = 0;
1993 else
1994 start = 1;
1996 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1998 /* Process the pieces. */
1999 for (i = start; i < XVECLEN (dst, 0); i++)
2001 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2002 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2003 unsigned int bytelen = GET_MODE_SIZE (mode);
2004 int shift = 0;
2006 /* Handle trailing fragments that run over the size of the struct. */
2007 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2009 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2010 bytelen = ssize - bytepos;
2011 if (bytelen <= 0)
2012 abort ();
2015 /* If we won't be loading directly from memory, protect the real source
2016 from strange tricks we might play; but make sure that the source can
2017 be loaded directly into the destination. */
2018 src = orig_src;
2019 if (GET_CODE (orig_src) != MEM
2020 && (!CONSTANT_P (orig_src)
2021 || (GET_MODE (orig_src) != mode
2022 && GET_MODE (orig_src) != VOIDmode)))
2024 if (GET_MODE (orig_src) == VOIDmode)
2025 src = gen_reg_rtx (mode);
2026 else
2027 src = gen_reg_rtx (GET_MODE (orig_src));
2029 emit_move_insn (src, orig_src);
2032 /* Optimize the access just a bit. */
2033 if (GET_CODE (src) == MEM
2034 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2035 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2036 && bytelen == GET_MODE_SIZE (mode))
2038 tmps[i] = gen_reg_rtx (mode);
2039 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2041 else if (GET_CODE (src) == CONCAT)
2043 if ((bytepos == 0
2044 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2045 || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2046 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2048 tmps[i] = XEXP (src, bytepos != 0);
2049 if (! CONSTANT_P (tmps[i])
2050 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2051 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2052 0, 1, NULL_RTX, mode, mode, ssize);
2054 else if (bytepos == 0)
2056 rtx mem = assign_stack_temp (GET_MODE (src),
2057 GET_MODE_SIZE (GET_MODE (src)), 0);
2058 emit_move_insn (mem, src);
2059 tmps[i] = adjust_address (mem, mode, 0);
2061 else
2062 abort ();
2064 else if (CONSTANT_P (src)
2065 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2066 tmps[i] = src;
2067 else
2068 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2069 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2070 mode, mode, ssize);
2072 if (BYTES_BIG_ENDIAN && shift)
2073 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2074 tmps[i], 0, OPTAB_WIDEN);
2077 emit_queue ();
2079 /* Copy the extracted pieces into the proper (probable) hard regs. */
2080 for (i = start; i < XVECLEN (dst, 0); i++)
2081 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
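#if 0
/* Illustrative sketch only: DST is a PARALLEL whose elements are
   (expr_list (reg) (const_int byte-offset)) pairs, as the loop above reads
   them.  A hypothetical target that returns a 16-byte structure in two
   DImode registers R0 and R1 might build such a group like this before
   calling emit_group_load.  */
static rtx
example_build_group (r0, r1)
     unsigned int r0, r1;
{
  return gen_rtx_PARALLEL
    (BLKmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, r0),
				   GEN_INT (0)),
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, r1),
				   GEN_INT (8))));
}
#endif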
2084 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2085 registers represented by a PARALLEL. SSIZE represents the total size of
2086 block DST, or -1 if not known. */
2088 void
2089 emit_group_store (orig_dst, src, ssize)
2090 rtx orig_dst, src;
2091 int ssize;
2093 rtx *tmps, dst;
2094 int start, i;
2096 if (GET_CODE (src) != PARALLEL)
2097 abort ();
2099 /* Check for a NULL entry, used to indicate that the parameter goes
2100 both on the stack and in registers. */
2101 if (XEXP (XVECEXP (src, 0, 0), 0))
2102 start = 0;
2103 else
2104 start = 1;
2106 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2108 /* Copy the (probable) hard regs into pseudos. */
2109 for (i = start; i < XVECLEN (src, 0); i++)
2111 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2112 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2113 emit_move_insn (tmps[i], reg);
2115 emit_queue ();
2117 /* If we won't be storing directly into memory, protect the real destination
2118 from strange tricks we might play. */
2119 dst = orig_dst;
2120 if (GET_CODE (dst) == PARALLEL)
2122 rtx temp;
2124 /* We can get a PARALLEL dst if there is a conditional expression in
2125 a return statement. In that case, the dst and src are the same,
2126 so no action is necessary. */
2127 if (rtx_equal_p (dst, src))
2128 return;
2130 /* It is unclear if we can ever reach here, but we may as well handle
2131 it. Allocate a temporary, and split this into a store/load to/from
2132 the temporary. */
2134 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2135 emit_group_store (temp, src, ssize);
2136 emit_group_load (dst, temp, ssize);
2137 return;
2139 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2141 dst = gen_reg_rtx (GET_MODE (orig_dst));
2142 /* Make life a bit easier for combine. */
2143 emit_move_insn (dst, const0_rtx);
2146 /* Process the pieces. */
2147 for (i = start; i < XVECLEN (src, 0); i++)
2149 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2150 enum machine_mode mode = GET_MODE (tmps[i]);
2151 unsigned int bytelen = GET_MODE_SIZE (mode);
2152 rtx dest = dst;
2154 /* Handle trailing fragments that run over the size of the struct. */
2155 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2157 if (BYTES_BIG_ENDIAN)
2159 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2160 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2161 tmps[i], 0, OPTAB_WIDEN);
2163 bytelen = ssize - bytepos;
2166 if (GET_CODE (dst) == CONCAT)
2168 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2169 dest = XEXP (dst, 0);
2170 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2172 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2173 dest = XEXP (dst, 1);
2175 else
2176 abort ();
2179 /* Optimize the access just a bit. */
2180 if (GET_CODE (dest) == MEM
2181 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2182 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2183 && bytelen == GET_MODE_SIZE (mode))
2184 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2185 else
2186 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2187 mode, tmps[i], ssize);
2190 emit_queue ();
2192 /* Copy from the pseudo into the (probable) hard reg. */
2193 if (GET_CODE (dst) == REG)
2194 emit_move_insn (orig_dst, dst);
2197 /* Generate code to copy a BLKmode object of TYPE out of a
2198 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2199 is null, a stack temporary is created. TGTBLK is returned.
2201 The primary purpose of this routine is to handle functions
2202 that return BLKmode structures in registers. Some machines
2203 (the PA for example) want to return all small structures
2204 in registers regardless of the structure's alignment. */
2206 rtx
2207 copy_blkmode_from_reg (tgtblk, srcreg, type)
2208 rtx tgtblk;
2209 rtx srcreg;
2210 tree type;
2212 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2213 rtx src = NULL, dst = NULL;
2214 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2215 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2217 if (tgtblk == 0)
2219 tgtblk = assign_temp (build_qualified_type (type,
2220 (TYPE_QUALS (type)
2221 | TYPE_QUAL_CONST)),
2222 0, 1, 1);
2223 preserve_temp_slots (tgtblk);
2226 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2227 into a new pseudo which is a full word.
2229 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2230 the wrong part of the register gets copied so we fake a type conversion
2231 in place. */
2232 if (GET_MODE (srcreg) != BLKmode
2233 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2235 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2236 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2237 else
2238 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2241 /* Structures whose size is not a multiple of a word are aligned
2242 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2243 machine, this means we must skip the empty high order bytes when
2244 calculating the bit offset. */
2245 if (BYTES_BIG_ENDIAN
2246 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2247 && bytes % UNITS_PER_WORD)
2248 big_endian_correction
2249 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
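    /* Worked example (assumed numbers): with BITS_PER_WORD == 32,
       UNITS_PER_WORD == 4 and a 6-byte structure, BYTES % UNITS_PER_WORD
       is 2, so the correction is 32 - 2 * 8 == 16 bits.  */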
2251 /* Copy the structure BITSIZE bits at a time.
2253 We could probably emit more efficient code for machines which do not use
2254 strict alignment, but it doesn't seem worth the effort at the current
2255 time. */
2256 for (bitpos = 0, xbitpos = big_endian_correction;
2257 bitpos < bytes * BITS_PER_UNIT;
2258 bitpos += bitsize, xbitpos += bitsize)
2260 /* We need a new source operand each time xbitpos is on a
2261 word boundary and when xbitpos == big_endian_correction
2262 (the first time through). */
2263 if (xbitpos % BITS_PER_WORD == 0
2264 || xbitpos == big_endian_correction)
2265 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2266 GET_MODE (srcreg));
2268 /* We need a new destination operand each time bitpos is on
2269 a word boundary. */
2270 if (bitpos % BITS_PER_WORD == 0)
2271 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2273 /* Use xbitpos for the source extraction (right justified) and
2274 bitpos for the destination store (left justified). */
2275 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2276 extract_bit_field (src, bitsize,
2277 xbitpos % BITS_PER_WORD, 1,
2278 NULL_RTX, word_mode, word_mode,
2279 BITS_PER_WORD),
2280 BITS_PER_WORD);
2283 return tgtblk;
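#if 0
/* Illustrative sketch only: the typical caller unpacks a small structure
   returned in a register into a stack temporary by passing a null target,
   letting copy_blkmode_from_reg allocate it.  VALUE_REG and TYPE are
   hypothetical.  */
static rtx
example_unpack_return (value_reg, type)
     rtx value_reg;
     tree type;
{
  return copy_blkmode_from_reg (NULL_RTX, value_reg, type);
}
#endif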
2286 /* Add a USE expression for REG to the (possibly empty) list pointed
2287 to by CALL_FUSAGE. REG must denote a hard register. */
2289 void
2290 use_reg (call_fusage, reg)
2291 rtx *call_fusage, reg;
2293 if (GET_CODE (reg) != REG
2294 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2295 abort ();
2297 *call_fusage
2298 = gen_rtx_EXPR_LIST (VOIDmode,
2299 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2302 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2303 starting at REGNO. All of these registers must be hard registers. */
2305 void
2306 use_regs (call_fusage, regno, nregs)
2307 rtx *call_fusage;
2308 int regno;
2309 int nregs;
2311 int i;
2313 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2314 abort ();
2316 for (i = 0; i < nregs; i++)
2317 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2320 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2321 PARALLEL REGS. This is for calls that pass values in multiple
2322 non-contiguous locations. The Irix 6 ABI has examples of this. */
2324 void
2325 use_group_regs (call_fusage, regs)
2326 rtx *call_fusage;
2327 rtx regs;
2329 int i;
2331 for (i = 0; i < XVECLEN (regs, 0); i++)
2333 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2335 /* A NULL entry means the parameter goes both on the stack and in
2336 registers. This can also be a MEM for targets that pass values
2337 partially on the stack and partially in registers. */
2338 if (reg != 0 && GET_CODE (reg) == REG)
2339 use_reg (call_fusage, reg);
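#if 0
/* Illustrative sketch only: building the CALL_FUSAGE list for a call whose
   first argument is passed in two consecutive hard registers starting at a
   hypothetical register number 4.  */
static rtx
example_call_fusage ()
{
  rtx call_fusage = NULL_RTX;

  use_regs (&call_fusage, 4, 2);
  return call_fusage;
}
#endif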
2344 /* Determine whether the LEN bytes generated by CONSTFUN can be
2345 stored to memory using several move instructions. CONSTFUNDATA is
2346 a pointer which will be passed as argument in every CONSTFUN call.
2347 ALIGN is maximum alignment we can assume. Return nonzero if a
2348 call to store_by_pieces should succeed. */
2350 int
2351 can_store_by_pieces (len, constfun, constfundata, align)
2352 unsigned HOST_WIDE_INT len;
2353 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2354 PTR constfundata;
2355 unsigned int align;
2357 unsigned HOST_WIDE_INT max_size, l;
2358 HOST_WIDE_INT offset = 0;
2359 enum machine_mode mode, tmode;
2360 enum insn_code icode;
2361 int reverse;
2362 rtx cst;
2364 if (! MOVE_BY_PIECES_P (len, align))
2365 return 0;
2367 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2368 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2369 align = MOVE_MAX * BITS_PER_UNIT;
2371 /* We would first store what we can in the largest integer mode, then go to
2372 successively smaller modes. */
2374 for (reverse = 0;
2375 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2376 reverse++)
2378 l = len;
2379 mode = VOIDmode;
2380 max_size = STORE_MAX_PIECES + 1;
2381 while (max_size > 1)
2383 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2384 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2385 if (GET_MODE_SIZE (tmode) < max_size)
2386 mode = tmode;
2388 if (mode == VOIDmode)
2389 break;
2391 icode = mov_optab->handlers[(int) mode].insn_code;
2392 if (icode != CODE_FOR_nothing
2393 && align >= GET_MODE_ALIGNMENT (mode))
2395 unsigned int size = GET_MODE_SIZE (mode);
2397 while (l >= size)
2399 if (reverse)
2400 offset -= size;
2402 cst = (*constfun) (constfundata, offset, mode);
2403 if (!LEGITIMATE_CONSTANT_P (cst))
2404 return 0;
2406 if (!reverse)
2407 offset += size;
2409 l -= size;
2413 max_size = GET_MODE_SIZE (mode);
2416 /* The code above should have handled everything. */
2417 if (l != 0)
2418 abort ();
2421 return 1;
2424 /* Generate several move instructions to store LEN bytes generated by
2425 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2426 pointer which will be passed as argument in every CONSTFUN call.
2427 ALIGN is maximum alignment we can assume. */
2429 void
2430 store_by_pieces (to, len, constfun, constfundata, align)
2431 rtx to;
2432 unsigned HOST_WIDE_INT len;
2433 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2434 PTR constfundata;
2435 unsigned int align;
2437 struct store_by_pieces data;
2439 if (! MOVE_BY_PIECES_P (len, align))
2440 abort ();
2441 to = protect_from_queue (to, 1);
2442 data.constfun = constfun;
2443 data.constfundata = constfundata;
2444 data.len = len;
2445 data.to = to;
2446 store_by_pieces_1 (&data, align);
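#if 0
/* Illustrative sketch only: a trivial CONSTFUN that, like clear_by_pieces_1
   below, returns the same constant for every piece, together with the
   guarded calling pattern described in the comments above.  TO, LEN and
   ALIGN are hypothetical.  */
static rtx
example_constfun (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}

static void
example_store_zeros (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  if (can_store_by_pieces (len, example_constfun, NULL, align))
    store_by_pieces (to, len, example_constfun, NULL, align);
}
#endif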
2449 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2450 rtx with BLKmode). The caller must pass TO through protect_from_queue
2451 before calling. ALIGN is maximum alignment we can assume. */
2453 static void
2454 clear_by_pieces (to, len, align)
2455 rtx to;
2456 unsigned HOST_WIDE_INT len;
2457 unsigned int align;
2459 struct store_by_pieces data;
2461 data.constfun = clear_by_pieces_1;
2462 data.constfundata = NULL;
2463 data.len = len;
2464 data.to = to;
2465 store_by_pieces_1 (&data, align);
2468 /* Callback routine for clear_by_pieces.
2469 Return const0_rtx unconditionally. */
2471 static rtx
2472 clear_by_pieces_1 (data, offset, mode)
2473 PTR data ATTRIBUTE_UNUSED;
2474 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2475 enum machine_mode mode ATTRIBUTE_UNUSED;
2477 return const0_rtx;
2480 /* Subroutine of clear_by_pieces and store_by_pieces.
2481 Generate several move instructions to store LEN bytes of block TO. (A MEM
2482 rtx with BLKmode). The caller must pass TO through protect_from_queue
2483 before calling. ALIGN is maximum alignment we can assume. */
2485 static void
2486 store_by_pieces_1 (data, align)
2487 struct store_by_pieces *data;
2488 unsigned int align;
2490 rtx to_addr = XEXP (data->to, 0);
2491 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2492 enum machine_mode mode = VOIDmode, tmode;
2493 enum insn_code icode;
2495 data->offset = 0;
2496 data->to_addr = to_addr;
2497 data->autinc_to
2498 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2499 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2501 data->explicit_inc_to = 0;
2502 data->reverse
2503 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2504 if (data->reverse)
2505 data->offset = data->len;
2507 /* If storing requires more than two move insns,
2508 copy addresses to registers (to make displacements shorter)
2509 and use post-increment if available. */
2510 if (!data->autinc_to
2511 && move_by_pieces_ninsns (data->len, align) > 2)
2513 /* Determine the main mode we'll be using. */
2514 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2515 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2516 if (GET_MODE_SIZE (tmode) < max_size)
2517 mode = tmode;
2519 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2521 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2522 data->autinc_to = 1;
2523 data->explicit_inc_to = -1;
2526 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2527 && ! data->autinc_to)
2529 data->to_addr = copy_addr_to_reg (to_addr);
2530 data->autinc_to = 1;
2531 data->explicit_inc_to = 1;
2534 if ( !data->autinc_to && CONSTANT_P (to_addr))
2535 data->to_addr = copy_addr_to_reg (to_addr);
2538 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2539 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2540 align = MOVE_MAX * BITS_PER_UNIT;
2542 /* First store what we can in the largest integer mode, then go to
2543 successively smaller modes. */
2545 while (max_size > 1)
2547 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2548 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2549 if (GET_MODE_SIZE (tmode) < max_size)
2550 mode = tmode;
2552 if (mode == VOIDmode)
2553 break;
2555 icode = mov_optab->handlers[(int) mode].insn_code;
2556 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2557 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2559 max_size = GET_MODE_SIZE (mode);
2562 /* The code above should have handled everything. */
2563 if (data->len != 0)
2564 abort ();
2567 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2568 with move instructions for mode MODE. GENFUN is the gen_... function
2569 to make a move insn for that mode. DATA has all the other info. */
2571 static void
2572 store_by_pieces_2 (genfun, mode, data)
2573 rtx (*genfun) PARAMS ((rtx, ...));
2574 enum machine_mode mode;
2575 struct store_by_pieces *data;
2577 unsigned int size = GET_MODE_SIZE (mode);
2578 rtx to1, cst;
2580 while (data->len >= size)
2582 if (data->reverse)
2583 data->offset -= size;
2585 if (data->autinc_to)
2586 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2587 data->offset);
2588 else
2589 to1 = adjust_address (data->to, mode, data->offset);
2591 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2592 emit_insn (gen_add2_insn (data->to_addr,
2593 GEN_INT (-(HOST_WIDE_INT) size)));
2595 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2596 emit_insn ((*genfun) (to1, cst));
2598 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2599 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2601 if (! data->reverse)
2602 data->offset += size;
2604 data->len -= size;
2608 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2609 its length in bytes. */
2611 static GTY(()) tree block_clear_fn;
2612 rtx
2613 clear_storage (object, size)
2614 rtx object;
2615 rtx size;
2617 #ifdef TARGET_MEM_FUNCTIONS
2618 tree call_expr, arg_list;
2619 #endif
2620 rtx retval = 0;
2621 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2622 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2624 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2625 just move a zero. Otherwise, do this a piece at a time. */
2626 if (GET_MODE (object) != BLKmode
2627 && GET_CODE (size) == CONST_INT
2628 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2629 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2630 else
2632 object = protect_from_queue (object, 1);
2633 size = protect_from_queue (size, 0);
2635 if (GET_CODE (size) == CONST_INT
2636 && MOVE_BY_PIECES_P (INTVAL (size), align))
2637 clear_by_pieces (object, INTVAL (size), align);
2638 else
2640 /* Try the most limited insn first, because there's no point
2641 including more than one in the machine description unless
2642 the more limited one has some advantage. */
2644 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2645 enum machine_mode mode;
2647 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2648 mode = GET_MODE_WIDER_MODE (mode))
2650 enum insn_code code = clrstr_optab[(int) mode];
2651 insn_operand_predicate_fn pred;
2653 if (code != CODE_FOR_nothing
2654 /* We don't need MODE to be narrower than
2655 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2656 the mode mask, as it is returned by the macro, it will
2657 definitely be less than the actual mode mask. */
2658 && ((GET_CODE (size) == CONST_INT
2659 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2660 <= (GET_MODE_MASK (mode) >> 1)))
2661 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2662 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2663 || (*pred) (object, BLKmode))
2664 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2665 || (*pred) (opalign, VOIDmode)))
2667 rtx op1;
2668 rtx last = get_last_insn ();
2669 rtx pat;
2671 op1 = convert_to_mode (mode, size, 1);
2672 pred = insn_data[(int) code].operand[1].predicate;
2673 if (pred != 0 && ! (*pred) (op1, mode))
2674 op1 = copy_to_mode_reg (mode, op1);
2676 pat = GEN_FCN ((int) code) (object, op1, opalign);
2677 if (pat)
2679 emit_insn (pat);
2680 return 0;
2682 else
2683 delete_insns_since (last);
2687 /* OBJECT or SIZE may have been passed through protect_from_queue.
2689 It is unsafe to save the value generated by protect_from_queue
2690 and reuse it later. Consider what happens if emit_queue is
2691 called before the return value from protect_from_queue is used.
2693 Expansion of the CALL_EXPR below will call emit_queue before
2694 we are finished emitting RTL for argument setup. So if we are
2695 not careful we could get the wrong value for an argument.
2697 To avoid this problem we go ahead and emit code to copy OBJECT
2698 and SIZE into new pseudos. We can then place those new pseudos
2699 into an RTL_EXPR and use them later, even after a call to
2700 emit_queue.
2702 Note this is not strictly needed for library calls since they
2703 do not call emit_queue before loading their arguments. However,
2704 we may need to have library calls call emit_queue in the future
2705 since failing to do so could cause problems for targets which
2706 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2707 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2709 #ifdef TARGET_MEM_FUNCTIONS
2710 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2711 #else
2712 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2713 TREE_UNSIGNED (integer_type_node));
2714 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2715 #endif
2717 #ifdef TARGET_MEM_FUNCTIONS
2718 /* It is incorrect to use the libcall calling conventions to call
2719 memset in this context.
2721 This could be a user call to memset and the user may wish to
2722 examine the return value from memset.
2724 For targets where libcalls and normal calls have different
2725 conventions for returning pointers, we could end up generating
2726 incorrect code.
2728 So instead of using a libcall sequence we build up a suitable
2729 CALL_EXPR and expand the call in the normal fashion. */
2730 if (block_clear_fn == NULL_TREE)
2732 tree fntype;
2734 /* This was copied from except.c, I don't know if all this is
2735 necessary in this context or not. */
2736 block_clear_fn = get_identifier ("memset");
2737 fntype = build_pointer_type (void_type_node);
2738 fntype = build_function_type (fntype, NULL_TREE);
2739 block_clear_fn = build_decl (FUNCTION_DECL, block_clear_fn,
2740 fntype);
2741 DECL_EXTERNAL (block_clear_fn) = 1;
2742 TREE_PUBLIC (block_clear_fn) = 1;
2743 DECL_ARTIFICIAL (block_clear_fn) = 1;
2744 TREE_NOTHROW (block_clear_fn) = 1;
2745 make_decl_rtl (block_clear_fn, NULL);
2746 assemble_external (block_clear_fn);
2749 /* We need to make an argument list for the function call.
2751 memset has three arguments, the first is a void * address, the
2752 second an integer with the initialization value, the last is a
2753 size_t byte count for the copy. */
2754 arg_list
2755 = build_tree_list (NULL_TREE,
2756 make_tree (build_pointer_type (void_type_node),
2757 object));
2758 TREE_CHAIN (arg_list)
2759 = build_tree_list (NULL_TREE,
2760 make_tree (integer_type_node, const0_rtx));
2761 TREE_CHAIN (TREE_CHAIN (arg_list))
2762 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2763 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2765 /* Now we have to build up the CALL_EXPR itself. */
2766 call_expr = build1 (ADDR_EXPR,
2767 build_pointer_type (TREE_TYPE (block_clear_fn)),
2768 block_clear_fn);
2769 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_clear_fn)),
2770 call_expr, arg_list, NULL_TREE);
2771 TREE_SIDE_EFFECTS (call_expr) = 1;
2773 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2774 #else
2775 emit_library_call (bzero_libfunc, LCT_NORMAL,
2776 VOIDmode, 2, object, Pmode, size,
2777 TYPE_MODE (integer_type_node));
2778 #endif
2780 /* If we are initializing a readonly value, show the above call
2781 clobbered it. Otherwise, a load from it may erroneously be
2782 hoisted from a loop. */
2783 if (RTX_UNCHANGING_P (object))
2784 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2788 return retval;
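#if 0
/* Illustrative sketch only: zeroing a 32-byte BLKmode stack temporary with
   clear_storage.  The size is hypothetical.  */
static void
example_clear_temp ()
{
  rtx mem = assign_stack_temp (BLKmode, 32, 0);

  clear_storage (mem, GEN_INT (32));
}
#endif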
2791 /* Generate code to copy Y into X.
2792 Both Y and X must have the same mode, except that
2793 Y can be a constant with VOIDmode.
2794 This mode cannot be BLKmode; use emit_block_move for that.
2796 Return the last instruction emitted. */
2798 rtx
2799 emit_move_insn (x, y)
2800 rtx x, y;
2802 enum machine_mode mode = GET_MODE (x);
2803 rtx y_cst = NULL_RTX;
2804 rtx last_insn;
2806 x = protect_from_queue (x, 1);
2807 y = protect_from_queue (y, 0);
2809 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2810 abort ();
2812 /* Never force constant_p_rtx to memory. */
2813 if (GET_CODE (y) == CONSTANT_P_RTX)
2815 else if (CONSTANT_P (y))
2817 if (optimize
2818 && FLOAT_MODE_P (GET_MODE (x))
2819 && (last_insn = compress_float_constant (x, y)))
2820 return last_insn;
2822 if (!LEGITIMATE_CONSTANT_P (y))
2824 y_cst = y;
2825 y = force_const_mem (mode, y);
2829 /* If X or Y are memory references, verify that their addresses are valid
2830 for the machine. */
2831 if (GET_CODE (x) == MEM
2832 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2833 && ! push_operand (x, GET_MODE (x)))
2834 || (flag_force_addr
2835 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2836 x = validize_mem (x);
2838 if (GET_CODE (y) == MEM
2839 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2840 || (flag_force_addr
2841 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2842 y = validize_mem (y);
2844 if (mode == BLKmode)
2845 abort ();
2847 last_insn = emit_move_insn_1 (x, y);
2849 if (y_cst && GET_CODE (x) == REG)
2850 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2852 return last_insn;
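#if 0
/* Illustrative sketch only: the common pattern of loading a constant into a
   fresh pseudo; emit_move_insn legitimizes the constant (forcing it into the
   constant pool if necessary) and returns the last insn emitted.  */
static rtx
example_load_constant ()
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif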
2855 /* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2859 rtx
2860 emit_move_insn_1 (x, y)
2861 rtx x, y;
2863 enum machine_mode mode = GET_MODE (x);
2864 enum machine_mode submode;
2865 enum mode_class class = GET_MODE_CLASS (mode);
2867 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2868 abort ();
2870 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2871 return
2872 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2874 /* Expand complex moves by moving real part and imag part, if possible. */
2875 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2876 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2877 * BITS_PER_UNIT),
2878 (class == MODE_COMPLEX_INT
2879 ? MODE_INT : MODE_FLOAT),
2881 && (mov_optab->handlers[(int) submode].insn_code
2882 != CODE_FOR_nothing))
2884 /* Don't split destination if it is a stack push. */
2885 int stack = push_operand (x, GET_MODE (x));
2887 #ifdef PUSH_ROUNDING
2888 /* In case we output to the stack, but the size is smaller than the machine
2889 can push exactly, we need to use move instructions. */
2890 if (stack
2891 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2892 != GET_MODE_SIZE (submode)))
2894 rtx temp;
2895 HOST_WIDE_INT offset1, offset2;
2897 /* Do not use anti_adjust_stack, since we don't want to update
2898 stack_pointer_delta. */
2899 temp = expand_binop (Pmode,
2900 #ifdef STACK_GROWS_DOWNWARD
2901 sub_optab,
2902 #else
2903 add_optab,
2904 #endif
2905 stack_pointer_rtx,
2906 GEN_INT
2907 (PUSH_ROUNDING
2908 (GET_MODE_SIZE (GET_MODE (x)))),
2909 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2911 if (temp != stack_pointer_rtx)
2912 emit_move_insn (stack_pointer_rtx, temp);
2914 #ifdef STACK_GROWS_DOWNWARD
2915 offset1 = 0;
2916 offset2 = GET_MODE_SIZE (submode);
2917 #else
2918 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2919 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2920 + GET_MODE_SIZE (submode));
2921 #endif
2923 emit_move_insn (change_address (x, submode,
2924 gen_rtx_PLUS (Pmode,
2925 stack_pointer_rtx,
2926 GEN_INT (offset1))),
2927 gen_realpart (submode, y));
2928 emit_move_insn (change_address (x, submode,
2929 gen_rtx_PLUS (Pmode,
2930 stack_pointer_rtx,
2931 GEN_INT (offset2))),
2932 gen_imagpart (submode, y));
2934 else
2935 #endif
2936 /* If this is a stack, push the highpart first, so it
2937 will be in the argument order.
2939 In that case, change_address is used only to convert
2940 the mode, not to change the address. */
2941 if (stack)
2943 /* Note that the real part always precedes the imag part in memory
2944 regardless of machine's endianness. */
2945 #ifdef STACK_GROWS_DOWNWARD
2946 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2947 (gen_rtx_MEM (submode, XEXP (x, 0)),
2948 gen_imagpart (submode, y)));
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (gen_rtx_MEM (submode, XEXP (x, 0)),
2951 gen_realpart (submode, y)));
2952 #else
2953 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2954 (gen_rtx_MEM (submode, XEXP (x, 0)),
2955 gen_realpart (submode, y)));
2956 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2957 (gen_rtx_MEM (submode, XEXP (x, 0)),
2958 gen_imagpart (submode, y)));
2959 #endif
2961 else
2963 rtx realpart_x, realpart_y;
2964 rtx imagpart_x, imagpart_y;
2966 /* If this is a complex value with each part being smaller than a
2967 word, the usual calling sequence will likely pack the pieces into
2968 a single register. Unfortunately, SUBREG of hard registers only
2969 deals in terms of words, so we have a problem converting input
2970 arguments to the CONCAT of two registers that is used elsewhere
2971 for complex values. If this is before reload, we can copy it into
2972 memory and reload. FIXME, we should see about using extract and
2973 insert on integer registers, but complex short and complex char
2974 variables should be rarely used. */
2975 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2976 && (reload_in_progress | reload_completed) == 0)
2978 int packed_dest_p
2979 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2980 int packed_src_p
2981 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2983 if (packed_dest_p || packed_src_p)
2985 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2986 ? MODE_FLOAT : MODE_INT);
2988 enum machine_mode reg_mode
2989 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2991 if (reg_mode != BLKmode)
2993 rtx mem = assign_stack_temp (reg_mode,
2994 GET_MODE_SIZE (mode), 0);
2995 rtx cmem = adjust_address (mem, mode, 0);
2997 cfun->cannot_inline
2998 = N_("function using short complex types cannot be inline");
3000 if (packed_dest_p)
3002 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3004 emit_move_insn_1 (cmem, y);
3005 return emit_move_insn_1 (sreg, mem);
3007 else
3009 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3011 emit_move_insn_1 (mem, sreg);
3012 return emit_move_insn_1 (x, cmem);
3018 realpart_x = gen_realpart (submode, x);
3019 realpart_y = gen_realpart (submode, y);
3020 imagpart_x = gen_imagpart (submode, x);
3021 imagpart_y = gen_imagpart (submode, y);
3023 /* Show the output dies here. This is necessary for SUBREGs
3024 of pseudos since we cannot track their lifetimes correctly;
3025 hard regs shouldn't appear here except as return values.
3026 We never want to emit such a clobber after reload. */
3027 if (x != y
3028 && ! (reload_in_progress || reload_completed)
3029 && (GET_CODE (realpart_x) == SUBREG
3030 || GET_CODE (imagpart_x) == SUBREG))
3031 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3033 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3034 (realpart_x, realpart_y));
3035 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3036 (imagpart_x, imagpart_y));
3039 return get_last_insn ();
3042 /* This will handle any multi-word mode that lacks a move_insn pattern.
3043 However, you will get better code if you define such patterns,
3044 even if they must turn into multiple assembler instructions. */
3045 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3047 rtx last_insn = 0;
3048 rtx seq, inner;
3049 int need_clobber;
3050 int i;
3052 #ifdef PUSH_ROUNDING
3054 /* If X is a push on the stack, do the push now and replace
3055 X with a reference to the stack pointer. */
3056 if (push_operand (x, GET_MODE (x)))
3058 rtx temp;
3059 enum rtx_code code;
3061 /* Do not use anti_adjust_stack, since we don't want to update
3062 stack_pointer_delta. */
3063 temp = expand_binop (Pmode,
3064 #ifdef STACK_GROWS_DOWNWARD
3065 sub_optab,
3066 #else
3067 add_optab,
3068 #endif
3069 stack_pointer_rtx,
3070 GEN_INT
3071 (PUSH_ROUNDING
3072 (GET_MODE_SIZE (GET_MODE (x)))),
3073 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3075 if (temp != stack_pointer_rtx)
3076 emit_move_insn (stack_pointer_rtx, temp);
3078 code = GET_CODE (XEXP (x, 0));
3080 /* Just hope that small offsets off SP are OK. */
3081 if (code == POST_INC)
3082 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3083 GEN_INT (-((HOST_WIDE_INT)
3084 GET_MODE_SIZE (GET_MODE (x)))));
3085 else if (code == POST_DEC)
3086 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3087 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3088 else
3089 temp = stack_pointer_rtx;
3091 x = change_address (x, VOIDmode, temp);
3093 #endif
3095 /* If we are in reload, see if either operand is a MEM whose address
3096 is scheduled for replacement. */
3097 if (reload_in_progress && GET_CODE (x) == MEM
3098 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3099 x = replace_equiv_address_nv (x, inner);
3100 if (reload_in_progress && GET_CODE (y) == MEM
3101 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3102 y = replace_equiv_address_nv (y, inner);
3104 start_sequence ();
3106 need_clobber = 0;
3107 for (i = 0;
3108 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3109 i++)
3111 rtx xpart = operand_subword (x, i, 1, mode);
3112 rtx ypart = operand_subword (y, i, 1, mode);
3114 /* If we can't get a part of Y, put Y into memory if it is a
3115 constant. Otherwise, force it into a register. If we still
3116 can't get a part of Y, abort. */
3117 if (ypart == 0 && CONSTANT_P (y))
3119 y = force_const_mem (mode, y);
3120 ypart = operand_subword (y, i, 1, mode);
3122 else if (ypart == 0)
3123 ypart = operand_subword_force (y, i, mode);
3125 if (xpart == 0 || ypart == 0)
3126 abort ();
3128 need_clobber |= (GET_CODE (xpart) == SUBREG);
3130 last_insn = emit_move_insn (xpart, ypart);
3133 seq = get_insns ();
3134 end_sequence ();
3136 /* Show the output dies here. This is necessary for SUBREGs
3137 of pseudos since we cannot track their lifetimes correctly;
3138 hard regs shouldn't appear here except as return values.
3139 We never want to emit such a clobber after reload. */
3140 if (x != y
3141 && ! (reload_in_progress || reload_completed)
3142 && need_clobber != 0)
3143 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3145 emit_insn (seq);
3147 return last_insn;
3149 else
3150 abort ();
3153 /* If Y is representable exactly in a narrower mode, and the target can
3154 perform the extension directly from constant or memory, then emit the
3155 move as an extension. */
3157 static rtx
3158 compress_float_constant (x, y)
3159 rtx x, y;
3161 enum machine_mode dstmode = GET_MODE (x);
3162 enum machine_mode orig_srcmode = GET_MODE (y);
3163 enum machine_mode srcmode;
3164 REAL_VALUE_TYPE r;
3166 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3168 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3169 srcmode != orig_srcmode;
3170 srcmode = GET_MODE_WIDER_MODE (srcmode))
3172 enum insn_code ic;
3173 rtx trunc_y, last_insn;
3175 /* Skip if the target can't extend this way. */
3176 ic = can_extend_p (dstmode, srcmode, 0);
3177 if (ic == CODE_FOR_nothing)
3178 continue;
3180 /* Skip if the narrowed value isn't exact. */
3181 if (! exact_real_truncate (srcmode, &r))
3182 continue;
3184 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3186 if (LEGITIMATE_CONSTANT_P (trunc_y))
3188 /* Skip if the target needs extra instructions to perform
3189 the extension. */
3190 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3191 continue;
3193 else if (float_extend_from_mem[dstmode][srcmode])
3194 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3195 else
3196 continue;
3198 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3199 last_insn = get_last_insn ();
3201 if (GET_CODE (x) == REG)
3202 REG_NOTES (last_insn)
3203 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3205 return last_insn;
3208 return NULL_RTX;
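/* Worked example (hypothetical target): if the machine has a
   (float_extend:DF (mem:SF ...)) pattern, a DFmode constant such as 1.5,
   which SFmode represents exactly, can be loaded from an SFmode constant
   pool entry and extended; a value like 0.1 fails exact_real_truncate and
   is left in DFmode.  */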
3211 /* Pushing data onto the stack. */
3213 /* Push a block of length SIZE (perhaps variable)
3214 and return an rtx to address the beginning of the block.
3215 Note that it is not possible for the value returned to be a QUEUED.
3216 The value may be virtual_outgoing_args_rtx.
3218 EXTRA is the number of bytes of padding to push in addition to SIZE.
3219 BELOW nonzero means this padding comes at low addresses;
3220 otherwise, the padding comes at high addresses. */
3222 rtx
3223 push_block (size, extra, below)
3224 rtx size;
3225 int extra, below;
3227 rtx temp;
3229 size = convert_modes (Pmode, ptr_mode, size, 1);
3230 if (CONSTANT_P (size))
3231 anti_adjust_stack (plus_constant (size, extra));
3232 else if (GET_CODE (size) == REG && extra == 0)
3233 anti_adjust_stack (size);
3234 else
3236 temp = copy_to_mode_reg (Pmode, size);
3237 if (extra != 0)
3238 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3239 temp, 0, OPTAB_LIB_WIDEN);
3240 anti_adjust_stack (temp);
3243 #ifndef STACK_GROWS_DOWNWARD
3244 if (0)
3245 #else
3246 if (1)
3247 #endif
3249 temp = virtual_outgoing_args_rtx;
3250 if (extra != 0 && below)
3251 temp = plus_constant (temp, extra);
3253 else
3255 if (GET_CODE (size) == CONST_INT)
3256 temp = plus_constant (virtual_outgoing_args_rtx,
3257 -INTVAL (size) - (below ? 0 : extra));
3258 else if (extra != 0 && !below)
3259 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3260 negate_rtx (Pmode, plus_constant (size, extra)));
3261 else
3262 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3263 negate_rtx (Pmode, size));
3266 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3269 #ifdef PUSH_ROUNDING
3271 /* Emit single push insn. */
3273 static void
3274 emit_single_push_insn (mode, x, type)
3275 rtx x;
3276 enum machine_mode mode;
3277 tree type;
3279 rtx dest_addr;
3280 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3281 rtx dest;
3282 enum insn_code icode;
3283 insn_operand_predicate_fn pred;
3285 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3286 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3287 a MEM representing the push operation to the move expander. */
3288 icode = push_optab->handlers[(int) mode].insn_code;
3289 if (icode != CODE_FOR_nothing)
3291 if (((pred = insn_data[(int) icode].operand[0].predicate)
3292 && !((*pred) (x, mode))))
3293 x = force_reg (mode, x);
3294 emit_insn (GEN_FCN (icode) (x));
3295 return;
3297 if (GET_MODE_SIZE (mode) == rounded_size)
3298 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3299 else
3301 #ifdef STACK_GROWS_DOWNWARD
3302 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3303 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3304 #else
3305 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3306 GEN_INT (rounded_size));
3307 #endif
3308 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
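      /* Worked example (assumed numbers): if PUSH_ROUNDING rounds to 4-byte
	 slots, pushing an HImode value (size 2) gives ROUNDED_SIZE == 4, so
	 this PRE_MODIFY form is used instead of the plain STACK_PUSH_CODE
	 address above.  */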
3311 dest = gen_rtx_MEM (mode, dest_addr);
3313 if (type != 0)
3315 set_mem_attributes (dest, type, 1);
3317 if (flag_optimize_sibling_calls)
3318 /* Function incoming arguments may overlap with sibling call
3319 outgoing arguments and we cannot allow reordering of reads
3320 from function arguments with stores to outgoing arguments
3321 of sibling calls. */
3322 set_mem_alias_set (dest, 0);
3324 emit_move_insn (dest, x);
3326 #endif
3328 /* Generate code to push X onto the stack, assuming it has mode MODE and
3329 type TYPE.
3330 MODE is redundant except when X is a CONST_INT (since they don't
3331 carry mode info).
3332 SIZE is an rtx for the size of data to be copied (in bytes),
3333 needed only if X is BLKmode.
3335 ALIGN (in bits) is maximum alignment we can assume.
3337 If PARTIAL and REG are both nonzero, then copy that many of the first
3338 words of X into registers starting with REG, and push the rest of X.
3339 The amount of space pushed is decreased by PARTIAL words,
3340 rounded *down* to a multiple of PARM_BOUNDARY.
3341 REG must be a hard register in this case.
3342 If REG is zero but PARTIAL is not, take all other actions for an
3343 argument partially in registers, but do not actually load any
3344 registers.
3346 EXTRA is the amount in bytes of extra space to leave next to this arg.
3347 This is ignored if an argument block has already been allocated.
3349 On a machine that lacks real push insns, ARGS_ADDR is the address of
3350 the bottom of the argument block for this call. We use indexing off there
3351 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3352 argument block has not been preallocated.
3354 ARGS_SO_FAR is the size of args previously pushed for this call.
3356 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3357 for arguments passed in registers. If nonzero, it will be the number
3358 of bytes required. */
3360 void
3361 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3362 args_addr, args_so_far, reg_parm_stack_space,
3363 alignment_pad)
3364 rtx x;
3365 enum machine_mode mode;
3366 tree type;
3367 rtx size;
3368 unsigned int align;
3369 int partial;
3370 rtx reg;
3371 int extra;
3372 rtx args_addr;
3373 rtx args_so_far;
3374 int reg_parm_stack_space;
3375 rtx alignment_pad;
3377 rtx xinner;
3378 enum direction stack_direction
3379 #ifdef STACK_GROWS_DOWNWARD
3380 = downward;
3381 #else
3382 = upward;
3383 #endif
3385 /* Decide where to pad the argument: `downward' for below,
3386 `upward' for above, or `none' for don't pad it.
3387 Default is below for small data on big-endian machines; else above. */
3388 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3390 /* Invert direction if stack is post-decrement.
3391 FIXME: why? */
3392 if (STACK_PUSH_CODE == POST_DEC)
3393 if (where_pad != none)
3394 where_pad = (where_pad == downward ? upward : downward);
3396 xinner = x = protect_from_queue (x, 0);
3398 if (mode == BLKmode)
3400 /* Copy a block into the stack, entirely or partially. */
3402 rtx temp;
3403 int used = partial * UNITS_PER_WORD;
3404 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3405 int skip;
3407 if (size == 0)
3408 abort ();
3410 used -= offset;
3412 /* USED is now the # of bytes we need not copy to the stack
3413 because registers will take care of them. */
3415 if (partial != 0)
3416 xinner = adjust_address (xinner, BLKmode, used);
3418 /* If the partial register-part of the arg counts in its stack size,
3419 skip the part of stack space corresponding to the registers.
3420 Otherwise, start copying to the beginning of the stack space,
3421 by setting SKIP to 0. */
3422 skip = (reg_parm_stack_space == 0) ? 0 : used;
3424 #ifdef PUSH_ROUNDING
3425 /* Do it with several push insns if that doesn't take lots of insns
3426 and if there is no difficulty with push insns that skip bytes
3427 on the stack for alignment purposes. */
3428 if (args_addr == 0
3429 && PUSH_ARGS
3430 && GET_CODE (size) == CONST_INT
3431 && skip == 0
3432 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3433 /* Here we avoid the case of a structure whose weak alignment
3434 forces many pushes of a small amount of data,
3435 and such small pushes do rounding that causes trouble. */
3436 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3437 || align >= BIGGEST_ALIGNMENT
3438 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3439 == (align / BITS_PER_UNIT)))
3440 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3442 /* Push padding now if padding above and stack grows down,
3443 or if padding below and stack grows up.
3444 But if space already allocated, this has already been done. */
3445 if (extra && args_addr == 0
3446 && where_pad != none && where_pad != stack_direction)
3447 anti_adjust_stack (GEN_INT (extra));
3449 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3451 else
3452 #endif /* PUSH_ROUNDING */
3454 rtx target;
3456 /* Otherwise make space on the stack and copy the data
3457 to the address of that space. */
3459 /* Deduct words put into registers from the size we must copy. */
3460 if (partial != 0)
3462 if (GET_CODE (size) == CONST_INT)
3463 size = GEN_INT (INTVAL (size) - used);
3464 else
3465 size = expand_binop (GET_MODE (size), sub_optab, size,
3466 GEN_INT (used), NULL_RTX, 0,
3467 OPTAB_LIB_WIDEN);
3470 /* Get the address of the stack space.
3471 In this case, we do not deal with EXTRA separately.
3472 A single stack adjust will do. */
3473 if (! args_addr)
3475 temp = push_block (size, extra, where_pad == downward);
3476 extra = 0;
3478 else if (GET_CODE (args_so_far) == CONST_INT)
3479 temp = memory_address (BLKmode,
3480 plus_constant (args_addr,
3481 skip + INTVAL (args_so_far)));
3482 else
3483 temp = memory_address (BLKmode,
3484 plus_constant (gen_rtx_PLUS (Pmode,
3485 args_addr,
3486 args_so_far),
3487 skip));
3488 target = gen_rtx_MEM (BLKmode, temp);
3490 if (type != 0)
3492 set_mem_attributes (target, type, 1);
3493 /* Function incoming arguments may overlap with sibling call
3494 outgoing arguments and we cannot allow reordering of reads
3495 from function arguments with stores to outgoing arguments
3496 of sibling calls. */
3497 set_mem_alias_set (target, 0);
3499 else
3500 set_mem_align (target, align);
3502 /* TEMP is the address of the block. Copy the data there. */
3503 if (GET_CODE (size) == CONST_INT
3504 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3506 move_by_pieces (target, xinner, INTVAL (size), align);
3507 goto ret;
3509 else
3511 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3512 enum machine_mode mode;
3514 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3515 mode != VOIDmode;
3516 mode = GET_MODE_WIDER_MODE (mode))
3518 enum insn_code code = movstr_optab[(int) mode];
3519 insn_operand_predicate_fn pred;
3521 if (code != CODE_FOR_nothing
3522 && ((GET_CODE (size) == CONST_INT
3523 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3524 <= (GET_MODE_MASK (mode) >> 1)))
3525 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3526 && (!(pred = insn_data[(int) code].operand[0].predicate)
3527 || ((*pred) (target, BLKmode)))
3528 && (!(pred = insn_data[(int) code].operand[1].predicate)
3529 || ((*pred) (xinner, BLKmode)))
3530 && (!(pred = insn_data[(int) code].operand[3].predicate)
3531 || ((*pred) (opalign, VOIDmode))))
3533 rtx op2 = convert_to_mode (mode, size, 1);
3534 rtx last = get_last_insn ();
3535 rtx pat;
3537 pred = insn_data[(int) code].operand[2].predicate;
3538 if (pred != 0 && ! (*pred) (op2, mode))
3539 op2 = copy_to_mode_reg (mode, op2);
3541 pat = GEN_FCN ((int) code) (target, xinner,
3542 op2, opalign);
3543 if (pat)
3545 emit_insn (pat);
3546 goto ret;
3548 else
3549 delete_insns_since (last);
3554 if (!ACCUMULATE_OUTGOING_ARGS)
3556 /* If the source is referenced relative to the stack pointer,
3557 copy it to another register to stabilize it. We do not need
3558 to do this if we know that we won't be changing sp. */
3560 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3561 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3562 temp = copy_to_reg (temp);
3565 /* Make inhibit_defer_pop nonzero around the library call
3566 to force it to pop the bcopy-arguments right away. */
3567 NO_DEFER_POP;
3568 #ifdef TARGET_MEM_FUNCTIONS
3569 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3570 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3571 convert_to_mode (TYPE_MODE (sizetype),
3572 size, TREE_UNSIGNED (sizetype)),
3573 TYPE_MODE (sizetype));
3574 #else
3575 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3576 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3577 convert_to_mode (TYPE_MODE (integer_type_node),
3578 size,
3579 TREE_UNSIGNED (integer_type_node)),
3580 TYPE_MODE (integer_type_node));
3581 #endif
3582 OK_DEFER_POP;
3585 else if (partial > 0)
3587 /* Scalar partly in registers. */
3589 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3590 int i;
3591 int not_stack;
3592 /* # words of start of argument
3593 that we must make space for but need not store. */
3594 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3595 int args_offset = INTVAL (args_so_far);
3596 int skip;
3598 /* Push padding now if padding above and stack grows down,
3599 or if padding below and stack grows up.
3600 But if space already allocated, this has already been done. */
3601 if (extra && args_addr == 0
3602 && where_pad != none && where_pad != stack_direction)
3603 anti_adjust_stack (GEN_INT (extra));
3605 /* If we make space by pushing it, we might as well push
3606 the real data. Otherwise, we can leave OFFSET nonzero
3607 and leave the space uninitialized. */
3608 if (args_addr == 0)
3609 offset = 0;
3611 /* Now NOT_STACK gets the number of words that we don't need to
3612 allocate on the stack. */
3613 not_stack = partial - offset;
3615 /* If the partial register-part of the arg counts in its stack size,
3616 skip the part of stack space corresponding to the registers.
3617 Otherwise, start copying to the beginning of the stack space,
3618 by setting SKIP to 0. */
3619 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3621 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3622 x = validize_mem (force_const_mem (mode, x));
3624 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3625 SUBREGs of such registers are not allowed. */
3626 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3627 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3628 x = copy_to_reg (x);
3630 /* Loop over all the words allocated on the stack for this arg. */
3631 /* We can do it by words, because any scalar bigger than a word
3632 has a size a multiple of a word. */
3633 #ifndef PUSH_ARGS_REVERSED
3634 for (i = not_stack; i < size; i++)
3635 #else
3636 for (i = size - 1; i >= not_stack; i--)
3637 #endif
3638 if (i >= not_stack + offset)
3639 emit_push_insn (operand_subword_force (x, i, mode),
3640 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3641 0, args_addr,
3642 GEN_INT (args_offset + ((i - not_stack + skip)
3643 * UNITS_PER_WORD)),
3644 reg_parm_stack_space, alignment_pad);
3646 else
3648 rtx addr;
3649 rtx target = NULL_RTX;
3650 rtx dest;
3652 /* Push padding now if padding above and stack grows down,
3653 or if padding below and stack grows up.
3654 But if space already allocated, this has already been done. */
3655 if (extra && args_addr == 0
3656 && where_pad != none && where_pad != stack_direction)
3657 anti_adjust_stack (GEN_INT (extra));
3659 #ifdef PUSH_ROUNDING
3660 if (args_addr == 0 && PUSH_ARGS)
3661 emit_single_push_insn (mode, x, type);
3662 else
3663 #endif
3665 if (GET_CODE (args_so_far) == CONST_INT)
3666 addr
3667 = memory_address (mode,
3668 plus_constant (args_addr,
3669 INTVAL (args_so_far)));
3670 else
3671 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3672 args_so_far));
3673 target = addr;
3674 dest = gen_rtx_MEM (mode, addr);
3675 if (type != 0)
3677 set_mem_attributes (dest, type, 1);
3678 /* Function incoming arguments may overlap with sibling call
3679 outgoing arguments and we cannot allow reordering of reads
3680 from function arguments with stores to outgoing arguments
3681 of sibling calls. */
3682 set_mem_alias_set (dest, 0);
3685 emit_move_insn (dest, x);
3690 ret:
3691 /* If part should go in registers, copy that part
3692 into the appropriate registers. Do this now, at the end,
3693 since mem-to-mem copies above may do function calls. */
3694 if (partial > 0 && reg != 0)
3696 /* Handle calls that pass values in multiple non-contiguous locations.
3697 The Irix 6 ABI has examples of this. */
3698 if (GET_CODE (reg) == PARALLEL)
3699 emit_group_load (reg, x, -1); /* ??? size? */
3700 else
3701 move_block_to_reg (REGNO (reg), x, partial, mode);
3704 if (extra && args_addr == 0 && where_pad == stack_direction)
3705 anti_adjust_stack (GEN_INT (extra));
3707 if (alignment_pad && args_addr == 0)
3708 anti_adjust_stack (alignment_pad);
3711 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3712 operations. */
3714 static rtx
3715 get_subtarget (x)
3716 rtx x;
3718 return ((x == 0
3719 /* Only registers can be subtargets. */
3720 || GET_CODE (x) != REG
3721 /* If the register is readonly, it can't be set more than once. */
3722 || RTX_UNCHANGING_P (x)
3723 /* Don't use hard regs to avoid extending their life. */
3724 || REGNO (x) < FIRST_PSEUDO_REGISTER
3725 /* Avoid subtargets inside loops,
3726 since they hide some invariant expressions. */
3727 || preserve_subexpressions_p ())
3728 ? 0 : x);
3731 /* Expand an assignment that stores the value of FROM into TO.
3732 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3733 (This may contain a QUEUED rtx;
3734 if the value is constant, this rtx is a constant.)
3735 Otherwise, the returned value is NULL_RTX.
3737 SUGGEST_REG is no longer actually used.
3738 It used to mean, copy the value through a register
3739 and return that register, if that is possible.
3740 We now use WANT_VALUE to decide whether to do this. */
3742 rtx
3743 expand_assignment (to, from, want_value, suggest_reg)
3744 tree to, from;
3745 int want_value;
3746 int suggest_reg ATTRIBUTE_UNUSED;
3748 rtx to_rtx = 0;
3749 rtx result;
3751 /* Don't crash if the lhs of the assignment was erroneous. */
3753 if (TREE_CODE (to) == ERROR_MARK)
3755 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3756 return want_value ? result : NULL_RTX;
3759 /* Assignment of a structure component needs special treatment
3760 if the structure component's rtx is not simply a MEM.
3761 Assignment of an array element at a constant index, and assignment of
3762 an array element in an unaligned packed structure field, has the same
3763 problem. */
3765 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3766 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3768 enum machine_mode mode1;
3769 HOST_WIDE_INT bitsize, bitpos;
3770 rtx orig_to_rtx;
3771 tree offset;
3772 int unsignedp;
3773 int volatilep = 0;
3774 tree tem;
3776 push_temp_slots ();
3777 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3778 &unsignedp, &volatilep);
3780 /* If we are going to use store_bit_field and extract_bit_field,
3781 make sure to_rtx will be safe for multiple use. */
3783 if (mode1 == VOIDmode && want_value)
3784 tem = stabilize_reference (tem);
3786 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3788 if (offset != 0)
3790 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3792 if (GET_CODE (to_rtx) != MEM)
3793 abort ();
3795 #ifdef POINTERS_EXTEND_UNSIGNED
3796 if (GET_MODE (offset_rtx) != Pmode)
3797 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3798 #else
3799 if (GET_MODE (offset_rtx) != ptr_mode)
3800 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3801 #endif
3803 /* A constant address in TO_RTX can have VOIDmode; we must not try
3804 to call force_reg in that case. Avoid that case. */
3805 if (GET_CODE (to_rtx) == MEM
3806 && GET_MODE (to_rtx) == BLKmode
3807 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3808 && bitsize > 0
3809 && (bitpos % bitsize) == 0
3810 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3811 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3813 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3814 bitpos = 0;
3817 to_rtx = offset_address (to_rtx, offset_rtx,
3818 highest_pow2_factor_for_type (TREE_TYPE (to),
3819 offset));
3822 if (GET_CODE (to_rtx) == MEM)
3824 tree old_expr = MEM_EXPR (to_rtx);
3826 /* If the field is at offset zero, we could have been given the
3827 DECL_RTX of the parent struct. Don't munge it. */
3828 to_rtx = shallow_copy_rtx (to_rtx);
3830 set_mem_attributes (to_rtx, to, 0);
3832 /* If we changed MEM_EXPR, that means we're now referencing
3833 the COMPONENT_REF, which means that MEM_OFFSET must be
3834 relative to that field. But we've not yet reflected BITPOS
3835 in TO_RTX. This will be done in store_field. Adjust for
3836 that by biasing MEM_OFFSET by -bitpos. */
3837 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3838 && (bitpos / BITS_PER_UNIT) != 0)
3839 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3840 - (bitpos / BITS_PER_UNIT)));
3843 /* Deal with volatile and readonly fields. The former is only done
3844 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3845 if (volatilep && GET_CODE (to_rtx) == MEM)
3847 if (to_rtx == orig_to_rtx)
3848 to_rtx = copy_rtx (to_rtx);
3849 MEM_VOLATILE_P (to_rtx) = 1;
3852 if (TREE_CODE (to) == COMPONENT_REF
3853 && TREE_READONLY (TREE_OPERAND (to, 1)))
3855 if (to_rtx == orig_to_rtx)
3856 to_rtx = copy_rtx (to_rtx);
3857 RTX_UNCHANGING_P (to_rtx) = 1;
3860 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3862 if (to_rtx == orig_to_rtx)
3863 to_rtx = copy_rtx (to_rtx);
3864 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3867 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3868 (want_value
3869 /* Spurious cast for HPUX compiler. */
3870 ? ((enum machine_mode)
3871 TYPE_MODE (TREE_TYPE (to)))
3872 : VOIDmode),
3873 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3875 preserve_temp_slots (result);
3876 free_temp_slots ();
3877 pop_temp_slots ();
3879 /* If the value is meaningful, convert RESULT to the proper mode.
3880 Otherwise, return nothing. */
3881 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3882 TYPE_MODE (TREE_TYPE (from)),
3883 result,
3884 TREE_UNSIGNED (TREE_TYPE (to)))
3885 : NULL_RTX);
3888 /* If the rhs is a function call and its value is not an aggregate,
3889 call the function before we start to compute the lhs.
3890 This is needed for correct code for cases such as
3891 val = setjmp (buf) on machines where reference to val
3892 requires loading up part of an address in a separate insn.
3894 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3895 since it might be a promoted variable where the zero- or sign- extension
3896 needs to be done. Handling this in the normal way is safe because no
3897 computation is done before the call. */
3898 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3899 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3900 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3901 && GET_CODE (DECL_RTL (to)) == REG))
3903 rtx value;
3905 push_temp_slots ();
3906 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3907 if (to_rtx == 0)
3908 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3910 /* Handle calls that return values in multiple non-contiguous locations.
3911 The Irix 6 ABI has examples of this. */
3912 if (GET_CODE (to_rtx) == PARALLEL)
3913 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3914 else if (GET_MODE (to_rtx) == BLKmode)
3915 emit_block_move (to_rtx, value, expr_size (from));
3916 else
3918 #ifdef POINTERS_EXTEND_UNSIGNED
3919 if (POINTER_TYPE_P (TREE_TYPE (to))
3920 && GET_MODE (to_rtx) != GET_MODE (value))
3921 value = convert_memory_address (GET_MODE (to_rtx), value);
3922 #endif
3923 emit_move_insn (to_rtx, value);
3925 preserve_temp_slots (to_rtx);
3926 free_temp_slots ();
3927 pop_temp_slots ();
3928 return want_value ? to_rtx : NULL_RTX;
3931 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3932 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3934 if (to_rtx == 0)
3935 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3937 /* Don't move directly into a return register. */
3938 if (TREE_CODE (to) == RESULT_DECL
3939 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3941 rtx temp;
3943 push_temp_slots ();
3944 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3946 if (GET_CODE (to_rtx) == PARALLEL)
3947 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3948 else
3949 emit_move_insn (to_rtx, temp);
3951 preserve_temp_slots (to_rtx);
3952 free_temp_slots ();
3953 pop_temp_slots ();
3954 return want_value ? to_rtx : NULL_RTX;
3957 /* In case we are returning the contents of an object which overlaps
3958 the place the value is being stored, use a safe function when copying
3959 a value through a pointer into a structure value return block. */
3960 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3961 && current_function_returns_struct
3962 && !current_function_returns_pcc_struct)
3964 rtx from_rtx, size;
3966 push_temp_slots ();
3967 size = expr_size (from);
3968 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3970 #ifdef TARGET_MEM_FUNCTIONS
3971 emit_library_call (memmove_libfunc, LCT_NORMAL,
3972 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3973 XEXP (from_rtx, 0), Pmode,
3974 convert_to_mode (TYPE_MODE (sizetype),
3975 size, TREE_UNSIGNED (sizetype)),
3976 TYPE_MODE (sizetype));
3977 #else
3978 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3979 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3980 XEXP (to_rtx, 0), Pmode,
3981 convert_to_mode (TYPE_MODE (integer_type_node),
3982 size, TREE_UNSIGNED (integer_type_node)),
3983 TYPE_MODE (integer_type_node));
3984 #endif
3986 preserve_temp_slots (to_rtx);
3987 free_temp_slots ();
3988 pop_temp_slots ();
3989 return want_value ? to_rtx : NULL_RTX;
3992 /* Compute FROM and store the value in the rtx we got. */
3994 push_temp_slots ();
3995 result = store_expr (from, to_rtx, want_value);
3996 preserve_temp_slots (result);
3997 free_temp_slots ();
3998 pop_temp_slots ();
3999 return want_value ? result : NULL_RTX;
4002 /* Generate code for computing expression EXP,
4003 and storing the value into TARGET.
4004 TARGET may contain a QUEUED rtx.
4006 If WANT_VALUE is nonzero, return a copy of the value
4007 not in TARGET, so that we can be sure to use the proper
4008 value in a containing expression even if TARGET has something
4009 else stored in it. If possible, we copy the value through a pseudo
4010 and return that pseudo. Or, if the value is constant, we try to
4011 return the constant. In some cases, we return a pseudo
4012 copied *from* TARGET.
4014 If the mode is BLKmode then we may return TARGET itself.
4015 It turns out that in BLKmode it doesn't cause a problem,
4016 because C has no operators that could combine two different
4017 assignments into the same BLKmode object with different values
4018 with no sequence point. Will other languages need this to
4019 be more thorough?
4021 If WANT_VALUE is 0, we return NULL, to make sure
4022 to catch quickly any cases where the caller uses the value
4023 and fails to set WANT_VALUE. */
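/* As a rough usage sketch, the ordinary path of expand_assignment above
   ends with  result = store_expr (from, to_rtx, want_value);  so the
   value of FROM lands in TO_RTX and, when requested, is also returned
   to the caller.  */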
4026 store_expr (exp, target, want_value)
4027 tree exp;
4028 rtx target;
4029 int want_value;
4031 rtx temp;
4032 int dont_return_target = 0;
4033 int dont_store_target = 0;
4035 if (TREE_CODE (exp) == COMPOUND_EXPR)
4037 /* Perform first part of compound expression, then assign from second
4038 part. */
4039 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4040 emit_queue ();
4041 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4043 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4045 /* For a conditional expression, get a safe form of the target. Then
4046 test the condition, doing the appropriate assignment on either
4047 side. This avoids the creation of unnecessary temporaries.
4048 For non-BLKmode, it is more efficient not to do this. */
4050 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4052 emit_queue ();
4053 target = protect_from_queue (target, 1);
4055 do_pending_stack_adjust ();
4056 NO_DEFER_POP;
4057 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4058 start_cleanup_deferral ();
4059 store_expr (TREE_OPERAND (exp, 1), target, 0);
4060 end_cleanup_deferral ();
4061 emit_queue ();
4062 emit_jump_insn (gen_jump (lab2));
4063 emit_barrier ();
4064 emit_label (lab1);
4065 start_cleanup_deferral ();
4066 store_expr (TREE_OPERAND (exp, 2), target, 0);
4067 end_cleanup_deferral ();
4068 emit_queue ();
4069 emit_label (lab2);
4070 OK_DEFER_POP;
4072 return want_value ? target : NULL_RTX;
4074 else if (queued_subexp_p (target))
4075 /* If target contains a postincrement, let's not risk
4076 using it as the place to generate the rhs. */
4078 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4080 /* Expand EXP into a new pseudo. */
4081 temp = gen_reg_rtx (GET_MODE (target));
4082 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4084 else
4085 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4087 /* If target is volatile, ANSI requires accessing the value
4088 *from* the target, if it is accessed. So make that happen.
4089 In no case return the target itself. */
4090 if (! MEM_VOLATILE_P (target) && want_value)
4091 dont_return_target = 1;
4093 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4094 && GET_MODE (target) != BLKmode)
4095 /* If target is in memory and caller wants value in a register instead,
4096 arrange that. Pass TARGET as target for expand_expr so that,
4097 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4098 We know expand_expr will not use the target in that case.
4099 Don't do this if TARGET is volatile because we are supposed
4100 to write it and then read it. */
4102 temp = expand_expr (exp, target, GET_MODE (target), 0);
4103 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4105 /* If TEMP is already in the desired TARGET, only copy it from
4106 memory and don't store it there again. */
4107 if (temp == target
4108 || (rtx_equal_p (temp, target)
4109 && ! side_effects_p (temp) && ! side_effects_p (target)))
4110 dont_store_target = 1;
4111 temp = copy_to_reg (temp);
4113 dont_return_target = 1;
4115 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4116 /* If this is a scalar in a register that is stored in a wider mode
4117 than the declared mode, compute the result into its declared mode
4118 and then convert to the wider mode. Our value is the computed
4119 expression. */
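/* For example, on a target that promotes subword variables, a variable
   declared  short s;  may live in a full-word register whose SUBREG is
   marked SUBREG_PROMOTED_VAR_P; we then compute the value in the
   declared (narrower) mode and extend it into the wider register.  */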
4121 rtx inner_target = 0;
4123 /* If we don't want a value, we can do the conversion inside EXP,
4124 which will often result in some optimizations. Do the conversion
4125 in two steps: first change the signedness, if needed, then
4126 the extend. But don't do this if the type of EXP is a subtype
4127 of something else since then the conversion might involve
4128 more than just converting modes. */
4129 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4130 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4132 if (TREE_UNSIGNED (TREE_TYPE (exp))
4133 != SUBREG_PROMOTED_UNSIGNED_P (target))
4134 exp = convert
4135 ((*lang_hooks.types.signed_or_unsigned_type)
4136 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4138 exp = convert ((*lang_hooks.types.type_for_mode)
4139 (GET_MODE (SUBREG_REG (target)),
4140 SUBREG_PROMOTED_UNSIGNED_P (target)),
4141 exp);
4143 inner_target = SUBREG_REG (target);
4146 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4148 /* If TEMP is a volatile MEM and we want a result value, make
4149 the access now so it gets done only once. Likewise if
4150 it contains TARGET. */
4151 if (GET_CODE (temp) == MEM && want_value
4152 && (MEM_VOLATILE_P (temp)
4153 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4154 temp = copy_to_reg (temp);
4156 /* If TEMP is a VOIDmode constant, use convert_modes to make
4157 sure that we properly convert it. */
4158 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4160 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4161 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4162 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4163 GET_MODE (target), temp,
4164 SUBREG_PROMOTED_UNSIGNED_P (target));
4167 convert_move (SUBREG_REG (target), temp,
4168 SUBREG_PROMOTED_UNSIGNED_P (target));
4170 /* If we promoted a constant, change the mode back down to match
4171 target. Otherwise, the caller might get confused by a result whose
4172 mode is larger than expected. */
4174 if (want_value && GET_MODE (temp) != GET_MODE (target))
4176 if (GET_MODE (temp) != VOIDmode)
4178 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4179 SUBREG_PROMOTED_VAR_P (temp) = 1;
4180 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4181 SUBREG_PROMOTED_UNSIGNED_P (target));
4183 else
4184 temp = convert_modes (GET_MODE (target),
4185 GET_MODE (SUBREG_REG (target)),
4186 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4189 return want_value ? temp : NULL_RTX;
4191 else
4193 temp = expand_expr (exp, target, GET_MODE (target), 0);
4194 /* Return TARGET if it's a specified hardware register.
4195 If TARGET is a volatile mem ref, either return TARGET
4196 or return a reg copied *from* TARGET; ANSI requires this.
4198 Otherwise, if TEMP is not TARGET, return TEMP
4199 if it is constant (for efficiency),
4200 or if we really want the correct value. */
4201 if (!(target && GET_CODE (target) == REG
4202 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4203 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4204 && ! rtx_equal_p (temp, target)
4205 && (CONSTANT_P (temp) || want_value))
4206 dont_return_target = 1;
4209 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4210 the same as that of TARGET, adjust the constant. This is needed, for
4211 example, in case it is a CONST_DOUBLE and we want only a word-sized
4212 value. */
4213 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4214 && TREE_CODE (exp) != ERROR_MARK
4215 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4216 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4217 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4219 /* If value was not generated in the target, store it there.
4220 Convert the value to TARGET's type first if necessary.
4221 If TEMP and TARGET compare equal according to rtx_equal_p, but
4222 one or both of them are volatile memory refs, we have to distinguish
4223 two cases:
4224 - expand_expr has used TARGET. In this case, we must not generate
4225 another copy. This can be detected by TARGET being equal according
4226 to == .
4227 - expand_expr has not used TARGET - that means that the source just
4228 happens to have the same RTX form. Since temp will have been created
4229 by expand_expr, it will compare unequal according to == .
4230 We must generate a copy in this case, to reach the correct number
4231 of volatile memory references. */
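/* For instance, if V is a volatile int, the assignment  v = v;  must
   still perform both the read and the write, so a copy is emitted even
   though source and destination describe the same location.  */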
4233 if ((! rtx_equal_p (temp, target)
4234 || (temp != target && (side_effects_p (temp)
4235 || side_effects_p (target))))
4236 && TREE_CODE (exp) != ERROR_MARK
4237 && ! dont_store_target
4238 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4239 but TARGET is not a valid memory reference, TEMP will differ
4240 from TARGET although it is really the same location. */
4241 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4242 || target != DECL_RTL_IF_SET (exp)))
4244 target = protect_from_queue (target, 1);
4245 if (GET_MODE (temp) != GET_MODE (target)
4246 && GET_MODE (temp) != VOIDmode)
4248 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4249 if (dont_return_target)
4251 /* In this case, we will return TEMP,
4252 so make sure it has the proper mode.
4253 But don't forget to store the value into TARGET. */
4254 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4255 emit_move_insn (target, temp);
4257 else
4258 convert_move (target, temp, unsignedp);
4261 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4263 /* Handle copying a string constant into an array. The string
4264 constant may be shorter than the array. So copy just the string's
4265 actual length, and clear the rest. First get the size of the data
4266 type of the string, which is actually the size of the target. */
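/* For example, for  char buf[10] = "hi";  we copy the string's bytes
   (three here, counting the terminating nul) and then clear the
   remaining seven bytes of the array.  */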
4267 rtx size = expr_size (exp);
4269 if (GET_CODE (size) == CONST_INT
4270 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4271 emit_block_move (target, temp, size);
4272 else
4274 /* Compute the size of the data to copy from the string. */
4275 tree copy_size
4276 = size_binop (MIN_EXPR,
4277 make_tree (sizetype, size),
4278 size_int (TREE_STRING_LENGTH (exp)));
4279 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4280 VOIDmode, 0);
4281 rtx label = 0;
4283 /* Copy that much. */
4284 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4285 emit_block_move (target, temp, copy_size_rtx);
4287 /* Figure out how much is left in TARGET that we have to clear.
4288 Do all calculations in ptr_mode. */
4289 if (GET_CODE (copy_size_rtx) == CONST_INT)
4291 size = plus_constant (size, -INTVAL (copy_size_rtx));
4292 target = adjust_address (target, BLKmode,
4293 INTVAL (copy_size_rtx));
4295 else
4297 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4298 copy_size_rtx, NULL_RTX, 0,
4299 OPTAB_LIB_WIDEN);
4301 #ifdef POINTERS_EXTEND_UNSIGNED
4302 if (GET_MODE (copy_size_rtx) != Pmode)
4303 copy_size_rtx = convert_memory_address (Pmode,
4304 copy_size_rtx);
4305 #endif
4307 target = offset_address (target, copy_size_rtx,
4308 highest_pow2_factor (copy_size));
4309 label = gen_label_rtx ();
4310 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4311 GET_MODE (size), 0, label);
4314 if (size != const0_rtx)
4315 clear_storage (target, size);
4317 if (label)
4318 emit_label (label);
4321 /* Handle calls that return values in multiple non-contiguous locations.
4322 The Irix 6 ABI has examples of this. */
4323 else if (GET_CODE (target) == PARALLEL)
4324 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4325 else if (GET_MODE (temp) == BLKmode)
4326 emit_block_move (target, temp, expr_size (exp));
4327 else
4328 emit_move_insn (target, temp);
4331 /* If we don't want a value, return NULL_RTX. */
4332 if (! want_value)
4333 return NULL_RTX;
4335 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4336 ??? The latter test doesn't seem to make sense. */
4337 else if (dont_return_target && GET_CODE (temp) != MEM)
4338 return temp;
4340 /* Return TARGET itself if it is a hard register. */
4341 else if (want_value && GET_MODE (target) != BLKmode
4342 && ! (GET_CODE (target) == REG
4343 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4344 return copy_to_reg (target);
4346 else
4347 return target;
4350 /* Return 1 if EXP just contains zeros. */
4352 static int
4353 is_zeros_p (exp)
4354 tree exp;
4356 tree elt;
4358 switch (TREE_CODE (exp))
4360 case CONVERT_EXPR:
4361 case NOP_EXPR:
4362 case NON_LVALUE_EXPR:
4363 case VIEW_CONVERT_EXPR:
4364 return is_zeros_p (TREE_OPERAND (exp, 0));
4366 case INTEGER_CST:
4367 return integer_zerop (exp);
4369 case COMPLEX_CST:
4370 return
4371 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4373 case REAL_CST:
4374 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4376 case VECTOR_CST:
4377 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4378 elt = TREE_CHAIN (elt))
4379 if (!is_zeros_p (TREE_VALUE (elt)))
4380 return 0;
4382 return 1;
4384 case CONSTRUCTOR:
4385 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4386 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4387 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4388 if (! is_zeros_p (TREE_VALUE (elt)))
4389 return 0;
4391 return 1;
4393 default:
4394 return 0;
4398 /* Return 1 if EXP contains mostly (3/4) zeros. */
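/* For example,  { 0, 0, 0, 5 }  counts as mostly zeros, since
   4 * 3 >= 3 * 4, whereas  { 0, 5, 5, 5 }  does not.  */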
4400 static int
4401 mostly_zeros_p (exp)
4402 tree exp;
4404 if (TREE_CODE (exp) == CONSTRUCTOR)
4406 int elts = 0, zeros = 0;
4407 tree elt = CONSTRUCTOR_ELTS (exp);
4408 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4410 /* If there are no ranges of true bits, it is all zero. */
4411 return elt == NULL_TREE;
4413 for (; elt; elt = TREE_CHAIN (elt))
4415 /* We do not handle the case where the index is a RANGE_EXPR,
4416 so the statistic will be somewhat inaccurate.
4417 We do make a more accurate count in store_constructor itself,
4418 and since this function is only used for nested array elements,
4419 this should be close enough. */
4420 if (mostly_zeros_p (TREE_VALUE (elt)))
4421 zeros++;
4422 elts++;
4425 return 4 * zeros >= 3 * elts;
4428 return is_zeros_p (exp);
4431 /* Helper function for store_constructor.
4432 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4433 TYPE is the type of the CONSTRUCTOR, not the element type.
4434 CLEARED is as for store_constructor.
4435 ALIAS_SET is the alias set to use for any stores.
4437 This provides a recursive shortcut back to store_constructor when it isn't
4438 necessary to go through store_field. This is so that we can pass through
4439 the cleared field to let store_constructor know that we may not have to
4440 clear a substructure if the outer structure has already been cleared. */
4442 static void
4443 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4444 alias_set)
4445 rtx target;
4446 unsigned HOST_WIDE_INT bitsize;
4447 HOST_WIDE_INT bitpos;
4448 enum machine_mode mode;
4449 tree exp, type;
4450 int cleared;
4451 int alias_set;
4453 if (TREE_CODE (exp) == CONSTRUCTOR
4454 && bitpos % BITS_PER_UNIT == 0
4455 /* If we have a non-zero bitpos for a register target, then we just
4456 let store_field do the bitfield handling. This is unlikely to
4457 generate unnecessary clear instructions anyway. */
4458 && (bitpos == 0 || GET_CODE (target) == MEM))
4460 if (GET_CODE (target) == MEM)
4461 target
4462 = adjust_address (target,
4463 GET_MODE (target) == BLKmode
4464 || 0 != (bitpos
4465 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4466 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4469 /* Update the alias set, if required. */
4470 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4471 && MEM_ALIAS_SET (target) != 0)
4473 target = copy_rtx (target);
4474 set_mem_alias_set (target, alias_set);
4477 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4479 else
4480 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4481 alias_set);
4484 /* Store the value of constructor EXP into the rtx TARGET.
4485 TARGET is either a REG or a MEM; we know it cannot conflict, since
4486 safe_from_p has been called.
4487 CLEARED is true if TARGET is known to have been zeroed.
4488 SIZE is the number of bytes of TARGET we are allowed to modify: this
4489 may not be the same as the size of EXP if we are assigning to a field
4490 which has been packed to exclude padding bits. */
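/* For example, for  struct S { int a, b, c; } s = { 1 };  the
   constructor supplies fewer fields than the structure has, so the
   whole object is typically cleared first and only the field A is
   stored explicitly below.  */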
4492 static void
4493 store_constructor (exp, target, cleared, size)
4494 tree exp;
4495 rtx target;
4496 int cleared;
4497 HOST_WIDE_INT size;
4499 tree type = TREE_TYPE (exp);
4500 #ifdef WORD_REGISTER_OPERATIONS
4501 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4502 #endif
4504 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4505 || TREE_CODE (type) == QUAL_UNION_TYPE)
4507 tree elt;
4509 /* We either clear the aggregate or indicate the value is dead. */
4510 if ((TREE_CODE (type) == UNION_TYPE
4511 || TREE_CODE (type) == QUAL_UNION_TYPE)
4512 && ! cleared
4513 && ! CONSTRUCTOR_ELTS (exp))
4514 /* If the constructor is empty, clear the union. */
4516 clear_storage (target, expr_size (exp));
4517 cleared = 1;
4520 /* If we are building a static constructor into a register,
4521 set the initial value as zero so we can fold the value into
4522 a constant. But if more than one register is involved,
4523 this probably loses. */
4524 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4525 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4527 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4528 cleared = 1;
4531 /* If the constructor has fewer fields than the structure
4532 or if we are initializing the structure to mostly zeros,
4533 clear the whole structure first. Don't do this if TARGET is a
4534 register whose mode size isn't equal to SIZE since clear_storage
4535 can't handle this case. */
4536 else if (! cleared && size > 0
4537 && ((list_length (CONSTRUCTOR_ELTS (exp))
4538 != fields_length (type))
4539 || mostly_zeros_p (exp))
4540 && (GET_CODE (target) != REG
4541 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4542 == size)))
4544 clear_storage (target, GEN_INT (size));
4545 cleared = 1;
4548 if (! cleared)
4549 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4551 /* Store each element of the constructor into
4552 the corresponding field of TARGET. */
4554 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4556 tree field = TREE_PURPOSE (elt);
4557 tree value = TREE_VALUE (elt);
4558 enum machine_mode mode;
4559 HOST_WIDE_INT bitsize;
4560 HOST_WIDE_INT bitpos = 0;
4561 int unsignedp;
4562 tree offset;
4563 rtx to_rtx = target;
4565 /* Just ignore missing fields.
4566 We cleared the whole structure, above,
4567 if any fields are missing. */
4568 if (field == 0)
4569 continue;
4571 if (cleared && is_zeros_p (value))
4572 continue;
4574 if (host_integerp (DECL_SIZE (field), 1))
4575 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4576 else
4577 bitsize = -1;
4579 unsignedp = TREE_UNSIGNED (field);
4580 mode = DECL_MODE (field);
4581 if (DECL_BIT_FIELD (field))
4582 mode = VOIDmode;
4584 offset = DECL_FIELD_OFFSET (field);
4585 if (host_integerp (offset, 0)
4586 && host_integerp (bit_position (field), 0))
4588 bitpos = int_bit_position (field);
4589 offset = 0;
4591 else
4592 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4594 if (offset)
4596 rtx offset_rtx;
4598 if (contains_placeholder_p (offset))
4599 offset = build (WITH_RECORD_EXPR, sizetype,
4600 offset, make_tree (TREE_TYPE (exp), target));
4602 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4603 if (GET_CODE (to_rtx) != MEM)
4604 abort ();
4606 #ifdef POINTERS_EXTEND_UNSIGNED
4607 if (GET_MODE (offset_rtx) != Pmode)
4608 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4609 #else
4610 if (GET_MODE (offset_rtx) != ptr_mode)
4611 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4612 #endif
4614 to_rtx = offset_address (to_rtx, offset_rtx,
4615 highest_pow2_factor (offset));
4618 if (TREE_READONLY (field))
4620 if (GET_CODE (to_rtx) == MEM)
4621 to_rtx = copy_rtx (to_rtx);
4623 RTX_UNCHANGING_P (to_rtx) = 1;
4626 #ifdef WORD_REGISTER_OPERATIONS
4627 /* If this initializes a field that is smaller than a word, at the
4628 start of a word, try to widen it to a full word.
4629 This special case allows us to output C++ member function
4630 initializations in a form that the optimizers can understand. */
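/* For example, a constant being stored into a char-sized field at the
   start of a word of a REG target is widened here into a full-word
   store (shifted into place on big-endian machines) instead of a
   narrow bit-field insertion.  */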
4631 if (GET_CODE (target) == REG
4632 && bitsize < BITS_PER_WORD
4633 && bitpos % BITS_PER_WORD == 0
4634 && GET_MODE_CLASS (mode) == MODE_INT
4635 && TREE_CODE (value) == INTEGER_CST
4636 && exp_size >= 0
4637 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4639 tree type = TREE_TYPE (value);
4641 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4643 type = (*lang_hooks.types.type_for_size)
4644 (BITS_PER_WORD, TREE_UNSIGNED (type));
4645 value = convert (type, value);
4648 if (BYTES_BIG_ENDIAN)
4649 value
4650 = fold (build (LSHIFT_EXPR, type, value,
4651 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4652 bitsize = BITS_PER_WORD;
4653 mode = word_mode;
4655 #endif
4657 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4658 && DECL_NONADDRESSABLE_P (field))
4660 to_rtx = copy_rtx (to_rtx);
4661 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4664 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4665 value, type, cleared,
4666 get_alias_set (TREE_TYPE (field)));
4669 else if (TREE_CODE (type) == ARRAY_TYPE
4670 || TREE_CODE (type) == VECTOR_TYPE)
4672 tree elt;
4673 int i;
4674 int need_to_clear;
4675 tree domain = TYPE_DOMAIN (type);
4676 tree elttype = TREE_TYPE (type);
4677 int const_bounds_p;
4678 HOST_WIDE_INT minelt = 0;
4679 HOST_WIDE_INT maxelt = 0;
4681 /* Vectors are like arrays, but the domain is stored via an array
4682 type indirectly. */
4683 if (TREE_CODE (type) == VECTOR_TYPE)
4685 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4686 the same field as TYPE_DOMAIN, we are not guaranteed that
4687 it always will. */
4688 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4689 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4692 const_bounds_p = (TYPE_MIN_VALUE (domain)
4693 && TYPE_MAX_VALUE (domain)
4694 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4695 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4697 /* If we have constant bounds for the range of the type, get them. */
4698 if (const_bounds_p)
4700 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4701 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4704 /* If the constructor has fewer elements than the array,
4705 clear the whole array first. Similarly if this is
4706 a static constructor of a non-BLKmode object.
4707 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4708 need_to_clear = 1;
4709 else
4711 HOST_WIDE_INT count = 0, zero_count = 0;
4712 need_to_clear = ! const_bounds_p;
4714 /* This loop is a more accurate version of the loop in
4715 mostly_zeros_p (it handles RANGE_EXPR in an index).
4716 It is also needed to check for missing elements. */
4717 for (elt = CONSTRUCTOR_ELTS (exp);
4718 elt != NULL_TREE && ! need_to_clear;
4719 elt = TREE_CHAIN (elt))
4721 tree index = TREE_PURPOSE (elt);
4722 HOST_WIDE_INT this_node_count;
4724 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4726 tree lo_index = TREE_OPERAND (index, 0);
4727 tree hi_index = TREE_OPERAND (index, 1);
4729 if (! host_integerp (lo_index, 1)
4730 || ! host_integerp (hi_index, 1))
4732 need_to_clear = 1;
4733 break;
4736 this_node_count = (tree_low_cst (hi_index, 1)
4737 - tree_low_cst (lo_index, 1) + 1);
4739 else
4740 this_node_count = 1;
4742 count += this_node_count;
4743 if (mostly_zeros_p (TREE_VALUE (elt)))
4744 zero_count += this_node_count;
4747 /* Clear the entire array first if there are any missing elements,
4748 or if the incidence of zero elements is >= 75%. */
4749 if (! need_to_clear
4750 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4751 need_to_clear = 1;
4754 if (need_to_clear && size > 0)
4756 if (! cleared)
4758 if (REG_P (target))
4759 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4760 else
4761 clear_storage (target, GEN_INT (size));
4763 cleared = 1;
4765 else if (REG_P (target))
4766 /* Inform later passes that the old value is dead. */
4767 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4769 /* Store each element of the constructor into
4770 the corresponding element of TARGET, determined
4771 by counting the elements. */
4772 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4773 elt;
4774 elt = TREE_CHAIN (elt), i++)
4776 enum machine_mode mode;
4777 HOST_WIDE_INT bitsize;
4778 HOST_WIDE_INT bitpos;
4779 int unsignedp;
4780 tree value = TREE_VALUE (elt);
4781 tree index = TREE_PURPOSE (elt);
4782 rtx xtarget = target;
4784 if (cleared && is_zeros_p (value))
4785 continue;
4787 unsignedp = TREE_UNSIGNED (elttype);
4788 mode = TYPE_MODE (elttype);
4789 if (mode == BLKmode)
4790 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4791 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4792 : -1);
4793 else
4794 bitsize = GET_MODE_BITSIZE (mode);
4796 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4798 tree lo_index = TREE_OPERAND (index, 0);
4799 tree hi_index = TREE_OPERAND (index, 1);
4800 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4801 struct nesting *loop;
4802 HOST_WIDE_INT lo, hi, count;
4803 tree position;
4805 /* If the range is constant and "small", unroll the loop. */
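/* Such range indexes arise, for example, from the GNU C designated
   initializer extension:  int a[16] = { [0 ... 3] = 1 };  */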
4806 if (const_bounds_p
4807 && host_integerp (lo_index, 0)
4808 && host_integerp (hi_index, 0)
4809 && (lo = tree_low_cst (lo_index, 0),
4810 hi = tree_low_cst (hi_index, 0),
4811 count = hi - lo + 1,
4812 (GET_CODE (target) != MEM
4813 || count <= 2
4814 || (host_integerp (TYPE_SIZE (elttype), 1)
4815 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4816 <= 40 * 8)))))
4818 lo -= minelt; hi -= minelt;
4819 for (; lo <= hi; lo++)
4821 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4823 if (GET_CODE (target) == MEM
4824 && !MEM_KEEP_ALIAS_SET_P (target)
4825 && TREE_CODE (type) == ARRAY_TYPE
4826 && TYPE_NONALIASED_COMPONENT (type))
4828 target = copy_rtx (target);
4829 MEM_KEEP_ALIAS_SET_P (target) = 1;
4832 store_constructor_field
4833 (target, bitsize, bitpos, mode, value, type, cleared,
4834 get_alias_set (elttype));
4837 else
4839 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4840 loop_top = gen_label_rtx ();
4841 loop_end = gen_label_rtx ();
4843 unsignedp = TREE_UNSIGNED (domain);
4845 index = build_decl (VAR_DECL, NULL_TREE, domain);
4847 index_r
4848 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4849 &unsignedp, 0));
4850 SET_DECL_RTL (index, index_r);
4851 if (TREE_CODE (value) == SAVE_EXPR
4852 && SAVE_EXPR_RTL (value) == 0)
4854 /* Make sure value gets expanded once before the
4855 loop. */
4856 expand_expr (value, const0_rtx, VOIDmode, 0);
4857 emit_queue ();
4859 store_expr (lo_index, index_r, 0);
4860 loop = expand_start_loop (0);
4862 /* Assign value to element index. */
4863 position
4864 = convert (ssizetype,
4865 fold (build (MINUS_EXPR, TREE_TYPE (index),
4866 index, TYPE_MIN_VALUE (domain))));
4867 position = size_binop (MULT_EXPR, position,
4868 convert (ssizetype,
4869 TYPE_SIZE_UNIT (elttype)));
4871 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4872 xtarget = offset_address (target, pos_rtx,
4873 highest_pow2_factor (position));
4874 xtarget = adjust_address (xtarget, mode, 0);
4875 if (TREE_CODE (value) == CONSTRUCTOR)
4876 store_constructor (value, xtarget, cleared,
4877 bitsize / BITS_PER_UNIT);
4878 else
4879 store_expr (value, xtarget, 0);
4881 expand_exit_loop_if_false (loop,
4882 build (LT_EXPR, integer_type_node,
4883 index, hi_index));
4885 expand_increment (build (PREINCREMENT_EXPR,
4886 TREE_TYPE (index),
4887 index, integer_one_node), 0, 0);
4888 expand_end_loop ();
4889 emit_label (loop_end);
4892 else if ((index != 0 && ! host_integerp (index, 0))
4893 || ! host_integerp (TYPE_SIZE (elttype), 1))
4895 tree position;
4897 if (index == 0)
4898 index = ssize_int (1);
4900 if (minelt)
4901 index = convert (ssizetype,
4902 fold (build (MINUS_EXPR, index,
4903 TYPE_MIN_VALUE (domain))));
4905 position = size_binop (MULT_EXPR, index,
4906 convert (ssizetype,
4907 TYPE_SIZE_UNIT (elttype)));
4908 xtarget = offset_address (target,
4909 expand_expr (position, 0, VOIDmode, 0),
4910 highest_pow2_factor (position));
4911 xtarget = adjust_address (xtarget, mode, 0);
4912 store_expr (value, xtarget, 0);
4914 else
4916 if (index != 0)
4917 bitpos = ((tree_low_cst (index, 0) - minelt)
4918 * tree_low_cst (TYPE_SIZE (elttype), 1));
4919 else
4920 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4922 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4923 && TREE_CODE (type) == ARRAY_TYPE
4924 && TYPE_NONALIASED_COMPONENT (type))
4926 target = copy_rtx (target);
4927 MEM_KEEP_ALIAS_SET_P (target) = 1;
4930 store_constructor_field (target, bitsize, bitpos, mode, value,
4931 type, cleared, get_alias_set (elttype));
4937 /* Set constructor assignments. */
4938 else if (TREE_CODE (type) == SET_TYPE)
4940 tree elt = CONSTRUCTOR_ELTS (exp);
4941 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4942 tree domain = TYPE_DOMAIN (type);
4943 tree domain_min, domain_max, bitlength;
4945 /* The default implementation strategy is to extract the constant
4946 parts of the constructor, use that to initialize the target,
4947 and then "or" in whatever non-constant ranges we need in addition.
4949 If a large set is all zero or all ones, it is
4950 probably better to set it using memset (if available) or bzero.
4951 Also, if a large set has just a single range, it may be
4952 better to first clear the whole set (using bzero/memset)
4953 and then set the bits we want. */
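/* For instance, a set constructor whose only element is the single
   range [2, 5] can be handled by clearing the whole set and then
   setting just those four bits.  */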
4955 /* Check for all zeros. */
4956 if (elt == NULL_TREE && size > 0)
4958 if (!cleared)
4959 clear_storage (target, GEN_INT (size));
4960 return;
4963 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4964 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4965 bitlength = size_binop (PLUS_EXPR,
4966 size_diffop (domain_max, domain_min),
4967 ssize_int (1));
4969 nbits = tree_low_cst (bitlength, 1);
4971 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4972 are "complicated" (more than one range), initialize (the
4973 constant parts) by copying from a constant. */
4974 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4975 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4977 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4978 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4979 char *bit_buffer = (char *) alloca (nbits);
4980 HOST_WIDE_INT word = 0;
4981 unsigned int bit_pos = 0;
4982 unsigned int ibit = 0;
4983 unsigned int offset = 0; /* In bytes from beginning of set. */
4985 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4986 for (;;)
4988 if (bit_buffer[ibit])
4990 if (BYTES_BIG_ENDIAN)
4991 word |= (1 << (set_word_size - 1 - bit_pos));
4992 else
4993 word |= 1 << bit_pos;
4996 bit_pos++; ibit++;
4997 if (bit_pos >= set_word_size || ibit == nbits)
4999 if (word != 0 || ! cleared)
5001 rtx datum = GEN_INT (word);
5002 rtx to_rtx;
5004 /* The assumption here is that it is safe to use
5005 XEXP if the set is multi-word, but not if
5006 it's single-word. */
5007 if (GET_CODE (target) == MEM)
5008 to_rtx = adjust_address (target, mode, offset);
5009 else if (offset == 0)
5010 to_rtx = target;
5011 else
5012 abort ();
5013 emit_move_insn (to_rtx, datum);
5016 if (ibit == nbits)
5017 break;
5018 word = 0;
5019 bit_pos = 0;
5020 offset += set_word_size / BITS_PER_UNIT;
5024 else if (!cleared)
5025 /* Don't bother clearing storage if the set is all ones. */
5026 if (TREE_CHAIN (elt) != NULL_TREE
5027 || (TREE_PURPOSE (elt) == NULL_TREE
5028 ? nbits != 1
5029 : ( ! host_integerp (TREE_VALUE (elt), 0)
5030 || ! host_integerp (TREE_PURPOSE (elt), 0)
5031 || (tree_low_cst (TREE_VALUE (elt), 0)
5032 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5033 != (HOST_WIDE_INT) nbits))))
5034 clear_storage (target, expr_size (exp));
5036 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5038 /* Start of range of element or NULL. */
5039 tree startbit = TREE_PURPOSE (elt);
5040 /* End of range of element, or element value. */
5041 tree endbit = TREE_VALUE (elt);
5042 #ifdef TARGET_MEM_FUNCTIONS
5043 HOST_WIDE_INT startb, endb;
5044 #endif
5045 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5047 bitlength_rtx = expand_expr (bitlength,
5048 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5050 /* Handle non-range tuple element like [ expr ]. */
5051 if (startbit == NULL_TREE)
5053 startbit = save_expr (endbit);
5054 endbit = startbit;
5057 startbit = convert (sizetype, startbit);
5058 endbit = convert (sizetype, endbit);
5059 if (! integer_zerop (domain_min))
5061 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5062 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5064 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5065 EXPAND_CONST_ADDRESS);
5066 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5067 EXPAND_CONST_ADDRESS);
5069 if (REG_P (target))
5071 targetx
5072 = assign_temp
5073 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5074 (GET_MODE (target), 0),
5075 TYPE_QUAL_CONST)),
5076 0, 1, 1);
5077 emit_move_insn (targetx, target);
5080 else if (GET_CODE (target) == MEM)
5081 targetx = target;
5082 else
5083 abort ();
5085 #ifdef TARGET_MEM_FUNCTIONS
5086 /* Optimization: If startbit and endbit are
5087 constants divisible by BITS_PER_UNIT,
5088 call memset instead. */
5089 if (TREE_CODE (startbit) == INTEGER_CST
5090 && TREE_CODE (endbit) == INTEGER_CST
5091 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5092 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5094 emit_library_call (memset_libfunc, LCT_NORMAL,
5095 VOIDmode, 3,
5096 plus_constant (XEXP (targetx, 0),
5097 startb / BITS_PER_UNIT),
5098 Pmode,
5099 constm1_rtx, TYPE_MODE (integer_type_node),
5100 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5101 TYPE_MODE (sizetype));
5103 else
5104 #endif
5105 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5106 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5107 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5108 startbit_rtx, TYPE_MODE (sizetype),
5109 endbit_rtx, TYPE_MODE (sizetype));
5111 if (REG_P (target))
5112 emit_move_insn (target, targetx);
5116 else
5117 abort ();
5120 /* Store the value of EXP (an expression tree)
5121 into a subfield of TARGET which has mode MODE and occupies
5122 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5123 If MODE is VOIDmode, it means that we are storing into a bit-field.
5125 If VALUE_MODE is VOIDmode, return nothing in particular.
5126 UNSIGNEDP is not used in this case.
5128 Otherwise, return an rtx for the value stored. This rtx
5129 has mode VALUE_MODE if that is convenient to do.
5130 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5132 TYPE is the type of the underlying object,
5134 ALIAS_SET is the alias set for the destination. This value will
5135 (in general) be different from that for TARGET, since TARGET is a
5136 reference to the containing structure. */
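/* For example, storing into a field declared  int f : 3;  typically
   reaches this function with BITSIZE == 3 and MODE == VOIDmode, and is
   handled by the bit-field path below.  */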
5138 static rtx
5139 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5140 alias_set)
5141 rtx target;
5142 HOST_WIDE_INT bitsize;
5143 HOST_WIDE_INT bitpos;
5144 enum machine_mode mode;
5145 tree exp;
5146 enum machine_mode value_mode;
5147 int unsignedp;
5148 tree type;
5149 int alias_set;
5151 HOST_WIDE_INT width_mask = 0;
5153 if (TREE_CODE (exp) == ERROR_MARK)
5154 return const0_rtx;
5156 /* If we have nothing to store, do nothing unless the expression has
5157 side-effects. */
5158 if (bitsize == 0)
5159 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5160 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5161 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5163 /* If we are storing into an unaligned field of an aligned union that is
5164 in a register, we may have the mode of TARGET being an integer mode but
5165 MODE == BLKmode. In that case, get an aligned object whose size and
5166 alignment are the same as TARGET and store TARGET into it (we can avoid
5167 the store if the field being stored is the entire width of TARGET). Then
5168 call ourselves recursively to store the field into a BLKmode version of
5169 that object. Finally, load from the object into TARGET. This is not
5170 very efficient in general, but should only be slightly more expensive
5171 than the otherwise-required unaligned accesses. Perhaps this can be
5172 cleaned up later. */
5174 if (mode == BLKmode
5175 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5177 rtx object
5178 = assign_temp
5179 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5180 0, 1, 1);
5181 rtx blk_object = adjust_address (object, BLKmode, 0);
5183 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5184 emit_move_insn (object, target);
5186 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5187 alias_set);
5189 emit_move_insn (target, object);
5191 /* We want to return the BLKmode version of the data. */
5192 return blk_object;
5195 if (GET_CODE (target) == CONCAT)
5197 /* We're storing into a struct containing a single __complex. */
5199 if (bitpos != 0)
5200 abort ();
5201 return store_expr (exp, target, 0);
5204 /* If the structure is in a register or if the component
5205 is a bit field, we cannot use addressing to access it.
5206 Use bit-field techniques or SUBREG to store in it. */
5208 if (mode == VOIDmode
5209 || (mode != BLKmode && ! direct_store[(int) mode]
5210 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5211 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5212 || GET_CODE (target) == REG
5213 || GET_CODE (target) == SUBREG
5214 /* If the field isn't aligned enough to store as an ordinary memref,
5215 store it as a bit field. */
5216 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5217 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5218 || bitpos % GET_MODE_ALIGNMENT (mode)))
5219 /* If the RHS and field are a constant size and the size of the
5220 RHS isn't the same size as the bitfield, we must use bitfield
5221 operations. */
5222 || (bitsize >= 0
5223 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5224 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5226 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5228 /* If BITSIZE is narrower than the size of the type of EXP
5229 we will be narrowing TEMP. Normally, what's wanted are the
5230 low-order bits. However, if EXP's type is a record and this is a
5231 big-endian machine, we want the upper BITSIZE bits. */
5232 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5233 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5234 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5235 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5236 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5237 - bitsize),
5238 temp, 1);
5240 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5241 MODE. */
5242 if (mode != VOIDmode && mode != BLKmode
5243 && mode != TYPE_MODE (TREE_TYPE (exp)))
5244 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5246 /* If the modes of TARGET and TEMP are both BLKmode, both
5247 must be in memory and BITPOS must be aligned on a byte
5248 boundary. If so, we simply do a block copy. */
5249 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5251 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5252 || bitpos % BITS_PER_UNIT != 0)
5253 abort ();
5255 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5256 emit_block_move (target, temp,
5257 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5258 / BITS_PER_UNIT));
5260 return value_mode == VOIDmode ? const0_rtx : target;
5263 /* Store the value in the bitfield. */
5264 store_bit_field (target, bitsize, bitpos, mode, temp,
5265 int_size_in_bytes (type));
5267 if (value_mode != VOIDmode)
5269 /* The caller wants an rtx for the value.
5270 If possible, avoid refetching from the bitfield itself. */
5271 if (width_mask != 0
5272 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5274 tree count;
5275 enum machine_mode tmode;
5277 tmode = GET_MODE (temp);
5278 if (tmode == VOIDmode)
5279 tmode = value_mode;
5281 if (unsignedp)
5282 return expand_and (tmode, temp,
5283 gen_int_mode (width_mask, tmode),
5284 NULL_RTX);
5286 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5287 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5288 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5291 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5292 NULL_RTX, value_mode, VOIDmode,
5293 int_size_in_bytes (type));
5295 return const0_rtx;
5297 else
5299 rtx addr = XEXP (target, 0);
5300 rtx to_rtx = target;
5302 /* If a value is wanted, it must be the lhs;
5303 so make the address stable for multiple use. */
5305 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5306 && ! CONSTANT_ADDRESS_P (addr)
5307 /* A frame-pointer reference is already stable. */
5308 && ! (GET_CODE (addr) == PLUS
5309 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5310 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5311 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5312 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5314 /* Now build a reference to just the desired component. */
5316 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5318 if (to_rtx == target)
5319 to_rtx = copy_rtx (to_rtx);
5321 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5322 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5323 set_mem_alias_set (to_rtx, alias_set);
5325 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5329 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5330 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5331 codes and find the ultimate containing object, which we return.
5333 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5334 bit position, and *PUNSIGNEDP to the signedness of the field.
5335 If the position of the field is variable, we store a tree
5336 giving the variable offset (in units) in *POFFSET.
5337 This offset is in addition to the bit position.
5338 If the position is not variable, we store 0 in *POFFSET.
5340 If any of the extraction expressions is volatile,
5341 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5343 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5344 is a mode that can be used to access the field. In that case, *PBITSIZE
5345 is redundant.
5347 If the field describes a variable-sized object, *PMODE is set to
5348 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5349 this case, but the address of the object can be found. */
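/* For example, given a reference such as  s.a.b[3]  with constant
   offsets, this returns the underlying object S, with *PBITPOS set to
   the combined constant bit offset of the referenced element and
   *POFFSET set to 0.  */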
5351 tree
5352 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5353 punsignedp, pvolatilep)
5354 tree exp;
5355 HOST_WIDE_INT *pbitsize;
5356 HOST_WIDE_INT *pbitpos;
5357 tree *poffset;
5358 enum machine_mode *pmode;
5359 int *punsignedp;
5360 int *pvolatilep;
5362 tree size_tree = 0;
5363 enum machine_mode mode = VOIDmode;
5364 tree offset = size_zero_node;
5365 tree bit_offset = bitsize_zero_node;
5366 tree placeholder_ptr = 0;
5367 tree tem;
5369 /* First get the mode, signedness, and size. We do this from just the
5370 outermost expression. */
5371 if (TREE_CODE (exp) == COMPONENT_REF)
5373 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5374 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5375 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5377 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5379 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5381 size_tree = TREE_OPERAND (exp, 1);
5382 *punsignedp = TREE_UNSIGNED (exp);
5384 else
5386 mode = TYPE_MODE (TREE_TYPE (exp));
5387 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5389 if (mode == BLKmode)
5390 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5391 else
5392 *pbitsize = GET_MODE_BITSIZE (mode);
5395 if (size_tree != 0)
5397 if (! host_integerp (size_tree, 1))
5398 mode = BLKmode, *pbitsize = -1;
5399 else
5400 *pbitsize = tree_low_cst (size_tree, 1);
5403 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5404 and find the ultimate containing object. */
5405 while (1)
5407 if (TREE_CODE (exp) == BIT_FIELD_REF)
5408 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5409 else if (TREE_CODE (exp) == COMPONENT_REF)
5411 tree field = TREE_OPERAND (exp, 1);
5412 tree this_offset = DECL_FIELD_OFFSET (field);
5414 /* If this field hasn't been filled in yet, don't go
5415 past it. This should only happen when folding expressions
5416 made during type construction. */
5417 if (this_offset == 0)
5418 break;
5419 else if (! TREE_CONSTANT (this_offset)
5420 && contains_placeholder_p (this_offset))
5421 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5423 offset = size_binop (PLUS_EXPR, offset, this_offset);
5424 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5425 DECL_FIELD_BIT_OFFSET (field));
5427 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5430 else if (TREE_CODE (exp) == ARRAY_REF
5431 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5433 tree index = TREE_OPERAND (exp, 1);
5434 tree array = TREE_OPERAND (exp, 0);
5435 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5436 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5437 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5439 /* We assume all arrays have sizes that are a multiple of a byte.
5440 First subtract the lower bound, if any, in the type of the
5441 index, then convert to sizetype and multiply by the size of the
5442 array element. */
5443 if (low_bound != 0 && ! integer_zerop (low_bound))
5444 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5445 index, low_bound));
5447 /* If the index has a self-referential type, pass it to a
5448 WITH_RECORD_EXPR; if the component size does, pass our
5449 component to one. */
5450 if (! TREE_CONSTANT (index)
5451 && contains_placeholder_p (index))
5452 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5453 if (! TREE_CONSTANT (unit_size)
5454 && contains_placeholder_p (unit_size))
5455 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5457 offset = size_binop (PLUS_EXPR, offset,
5458 size_binop (MULT_EXPR,
5459 convert (sizetype, index),
5460 unit_size));
5463 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5465 tree new = find_placeholder (exp, &placeholder_ptr);
5467 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5468 We might have been called from tree optimization where we
5469 haven't set up an object yet. */
5470 if (new == 0)
5471 break;
5472 else
5473 exp = new;
5475 continue;
5477 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5478 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5479 && ! ((TREE_CODE (exp) == NOP_EXPR
5480 || TREE_CODE (exp) == CONVERT_EXPR)
5481 && (TYPE_MODE (TREE_TYPE (exp))
5482 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5483 break;
5485 /* If any reference in the chain is volatile, the effect is volatile. */
5486 if (TREE_THIS_VOLATILE (exp))
5487 *pvolatilep = 1;
5489 exp = TREE_OPERAND (exp, 0);
5492 /* If OFFSET is constant, see if we can return the whole thing as a
5493 constant bit position. Otherwise, split it up. */
5494 if (host_integerp (offset, 0)
5495 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5496 bitsize_unit_node))
5497 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5498 && host_integerp (tem, 0))
5499 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5500 else
5501 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5503 *pmode = mode;
5504 return exp;
5507 /* Return 1 if T is an expression that get_inner_reference handles. */
5510 handled_component_p (t)
5511 tree t;
5513 switch (TREE_CODE (t))
5515 case BIT_FIELD_REF:
5516 case COMPONENT_REF:
5517 case ARRAY_REF:
5518 case ARRAY_RANGE_REF:
5519 case NON_LVALUE_EXPR:
5520 case VIEW_CONVERT_EXPR:
5521 return 1;
5523 case NOP_EXPR:
5524 case CONVERT_EXPR:
5525 return (TYPE_MODE (TREE_TYPE (t))
5526 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5528 default:
5529 return 0;
5533 /* Given an rtx VALUE that may contain additions and multiplications, return
5534 an equivalent value that just refers to a register, memory, or constant.
5535 This is done by generating instructions to perform the arithmetic and
5536 returning a pseudo-register containing the value.
5538 The returned value may be a REG, SUBREG, MEM or constant. */
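/* For example, given  (plus (reg 100) (const_int 4))  this emits the
   addition into a register (TARGET if suitable, otherwise a new
   pseudo) and returns the register holding the sum.  */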
5541 force_operand (value, target)
5542 rtx value, target;
5544 rtx op1, op2;
5545 /* Use subtarget as the target for operand 0 of a binary operation. */
5546 rtx subtarget = get_subtarget (target);
5547 enum rtx_code code = GET_CODE (value);
5549 /* Check for a PIC address load. */
5550 if ((code == PLUS || code == MINUS)
5551 && XEXP (value, 0) == pic_offset_table_rtx
5552 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5553 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5554 || GET_CODE (XEXP (value, 1)) == CONST))
5556 if (!subtarget)
5557 subtarget = gen_reg_rtx (GET_MODE (value));
5558 emit_move_insn (subtarget, value);
5559 return subtarget;
5562 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5564 if (!target)
5565 target = gen_reg_rtx (GET_MODE (value));
5566 convert_move (target, force_operand (XEXP (value, 0), NULL),
5567 code == ZERO_EXTEND);
5568 return target;
5571 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5573 op2 = XEXP (value, 1);
5574 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5575 subtarget = 0;
5576 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5578 code = PLUS;
5579 op2 = negate_rtx (GET_MODE (value), op2);
5582 /* Check for an addition with OP2 a constant integer and our first
5583 operand a PLUS of a virtual register and something else. In that
5584 case, we want to emit the sum of the virtual register and the
5585 constant first and then add the other value. This allows virtual
5586 register instantiation to simply modify the constant rather than
5587 creating another one around this addition. */
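/* E.g. for  (plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 8))
   we first form virtual-stack-vars + 8 and only then add the other
   register, so instantiation need only adjust that constant.  */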
5588 if (code == PLUS && GET_CODE (op2) == CONST_INT
5589 && GET_CODE (XEXP (value, 0)) == PLUS
5590 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5591 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5592 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5594 rtx temp = expand_simple_binop (GET_MODE (value), code,
5595 XEXP (XEXP (value, 0), 0), op2,
5596 subtarget, 0, OPTAB_LIB_WIDEN);
5597 return expand_simple_binop (GET_MODE (value), code, temp,
5598 force_operand (XEXP (XEXP (value,
5599 0), 1), 0),
5600 target, 0, OPTAB_LIB_WIDEN);
5603 op1 = force_operand (XEXP (value, 0), subtarget);
5604 op2 = force_operand (op2, NULL_RTX);
5605 switch (code)
5607 case MULT:
5608 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5609 case DIV:
5610 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5611 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5612 target, 1, OPTAB_LIB_WIDEN);
5613 else
5614 return expand_divmod (0,
5615 FLOAT_MODE_P (GET_MODE (value))
5616 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5617 GET_MODE (value), op1, op2, target, 0);
5618 break;
5619 case MOD:
5620 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5621 target, 0);
5622 break;
5623 case UDIV:
5624 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5625 target, 1);
5626 break;
5627 case UMOD:
5628 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5629 target, 1);
5630 break;
5631 case ASHIFTRT:
5632 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5633 target, 0, OPTAB_LIB_WIDEN);
5634 break;
5635 default:
5636 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5637 target, 1, OPTAB_LIB_WIDEN);
5640 if (GET_RTX_CLASS (code) == '1')
5642 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5643 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5646 #ifdef INSN_SCHEDULING
5647 /* On machines that have insn scheduling, we want all memory references to be
5648 explicit, so we need to deal with such paradoxical SUBREGs. */
5649 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5650 && (GET_MODE_SIZE (GET_MODE (value))
5651 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5652 value
5653 = simplify_gen_subreg (GET_MODE (value),
5654 force_reg (GET_MODE (SUBREG_REG (value)),
5655 force_operand (SUBREG_REG (value),
5656 NULL_RTX)),
5657 GET_MODE (SUBREG_REG (value)),
5658 SUBREG_BYTE (value));
5659 #endif
5661 return value;
5664 /* Subroutine of expand_expr: return nonzero iff there is no way that
5665 EXP can reference X, which is being modified. TOP_P is nonzero if this
5666 call is going to be used to determine whether we need a temporary
5667 for EXP, as opposed to a recursive call to this function.
5669 It is always safe for this routine to return zero since it merely
5670 searches for optimization opportunities. */
5673 safe_from_p (x, exp, top_p)
5674 rtx x;
5675 tree exp;
5676 int top_p;
5678 rtx exp_rtl = 0;
5679 int i, nops;
5680 static tree save_expr_list;
5682 if (x == 0
5683 /* If EXP has varying size, we MUST use a target since we currently
5684 have no way of allocating temporaries of variable size
5685 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5686 So we assume here that something at a higher level has prevented a
5687 clash. This is somewhat bogus, but the best we can do. Only
5688 do this when X is BLKmode and when we are at the top level. */
5689 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5690 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5691 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5692 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5693 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5694 != INTEGER_CST)
5695 && GET_MODE (x) == BLKmode)
5696 /* If X is in the outgoing argument area, it is always safe. */
5697 || (GET_CODE (x) == MEM
5698 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5699 || (GET_CODE (XEXP (x, 0)) == PLUS
5700 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5701 return 1;
5703 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5704 find the underlying pseudo. */
5705 if (GET_CODE (x) == SUBREG)
5707 x = SUBREG_REG (x);
5708 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5709 return 0;
5712 /* A SAVE_EXPR might appear many times in the expression passed to the
5713 top-level safe_from_p call, and if it has a complex subexpression,
5714 examining it multiple times could result in a combinatorial explosion.
5715 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5716 with optimization took about 28 minutes to compile -- even though it was
5717 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5718 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5719 we have processed. Note that the only test of top_p was above. */
5721 if (top_p)
5723 int rtn;
5724 tree t;
5726 save_expr_list = 0;
5728 rtn = safe_from_p (x, exp, 0);
5730 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5731 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5733 return rtn;
5736 /* Now look at our tree code and possibly recurse. */
5737 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5739 case 'd':
5740 exp_rtl = DECL_RTL_IF_SET (exp);
5741 break;
5743 case 'c':
5744 return 1;
5746 case 'x':
5747 if (TREE_CODE (exp) == TREE_LIST)
5748 return ((TREE_VALUE (exp) == 0
5749 || safe_from_p (x, TREE_VALUE (exp), 0))
5750 && (TREE_CHAIN (exp) == 0
5751 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5752 else if (TREE_CODE (exp) == ERROR_MARK)
5753 return 1; /* An already-visited SAVE_EXPR? */
5754 else
5755 return 0;
5757 case '1':
5758 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5760 case '2':
5761 case '<':
5762 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5763 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5765 case 'e':
5766 case 'r':
5767 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5768 the expression. If it is set, we conflict iff we are that rtx or
5769 both are in memory. Otherwise, we check all operands of the
5770 expression recursively. */
5772 switch (TREE_CODE (exp))
5774 case ADDR_EXPR:
5775 /* If the operand is static or we are static, we can't conflict.
5776 Likewise if we don't conflict with the operand at all. */
5777 if (staticp (TREE_OPERAND (exp, 0))
5778 || TREE_STATIC (exp)
5779 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5780 return 1;
5782 /* Otherwise, the only way this can conflict is if we are taking
5783 the address of a DECL whose address is part of X, which is
5784 very rare. */
5785 exp = TREE_OPERAND (exp, 0);
5786 if (DECL_P (exp))
5788 if (!DECL_RTL_SET_P (exp)
5789 || GET_CODE (DECL_RTL (exp)) != MEM)
5790 return 0;
5791 else
5792 exp_rtl = XEXP (DECL_RTL (exp), 0);
5794 break;
5796 case INDIRECT_REF:
5797 if (GET_CODE (x) == MEM
5798 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5799 get_alias_set (exp)))
5800 return 0;
5801 break;
5803 case CALL_EXPR:
5804 /* Assume that the call will clobber all hard registers and
5805 all of memory. */
5806 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5807 || GET_CODE (x) == MEM)
5808 return 0;
5809 break;
5811 case RTL_EXPR:
5812 /* If a sequence exists, we would have to scan every instruction
5813 in the sequence to see if it was safe. This is probably not
5814 worthwhile. */
5815 if (RTL_EXPR_SEQUENCE (exp))
5816 return 0;
5818 exp_rtl = RTL_EXPR_RTL (exp);
5819 break;
5821 case WITH_CLEANUP_EXPR:
5822 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5823 break;
5825 case CLEANUP_POINT_EXPR:
5826 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5828 case SAVE_EXPR:
5829 exp_rtl = SAVE_EXPR_RTL (exp);
5830 if (exp_rtl)
5831 break;
5833 /* If we've already scanned this, don't do it again. Otherwise,
5834 show we've scanned it and record for clearing the flag if we're
5835 going on. */
5836 if (TREE_PRIVATE (exp))
5837 return 1;
5839 TREE_PRIVATE (exp) = 1;
5840 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5842 TREE_PRIVATE (exp) = 0;
5843 return 0;
5846 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5847 return 1;
5849 case BIND_EXPR:
5850 /* The only operand we look at is operand 1. The rest aren't
5851 part of the expression. */
5852 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5854 case METHOD_CALL_EXPR:
5855 /* This takes an rtx argument, but shouldn't appear here. */
5856 abort ();
5858 default:
5859 break;
5862 /* If we have an rtx, we do not need to scan our operands. */
5863 if (exp_rtl)
5864 break;
5866 nops = first_rtl_op (TREE_CODE (exp));
5867 for (i = 0; i < nops; i++)
5868 if (TREE_OPERAND (exp, i) != 0
5869 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5870 return 0;
5872 /* If this is a language-specific tree code, it may require
5873 special handling. */
5874 if ((unsigned int) TREE_CODE (exp)
5875 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5876 && !(*lang_hooks.safe_from_p) (x, exp))
5877 return 0;
5880 /* If we have an rtl, find any enclosed object. Then see if we conflict
5881 with it. */
5882 if (exp_rtl)
5884 if (GET_CODE (exp_rtl) == SUBREG)
5886 exp_rtl = SUBREG_REG (exp_rtl);
5887 if (GET_CODE (exp_rtl) == REG
5888 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5889 return 0;
5892 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5893 are memory and they conflict. */
5894 return ! (rtx_equal_p (x, exp_rtl)
5895 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5896 && true_dependence (exp_rtl, VOIDmode, x,
5897 rtx_addr_varies_p)));
5900 /* If we reach here, it is safe. */
5901 return 1;
5904 /* Subroutine of expand_expr: return rtx if EXP is a
5905 variable or parameter; else return 0. */
5907 static rtx
5908 var_rtx (exp)
5909 tree exp;
5911 STRIP_NOPS (exp);
5912 switch (TREE_CODE (exp))
5914 case PARM_DECL:
5915 case VAR_DECL:
5916 return DECL_RTL (exp);
5917 default:
5918 return 0;
5922 #ifdef MAX_INTEGER_COMPUTATION_MODE
5924 void
5925 check_max_integer_computation_mode (exp)
5926 tree exp;
5928 enum tree_code code;
5929 enum machine_mode mode;
5931 /* Strip any NOPs that don't change the mode. */
5932 STRIP_NOPS (exp);
5933 code = TREE_CODE (exp);
5935 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5936 if (code == NOP_EXPR
5937 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5938 return;
5940 /* First check the type of the overall operation. We need only look at
5941 unary, binary and relational operations. */
5942 if (TREE_CODE_CLASS (code) == '1'
5943 || TREE_CODE_CLASS (code) == '2'
5944 || TREE_CODE_CLASS (code) == '<')
5946 mode = TYPE_MODE (TREE_TYPE (exp));
5947 if (GET_MODE_CLASS (mode) == MODE_INT
5948 && mode > MAX_INTEGER_COMPUTATION_MODE)
5949 internal_error ("unsupported wide integer operation");
5952 /* Check operand of a unary op. */
5953 if (TREE_CODE_CLASS (code) == '1')
5955 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5956 if (GET_MODE_CLASS (mode) == MODE_INT
5957 && mode > MAX_INTEGER_COMPUTATION_MODE)
5958 internal_error ("unsupported wide integer operation");
5961 /* Check operands of a binary/comparison op. */
5962 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5964 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5965 if (GET_MODE_CLASS (mode) == MODE_INT
5966 && mode > MAX_INTEGER_COMPUTATION_MODE)
5967 internal_error ("unsupported wide integer operation");
5969 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5970 if (GET_MODE_CLASS (mode) == MODE_INT
5971 && mode > MAX_INTEGER_COMPUTATION_MODE)
5972 internal_error ("unsupported wide integer operation");
5975 #endif
5977 /* Return the highest power of two that EXP is known to be a multiple of.
5978 This is used in updating alignment of MEMs in array references. */
5980 static HOST_WIDE_INT
5981 highest_pow2_factor (exp)
5982 tree exp;
5984 HOST_WIDE_INT c0, c1;
5986 switch (TREE_CODE (exp))
5988 case INTEGER_CST:
5989 /* We can find the lowest bit that's a one. If the low
5990 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5991 We need to handle this case since we can find it in a COND_EXPR,
5992 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5993 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5994 later ICE. */
5995 if (TREE_CONSTANT_OVERFLOW (exp))
5996 return BIGGEST_ALIGNMENT;
5997 else
5999 /* Note: tree_low_cst is intentionally not used here,
6000 we don't care about the upper bits. */
6001 c0 = TREE_INT_CST_LOW (exp);
6002 c0 &= -c0;
6003 return c0 ? c0 : BIGGEST_ALIGNMENT;
6005 break;
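    /* Worked example (purely illustrative): for the constant 24, binary 11000,
       c0 & -c0 isolates the lowest set bit and yields 8, the largest power of
       two that divides 24.  */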
6007 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6008 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6009 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6010 return MIN (c0, c1);
6012 case MULT_EXPR:
6013 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6014 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6015 return c0 * c1;
6017 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6018 case CEIL_DIV_EXPR:
6019 if (integer_pow2p (TREE_OPERAND (exp, 1))
6020 && host_integerp (TREE_OPERAND (exp, 1), 1))
6022 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6023 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6024 return MAX (1, c0 / c1);
6026 break;
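    /* Illustration (assumed operands): if operand 0 is known to be a multiple
       of 16 and the divisor is the power-of-two constant 4, the result is
       MAX (1, 16 / 4) = 4.  */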
6028 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6029 case SAVE_EXPR: case WITH_RECORD_EXPR:
6030 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6032 case COMPOUND_EXPR:
6033 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6035 case COND_EXPR:
6036 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6037 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6038 return MIN (c0, c1);
6040 default:
6041 break;
6044 return 1;
6047 /* Similar, except that it is known that the expression must be a multiple
6048 of the alignment of TYPE. */
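   /* Illustration (assumed values): if EXP by itself is only known to be a
      multiple of 4 but TYPE is aligned to 8 bytes, the result below is 8,
      since the expression must also be a multiple of the type's alignment.  */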
6050 static HOST_WIDE_INT
6051 highest_pow2_factor_for_type (type, exp)
6052 tree type;
6053 tree exp;
6055 HOST_WIDE_INT type_align, factor;
6057 factor = highest_pow2_factor (exp);
6058 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6059 return MAX (factor, type_align);
6062 /* Return an object on the placeholder list that matches EXP, a
6063 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6064 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6065 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6066 is a location which initially points to a starting location in the
6067 placeholder list (zero means start of the list) and where a pointer into
6068 the placeholder list at which the object is found is placed. */
6070 tree
6071 find_placeholder (exp, plist)
6072 tree exp;
6073 tree *plist;
6075 tree type = TREE_TYPE (exp);
6076 tree placeholder_expr;
6078 for (placeholder_expr
6079 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6080 placeholder_expr != 0;
6081 placeholder_expr = TREE_CHAIN (placeholder_expr))
6083 tree need_type = TYPE_MAIN_VARIANT (type);
6084 tree elt;
6086 /* Find the outermost reference that is of the type we want. If none,
6087 see if any object has a type that is a pointer to the type we
6088 want. */
6089 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6090 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6091 || TREE_CODE (elt) == COND_EXPR)
6092 ? TREE_OPERAND (elt, 1)
6093 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6097 ? TREE_OPERAND (elt, 0) : 0))
6098 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6100 if (plist)
6101 *plist = placeholder_expr;
6102 return elt;
6105 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6107 = ((TREE_CODE (elt) == COMPOUND_EXPR
6108 || TREE_CODE (elt) == COND_EXPR)
6109 ? TREE_OPERAND (elt, 1)
6110 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6111 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6112 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6113 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6114 ? TREE_OPERAND (elt, 0) : 0))
6115 if (POINTER_TYPE_P (TREE_TYPE (elt))
6116 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6117 == need_type))
6119 if (plist)
6120 *plist = placeholder_expr;
6121 return build1 (INDIRECT_REF, need_type, elt);
6125 return 0;
6128 /* expand_expr: generate code for computing expression EXP.
6129 An rtx for the computed value is returned. The value is never null.
6130 In the case of a void EXP, const0_rtx is returned.
6132 The value may be stored in TARGET if TARGET is nonzero.
6133 TARGET is just a suggestion; callers must assume that
6134 the rtx returned may not be the same as TARGET.
6136 If TARGET is CONST0_RTX, it means that the value will be ignored.
6138 If TMODE is not VOIDmode, it suggests generating the
6139 result in mode TMODE. But this is done only when convenient.
6140 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6141 TMODE is just a suggestion; callers must assume that
6142 the rtx returned may not have mode TMODE.
6144 Note that TARGET may have neither TMODE nor MODE. In that case, it
6145 probably will not be used.
6147 If MODIFIER is EXPAND_SUM then when EXP is an addition
6148 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6149 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6150 products as above, or REG or MEM, or constant.
6151 Ordinarily in such cases we would output mul or add instructions
6152 and then return a pseudo reg containing the sum.
6154 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6155 it also marks a label as absolutely required (it can't be dead).
6156 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6157 This is used for outputting expressions used in initializers.
6159 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6160 with a constant address even if that address is not normally legitimate.
6161 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
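/* Illustration (a hedged sketch, not tied to any particular target): under
   EXPAND_SUM an address-like expression such as p + i*4 may come back as
   (plus (reg p) (mult (reg i) (const_int 4))) rather than as a single pseudo
   holding the finished sum, leaving the caller to fold the pieces into an
   addressing mode.  */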
6164 expand_expr (exp, target, tmode, modifier)
6165 tree exp;
6166 rtx target;
6167 enum machine_mode tmode;
6168 enum expand_modifier modifier;
6170 rtx op0, op1, temp;
6171 tree type = TREE_TYPE (exp);
6172 int unsignedp = TREE_UNSIGNED (type);
6173 enum machine_mode mode;
6174 enum tree_code code = TREE_CODE (exp);
6175 optab this_optab;
6176 rtx subtarget, original_target;
6177 int ignore;
6178 tree context;
6180 /* Handle ERROR_MARK before anybody tries to access its type. */
6181 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6183 op0 = CONST0_RTX (tmode);
6184 if (op0 != 0)
6185 return op0;
6186 return const0_rtx;
6189 mode = TYPE_MODE (type);
6190 /* Use subtarget as the target for operand 0 of a binary operation. */
6191 subtarget = get_subtarget (target);
6192 original_target = target;
6193 ignore = (target == const0_rtx
6194 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6195 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6196 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6197 && TREE_CODE (type) == VOID_TYPE));
6199 /* If we are going to ignore this result, we need only do something
6200 if there is a side-effect somewhere in the expression. If there
6201 is, short-circuit the most common cases here. Note that we must
6202 not call expand_expr with anything but const0_rtx in case this
6203 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6205 if (ignore)
6207 if (! TREE_SIDE_EFFECTS (exp))
6208 return const0_rtx;
6210 /* Ensure we reference a volatile object even if value is ignored, but
6211 don't do this if all we are doing is taking its address. */
6212 if (TREE_THIS_VOLATILE (exp)
6213 && TREE_CODE (exp) != FUNCTION_DECL
6214 && mode != VOIDmode && mode != BLKmode
6215 && modifier != EXPAND_CONST_ADDRESS)
6217 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6218 if (GET_CODE (temp) == MEM)
6219 temp = copy_to_reg (temp);
6220 return const0_rtx;
6223 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6224 || code == INDIRECT_REF || code == BUFFER_REF)
6225 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6226 modifier);
6228 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6229 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6231 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6232 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6233 return const0_rtx;
6235 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6236 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6237 /* If the second operand has no side effects, just evaluate
6238 the first. */
6239 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6240 modifier);
6241 else if (code == BIT_FIELD_REF)
6243 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6244 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6245 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6246 return const0_rtx;
6249 target = 0;
6252 #ifdef MAX_INTEGER_COMPUTATION_MODE
6253 /* Only check stuff here if the mode we want is different from the mode
6254 of the expression; if it's the same, check_max_integer_computation_mode
6255 will handle it. Do we really need to check this stuff at all? */
6257 if (target
6258 && GET_MODE (target) != mode
6259 && TREE_CODE (exp) != INTEGER_CST
6260 && TREE_CODE (exp) != PARM_DECL
6261 && TREE_CODE (exp) != ARRAY_REF
6262 && TREE_CODE (exp) != ARRAY_RANGE_REF
6263 && TREE_CODE (exp) != COMPONENT_REF
6264 && TREE_CODE (exp) != BIT_FIELD_REF
6265 && TREE_CODE (exp) != INDIRECT_REF
6266 && TREE_CODE (exp) != CALL_EXPR
6267 && TREE_CODE (exp) != VAR_DECL
6268 && TREE_CODE (exp) != RTL_EXPR)
6270 enum machine_mode mode = GET_MODE (target);
6272 if (GET_MODE_CLASS (mode) == MODE_INT
6273 && mode > MAX_INTEGER_COMPUTATION_MODE)
6274 internal_error ("unsupported wide integer operation");
6277 if (tmode != mode
6278 && TREE_CODE (exp) != INTEGER_CST
6279 && TREE_CODE (exp) != PARM_DECL
6280 && TREE_CODE (exp) != ARRAY_REF
6281 && TREE_CODE (exp) != ARRAY_RANGE_REF
6282 && TREE_CODE (exp) != COMPONENT_REF
6283 && TREE_CODE (exp) != BIT_FIELD_REF
6284 && TREE_CODE (exp) != INDIRECT_REF
6285 && TREE_CODE (exp) != VAR_DECL
6286 && TREE_CODE (exp) != CALL_EXPR
6287 && TREE_CODE (exp) != RTL_EXPR
6288 && GET_MODE_CLASS (tmode) == MODE_INT
6289 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6290 internal_error ("unsupported wide integer operation");
6292 check_max_integer_computation_mode (exp);
6293 #endif
6295 /* If we will do cse, generate all results into pseudo registers
6296 since 1) that allows cse to find more things
6297 and 2) otherwise cse could produce an insn the machine
6298 cannot support. An exception is a CONSTRUCTOR into a multi-word
6299 MEM: that's much more likely to be most efficient into the MEM. */
6301 if (! cse_not_expected && mode != BLKmode && target
6302 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6303 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6304 target = subtarget;
6306 switch (code)
6308 case LABEL_DECL:
6310 tree function = decl_function_context (exp);
6311 /* Handle using a label in a containing function. */
6312 if (function != current_function_decl
6313 && function != inline_function_decl && function != 0)
6315 struct function *p = find_function_data (function);
6316 p->expr->x_forced_labels
6317 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6318 p->expr->x_forced_labels);
6320 else
6322 if (modifier == EXPAND_INITIALIZER)
6323 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6324 label_rtx (exp),
6325 forced_labels);
6328 temp = gen_rtx_MEM (FUNCTION_MODE,
6329 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6330 if (function != current_function_decl
6331 && function != inline_function_decl && function != 0)
6332 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6333 return temp;
6336 case PARM_DECL:
6337 if (DECL_RTL (exp) == 0)
6339 error_with_decl (exp, "prior parameter's size depends on `%s'");
6340 return CONST0_RTX (mode);
6343 /* ... fall through ... */
6345 case VAR_DECL:
6346 /* If a static var's type was incomplete when the decl was written,
6347 but the type is complete now, lay out the decl now. */
6348 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6349 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6351 rtx value = DECL_RTL_IF_SET (exp);
6353 layout_decl (exp, 0);
6355 /* If the RTL was already set, update its mode and memory
6356 attributes. */
6357 if (value != 0)
6359 PUT_MODE (value, DECL_MODE (exp));
6360 SET_DECL_RTL (exp, 0);
6361 set_mem_attributes (value, exp, 1);
6362 SET_DECL_RTL (exp, value);
6366 /* ... fall through ... */
6368 case FUNCTION_DECL:
6369 case RESULT_DECL:
6370 if (DECL_RTL (exp) == 0)
6371 abort ();
6373 /* Ensure the variable is marked as used even if it doesn't go through
6374 a parser. If it hasn't been used yet, write out an external
6375 definition. */
6376 if (! TREE_USED (exp))
6378 assemble_external (exp);
6379 TREE_USED (exp) = 1;
6382 /* Show we haven't gotten RTL for this yet. */
6383 temp = 0;
6385 /* Handle variables inherited from containing functions. */
6386 context = decl_function_context (exp);
6388 /* We treat inline_function_decl as an alias for the current function
6389 because that is the inline function whose vars, types, etc.
6390 are being merged into the current function.
6391 See expand_inline_function. */
6393 if (context != 0 && context != current_function_decl
6394 && context != inline_function_decl
6395 /* If var is static, we don't need a static chain to access it. */
6396 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6397 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6399 rtx addr;
6401 /* Mark as non-local and addressable. */
6402 DECL_NONLOCAL (exp) = 1;
6403 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6404 abort ();
6405 (*lang_hooks.mark_addressable) (exp);
6406 if (GET_CODE (DECL_RTL (exp)) != MEM)
6407 abort ();
6408 addr = XEXP (DECL_RTL (exp), 0);
6409 if (GET_CODE (addr) == MEM)
6410 addr
6411 = replace_equiv_address (addr,
6412 fix_lexical_addr (XEXP (addr, 0), exp));
6413 else
6414 addr = fix_lexical_addr (addr, exp);
6416 temp = replace_equiv_address (DECL_RTL (exp), addr);
6419 /* This is the case of an array whose size is to be determined
6420 from its initializer, while the initializer is still being parsed.
6421 See expand_decl. */
6423 else if (GET_CODE (DECL_RTL (exp)) == MEM
6424 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6425 temp = validize_mem (DECL_RTL (exp));
6427 /* If DECL_RTL is memory, we are in the normal case and either
6428 the address is not valid or it is not a register and -fforce-addr
6429 is specified, get the address into a register. */
6431 else if (GET_CODE (DECL_RTL (exp)) == MEM
6432 && modifier != EXPAND_CONST_ADDRESS
6433 && modifier != EXPAND_SUM
6434 && modifier != EXPAND_INITIALIZER
6435 && (! memory_address_p (DECL_MODE (exp),
6436 XEXP (DECL_RTL (exp), 0))
6437 || (flag_force_addr
6438 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6439 temp = replace_equiv_address (DECL_RTL (exp),
6440 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6442 /* If we got something, return it. But first, set the alignment
6443 if the address is a register. */
6444 if (temp != 0)
6446 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6447 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6449 return temp;
6452 /* If the mode of DECL_RTL does not match that of the decl, it
6453 must be a promoted value. We return a SUBREG of the wanted mode,
6454 but mark it so that we know that it was already extended. */
6456 if (GET_CODE (DECL_RTL (exp)) == REG
6457 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6459 /* Get the signedness used for this variable. Ensure we get the
6460 same mode we got when the variable was declared. */
6461 if (GET_MODE (DECL_RTL (exp))
6462 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6463 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6464 abort ();
6466 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6467 SUBREG_PROMOTED_VAR_P (temp) = 1;
6468 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6469 return temp;
6472 return DECL_RTL (exp);
6474 case INTEGER_CST:
6475 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6476 TREE_INT_CST_HIGH (exp), mode);
6478 /* ??? If overflow is set, fold will have done an incomplete job,
6479 which can result in (plus xx (const_int 0)), which can get
6480 simplified by validate_replace_rtx during virtual register
6481 instantiation, which can result in unrecognizable insns.
6482 Avoid this by forcing all overflows into registers. */
6483 if (TREE_CONSTANT_OVERFLOW (exp)
6484 && modifier != EXPAND_INITIALIZER)
6485 temp = force_reg (mode, temp);
6487 return temp;
6489 case CONST_DECL:
6490 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6492 case REAL_CST:
6493 /* If optimized, generate immediate CONST_DOUBLE
6494 which will be turned into memory by reload if necessary.
6496 We used to force a register so that loop.c could see it. But
6497 this does not allow gen_* patterns to perform optimizations with
6498 the constants. It also produces two insns in cases like "x = 1.0;".
6499 On most machines, floating-point constants are not permitted in
6500 many insns, so we'd end up copying it to a register in any case.
6502 Now, we do the copying in expand_binop, if appropriate. */
6503 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6504 TYPE_MODE (TREE_TYPE (exp)));
6506 case COMPLEX_CST:
6507 case STRING_CST:
6508 if (! TREE_CST_RTL (exp))
6509 output_constant_def (exp, 1);
6511 /* TREE_CST_RTL probably contains a constant address.
6512 On RISC machines where a constant address isn't valid,
6513 make some insns to get that address into a register. */
6514 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6515 && modifier != EXPAND_CONST_ADDRESS
6516 && modifier != EXPAND_INITIALIZER
6517 && modifier != EXPAND_SUM
6518 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6519 || (flag_force_addr
6520 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6521 return replace_equiv_address (TREE_CST_RTL (exp),
6522 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6523 return TREE_CST_RTL (exp);
6525 case EXPR_WITH_FILE_LOCATION:
6527 rtx to_return;
6528 const char *saved_input_filename = input_filename;
6529 int saved_lineno = lineno;
6530 input_filename = EXPR_WFL_FILENAME (exp);
6531 lineno = EXPR_WFL_LINENO (exp);
6532 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6533 emit_line_note (input_filename, lineno);
6534 /* Possibly avoid switching back and forth here. */
6535 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6536 input_filename = saved_input_filename;
6537 lineno = saved_lineno;
6538 return to_return;
6541 case SAVE_EXPR:
6542 context = decl_function_context (exp);
6544 /* If this SAVE_EXPR was at global context, assume we are an
6545 initialization function and move it into our context. */
6546 if (context == 0)
6547 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6549 /* We treat inline_function_decl as an alias for the current function
6550 because that is the inline function whose vars, types, etc.
6551 are being merged into the current function.
6552 See expand_inline_function. */
6553 if (context == current_function_decl || context == inline_function_decl)
6554 context = 0;
6556 /* If this is non-local, handle it. */
6557 if (context)
6559 /* The following call just exists to abort if the context is
6560 not of a containing function. */
6561 find_function_data (context);
6563 temp = SAVE_EXPR_RTL (exp);
6564 if (temp && GET_CODE (temp) == REG)
6566 put_var_into_stack (exp);
6567 temp = SAVE_EXPR_RTL (exp);
6569 if (temp == 0 || GET_CODE (temp) != MEM)
6570 abort ();
6571 return
6572 replace_equiv_address (temp,
6573 fix_lexical_addr (XEXP (temp, 0), exp));
6575 if (SAVE_EXPR_RTL (exp) == 0)
6577 if (mode == VOIDmode)
6578 temp = const0_rtx;
6579 else
6580 temp = assign_temp (build_qualified_type (type,
6581 (TYPE_QUALS (type)
6582 | TYPE_QUAL_CONST)),
6583 3, 0, 0);
6585 SAVE_EXPR_RTL (exp) = temp;
6586 if (!optimize && GET_CODE (temp) == REG)
6587 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6588 save_expr_regs);
6590 /* If the mode of TEMP does not match that of the expression, it
6591 must be a promoted value. We pass store_expr a SUBREG of the
6592 wanted mode but mark it so that we know that it was already
6593 extended. Note that `unsignedp' was modified above in
6594 this case. */
6596 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6598 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6599 SUBREG_PROMOTED_VAR_P (temp) = 1;
6600 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6603 if (temp == const0_rtx)
6604 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6605 else
6606 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6608 TREE_USED (exp) = 1;
6611 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6612 must be a promoted value. We return a SUBREG of the wanted mode,
6613 but mark it so that we know that it was already extended. */
6615 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6616 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6618 /* Compute the signedness and make the proper SUBREG. */
6619 promote_mode (type, mode, &unsignedp, 0);
6620 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6621 SUBREG_PROMOTED_VAR_P (temp) = 1;
6622 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6623 return temp;
6626 return SAVE_EXPR_RTL (exp);
6628 case UNSAVE_EXPR:
6630 rtx temp;
6631 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6632 TREE_OPERAND (exp, 0)
6633 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6634 return temp;
6637 case PLACEHOLDER_EXPR:
6639 tree old_list = placeholder_list;
6640 tree placeholder_expr = 0;
6642 exp = find_placeholder (exp, &placeholder_expr);
6643 if (exp == 0)
6644 abort ();
6646 placeholder_list = TREE_CHAIN (placeholder_expr);
6647 temp = expand_expr (exp, original_target, tmode, modifier);
6648 placeholder_list = old_list;
6649 return temp;
6652 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6653 abort ();
6655 case WITH_RECORD_EXPR:
6656 /* Put the object on the placeholder list, expand our first operand,
6657 and pop the list. */
6658 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6659 placeholder_list);
6660 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6661 modifier);
6662 placeholder_list = TREE_CHAIN (placeholder_list);
6663 return target;
6665 case GOTO_EXPR:
6666 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6667 expand_goto (TREE_OPERAND (exp, 0));
6668 else
6669 expand_computed_goto (TREE_OPERAND (exp, 0));
6670 return const0_rtx;
6672 case EXIT_EXPR:
6673 expand_exit_loop_if_false (NULL,
6674 invert_truthvalue (TREE_OPERAND (exp, 0)));
6675 return const0_rtx;
6677 case LABELED_BLOCK_EXPR:
6678 if (LABELED_BLOCK_BODY (exp))
6679 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6680 /* Should perhaps use expand_label, but this is simpler and safer. */
6681 do_pending_stack_adjust ();
6682 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6683 return const0_rtx;
6685 case EXIT_BLOCK_EXPR:
6686 if (EXIT_BLOCK_RETURN (exp))
6687 sorry ("returned value in block_exit_expr");
6688 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6689 return const0_rtx;
6691 case LOOP_EXPR:
6692 push_temp_slots ();
6693 expand_start_loop (1);
6694 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6695 expand_end_loop ();
6696 pop_temp_slots ();
6698 return const0_rtx;
6700 case BIND_EXPR:
6702 tree vars = TREE_OPERAND (exp, 0);
6703 int vars_need_expansion = 0;
6705 /* Need to open a binding contour here because
6706 if there are any cleanups they must be contained here. */
6707 expand_start_bindings (2);
6709 /* Mark the corresponding BLOCK for output in its proper place. */
6710 if (TREE_OPERAND (exp, 2) != 0
6711 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6712 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6714 /* If VARS have not yet been expanded, expand them now. */
6715 while (vars)
6717 if (!DECL_RTL_SET_P (vars))
6719 vars_need_expansion = 1;
6720 expand_decl (vars);
6722 expand_decl_init (vars);
6723 vars = TREE_CHAIN (vars);
6726 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6728 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6730 return temp;
6733 case RTL_EXPR:
6734 if (RTL_EXPR_SEQUENCE (exp))
6736 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6737 abort ();
6738 emit_insn (RTL_EXPR_SEQUENCE (exp));
6739 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6741 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6742 free_temps_for_rtl_expr (exp);
6743 return RTL_EXPR_RTL (exp);
6745 case CONSTRUCTOR:
6746 /* If we don't need the result, just ensure we evaluate any
6747 subexpressions. */
6748 if (ignore)
6750 tree elt;
6752 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6753 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6755 return const0_rtx;
6758 /* All elts simple constants => refer to a constant in memory. But
6759 if this is a non-BLKmode mode, let it store a field at a time
6760 since that should make a CONST_INT or CONST_DOUBLE when we
6761 fold. Likewise, if we have a target we can use, it is best to
6762 store directly into the target unless the type is large enough
6763 that memcpy will be used. If we are making an initializer and
6764 all operands are constant, put it in memory as well.
6766 FIXME: Avoid trying to fill vector constructors piecemeal.
6767 Output them with output_constant_def below unless we're sure
6768 they're zeros. This should go away when vector initializers
6769 are treated like VECTOR_CST instead of arrays.
6771 else if ((TREE_STATIC (exp)
6772 && ((mode == BLKmode
6773 && ! (target != 0 && safe_from_p (target, exp, 1)))
6774 || TREE_ADDRESSABLE (exp)
6775 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6776 && (! MOVE_BY_PIECES_P
6777 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6778 TYPE_ALIGN (type)))
6779 && ((TREE_CODE (type) == VECTOR_TYPE
6780 && !is_zeros_p (exp))
6781 || ! mostly_zeros_p (exp)))))
6782 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6784 rtx constructor = output_constant_def (exp, 1);
6786 if (modifier != EXPAND_CONST_ADDRESS
6787 && modifier != EXPAND_INITIALIZER
6788 && modifier != EXPAND_SUM)
6789 constructor = validize_mem (constructor);
6791 return constructor;
6793 else
6795 /* Handle calls that pass values in multiple non-contiguous
6796 locations. The Irix 6 ABI has examples of this. */
6797 if (target == 0 || ! safe_from_p (target, exp, 1)
6798 || GET_CODE (target) == PARALLEL)
6799 target
6800 = assign_temp (build_qualified_type (type,
6801 (TYPE_QUALS (type)
6802 | (TREE_READONLY (exp)
6803 * TYPE_QUAL_CONST))),
6804 0, TREE_ADDRESSABLE (exp), 1);
6806 store_constructor (exp, target, 0,
6807 int_size_in_bytes (TREE_TYPE (exp)));
6808 return target;
6811 case INDIRECT_REF:
6813 tree exp1 = TREE_OPERAND (exp, 0);
6814 tree index;
6815 tree string = string_constant (exp1, &index);
6817 /* Try to optimize reads from const strings. */
6818 if (string
6819 && TREE_CODE (string) == STRING_CST
6820 && TREE_CODE (index) == INTEGER_CST
6821 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6822 && GET_MODE_CLASS (mode) == MODE_INT
6823 && GET_MODE_SIZE (mode) == 1
6824 && modifier != EXPAND_WRITE)
6825 return gen_int_mode (TREE_STRING_POINTER (string)
6826 [TREE_INT_CST_LOW (index)], mode);
6828 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6829 op0 = memory_address (mode, op0);
6830 temp = gen_rtx_MEM (mode, op0);
6831 set_mem_attributes (temp, exp, 0);
6833 /* If we are writing to this object and its type is a record with
6834 readonly fields, we must mark it as readonly so it will
6835 conflict with readonly references to those fields. */
6836 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6837 RTX_UNCHANGING_P (temp) = 1;
6839 return temp;
6842 case ARRAY_REF:
6843 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6844 abort ();
6847 tree array = TREE_OPERAND (exp, 0);
6848 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6849 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6850 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6851 HOST_WIDE_INT i;
6853 /* Optimize the special-case of a zero lower bound.
6855 We convert the low_bound to sizetype to avoid some problems
6856 with constant folding. (E.g. suppose the lower bound is 1,
6857 and its mode is QI. Without the conversion, (ARRAY
6858 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6859 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6861 if (! integer_zerop (low_bound))
6862 index = size_diffop (index, convert (sizetype, low_bound));
6864 /* Fold an expression like: "foo"[2].
6865 This is not done in fold so it won't happen inside &.
6866 Don't fold if this is for wide characters since it's too
6867 difficult to do correctly and this is a very rare case. */
6869 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6870 && TREE_CODE (array) == STRING_CST
6871 && TREE_CODE (index) == INTEGER_CST
6872 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6873 && GET_MODE_CLASS (mode) == MODE_INT
6874 && GET_MODE_SIZE (mode) == 1)
6875 return gen_int_mode (TREE_STRING_POINTER (array)
6876 [TREE_INT_CST_LOW (index)], mode);
6878 /* If this is a constant index into a constant array,
6879 just get the value from the array. Handle both the cases when
6880 we have an explicit constructor and when our operand is a variable
6881 that was declared const. */
6883 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6884 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6885 && TREE_CODE (index) == INTEGER_CST
6886 && 0 > compare_tree_int (index,
6887 list_length (CONSTRUCTOR_ELTS
6888 (TREE_OPERAND (exp, 0)))))
6890 tree elem;
6892 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6893 i = TREE_INT_CST_LOW (index);
6894 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6897 if (elem)
6898 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6899 modifier);
6902 else if (optimize >= 1
6903 && modifier != EXPAND_CONST_ADDRESS
6904 && modifier != EXPAND_INITIALIZER
6905 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6906 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6907 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6909 if (TREE_CODE (index) == INTEGER_CST)
6911 tree init = DECL_INITIAL (array);
6913 if (TREE_CODE (init) == CONSTRUCTOR)
6915 tree elem;
6917 for (elem = CONSTRUCTOR_ELTS (init);
6918 (elem
6919 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6920 elem = TREE_CHAIN (elem))
6923 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6924 return expand_expr (fold (TREE_VALUE (elem)), target,
6925 tmode, modifier);
6927 else if (TREE_CODE (init) == STRING_CST
6928 && 0 > compare_tree_int (index,
6929 TREE_STRING_LENGTH (init)))
6931 tree type = TREE_TYPE (TREE_TYPE (init));
6932 enum machine_mode mode = TYPE_MODE (type);
6934 if (GET_MODE_CLASS (mode) == MODE_INT
6935 && GET_MODE_SIZE (mode) == 1)
6936 return gen_int_mode (TREE_STRING_POINTER (init)
6937 [TREE_INT_CST_LOW (index)], mode);
6942 /* Fall through. */
6944 case COMPONENT_REF:
6945 case BIT_FIELD_REF:
6946 case ARRAY_RANGE_REF:
6947 /* If the operand is a CONSTRUCTOR, we can just extract the
6948 appropriate field if it is present. Don't do this if we have
6949 already written the data since we want to refer to that copy
6950 and varasm.c assumes that's what we'll do. */
6951 if (code == COMPONENT_REF
6952 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6953 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6955 tree elt;
6957 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6958 elt = TREE_CHAIN (elt))
6959 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6960 /* We can normally use the value of the field in the
6961 CONSTRUCTOR. However, if this is a bitfield in
6962 an integral mode that we can fit in a HOST_WIDE_INT,
6963 we must mask only the number of bits in the bitfield,
6964 since this is done implicitly by the constructor. If
6965 the bitfield does not meet either of those conditions,
6966 we can't do this optimization. */
6967 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6968 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6969 == MODE_INT)
6970 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6971 <= HOST_BITS_PER_WIDE_INT))))
6973 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6974 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6976 HOST_WIDE_INT bitsize
6977 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6978 enum machine_mode imode
6979 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6981 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6983 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6984 op0 = expand_and (imode, op0, op1, target);
6986 else
6988 tree count
6989 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6992 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6993 target, 0);
6994 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6995 target, 0);
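		  /* Worked example (assuming imode is SImode, 32 bits wide):
		     for a 3-bit field, the unsigned branch masks with
		     (1 << 3) - 1 = 7, while the signed branch shifts left and
		     then arithmetically right by 32 - 3 = 29 bits to
		     sign-extend the field.  */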
6999 return op0;
7004 enum machine_mode mode1;
7005 HOST_WIDE_INT bitsize, bitpos;
7006 tree offset;
7007 int volatilep = 0;
7008 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7009 &mode1, &unsignedp, &volatilep);
7010 rtx orig_op0;
7012 /* If we got back the original object, something is wrong. Perhaps
7013 we are evaluating an expression too early. In any event, don't
7014 infinitely recurse. */
7015 if (tem == exp)
7016 abort ();
7018 /* If TEM's type is a union of variable size, pass TARGET to the inner
7019 computation, since it will need a temporary and TARGET is known
7020 to be able to serve as one. This occurs in unchecked conversion in Ada. */
7022 orig_op0 = op0
7023 = expand_expr (tem,
7024 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7025 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7026 != INTEGER_CST)
7027 ? target : NULL_RTX),
7028 VOIDmode,
7029 (modifier == EXPAND_INITIALIZER
7030 || modifier == EXPAND_CONST_ADDRESS)
7031 ? modifier : EXPAND_NORMAL);
7033 /* If this is a constant, put it into a register if it is a
7034 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7035 if (CONSTANT_P (op0))
7037 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7038 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7039 && offset == 0)
7040 op0 = force_reg (mode, op0);
7041 else
7042 op0 = validize_mem (force_const_mem (mode, op0));
7045 if (offset != 0)
7047 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7049 /* If this object is in a register, put it into memory.
7050 This case can't occur in C, but can in Ada if we have
7051 unchecked conversion of an expression from a scalar type to
7052 an array or record type. */
7053 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7054 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7056 /* If the operand is a SAVE_EXPR, we can deal with this by
7057 forcing the SAVE_EXPR into memory. */
7058 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7060 put_var_into_stack (TREE_OPERAND (exp, 0));
7061 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7063 else
7065 tree nt
7066 = build_qualified_type (TREE_TYPE (tem),
7067 (TYPE_QUALS (TREE_TYPE (tem))
7068 | TYPE_QUAL_CONST));
7069 rtx memloc = assign_temp (nt, 1, 1, 1);
7071 emit_move_insn (memloc, op0);
7072 op0 = memloc;
7076 if (GET_CODE (op0) != MEM)
7077 abort ();
7079 #ifdef POINTERS_EXTEND_UNSIGNED
7080 if (GET_MODE (offset_rtx) != Pmode)
7081 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7082 #else
7083 if (GET_MODE (offset_rtx) != ptr_mode)
7084 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7085 #endif
7087 /* A constant address in OP0 can have VOIDmode; we must not try
7088 to call force_reg for that case, so avoid it. */
7089 if (GET_CODE (op0) == MEM
7090 && GET_MODE (op0) == BLKmode
7091 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7092 && bitsize != 0
7093 && (bitpos % bitsize) == 0
7094 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7095 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7097 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7098 bitpos = 0;
7101 op0 = offset_address (op0, offset_rtx,
7102 highest_pow2_factor (offset));
7105 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7106 record its alignment as BIGGEST_ALIGNMENT. */
7107 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7108 && is_aligning_offset (offset, tem))
7109 set_mem_align (op0, BIGGEST_ALIGNMENT);
7111 /* Don't forget about volatility even if this is a bitfield. */
7112 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7114 if (op0 == orig_op0)
7115 op0 = copy_rtx (op0);
7117 MEM_VOLATILE_P (op0) = 1;
7120 /* The following code doesn't handle CONCAT.
7121 Assume only bitpos == 0 can be used for CONCAT, due to
7122 one-element arrays having the same mode as their element. */
7123 if (GET_CODE (op0) == CONCAT)
7125 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7126 abort ();
7127 return op0;
7130 /* In cases where an aligned union has an unaligned object
7131 as a field, we might be extracting a BLKmode value from
7132 an integer-mode (e.g., SImode) object. Handle this case
7133 by doing the extract into an object as wide as the field
7134 (which we know to be the width of a basic mode), then
7135 storing into memory, and changing the mode to BLKmode. */
7136 if (mode1 == VOIDmode
7137 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7138 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7139 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7140 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7141 && modifier != EXPAND_CONST_ADDRESS
7142 && modifier != EXPAND_INITIALIZER)
7143 /* If the field isn't aligned enough to fetch as a memref,
7144 fetch it as a bit field. */
7145 || (mode1 != BLKmode
7146 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7147 && ((TYPE_ALIGN (TREE_TYPE (tem))
7148 < GET_MODE_ALIGNMENT (mode))
7149 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7150 /* If the type and the field are a constant size and the
7151 size of the type isn't the same size as the bitfield,
7152 we must use bitfield operations. */
7153 || (bitsize >= 0
7154 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7155 == INTEGER_CST)
7156 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7157 bitsize)))
7159 enum machine_mode ext_mode = mode;
7161 if (ext_mode == BLKmode
7162 && ! (target != 0 && GET_CODE (op0) == MEM
7163 && GET_CODE (target) == MEM
7164 && bitpos % BITS_PER_UNIT == 0))
7165 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7167 if (ext_mode == BLKmode)
7169 /* In this case, BITPOS must start at a byte boundary and
7170 TARGET, if specified, must be a MEM. */
7171 if (GET_CODE (op0) != MEM
7172 || (target != 0 && GET_CODE (target) != MEM)
7173 || bitpos % BITS_PER_UNIT != 0)
7174 abort ();
7176 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7177 if (target == 0)
7178 target = assign_temp (type, 0, 1, 1);
7180 emit_block_move (target, op0,
7181 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7182 / BITS_PER_UNIT));
7184 return target;
7187 op0 = validize_mem (op0);
7189 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7190 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7192 op0 = extract_bit_field (op0, bitsize, bitpos,
7193 unsignedp, target, ext_mode, ext_mode,
7194 int_size_in_bytes (TREE_TYPE (tem)));
7196 /* If the result is a record type and BITSIZE is narrower than
7197 the mode of OP0, an integral mode, and this is a big endian
7198 machine, we must put the field into the high-order bits. */
7199 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7200 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7201 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7202 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7203 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7204 - bitsize),
7205 op0, 1);
7207 if (mode == BLKmode)
7209 rtx new = assign_temp (build_qualified_type
7210 ((*lang_hooks.types.type_for_mode)
7211 (ext_mode, 0),
7212 TYPE_QUAL_CONST), 0, 1, 1);
7214 emit_move_insn (new, op0);
7215 op0 = copy_rtx (new);
7216 PUT_MODE (op0, BLKmode);
7217 set_mem_attributes (op0, exp, 1);
7220 return op0;
7223 /* If the result is BLKmode, use that to access the object
7224 now as well. */
7225 if (mode == BLKmode)
7226 mode1 = BLKmode;
7228 /* Get a reference to just this component. */
7229 if (modifier == EXPAND_CONST_ADDRESS
7230 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7231 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7232 else
7233 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7235 if (op0 == orig_op0)
7236 op0 = copy_rtx (op0);
7238 set_mem_attributes (op0, exp, 0);
7239 if (GET_CODE (XEXP (op0, 0)) == REG)
7240 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7242 MEM_VOLATILE_P (op0) |= volatilep;
7243 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7244 || modifier == EXPAND_CONST_ADDRESS
7245 || modifier == EXPAND_INITIALIZER)
7246 return op0;
7247 else if (target == 0)
7248 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7250 convert_move (target, op0, unsignedp);
7251 return target;
7254 case VTABLE_REF:
7256 rtx insn, before = get_last_insn (), vtbl_ref;
7258 /* Evaluate the interior expression. */
7259 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7260 tmode, modifier);
7262 /* Get or create an instruction off which to hang a note. */
7263 if (REG_P (subtarget))
7265 target = subtarget;
7266 insn = get_last_insn ();
7267 if (insn == before)
7268 abort ();
7269 if (! INSN_P (insn))
7270 insn = prev_nonnote_insn (insn);
7272 else
7274 target = gen_reg_rtx (GET_MODE (subtarget));
7275 insn = emit_move_insn (target, subtarget);
7278 /* Collect the data for the note. */
7279 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7280 vtbl_ref = plus_constant (vtbl_ref,
7281 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7282 /* Discard the initial CONST that was added. */
7283 vtbl_ref = XEXP (vtbl_ref, 0);
7285 REG_NOTES (insn)
7286 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7288 return target;
7291 /* Intended for a reference to a buffer of a file-object in Pascal.
7292 But it's not certain that a special tree code will really be
7293 necessary for these. INDIRECT_REF might work for them. */
7294 case BUFFER_REF:
7295 abort ();
7297 case IN_EXPR:
7299 /* Pascal set IN expression.
7301 Algorithm:
7302 rlo = set_low - (set_low%bits_per_word);
7303 the_word = set [ (index - rlo)/bits_per_word ];
7304 bit_index = index % bits_per_word;
7305 bitmask = 1 << bit_index;
7306 return !!(the_word & bitmask); */
7308 tree set = TREE_OPERAND (exp, 0);
7309 tree index = TREE_OPERAND (exp, 1);
7310 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7311 tree set_type = TREE_TYPE (set);
7312 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7313 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7314 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7315 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7316 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7317 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7318 rtx setaddr = XEXP (setval, 0);
7319 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7320 rtx rlow;
7321 rtx diff, quo, rem, addr, bit, result;
7323 /* If domain is empty, answer is no. Likewise if index is constant
7324 and out of bounds. */
7325 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7326 && TREE_CODE (set_low_bound) == INTEGER_CST
7327 && tree_int_cst_lt (set_high_bound, set_low_bound))
7328 || (TREE_CODE (index) == INTEGER_CST
7329 && TREE_CODE (set_low_bound) == INTEGER_CST
7330 && tree_int_cst_lt (index, set_low_bound))
7331 || (TREE_CODE (set_high_bound) == INTEGER_CST
7332 && TREE_CODE (index) == INTEGER_CST
7333 && tree_int_cst_lt (set_high_bound, index))))
7334 return const0_rtx;
7336 if (target == 0)
7337 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7339 /* If we get here, we have to generate the code for both cases
7340 (in range and out of range). */
7342 op0 = gen_label_rtx ();
7343 op1 = gen_label_rtx ();
7345 if (! (GET_CODE (index_val) == CONST_INT
7346 && GET_CODE (lo_r) == CONST_INT))
7347 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7348 GET_MODE (index_val), iunsignedp, op1);
7350 if (! (GET_CODE (index_val) == CONST_INT
7351 && GET_CODE (hi_r) == CONST_INT))
7352 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7353 GET_MODE (index_val), iunsignedp, op1);
7355 /* Calculate the element number of bit zero in the first word
7356 of the set. */
7357 if (GET_CODE (lo_r) == CONST_INT)
7358 rlow = GEN_INT (INTVAL (lo_r)
7359 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7360 else
7361 rlow = expand_binop (index_mode, and_optab, lo_r,
7362 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7363 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7365 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7366 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7368 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7369 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7370 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7371 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7373 addr = memory_address (byte_mode,
7374 expand_binop (index_mode, add_optab, diff,
7375 setaddr, NULL_RTX, iunsignedp,
7376 OPTAB_LIB_WIDEN));
7378 /* Extract the bit we want to examine. */
7379 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7380 gen_rtx_MEM (byte_mode, addr),
7381 make_tree (TREE_TYPE (index), rem),
7382 NULL_RTX, 1);
7383 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7384 GET_MODE (target) == byte_mode ? target : 0,
7385 1, OPTAB_LIB_WIDEN);
7387 if (result != target)
7388 convert_move (target, result, 1);
7390 /* Output the code to handle the out-of-range case. */
7391 emit_jump (op0);
7392 emit_label (op1);
7393 emit_move_insn (target, const0_rtx);
7394 emit_label (op0);
7395 return target;
7398 case WITH_CLEANUP_EXPR:
7399 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7401 WITH_CLEANUP_EXPR_RTL (exp)
7402 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7403 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7404 CLEANUP_EH_ONLY (exp));
7406 /* That's it for this cleanup. */
7407 TREE_OPERAND (exp, 1) = 0;
7409 return WITH_CLEANUP_EXPR_RTL (exp);
7411 case CLEANUP_POINT_EXPR:
7413 /* Start a new binding layer that will keep track of all cleanup
7414 actions to be performed. */
7415 expand_start_bindings (2);
7417 target_temp_slot_level = temp_slot_level;
7419 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7420 /* If we're going to use this value, load it up now. */
7421 if (! ignore)
7422 op0 = force_not_mem (op0);
7423 preserve_temp_slots (op0);
7424 expand_end_bindings (NULL_TREE, 0, 0);
7426 return op0;
7428 case CALL_EXPR:
7429 /* Check for a built-in function. */
7430 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7431 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7432 == FUNCTION_DECL)
7433 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7435 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7436 == BUILT_IN_FRONTEND)
7437 return (*lang_hooks.expand_expr)
7438 (exp, original_target, tmode, modifier);
7439 else
7440 return expand_builtin (exp, target, subtarget, tmode, ignore);
7443 return expand_call (exp, target, ignore);
7445 case NON_LVALUE_EXPR:
7446 case NOP_EXPR:
7447 case CONVERT_EXPR:
7448 case REFERENCE_EXPR:
7449 if (TREE_OPERAND (exp, 0) == error_mark_node)
7450 return const0_rtx;
7452 if (TREE_CODE (type) == UNION_TYPE)
7454 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7456 /* If both input and output are BLKmode, this conversion isn't doing
7457 anything except possibly changing memory attributes. */
7458 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7460 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7461 modifier);
7463 result = copy_rtx (result);
7464 set_mem_attributes (result, exp, 0);
7465 return result;
7468 if (target == 0)
7469 target = assign_temp (type, 0, 1, 1);
7471 if (GET_CODE (target) == MEM)
7472 /* Store data into beginning of memory target. */
7473 store_expr (TREE_OPERAND (exp, 0),
7474 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7476 else if (GET_CODE (target) == REG)
7477 /* Store this field into a union of the proper type. */
7478 store_field (target,
7479 MIN ((int_size_in_bytes (TREE_TYPE
7480 (TREE_OPERAND (exp, 0)))
7481 * BITS_PER_UNIT),
7482 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7483 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7484 VOIDmode, 0, type, 0);
7485 else
7486 abort ();
7488 /* Return the entire union. */
7489 return target;
7492 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7494 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7495 modifier);
7497 /* If the signedness of the conversion differs and OP0 is
7498 a promoted SUBREG, clear that indication since we now
7499 have to do the proper extension. */
7500 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7501 && GET_CODE (op0) == SUBREG)
7502 SUBREG_PROMOTED_VAR_P (op0) = 0;
7504 return op0;
7507 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7508 if (GET_MODE (op0) == mode)
7509 return op0;
7511 /* If OP0 is a constant, just convert it into the proper mode. */
7512 if (CONSTANT_P (op0))
7514 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7515 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7517 if (modifier == EXPAND_INITIALIZER)
7518 return simplify_gen_subreg (mode, op0, inner_mode,
7519 subreg_lowpart_offset (mode,
7520 inner_mode));
7521 else
7522 return convert_modes (mode, inner_mode, op0,
7523 TREE_UNSIGNED (inner_type));
7526 if (modifier == EXPAND_INITIALIZER)
7527 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7529 if (target == 0)
7530 return
7531 convert_to_mode (mode, op0,
7532 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7533 else
7534 convert_move (target, op0,
7535 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7536 return target;
7538 case VIEW_CONVERT_EXPR:
7539 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7541 /* If the input and output modes are both the same, we are done.
7542 Otherwise, if neither mode is BLKmode and both are within a word, we
7543 can use gen_lowpart. If neither is true, make sure the operand is
7544 in memory and convert the MEM to the new mode. */
7545 if (TYPE_MODE (type) == GET_MODE (op0))
7547 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7548 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7549 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7550 op0 = gen_lowpart (TYPE_MODE (type), op0);
7551 else if (GET_CODE (op0) != MEM)
7553 /* If the operand is not a MEM, force it into memory. Since we
7554 are going to be changing the mode of the MEM, don't call
7555 force_const_mem for constants because we don't allow pool
7556 constants to change mode. */
7557 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7559 if (TREE_ADDRESSABLE (exp))
7560 abort ();
7562 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7563 target
7564 = assign_stack_temp_for_type
7565 (TYPE_MODE (inner_type),
7566 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7568 emit_move_insn (target, op0);
7569 op0 = target;
7572 /* At this point, OP0 is in the correct mode. If the output type is such
7573 that the operand is known to be aligned, indicate that it is.
7574 Otherwise, we need only be concerned about alignment for non-BLKmode
7575 results. */
7576 if (GET_CODE (op0) == MEM)
7578 op0 = copy_rtx (op0);
7580 if (TYPE_ALIGN_OK (type))
7581 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7582 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7583 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7585 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7586 HOST_WIDE_INT temp_size
7587 = MAX (int_size_in_bytes (inner_type),
7588 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7589 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7590 temp_size, 0, type);
7591 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7593 if (TREE_ADDRESSABLE (exp))
7594 abort ();
7596 if (GET_MODE (op0) == BLKmode)
7597 emit_block_move (new_with_op0_mode, op0,
7598 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7599 else
7600 emit_move_insn (new_with_op0_mode, op0);
7602 op0 = new;
7605 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7608 return op0;
7610 case PLUS_EXPR:
7611 /* We come here from MINUS_EXPR when the second operand is a
7612 constant. */
7613 plus_expr:
7614 this_optab = ! unsignedp && flag_trapv
7615 && (GET_MODE_CLASS (mode) == MODE_INT)
7616 ? addv_optab : add_optab;
7618 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7619 something else, make sure we add the register to the constant and
7620 then to the other thing. This case can occur during strength
7621 reduction and doing it this way will produce better code if the
7622 frame pointer or argument pointer is eliminated.
7624 fold-const.c will ensure that the constant is always in the inner
7625 PLUS_EXPR, so the only case we need to do anything about is if
7626 sp, ap, or fp is our second argument, in which case we must swap
7627 the innermost first argument and our second argument. */
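	 /* Hypothetical example for illustration: if EXP is
	    (PLUS_EXPR (PLUS_EXPR x 4) fp), the swap below rewrites it as
	    (PLUS_EXPR (PLUS_EXPR fp 4) x), so that after frame pointer
	    elimination the register-plus-constant part folds into a
	    single address instead of surviving as a separate add.  */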
7629 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7630 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7631 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7632 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7633 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7634 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7636 tree t = TREE_OPERAND (exp, 1);
7638 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7639 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7642 /* If the result is to be ptr_mode and we are adding an integer to
7643 something, we might be forming a constant. So try to use
7644 plus_constant. If it produces a sum and we can't accept it,
7645 use force_operand. This allows P = &ARR[const] to generate
7646 efficient code on machines where a SYMBOL_REF is not a valid
7647 address.
7649 If this is an EXPAND_SUM call, always return the sum. */
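	 /* For instance (illustrative only, names hypothetical): given
	      static char arr[32];
	      char *p = &arr[10];
	    the address can be formed with plus_constant as
	    (plus (symbol_ref "arr") (const_int 10)) rather than by a
	    run-time add; force_operand repairs it if the target cannot
	    use such an address directly.  */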
7650 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7651 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7653 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7654 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7655 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7657 rtx constant_part;
7659 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7660 EXPAND_SUM);
7661 /* Use immed_double_const to ensure that the constant is
7662 truncated according to the mode of OP1, then sign extended
7663 to a HOST_WIDE_INT. Using the constant directly can result
7664 in non-canonical RTL in a 64x32 cross compile. */
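	      /* Worked example (illustrative): on a 64-bit host compiling
		 for a 32-bit target, the tree constant 0xffffffff must be
		 represented in SImode as (const_int -1); using the raw
		 low word directly would produce the non-canonical
		 (const_int 4294967295).  */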
7665 constant_part
7666 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7667 (HOST_WIDE_INT) 0,
7668 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7669 op1 = plus_constant (op1, INTVAL (constant_part));
7670 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7671 op1 = force_operand (op1, target);
7672 return op1;
7675 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7676 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7677 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7679 rtx constant_part;
7681 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7682 (modifier == EXPAND_INITIALIZER
7683 ? EXPAND_INITIALIZER : EXPAND_SUM));
7684 if (! CONSTANT_P (op0))
7686 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7687 VOIDmode, modifier);
7688 /* Don't go to both_summands if modifier
7689 says it's not right to return a PLUS. */
7690 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7691 goto binop2;
7692 goto both_summands;
7694 /* Use immed_double_const to ensure that the constant is
7695 truncated according to the mode of OP1, then sign extended
7696 to a HOST_WIDE_INT. Using the constant directly can result
7697 in non-canonical RTL in a 64x32 cross compile. */
7698 constant_part
7699 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7700 (HOST_WIDE_INT) 0,
7701 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7702 op0 = plus_constant (op0, INTVAL (constant_part));
7703 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7704 op0 = force_operand (op0, target);
7705 return op0;
7709 /* No sense saving up arithmetic to be done
7710 if it's all in the wrong mode to form part of an address.
7711 And force_operand won't know whether to sign-extend or
7712 zero-extend. */
7713 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7714 || mode != ptr_mode)
7715 goto binop;
7717 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7718 subtarget = 0;
7720 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7721 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7723 both_summands:
7724 /* Make sure any term that's a sum with a constant comes last. */
7725 if (GET_CODE (op0) == PLUS
7726 && CONSTANT_P (XEXP (op0, 1)))
7728 temp = op0;
7729 op0 = op1;
7730 op1 = temp;
7732 /* If adding to a sum including a constant,
7733 associate it to put the constant outside. */
7734 if (GET_CODE (op1) == PLUS
7735 && CONSTANT_P (XEXP (op1, 1)))
7737 rtx constant_term = const0_rtx;
7739 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7740 if (temp != 0)
7741 op0 = temp;
7742 /* Ensure that MULT comes first if there is one. */
7743 else if (GET_CODE (op0) == MULT)
7744 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7745 else
7746 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7748 /* Let's also eliminate constants from op0 if possible. */
7749 op0 = eliminate_constant_term (op0, &constant_term);
7751 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7752 their sum should be a constant. Form it into OP1, since the
7753 result we want will then be OP0 + OP1. */
7755 temp = simplify_binary_operation (PLUS, mode, constant_term,
7756 XEXP (op1, 1));
7757 if (temp != 0)
7758 op1 = temp;
7759 else
7760 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7763 /* Put a constant term last and put a multiplication first. */
7764 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7765 temp = op1, op1 = op0, op0 = temp;
7767 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7768 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
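      /* Net effect of the reassociation above, sketched on a hypothetical
	 input: expanding (a + 4) + (b + 8) with EXPAND_SUM yields
	 (plus (plus a b) (const_int 12)), keeping the constant outermost
	 where address arithmetic can absorb it.  */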
7770 case MINUS_EXPR:
7771 /* For initializers, we are allowed to return a MINUS of two
7772 symbolic constants. Here we handle all cases when both operands
7773 are constant. */
7774 /* Handle difference of two symbolic constants,
7775 for the sake of an initializer. */
7776 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7777 && really_constant_p (TREE_OPERAND (exp, 0))
7778 && really_constant_p (TREE_OPERAND (exp, 1)))
7780 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7781 modifier);
7782 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7783 modifier);
7785 /* If the last operand is a CONST_INT, use plus_constant of
7786 the negated constant. Else make the MINUS. */
7787 if (GET_CODE (op1) == CONST_INT)
7788 return plus_constant (op0, - INTVAL (op1));
7789 else
7790 return gen_rtx_MINUS (mode, op0, op1);
7792 /* Convert A - const to A + (-const). */
7793 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7795 tree negated = fold (build1 (NEGATE_EXPR, type,
7796 TREE_OPERAND (exp, 1)));
7798 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7799 /* If we can't negate the constant in TYPE, leave it alone and
7800 expand_binop will negate it for us. We used to try to do it
7801 here in the signed version of TYPE, but that doesn't work
7802 on POINTER_TYPEs. */;
7803 else
7805 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7806 goto plus_expr;
7809 this_optab = ! unsignedp && flag_trapv
7810 && (GET_MODE_CLASS(mode) == MODE_INT)
7811 ? subv_optab : sub_optab;
7812 goto binop;
7814 case MULT_EXPR:
7815 /* If first operand is constant, swap them.
7816 Thus the following special case checks need only
7817 check the second operand. */
7818 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7820 tree t1 = TREE_OPERAND (exp, 0);
7821 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7822 TREE_OPERAND (exp, 1) = t1;
7825 /* Attempt to return something suitable for generating an
7826 indexed address, for machines that support that. */
7828 if (modifier == EXPAND_SUM && mode == ptr_mode
7829 && host_integerp (TREE_OPERAND (exp, 1), 0))
7831 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7832 EXPAND_SUM);
7834 /* If we knew for certain that this is arithmetic for an array
7835 reference, and we knew the bounds of the array, then we could
7836 apply the distributive law across (PLUS X C) for constant C.
7837 Without such knowledge, we risk overflowing the computation
7838 when both X and C are large, but X+C isn't. */
7839 /* ??? Could perhaps special-case EXP being unsigned and C being
7840 positive. In that case we are certain that X+C is no smaller
7841 than X and so the transformed expression will overflow iff the
7842 original would have. */
7844 if (GET_CODE (op0) != REG)
7845 op0 = force_operand (op0, NULL_RTX);
7846 if (GET_CODE (op0) != REG)
7847 op0 = copy_to_mode_reg (mode, op0);
7849 return
7850 gen_rtx_MULT (mode, op0,
7851 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7854 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7855 subtarget = 0;
7857 /* Check for multiplying things that have been extended
7858 from a narrower type. If this machine supports multiplying
7859 in that narrower type with a result in the desired type,
7860 do it that way, and avoid the explicit type-conversion. */
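      /* Illustrative case (variable names hypothetical): for
	   short a, b;  int p = (int) a * (int) b;
	 a target providing mulhisi3 can do the HImode->SImode widening
	 multiply directly, so the operands are taken from inside the
	 NOP_EXPRs instead of being widened first.  */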
7861 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7862 && TREE_CODE (type) == INTEGER_TYPE
7863 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7864 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7865 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7866 && int_fits_type_p (TREE_OPERAND (exp, 1),
7867 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7868 /* Don't use a widening multiply if a shift will do. */
7869 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7870 > HOST_BITS_PER_WIDE_INT)
7871 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7872 ||
7873 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7874 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7875 ==
7876 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7877 /* If both operands are extended, they must either both
7878 be zero-extended or both be sign-extended. */
7879 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7880 ==
7881 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7883 enum machine_mode innermode
7884 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7885 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7886 ? smul_widen_optab : umul_widen_optab);
7887 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7888 ? umul_widen_optab : smul_widen_optab);
7889 if (mode == GET_MODE_WIDER_MODE (innermode))
7891 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7893 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7894 NULL_RTX, VOIDmode, 0);
7895 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7896 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7897 VOIDmode, 0);
7898 else
7899 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7900 NULL_RTX, VOIDmode, 0);
7901 goto binop2;
7903 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7904 && innermode == word_mode)
7906 rtx htem;
7907 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7908 NULL_RTX, VOIDmode, 0);
7909 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7910 op1 = convert_modes (innermode, mode,
7911 expand_expr (TREE_OPERAND (exp, 1),
7912 NULL_RTX, VOIDmode, 0),
7913 unsignedp);
7914 else
7915 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7916 NULL_RTX, VOIDmode, 0);
7917 temp = expand_binop (mode, other_optab, op0, op1, target,
7918 unsignedp, OPTAB_LIB_WIDEN);
7919 htem = expand_mult_highpart_adjust (innermode,
7920 gen_highpart (innermode, temp),
7921 op0, op1,
7922 gen_highpart (innermode, temp),
7923 unsignedp);
7924 emit_move_insn (gen_highpart (innermode, temp), htem);
7925 return temp;
7929 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7930 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7931 return expand_mult (mode, op0, op1, target, unsignedp);
7933 case TRUNC_DIV_EXPR:
7934 case FLOOR_DIV_EXPR:
7935 case CEIL_DIV_EXPR:
7936 case ROUND_DIV_EXPR:
7937 case EXACT_DIV_EXPR:
7938 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7939 subtarget = 0;
7940 /* Possible optimization: compute the dividend with EXPAND_SUM;
7941 then, if the divisor is constant, we can optimize the case
7942 where some terms of the dividend have coefficients divisible by it. */
7943 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7944 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7945 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7947 case RDIV_EXPR:
7948 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7949 saving an expensive divide. If not, combine will rebuild the original
7950 computation. */
7951 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7952 && TREE_CODE (type) == REAL_TYPE
7953 && !real_onep (TREE_OPERAND (exp, 0)))
7954 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7955 build (RDIV_EXPR, type,
7956 build_real (type, dconst1),
7957 TREE_OPERAND (exp, 1))),
7958 target, tmode, unsignedp);
7959 this_optab = sdiv_optab;
7960 goto binop;
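      /* Sketch of the effect (illustrative): with
	 -funsafe-math-optimizations, x/y and z/y both become multiplies
	 by the common subexpression (1.0/y), which CSE may later emit
	 with a single divide; without the flag (or when not optimizing,
	 or when the numerator is already 1.0) the ordinary divide insn
	 is used.  */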
7962 case TRUNC_MOD_EXPR:
7963 case FLOOR_MOD_EXPR:
7964 case CEIL_MOD_EXPR:
7965 case ROUND_MOD_EXPR:
7966 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7967 subtarget = 0;
7968 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7969 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7970 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7972 case FIX_ROUND_EXPR:
7973 case FIX_FLOOR_EXPR:
7974 case FIX_CEIL_EXPR:
7975 abort (); /* Not used for C. */
7977 case FIX_TRUNC_EXPR:
7978 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7979 if (target == 0)
7980 target = gen_reg_rtx (mode);
7981 expand_fix (target, op0, unsignedp);
7982 return target;
7984 case FLOAT_EXPR:
7985 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7986 if (target == 0)
7987 target = gen_reg_rtx (mode);
7988 /* expand_float can't figure out what to do if FROM has VOIDmode.
7989 So give it the correct mode. With -O, cse will optimize this. */
7990 if (GET_MODE (op0) == VOIDmode)
7991 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7992 op0);
7993 expand_float (target, op0,
7994 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7995 return target;
7997 case NEGATE_EXPR:
7998 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7999 temp = expand_unop (mode,
8000 ! unsignedp && flag_trapv
8001 && (GET_MODE_CLASS(mode) == MODE_INT)
8002 ? negv_optab : neg_optab, op0, target, 0);
8003 if (temp == 0)
8004 abort ();
8005 return temp;
8007 case ABS_EXPR:
8008 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8010 /* Handle complex values specially. */
8011 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8012 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8013 return expand_complex_abs (mode, op0, target, unsignedp);
8015 /* Unsigned abs is simply the operand. Testing here means we don't
8016 risk generating incorrect code below. */
8017 if (TREE_UNSIGNED (type))
8018 return op0;
8020 return expand_abs (mode, op0, target, unsignedp,
8021 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8023 case MAX_EXPR:
8024 case MIN_EXPR:
8025 target = original_target;
8026 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8027 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8028 || GET_MODE (target) != mode
8029 || (GET_CODE (target) == REG
8030 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8031 target = gen_reg_rtx (mode);
8032 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8033 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8035 /* First try to do it with a special MIN or MAX instruction.
8036 If that does not win, use a conditional jump to select the proper
8037 value. */
8038 this_optab = (TREE_UNSIGNED (type)
8039 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8040 : (code == MIN_EXPR ? smin_optab : smax_optab));
8042 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8043 OPTAB_WIDEN);
8044 if (temp != 0)
8045 return temp;
8047 /* At this point, a MEM target is no longer useful; we will get better
8048 code without it. */
8050 if (GET_CODE (target) == MEM)
8051 target = gen_reg_rtx (mode);
8053 if (target != op0)
8054 emit_move_insn (target, op0);
8056 op0 = gen_label_rtx ();
8058 /* If this mode is an integer too wide to compare properly,
8059 compare word by word. Rely on cse to optimize constant cases. */
8060 if (GET_MODE_CLASS (mode) == MODE_INT
8061 && ! can_compare_p (GE, mode, ccp_jump))
8063 if (code == MAX_EXPR)
8064 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8065 target, op1, NULL_RTX, op0);
8066 else
8067 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8068 op1, target, NULL_RTX, op0);
8070 else
8072 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8073 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8074 unsignedp, mode, NULL_RTX, NULL_RTX,
8075 op0);
8077 emit_move_insn (target, op1);
8078 emit_label (op0);
8079 return target;
8081 case BIT_NOT_EXPR:
8082 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8083 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8084 if (temp == 0)
8085 abort ();
8086 return temp;
8088 case FFS_EXPR:
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8090 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8091 if (temp == 0)
8092 abort ();
8093 return temp;
8095 /* ??? Can optimize bitwise operations with one arg constant.
8096 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8097 and (a bitwise1 b) bitwise2 b (etc)
8098 but that is probably not worthwhile. */
8100 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8101 boolean values when we want in all cases to compute both of them. In
8102 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8103 as actual zero-or-1 values and then bitwise anding. In cases where
8104 there cannot be any side effects, better code would be made by
8105 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8106 how to recognize those cases. */
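    /* Illustrative example: a TRUTH_AND_EXPR of two side-effect-free
       0-or-1 operands is expanded exactly like BIT_AND_EXPR -- both
       operands are evaluated and the results ANDed -- whereas
       TRUTH_ANDIF_EXPR (the usual expansion of C's &&) would branch
       around evaluating the second operand.  */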
8108 case TRUTH_AND_EXPR:
8109 case BIT_AND_EXPR:
8110 this_optab = and_optab;
8111 goto binop;
8113 case TRUTH_OR_EXPR:
8114 case BIT_IOR_EXPR:
8115 this_optab = ior_optab;
8116 goto binop;
8118 case TRUTH_XOR_EXPR:
8119 case BIT_XOR_EXPR:
8120 this_optab = xor_optab;
8121 goto binop;
8123 case LSHIFT_EXPR:
8124 case RSHIFT_EXPR:
8125 case LROTATE_EXPR:
8126 case RROTATE_EXPR:
8127 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8128 subtarget = 0;
8129 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8130 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8131 unsignedp);
8133 /* Could determine the answer when only additive constants differ. Also,
8134 the addition of one can be handled by changing the condition. */
8135 case LT_EXPR:
8136 case LE_EXPR:
8137 case GT_EXPR:
8138 case GE_EXPR:
8139 case EQ_EXPR:
8140 case NE_EXPR:
8141 case UNORDERED_EXPR:
8142 case ORDERED_EXPR:
8143 case UNLT_EXPR:
8144 case UNLE_EXPR:
8145 case UNGT_EXPR:
8146 case UNGE_EXPR:
8147 case UNEQ_EXPR:
8148 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8149 if (temp != 0)
8150 return temp;
8152 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8153 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8154 && original_target
8155 && GET_CODE (original_target) == REG
8156 && (GET_MODE (original_target)
8157 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8159 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8160 VOIDmode, 0);
8162 /* If temp is constant, we can just compute the result. */
8163 if (GET_CODE (temp) == CONST_INT)
8165 if (INTVAL (temp) != 0)
8166 emit_move_insn (target, const1_rtx);
8167 else
8168 emit_move_insn (target, const0_rtx);
8170 return target;
8173 if (temp != original_target)
8175 enum machine_mode mode1 = GET_MODE (temp);
8176 if (mode1 == VOIDmode)
8177 mode1 = tmode != VOIDmode ? tmode : mode;
8179 temp = copy_to_mode_reg (mode1, temp);
8182 op1 = gen_label_rtx ();
8183 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8184 GET_MODE (temp), unsignedp, op1);
8185 emit_move_insn (temp, const1_rtx);
8186 emit_label (op1);
8187 return temp;
8190 /* If no set-flag instruction, must generate a conditional
8191 store into a temporary variable. Drop through
8192 and handle this like && and ||. */
8194 case TRUTH_ANDIF_EXPR:
8195 case TRUTH_ORIF_EXPR:
8196 if (! ignore
8197 && (target == 0 || ! safe_from_p (target, exp, 1)
8198 /* Make sure we don't have a hard reg (such as function's return
8199 value) live across basic blocks, if not optimizing. */
8200 || (!optimize && GET_CODE (target) == REG
8201 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8202 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8204 if (target)
8205 emit_clr_insn (target);
8207 op1 = gen_label_rtx ();
8208 jumpifnot (exp, op1);
8210 if (target)
8211 emit_0_to_1_insn (target);
8213 emit_label (op1);
8214 return ignore ? const0_rtx : target;
8216 case TRUTH_NOT_EXPR:
8217 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8218 /* The parser is careful to generate TRUTH_NOT_EXPR
8219 only with operands that are always zero or one. */
8220 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8221 target, 1, OPTAB_LIB_WIDEN);
8222 if (temp == 0)
8223 abort ();
8224 return temp;
8226 case COMPOUND_EXPR:
8227 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8228 emit_queue ();
8229 return expand_expr (TREE_OPERAND (exp, 1),
8230 (ignore ? const0_rtx : target),
8231 VOIDmode, 0);
8233 case COND_EXPR:
8234 /* If we would have a "singleton" (see below) were it not for a
8235 conversion in each arm, bring that conversion back out. */
8236 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8237 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8238 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8239 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8241 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8242 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8244 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8245 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8246 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8247 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8248 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8249 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8250 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8251 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8252 return expand_expr (build1 (NOP_EXPR, type,
8253 build (COND_EXPR, TREE_TYPE (iftrue),
8254 TREE_OPERAND (exp, 0),
8255 iftrue, iffalse)),
8256 target, tmode, modifier);
8260 /* Note that COND_EXPRs whose type is a structure or union
8261 are required to be constructed to contain assignments of
8262 a temporary variable, so that we can evaluate them here
8263 for side effect only. If type is void, we must do likewise. */
8265 /* If an arm of the branch requires a cleanup,
8266 only that cleanup is performed. */
8268 tree singleton = 0;
8269 tree binary_op = 0, unary_op = 0;
8271 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8272 convert it to our mode, if necessary. */
8273 if (integer_onep (TREE_OPERAND (exp, 1))
8274 && integer_zerop (TREE_OPERAND (exp, 2))
8275 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8277 if (ignore)
8279 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8280 modifier);
8281 return const0_rtx;
8284 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8285 if (GET_MODE (op0) == mode)
8286 return op0;
8288 if (target == 0)
8289 target = gen_reg_rtx (mode);
8290 convert_move (target, op0, unsignedp);
8291 return target;
8294 /* Check for X ? A + B : A. If we have this, we can copy A to the
8295 output and conditionally add B. Similarly for unary operations.
8296 Don't do this if X has side-effects because those side effects
8297 might affect A or B and the "?" operation is a sequence point in
8298 ANSI. (operand_equal_p tests for side effects.) */
8300 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8301 && operand_equal_p (TREE_OPERAND (exp, 2),
8302 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8303 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8304 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8305 && operand_equal_p (TREE_OPERAND (exp, 1),
8306 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8307 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8308 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8309 && operand_equal_p (TREE_OPERAND (exp, 2),
8310 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8311 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8312 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8313 && operand_equal_p (TREE_OPERAND (exp, 1),
8314 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8315 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8317 /* If we are not to produce a result, we have no target. Otherwise,
8318 if a target was specified use it; it will not be used as an
8319 intermediate target unless it is safe. If no target, use a
8320 temporary. */
8322 if (ignore)
8323 temp = 0;
8324 else if (original_target
8325 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8326 || (singleton && GET_CODE (original_target) == REG
8327 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8328 && original_target == var_rtx (singleton)))
8329 && GET_MODE (original_target) == mode
8330 #ifdef HAVE_conditional_move
8331 && (! can_conditionally_move_p (mode)
8332 || GET_CODE (original_target) == REG
8333 || TREE_ADDRESSABLE (type))
8334 #endif
8335 && (GET_CODE (original_target) != MEM
8336 || TREE_ADDRESSABLE (type)))
8337 temp = original_target;
8338 else if (TREE_ADDRESSABLE (type))
8339 abort ();
8340 else
8341 temp = assign_temp (type, 0, 0, 1);
8343 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8344 do the test of X as a store-flag operation, do this as
8345 A + ((X != 0) << log C). Similarly for other simple binary
8346 operators. Only do for C == 1 if BRANCH_COST is low. */
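	/* Hypothetical example: "x ? a + 4 : a" can be emitted as
	   a + ((x != 0) << 2), replacing the branch with a store-flag
	   and a shift when branches are costly enough (see the
	   BRANCH_COST test below).  */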
8347 if (temp && singleton && binary_op
8348 && (TREE_CODE (binary_op) == PLUS_EXPR
8349 || TREE_CODE (binary_op) == MINUS_EXPR
8350 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8351 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8352 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8353 : integer_onep (TREE_OPERAND (binary_op, 1)))
8354 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8356 rtx result;
8357 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8358 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8359 ? addv_optab : add_optab)
8360 : TREE_CODE (binary_op) == MINUS_EXPR
8361 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8362 ? subv_optab : sub_optab)
8363 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8364 : xor_optab);
8366 /* If we had X ? A : A + 1, do this as A + (X == 0).
8368 We have to invert the truth value here and then put it
8369 back later if do_store_flag fails. We cannot simply copy
8370 TREE_OPERAND (exp, 0) to another variable and modify that
8371 because invert_truthvalue can modify the tree pointed to
8372 by its argument. */
8373 if (singleton == TREE_OPERAND (exp, 1))
8374 TREE_OPERAND (exp, 0)
8375 = invert_truthvalue (TREE_OPERAND (exp, 0));
8377 result = do_store_flag (TREE_OPERAND (exp, 0),
8378 (safe_from_p (temp, singleton, 1)
8379 ? temp : NULL_RTX),
8380 mode, BRANCH_COST <= 1);
8382 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8383 result = expand_shift (LSHIFT_EXPR, mode, result,
8384 build_int_2 (tree_log2
8385 (TREE_OPERAND
8386 (binary_op, 1)),
8388 (safe_from_p (temp, singleton, 1)
8389 ? temp : NULL_RTX), 0);
8391 if (result)
8393 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8394 return expand_binop (mode, boptab, op1, result, temp,
8395 unsignedp, OPTAB_LIB_WIDEN);
8397 else if (singleton == TREE_OPERAND (exp, 1))
8398 TREE_OPERAND (exp, 0)
8399 = invert_truthvalue (TREE_OPERAND (exp, 0));
8402 do_pending_stack_adjust ();
8403 NO_DEFER_POP;
8404 op0 = gen_label_rtx ();
8406 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8408 if (temp != 0)
8410 /* If the target conflicts with the other operand of the
8411 binary op, we can't use it. Also, we can't use the target
8412 if it is a hard register, because evaluating the condition
8413 might clobber it. */
8414 if ((binary_op
8415 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8416 || (GET_CODE (temp) == REG
8417 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8418 temp = gen_reg_rtx (mode);
8419 store_expr (singleton, temp, 0);
8421 else
8422 expand_expr (singleton,
8423 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8424 if (singleton == TREE_OPERAND (exp, 1))
8425 jumpif (TREE_OPERAND (exp, 0), op0);
8426 else
8427 jumpifnot (TREE_OPERAND (exp, 0), op0);
8429 start_cleanup_deferral ();
8430 if (binary_op && temp == 0)
8431 /* Just touch the other operand. */
8432 expand_expr (TREE_OPERAND (binary_op, 1),
8433 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8434 else if (binary_op)
8435 store_expr (build (TREE_CODE (binary_op), type,
8436 make_tree (type, temp),
8437 TREE_OPERAND (binary_op, 1)),
8438 temp, 0);
8439 else
8440 store_expr (build1 (TREE_CODE (unary_op), type,
8441 make_tree (type, temp)),
8442 temp, 0);
8443 op1 = op0;
8445 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8446 comparison operator. If we have one of these cases, set the
8447 output to A, branch on A (cse will merge these two references),
8448 then set the output to FOO. */
8449 else if (temp
8450 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8451 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8452 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8453 TREE_OPERAND (exp, 1), 0)
8454 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8455 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8456 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8458 if (GET_CODE (temp) == REG
8459 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8460 temp = gen_reg_rtx (mode);
8461 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8462 jumpif (TREE_OPERAND (exp, 0), op0);
8464 start_cleanup_deferral ();
8465 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8466 op1 = op0;
8468 else if (temp
8469 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8470 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8471 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8472 TREE_OPERAND (exp, 2), 0)
8473 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8474 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8475 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8477 if (GET_CODE (temp) == REG
8478 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8479 temp = gen_reg_rtx (mode);
8480 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8481 jumpifnot (TREE_OPERAND (exp, 0), op0);
8483 start_cleanup_deferral ();
8484 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8485 op1 = op0;
8487 else
8489 op1 = gen_label_rtx ();
8490 jumpifnot (TREE_OPERAND (exp, 0), op0);
8492 start_cleanup_deferral ();
8494 /* One branch of the cond can be void, if it never returns. For
8495 example A ? throw : E */
8496 if (temp != 0
8497 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8498 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8499 else
8500 expand_expr (TREE_OPERAND (exp, 1),
8501 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8502 end_cleanup_deferral ();
8503 emit_queue ();
8504 emit_jump_insn (gen_jump (op1));
8505 emit_barrier ();
8506 emit_label (op0);
8507 start_cleanup_deferral ();
8508 if (temp != 0
8509 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8510 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8511 else
8512 expand_expr (TREE_OPERAND (exp, 2),
8513 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8516 end_cleanup_deferral ();
8518 emit_queue ();
8519 emit_label (op1);
8520 OK_DEFER_POP;
8522 return temp;
8525 case TARGET_EXPR:
8527 /* Something needs to be initialized, but we didn't know
8528 where that thing was when building the tree. For example,
8529 it could be the return value of a function, or a parameter
8530 to a function which is laid out on the stack, or a temporary
8531 variable which must be passed by reference.
8533 We guarantee that the expression will either be constructed
8534 or copied into our original target. */
8536 tree slot = TREE_OPERAND (exp, 0);
8537 tree cleanups = NULL_TREE;
8538 tree exp1;
8540 if (TREE_CODE (slot) != VAR_DECL)
8541 abort ();
8543 if (! ignore)
8544 target = original_target;
8546 /* Set this here so that if we get a target that refers to a
8547 register variable that's already been used, put_reg_into_stack
8548 knows that it should fix up those uses. */
8549 TREE_USED (slot) = 1;
8551 if (target == 0)
8553 if (DECL_RTL_SET_P (slot))
8555 target = DECL_RTL (slot);
8556 /* If we have already expanded the slot, so don't do
8557 it again. (mrs) */
8558 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8559 return target;
8561 else
8563 target = assign_temp (type, 2, 0, 1);
8564 /* All temp slots at this level must not conflict. */
8565 preserve_temp_slots (target);
8566 SET_DECL_RTL (slot, target);
8567 if (TREE_ADDRESSABLE (slot))
8568 put_var_into_stack (slot);
8570 /* Since SLOT is not known to the called function
8571 to belong to its stack frame, we must build an explicit
8572 cleanup. This case occurs when we must build up a reference
8573 to pass the reference as an argument. In this case,
8574 it is very likely that such a reference need not be
8575 built here. */
8577 if (TREE_OPERAND (exp, 2) == 0)
8578 TREE_OPERAND (exp, 2)
8579 = (*lang_hooks.maybe_build_cleanup) (slot);
8580 cleanups = TREE_OPERAND (exp, 2);
8583 else
8585 /* This case does occur, when expanding a parameter which
8586 needs to be constructed on the stack. The target
8587 is the actual stack address that we want to initialize.
8588 The function we call will perform the cleanup in this case. */
8590 /* If we have already assigned it space, use that space,
8591 not the target that we were passed in, as our target
8592 parameter is only a hint. */
8593 if (DECL_RTL_SET_P (slot))
8595 target = DECL_RTL (slot);
8596 /* If we have already expanded the slot, don't do
8597 it again. (mrs) */
8598 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8599 return target;
8601 else
8603 SET_DECL_RTL (slot, target);
8604 /* If we must have an addressable slot, then make sure that
8605 the RTL that we just stored in slot is OK. */
8606 if (TREE_ADDRESSABLE (slot))
8607 put_var_into_stack (slot);
8611 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8612 /* Mark it as expanded. */
8613 TREE_OPERAND (exp, 1) = NULL_TREE;
8615 store_expr (exp1, target, 0);
8617 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8619 return target;
8622 case INIT_EXPR:
8624 tree lhs = TREE_OPERAND (exp, 0);
8625 tree rhs = TREE_OPERAND (exp, 1);
8627 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8628 return temp;
8631 case MODIFY_EXPR:
8633 /* If lhs is complex, expand calls in rhs before computing it.
8634 That's so we don't compute a pointer and save it over a
8635 call. If lhs is simple, compute it first so we can give it
8636 as a target if the rhs is just a call. This avoids an
8637 extra temp and copy and that prevents a partial-subsumption
8638 which makes bad code. Actually we could treat
8639 component_ref's of vars like vars. */
8641 tree lhs = TREE_OPERAND (exp, 0);
8642 tree rhs = TREE_OPERAND (exp, 1);
8644 temp = 0;
8646 /* Check for |= or &= of a bitfield of size one into another bitfield
8647 of size 1. In this case, (unless we need the result of the
8648 assignment) we can do this more efficiently with a
8649 test followed by an assignment, if necessary.
8651 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8652 things change so we do, this code should be enhanced to
8653 support it. */
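	/* Illustrative example (field names hypothetical): for
	     struct { unsigned f : 1; } *p, *q;
	     p->f |= q->f;
	   when the result is not needed, this emits the equivalent of
	     if (q->f) p->f = 1;
	   avoiding a read-modify-write of P's bitfield when Q's bit
	   is clear.  */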
8654 if (ignore
8655 && TREE_CODE (lhs) == COMPONENT_REF
8656 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8657 || TREE_CODE (rhs) == BIT_AND_EXPR)
8658 && TREE_OPERAND (rhs, 0) == lhs
8659 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8660 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8661 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8663 rtx label = gen_label_rtx ();
8665 do_jump (TREE_OPERAND (rhs, 1),
8666 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8667 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8668 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8669 (TREE_CODE (rhs) == BIT_IOR_EXPR
8670 ? integer_one_node
8671 : integer_zero_node)),
8672 0, 0);
8673 do_pending_stack_adjust ();
8674 emit_label (label);
8675 return const0_rtx;
8678 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8680 return temp;
8683 case RETURN_EXPR:
8684 if (!TREE_OPERAND (exp, 0))
8685 expand_null_return ();
8686 else
8687 expand_return (TREE_OPERAND (exp, 0));
8688 return const0_rtx;
8690 case PREINCREMENT_EXPR:
8691 case PREDECREMENT_EXPR:
8692 return expand_increment (exp, 0, ignore);
8694 case POSTINCREMENT_EXPR:
8695 case POSTDECREMENT_EXPR:
8696 /* Faster to treat as pre-increment if result is not used. */
8697 return expand_increment (exp, ! ignore, ignore);
8699 case ADDR_EXPR:
8700 /* Are we taking the address of a nested function? */
8701 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8702 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8703 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8704 && ! TREE_STATIC (exp))
8706 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8707 op0 = force_operand (op0, target);
8709 /* If we are taking the address of something erroneous, just
8710 return a zero. */
8711 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8712 return const0_rtx;
8713 /* If we are taking the address of a constant and are at the
8714 top level, we have to use output_constant_def since we can't
8715 call force_const_mem at top level. */
8716 else if (cfun == 0
8717 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8718 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8719 == 'c')))
8720 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8721 else
8723 /* We make sure to pass const0_rtx down if we came in with
8724 ignore set, to avoid doing the cleanups twice for something. */
8725 op0 = expand_expr (TREE_OPERAND (exp, 0),
8726 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8727 (modifier == EXPAND_INITIALIZER
8728 ? modifier : EXPAND_CONST_ADDRESS));
8730 /* If we are going to ignore the result, OP0 will have been set
8731 to const0_rtx, so just return it. Don't get confused and
8732 think we are taking the address of the constant. */
8733 if (ignore)
8734 return op0;
8736 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8737 clever and return a REG when given a MEM. */
8738 op0 = protect_from_queue (op0, 1);
8740 /* We would like the object in memory. If it is a constant, we can
8741 have it be statically allocated into memory. For a non-constant,
8742 we need to allocate some memory and store the value into it. */
8744 if (CONSTANT_P (op0))
8745 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8746 op0);
8747 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8748 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8749 || GET_CODE (op0) == PARALLEL)
8751 /* If the operand is a SAVE_EXPR, we can deal with this by
8752 forcing the SAVE_EXPR into memory. */
8753 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8755 put_var_into_stack (TREE_OPERAND (exp, 0));
8756 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8758 else
8760 /* If this object is in a register, it can't be BLKmode. */
8761 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8762 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8764 if (GET_CODE (op0) == PARALLEL)
8765 /* Handle calls that pass values in multiple
8766 non-contiguous locations. The Irix 6 ABI has examples
8767 of this. */
8768 emit_group_store (memloc, op0,
8769 int_size_in_bytes (inner_type));
8770 else
8771 emit_move_insn (memloc, op0);
8773 op0 = memloc;
8777 if (GET_CODE (op0) != MEM)
8778 abort ();
8780 mark_temp_addr_taken (op0);
8781 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8783 op0 = XEXP (op0, 0);
8784 #ifdef POINTERS_EXTEND_UNSIGNED
8785 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8786 && mode == ptr_mode)
8787 op0 = convert_memory_address (ptr_mode, op0);
8788 #endif
8789 return op0;
8792 /* If OP0 is not aligned at least as much as the type requires, we
8793 need to make a temporary, copy OP0 to it, and take the address of
8794 the temporary. We want to use the alignment of the type, not of
8795 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8796 the test for BLKmode means that can't happen. The test for
8797 BLKmode is because we never make mis-aligned MEMs with
8798 non-BLKmode.
8800 We don't need to do this at all if the machine doesn't have
8801 strict alignment. */
8802 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8803 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8804 > MEM_ALIGN (op0))
8805 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8807 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8808 rtx new
8809 = assign_stack_temp_for_type
8810 (TYPE_MODE (inner_type),
8811 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8812 : int_size_in_bytes (inner_type),
8813 1, build_qualified_type (inner_type,
8814 (TYPE_QUALS (inner_type)
8815 | TYPE_QUAL_CONST)));
8817 if (TYPE_ALIGN_OK (inner_type))
8818 abort ();
8820 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8821 op0 = new;
8824 op0 = force_operand (XEXP (op0, 0), target);
8827 if (flag_force_addr
8828 && GET_CODE (op0) != REG
8829 && modifier != EXPAND_CONST_ADDRESS
8830 && modifier != EXPAND_INITIALIZER
8831 && modifier != EXPAND_SUM)
8832 op0 = force_reg (Pmode, op0);
8834 if (GET_CODE (op0) == REG
8835 && ! REG_USERVAR_P (op0))
8836 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8838 #ifdef POINTERS_EXTEND_UNSIGNED
8839 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8840 && mode == ptr_mode)
8841 op0 = convert_memory_address (ptr_mode, op0);
8842 #endif
8844 return op0;
8846 case ENTRY_VALUE_EXPR:
8847 abort ();
8849 /* COMPLEX type for Extended Pascal & Fortran */
8850 case COMPLEX_EXPR:
8852 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8853 rtx insns;
8855 /* Get the rtx code of the operands. */
8856 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8857 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8859 if (! target)
8860 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8862 start_sequence ();
8864 /* Move the real (op0) and imaginary (op1) parts to their location. */
8865 emit_move_insn (gen_realpart (mode, target), op0);
8866 emit_move_insn (gen_imagpart (mode, target), op1);
8868 insns = get_insns ();
8869 end_sequence ();
8871 /* Complex construction should appear as a single unit. */
8872 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8873 each with a separate pseudo as destination.
8874 It's not correct for flow to treat them as a unit. */
8875 if (GET_CODE (target) != CONCAT)
8876 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8877 else
8878 emit_insn (insns);
8880 return target;
8883 case REALPART_EXPR:
8884 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8885 return gen_realpart (mode, op0);
8887 case IMAGPART_EXPR:
8888 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8889 return gen_imagpart (mode, op0);
8891 case CONJ_EXPR:
8893 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8894 rtx imag_t;
8895 rtx insns;
8897 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8899 if (! target)
8900 target = gen_reg_rtx (mode);
8902 start_sequence ();
8904 /* Store the realpart and the negated imagpart to target. */
8905 emit_move_insn (gen_realpart (partmode, target),
8906 gen_realpart (partmode, op0));
8908 imag_t = gen_imagpart (partmode, target);
8909 temp = expand_unop (partmode,
8910 ! unsignedp && flag_trapv
8911 && (GET_MODE_CLASS(partmode) == MODE_INT)
8912 ? negv_optab : neg_optab,
8913 gen_imagpart (partmode, op0), imag_t, 0);
8914 if (temp != imag_t)
8915 emit_move_insn (imag_t, temp);
8917 insns = get_insns ();
8918 end_sequence ();
8920 /* Conjugate should appear as a single unit.
8921 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8922 each with a separate pseudo as destination.
8923 It's not correct for flow to treat them as a unit. */
8924 if (GET_CODE (target) != CONCAT)
8925 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8926 else
8927 emit_insn (insns);
8929 return target;
8932 case TRY_CATCH_EXPR:
8934 tree handler = TREE_OPERAND (exp, 1);
8936 expand_eh_region_start ();
8938 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8940 expand_eh_region_end_cleanup (handler);
8942 return op0;
8945 case TRY_FINALLY_EXPR:
8947 tree try_block = TREE_OPERAND (exp, 0);
8948 tree finally_block = TREE_OPERAND (exp, 1);
8949 rtx finally_label = gen_label_rtx ();
8950 rtx done_label = gen_label_rtx ();
8951 rtx return_link = gen_reg_rtx (Pmode);
8952 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8953 (tree) finally_label, (tree) return_link);
8954 TREE_SIDE_EFFECTS (cleanup) = 1;
8956 /* Start a new binding layer that will keep track of all cleanup
8957 actions to be performed. */
8958 expand_start_bindings (2);
8960 target_temp_slot_level = temp_slot_level;
8962 expand_decl_cleanup (NULL_TREE, cleanup);
8963 op0 = expand_expr (try_block, target, tmode, modifier);
8965 preserve_temp_slots (op0);
8966 expand_end_bindings (NULL_TREE, 0, 0);
8967 emit_jump (done_label);
8968 emit_label (finally_label);
8969 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8970 emit_indirect_jump (return_link);
8971 emit_label (done_label);
8972 return op0;
8975 case GOTO_SUBROUTINE_EXPR:
8977 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8978 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8979 rtx return_address = gen_label_rtx ();
8980 emit_move_insn (return_link,
8981 gen_rtx_LABEL_REF (Pmode, return_address));
8982 emit_jump (subr);
8983 emit_label (return_address);
8984 return const0_rtx;
8987 case VA_ARG_EXPR:
8988 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8990 case EXC_PTR_EXPR:
8991 return get_exception_pointer (cfun);
8993 case FDESC_EXPR:
8994 /* Function descriptors are not valid except as
8995 initialization constants, and should not be expanded. */
8996 abort ();
8998 default:
8999 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9002 /* Here to do an ordinary binary operator, generating an instruction
9003 from the optab already placed in `this_optab'. */
9004 binop:
9005 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9006 subtarget = 0;
9007 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9008 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9009 binop2:
9010 temp = expand_binop (mode, this_optab, op0, op1, target,
9011 unsignedp, OPTAB_LIB_WIDEN);
9012 if (temp == 0)
9013 abort ();
9014 return temp;
9017 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9018 when applied to the address of EXP produces an address known to be
9019 aligned more than BIGGEST_ALIGNMENT. */
9021 static int
9022 is_aligning_offset (offset, exp)
9023 tree offset;
9024 tree exp;
9026 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9027 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9028 || TREE_CODE (offset) == NOP_EXPR
9029 || TREE_CODE (offset) == CONVERT_EXPR
9030 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9031 offset = TREE_OPERAND (offset, 0);
9033 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9034 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9035 if (TREE_CODE (offset) != BIT_AND_EXPR
9036 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9037 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9038 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9039 return 0;
9041 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9042 It must be NEGATE_EXPR. Then strip any more conversions. */
9043 offset = TREE_OPERAND (offset, 0);
9044 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9045 || TREE_CODE (offset) == NOP_EXPR
9046 || TREE_CODE (offset) == CONVERT_EXPR)
9047 offset = TREE_OPERAND (offset, 0);
9049 if (TREE_CODE (offset) != NEGATE_EXPR)
9050 return 0;
9052 offset = TREE_OPERAND (offset, 0);
9053 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9054 || TREE_CODE (offset) == NOP_EXPR
9055 || TREE_CODE (offset) == CONVERT_EXPR)
9056 offset = TREE_OPERAND (offset, 0);
9058 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9059 whose type is the same as EXP. */
9060 return (TREE_CODE (offset) == ADDR_EXPR
9061 && (TREE_OPERAND (offset, 0) == exp
9062 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9063 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9064 == TREE_TYPE (exp)))));
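/* An illustrative sketch, not part of the compiler itself: the shape of
   offset the code above is meant to recognize.  For a hypothetical object
   OBJ being padded out to a boundary ALIGN (a power of two larger than
   BIGGEST_ALIGNMENT), a front end would build an offset of the form

       (- (unsigned long) &obj) & (ALIGN - 1)

   that is, a BIT_AND_EXPR whose second operand is one less than a power
   of two and whose first operand is a NEGATE_EXPR of the object's
   address, possibly wrapped in conversions at either level.  */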
9067 /* Return the tree node if an ARG corresponds to a string constant or zero
9068 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9069 in bytes within the string that ARG is accessing. The type of the
9070 offset will be `sizetype'. */
9072 tree
9073 string_constant (arg, ptr_offset)
9074 tree arg;
9075 tree *ptr_offset;
9077 STRIP_NOPS (arg);
9079 if (TREE_CODE (arg) == ADDR_EXPR
9080 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9082 *ptr_offset = size_zero_node;
9083 return TREE_OPERAND (arg, 0);
9085 else if (TREE_CODE (arg) == PLUS_EXPR)
9087 tree arg0 = TREE_OPERAND (arg, 0);
9088 tree arg1 = TREE_OPERAND (arg, 1);
9090 STRIP_NOPS (arg0);
9091 STRIP_NOPS (arg1);
9093 if (TREE_CODE (arg0) == ADDR_EXPR
9094 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9096 *ptr_offset = convert (sizetype, arg1);
9097 return TREE_OPERAND (arg0, 0);
9099 else if (TREE_CODE (arg1) == ADDR_EXPR
9100 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9102 *ptr_offset = convert (sizetype, arg0);
9103 return TREE_OPERAND (arg1, 0);
9107 return 0;
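/* An illustrative sketch, not taken from the source tree: how a caller
   such as a builtin string expander might use string_constant.  The
   variable ARG and the local names are hypothetical.  */
#if 0
{
  tree offset_node;
  tree str = string_constant (arg, &offset_node);

  if (str != 0
      && host_integerp (offset_node, 1)
      && compare_tree_int (offset_node, TREE_STRING_LENGTH (str)) < 0)
    {
      /* The constant bytes being addressed start at
	 TREE_STRING_POINTER (str) + tree_low_cst (offset_node, 1).  */
    }
}
#endif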
9110 /* Expand code for a post- or pre- increment or decrement
9111 and return the RTX for the result.
9112 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9114 static rtx
9115 expand_increment (exp, post, ignore)
9116 tree exp;
9117 int post, ignore;
9119 rtx op0, op1;
9120 rtx temp, value;
9121 tree incremented = TREE_OPERAND (exp, 0);
9122 optab this_optab = add_optab;
9123 int icode;
9124 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9125 int op0_is_copy = 0;
9126 int single_insn = 0;
9127 /* 1 means we can't store into OP0 directly,
9128 because it is a subreg narrower than a word,
9129 and we don't dare clobber the rest of the word. */
9130 int bad_subreg = 0;
9132 /* Stabilize any component ref that might need to be
9133 evaluated more than once below. */
9134 if (!post
9135 || TREE_CODE (incremented) == BIT_FIELD_REF
9136 || (TREE_CODE (incremented) == COMPONENT_REF
9137 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9138 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9139 incremented = stabilize_reference (incremented);
9140 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9141 ones into save exprs so that they don't accidentally get evaluated
9142 more than once by the code below. */
9143 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9144 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9145 incremented = save_expr (incremented);
9147 /* Compute the operands as RTX.
9148 Note whether OP0 is the actual lvalue or a copy of it:
9149 I believe it is a copy iff it is a register or subreg
9150 and insns were generated in computing it. */
9152 temp = get_last_insn ();
9153 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9155 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9156 in place but instead must do sign- or zero-extension during assignment,
9157 so we copy it into a new register and let the code below use it as
9158 a copy.
9160 Note that we can safely modify this SUBREG since it is known not to be
9161 shared (it was made by the expand_expr call above). */
9163 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9165 if (post)
9166 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9167 else
9168 bad_subreg = 1;
9170 else if (GET_CODE (op0) == SUBREG
9171 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9173 /* We cannot increment this SUBREG in place. If we are
9174 post-incrementing, get a copy of the old value. Otherwise,
9175 just mark that we cannot increment in place. */
9176 if (post)
9177 op0 = copy_to_reg (op0);
9178 else
9179 bad_subreg = 1;
9182 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9183 && temp != get_last_insn ());
9184 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9186 /* Decide whether incrementing or decrementing. */
9187 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9188 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9189 this_optab = sub_optab;
9191 /* Convert decrement by a constant into a negative increment. */
9192 if (this_optab == sub_optab
9193 && GET_CODE (op1) == CONST_INT)
9195 op1 = GEN_INT (-INTVAL (op1));
9196 this_optab = add_optab;
9199 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9200 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9202 /* For a preincrement, see if we can do this with a single instruction. */
9203 if (!post)
9205 icode = (int) this_optab->handlers[(int) mode].insn_code;
9206 if (icode != (int) CODE_FOR_nothing
9207 /* Make sure that OP0 is valid for operands 0 and 1
9208 of the insn we want to queue. */
9209 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9210 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9211 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9212 single_insn = 1;
9215 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9216 then we cannot just increment OP0. We must therefore contrive to
9217 increment the original value. Then, for postincrement, we can return
9218 OP0 since it is a copy of the old value. For preincrement, expand here
9219 unless we can do it with a single insn.
9221 Likewise if storing directly into OP0 would clobber high bits
9222 we need to preserve (bad_subreg). */
9223 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9225 /* This is the easiest way to increment the value wherever it is.
9226 Problems with multiple evaluation of INCREMENTED are prevented
9227 because either (1) it is a component_ref or preincrement,
9228 in which case it was stabilized above, or (2) it is an array_ref
9229 with constant index in an array in a register, which is
9230 safe to reevaluate. */
9231 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9232 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9233 ? MINUS_EXPR : PLUS_EXPR),
9234 TREE_TYPE (exp),
9235 incremented,
9236 TREE_OPERAND (exp, 1));
9238 while (TREE_CODE (incremented) == NOP_EXPR
9239 || TREE_CODE (incremented) == CONVERT_EXPR)
9241 newexp = convert (TREE_TYPE (incremented), newexp);
9242 incremented = TREE_OPERAND (incremented, 0);
9245 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9246 return post ? op0 : temp;
9249 if (post)
9251 /* We have a true reference to the value in OP0.
9252 If there is an insn to add or subtract in this mode, queue it.
9253 Queueing the increment insn avoids the register shuffling
9254 that often results if we must increment now and first save
9255 the old value for subsequent use. */
9257 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9258 op0 = stabilize (op0);
9259 #endif
9261 icode = (int) this_optab->handlers[(int) mode].insn_code;
9262 if (icode != (int) CODE_FOR_nothing
9263 /* Make sure that OP0 is valid for operands 0 and 1
9264 of the insn we want to queue. */
9265 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9266 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9268 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9269 op1 = force_reg (mode, op1);
9271 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9273 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9275 rtx addr = (general_operand (XEXP (op0, 0), mode)
9276 ? force_reg (Pmode, XEXP (op0, 0))
9277 : copy_to_reg (XEXP (op0, 0)));
9278 rtx temp, result;
9280 op0 = replace_equiv_address (op0, addr);
9281 temp = force_reg (GET_MODE (op0), op0);
9282 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9283 op1 = force_reg (mode, op1);
9285 /* The increment queue is LIFO, thus we have to `queue'
9286 the instructions in reverse order. */
9287 enqueue_insn (op0, gen_move_insn (op0, temp));
9288 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9289 return result;
9293 /* Preincrement, or we can't increment with one simple insn. */
9294 if (post)
9295 /* Save a copy of the value before inc or dec, to return it later. */
9296 temp = value = copy_to_reg (op0);
9297 else
9298 /* Arrange to return the incremented value. */
9299 /* Copy the rtx because expand_binop will protect from the queue,
9300 and the results of that would be invalid for us to return
9301 if our caller does emit_queue before using our result. */
9302 temp = copy_rtx (value = op0);
9304 /* Increment however we can. */
9305 op1 = expand_binop (mode, this_optab, value, op1, op0,
9306 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9308 /* Make sure the value is stored into OP0. */
9309 if (op1 != op0)
9310 emit_move_insn (op0, op1);
9312 return temp;
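/* An illustrative sketch, in plain C rather than RTL, of the two shapes
   the expansion above produces; X and N are hypothetical.

   Preincrement/predecrement ("++x"): the updated value is the result, so
   the add or subtract targets X directly and X itself is returned.

   Postincrement/postdecrement ("x++") when the update cannot simply be
   queued: the old value is the result, so the expansion amounts to

       temp = x;      copy of the old value, the value of "x++"
       x = x + n;
       result = temp;                                                   */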
9315 /* At the start of a function, record that we have no previously-pushed
9316 arguments waiting to be popped. */
9318 void
9319 init_pending_stack_adjust ()
9321 pending_stack_adjust = 0;
9324 /* When exiting from function, if safe, clear out any pending stack adjust
9325 so the adjustment won't get done.
9327 Note, if the current function calls alloca, then it must have a
9328 frame pointer regardless of the value of flag_omit_frame_pointer. */
9330 void
9331 clear_pending_stack_adjust ()
9333 #ifdef EXIT_IGNORE_STACK
9334 if (optimize > 0
9335 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9336 && EXIT_IGNORE_STACK
9337 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9338 && ! flag_inline_functions)
9340 stack_pointer_delta -= pending_stack_adjust,
9341 pending_stack_adjust = 0;
9343 #endif
9346 /* Pop any previously-pushed arguments that have not been popped yet. */
9348 void
9349 do_pending_stack_adjust ()
9351 if (inhibit_defer_pop == 0)
9353 if (pending_stack_adjust != 0)
9354 adjust_stack (GEN_INT (pending_stack_adjust));
9355 pending_stack_adjust = 0;
9359 /* Expand conditional expressions. */
9361 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9362 LABEL is an rtx of code CODE_LABEL, in this function and all the
9363 functions here. */
9365 void
9366 jumpifnot (exp, label)
9367 tree exp;
9368 rtx label;
9370 do_jump (exp, label, NULL_RTX);
9373 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9375 void
9376 jumpif (exp, label)
9377 tree exp;
9378 rtx label;
9380 do_jump (exp, NULL_RTX, label);
9383 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9384 the result is zero, or IF_TRUE_LABEL if the result is one.
9385 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9386 meaning fall through in that case.
9388 do_jump always does any pending stack adjust except when it does not
9389 actually perform a jump. An example where there is no jump
9390 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9392 This function is responsible for optimizing cases such as
9393 &&, || and comparison operators in EXP. */
9395 void
9396 do_jump (exp, if_false_label, if_true_label)
9397 tree exp;
9398 rtx if_false_label, if_true_label;
9400 enum tree_code code = TREE_CODE (exp);
9401 /* Some cases need to create a label to jump to
9402 in order to properly fall through.
9403 These cases set DROP_THROUGH_LABEL nonzero. */
9404 rtx drop_through_label = 0;
9405 rtx temp;
9406 int i;
9407 tree type;
9408 enum machine_mode mode;
9410 #ifdef MAX_INTEGER_COMPUTATION_MODE
9411 check_max_integer_computation_mode (exp);
9412 #endif
9414 emit_queue ();
9416 switch (code)
9418 case ERROR_MARK:
9419 break;
9421 case INTEGER_CST:
9422 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9423 if (temp)
9424 emit_jump (temp);
9425 break;
9427 #if 0
9428 /* This is not true with #pragma weak */
9429 case ADDR_EXPR:
9430 /* The address of something can never be zero. */
9431 if (if_true_label)
9432 emit_jump (if_true_label);
9433 break;
9434 #endif
9436 case NOP_EXPR:
9437 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9438 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9439 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9440 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9441 goto normal;
9442 case CONVERT_EXPR:
9443 /* If we are narrowing the operand, we have to do the compare in the
9444 narrower mode. */
9445 if ((TYPE_PRECISION (TREE_TYPE (exp))
9446 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9447 goto normal;
9448 case NON_LVALUE_EXPR:
9449 case REFERENCE_EXPR:
9450 case ABS_EXPR:
9451 case NEGATE_EXPR:
9452 case LROTATE_EXPR:
9453 case RROTATE_EXPR:
9454 /* These cannot change zero->non-zero or vice versa. */
9455 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9456 break;
9458 case WITH_RECORD_EXPR:
9459 /* Put the object on the placeholder list, recurse through our first
9460 operand, and pop the list. */
9461 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9462 placeholder_list);
9463 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9464 placeholder_list = TREE_CHAIN (placeholder_list);
9465 break;
9467 #if 0
9468 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9469 a test, and can take more if the test is eliminated. */
9470 case PLUS_EXPR:
9471 /* Reduce to minus. */
9472 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9473 TREE_OPERAND (exp, 0),
9474 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9475 TREE_OPERAND (exp, 1))));
9476 /* Process as MINUS. */
9477 #endif
9479 case MINUS_EXPR:
9480 /* Non-zero iff operands of minus differ. */
9481 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9482 TREE_OPERAND (exp, 0),
9483 TREE_OPERAND (exp, 1)),
9484 NE, NE, if_false_label, if_true_label);
9485 break;
9487 case BIT_AND_EXPR:
9488 /* If we are AND'ing with a small constant, do this comparison in the
9489 smallest type that fits. If the machine doesn't have comparisons
9490 that small, it will be converted back to the wider comparison.
9491 This helps if we are testing the sign bit of a narrower object.
9492 combine can't do this for us because it can't know whether a
9493 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9495 if (! SLOW_BYTE_ACCESS
9496 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9497 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9498 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9499 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9500 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9501 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9502 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9503 != CODE_FOR_nothing))
9505 do_jump (convert (type, exp), if_false_label, if_true_label);
9506 break;
9508 goto normal;
9510 case TRUTH_NOT_EXPR:
9511 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9512 break;
9514 case TRUTH_ANDIF_EXPR:
9515 if (if_false_label == 0)
9516 if_false_label = drop_through_label = gen_label_rtx ();
9517 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9518 start_cleanup_deferral ();
9519 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9520 end_cleanup_deferral ();
9521 break;
9523 case TRUTH_ORIF_EXPR:
9524 if (if_true_label == 0)
9525 if_true_label = drop_through_label = gen_label_rtx ();
9526 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9527 start_cleanup_deferral ();
9528 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9529 end_cleanup_deferral ();
9530 break;
9532 case COMPOUND_EXPR:
9533 push_temp_slots ();
9534 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9535 preserve_temp_slots (NULL_RTX);
9536 free_temp_slots ();
9537 pop_temp_slots ();
9538 emit_queue ();
9539 do_pending_stack_adjust ();
9540 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9541 break;
9543 case COMPONENT_REF:
9544 case BIT_FIELD_REF:
9545 case ARRAY_REF:
9546 case ARRAY_RANGE_REF:
9548 HOST_WIDE_INT bitsize, bitpos;
9549 int unsignedp;
9550 enum machine_mode mode;
9551 tree type;
9552 tree offset;
9553 int volatilep = 0;
9555 /* Get description of this reference. We don't actually care
9556 about the underlying object here. */
9557 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9558 &unsignedp, &volatilep);
9560 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9561 if (! SLOW_BYTE_ACCESS
9562 && type != 0 && bitsize >= 0
9563 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9564 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9565 != CODE_FOR_nothing))
9567 do_jump (convert (type, exp), if_false_label, if_true_label);
9568 break;
9570 goto normal;
9573 case COND_EXPR:
9574 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9575 if (integer_onep (TREE_OPERAND (exp, 1))
9576 && integer_zerop (TREE_OPERAND (exp, 2)))
9577 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9579 else if (integer_zerop (TREE_OPERAND (exp, 1))
9580 && integer_onep (TREE_OPERAND (exp, 2)))
9581 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9583 else
9585 rtx label1 = gen_label_rtx ();
9586 drop_through_label = gen_label_rtx ();
9588 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9590 start_cleanup_deferral ();
9591 /* Now the THEN-expression. */
9592 do_jump (TREE_OPERAND (exp, 1),
9593 if_false_label ? if_false_label : drop_through_label,
9594 if_true_label ? if_true_label : drop_through_label);
9595 /* In case the do_jump just above never jumps. */
9596 do_pending_stack_adjust ();
9597 emit_label (label1);
9599 /* Now the ELSE-expression. */
9600 do_jump (TREE_OPERAND (exp, 2),
9601 if_false_label ? if_false_label : drop_through_label,
9602 if_true_label ? if_true_label : drop_through_label);
9603 end_cleanup_deferral ();
9605 break;
9607 case EQ_EXPR:
9609 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9611 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9612 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9614 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9615 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9616 do_jump
9617 (fold
9618 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9619 fold (build (EQ_EXPR, TREE_TYPE (exp),
9620 fold (build1 (REALPART_EXPR,
9621 TREE_TYPE (inner_type),
9622 exp0)),
9623 fold (build1 (REALPART_EXPR,
9624 TREE_TYPE (inner_type),
9625 exp1)))),
9626 fold (build (EQ_EXPR, TREE_TYPE (exp),
9627 fold (build1 (IMAGPART_EXPR,
9628 TREE_TYPE (inner_type),
9629 exp0)),
9630 fold (build1 (IMAGPART_EXPR,
9631 TREE_TYPE (inner_type),
9632 exp1)))))),
9633 if_false_label, if_true_label);
9636 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9637 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9639 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9640 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9641 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9642 else
9643 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9644 break;
9647 case NE_EXPR:
9649 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9651 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9652 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9654 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9655 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9656 do_jump
9657 (fold
9658 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9659 fold (build (NE_EXPR, TREE_TYPE (exp),
9660 fold (build1 (REALPART_EXPR,
9661 TREE_TYPE (inner_type),
9662 exp0)),
9663 fold (build1 (REALPART_EXPR,
9664 TREE_TYPE (inner_type),
9665 exp1)))),
9666 fold (build (NE_EXPR, TREE_TYPE (exp),
9667 fold (build1 (IMAGPART_EXPR,
9668 TREE_TYPE (inner_type),
9669 exp0)),
9670 fold (build1 (IMAGPART_EXPR,
9671 TREE_TYPE (inner_type),
9672 exp1)))))),
9673 if_false_label, if_true_label);
9676 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9677 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9679 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9680 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9681 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9682 else
9683 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9684 break;
9687 case LT_EXPR:
9688 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9689 if (GET_MODE_CLASS (mode) == MODE_INT
9690 && ! can_compare_p (LT, mode, ccp_jump))
9691 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9692 else
9693 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9694 break;
9696 case LE_EXPR:
9697 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9698 if (GET_MODE_CLASS (mode) == MODE_INT
9699 && ! can_compare_p (LE, mode, ccp_jump))
9700 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9701 else
9702 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9703 break;
9705 case GT_EXPR:
9706 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9707 if (GET_MODE_CLASS (mode) == MODE_INT
9708 && ! can_compare_p (GT, mode, ccp_jump))
9709 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9710 else
9711 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9712 break;
9714 case GE_EXPR:
9715 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9716 if (GET_MODE_CLASS (mode) == MODE_INT
9717 && ! can_compare_p (GE, mode, ccp_jump))
9718 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9719 else
9720 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9721 break;
9723 case UNORDERED_EXPR:
9724 case ORDERED_EXPR:
9726 enum rtx_code cmp, rcmp;
9727 int do_rev;
9729 if (code == UNORDERED_EXPR)
9730 cmp = UNORDERED, rcmp = ORDERED;
9731 else
9732 cmp = ORDERED, rcmp = UNORDERED;
9733 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9735 do_rev = 0;
9736 if (! can_compare_p (cmp, mode, ccp_jump)
9737 && (can_compare_p (rcmp, mode, ccp_jump)
9738 /* If the target doesn't provide either UNORDERED or ORDERED
9739 comparisons, canonicalize on UNORDERED for the library. */
9740 || rcmp == UNORDERED))
9741 do_rev = 1;
9743 if (! do_rev)
9744 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9745 else
9746 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9748 break;
9751 enum rtx_code rcode1;
9752 enum tree_code tcode2;
9754 case UNLT_EXPR:
9755 rcode1 = UNLT;
9756 tcode2 = LT_EXPR;
9757 goto unordered_bcc;
9758 case UNLE_EXPR:
9759 rcode1 = UNLE;
9760 tcode2 = LE_EXPR;
9761 goto unordered_bcc;
9762 case UNGT_EXPR:
9763 rcode1 = UNGT;
9764 tcode2 = GT_EXPR;
9765 goto unordered_bcc;
9766 case UNGE_EXPR:
9767 rcode1 = UNGE;
9768 tcode2 = GE_EXPR;
9769 goto unordered_bcc;
9770 case UNEQ_EXPR:
9771 rcode1 = UNEQ;
9772 tcode2 = EQ_EXPR;
9773 goto unordered_bcc;
9775 unordered_bcc:
9776 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9777 if (can_compare_p (rcode1, mode, ccp_jump))
9778 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9779 if_true_label);
9780 else
9782 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9783 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9784 tree cmp0, cmp1;
9786 /* If the target doesn't support combined unordered
9787 compares, decompose into UNORDERED + comparison. */
9788 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9789 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9790 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9791 do_jump (exp, if_false_label, if_true_label);
9794 break;
9796 /* Special case:
9797 __builtin_expect (<test>, 0) and
9798 __builtin_expect (<test>, 1)
9800 We need to do this here, so that <test> is not converted to a SCC
9801 operation on machines that use condition code registers and COMPARE
9802 like the PowerPC, and then the jump is done based on whether the SCC
9803 operation produced a 1 or 0. */
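 /* An illustrative example (not from the source tree): given
	"if (__builtin_expect (n == 0, 0)) ...", the test "n == 0" should be
	expanded as a compare of N followed by a conditional jump marked as
	unlikely, rather than first materializing the 0/1 value of the test
	and then jumping on that value.  */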
9804 case CALL_EXPR:
9805 /* Check for a built-in function. */
9806 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9808 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9809 tree arglist = TREE_OPERAND (exp, 1);
9811 if (TREE_CODE (fndecl) == FUNCTION_DECL
9812 && DECL_BUILT_IN (fndecl)
9813 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9814 && arglist != NULL_TREE
9815 && TREE_CHAIN (arglist) != NULL_TREE)
9817 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9818 if_true_label);
9820 if (seq != NULL_RTX)
9822 emit_insn (seq);
9823 return;
9827 /* fall through and generate the normal code. */
9829 default:
9830 normal:
9831 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9832 #if 0
9833 /* This is not needed any more and causes poor code since it causes
9834 comparisons and tests from non-SI objects to have different code
9835 sequences. */
9836 /* Copy to register to avoid generating bad insns by cse
9837 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9838 if (!cse_not_expected && GET_CODE (temp) == MEM)
9839 temp = copy_to_reg (temp);
9840 #endif
9841 do_pending_stack_adjust ();
9842 /* Do any postincrements in the expression that was tested. */
9843 emit_queue ();
9845 if (GET_CODE (temp) == CONST_INT
9846 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9847 || GET_CODE (temp) == LABEL_REF)
9849 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9850 if (target)
9851 emit_jump (target);
9853 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9854 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9855 /* Note swapping the labels gives us not-equal. */
9856 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9857 else if (GET_MODE (temp) != VOIDmode)
9858 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9859 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9860 GET_MODE (temp), NULL_RTX,
9861 if_false_label, if_true_label);
9862 else
9863 abort ();
9866 if (drop_through_label)
9868 /* If do_jump produces code that might be jumped around,
9869 do any stack adjusts from that code, before the place
9870 where control merges in. */
9871 do_pending_stack_adjust ();
9872 emit_label (drop_through_label);
9876 /* Given a comparison expression EXP for values too wide to be compared
9877 with one insn, test the comparison and jump to the appropriate label.
9878 The code of EXP is ignored; we always test GT if SWAP is 0,
9879 and LT if SWAP is 1. */
9881 static void
9882 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9883 tree exp;
9884 int swap;
9885 rtx if_false_label, if_true_label;
9887 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9888 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9889 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9890 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9892 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9895 /* Compare OP0 with OP1, word at a time, in mode MODE.
9896 UNSIGNEDP says to do unsigned comparison.
9897 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9899 void
9900 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9901 enum machine_mode mode;
9902 int unsignedp;
9903 rtx op0, op1;
9904 rtx if_false_label, if_true_label;
9906 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9907 rtx drop_through_label = 0;
9908 int i;
9910 if (! if_true_label || ! if_false_label)
9911 drop_through_label = gen_label_rtx ();
9912 if (! if_true_label)
9913 if_true_label = drop_through_label;
9914 if (! if_false_label)
9915 if_false_label = drop_through_label;
9917 /* Compare a word at a time, high order first. */
9918 for (i = 0; i < nwords; i++)
9920 rtx op0_word, op1_word;
9922 if (WORDS_BIG_ENDIAN)
9924 op0_word = operand_subword_force (op0, i, mode);
9925 op1_word = operand_subword_force (op1, i, mode);
9927 else
9929 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9930 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9933 /* All but high-order word must be compared as unsigned. */
9934 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9935 (unsignedp || i > 0), word_mode, NULL_RTX,
9936 NULL_RTX, if_true_label);
9938 /* Consider lower words only if these are equal. */
9939 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9940 NULL_RTX, NULL_RTX, if_false_label);
9943 if (if_false_label)
9944 emit_jump (if_false_label);
9945 if (drop_through_label)
9946 emit_label (drop_through_label);
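/* An illustrative sketch, plain C rather than RTL, of the sequence the
   routine above emits for a two-word unsigned comparison; the _HI/_LO
   names are hypothetical.  */
#if 0
  if (op0_hi > op1_hi)
    goto if_true_label;
  if (op0_hi != op1_hi)
    goto if_false_label;
  if (op0_lo > op1_lo)   /* all but the high-order word compare unsigned */
    goto if_true_label;
  goto if_false_label;
#endif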
9949 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9950 with one insn, test the comparison and jump to the appropriate label. */
9952 static void
9953 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9954 tree exp;
9955 rtx if_false_label, if_true_label;
9957 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9958 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9959 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9960 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9961 int i;
9962 rtx drop_through_label = 0;
9964 if (! if_false_label)
9965 drop_through_label = if_false_label = gen_label_rtx ();
9967 for (i = 0; i < nwords; i++)
9968 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9969 operand_subword_force (op1, i, mode),
9970 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9971 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9973 if (if_true_label)
9974 emit_jump (if_true_label);
9975 if (drop_through_label)
9976 emit_label (drop_through_label);
9979 /* Jump according to whether OP0 is 0.
9980 We assume that OP0 has an integer mode that is too wide
9981 for the available compare insns. */
9983 void
9984 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9985 rtx op0;
9986 rtx if_false_label, if_true_label;
9988 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9989 rtx part;
9990 int i;
9991 rtx drop_through_label = 0;
9993 /* The fastest way of doing this comparison on almost any machine is to
9994 "or" all the words and compare the result. If all have to be loaded
9995 from memory and this is a very wide item, it's possible this may
9996 be slower, but that's highly unlikely. */
9998 part = gen_reg_rtx (word_mode);
9999 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10000 for (i = 1; i < nwords && part != 0; i++)
10001 part = expand_binop (word_mode, ior_optab, part,
10002 operand_subword_force (op0, i, GET_MODE (op0)),
10003 part, 1, OPTAB_WIDEN);
10005 if (part != 0)
10007 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10008 NULL_RTX, if_false_label, if_true_label);
10010 return;
10013 /* If we couldn't do the "or" simply, do this with a series of compares. */
10014 if (! if_false_label)
10015 drop_through_label = if_false_label = gen_label_rtx ();
10017 for (i = 0; i < nwords; i++)
10018 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10019 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10020 if_false_label, NULL_RTX);
10022 if (if_true_label)
10023 emit_jump (if_true_label);
10025 if (drop_through_label)
10026 emit_label (drop_through_label);
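/* An illustrative sketch, plain C rather than RTL, of the "or all the
   words" test above for a two-word value; the _HI/_LO names are
   hypothetical.  Control reaches IF_TRUE_LABEL when the value is zero.  */
#if 0
  if ((op0_hi | op0_lo) == 0)
    goto if_true_label;
  else
    goto if_false_label;
#endif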
10029 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10030 (including code to compute the values to be compared)
10031 and set (CC0) according to the result.
10032 The decision as to signed or unsigned comparison must be made by the caller.
10034 We force a stack adjustment unless there are currently
10035 things pushed on the stack that aren't yet used.
10037 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10038 compared. */
rtx
10041 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10042 rtx op0, op1;
10043 enum rtx_code code;
10044 int unsignedp;
10045 enum machine_mode mode;
10046 rtx size;
10048 rtx tem;
10050 /* If one operand is constant, make it the second one. Only do this
10051 if the other operand is not constant as well. */
10053 if (swap_commutative_operands_p (op0, op1))
10055 tem = op0;
10056 op0 = op1;
10057 op1 = tem;
10058 code = swap_condition (code);
10061 if (flag_force_mem)
10063 op0 = force_not_mem (op0);
10064 op1 = force_not_mem (op1);
10067 do_pending_stack_adjust ();
10069 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10070 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10071 return tem;
10073 #if 0
10074 /* There's no need to do this now that combine.c can eliminate lots of
10075 sign extensions. This can be less efficient in certain cases on other
10076 machines. */
10078 /* If this is a signed equality comparison, we can do it as an
10079 unsigned comparison since zero-extension is cheaper than sign
10080 extension and comparisons with zero are done as unsigned. This is
10081 the case even on machines that can do fast sign extension, since
10082 zero-extension is easier to combine with other operations than
10083 sign-extension is. If we are comparing against a constant, we must
10084 convert it to what it would look like unsigned. */
10085 if ((code == EQ || code == NE) && ! unsignedp
10086 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10088 if (GET_CODE (op1) == CONST_INT
10089 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10090 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10091 unsignedp = 1;
10093 #endif
10095 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10097 #if HAVE_cc0
10098 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10099 #else
10100 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10101 #endif
10104 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10105 The decision as to signed or unsigned comparison must be made by the caller.
10107 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10108 compared. */
10110 void
10111 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10112 if_false_label, if_true_label)
10113 rtx op0, op1;
10114 enum rtx_code code;
10115 int unsignedp;
10116 enum machine_mode mode;
10117 rtx size;
10118 rtx if_false_label, if_true_label;
10120 rtx tem;
10121 int dummy_true_label = 0;
10123 /* Reverse the comparison if that is safe and we want to jump if it is
10124 false. */
10125 if (! if_true_label && ! FLOAT_MODE_P (mode))
10127 if_true_label = if_false_label;
10128 if_false_label = 0;
10129 code = reverse_condition (code);
10132 /* If one operand is constant, make it the second one. Only do this
10133 if the other operand is not constant as well. */
10135 if (swap_commutative_operands_p (op0, op1))
10137 tem = op0;
10138 op0 = op1;
10139 op1 = tem;
10140 code = swap_condition (code);
10143 if (flag_force_mem)
10145 op0 = force_not_mem (op0);
10146 op1 = force_not_mem (op1);
10149 do_pending_stack_adjust ();
10151 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10152 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10154 if (tem == const_true_rtx)
10156 if (if_true_label)
10157 emit_jump (if_true_label);
10159 else
10161 if (if_false_label)
10162 emit_jump (if_false_label);
10164 return;
10167 #if 0
10168 /* There's no need to do this now that combine.c can eliminate lots of
10169 sign extensions. This can be less efficient in certain cases on other
10170 machines. */
10172 /* If this is a signed equality comparison, we can do it as an
10173 unsigned comparison since zero-extension is cheaper than sign
10174 extension and comparisons with zero are done as unsigned. This is
10175 the case even on machines that can do fast sign extension, since
10176 zero-extension is easier to combine with other operations than
10177 sign-extension is. If we are comparing against a constant, we must
10178 convert it to what it would look like unsigned. */
10179 if ((code == EQ || code == NE) && ! unsignedp
10180 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10182 if (GET_CODE (op1) == CONST_INT
10183 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10184 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10185 unsignedp = 1;
10187 #endif
10189 if (! if_true_label)
10191 dummy_true_label = 1;
10192 if_true_label = gen_label_rtx ();
10195 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10196 if_true_label);
10198 if (if_false_label)
10199 emit_jump (if_false_label);
10200 if (dummy_true_label)
10201 emit_label (if_true_label);
10204 /* Generate code for a comparison expression EXP (including code to compute
10205 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10206 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10207 generated code will drop through.
10208 SIGNED_CODE should be the rtx operation for this comparison for
10209 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10211 We force a stack adjustment unless there are currently
10212 things pushed on the stack that aren't yet used. */
10214 static void
10215 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10216 if_true_label)
10217 tree exp;
10218 enum rtx_code signed_code, unsigned_code;
10219 rtx if_false_label, if_true_label;
10221 rtx op0, op1;
10222 tree type;
10223 enum machine_mode mode;
10224 int unsignedp;
10225 enum rtx_code code;
10227 /* Don't crash if the comparison was erroneous. */
10228 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10229 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10230 return;
10232 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10233 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10234 return;
10236 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10237 mode = TYPE_MODE (type);
10238 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10239 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10240 || (GET_MODE_BITSIZE (mode)
10241 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10242 1)))))))
10244 /* op0 might have been replaced by a promoted constant, in which
10245 case the type of the second argument should be used. */
10246 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10247 mode = TYPE_MODE (type);
10249 unsignedp = TREE_UNSIGNED (type);
10250 code = unsignedp ? unsigned_code : signed_code;
10252 #ifdef HAVE_canonicalize_funcptr_for_compare
10253 /* If function pointers need to be "canonicalized" before they can
10254 be reliably compared, then canonicalize them. */
10255 if (HAVE_canonicalize_funcptr_for_compare
10256 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10257 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10258 == FUNCTION_TYPE))
10260 rtx new_op0 = gen_reg_rtx (mode);
10262 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10263 op0 = new_op0;
10266 if (HAVE_canonicalize_funcptr_for_compare
10267 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10268 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10269 == FUNCTION_TYPE))
10271 rtx new_op1 = gen_reg_rtx (mode);
10273 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10274 op1 = new_op1;
10276 #endif
10278 /* Do any postincrements in the expression that was tested. */
10279 emit_queue ();
10281 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10282 ((mode == BLKmode)
10283 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10284 if_false_label, if_true_label);
10287 /* Generate code to calculate EXP using a store-flag instruction
10288 and return an rtx for the result. EXP is either a comparison
10289 or a TRUTH_NOT_EXPR whose operand is a comparison.
10291 If TARGET is nonzero, store the result there if convenient.
10293 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10294 cheap.
10296 Return zero if there is no suitable set-flag instruction
10297 available on this machine.
10299 Once expand_expr has been called on the arguments of the comparison,
10300 we are committed to doing the store flag, since it is not safe to
10301 re-evaluate the expression. We emit the store-flag insn by calling
10302 emit_store_flag, but only expand the arguments if we have a reason
10303 to believe that emit_store_flag will be successful. If we think that
10304 it will, but it isn't, we have to simulate the store-flag with a
10305 set/jump/set sequence. */
10307 static rtx
10308 do_store_flag (exp, target, mode, only_cheap)
10309 tree exp;
10310 rtx target;
10311 enum machine_mode mode;
10312 int only_cheap;
10314 enum rtx_code code;
10315 tree arg0, arg1, type;
10316 tree tem;
10317 enum machine_mode operand_mode;
10318 int invert = 0;
10319 int unsignedp;
10320 rtx op0, op1;
10321 enum insn_code icode;
10322 rtx subtarget = target;
10323 rtx result, label;
10325 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10326 result at the end. We can't simply invert the test since it would
10327 have already been inverted if it were valid. This case occurs for
10328 some floating-point comparisons. */
10330 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10331 invert = 1, exp = TREE_OPERAND (exp, 0);
10333 arg0 = TREE_OPERAND (exp, 0);
10334 arg1 = TREE_OPERAND (exp, 1);
10336 /* Don't crash if the comparison was erroneous. */
10337 if (arg0 == error_mark_node || arg1 == error_mark_node)
10338 return const0_rtx;
10340 type = TREE_TYPE (arg0);
10341 operand_mode = TYPE_MODE (type);
10342 unsignedp = TREE_UNSIGNED (type);
10344 /* We won't bother with BLKmode store-flag operations because it would mean
10345 passing a lot of information to emit_store_flag. */
10346 if (operand_mode == BLKmode)
10347 return 0;
10349 /* We won't bother with store-flag operations involving function pointers
10350 when function pointers must be canonicalized before comparisons. */
10351 #ifdef HAVE_canonicalize_funcptr_for_compare
10352 if (HAVE_canonicalize_funcptr_for_compare
10353 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10354 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10355 == FUNCTION_TYPE))
10356 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10357 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10358 == FUNCTION_TYPE))))
10359 return 0;
10360 #endif
10362 STRIP_NOPS (arg0);
10363 STRIP_NOPS (arg1);
10365 /* Get the rtx comparison code to use. We know that EXP is a comparison
10366 operation of some type. Some comparisons against 1 and -1 can be
10367 converted to comparisons with zero. Do so here so that the tests
10368 below will be aware that we have a comparison with zero. These
10369 tests will not catch constants in the first operand, but constants
10370 are rarely passed as the first operand. */
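 /* Illustrative examples of those conversions: "x >= 1" is treated as
     "x > 0" (GTU when X is unsigned), and for signed X "x <= -1" is
     treated as "x < 0".  */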
10372 switch (TREE_CODE (exp))
10374 case EQ_EXPR:
10375 code = EQ;
10376 break;
10377 case NE_EXPR:
10378 code = NE;
10379 break;
10380 case LT_EXPR:
10381 if (integer_onep (arg1))
10382 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10383 else
10384 code = unsignedp ? LTU : LT;
10385 break;
10386 case LE_EXPR:
10387 if (! unsignedp && integer_all_onesp (arg1))
10388 arg1 = integer_zero_node, code = LT;
10389 else
10390 code = unsignedp ? LEU : LE;
10391 break;
10392 case GT_EXPR:
10393 if (! unsignedp && integer_all_onesp (arg1))
10394 arg1 = integer_zero_node, code = GE;
10395 else
10396 code = unsignedp ? GTU : GT;
10397 break;
10398 case GE_EXPR:
10399 if (integer_onep (arg1))
10400 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10401 else
10402 code = unsignedp ? GEU : GE;
10403 break;
10405 case UNORDERED_EXPR:
10406 code = UNORDERED;
10407 break;
10408 case ORDERED_EXPR:
10409 code = ORDERED;
10410 break;
10411 case UNLT_EXPR:
10412 code = UNLT;
10413 break;
10414 case UNLE_EXPR:
10415 code = UNLE;
10416 break;
10417 case UNGT_EXPR:
10418 code = UNGT;
10419 break;
10420 case UNGE_EXPR:
10421 code = UNGE;
10422 break;
10423 case UNEQ_EXPR:
10424 code = UNEQ;
10425 break;
10427 default:
10428 abort ();
10431 /* Put a constant second. */
10432 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10434 tem = arg0; arg0 = arg1; arg1 = tem;
10435 code = swap_condition (code);
10438 /* If this is an equality or inequality test of a single bit, we can
10439 do this by shifting the bit being tested to the low-order bit and
10440 masking the result with the constant 1. If the condition was EQ,
10441 we xor it with 1. This does not require an scc insn and is faster
10442 than an scc insn even if we have it. */
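     /* An illustrative example (not part of the expansion itself): for a
	32-bit X, "(x & 0x10) != 0" is computed as "(x >> 4) & 1", and
	"(x & 0x10) == 0" as "((x >> 4) & 1) ^ 1".  */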
10444 if ((code == NE || code == EQ)
10445 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10446 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10448 tree inner = TREE_OPERAND (arg0, 0);
10449 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10450 int ops_unsignedp;
10452 /* If INNER is a right shift of a constant and it plus BITNUM does
10453 not overflow, adjust BITNUM and INNER. */
10455 if (TREE_CODE (inner) == RSHIFT_EXPR
10456 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10457 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10458 && bitnum < TYPE_PRECISION (type)
10459 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10460 bitnum - TYPE_PRECISION (type)))
10462 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10463 inner = TREE_OPERAND (inner, 0);
10466 /* If we are going to be able to omit the AND below, we must do our
10467 operations as unsigned. If we must use the AND, we have a choice.
10468 Normally unsigned is faster, but for some machines signed is. */
10469 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10470 #ifdef LOAD_EXTEND_OP
10471 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10472 #else
: 1
9474 #endif
);
10477 if (! get_subtarget (subtarget)
10478 || GET_MODE (subtarget) != operand_mode
10479 || ! safe_from_p (subtarget, inner, 1))
10480 subtarget = 0;
10482 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10484 if (bitnum != 0)
10485 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10486 size_int (bitnum), subtarget, ops_unsignedp);
10488 if (GET_MODE (op0) != mode)
10489 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10491 if ((code == EQ && ! invert) || (code == NE && invert))
10492 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10493 ops_unsignedp, OPTAB_LIB_WIDEN);
10495 /* Put the AND last so it can combine with more things. */
10496 if (bitnum != TYPE_PRECISION (type) - 1)
10497 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10499 return op0;
10502 /* Now see if we are likely to be able to do this. Return if not. */
10503 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10504 return 0;
10506 icode = setcc_gen_code[(int) code];
10507 if (icode == CODE_FOR_nothing
10508 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10510 /* We can only do this if it is one of the special cases that
10511 can be handled without an scc insn. */
10512 if ((code == LT && integer_zerop (arg1))
10513 || (! only_cheap && code == GE && integer_zerop (arg1)))
10515 else if (BRANCH_COST >= 0
10516 && ! only_cheap && (code == NE || code == EQ)
10517 && TREE_CODE (type) != REAL_TYPE
10518 && ((abs_optab->handlers[(int) operand_mode].insn_code
10519 != CODE_FOR_nothing)
10520 || (ffs_optab->handlers[(int) operand_mode].insn_code
10521 != CODE_FOR_nothing)))
10523 else
10524 return 0;
10527 if (! get_subtarget (target)
10528 || GET_MODE (subtarget) != operand_mode
10529 || ! safe_from_p (subtarget, arg1, 1))
10530 subtarget = 0;
10532 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10533 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10535 if (target == 0)
10536 target = gen_reg_rtx (mode);
10538 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10539 because, if emit_store_flag does anything, it will succeed and
10540 OP0 and OP1 will not be used subsequently. */
10542 result = emit_store_flag (target, code,
10543 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10544 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10545 operand_mode, unsignedp, 1);
10547 if (result)
10549 if (invert)
10550 result = expand_binop (mode, xor_optab, result, const1_rtx,
10551 result, 0, OPTAB_LIB_WIDEN);
10552 return result;
10555 /* If this failed, we have to do this with set/compare/jump/set code. */
10556 if (GET_CODE (target) != REG
10557 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10558 target = gen_reg_rtx (GET_MODE (target));
10560 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10561 result = compare_from_rtx (op0, op1, code, unsignedp,
10562 operand_mode, NULL_RTX);
10563 if (GET_CODE (result) == CONST_INT)
10564 return (((result == const0_rtx && ! invert)
10565 || (result != const0_rtx && invert))
10566 ? const0_rtx : const1_rtx);
10568 /* The code of RESULT may not match CODE if compare_from_rtx
10569 decided to swap its operands and reverse the original code.
10571 We know that compare_from_rtx returns either a CONST_INT or
10572 a new comparison code, so it is safe to just extract the
10573 code from RESULT. */
10574 code = GET_CODE (result);
10576 label = gen_label_rtx ();
10577 if (bcc_gen_fctn[(int) code] == 0)
10578 abort ();
10580 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10581 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10582 emit_label (label);
10584 return target;
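/* An illustrative sketch, plain C rather than RTL, of the set/jump/set
   fallback emitted just above when the store-flag insn cannot be used
   directly (shown for the non-inverted case; the names are schematic):

       target = 1;
       if (op0 <code> op1)
	 goto label;
       target = 0;
     label:
       ...                                                               */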
10588 /* Stubs in case we haven't got a casesi insn. */
10589 #ifndef HAVE_casesi
10590 # define HAVE_casesi 0
10591 # define gen_casesi(a, b, c, d, e) (0)
10592 # define CODE_FOR_casesi CODE_FOR_nothing
10593 #endif
10595 /* If the machine does not have a case insn that compares the bounds,
10596 this means extra overhead for dispatch tables, which raises the
10597 threshold for using them. */
10598 #ifndef CASE_VALUES_THRESHOLD
10599 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10600 #endif /* CASE_VALUES_THRESHOLD */
10602 unsigned int
10603 case_values_threshold ()
10605 return CASE_VALUES_THRESHOLD;
10608 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10609 0 otherwise (i.e. if there is no casesi instruction). */
int
10611 try_casesi (index_type, index_expr, minval, range,
10612 table_label, default_label)
10613 tree index_type, index_expr, minval, range;
10614 rtx table_label ATTRIBUTE_UNUSED;
10615 rtx default_label;
10617 enum machine_mode index_mode = SImode;
10618 int index_bits = GET_MODE_BITSIZE (index_mode);
10619 rtx op1, op2, index;
10620 enum machine_mode op_mode;
10622 if (! HAVE_casesi)
10623 return 0;
10625 /* Convert the index to SImode. */
10626 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10628 enum machine_mode omode = TYPE_MODE (index_type);
10629 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10631 /* We must handle the endpoints in the original mode. */
10632 index_expr = build (MINUS_EXPR, index_type,
10633 index_expr, minval);
10634 minval = integer_zero_node;
10635 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10636 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10637 omode, 1, default_label);
10638 /* Now we can safely truncate. */
10639 index = convert_to_mode (index_mode, index, 0);
10641 else
10643 if (TYPE_MODE (index_type) != index_mode)
10645 index_expr = convert ((*lang_hooks.types.type_for_size)
10646 (index_bits, 0), index_expr);
10647 index_type = TREE_TYPE (index_expr);
10650 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10652 emit_queue ();
10653 index = protect_from_queue (index, 0);
10654 do_pending_stack_adjust ();
10656 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10657 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10658 (index, op_mode))
10659 index = copy_to_mode_reg (op_mode, index);
10661 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10663 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10664 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10665 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10666 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10667 (op1, op_mode))
10668 op1 = copy_to_mode_reg (op_mode, op1);
10670 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10672 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10673 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10674 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10675 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10676 (op2, op_mode))
10677 op2 = copy_to_mode_reg (op_mode, op2);
10679 emit_jump_insn (gen_casesi (index, op1, op2,
10680 table_label, default_label));
10681 return 1;
10684 /* Attempt to generate a tablejump instruction; same concept. */
10685 #ifndef HAVE_tablejump
10686 #define HAVE_tablejump 0
10687 #define gen_tablejump(x, y) (0)
10688 #endif
10690 /* Subroutine of the next function.
10692 INDEX is the value being switched on, with the lowest value
10693 in the table already subtracted.
10694 MODE is its expected mode (needed if INDEX is constant).
10695 RANGE is the length of the jump table.
10696 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10698 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10699 index value is out of range. */
10701 static void
10702 do_tablejump (index, mode, range, table_label, default_label)
10703 rtx index, range, table_label, default_label;
10704 enum machine_mode mode;
10706 rtx temp, vector;
10708 /* Do an unsigned comparison (in the proper mode) between the index
10709 expression and the value which represents the length of the range.
10710 Since we just finished subtracting the lower bound of the range
10711 from the index expression, this comparison allows us to simultaneously
10712 check that the original index expression value is both greater than
10713 or equal to the minimum value of the range and less than or equal to
10714 the maximum value of the range. */
10716 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10717 default_label);
10719 /* If index is in range, it must fit in Pmode.
10720 Convert to Pmode so we can index with it. */
10721 if (mode != Pmode)
10722 index = convert_to_mode (Pmode, index, 1);
10724 /* Don't let a MEM slip thru, because then INDEX that comes
10725 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10726 and break_out_memory_refs will go to work on it and mess it up. */
10727 #ifdef PIC_CASE_VECTOR_ADDRESS
10728 if (flag_pic && GET_CODE (index) != REG)
10729 index = copy_to_mode_reg (Pmode, index);
10730 #endif
10732 /* If flag_force_addr were to affect this address
10733 it could interfere with the tricky assumptions made
10734 about addresses that contain label-refs,
10735 which may be valid only very near the tablejump itself. */
10736 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10737 GET_MODE_SIZE, because this indicates how large insns are. The other
10738 uses should all be Pmode, because they are addresses. This code
10739 could fail if addresses and insns are not the same size. */
10740 index = gen_rtx_PLUS (Pmode,
10741 gen_rtx_MULT (Pmode, index,
10742 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10743 gen_rtx_LABEL_REF (Pmode, table_label));
10744 #ifdef PIC_CASE_VECTOR_ADDRESS
10745 if (flag_pic)
10746 index = PIC_CASE_VECTOR_ADDRESS (index);
10747 else
10748 #endif
10749 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10750 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10751 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10752 RTX_UNCHANGING_P (vector) = 1;
10753 convert_move (temp, vector, 0);
10755 emit_jump_insn (gen_tablejump (temp, table_label));
10757 /* If we are generating PIC code or if the table is PC-relative, the
10758 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10759 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10760 emit_barrier ();
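/* An illustrative sketch, plain C rather than RTL, of the dispatch set up
   above for a non-PIC, absolute case vector; the names are schematic:

       if ((unsigned) index > (unsigned) range)
	 goto default_label;
       entry = table[index];    each entry is GET_MODE_SIZE
				(CASE_VECTOR_MODE) bytes wide
       goto *entry;                                                       */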
int
10764 try_tablejump (index_type, index_expr, minval, range,
10765 table_label, default_label)
10766 tree index_type, index_expr, minval, range;
10767 rtx table_label, default_label;
10769 rtx index;
10771 if (! HAVE_tablejump)
10772 return 0;
10774 index_expr = fold (build (MINUS_EXPR, index_type,
10775 convert (index_type, index_expr),
10776 convert (index_type, minval)));
10777 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10778 emit_queue ();
10779 index = protect_from_queue (index, 0);
10780 do_pending_stack_adjust ();
10782 do_tablejump (index, TYPE_MODE (index_type),
10783 convert_modes (TYPE_MODE (index_type),
10784 TYPE_MODE (TREE_TYPE (range)),
10785 expand_expr (range, NULL_RTX,
10786 VOIDmode, 0),
10787 TREE_UNSIGNED (TREE_TYPE (range))),
10788 table_label, default_label);
10789 return 1;
10792 #include "gt-expr.h"