1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
50 /* Decide whether a function's arguments should be processed
51 from first to last or from last to first.
53 They should if the stack and args grow in opposite directions, but
54 only if we have push insns. */
56 #ifdef PUSH_ROUNDING
58 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59 #define PUSH_ARGS_REVERSED /* If it's last to first. */
60 #endif
62 #endif
64 #ifndef STACK_PUSH_CODE
65 #ifdef STACK_GROWS_DOWNWARD
66 #define STACK_PUSH_CODE PRE_DEC
67 #else
68 #define STACK_PUSH_CODE PRE_INC
69 #endif
70 #endif
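/* Illustration (sketch only; REG and MODE stand for a hypothetical
   register and its mode): with the PRE_DEC default above, pushing REG
   amounts to

     emit_move_insn (gen_rtx_MEM (mode,
                                  gen_rtx_PRE_DEC (Pmode,
                                                   stack_pointer_rtx)),
                     reg);

   i.e. the stack pointer is decremented and the value is stored at the
   new top of stack by a single auto-modifying address.  */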
72 /* Assume that case vectors are not pc-relative. */
73 #ifndef CASE_VECTOR_PC_RELATIVE
74 #define CASE_VECTOR_PC_RELATIVE 0
75 #endif
77 /* If this is nonzero, we do not bother generating VOLATILE
78 around volatile memory references, and we are willing to
79 output indirect addresses. If cse is to follow, we reject
80 indirect addresses so a useful potential cse is generated;
81 if it is used only once, instruction combination will produce
82 the same indirect address eventually. */
83 int cse_not_expected;
85 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
86 static tree placeholder_list = 0;
88 /* This structure is used by move_by_pieces to describe the move to
89 be performed. */
90 struct move_by_pieces
92 rtx to;
93 rtx to_addr;
94 int autinc_to;
95 int explicit_inc_to;
96 rtx from;
97 rtx from_addr;
98 int autinc_from;
99 int explicit_inc_from;
100 unsigned HOST_WIDE_INT len;
101 HOST_WIDE_INT offset;
102 int reverse;
105 /* This structure is used by store_by_pieces to describe the clear to
106 be performed. */
108 struct store_by_pieces
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 unsigned HOST_WIDE_INT len;
115 HOST_WIDE_INT offset;
116 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
117 PTR constfundata;
118 int reverse;
121 extern struct obstack permanent_obstack;
123 static rtx enqueue_insn PARAMS ((rtx, rtx));
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
125 PARAMS ((unsigned HOST_WIDE_INT,
126 unsigned int));
127 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces *));
129 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
130 enum machine_mode));
131 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
132 unsigned int));
133 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
134 unsigned int));
135 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
136 enum machine_mode,
137 struct store_by_pieces *));
138 static rtx compress_float_constant PARAMS ((rtx, rtx));
139 static rtx get_subtarget PARAMS ((rtx));
140 static int is_zeros_p PARAMS ((tree));
141 static int mostly_zeros_p PARAMS ((tree));
142 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int));
145 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
146 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
147 HOST_WIDE_INT, enum machine_mode,
148 tree, enum machine_mode, int, tree,
149 int));
150 static rtx var_rtx PARAMS ((tree));
151 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
152 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
153 static int is_aligning_offset PARAMS ((tree, tree));
154 static rtx expand_increment PARAMS ((tree, int, int));
155 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
156 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
157 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
158 rtx, rtx));
159 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
160 #ifdef PUSH_ROUNDING
161 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
162 #endif
163 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
165 /* Record for each mode whether we can move a register directly to or
166 from an object of that mode in memory. If we can't, we won't try
167 to use that mode directly when accessing a field of that mode. */
169 static char direct_load[NUM_MACHINE_MODES];
170 static char direct_store[NUM_MACHINE_MODES];
172 /* Record for each mode whether we can float-extend from memory. */
174 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
176 /* If a memory-to-memory move would take MOVE_RATIO or more simple
177 move-instruction sequences, we will do a movstr or libcall instead. */
179 #ifndef MOVE_RATIO
180 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
181 #define MOVE_RATIO 2
182 #else
183 /* If we are optimizing for space (-Os), cut down the default move ratio. */
184 #define MOVE_RATIO (optimize_size ? 3 : 15)
185 #endif
186 #endif
188 /* This macro is used to determine whether move_by_pieces should be called
189 to perform a structure copy. */
190 #ifndef MOVE_BY_PIECES_P
191 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
193 #endif
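/* Worked example (assuming a 32-bit target with MOVE_MAX of 4 and the
   default MOVE_RATIO of 15): copying 11 bytes known to be 32-bit
   aligned takes 2 SImode + 1 HImode + 1 QImode moves, so
   move_by_pieces_ninsns returns 4, MOVE_BY_PIECES_P is true, and the
   copy is expanded inline instead of through a movstr pattern or a
   library call.  */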
195 /* If a clear memory operation would take CLEAR_RATIO or more simple
196 move-instruction sequences, we will do a clrstr or libcall instead. */
198 #ifndef CLEAR_RATIO
199 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
200 #define CLEAR_RATIO 2
201 #else
202 /* If we are optimizing for space, cut down the default clear ratio. */
203 #define CLEAR_RATIO (optimize_size ? 3 : 15)
204 #endif
205 #endif
207 /* This macro is used to determine whether clear_by_pieces should be
208 called to clear storage. */
209 #ifndef CLEAR_BY_PIECES_P
210 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
211 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
212 #endif
214 /* This array records the insn_code of insns to perform block moves. */
215 enum insn_code movstr_optab[NUM_MACHINE_MODES];
217 /* This array records the insn_code of insns to perform block clears. */
218 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
220 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
222 #ifndef SLOW_UNALIGNED_ACCESS
223 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
224 #endif
226 /* This is run once per compilation to set up which modes can be used
227 directly in memory and to initialize the block move optab. */
229 void
230 init_expr_once ()
232 rtx insn, pat;
233 enum machine_mode mode;
234 int num_clobbers;
235 rtx mem, mem1;
236 rtx reg;
238 /* Try indexing by frame ptr and try by stack ptr.
239 It is known that on the Convex the stack ptr isn't a valid index.
240 With luck, one or the other is valid on any machine. */
241 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
242 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
244 /* A scratch register we can modify in-place below to avoid
245 useless RTL allocations. */
246 reg = gen_rtx_REG (VOIDmode, -1);
248 insn = rtx_alloc (INSN);
249 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
250 PATTERN (insn) = pat;
252 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
253 mode = (enum machine_mode) ((int) mode + 1))
255 int regno;
257 direct_load[(int) mode] = direct_store[(int) mode] = 0;
258 PUT_MODE (mem, mode);
259 PUT_MODE (mem1, mode);
260 PUT_MODE (reg, mode);
262 /* See if there is some register that can be used in this mode and
263 directly loaded or stored from memory. */
265 if (mode != VOIDmode && mode != BLKmode)
266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
267 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
268 regno++)
270 if (! HARD_REGNO_MODE_OK (regno, mode))
271 continue;
273 REGNO (reg) = regno;
275 SET_SRC (pat) = mem;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
280 SET_SRC (pat) = mem1;
281 SET_DEST (pat) = reg;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_load[(int) mode] = 1;
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
290 SET_SRC (pat) = reg;
291 SET_DEST (pat) = mem1;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_store[(int) mode] = 1;
297 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
299 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
300 mode = GET_MODE_WIDER_MODE (mode))
302 enum machine_mode srcmode;
303 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
304 srcmode = GET_MODE_WIDER_MODE (srcmode))
306 enum insn_code ic;
308 ic = can_extend_p (mode, srcmode, 0);
309 if (ic == CODE_FOR_nothing)
310 continue;
312 PUT_MODE (mem, srcmode);
314 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
315 float_extend_from_mem[mode][srcmode] = true;
320 /* This is run at the start of compiling a function. */
322 void
323 init_expr ()
325 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
327 pending_chain = 0;
328 pending_stack_adjust = 0;
329 stack_pointer_delta = 0;
330 inhibit_defer_pop = 0;
331 saveregs_value = 0;
332 apply_args_value = 0;
333 forced_labels = 0;
336 /* Small sanity check that the queue is empty at the end of a function. */
338 void
339 finish_expr_for_function ()
341 if (pending_chain)
342 abort ();
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
355 static rtx
356 enqueue_insn (var, body)
357 rtx var, body;
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
379 rtx
380 protect_from_queue (x, modify)
381 rtx x;
382 int modify;
384 RTX_CODE code = GET_CODE (x);
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
389 return x;
390 #endif
392 if (code != QUEUED)
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
398 shared. */
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
402 rtx y = XEXP (x, 0);
403 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
405 if (QUEUED_INSN (y))
407 rtx temp = gen_reg_rtx (GET_MODE (x));
409 emit_insn_before (gen_move_insn (temp, new),
410 QUEUED_INSN (y));
411 return temp;
414 /* Copy the address into a pseudo, so that the returned value
415 remains correct across calls to emit_queue. */
416 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
419 /* Otherwise, recursively protect the subexpressions of all
420 the kinds of rtx's that can contain a QUEUED. */
421 if (code == MEM)
423 rtx tem = protect_from_queue (XEXP (x, 0), 0);
424 if (tem != XEXP (x, 0))
426 x = copy_rtx (x);
427 XEXP (x, 0) = tem;
430 else if (code == PLUS || code == MULT)
432 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
433 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
434 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
436 x = copy_rtx (x);
437 XEXP (x, 0) = new0;
438 XEXP (x, 1) = new1;
441 return x;
443 /* If the increment has not happened, use the variable itself. Copy it
444 into a new pseudo so that the value remains correct across calls to
445 emit_queue. */
446 if (QUEUED_INSN (x) == 0)
447 return copy_to_reg (QUEUED_VAR (x));
448 /* If the increment has happened and a pre-increment copy exists,
449 use that copy. */
450 if (QUEUED_COPY (x) != 0)
451 return QUEUED_COPY (x);
452 /* The increment has happened but we haven't set up a pre-increment copy.
453 Set one up now, and use it. */
454 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
455 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
456 QUEUED_INSN (x));
457 return QUEUED_COPY (x);
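/* A minimal sketch of the protocol described above (X and Y are
   hypothetical rtx's that may mention a QUEUED):

     x = protect_from_queue (x, 0);     (X is only read)
     y = protect_from_queue (y, 1);     (Y will be modified)
     emit_move_insn (y, x);
     ...
     emit_queue ();                     (flush the pending increments)

   The results of protect_from_queue must be used right away; holding
   them across a later emit_queue is unsafe, as noted above.  */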
460 /* Return nonzero if X contains a QUEUED expression:
461 if it contains anything that will be altered by a queued increment.
462 We handle only combinations of MEM, PLUS, MINUS and MULT operators
463 since memory addresses generally contain only those. */
465 int
466 queued_subexp_p (x)
467 rtx x;
469 enum rtx_code code = GET_CODE (x);
470 switch (code)
472 case QUEUED:
473 return 1;
474 case MEM:
475 return queued_subexp_p (XEXP (x, 0));
476 case MULT:
477 case PLUS:
478 case MINUS:
479 return (queued_subexp_p (XEXP (x, 0))
480 || queued_subexp_p (XEXP (x, 1)));
481 default:
482 return 0;
486 /* Perform all the pending incrementations. */
488 void
489 emit_queue ()
491 rtx p;
492 while ((p = pending_chain))
494 rtx body = QUEUED_BODY (p);
496 switch (GET_CODE (body))
498 case INSN:
499 case JUMP_INSN:
500 case CALL_INSN:
501 case CODE_LABEL:
502 case BARRIER:
503 case NOTE:
504 QUEUED_INSN (p) = body;
505 emit_insn (body);
506 break;
508 #ifdef ENABLE_CHECKING
509 case SEQUENCE:
510 abort ();
511 break;
512 #endif
514 default:
515 QUEUED_INSN (p) = emit_insn (body);
516 break;
519 pending_chain = QUEUED_NEXT (p);
523 /* Copy data from FROM to TO, where the machine modes are not the same.
524 Both modes may be integer, or both may be floating.
525 UNSIGNEDP should be nonzero if FROM is an unsigned type.
526 This causes zero-extension instead of sign-extension. */
528 void
529 convert_move (to, from, unsignedp)
530 rtx to, from;
531 int unsignedp;
533 enum machine_mode to_mode = GET_MODE (to);
534 enum machine_mode from_mode = GET_MODE (from);
535 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
536 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
537 enum insn_code code;
538 rtx libcall;
540 /* rtx code for making an equivalent value. */
541 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
543 to = protect_from_queue (to, 1);
544 from = protect_from_queue (from, 0);
546 if (to_real != from_real)
547 abort ();
549 /* If FROM is a SUBREG that indicates that we have already done at least
550 the required extension, strip it. We don't handle such SUBREGs as
551 TO here. */
553 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
554 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
555 >= GET_MODE_SIZE (to_mode))
556 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
557 from = gen_lowpart (to_mode, from), from_mode = to_mode;
559 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
560 abort ();
562 if (to_mode == from_mode
563 || (from_mode == VOIDmode && CONSTANT_P (from)))
565 emit_move_insn (to, from);
566 return;
569 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
571 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
572 abort ();
574 if (VECTOR_MODE_P (to_mode))
575 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
576 else
577 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
579 emit_move_insn (to, from);
580 return;
583 if (to_real != from_real)
584 abort ();
586 if (to_real)
588 rtx value, insns;
590 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
592 /* Try converting directly if the insn is supported. */
593 if ((code = can_extend_p (to_mode, from_mode, 0))
594 != CODE_FOR_nothing)
596 emit_unop_insn (code, to, from, UNKNOWN);
597 return;
601 #ifdef HAVE_trunchfqf2
602 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
604 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
605 return;
607 #endif
608 #ifdef HAVE_trunctqfqf2
609 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
611 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
612 return;
614 #endif
615 #ifdef HAVE_truncsfqf2
616 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
618 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
619 return;
621 #endif
622 #ifdef HAVE_truncdfqf2
623 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
625 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
626 return;
628 #endif
629 #ifdef HAVE_truncxfqf2
630 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
632 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
633 return;
635 #endif
636 #ifdef HAVE_trunctfqf2
637 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
639 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
640 return;
642 #endif
644 #ifdef HAVE_trunctqfhf2
645 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
647 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
648 return;
650 #endif
651 #ifdef HAVE_truncsfhf2
652 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
654 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
655 return;
657 #endif
658 #ifdef HAVE_truncdfhf2
659 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
661 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
662 return;
664 #endif
665 #ifdef HAVE_truncxfhf2
666 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
668 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
669 return;
671 #endif
672 #ifdef HAVE_trunctfhf2
673 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
675 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
676 return;
678 #endif
680 #ifdef HAVE_truncsftqf2
681 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
683 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
684 return;
686 #endif
687 #ifdef HAVE_truncdftqf2
688 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
690 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
691 return;
693 #endif
694 #ifdef HAVE_truncxftqf2
695 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
697 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
698 return;
700 #endif
701 #ifdef HAVE_trunctftqf2
702 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
704 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
705 return;
707 #endif
709 #ifdef HAVE_truncdfsf2
710 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
712 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
713 return;
715 #endif
716 #ifdef HAVE_truncxfsf2
717 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
719 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
720 return;
722 #endif
723 #ifdef HAVE_trunctfsf2
724 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
726 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
727 return;
729 #endif
730 #ifdef HAVE_truncxfdf2
731 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
733 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
734 return;
736 #endif
737 #ifdef HAVE_trunctfdf2
738 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
740 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
741 return;
743 #endif
745 libcall = (rtx) 0;
746 switch (from_mode)
748 case SFmode:
749 switch (to_mode)
751 case DFmode:
752 libcall = extendsfdf2_libfunc;
753 break;
755 case XFmode:
756 libcall = extendsfxf2_libfunc;
757 break;
759 case TFmode:
760 libcall = extendsftf2_libfunc;
761 break;
763 default:
764 break;
766 break;
768 case DFmode:
769 switch (to_mode)
771 case SFmode:
772 libcall = truncdfsf2_libfunc;
773 break;
775 case XFmode:
776 libcall = extenddfxf2_libfunc;
777 break;
779 case TFmode:
780 libcall = extenddftf2_libfunc;
781 break;
783 default:
784 break;
786 break;
788 case XFmode:
789 switch (to_mode)
791 case SFmode:
792 libcall = truncxfsf2_libfunc;
793 break;
795 case DFmode:
796 libcall = truncxfdf2_libfunc;
797 break;
799 default:
800 break;
802 break;
804 case TFmode:
805 switch (to_mode)
807 case SFmode:
808 libcall = trunctfsf2_libfunc;
809 break;
811 case DFmode:
812 libcall = trunctfdf2_libfunc;
813 break;
815 default:
816 break;
818 break;
820 default:
821 break;
824 if (libcall == (rtx) 0)
825 /* This conversion is not implemented yet. */
826 abort ();
828 start_sequence ();
829 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
830 1, from, from_mode);
831 insns = get_insns ();
832 end_sequence ();
833 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
834 from));
835 return;
838 /* Now both modes are integers. */
840 /* Handle expanding beyond a word. */
841 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
842 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
844 rtx insns;
845 rtx lowpart;
846 rtx fill_value;
847 rtx lowfrom;
848 int i;
849 enum machine_mode lowpart_mode;
850 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
852 /* Try converting directly if the insn is supported. */
853 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
854 != CODE_FOR_nothing)
856 /* If FROM is a SUBREG, put it into a register. Do this
857 so that we always generate the same set of insns for
858 better cse'ing; if an intermediate assignment occurred,
859 we won't be doing the operation directly on the SUBREG. */
860 if (optimize > 0 && GET_CODE (from) == SUBREG)
861 from = force_reg (from_mode, from);
862 emit_unop_insn (code, to, from, equiv_code);
863 return;
865 /* Next, try converting via full word. */
866 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
867 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
868 != CODE_FOR_nothing))
870 if (GET_CODE (to) == REG)
871 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
872 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
873 emit_unop_insn (code, to,
874 gen_lowpart (word_mode, to), equiv_code);
875 return;
878 /* No special multiword conversion insn; do it by hand. */
879 start_sequence ();
881 /* Since we will turn this into a no conflict block, we must ensure
882 that the source does not overlap the target. */
884 if (reg_overlap_mentioned_p (to, from))
885 from = force_reg (from_mode, from);
887 /* Get a copy of FROM widened to a word, if necessary. */
888 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
889 lowpart_mode = word_mode;
890 else
891 lowpart_mode = from_mode;
893 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
895 lowpart = gen_lowpart (lowpart_mode, to);
896 emit_move_insn (lowpart, lowfrom);
898 /* Compute the value to put in each remaining word. */
899 if (unsignedp)
900 fill_value = const0_rtx;
901 else
903 #ifdef HAVE_slt
904 if (HAVE_slt
905 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
906 && STORE_FLAG_VALUE == -1)
908 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
909 lowpart_mode, 0);
910 fill_value = gen_reg_rtx (word_mode);
911 emit_insn (gen_slt (fill_value));
913 else
914 #endif
916 fill_value
917 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
918 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
919 NULL_RTX, 0);
920 fill_value = convert_to_mode (word_mode, fill_value, 1);
924 /* Fill the remaining words. */
925 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
927 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
928 rtx subword = operand_subword (to, index, 1, to_mode);
930 if (subword == 0)
931 abort ();
933 if (fill_value != subword)
934 emit_move_insn (subword, fill_value);
937 insns = get_insns ();
938 end_sequence ();
940 emit_no_conflict_block (insns, to, from, NULL_RTX,
941 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
942 return;
945 /* Truncating multi-word to a word or less. */
946 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
947 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
949 if (!((GET_CODE (from) == MEM
950 && ! MEM_VOLATILE_P (from)
951 && direct_load[(int) to_mode]
952 && ! mode_dependent_address_p (XEXP (from, 0)))
953 || GET_CODE (from) == REG
954 || GET_CODE (from) == SUBREG))
955 from = force_reg (from_mode, from);
956 convert_move (to, gen_lowpart (word_mode, from), 0);
957 return;
960 /* Handle pointer conversion. */ /* SPEE 900220. */
961 if (to_mode == PQImode)
963 if (from_mode != QImode)
964 from = convert_to_mode (QImode, from, unsignedp);
966 #ifdef HAVE_truncqipqi2
967 if (HAVE_truncqipqi2)
969 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
970 return;
972 #endif /* HAVE_truncqipqi2 */
973 abort ();
976 if (from_mode == PQImode)
978 if (to_mode != QImode)
980 from = convert_to_mode (QImode, from, unsignedp);
981 from_mode = QImode;
983 else
985 #ifdef HAVE_extendpqiqi2
986 if (HAVE_extendpqiqi2)
988 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
989 return;
991 #endif /* HAVE_extendpqiqi2 */
992 abort ();
996 if (to_mode == PSImode)
998 if (from_mode != SImode)
999 from = convert_to_mode (SImode, from, unsignedp);
1001 #ifdef HAVE_truncsipsi2
1002 if (HAVE_truncsipsi2)
1004 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1005 return;
1007 #endif /* HAVE_truncsipsi2 */
1008 abort ();
1011 if (from_mode == PSImode)
1013 if (to_mode != SImode)
1015 from = convert_to_mode (SImode, from, unsignedp);
1016 from_mode = SImode;
1018 else
1020 #ifdef HAVE_extendpsisi2
1021 if (! unsignedp && HAVE_extendpsisi2)
1023 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1024 return;
1026 #endif /* HAVE_extendpsisi2 */
1027 #ifdef HAVE_zero_extendpsisi2
1028 if (unsignedp && HAVE_zero_extendpsisi2)
1030 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1031 return;
1033 #endif /* HAVE_zero_extendpsisi2 */
1034 abort ();
1038 if (to_mode == PDImode)
1040 if (from_mode != DImode)
1041 from = convert_to_mode (DImode, from, unsignedp);
1043 #ifdef HAVE_truncdipdi2
1044 if (HAVE_truncdipdi2)
1046 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1047 return;
1049 #endif /* HAVE_truncdipdi2 */
1050 abort ();
1053 if (from_mode == PDImode)
1055 if (to_mode != DImode)
1057 from = convert_to_mode (DImode, from, unsignedp);
1058 from_mode = DImode;
1060 else
1062 #ifdef HAVE_extendpdidi2
1063 if (HAVE_extendpdidi2)
1065 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1066 return;
1068 #endif /* HAVE_extendpdidi2 */
1069 abort ();
1073 /* Now follow all the conversions between integers
1074 no more than a word long. */
1076 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1077 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1078 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1079 GET_MODE_BITSIZE (from_mode)))
1081 if (!((GET_CODE (from) == MEM
1082 && ! MEM_VOLATILE_P (from)
1083 && direct_load[(int) to_mode]
1084 && ! mode_dependent_address_p (XEXP (from, 0)))
1085 || GET_CODE (from) == REG
1086 || GET_CODE (from) == SUBREG))
1087 from = force_reg (from_mode, from);
1088 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1089 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1090 from = copy_to_reg (from);
1091 emit_move_insn (to, gen_lowpart (to_mode, from));
1092 return;
1095 /* Handle extension. */
1096 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1098 /* Convert directly if that works. */
1099 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1100 != CODE_FOR_nothing)
1102 if (flag_force_mem)
1103 from = force_not_mem (from);
1105 emit_unop_insn (code, to, from, equiv_code);
1106 return;
1108 else
1110 enum machine_mode intermediate;
1111 rtx tmp;
1112 tree shift_amount;
1114 /* Search for a mode to convert via. */
1115 for (intermediate = from_mode; intermediate != VOIDmode;
1116 intermediate = GET_MODE_WIDER_MODE (intermediate))
1117 if (((can_extend_p (to_mode, intermediate, unsignedp)
1118 != CODE_FOR_nothing)
1119 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1120 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1121 GET_MODE_BITSIZE (intermediate))))
1122 && (can_extend_p (intermediate, from_mode, unsignedp)
1123 != CODE_FOR_nothing))
1125 convert_move (to, convert_to_mode (intermediate, from,
1126 unsignedp), unsignedp);
1127 return;
1130 /* No suitable intermediate mode.
1131 Generate what we need with shifts. */
1132 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1133 - GET_MODE_BITSIZE (from_mode), 0);
1134 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1135 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1136 to, unsignedp);
1137 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1138 to, unsignedp);
1139 if (tmp != to)
1140 emit_move_insn (to, tmp);
1141 return;
1145 /* Support special truncate insns for certain modes. */
1147 if (from_mode == DImode && to_mode == SImode)
1149 #ifdef HAVE_truncdisi2
1150 if (HAVE_truncdisi2)
1152 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1153 return;
1155 #endif
1156 convert_move (to, force_reg (from_mode, from), unsignedp);
1157 return;
1160 if (from_mode == DImode && to_mode == HImode)
1162 #ifdef HAVE_truncdihi2
1163 if (HAVE_truncdihi2)
1165 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1166 return;
1168 #endif
1169 convert_move (to, force_reg (from_mode, from), unsignedp);
1170 return;
1173 if (from_mode == DImode && to_mode == QImode)
1175 #ifdef HAVE_truncdiqi2
1176 if (HAVE_truncdiqi2)
1178 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1179 return;
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1186 if (from_mode == SImode && to_mode == HImode)
1188 #ifdef HAVE_truncsihi2
1189 if (HAVE_truncsihi2)
1191 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1192 return;
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1199 if (from_mode == SImode && to_mode == QImode)
1201 #ifdef HAVE_truncsiqi2
1202 if (HAVE_truncsiqi2)
1204 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1205 return;
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1212 if (from_mode == HImode && to_mode == QImode)
1214 #ifdef HAVE_trunchiqi2
1215 if (HAVE_trunchiqi2)
1217 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1218 return;
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1225 if (from_mode == TImode && to_mode == DImode)
1227 #ifdef HAVE_trunctidi2
1228 if (HAVE_trunctidi2)
1230 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1231 return;
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1238 if (from_mode == TImode && to_mode == SImode)
1240 #ifdef HAVE_trunctisi2
1241 if (HAVE_trunctisi2)
1243 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1244 return;
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1251 if (from_mode == TImode && to_mode == HImode)
1253 #ifdef HAVE_trunctihi2
1254 if (HAVE_trunctihi2)
1256 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1257 return;
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1264 if (from_mode == TImode && to_mode == QImode)
1266 #ifdef HAVE_trunctiqi2
1267 if (HAVE_trunctiqi2)
1269 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1270 return;
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1277 /* Handle truncation of volatile memrefs, and so on;
1278 the things that couldn't be truncated directly,
1279 and for which there was no special instruction. */
1280 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1282 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1283 emit_move_insn (to, temp);
1284 return;
1287 /* Mode combination is not recognized. */
1288 abort ();
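/* Example use of convert_move (sketch; FROM is a hypothetical SImode
   rtx):

     rtx to = gen_reg_rtx (DImode);
     convert_move (to, from, 0);

   sign-extends FROM into the new DImode register; passing 1 for
   UNSIGNEDP would request zero extension instead.  */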
1291 /* Return an rtx for a value that would result
1292 from converting X to mode MODE.
1293 Both X and MODE may be floating, or both integer.
1294 UNSIGNEDP is nonzero if X is an unsigned value.
1295 This can be done by referring to a part of X in place
1296 or by copying to a new temporary with conversion.
1298 This function *must not* call protect_from_queue
1299 except when putting X into an insn (in which case convert_move does it). */
1301 rtx
1302 convert_to_mode (mode, x, unsignedp)
1303 enum machine_mode mode;
1304 rtx x;
1305 int unsignedp;
1307 return convert_modes (mode, VOIDmode, x, unsignedp);
1310 /* Return an rtx for a value that would result
1311 from converting X from mode OLDMODE to mode MODE.
1312 Both modes may be floating, or both integer.
1313 UNSIGNEDP is nonzero if X is an unsigned value.
1315 This can be done by referring to a part of X in place
1316 or by copying to a new temporary with conversion.
1318 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1320 This function *must not* call protect_from_queue
1321 except when putting X into an insn (in which case convert_move does it). */
1323 rtx
1324 convert_modes (mode, oldmode, x, unsignedp)
1325 enum machine_mode mode, oldmode;
1326 rtx x;
1327 int unsignedp;
1329 rtx temp;
1331 /* If FROM is a SUBREG that indicates that we have already done at least
1332 the required extension, strip it. */
1334 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1335 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1336 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1337 x = gen_lowpart (mode, x);
1339 if (GET_MODE (x) != VOIDmode)
1340 oldmode = GET_MODE (x);
1342 if (mode == oldmode)
1343 return x;
1345 /* There is one case that we must handle specially: If we are converting
1346 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1347 we are to interpret the constant as unsigned, gen_lowpart will do
1348 the wrong thing if the constant appears negative. What we want to do is
1349 make the high-order word of the constant zero, not all ones. */
1351 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1352 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1353 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1355 HOST_WIDE_INT val = INTVAL (x);
1357 if (oldmode != VOIDmode
1358 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1360 int width = GET_MODE_BITSIZE (oldmode);
1362 /* We need to zero extend VAL. */
1363 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1366 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
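/* Worked instance (assuming a 32-bit HOST_WIDE_INT): converting
   (const_int -1), viewed as an unsigned QImode value, to DImode must
   yield the constant 255.  The masking above leaves VAL == 0xff and
   the result is immed_double_const (0xff, 0, DImode), rather than a
   DImode constant with the high-order word all ones.  */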
1369 /* We can do this with a gen_lowpart if both desired and current modes
1370 are integer, and this is either a constant integer, a register, or a
1371 non-volatile MEM. Except for the constant case where MODE is no
1372 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1374 if ((GET_CODE (x) == CONST_INT
1375 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1376 || (GET_MODE_CLASS (mode) == MODE_INT
1377 && GET_MODE_CLASS (oldmode) == MODE_INT
1378 && (GET_CODE (x) == CONST_DOUBLE
1379 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1380 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1381 && direct_load[(int) mode])
1382 || (GET_CODE (x) == REG
1383 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1384 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1386 /* ?? If we don't know OLDMODE, we have to assume here that
1387 X does not need sign- or zero-extension. This may not be
1388 the case, but it's the best we can do. */
1389 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1390 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1392 HOST_WIDE_INT val = INTVAL (x);
1393 int width = GET_MODE_BITSIZE (oldmode);
1395 /* We must sign or zero-extend in this case. Start by
1396 zero-extending, then sign extend if we need to. */
1397 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1398 if (! unsignedp
1399 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1400 val |= (HOST_WIDE_INT) (-1) << width;
1402 return gen_int_mode (val, mode);
1405 return gen_lowpart (mode, x);
1408 temp = gen_reg_rtx (mode);
1409 convert_move (temp, x, unsignedp);
1410 return temp;
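/* Example use of convert_modes (sketch; X is a hypothetical SImode
   rtx):

     x = convert_modes (QImode, SImode, x, 1);

   either returns a QImode lowpart reference to X or emits a
   convert_move into a fresh QImode pseudo and returns that.  */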
1413 /* This macro is used to determine what the largest unit size that
1414 move_by_pieces can use is. */
1416 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1417 move efficiently, as opposed to MOVE_MAX which is the maximum
1418 number of bytes we can move with a single instruction. */
1420 #ifndef MOVE_MAX_PIECES
1421 #define MOVE_MAX_PIECES MOVE_MAX
1422 #endif
1424 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1425 store efficiently. Due to internal GCC limitations, this is
1426 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1427 for an immediate constant. */
1429 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1431 /* Generate several move instructions to copy LEN bytes from block FROM to
1432 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1433 and TO through protect_from_queue before calling.
1435 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1436 used to push FROM to the stack.
1438 ALIGN is maximum alignment we can assume. */
1440 void
1441 move_by_pieces (to, from, len, align)
1442 rtx to, from;
1443 unsigned HOST_WIDE_INT len;
1444 unsigned int align;
1446 struct move_by_pieces data;
1447 rtx to_addr, from_addr = XEXP (from, 0);
1448 unsigned int max_size = MOVE_MAX_PIECES + 1;
1449 enum machine_mode mode = VOIDmode, tmode;
1450 enum insn_code icode;
1452 data.offset = 0;
1453 data.from_addr = from_addr;
1454 if (to)
1456 to_addr = XEXP (to, 0);
1457 data.to = to;
1458 data.autinc_to
1459 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1460 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1461 data.reverse
1462 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1464 else
1466 to_addr = NULL_RTX;
1467 data.to = NULL_RTX;
1468 data.autinc_to = 1;
1469 #ifdef STACK_GROWS_DOWNWARD
1470 data.reverse = 1;
1471 #else
1472 data.reverse = 0;
1473 #endif
1475 data.to_addr = to_addr;
1476 data.from = from;
1477 data.autinc_from
1478 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1479 || GET_CODE (from_addr) == POST_INC
1480 || GET_CODE (from_addr) == POST_DEC);
1482 data.explicit_inc_from = 0;
1483 data.explicit_inc_to = 0;
1484 if (data.reverse) data.offset = len;
1485 data.len = len;
1487 /* If copying requires more than two move insns,
1488 copy addresses to registers (to make displacements shorter)
1489 and use post-increment if available. */
1490 if (!(data.autinc_from && data.autinc_to)
1491 && move_by_pieces_ninsns (len, align) > 2)
1493 /* Find the mode of the largest move... */
1494 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1495 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1496 if (GET_MODE_SIZE (tmode) < max_size)
1497 mode = tmode;
1499 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1501 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1502 data.autinc_from = 1;
1503 data.explicit_inc_from = -1;
1505 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1507 data.from_addr = copy_addr_to_reg (from_addr);
1508 data.autinc_from = 1;
1509 data.explicit_inc_from = 1;
1511 if (!data.autinc_from && CONSTANT_P (from_addr))
1512 data.from_addr = copy_addr_to_reg (from_addr);
1513 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1515 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1516 data.autinc_to = 1;
1517 data.explicit_inc_to = -1;
1519 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1521 data.to_addr = copy_addr_to_reg (to_addr);
1522 data.autinc_to = 1;
1523 data.explicit_inc_to = 1;
1525 if (!data.autinc_to && CONSTANT_P (to_addr))
1526 data.to_addr = copy_addr_to_reg (to_addr);
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1530 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1531 align = MOVE_MAX * BITS_PER_UNIT;
1533 /* First move what we can in the largest integer mode, then go to
1534 successively smaller modes. */
1536 while (max_size > 1)
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1541 mode = tmode;
1543 if (mode == VOIDmode)
1544 break;
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1548 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1550 max_size = GET_MODE_SIZE (mode);
1553 /* The code above should have handled everything. */
1554 if (data.len > 0)
1555 abort ();
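/* Minimal usage sketch (DST and SRC are hypothetical BLKmode MEMs that
   have already been through protect_from_queue; LEN and ALIGN are a
   byte count and an alignment in bits):

     if (MOVE_BY_PIECES_P (len, align))
       move_by_pieces (dst, src, len, align);

   emit_block_move below uses essentially this pattern for small
   constant-length copies.  */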
1558 /* Return number of insns required to move L bytes by pieces.
1559 ALIGN (in bits) is maximum alignment we can assume. */
1561 static unsigned HOST_WIDE_INT
1562 move_by_pieces_ninsns (l, align)
1563 unsigned HOST_WIDE_INT l;
1564 unsigned int align;
1566 unsigned HOST_WIDE_INT n_insns = 0;
1567 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1569 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1570 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1571 align = MOVE_MAX * BITS_PER_UNIT;
1573 while (max_size > 1)
1575 enum machine_mode mode = VOIDmode, tmode;
1576 enum insn_code icode;
1578 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1579 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1580 if (GET_MODE_SIZE (tmode) < max_size)
1581 mode = tmode;
1583 if (mode == VOIDmode)
1584 break;
1586 icode = mov_optab->handlers[(int) mode].insn_code;
1587 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1588 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1590 max_size = GET_MODE_SIZE (mode);
1593 if (l)
1594 abort ();
1595 return n_insns;
1598 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1599 with move instructions for mode MODE. GENFUN is the gen_... function
1600 to make a move insn for that mode. DATA has all the other info. */
1602 static void
1603 move_by_pieces_1 (genfun, mode, data)
1604 rtx (*genfun) PARAMS ((rtx, ...));
1605 enum machine_mode mode;
1606 struct move_by_pieces *data;
1608 unsigned int size = GET_MODE_SIZE (mode);
1609 rtx to1 = NULL_RTX, from1;
1611 while (data->len >= size)
1613 if (data->reverse)
1614 data->offset -= size;
1616 if (data->to)
1618 if (data->autinc_to)
1619 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1620 data->offset);
1621 else
1622 to1 = adjust_address (data->to, mode, data->offset);
1625 if (data->autinc_from)
1626 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1627 data->offset);
1628 else
1629 from1 = adjust_address (data->from, mode, data->offset);
1631 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1632 emit_insn (gen_add2_insn (data->to_addr,
1633 GEN_INT (-(HOST_WIDE_INT)size)));
1634 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1635 emit_insn (gen_add2_insn (data->from_addr,
1636 GEN_INT (-(HOST_WIDE_INT)size)));
1638 if (data->to)
1639 emit_insn ((*genfun) (to1, from1));
1640 else
1642 #ifdef PUSH_ROUNDING
1643 emit_single_push_insn (mode, from1, NULL);
1644 #else
1645 abort ();
1646 #endif
1649 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1650 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1651 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1652 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1654 if (! data->reverse)
1655 data->offset += size;
1657 data->len -= size;
1661 /* Emit code to move a block Y to a block X.
1662 This may be done with string-move instructions,
1663 with multiple scalar move instructions, or with a library call.
1665 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1666 with mode BLKmode.
1667 SIZE is an rtx that says how long they are.
1668 ALIGN is the maximum alignment we can assume they have.
1670 Return the address of the new block, if memcpy is called and returns it,
1671 0 otherwise. */
1673 static GTY(()) tree block_move_fn;
1674 rtx
1675 emit_block_move (x, y, size)
1676 rtx x, y;
1677 rtx size;
1679 rtx retval = 0;
1680 #ifdef TARGET_MEM_FUNCTIONS
1681 tree call_expr, arg_list;
1682 #endif
1683 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1685 if (GET_MODE (x) != BLKmode)
1686 abort ();
1688 if (GET_MODE (y) != BLKmode)
1689 abort ();
1691 x = protect_from_queue (x, 1);
1692 y = protect_from_queue (y, 0);
1693 size = protect_from_queue (size, 0);
1695 if (GET_CODE (x) != MEM)
1696 abort ();
1697 if (GET_CODE (y) != MEM)
1698 abort ();
1699 if (size == 0)
1700 abort ();
1702 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1703 move_by_pieces (x, y, INTVAL (size), align);
1704 else
1706 /* Try the most limited insn first, because there's no point
1707 including more than one in the machine description unless
1708 the more limited one has some advantage. */
1710 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1711 enum machine_mode mode;
1713 /* Since this is a move insn, we don't care about volatility. */
1714 volatile_ok = 1;
1716 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1717 mode = GET_MODE_WIDER_MODE (mode))
1719 enum insn_code code = movstr_optab[(int) mode];
1720 insn_operand_predicate_fn pred;
1722 if (code != CODE_FOR_nothing
1723 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1724 here because if SIZE is less than the mode mask, as it is
1725 returned by the macro, it will definitely be less than the
1726 actual mode mask. */
1727 && ((GET_CODE (size) == CONST_INT
1728 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1729 <= (GET_MODE_MASK (mode) >> 1)))
1730 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1731 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1732 || (*pred) (x, BLKmode))
1733 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1734 || (*pred) (y, BLKmode))
1735 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1736 || (*pred) (opalign, VOIDmode)))
1738 rtx op2;
1739 rtx last = get_last_insn ();
1740 rtx pat;
1742 op2 = convert_to_mode (mode, size, 1);
1743 pred = insn_data[(int) code].operand[2].predicate;
1744 if (pred != 0 && ! (*pred) (op2, mode))
1745 op2 = copy_to_mode_reg (mode, op2);
1747 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1748 if (pat)
1750 emit_insn (pat);
1751 volatile_ok = 0;
1752 return 0;
1754 else
1755 delete_insns_since (last);
1759 volatile_ok = 0;
1761 /* X, Y, or SIZE may have been passed through protect_from_queue.
1763 It is unsafe to save the value generated by protect_from_queue
1764 and reuse it later. Consider what happens if emit_queue is
1765 called before the return value from protect_from_queue is used.
1767 Expansion of the CALL_EXPR below will call emit_queue before
1768 we are finished emitting RTL for argument setup. So if we are
1769 not careful we could get the wrong value for an argument.
1771 To avoid this problem we go ahead and emit code to copy X, Y &
1772 SIZE into new pseudos. We can then place those new pseudos
1773 into an RTL_EXPR and use them later, even after a call to
1774 emit_queue.
1776 Note this is not strictly needed for library calls since they
1777 do not call emit_queue before loading their arguments. However,
1778 we may need to have library calls call emit_queue in the future
1779 since failing to do so could cause problems for targets which
1780 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1781 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1782 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1784 #ifdef TARGET_MEM_FUNCTIONS
1785 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1786 #else
1787 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1788 TREE_UNSIGNED (integer_type_node));
1789 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1790 #endif
1792 #ifdef TARGET_MEM_FUNCTIONS
1793 /* It is incorrect to use the libcall calling conventions to call
1794 memcpy in this context.
1796 This could be a user call to memcpy and the user may wish to
1797 examine the return value from memcpy.
1799 For targets where libcalls and normal calls have different conventions
1800 for returning pointers, we could end up generating incorrect code.
1802 So instead of using a libcall sequence we build up a suitable
1803 CALL_EXPR and expand the call in the normal fashion. */
1804 if (block_move_fn == NULL_TREE)
1806 tree fntype;
1808 /* This was copied from except.c, I don't know if all this is
1809 necessary in this context or not. */
1810 block_move_fn = get_identifier ("memcpy");
1811 fntype = build_pointer_type (void_type_node);
1812 fntype = build_function_type (fntype, NULL_TREE);
1813 block_move_fn = build_decl (FUNCTION_DECL, block_move_fn, fntype);
1814 DECL_EXTERNAL (block_move_fn) = 1;
1815 TREE_PUBLIC (block_move_fn) = 1;
1816 DECL_ARTIFICIAL (block_move_fn) = 1;
1817 TREE_NOTHROW (block_move_fn) = 1;
1818 make_decl_rtl (block_move_fn, NULL);
1819 assemble_external (block_move_fn);
1822 /* We need to make an argument list for the function call.
1824 memcpy has three arguments, the first two are void * addresses and
1825 the last is a size_t byte count for the copy. */
1826 arg_list
1827 = build_tree_list (NULL_TREE,
1828 make_tree (build_pointer_type (void_type_node), x));
1829 TREE_CHAIN (arg_list)
1830 = build_tree_list (NULL_TREE,
1831 make_tree (build_pointer_type (void_type_node), y));
1832 TREE_CHAIN (TREE_CHAIN (arg_list))
1833 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1834 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1836 /* Now we have to build up the CALL_EXPR itself. */
1837 call_expr = build1 (ADDR_EXPR,
1838 build_pointer_type (TREE_TYPE (block_move_fn)),
1839 block_move_fn);
1840 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_move_fn)),
1841 call_expr, arg_list, NULL_TREE);
1842 TREE_SIDE_EFFECTS (call_expr) = 1;
1844 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1845 #else
1846 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1847 VOIDmode, 3, y, Pmode, x, Pmode,
1848 convert_to_mode (TYPE_MODE (integer_type_node), size,
1849 TREE_UNSIGNED (integer_type_node)),
1850 TYPE_MODE (integer_type_node));
1851 #endif
1853 /* If we are initializing a readonly value, show the above call
1854 clobbered it. Otherwise, a load from it may erroneously be hoisted
1855 from a loop. */
1856 if (RTX_UNCHANGING_P (x))
1857 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1860 return retval;
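/* Usage sketch (DST and SRC are hypothetical BLKmode MEMs whose
   alignment is recorded in MEM_ALIGN):

     emit_block_move (dst, src, GEN_INT (nbytes));

   Small constant sizes are expanded by move_by_pieces, larger ones by
   a movstrM pattern when the target provides one, and everything else
   falls back to the memcpy/bcopy call emitted above.  */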
1863 /* Copy all or part of a value X into registers starting at REGNO.
1864 The number of registers to be filled is NREGS. */
1866 void
1867 move_block_to_reg (regno, x, nregs, mode)
1868 int regno;
1869 rtx x;
1870 int nregs;
1871 enum machine_mode mode;
1873 int i;
1874 #ifdef HAVE_load_multiple
1875 rtx pat;
1876 rtx last;
1877 #endif
1879 if (nregs == 0)
1880 return;
1882 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1883 x = validize_mem (force_const_mem (mode, x));
1885 /* See if the machine can do this with a load multiple insn. */
1886 #ifdef HAVE_load_multiple
1887 if (HAVE_load_multiple)
1889 last = get_last_insn ();
1890 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1891 GEN_INT (nregs));
1892 if (pat)
1894 emit_insn (pat);
1895 return;
1897 else
1898 delete_insns_since (last);
1900 #endif
1902 for (i = 0; i < nregs; i++)
1903 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1904 operand_subword_force (x, i, mode));
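/* Call sketch (register number and mode are arbitrary): to load a
   two-word DImode value X into consecutive hard registers starting at
   register 4,

     move_block_to_reg (4, x, 2, DImode);

   which uses a load_multiple pattern when available and otherwise one
   word move per register.  */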
1907 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1908 The number of registers to be filled is NREGS. SIZE indicates the number
1909 of bytes in the object X. */
1911 void
1912 move_block_from_reg (regno, x, nregs, size)
1913 int regno;
1914 rtx x;
1915 int nregs;
1916 int size;
1918 int i;
1919 #ifdef HAVE_store_multiple
1920 rtx pat;
1921 rtx last;
1922 #endif
1923 enum machine_mode mode;
1925 if (nregs == 0)
1926 return;
1928 /* If SIZE is that of a mode no bigger than a word, just use that
1929 mode's store operation. */
1930 if (size <= UNITS_PER_WORD
1931 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1932 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1934 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1935 return;
1938 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1939 to the left before storing to memory. Note that the previous test
1940 doesn't handle all cases (e.g. SIZE == 3). */
1941 if (size < UNITS_PER_WORD
1942 && BYTES_BIG_ENDIAN
1943 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1945 rtx tem = operand_subword (x, 0, 1, BLKmode);
1946 rtx shift;
1948 if (tem == 0)
1949 abort ();
1951 shift = expand_shift (LSHIFT_EXPR, word_mode,
1952 gen_rtx_REG (word_mode, regno),
1953 build_int_2 ((UNITS_PER_WORD - size)
1954 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1955 emit_move_insn (tem, shift);
1956 return;
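/* Worked instance of the shift above (SIZE == 3 on a 32-bit
   big-endian target): the value occupies the three low-order bytes of
   the word register, so it is shifted left by (4 - 3) * 8 = 8 bits so
   that those bytes end up in the three lowest-addressed bytes of the
   memory word.  */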
1959 /* See if the machine can do this with a store multiple insn. */
1960 #ifdef HAVE_store_multiple
1961 if (HAVE_store_multiple)
1963 last = get_last_insn ();
1964 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1965 GEN_INT (nregs));
1966 if (pat)
1968 emit_insn (pat);
1969 return;
1971 else
1972 delete_insns_since (last);
1974 #endif
1976 for (i = 0; i < nregs; i++)
1978 rtx tem = operand_subword (x, i, 1, BLKmode);
1980 if (tem == 0)
1981 abort ();
1983 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1987 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1988 registers represented by a PARALLEL. SSIZE represents the total size of
1989 block SRC in bytes, or -1 if not known. */
1990 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1991 the balance will be in what would be the low-order memory addresses, i.e.
1992 left justified for big endian, right justified for little endian. This
1993 happens to be true for the targets currently using this support. If this
1994 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1995 would be needed. */
1997 void
1998 emit_group_load (dst, orig_src, ssize)
1999 rtx dst, orig_src;
2000 int ssize;
2002 rtx *tmps, src;
2003 int start, i;
2005 if (GET_CODE (dst) != PARALLEL)
2006 abort ();
2008 /* Check for a NULL entry, used to indicate that the parameter goes
2009 both on the stack and in registers. */
2010 if (XEXP (XVECEXP (dst, 0, 0), 0))
2011 start = 0;
2012 else
2013 start = 1;
2015 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2017 /* Process the pieces. */
2018 for (i = start; i < XVECLEN (dst, 0); i++)
2020 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2021 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2022 unsigned int bytelen = GET_MODE_SIZE (mode);
2023 int shift = 0;
2025 /* Handle trailing fragments that run over the size of the struct. */
2026 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2028 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2029 bytelen = ssize - bytepos;
2030 if (bytelen <= 0)
2031 abort ();
2034 /* If we won't be loading directly from memory, protect the real source
2035 from strange tricks we might play; but make sure that the source can
2036 be loaded directly into the destination. */
2037 src = orig_src;
2038 if (GET_CODE (orig_src) != MEM
2039 && (!CONSTANT_P (orig_src)
2040 || (GET_MODE (orig_src) != mode
2041 && GET_MODE (orig_src) != VOIDmode)))
2043 if (GET_MODE (orig_src) == VOIDmode)
2044 src = gen_reg_rtx (mode);
2045 else
2046 src = gen_reg_rtx (GET_MODE (orig_src));
2048 emit_move_insn (src, orig_src);
2051 /* Optimize the access just a bit. */
2052 if (GET_CODE (src) == MEM
2053 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2054 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2055 && bytelen == GET_MODE_SIZE (mode))
2057 tmps[i] = gen_reg_rtx (mode);
2058 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2060 else if (GET_CODE (src) == CONCAT)
2062 if ((bytepos == 0
2063 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2064 || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2065 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2067 tmps[i] = XEXP (src, bytepos != 0);
2068 if (! CONSTANT_P (tmps[i])
2069 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2070 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2071 0, 1, NULL_RTX, mode, mode, ssize);
2073 else if (bytepos == 0)
2075 rtx mem = assign_stack_temp (GET_MODE (src),
2076 GET_MODE_SIZE (GET_MODE (src)), 0);
2077 emit_move_insn (mem, src);
2078 tmps[i] = adjust_address (mem, mode, 0);
2080 else
2081 abort ();
2083 else if (CONSTANT_P (src)
2084 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2085 tmps[i] = src;
2086 else
2087 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2088 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2089 mode, mode, ssize);
2091 if (BYTES_BIG_ENDIAN && shift)
2092 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2093 tmps[i], 0, OPTAB_WIDEN);
2096 emit_queue ();
2098 /* Copy the extracted pieces into the proper (probable) hard regs. */
2099 for (i = start; i < XVECLEN (dst, 0); i++)
2100 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2103 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2104 registers represented by a PARALLEL. SSIZE represents the total size of
2105 block DST, or -1 if not known. */
2107 void
2108 emit_group_store (orig_dst, src, ssize)
2109 rtx orig_dst, src;
2110 int ssize;
2112 rtx *tmps, dst;
2113 int start, i;
2115 if (GET_CODE (src) != PARALLEL)
2116 abort ();
2118 /* Check for a NULL entry, used to indicate that the parameter goes
2119 both on the stack and in registers. */
2120 if (XEXP (XVECEXP (src, 0, 0), 0))
2121 start = 0;
2122 else
2123 start = 1;
2125 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2127 /* Copy the (probable) hard regs into pseudos. */
2128 for (i = start; i < XVECLEN (src, 0); i++)
2130 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2131 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2132 emit_move_insn (tmps[i], reg);
2134 emit_queue ();
2136 /* If we won't be storing directly into memory, protect the real destination
2137 from strange tricks we might play. */
2138 dst = orig_dst;
2139 if (GET_CODE (dst) == PARALLEL)
2141 rtx temp;
2143 /* We can get a PARALLEL dst if there is a conditional expression in
2144 a return statement. In that case, the dst and src are the same,
2145 so no action is necessary. */
2146 if (rtx_equal_p (dst, src))
2147 return;
2149 /* It is unclear if we can ever reach here, but we may as well handle
2150 it. Allocate a temporary, and split this into a store/load to/from
2151 the temporary. */
2153 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2154 emit_group_store (temp, src, ssize);
2155 emit_group_load (dst, temp, ssize);
2156 return;
2158 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2160 dst = gen_reg_rtx (GET_MODE (orig_dst));
2161 /* Make life a bit easier for combine. */
2162 emit_move_insn (dst, const0_rtx);
2165 /* Process the pieces. */
2166 for (i = start; i < XVECLEN (src, 0); i++)
2168 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2169 enum machine_mode mode = GET_MODE (tmps[i]);
2170 unsigned int bytelen = GET_MODE_SIZE (mode);
2171 rtx dest = dst;
2173 /* Handle trailing fragments that run over the size of the struct. */
2174 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2176 if (BYTES_BIG_ENDIAN)
2178 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2179 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2180 tmps[i], 0, OPTAB_WIDEN);
2182 bytelen = ssize - bytepos;
2185 if (GET_CODE (dst) == CONCAT)
2187 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2188 dest = XEXP (dst, 0);
2189 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2191 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2192 dest = XEXP (dst, 1);
2194 else
2195 abort ();
2198 /* Optimize the access just a bit. */
2199 if (GET_CODE (dest) == MEM
2200 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2201 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2202 && bytelen == GET_MODE_SIZE (mode))
2203 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2204 else
2205 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2206 mode, tmps[i], ssize);
2209 emit_queue ();
2211 /* Copy from the pseudo into the (probable) hard reg. */
2212 if (GET_CODE (dst) == REG)
2213 emit_move_insn (orig_dst, dst);
2216 /* Generate code to copy a BLKmode object of TYPE out of a
2217 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2218 is null, a stack temporary is created. TGTBLK is returned.
2220 The primary purpose of this routine is to handle functions
2221 that return BLKmode structures in registers. Some machines
2222 (the PA for example) want to return all small structures
2223 in registers regardless of the structure's alignment. */
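/* Illustrative sketch, not part of the original source: a caller expanding
   such a function return might write

       rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   and, because TGTBLK is null, get back a freshly assigned stack temporary
   holding the structure; hard_return_reg and type are placeholders.  */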
2226 copy_blkmode_from_reg (tgtblk, srcreg, type)
2227 rtx tgtblk;
2228 rtx srcreg;
2229 tree type;
2231 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2232 rtx src = NULL, dst = NULL;
2233 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2234 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2236 if (tgtblk == 0)
2238 tgtblk = assign_temp (build_qualified_type (type,
2239 (TYPE_QUALS (type)
2240 | TYPE_QUAL_CONST)),
2241 0, 1, 1);
2242 preserve_temp_slots (tgtblk);
2245 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2246 into a new pseudo which is a full word.
2248 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2249 the wrong part of the register gets copied so we fake a type conversion
2250 in place. */
2251 if (GET_MODE (srcreg) != BLKmode
2252 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2254 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2255 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2256 else
2257 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2260 /* Structures whose size is not a multiple of a word are aligned
2261 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2262 machine, this means we must skip the empty high order bytes when
2263 calculating the bit offset. */
2264 if (BYTES_BIG_ENDIAN
2265 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2266 && bytes % UNITS_PER_WORD)
2267 big_endian_correction
2268 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2270 /* Copy the structure BITSIZE bits at a time.
2272 We could probably emit more efficient code for machines which do not use
2273 strict alignment, but it doesn't seem worth the effort at the current
2274 time. */
2275 for (bitpos = 0, xbitpos = big_endian_correction;
2276 bitpos < bytes * BITS_PER_UNIT;
2277 bitpos += bitsize, xbitpos += bitsize)
2279 /* We need a new source operand each time xbitpos is on a
2280 word boundary and when xbitpos == big_endian_correction
2281 (the first time through). */
2282 if (xbitpos % BITS_PER_WORD == 0
2283 || xbitpos == big_endian_correction)
2284 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2285 GET_MODE (srcreg));
2287 /* We need a new destination operand each time bitpos is on
2288 a word boundary. */
2289 if (bitpos % BITS_PER_WORD == 0)
2290 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2292 /* Use xbitpos for the source extraction (right justified) and
2293 bitpos for the destination store (left justified). */
2294 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2295 extract_bit_field (src, bitsize,
2296 xbitpos % BITS_PER_WORD, 1,
2297 NULL_RTX, word_mode, word_mode,
2298 BITS_PER_WORD),
2299 BITS_PER_WORD);
2302 return tgtblk;
2305 /* Add a USE expression for REG to the (possibly empty) list pointed
2306 to by CALL_FUSAGE. REG must denote a hard register. */
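/* Illustrative sketch, not part of the original source: a call expander
   might record that a call reads an argument register with

       rtx fusage = NULL_RTX;
       use_reg (&fusage, gen_rtx_REG (SImode, 0));

   and later attach FUSAGE to the call insn's CALL_INSN_FUNCTION_USAGE;
   register 0 and SImode are arbitrary example choices.  */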
2308 void
2309 use_reg (call_fusage, reg)
2310 rtx *call_fusage, reg;
2312 if (GET_CODE (reg) != REG
2313 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2314 abort ();
2316 *call_fusage
2317 = gen_rtx_EXPR_LIST (VOIDmode,
2318 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2321 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2322 starting at REGNO. All of these registers must be hard registers. */
2324 void
2325 use_regs (call_fusage, regno, nregs)
2326 rtx *call_fusage;
2327 int regno;
2328 int nregs;
2330 int i;
2332 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2333 abort ();
2335 for (i = 0; i < nregs; i++)
2336 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2339 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2340 PARALLEL REGS. This is for calls that pass values in multiple
2341 non-contiguous locations. The Irix 6 ABI has examples of this. */
2343 void
2344 use_group_regs (call_fusage, regs)
2345 rtx *call_fusage;
2346 rtx regs;
2348 int i;
2350 for (i = 0; i < XVECLEN (regs, 0); i++)
2352 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2354 /* A NULL entry means the parameter goes both on the stack and in
2355 registers. This can also be a MEM for targets that pass values
2356 partially on the stack and partially in registers. */
2357 if (reg != 0 && GET_CODE (reg) == REG)
2358 use_reg (call_fusage, reg);
2363 /* Determine whether the LEN bytes generated by CONSTFUN can be
2364 stored to memory using several move instructions. CONSTFUNDATA is
2365 a pointer which will be passed as argument in every CONSTFUN call.
2366 ALIGN is maximum alignment we can assume. Return nonzero if a
2367 call to store_by_pieces should succeed. */
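/* Illustrative sketch, not part of the original source: a CONSTFUN callback
   just returns the constant to be stored at a given offset in a given mode;
   clear_by_pieces_1 further below, which always returns const0_rtx, is the
   simplest example.  A caller would typically pair the two routines as

       if (can_store_by_pieces (len, my_constfun, my_data, align))
         store_by_pieces (to, len, my_constfun, my_data, align);

   where my_constfun and my_data are hypothetical names.  */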
2370 can_store_by_pieces (len, constfun, constfundata, align)
2371 unsigned HOST_WIDE_INT len;
2372 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2373 PTR constfundata;
2374 unsigned int align;
2376 unsigned HOST_WIDE_INT max_size, l;
2377 HOST_WIDE_INT offset = 0;
2378 enum machine_mode mode, tmode;
2379 enum insn_code icode;
2380 int reverse;
2381 rtx cst;
2383 if (! MOVE_BY_PIECES_P (len, align))
2384 return 0;
2386 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2387 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2388 align = MOVE_MAX * BITS_PER_UNIT;
2390 /* We would first store what we can in the largest integer mode, then go to
2391 successively smaller modes. */
2393 for (reverse = 0;
2394 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2395 reverse++)
2397 l = len;
2398 mode = VOIDmode;
2399 max_size = STORE_MAX_PIECES + 1;
2400 while (max_size > 1)
2402 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2403 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2404 if (GET_MODE_SIZE (tmode) < max_size)
2405 mode = tmode;
2407 if (mode == VOIDmode)
2408 break;
2410 icode = mov_optab->handlers[(int) mode].insn_code;
2411 if (icode != CODE_FOR_nothing
2412 && align >= GET_MODE_ALIGNMENT (mode))
2414 unsigned int size = GET_MODE_SIZE (mode);
2416 while (l >= size)
2418 if (reverse)
2419 offset -= size;
2421 cst = (*constfun) (constfundata, offset, mode);
2422 if (!LEGITIMATE_CONSTANT_P (cst))
2423 return 0;
2425 if (!reverse)
2426 offset += size;
2428 l -= size;
2432 max_size = GET_MODE_SIZE (mode);
2435 /* The code above should have handled everything. */
2436 if (l != 0)
2437 abort ();
2440 return 1;
2443 /* Generate several move instructions to store LEN bytes generated by
2444 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2445 pointer which will be passed as argument in every CONSTFUN call.
2446 ALIGN is maximum alignment we can assume. */
2448 void
2449 store_by_pieces (to, len, constfun, constfundata, align)
2450 rtx to;
2451 unsigned HOST_WIDE_INT len;
2452 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2453 PTR constfundata;
2454 unsigned int align;
2456 struct store_by_pieces data;
2458 if (! MOVE_BY_PIECES_P (len, align))
2459 abort ();
2460 to = protect_from_queue (to, 1);
2461 data.constfun = constfun;
2462 data.constfundata = constfundata;
2463 data.len = len;
2464 data.to = to;
2465 store_by_pieces_1 (&data, align);
2468 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2469 rtx with BLKmode). The caller must pass TO through protect_from_queue
2470 before calling. ALIGN is maximum alignment we can assume. */
2472 static void
2473 clear_by_pieces (to, len, align)
2474 rtx to;
2475 unsigned HOST_WIDE_INT len;
2476 unsigned int align;
2478 struct store_by_pieces data;
2480 data.constfun = clear_by_pieces_1;
2481 data.constfundata = NULL;
2482 data.len = len;
2483 data.to = to;
2484 store_by_pieces_1 (&data, align);
2487 /* Callback routine for clear_by_pieces.
2488 Return const0_rtx unconditionally. */
2490 static rtx
2491 clear_by_pieces_1 (data, offset, mode)
2492 PTR data ATTRIBUTE_UNUSED;
2493 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2494 enum machine_mode mode ATTRIBUTE_UNUSED;
2496 return const0_rtx;
2499 /* Subroutine of clear_by_pieces and store_by_pieces.
2500 Generate several move instructions to store LEN bytes of block TO. (A MEM
2501 rtx with BLKmode). The caller must pass TO through protect_from_queue
2502 before calling. ALIGN is maximum alignment we can assume. */
2504 static void
2505 store_by_pieces_1 (data, align)
2506 struct store_by_pieces *data;
2507 unsigned int align;
2509 rtx to_addr = XEXP (data->to, 0);
2510 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2511 enum machine_mode mode = VOIDmode, tmode;
2512 enum insn_code icode;
2514 data->offset = 0;
2515 data->to_addr = to_addr;
2516 data->autinc_to
2517 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2518 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2520 data->explicit_inc_to = 0;
2521 data->reverse
2522 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2523 if (data->reverse)
2524 data->offset = data->len;
2526 /* If storing requires more than two move insns,
2527 copy addresses to registers (to make displacements shorter)
2528 and use post-increment if available. */
2529 if (!data->autinc_to
2530 && move_by_pieces_ninsns (data->len, align) > 2)
2532 /* Determine the main mode we'll be using. */
2533 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2534 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2535 if (GET_MODE_SIZE (tmode) < max_size)
2536 mode = tmode;
2538 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2540 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2541 data->autinc_to = 1;
2542 data->explicit_inc_to = -1;
2545 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2546 && ! data->autinc_to)
2548 data->to_addr = copy_addr_to_reg (to_addr);
2549 data->autinc_to = 1;
2550 data->explicit_inc_to = 1;
2553 if ( !data->autinc_to && CONSTANT_P (to_addr))
2554 data->to_addr = copy_addr_to_reg (to_addr);
2557 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2558 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2559 align = MOVE_MAX * BITS_PER_UNIT;
2561 /* First store what we can in the largest integer mode, then go to
2562 successively smaller modes. */
2564 while (max_size > 1)
2566 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2567 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2568 if (GET_MODE_SIZE (tmode) < max_size)
2569 mode = tmode;
2571 if (mode == VOIDmode)
2572 break;
2574 icode = mov_optab->handlers[(int) mode].insn_code;
2575 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2576 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2578 max_size = GET_MODE_SIZE (mode);
2581 /* The code above should have handled everything. */
2582 if (data->len != 0)
2583 abort ();
2586 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2587 with move instructions for mode MODE. GENFUN is the gen_... function
2588 to make a move insn for that mode. DATA has all the other info. */
2590 static void
2591 store_by_pieces_2 (genfun, mode, data)
2592 rtx (*genfun) PARAMS ((rtx, ...));
2593 enum machine_mode mode;
2594 struct store_by_pieces *data;
2596 unsigned int size = GET_MODE_SIZE (mode);
2597 rtx to1, cst;
2599 while (data->len >= size)
2601 if (data->reverse)
2602 data->offset -= size;
2604 if (data->autinc_to)
2605 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2606 data->offset);
2607 else
2608 to1 = adjust_address (data->to, mode, data->offset);
2610 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2611 emit_insn (gen_add2_insn (data->to_addr,
2612 GEN_INT (-(HOST_WIDE_INT) size)));
2614 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2615 emit_insn ((*genfun) (to1, cst));
2617 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2618 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2620 if (! data->reverse)
2621 data->offset += size;
2623 data->len -= size;
2627 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2628 its length in bytes. */
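/* Illustrative sketch, not part of the original source: zeroing a 32-byte
   BLKmode MEM could be requested with

       clear_storage (blk_mem, GEN_INT (32));

   the routine picks clear_by_pieces, a clrstr pattern or a memset/bzero
   call based on the size and the alignment recorded in the MEM;
   blk_mem is a placeholder.  */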
2630 static GTY(()) tree block_clear_fn;
2632 clear_storage (object, size)
2633 rtx object;
2634 rtx size;
2636 #ifdef TARGET_MEM_FUNCTIONS
2637 tree call_expr, arg_list;
2638 #endif
2639 rtx retval = 0;
2640 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2641 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2643 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2644 just move a zero. Otherwise, do this a piece at a time. */
2645 if (GET_MODE (object) != BLKmode
2646 && GET_CODE (size) == CONST_INT
2647 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2648 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2649 else
2651 object = protect_from_queue (object, 1);
2652 size = protect_from_queue (size, 0);
2654 if (GET_CODE (size) == CONST_INT
2655 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2656 clear_by_pieces (object, INTVAL (size), align);
2657 else
2659 /* Try the most limited insn first, because there's no point
2660 including more than one in the machine description unless
2661 the more limited one has some advantage. */
2663 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2664 enum machine_mode mode;
2666 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2667 mode = GET_MODE_WIDER_MODE (mode))
2669 enum insn_code code = clrstr_optab[(int) mode];
2670 insn_operand_predicate_fn pred;
2672 if (code != CODE_FOR_nothing
2673 /* We don't need MODE to be narrower than
2674 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2675 the mode mask, as it is returned by the macro, it will
2676 definitely be less than the actual mode mask. */
2677 && ((GET_CODE (size) == CONST_INT
2678 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2679 <= (GET_MODE_MASK (mode) >> 1)))
2680 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2681 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2682 || (*pred) (object, BLKmode))
2683 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2684 || (*pred) (opalign, VOIDmode)))
2686 rtx op1;
2687 rtx last = get_last_insn ();
2688 rtx pat;
2690 op1 = convert_to_mode (mode, size, 1);
2691 pred = insn_data[(int) code].operand[1].predicate;
2692 if (pred != 0 && ! (*pred) (op1, mode))
2693 op1 = copy_to_mode_reg (mode, op1);
2695 pat = GEN_FCN ((int) code) (object, op1, opalign);
2696 if (pat)
2698 emit_insn (pat);
2699 return 0;
2701 else
2702 delete_insns_since (last);
2706 /* OBJECT or SIZE may have been passed through protect_from_queue.
2708 It is unsafe to save the value generated by protect_from_queue
2709 and reuse it later. Consider what happens if emit_queue is
2710 called before the return value from protect_from_queue is used.
2712 Expansion of the CALL_EXPR below will call emit_queue before
2713 we are finished emitting RTL for argument setup. So if we are
2714 not careful we could get the wrong value for an argument.
2716 To avoid this problem we go ahead and emit code to copy OBJECT
2717 and SIZE into new pseudos. We can then place those new pseudos
2718 into an RTL_EXPR and use them later, even after a call to
2719 emit_queue.
2721 Note this is not strictly needed for library calls since they
2722 do not call emit_queue before loading their arguments. However,
2723 we may need to have library calls call emit_queue in the future
2724 since failing to do so could cause problems for targets which
2725 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2726 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2728 #ifdef TARGET_MEM_FUNCTIONS
2729 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2730 #else
2731 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2732 TREE_UNSIGNED (integer_type_node));
2733 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2734 #endif
2736 #ifdef TARGET_MEM_FUNCTIONS
2737 /* It is incorrect to use the libcall calling conventions to call
2738 memset in this context.
2740 This could be a user call to memset and the user may wish to
2741 examine the return value from memset.
2743 For targets where libcalls and normal calls have different
2744 conventions for returning pointers, we could end up generating
2745 incorrect code.
2747 So instead of using a libcall sequence we build up a suitable
2748 CALL_EXPR and expand the call in the normal fashion. */
2749 if (block_clear_fn == NULL_TREE)
2751 tree fntype;
2753 /* This was copied from except.c; I don't know whether all of this is
2754 necessary in this context or not. */
2755 block_clear_fn = get_identifier ("memset");
2756 fntype = build_pointer_type (void_type_node);
2757 fntype = build_function_type (fntype, NULL_TREE);
2758 block_clear_fn = build_decl (FUNCTION_DECL, block_clear_fn,
2759 fntype);
2760 DECL_EXTERNAL (block_clear_fn) = 1;
2761 TREE_PUBLIC (block_clear_fn) = 1;
2762 DECL_ARTIFICIAL (block_clear_fn) = 1;
2763 TREE_NOTHROW (block_clear_fn) = 1;
2764 make_decl_rtl (block_clear_fn, NULL);
2765 assemble_external (block_clear_fn);
2768 /* We need to make an argument list for the function call.
2770 memset has three arguments, the first is a void * address, the
2771 second an integer with the initialization value, the last is a
2772 size_t byte count for the copy. */
2773 arg_list
2774 = build_tree_list (NULL_TREE,
2775 make_tree (build_pointer_type (void_type_node),
2776 object));
2777 TREE_CHAIN (arg_list)
2778 = build_tree_list (NULL_TREE,
2779 make_tree (integer_type_node, const0_rtx));
2780 TREE_CHAIN (TREE_CHAIN (arg_list))
2781 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2782 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2784 /* Now we have to build up the CALL_EXPR itself. */
2785 call_expr = build1 (ADDR_EXPR,
2786 build_pointer_type (TREE_TYPE (block_clear_fn)),
2787 block_clear_fn);
2788 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_clear_fn)),
2789 call_expr, arg_list, NULL_TREE);
2790 TREE_SIDE_EFFECTS (call_expr) = 1;
2792 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2793 #else
2794 emit_library_call (bzero_libfunc, LCT_NORMAL,
2795 VOIDmode, 2, object, Pmode, size,
2796 TYPE_MODE (integer_type_node));
2797 #endif
2799 /* If we are initializing a readonly value, show the above call
2800 clobbered it. Otherwise, a load from it may erroneously be
2801 hoisted from a loop. */
2802 if (RTX_UNCHANGING_P (object))
2803 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2807 return retval;
2810 /* Generate code to copy Y into X.
2811 Both Y and X must have the same mode, except that
2812 Y can be a constant with VOIDmode.
2813 This mode cannot be BLKmode; use emit_block_move for that.
2815 Return the last instruction emitted. */
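/* Illustrative sketch, not part of the original source: the common case is
   simply

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, GEN_INT (42));

   which copies the constant into a fresh pseudo; SImode and 42 are
   arbitrary example choices.  */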
2818 emit_move_insn (x, y)
2819 rtx x, y;
2821 enum machine_mode mode = GET_MODE (x);
2822 rtx y_cst = NULL_RTX;
2823 rtx last_insn;
2825 x = protect_from_queue (x, 1);
2826 y = protect_from_queue (y, 0);
2828 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2829 abort ();
2831 /* Never force constant_p_rtx to memory. */
2832 if (GET_CODE (y) == CONSTANT_P_RTX)
2834 else if (CONSTANT_P (y))
2836 if (optimize
2837 && FLOAT_MODE_P (GET_MODE (x))
2838 && (last_insn = compress_float_constant (x, y)))
2839 return last_insn;
2841 if (!LEGITIMATE_CONSTANT_P (y))
2843 y_cst = y;
2844 y = force_const_mem (mode, y);
2848 /* If X or Y are memory references, verify that their addresses are valid
2849 for the machine. */
2850 if (GET_CODE (x) == MEM
2851 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2852 && ! push_operand (x, GET_MODE (x)))
2853 || (flag_force_addr
2854 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2855 x = validize_mem (x);
2857 if (GET_CODE (y) == MEM
2858 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2859 || (flag_force_addr
2860 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2861 y = validize_mem (y);
2863 if (mode == BLKmode)
2864 abort ();
2866 last_insn = emit_move_insn_1 (x, y);
2868 if (y_cst && GET_CODE (x) == REG)
2869 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2871 return last_insn;
2874 /* Low level part of emit_move_insn.
2875 Called just like emit_move_insn, but assumes X and Y
2876 are basically valid. */
2879 emit_move_insn_1 (x, y)
2880 rtx x, y;
2882 enum machine_mode mode = GET_MODE (x);
2883 enum machine_mode submode;
2884 enum mode_class class = GET_MODE_CLASS (mode);
2886 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2887 abort ();
2889 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2890 return
2891 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2893 /* Expand complex moves by moving real part and imag part, if possible. */
2894 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2895 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2896 * BITS_PER_UNIT),
2897 (class == MODE_COMPLEX_INT
2898 ? MODE_INT : MODE_FLOAT),
2900 && (mov_optab->handlers[(int) submode].insn_code
2901 != CODE_FOR_nothing))
2903 /* Don't split destination if it is a stack push. */
2904 int stack = push_operand (x, GET_MODE (x));
2906 #ifdef PUSH_ROUNDING
2907 /* In case we output to the stack, but the size is smaller than the machine
2908 can push exactly, we need to use move instructions. */
2909 if (stack
2910 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2911 != GET_MODE_SIZE (submode)))
2913 rtx temp;
2914 HOST_WIDE_INT offset1, offset2;
2916 /* Do not use anti_adjust_stack, since we don't want to update
2917 stack_pointer_delta. */
2918 temp = expand_binop (Pmode,
2919 #ifdef STACK_GROWS_DOWNWARD
2920 sub_optab,
2921 #else
2922 add_optab,
2923 #endif
2924 stack_pointer_rtx,
2925 GEN_INT
2926 (PUSH_ROUNDING
2927 (GET_MODE_SIZE (GET_MODE (x)))),
2928 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2930 if (temp != stack_pointer_rtx)
2931 emit_move_insn (stack_pointer_rtx, temp);
2933 #ifdef STACK_GROWS_DOWNWARD
2934 offset1 = 0;
2935 offset2 = GET_MODE_SIZE (submode);
2936 #else
2937 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2938 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2939 + GET_MODE_SIZE (submode));
2940 #endif
2942 emit_move_insn (change_address (x, submode,
2943 gen_rtx_PLUS (Pmode,
2944 stack_pointer_rtx,
2945 GEN_INT (offset1))),
2946 gen_realpart (submode, y));
2947 emit_move_insn (change_address (x, submode,
2948 gen_rtx_PLUS (Pmode,
2949 stack_pointer_rtx,
2950 GEN_INT (offset2))),
2951 gen_imagpart (submode, y));
2953 else
2954 #endif
2955 /* If this is a stack push, push the highpart first, so it
2956 will be in the argument order.
2958 In that case, change_address is used only to convert
2959 the mode, not to change the address. */
2960 if (stack)
2962 /* Note that the real part always precedes the imag part in memory
2963 regardless of machine's endianness. */
2964 #ifdef STACK_GROWS_DOWNWARD
2965 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2966 (gen_rtx_MEM (submode, XEXP (x, 0)),
2967 gen_imagpart (submode, y)));
2968 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2969 (gen_rtx_MEM (submode, XEXP (x, 0)),
2970 gen_realpart (submode, y)));
2971 #else
2972 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2973 (gen_rtx_MEM (submode, XEXP (x, 0)),
2974 gen_realpart (submode, y)));
2975 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2976 (gen_rtx_MEM (submode, XEXP (x, 0)),
2977 gen_imagpart (submode, y)));
2978 #endif
2980 else
2982 rtx realpart_x, realpart_y;
2983 rtx imagpart_x, imagpart_y;
2985 /* If this is a complex value with each part being smaller than a
2986 word, the usual calling sequence will likely pack the pieces into
2987 a single register. Unfortunately, SUBREG of hard registers only
2988 deals in terms of words, so we have a problem converting input
2989 arguments to the CONCAT of two registers that is used elsewhere
2990 for complex values. If this is before reload, we can copy it into
2991 memory and reload. FIXME, we should see about using extract and
2992 insert on integer registers, but complex short and complex char
2993 variables should be rarely used. */
2994 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2995 && (reload_in_progress | reload_completed) == 0)
2997 int packed_dest_p
2998 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2999 int packed_src_p
3000 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3002 if (packed_dest_p || packed_src_p)
3004 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3005 ? MODE_FLOAT : MODE_INT);
3007 enum machine_mode reg_mode
3008 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3010 if (reg_mode != BLKmode)
3012 rtx mem = assign_stack_temp (reg_mode,
3013 GET_MODE_SIZE (mode), 0);
3014 rtx cmem = adjust_address (mem, mode, 0);
3016 cfun->cannot_inline
3017 = N_("function using short complex types cannot be inline");
3019 if (packed_dest_p)
3021 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3023 emit_move_insn_1 (cmem, y);
3024 return emit_move_insn_1 (sreg, mem);
3026 else
3028 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3030 emit_move_insn_1 (mem, sreg);
3031 return emit_move_insn_1 (x, cmem);
3037 realpart_x = gen_realpart (submode, x);
3038 realpart_y = gen_realpart (submode, y);
3039 imagpart_x = gen_imagpart (submode, x);
3040 imagpart_y = gen_imagpart (submode, y);
3042 /* Show the output dies here. This is necessary for SUBREGs
3043 of pseudos since we cannot track their lifetimes correctly;
3044 hard regs shouldn't appear here except as return values.
3045 We never want to emit such a clobber after reload. */
3046 if (x != y
3047 && ! (reload_in_progress || reload_completed)
3048 && (GET_CODE (realpart_x) == SUBREG
3049 || GET_CODE (imagpart_x) == SUBREG))
3050 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3052 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3053 (realpart_x, realpart_y));
3054 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3055 (imagpart_x, imagpart_y));
3058 return get_last_insn ();
3061 /* This will handle any multi-word or full-word mode that lacks a move_insn
3062 pattern. However, you will get better code if you define such patterns,
3063 even if they must turn into multiple assembler instructions. */
3064 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3066 rtx last_insn = 0;
3067 rtx seq, inner;
3068 int need_clobber;
3069 int i;
3071 #ifdef PUSH_ROUNDING
3073 /* If X is a push on the stack, do the push now and replace
3074 X with a reference to the stack pointer. */
3075 if (push_operand (x, GET_MODE (x)))
3077 rtx temp;
3078 enum rtx_code code;
3080 /* Do not use anti_adjust_stack, since we don't want to update
3081 stack_pointer_delta. */
3082 temp = expand_binop (Pmode,
3083 #ifdef STACK_GROWS_DOWNWARD
3084 sub_optab,
3085 #else
3086 add_optab,
3087 #endif
3088 stack_pointer_rtx,
3089 GEN_INT
3090 (PUSH_ROUNDING
3091 (GET_MODE_SIZE (GET_MODE (x)))),
3092 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3094 if (temp != stack_pointer_rtx)
3095 emit_move_insn (stack_pointer_rtx, temp);
3097 code = GET_CODE (XEXP (x, 0));
3099 /* Just hope that small offsets off SP are OK. */
3100 if (code == POST_INC)
3101 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3102 GEN_INT (-((HOST_WIDE_INT)
3103 GET_MODE_SIZE (GET_MODE (x)))));
3104 else if (code == POST_DEC)
3105 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3106 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3107 else
3108 temp = stack_pointer_rtx;
3110 x = change_address (x, VOIDmode, temp);
3112 #endif
3114 /* If we are in reload, see if either operand is a MEM whose address
3115 is scheduled for replacement. */
3116 if (reload_in_progress && GET_CODE (x) == MEM
3117 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3118 x = replace_equiv_address_nv (x, inner);
3119 if (reload_in_progress && GET_CODE (y) == MEM
3120 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3121 y = replace_equiv_address_nv (y, inner);
3123 start_sequence ();
3125 need_clobber = 0;
3126 for (i = 0;
3127 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3128 i++)
3130 rtx xpart = operand_subword (x, i, 1, mode);
3131 rtx ypart = operand_subword (y, i, 1, mode);
3133 /* If we can't get a part of Y, put Y into memory if it is a
3134 constant. Otherwise, force it into a register. If we still
3135 can't get a part of Y, abort. */
3136 if (ypart == 0 && CONSTANT_P (y))
3138 y = force_const_mem (mode, y);
3139 ypart = operand_subword (y, i, 1, mode);
3141 else if (ypart == 0)
3142 ypart = operand_subword_force (y, i, mode);
3144 if (xpart == 0 || ypart == 0)
3145 abort ();
3147 need_clobber |= (GET_CODE (xpart) == SUBREG);
3149 last_insn = emit_move_insn (xpart, ypart);
3152 seq = get_insns ();
3153 end_sequence ();
3155 /* Show the output dies here. This is necessary for SUBREGs
3156 of pseudos since we cannot track their lifetimes correctly;
3157 hard regs shouldn't appear here except as return values.
3158 We never want to emit such a clobber after reload. */
3159 if (x != y
3160 && ! (reload_in_progress || reload_completed)
3161 && need_clobber != 0)
3162 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3164 emit_insn (seq);
3166 return last_insn;
3168 else
3169 abort ();
3172 /* If Y is representable exactly in a narrower mode, and the target can
3173 perform the extension directly from constant or memory, then emit the
3174 move as an extension. */
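/* Illustrative sketch, not part of the original source: a DFmode store of
   the constant 1.0, which is exactly representable in SFmode, might be
   emitted on a suitable target as roughly

       (set (reg:DF d) (float_extend:DF (mem/u:SF (symbol_ref ...))))

   instead of loading a full DFmode constant; the loop below looks for the
   narrowest mode for which this is possible.  */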
3176 static rtx
3177 compress_float_constant (x, y)
3178 rtx x, y;
3180 enum machine_mode dstmode = GET_MODE (x);
3181 enum machine_mode orig_srcmode = GET_MODE (y);
3182 enum machine_mode srcmode;
3183 REAL_VALUE_TYPE r;
3185 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3187 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3188 srcmode != orig_srcmode;
3189 srcmode = GET_MODE_WIDER_MODE (srcmode))
3191 enum insn_code ic;
3192 rtx trunc_y, last_insn;
3194 /* Skip if the target can't extend this way. */
3195 ic = can_extend_p (dstmode, srcmode, 0);
3196 if (ic == CODE_FOR_nothing)
3197 continue;
3199 /* Skip if the narrowed value isn't exact. */
3200 if (! exact_real_truncate (srcmode, &r))
3201 continue;
3203 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3205 if (LEGITIMATE_CONSTANT_P (trunc_y))
3207 /* Skip if the target needs extra instructions to perform
3208 the extension. */
3209 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3210 continue;
3212 else if (float_extend_from_mem[dstmode][srcmode])
3213 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3214 else
3215 continue;
3217 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3218 last_insn = get_last_insn ();
3220 if (GET_CODE (x) == REG)
3221 REG_NOTES (last_insn)
3222 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3224 return last_insn;
3227 return NULL_RTX;
3230 /* Pushing data onto the stack. */
3232 /* Push a block of length SIZE (perhaps variable)
3233 and return an rtx to address the beginning of the block.
3234 Note that it is not possible for the value returned to be a QUEUED.
3235 The value may be virtual_outgoing_args_rtx.
3237 EXTRA is the number of bytes of padding to push in addition to SIZE.
3238 BELOW nonzero means this padding comes at low addresses;
3239 otherwise, the padding comes at high addresses. */
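/* Illustrative sketch, not part of the original source: a caller needing
   scratch space for outgoing arguments might write

       rtx addr = push_block (GEN_INT (16), 0, 0);

   which adjusts the stack by 16 bytes and returns an address for the start
   of the new block; the size is an arbitrary example value.  */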
3242 push_block (size, extra, below)
3243 rtx size;
3244 int extra, below;
3246 rtx temp;
3248 size = convert_modes (Pmode, ptr_mode, size, 1);
3249 if (CONSTANT_P (size))
3250 anti_adjust_stack (plus_constant (size, extra));
3251 else if (GET_CODE (size) == REG && extra == 0)
3252 anti_adjust_stack (size);
3253 else
3255 temp = copy_to_mode_reg (Pmode, size);
3256 if (extra != 0)
3257 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3258 temp, 0, OPTAB_LIB_WIDEN);
3259 anti_adjust_stack (temp);
3262 #ifndef STACK_GROWS_DOWNWARD
3263 if (0)
3264 #else
3265 if (1)
3266 #endif
3268 temp = virtual_outgoing_args_rtx;
3269 if (extra != 0 && below)
3270 temp = plus_constant (temp, extra);
3272 else
3274 if (GET_CODE (size) == CONST_INT)
3275 temp = plus_constant (virtual_outgoing_args_rtx,
3276 -INTVAL (size) - (below ? 0 : extra));
3277 else if (extra != 0 && !below)
3278 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3279 negate_rtx (Pmode, plus_constant (size, extra)));
3280 else
3281 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3282 negate_rtx (Pmode, size));
3285 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3288 #ifdef PUSH_ROUNDING
3290 /* Emit single push insn. */
3292 static void
3293 emit_single_push_insn (mode, x, type)
3294 rtx x;
3295 enum machine_mode mode;
3296 tree type;
3298 rtx dest_addr;
3299 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3300 rtx dest;
3301 enum insn_code icode;
3302 insn_operand_predicate_fn pred;
3304 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3305 /* If there is a push pattern, use it. Otherwise try the old way of
3306 throwing a MEM representing the push operation to the move expander. */
3307 icode = push_optab->handlers[(int) mode].insn_code;
3308 if (icode != CODE_FOR_nothing)
3310 if (((pred = insn_data[(int) icode].operand[0].predicate)
3311 && !((*pred) (x, mode))))
3312 x = force_reg (mode, x);
3313 emit_insn (GEN_FCN (icode) (x));
3314 return;
3316 if (GET_MODE_SIZE (mode) == rounded_size)
3317 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3318 else
3320 #ifdef STACK_GROWS_DOWNWARD
3321 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3322 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3323 #else
3324 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3325 GEN_INT (rounded_size));
3326 #endif
3327 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3330 dest = gen_rtx_MEM (mode, dest_addr);
3332 if (type != 0)
3334 set_mem_attributes (dest, type, 1);
3336 if (flag_optimize_sibling_calls)
3337 /* Function incoming arguments may overlap with sibling call
3338 outgoing arguments and we cannot allow reordering of reads
3339 from function arguments with stores to outgoing arguments
3340 of sibling calls. */
3341 set_mem_alias_set (dest, 0);
3343 emit_move_insn (dest, x);
3345 #endif
3347 /* Generate code to push X onto the stack, assuming it has mode MODE and
3348 type TYPE.
3349 MODE is redundant except when X is a CONST_INT (since they don't
3350 carry mode info).
3351 SIZE is an rtx for the size of data to be copied (in bytes),
3352 needed only if X is BLKmode.
3354 ALIGN (in bits) is maximum alignment we can assume.
3356 If PARTIAL and REG are both nonzero, then copy that many of the first
3357 words of X into registers starting with REG, and push the rest of X.
3358 The amount of space pushed is decreased by PARTIAL words,
3359 rounded *down* to a multiple of PARM_BOUNDARY.
3360 REG must be a hard register in this case.
3361 If REG is zero but PARTIAL is not, take all other actions for an
3362 argument partially in registers, but do not actually load any
3363 registers.
3365 EXTRA is the amount in bytes of extra space to leave next to this arg.
3366 This is ignored if an argument block has already been allocated.
3368 On a machine that lacks real push insns, ARGS_ADDR is the address of
3369 the bottom of the argument block for this call. We use indexing off there
3370 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3371 argument block has not been preallocated.
3373 ARGS_SO_FAR is the size of args previously pushed for this call.
3375 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3376 for arguments passed in registers. If nonzero, it will be the number
3377 of bytes required. */
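/* Illustrative worked example, not part of the original source: with
   UNITS_PER_WORD == 4, PARTIAL == 2 and a nonzero REG, the first 8 bytes
   of X end up in REG and REG+1 (see the emit_group_load /
   move_block_to_reg calls at the end of this function) and only the
   remainder of X is pushed or stored to the argument block.  */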
3379 void
3380 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3381 args_addr, args_so_far, reg_parm_stack_space,
3382 alignment_pad)
3383 rtx x;
3384 enum machine_mode mode;
3385 tree type;
3386 rtx size;
3387 unsigned int align;
3388 int partial;
3389 rtx reg;
3390 int extra;
3391 rtx args_addr;
3392 rtx args_so_far;
3393 int reg_parm_stack_space;
3394 rtx alignment_pad;
3396 rtx xinner;
3397 enum direction stack_direction
3398 #ifdef STACK_GROWS_DOWNWARD
3399 = downward;
3400 #else
3401 = upward;
3402 #endif
3404 /* Decide where to pad the argument: `downward' for below,
3405 `upward' for above, or `none' for don't pad it.
3406 Default is below for small data on big-endian machines; else above. */
3407 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3409 /* Invert direction if stack is post-decrement.
3410 FIXME: why? */
3411 if (STACK_PUSH_CODE == POST_DEC)
3412 if (where_pad != none)
3413 where_pad = (where_pad == downward ? upward : downward);
3415 xinner = x = protect_from_queue (x, 0);
3417 if (mode == BLKmode)
3419 /* Copy a block into the stack, entirely or partially. */
3421 rtx temp;
3422 int used = partial * UNITS_PER_WORD;
3423 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3424 int skip;
3426 if (size == 0)
3427 abort ();
3429 used -= offset;
3431 /* USED is now the # of bytes we need not copy to the stack
3432 because registers will take care of them. */
3434 if (partial != 0)
3435 xinner = adjust_address (xinner, BLKmode, used);
3437 /* If the partial register-part of the arg counts in its stack size,
3438 skip the part of stack space corresponding to the registers.
3439 Otherwise, start copying to the beginning of the stack space,
3440 by setting SKIP to 0. */
3441 skip = (reg_parm_stack_space == 0) ? 0 : used;
3443 #ifdef PUSH_ROUNDING
3444 /* Do it with several push insns if that doesn't take lots of insns
3445 and if there is no difficulty with push insns that skip bytes
3446 on the stack for alignment purposes. */
3447 if (args_addr == 0
3448 && PUSH_ARGS
3449 && GET_CODE (size) == CONST_INT
3450 && skip == 0
3451 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3452 /* Here we avoid the case of a structure whose weak alignment
3453 forces many pushes of a small amount of data,
3454 and such small pushes do rounding that causes trouble. */
3455 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3456 || align >= BIGGEST_ALIGNMENT
3457 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3458 == (align / BITS_PER_UNIT)))
3459 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3461 /* Push padding now if padding above and stack grows down,
3462 or if padding below and stack grows up.
3463 But if space already allocated, this has already been done. */
3464 if (extra && args_addr == 0
3465 && where_pad != none && where_pad != stack_direction)
3466 anti_adjust_stack (GEN_INT (extra));
3468 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3470 else
3471 #endif /* PUSH_ROUNDING */
3473 rtx target;
3475 /* Otherwise make space on the stack and copy the data
3476 to the address of that space. */
3478 /* Deduct words put into registers from the size we must copy. */
3479 if (partial != 0)
3481 if (GET_CODE (size) == CONST_INT)
3482 size = GEN_INT (INTVAL (size) - used);
3483 else
3484 size = expand_binop (GET_MODE (size), sub_optab, size,
3485 GEN_INT (used), NULL_RTX, 0,
3486 OPTAB_LIB_WIDEN);
3489 /* Get the address of the stack space.
3490 In this case, we do not deal with EXTRA separately.
3491 A single stack adjust will do. */
3492 if (! args_addr)
3494 temp = push_block (size, extra, where_pad == downward);
3495 extra = 0;
3497 else if (GET_CODE (args_so_far) == CONST_INT)
3498 temp = memory_address (BLKmode,
3499 plus_constant (args_addr,
3500 skip + INTVAL (args_so_far)));
3501 else
3502 temp = memory_address (BLKmode,
3503 plus_constant (gen_rtx_PLUS (Pmode,
3504 args_addr,
3505 args_so_far),
3506 skip));
3507 target = gen_rtx_MEM (BLKmode, temp);
3509 if (type != 0)
3511 set_mem_attributes (target, type, 1);
3512 /* Function incoming arguments may overlap with sibling call
3513 outgoing arguments and we cannot allow reordering of reads
3514 from function arguments with stores to outgoing arguments
3515 of sibling calls. */
3516 set_mem_alias_set (target, 0);
3518 else
3519 set_mem_align (target, align);
3521 /* TEMP is the address of the block. Copy the data there. */
3522 if (GET_CODE (size) == CONST_INT
3523 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3525 move_by_pieces (target, xinner, INTVAL (size), align);
3526 goto ret;
3528 else
3530 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3531 enum machine_mode mode;
3533 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3534 mode != VOIDmode;
3535 mode = GET_MODE_WIDER_MODE (mode))
3537 enum insn_code code = movstr_optab[(int) mode];
3538 insn_operand_predicate_fn pred;
3540 if (code != CODE_FOR_nothing
3541 && ((GET_CODE (size) == CONST_INT
3542 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3543 <= (GET_MODE_MASK (mode) >> 1)))
3544 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3545 && (!(pred = insn_data[(int) code].operand[0].predicate)
3546 || ((*pred) (target, BLKmode)))
3547 && (!(pred = insn_data[(int) code].operand[1].predicate)
3548 || ((*pred) (xinner, BLKmode)))
3549 && (!(pred = insn_data[(int) code].operand[3].predicate)
3550 || ((*pred) (opalign, VOIDmode))))
3552 rtx op2 = convert_to_mode (mode, size, 1);
3553 rtx last = get_last_insn ();
3554 rtx pat;
3556 pred = insn_data[(int) code].operand[2].predicate;
3557 if (pred != 0 && ! (*pred) (op2, mode))
3558 op2 = copy_to_mode_reg (mode, op2);
3560 pat = GEN_FCN ((int) code) (target, xinner,
3561 op2, opalign);
3562 if (pat)
3564 emit_insn (pat);
3565 goto ret;
3567 else
3568 delete_insns_since (last);
3573 if (!ACCUMULATE_OUTGOING_ARGS)
3575 /* If the source is referenced relative to the stack pointer,
3576 copy it to another register to stabilize it. We do not need
3577 to do this if we know that we won't be changing sp. */
3579 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3580 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3581 temp = copy_to_reg (temp);
3584 /* Make inhibit_defer_pop nonzero around the library call
3585 to force it to pop the bcopy-arguments right away. */
3586 NO_DEFER_POP;
3587 #ifdef TARGET_MEM_FUNCTIONS
3588 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3589 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3590 convert_to_mode (TYPE_MODE (sizetype),
3591 size, TREE_UNSIGNED (sizetype)),
3592 TYPE_MODE (sizetype));
3593 #else
3594 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3595 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3596 convert_to_mode (TYPE_MODE (integer_type_node),
3597 size,
3598 TREE_UNSIGNED (integer_type_node)),
3599 TYPE_MODE (integer_type_node));
3600 #endif
3601 OK_DEFER_POP;
3604 else if (partial > 0)
3606 /* Scalar partly in registers. */
3608 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3609 int i;
3610 int not_stack;
3611 /* # words of start of argument
3612 that we must make space for but need not store. */
3613 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3614 int args_offset = INTVAL (args_so_far);
3615 int skip;
3617 /* Push padding now if padding above and stack grows down,
3618 or if padding below and stack grows up.
3619 But if space already allocated, this has already been done. */
3620 if (extra && args_addr == 0
3621 && where_pad != none && where_pad != stack_direction)
3622 anti_adjust_stack (GEN_INT (extra));
3624 /* If we make space by pushing it, we might as well push
3625 the real data. Otherwise, we can leave OFFSET nonzero
3626 and leave the space uninitialized. */
3627 if (args_addr == 0)
3628 offset = 0;
3630 /* Now NOT_STACK gets the number of words that we don't need to
3631 allocate on the stack. */
3632 not_stack = partial - offset;
3634 /* If the partial register-part of the arg counts in its stack size,
3635 skip the part of stack space corresponding to the registers.
3636 Otherwise, start copying to the beginning of the stack space,
3637 by setting SKIP to 0. */
3638 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3640 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3641 x = validize_mem (force_const_mem (mode, x));
3643 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3644 SUBREGs of such registers are not allowed. */
3645 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3646 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3647 x = copy_to_reg (x);
3649 /* Loop over all the words allocated on the stack for this arg. */
3650 /* We can do it by words, because any scalar bigger than a word
3651 has a size a multiple of a word. */
3652 #ifndef PUSH_ARGS_REVERSED
3653 for (i = not_stack; i < size; i++)
3654 #else
3655 for (i = size - 1; i >= not_stack; i--)
3656 #endif
3657 if (i >= not_stack + offset)
3658 emit_push_insn (operand_subword_force (x, i, mode),
3659 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3660 0, args_addr,
3661 GEN_INT (args_offset + ((i - not_stack + skip)
3662 * UNITS_PER_WORD)),
3663 reg_parm_stack_space, alignment_pad);
3665 else
3667 rtx addr;
3668 rtx target = NULL_RTX;
3669 rtx dest;
3671 /* Push padding now if padding above and stack grows down,
3672 or if padding below and stack grows up.
3673 But if space already allocated, this has already been done. */
3674 if (extra && args_addr == 0
3675 && where_pad != none && where_pad != stack_direction)
3676 anti_adjust_stack (GEN_INT (extra));
3678 #ifdef PUSH_ROUNDING
3679 if (args_addr == 0 && PUSH_ARGS)
3680 emit_single_push_insn (mode, x, type);
3681 else
3682 #endif
3684 if (GET_CODE (args_so_far) == CONST_INT)
3685 addr
3686 = memory_address (mode,
3687 plus_constant (args_addr,
3688 INTVAL (args_so_far)));
3689 else
3690 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3691 args_so_far));
3692 target = addr;
3693 dest = gen_rtx_MEM (mode, addr);
3694 if (type != 0)
3696 set_mem_attributes (dest, type, 1);
3697 /* Function incoming arguments may overlap with sibling call
3698 outgoing arguments and we cannot allow reordering of reads
3699 from function arguments with stores to outgoing arguments
3700 of sibling calls. */
3701 set_mem_alias_set (dest, 0);
3704 emit_move_insn (dest, x);
3709 ret:
3710 /* If part should go in registers, copy that part
3711 into the appropriate registers. Do this now, at the end,
3712 since mem-to-mem copies above may do function calls. */
3713 if (partial > 0 && reg != 0)
3715 /* Handle calls that pass values in multiple non-contiguous locations.
3716 The Irix 6 ABI has examples of this. */
3717 if (GET_CODE (reg) == PARALLEL)
3718 emit_group_load (reg, x, -1); /* ??? size? */
3719 else
3720 move_block_to_reg (REGNO (reg), x, partial, mode);
3723 if (extra && args_addr == 0 && where_pad == stack_direction)
3724 anti_adjust_stack (GEN_INT (extra));
3726 if (alignment_pad && args_addr == 0)
3727 anti_adjust_stack (alignment_pad);
3730 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3731 operations. */
3733 static rtx
3734 get_subtarget (x)
3735 rtx x;
3737 return ((x == 0
3738 /* Only registers can be subtargets. */
3739 || GET_CODE (x) != REG
3740 /* If the register is readonly, it can't be set more than once. */
3741 || RTX_UNCHANGING_P (x)
3742 /* Don't use hard regs to avoid extending their life. */
3743 || REGNO (x) < FIRST_PSEUDO_REGISTER
3744 /* Avoid subtargets inside loops,
3745 since they hide some invariant expressions. */
3746 || preserve_subexpressions_p ())
3747 ? 0 : x);
3750 /* Expand an assignment that stores the value of FROM into TO.
3751 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3752 (This may contain a QUEUED rtx;
3753 if the value is constant, this rtx is a constant.)
3754 Otherwise, the returned value is NULL_RTX.
3756 SUGGEST_REG is no longer actually used.
3757 It used to mean, copy the value through a register
3758 and return that register, if that is possible.
3759 We now use WANT_VALUE to decide whether to do this. */
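/* Illustrative sketch, not part of the original source: an assignment such
   as `s.f = x', where s.f is a bit-field, is handled by the COMPONENT_REF
   branch below and ends up in store_field, while something like `v = f ()'
   with a scalar-returning call on the right-hand side takes the CALL_EXPR
   branch that follows it; s, f, x and v are placeholder names.  */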
3762 expand_assignment (to, from, want_value, suggest_reg)
3763 tree to, from;
3764 int want_value;
3765 int suggest_reg ATTRIBUTE_UNUSED;
3767 rtx to_rtx = 0;
3768 rtx result;
3770 /* Don't crash if the lhs of the assignment was erroneous. */
3772 if (TREE_CODE (to) == ERROR_MARK)
3774 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3775 return want_value ? result : NULL_RTX;
3778 /* Assignment of a structure component needs special treatment
3779 if the structure component's rtx is not simply a MEM.
3780 Assignment of an array element at a constant index, and assignment of
3781 an array element in an unaligned packed structure field, has the same
3782 problem. */
3784 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3785 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3787 enum machine_mode mode1;
3788 HOST_WIDE_INT bitsize, bitpos;
3789 rtx orig_to_rtx;
3790 tree offset;
3791 int unsignedp;
3792 int volatilep = 0;
3793 tree tem;
3795 push_temp_slots ();
3796 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3797 &unsignedp, &volatilep);
3799 /* If we are going to use store_bit_field and extract_bit_field,
3800 make sure to_rtx will be safe for multiple use. */
3802 if (mode1 == VOIDmode && want_value)
3803 tem = stabilize_reference (tem);
3805 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3807 if (offset != 0)
3809 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3811 if (GET_CODE (to_rtx) != MEM)
3812 abort ();
3814 #ifdef POINTERS_EXTEND_UNSIGNED
3815 if (GET_MODE (offset_rtx) != Pmode)
3816 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3817 #else
3818 if (GET_MODE (offset_rtx) != ptr_mode)
3819 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3820 #endif
3822 /* A constant address in TO_RTX can have VOIDmode, we must not try
3823 to call force_reg for that case. Avoid that case. */
3824 if (GET_CODE (to_rtx) == MEM
3825 && GET_MODE (to_rtx) == BLKmode
3826 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3827 && bitsize > 0
3828 && (bitpos % bitsize) == 0
3829 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3830 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3832 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3833 bitpos = 0;
3836 to_rtx = offset_address (to_rtx, offset_rtx,
3837 highest_pow2_factor_for_type (TREE_TYPE (to),
3838 offset));
3841 if (GET_CODE (to_rtx) == MEM)
3843 tree old_expr = MEM_EXPR (to_rtx);
3845 /* If the field is at offset zero, we could have been given the
3846 DECL_RTX of the parent struct. Don't munge it. */
3847 to_rtx = shallow_copy_rtx (to_rtx);
3849 set_mem_attributes (to_rtx, to, 0);
3851 /* If we changed MEM_EXPR, that means we're now referencing
3852 the COMPONENT_REF, which means that MEM_OFFSET must be
3853 relative to that field. But we've not yet reflected BITPOS
3854 in TO_RTX. This will be done in store_field. Adjust for
3855 that by biasing MEM_OFFSET by -bitpos. */
3856 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3857 && (bitpos / BITS_PER_UNIT) != 0)
3858 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3859 - (bitpos / BITS_PER_UNIT)));
3862 /* Deal with volatile and readonly fields. The former is only done
3863 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3864 if (volatilep && GET_CODE (to_rtx) == MEM)
3866 if (to_rtx == orig_to_rtx)
3867 to_rtx = copy_rtx (to_rtx);
3868 MEM_VOLATILE_P (to_rtx) = 1;
3871 if (TREE_CODE (to) == COMPONENT_REF
3872 && TREE_READONLY (TREE_OPERAND (to, 1)))
3874 if (to_rtx == orig_to_rtx)
3875 to_rtx = copy_rtx (to_rtx);
3876 RTX_UNCHANGING_P (to_rtx) = 1;
3879 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3881 if (to_rtx == orig_to_rtx)
3882 to_rtx = copy_rtx (to_rtx);
3883 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3886 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3887 (want_value
3888 /* Spurious cast for HPUX compiler. */
3889 ? ((enum machine_mode)
3890 TYPE_MODE (TREE_TYPE (to)))
3891 : VOIDmode),
3892 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3894 preserve_temp_slots (result);
3895 free_temp_slots ();
3896 pop_temp_slots ();
3898 /* If the value is meaningful, convert RESULT to the proper mode.
3899 Otherwise, return nothing. */
3900 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3901 TYPE_MODE (TREE_TYPE (from)),
3902 result,
3903 TREE_UNSIGNED (TREE_TYPE (to)))
3904 : NULL_RTX);
3907 /* If the rhs is a function call and its value is not an aggregate,
3908 call the function before we start to compute the lhs.
3909 This is needed for correct code for cases such as
3910 val = setjmp (buf) on machines where reference to val
3911 requires loading up part of an address in a separate insn.
3913 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3914 since it might be a promoted variable where the zero- or sign-extension
3915 needs to be done. Handling this in the normal way is safe because no
3916 computation is done before the call. */
3917 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3918 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3919 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3920 && GET_CODE (DECL_RTL (to)) == REG))
3922 rtx value;
3924 push_temp_slots ();
3925 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3926 if (to_rtx == 0)
3927 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3929 /* Handle calls that return values in multiple non-contiguous locations.
3930 The Irix 6 ABI has examples of this. */
3931 if (GET_CODE (to_rtx) == PARALLEL)
3932 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3933 else if (GET_MODE (to_rtx) == BLKmode)
3934 emit_block_move (to_rtx, value, expr_size (from));
3935 else
3937 #ifdef POINTERS_EXTEND_UNSIGNED
3938 if (POINTER_TYPE_P (TREE_TYPE (to))
3939 && GET_MODE (to_rtx) != GET_MODE (value))
3940 value = convert_memory_address (GET_MODE (to_rtx), value);
3941 #endif
3942 emit_move_insn (to_rtx, value);
3944 preserve_temp_slots (to_rtx);
3945 free_temp_slots ();
3946 pop_temp_slots ();
3947 return want_value ? to_rtx : NULL_RTX;
3950 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3951 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3953 if (to_rtx == 0)
3954 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3956 /* Don't move directly into a return register. */
3957 if (TREE_CODE (to) == RESULT_DECL
3958 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3960 rtx temp;
3962 push_temp_slots ();
3963 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3965 if (GET_CODE (to_rtx) == PARALLEL)
3966 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3967 else
3968 emit_move_insn (to_rtx, temp);
3970 preserve_temp_slots (to_rtx);
3971 free_temp_slots ();
3972 pop_temp_slots ();
3973 return want_value ? to_rtx : NULL_RTX;
3976 /* In case we are returning the contents of an object which overlaps
3977 the place the value is being stored, use a safe function when copying
3978 a value through a pointer into a structure value return block. */
3979 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3980 && current_function_returns_struct
3981 && !current_function_returns_pcc_struct)
3983 rtx from_rtx, size;
3985 push_temp_slots ();
3986 size = expr_size (from);
3987 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3989 #ifdef TARGET_MEM_FUNCTIONS
3990 emit_library_call (memmove_libfunc, LCT_NORMAL,
3991 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3992 XEXP (from_rtx, 0), Pmode,
3993 convert_to_mode (TYPE_MODE (sizetype),
3994 size, TREE_UNSIGNED (sizetype)),
3995 TYPE_MODE (sizetype));
3996 #else
3997 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3998 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3999 XEXP (to_rtx, 0), Pmode,
4000 convert_to_mode (TYPE_MODE (integer_type_node),
4001 size, TREE_UNSIGNED (integer_type_node)),
4002 TYPE_MODE (integer_type_node));
4003 #endif
4005 preserve_temp_slots (to_rtx);
4006 free_temp_slots ();
4007 pop_temp_slots ();
4008 return want_value ? to_rtx : NULL_RTX;
4011 /* Compute FROM and store the value in the rtx we got. */
4013 push_temp_slots ();
4014 result = store_expr (from, to_rtx, want_value);
4015 preserve_temp_slots (result);
4016 free_temp_slots ();
4017 pop_temp_slots ();
4018 return want_value ? result : NULL_RTX;
4021 /* Generate code for computing expression EXP,
4022 and storing the value into TARGET.
4023 TARGET may contain a QUEUED rtx.
4025 If WANT_VALUE is nonzero, return a copy of the value
4026 not in TARGET, so that we can be sure to use the proper
4027 value in a containing expression even if TARGET has something
4028 else stored in it. If possible, we copy the value through a pseudo
4029 and return that pseudo. Or, if the value is constant, we try to
4030 return the constant. In some cases, we return a pseudo
4031 copied *from* TARGET.
4033 If the mode is BLKmode then we may return TARGET itself.
4034 It turns out that in BLKmode it doesn't cause a problem,
4035 because C has no operators that could combine two different
4036 assignments into the same BLKmode object with different values
4037 with no sequence point. Will other languages need this to
4038 be more thorough?
4040 If WANT_VALUE is 0, we return NULL, to make sure
4041 to catch quickly any cases where the caller uses the value
4042 and fails to set WANT_VALUE. */
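/* A minimal usage sketch (hypothetical, not from this file): to store
   the value of tree EXP into an already-expanded rtx TARGET and also
   obtain that value,

       rtx val = store_expr (exp, target, 1);

   whereas passing 0 as the last argument stores the value and returns
   NULL_RTX, as described above.  */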
4045 store_expr (exp, target, want_value)
4046 tree exp;
4047 rtx target;
4048 int want_value;
4050 rtx temp;
4051 int dont_return_target = 0;
4052 int dont_store_target = 0;
4054 if (TREE_CODE (exp) == COMPOUND_EXPR)
4056 /* Perform first part of compound expression, then assign from second
4057 part. */
4058 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4059 emit_queue ();
4060 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4062 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4064 /* For a conditional expression, get a safe form of the target. Then
4065 test the condition, doing the appropriate assignment on either
4066 side. This avoids the creation of unnecessary temporaries.
4067 For non-BLKmode, it is more efficient not to do this. */
4069 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4071 emit_queue ();
4072 target = protect_from_queue (target, 1);
4074 do_pending_stack_adjust ();
4075 NO_DEFER_POP;
4076 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4077 start_cleanup_deferral ();
4078 store_expr (TREE_OPERAND (exp, 1), target, 0);
4079 end_cleanup_deferral ();
4080 emit_queue ();
4081 emit_jump_insn (gen_jump (lab2));
4082 emit_barrier ();
4083 emit_label (lab1);
4084 start_cleanup_deferral ();
4085 store_expr (TREE_OPERAND (exp, 2), target, 0);
4086 end_cleanup_deferral ();
4087 emit_queue ();
4088 emit_label (lab2);
4089 OK_DEFER_POP;
4091 return want_value ? target : NULL_RTX;
4093 else if (queued_subexp_p (target))
4094 /* If target contains a postincrement, let's not risk
4095 using it as the place to generate the rhs. */
4097 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4099 /* Expand EXP into a new pseudo. */
4100 temp = gen_reg_rtx (GET_MODE (target));
4101 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4103 else
4104 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4106 /* If target is volatile, ANSI requires accessing the value
4107 *from* the target, if it is accessed. So make that happen.
4108 In no case return the target itself. */
4109 if (! MEM_VOLATILE_P (target) && want_value)
4110 dont_return_target = 1;
4112 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4113 && GET_MODE (target) != BLKmode)
4114 /* If target is in memory and caller wants value in a register instead,
4115 arrange that. Pass TARGET as target for expand_expr so that,
4116 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4117 We know expand_expr will not use the target in that case.
4118 Don't do this if TARGET is volatile because we are supposed
4119 to write it and then read it. */
4121 temp = expand_expr (exp, target, GET_MODE (target), 0);
4122 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4124 /* If TEMP is already in the desired TARGET, only copy it from
4125 memory and don't store it there again. */
4126 if (temp == target
4127 || (rtx_equal_p (temp, target)
4128 && ! side_effects_p (temp) && ! side_effects_p (target)))
4129 dont_store_target = 1;
4130 temp = copy_to_reg (temp);
4132 dont_return_target = 1;
4134 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4135 /* If this is a scalar in a register that is stored in a wider mode
4136 than the declared mode, compute the result into its declared mode
4137 and then convert to the wider mode. Our value is the computed
4138 expression. */
4140 rtx inner_target = 0;
4142 /* If we don't want a value, we can do the conversion inside EXP,
4143 which will often result in some optimizations. Do the conversion
4144 in two steps: first change the signedness, if needed, then
4145 the extension. But don't do this if the type of EXP is a subtype
4146 of something else since then the conversion might involve
4147 more than just converting modes. */
4148 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4149 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4151 if (TREE_UNSIGNED (TREE_TYPE (exp))
4152 != SUBREG_PROMOTED_UNSIGNED_P (target))
4153 exp = convert
4154 ((*lang_hooks.types.signed_or_unsigned_type)
4155 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4157 exp = convert ((*lang_hooks.types.type_for_mode)
4158 (GET_MODE (SUBREG_REG (target)),
4159 SUBREG_PROMOTED_UNSIGNED_P (target)),
4160 exp);
4162 inner_target = SUBREG_REG (target);
4165 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4167 /* If TEMP is a volatile MEM and we want a result value, make
4168 the access now so it gets done only once. Likewise if
4169 it contains TARGET. */
4170 if (GET_CODE (temp) == MEM && want_value
4171 && (MEM_VOLATILE_P (temp)
4172 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4173 temp = copy_to_reg (temp);
4175 /* If TEMP is a VOIDmode constant, use convert_modes to make
4176 sure that we properly convert it. */
4177 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4179 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4180 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4181 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4182 GET_MODE (target), temp,
4183 SUBREG_PROMOTED_UNSIGNED_P (target));
4186 convert_move (SUBREG_REG (target), temp,
4187 SUBREG_PROMOTED_UNSIGNED_P (target));
4189 /* If we promoted a constant, change the mode back down to match
4190 target. Otherwise, the caller might get confused by a result whose
4191 mode is larger than expected. */
4193 if (want_value && GET_MODE (temp) != GET_MODE (target))
4195 if (GET_MODE (temp) != VOIDmode)
4197 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4198 SUBREG_PROMOTED_VAR_P (temp) = 1;
4199 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4200 SUBREG_PROMOTED_UNSIGNED_P (target));
4202 else
4203 temp = convert_modes (GET_MODE (target),
4204 GET_MODE (SUBREG_REG (target)),
4205 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4208 return want_value ? temp : NULL_RTX;
4210 else
4212 temp = expand_expr (exp, target, GET_MODE (target), 0);
4213 /* Return TARGET if it's a specified hardware register.
4214 If TARGET is a volatile mem ref, either return TARGET
4215 or return a reg copied *from* TARGET; ANSI requires this.
4217 Otherwise, if TEMP is not TARGET, return TEMP
4218 if it is constant (for efficiency),
4219 or if we really want the correct value. */
4220 if (!(target && GET_CODE (target) == REG
4221 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4222 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4223 && ! rtx_equal_p (temp, target)
4224 && (CONSTANT_P (temp) || want_value))
4225 dont_return_target = 1;
4228 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4229 the same as that of TARGET, adjust the constant. This is needed, for
4230 example, in case it is a CONST_DOUBLE and we want only a word-sized
4231 value. */
4232 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4233 && TREE_CODE (exp) != ERROR_MARK
4234 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4235 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4236 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4238 /* If value was not generated in the target, store it there.
4239 Convert the value to TARGET's type first if necessary.
4240 If TEMP and TARGET compare equal according to rtx_equal_p, but
4241 one or both of them are volatile memory refs, we have to distinguish
4242 two cases:
4243 - expand_expr has used TARGET. In this case, we must not generate
4244 another copy. This can be detected by TARGET being equal according
4245 to == .
4246 - expand_expr has not used TARGET - that means that the source just
4247 happens to have the same RTX form. Since temp will have been created
4248 by expand_expr, it will compare unequal according to == .
4249 We must generate a copy in this case, to reach the correct number
4250 of volatile memory references. */
4252 if ((! rtx_equal_p (temp, target)
4253 || (temp != target && (side_effects_p (temp)
4254 || side_effects_p (target))))
4255 && TREE_CODE (exp) != ERROR_MARK
4256 && ! dont_store_target
4257 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4258 but TARGET is not a valid memory reference, TEMP will differ
4259 from TARGET although it is really the same location. */
4260 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4261 || target != DECL_RTL_IF_SET (exp)))
4263 target = protect_from_queue (target, 1);
4264 if (GET_MODE (temp) != GET_MODE (target)
4265 && GET_MODE (temp) != VOIDmode)
4267 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4268 if (dont_return_target)
4270 /* In this case, we will return TEMP,
4271 so make sure it has the proper mode.
4272 But don't forget to store the value into TARGET. */
4273 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4274 emit_move_insn (target, temp);
4276 else
4277 convert_move (target, temp, unsignedp);
4280 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4282 /* Handle copying a string constant into an array. The string
4283 constant may be shorter than the array. So copy just the string's
4284 actual length, and clear the rest. First get the size of the data
4285 type of the string, which is actually the size of the target. */
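        /* Hypothetical example, for illustration only: for a C
           initializer such as char buf[8] = "abc", the STRING_CST
           occupies 4 bytes (including the terminating NUL) while the
           target is 8 bytes, so 4 bytes are copied and the remaining
           4 are cleared below.  */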
4286 rtx size = expr_size (exp);
4288 if (GET_CODE (size) == CONST_INT
4289 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4290 emit_block_move (target, temp, size);
4291 else
4293 /* Compute the size of the data to copy from the string. */
4294 tree copy_size
4295 = size_binop (MIN_EXPR,
4296 make_tree (sizetype, size),
4297 size_int (TREE_STRING_LENGTH (exp)));
4298 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4299 VOIDmode, 0);
4300 rtx label = 0;
4302 /* Copy that much. */
4303 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4304 emit_block_move (target, temp, copy_size_rtx);
4306 /* Figure out how much is left in TARGET that we have to clear.
4307 Do all calculations in ptr_mode. */
4308 if (GET_CODE (copy_size_rtx) == CONST_INT)
4310 size = plus_constant (size, -INTVAL (copy_size_rtx));
4311 target = adjust_address (target, BLKmode,
4312 INTVAL (copy_size_rtx));
4314 else
4316 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4317 copy_size_rtx, NULL_RTX, 0,
4318 OPTAB_LIB_WIDEN);
4320 #ifdef POINTERS_EXTEND_UNSIGNED
4321 if (GET_MODE (copy_size_rtx) != Pmode)
4322 copy_size_rtx = convert_memory_address (Pmode,
4323 copy_size_rtx);
4324 #endif
4326 target = offset_address (target, copy_size_rtx,
4327 highest_pow2_factor (copy_size));
4328 label = gen_label_rtx ();
4329 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4330 GET_MODE (size), 0, label);
4333 if (size != const0_rtx)
4334 clear_storage (target, size);
4336 if (label)
4337 emit_label (label);
4340 /* Handle calls that return values in multiple non-contiguous locations.
4341 The Irix 6 ABI has examples of this. */
4342 else if (GET_CODE (target) == PARALLEL)
4343 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4344 else if (GET_MODE (temp) == BLKmode)
4345 emit_block_move (target, temp, expr_size (exp));
4346 else
4347 emit_move_insn (target, temp);
4350 /* If we don't want a value, return NULL_RTX. */
4351 if (! want_value)
4352 return NULL_RTX;
4354 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4355 ??? The latter test doesn't seem to make sense. */
4356 else if (dont_return_target && GET_CODE (temp) != MEM)
4357 return temp;
4359 /* Return TARGET itself if it is a hard register. */
4360 else if (want_value && GET_MODE (target) != BLKmode
4361 && ! (GET_CODE (target) == REG
4362 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4363 return copy_to_reg (target);
4365 else
4366 return target;
4369 /* Return 1 if EXP just contains zeros. */
4371 static int
4372 is_zeros_p (exp)
4373 tree exp;
4375 tree elt;
4377 switch (TREE_CODE (exp))
4379 case CONVERT_EXPR:
4380 case NOP_EXPR:
4381 case NON_LVALUE_EXPR:
4382 case VIEW_CONVERT_EXPR:
4383 return is_zeros_p (TREE_OPERAND (exp, 0));
4385 case INTEGER_CST:
4386 return integer_zerop (exp);
4388 case COMPLEX_CST:
4389 return
4390 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4392 case REAL_CST:
4393 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4395 case VECTOR_CST:
4396 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4397 elt = TREE_CHAIN (elt))
4398 if (!is_zeros_p (TREE_VALUE (elt)))
4399 return 0;
4401 return 1;
4403 case CONSTRUCTOR:
4404 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4405 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4406 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4407 if (! is_zeros_p (TREE_VALUE (elt)))
4408 return 0;
4410 return 1;
4412 default:
4413 return 0;
4417 /* Return 1 if EXP contains mostly (3/4) zeros. */
4419 static int
4420 mostly_zeros_p (exp)
4421 tree exp;
4423 if (TREE_CODE (exp) == CONSTRUCTOR)
4425 int elts = 0, zeros = 0;
4426 tree elt = CONSTRUCTOR_ELTS (exp);
4427 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4429 /* If there are no ranges of true bits, it is all zero. */
4430 return elt == NULL_TREE;
4432 for (; elt; elt = TREE_CHAIN (elt))
4434 /* We do not handle the case where the index is a RANGE_EXPR,
4435 so the statistic will be somewhat inaccurate.
4436 We do make a more accurate count in store_constructor itself,
4437 and since this function is only used for nested array elements,
4438 this should be close enough. */
4439 if (mostly_zeros_p (TREE_VALUE (elt)))
4440 zeros++;
4441 elts++;
4444 return 4 * zeros >= 3 * elts;
4447 return is_zeros_p (exp);
4450 /* Helper function for store_constructor.
4451 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4452 TYPE is the type of the CONSTRUCTOR, not the element type.
4453 CLEARED is as for store_constructor.
4454 ALIAS_SET is the alias set to use for any stores.
4456 This provides a recursive shortcut back to store_constructor when it isn't
4457 necessary to go through store_field. This is so that we can pass through
4458 the cleared field to let store_constructor know that we may not have to
4459 clear a substructure if the outer structure has already been cleared. */
4461 static void
4462 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4463 alias_set)
4464 rtx target;
4465 unsigned HOST_WIDE_INT bitsize;
4466 HOST_WIDE_INT bitpos;
4467 enum machine_mode mode;
4468 tree exp, type;
4469 int cleared;
4470 int alias_set;
4472 if (TREE_CODE (exp) == CONSTRUCTOR
4473 && bitpos % BITS_PER_UNIT == 0
4474 /* If we have a non-zero bitpos for a register target, then we just
4475 let store_field do the bitfield handling. This is unlikely to
4476 generate unnecessary clear instructions anyway. */
4477 && (bitpos == 0 || GET_CODE (target) == MEM))
4479 if (GET_CODE (target) == MEM)
4480 target
4481 = adjust_address (target,
4482 GET_MODE (target) == BLKmode
4483 || 0 != (bitpos
4484 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4485 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4488 /* Update the alias set, if required. */
4489 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4490 && MEM_ALIAS_SET (target) != 0)
4492 target = copy_rtx (target);
4493 set_mem_alias_set (target, alias_set);
4496 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4498 else
4499 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4500 alias_set);
4503 /* Store the value of constructor EXP into the rtx TARGET.
4504 TARGET is either a REG or a MEM; we know it cannot conflict, since
4505 safe_from_p has been called.
4506 CLEARED is true if TARGET is known to have been zeroed.
4507 SIZE is the number of bytes of TARGET we are allowed to modify: this
4508 may not be the same as the size of EXP if we are assigning to a field
4509 which has been packed to exclude padding bits. */
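/* An illustrative case (hypothetical C code): for

       struct s { int a, b, c; } x = { 1 };

   the constructor supplies fewer elements than the type has fields, so
   the code below clears the whole object first and then stores the one
   explicit element.  */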
4511 static void
4512 store_constructor (exp, target, cleared, size)
4513 tree exp;
4514 rtx target;
4515 int cleared;
4516 HOST_WIDE_INT size;
4518 tree type = TREE_TYPE (exp);
4519 #ifdef WORD_REGISTER_OPERATIONS
4520 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4521 #endif
4523 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4524 || TREE_CODE (type) == QUAL_UNION_TYPE)
4526 tree elt;
4528 /* We either clear the aggregate or indicate the value is dead. */
4529 if ((TREE_CODE (type) == UNION_TYPE
4530 || TREE_CODE (type) == QUAL_UNION_TYPE)
4531 && ! cleared
4532 && ! CONSTRUCTOR_ELTS (exp))
4533 /* If the constructor is empty, clear the union. */
4535 clear_storage (target, expr_size (exp));
4536 cleared = 1;
4539 /* If we are building a static constructor into a register,
4540 set the initial value as zero so we can fold the value into
4541 a constant. But if more than one register is involved,
4542 this probably loses. */
4543 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4544 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4546 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4547 cleared = 1;
4550 /* If the constructor has fewer fields than the structure
4551 or if we are initializing the structure to mostly zeros,
4552 clear the whole structure first. Don't do this if TARGET is a
4553 register whose mode size isn't equal to SIZE since clear_storage
4554 can't handle this case. */
4555 else if (! cleared && size > 0
4556 && ((list_length (CONSTRUCTOR_ELTS (exp))
4557 != fields_length (type))
4558 || mostly_zeros_p (exp))
4559 && (GET_CODE (target) != REG
4560 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4561 == size)))
4563 clear_storage (target, GEN_INT (size));
4564 cleared = 1;
4567 if (! cleared)
4568 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4570 /* Store each element of the constructor into
4571 the corresponding field of TARGET. */
4573 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4575 tree field = TREE_PURPOSE (elt);
4576 tree value = TREE_VALUE (elt);
4577 enum machine_mode mode;
4578 HOST_WIDE_INT bitsize;
4579 HOST_WIDE_INT bitpos = 0;
4580 int unsignedp;
4581 tree offset;
4582 rtx to_rtx = target;
4584 /* Just ignore missing fields.
4585 We cleared the whole structure, above,
4586 if any fields are missing. */
4587 if (field == 0)
4588 continue;
4590 if (cleared && is_zeros_p (value))
4591 continue;
4593 if (host_integerp (DECL_SIZE (field), 1))
4594 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4595 else
4596 bitsize = -1;
4598 unsignedp = TREE_UNSIGNED (field);
4599 mode = DECL_MODE (field);
4600 if (DECL_BIT_FIELD (field))
4601 mode = VOIDmode;
4603 offset = DECL_FIELD_OFFSET (field);
4604 if (host_integerp (offset, 0)
4605 && host_integerp (bit_position (field), 0))
4607 bitpos = int_bit_position (field);
4608 offset = 0;
4610 else
4611 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4613 if (offset)
4615 rtx offset_rtx;
4617 if (contains_placeholder_p (offset))
4618 offset = build (WITH_RECORD_EXPR, sizetype,
4619 offset, make_tree (TREE_TYPE (exp), target));
4621 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4622 if (GET_CODE (to_rtx) != MEM)
4623 abort ();
4625 #ifdef POINTERS_EXTEND_UNSIGNED
4626 if (GET_MODE (offset_rtx) != Pmode)
4627 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4628 #else
4629 if (GET_MODE (offset_rtx) != ptr_mode)
4630 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4631 #endif
4633 to_rtx = offset_address (to_rtx, offset_rtx,
4634 highest_pow2_factor (offset));
4637 if (TREE_READONLY (field))
4639 if (GET_CODE (to_rtx) == MEM)
4640 to_rtx = copy_rtx (to_rtx);
4642 RTX_UNCHANGING_P (to_rtx) = 1;
4645 #ifdef WORD_REGISTER_OPERATIONS
4646 /* If this initializes a field that is smaller than a word, at the
4647 start of a word, try to widen it to a full word.
4648 This special case allows us to output C++ member function
4649 initializations in a form that the optimizers can understand. */
4650 if (GET_CODE (target) == REG
4651 && bitsize < BITS_PER_WORD
4652 && bitpos % BITS_PER_WORD == 0
4653 && GET_MODE_CLASS (mode) == MODE_INT
4654 && TREE_CODE (value) == INTEGER_CST
4655 && exp_size >= 0
4656 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4658 tree type = TREE_TYPE (value);
4660 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4662 type = (*lang_hooks.types.type_for_size)
4663 (BITS_PER_WORD, TREE_UNSIGNED (type));
4664 value = convert (type, value);
4667 if (BYTES_BIG_ENDIAN)
4668 value
4669 = fold (build (LSHIFT_EXPR, type, value,
4670 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4671 bitsize = BITS_PER_WORD;
4672 mode = word_mode;
4674 #endif
4676 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4677 && DECL_NONADDRESSABLE_P (field))
4679 to_rtx = copy_rtx (to_rtx);
4680 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4683 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4684 value, type, cleared,
4685 get_alias_set (TREE_TYPE (field)));
4688 else if (TREE_CODE (type) == ARRAY_TYPE
4689 || TREE_CODE (type) == VECTOR_TYPE)
4691 tree elt;
4692 int i;
4693 int need_to_clear;
4694 tree domain = TYPE_DOMAIN (type);
4695 tree elttype = TREE_TYPE (type);
4696 int const_bounds_p;
4697 HOST_WIDE_INT minelt = 0;
4698 HOST_WIDE_INT maxelt = 0;
4700 /* Vectors are like arrays, but the domain is stored indirectly
4701 via an array type. */
4702 if (TREE_CODE (type) == VECTOR_TYPE)
4704 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4705 the same field as TYPE_DOMAIN, we are not guaranteed that
4706 it always will. */
4707 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4708 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4711 const_bounds_p = (TYPE_MIN_VALUE (domain)
4712 && TYPE_MAX_VALUE (domain)
4713 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4714 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4716 /* If we have constant bounds for the range of the type, get them. */
4717 if (const_bounds_p)
4719 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4720 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4723 /* If the constructor has fewer elements than the array,
4724 clear the whole array first. Similarly if this is
4725 a static constructor of a non-BLKmode object. */
4726 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4727 need_to_clear = 1;
4728 else
4730 HOST_WIDE_INT count = 0, zero_count = 0;
4731 need_to_clear = ! const_bounds_p;
4733 /* This loop is a more accurate version of the loop in
4734 mostly_zeros_p (it handles RANGE_EXPR in an index).
4735 It is also needed to check for missing elements. */
4736 for (elt = CONSTRUCTOR_ELTS (exp);
4737 elt != NULL_TREE && ! need_to_clear;
4738 elt = TREE_CHAIN (elt))
4740 tree index = TREE_PURPOSE (elt);
4741 HOST_WIDE_INT this_node_count;
4743 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4745 tree lo_index = TREE_OPERAND (index, 0);
4746 tree hi_index = TREE_OPERAND (index, 1);
4748 if (! host_integerp (lo_index, 1)
4749 || ! host_integerp (hi_index, 1))
4751 need_to_clear = 1;
4752 break;
4755 this_node_count = (tree_low_cst (hi_index, 1)
4756 - tree_low_cst (lo_index, 1) + 1);
4758 else
4759 this_node_count = 1;
4761 count += this_node_count;
4762 if (mostly_zeros_p (TREE_VALUE (elt)))
4763 zero_count += this_node_count;
4766 /* Clear the entire array first if there are any missing elements,
4767 or if the incidence of zero elements is >= 75%. */
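          /* For example (hypothetical numbers): a constructor supplying
             8 elements of which 6 are mostly zero gives
             4 * 6 = 24 >= 3 * 8 = 24, so the whole array is cleared
             first.  */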
4768 if (! need_to_clear
4769 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4770 need_to_clear = 1;
4773 if (need_to_clear && size > 0)
4775 if (! cleared)
4777 if (REG_P (target))
4778 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4779 else
4780 clear_storage (target, GEN_INT (size));
4782 cleared = 1;
4784 else if (REG_P (target))
4785 /* Inform later passes that the old value is dead. */
4786 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4788 /* Store each element of the constructor into
4789 the corresponding element of TARGET, determined
4790 by counting the elements. */
4791 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4792 elt;
4793 elt = TREE_CHAIN (elt), i++)
4795 enum machine_mode mode;
4796 HOST_WIDE_INT bitsize;
4797 HOST_WIDE_INT bitpos;
4798 int unsignedp;
4799 tree value = TREE_VALUE (elt);
4800 tree index = TREE_PURPOSE (elt);
4801 rtx xtarget = target;
4803 if (cleared && is_zeros_p (value))
4804 continue;
4806 unsignedp = TREE_UNSIGNED (elttype);
4807 mode = TYPE_MODE (elttype);
4808 if (mode == BLKmode)
4809 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4810 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4811 : -1);
4812 else
4813 bitsize = GET_MODE_BITSIZE (mode);
4815 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4817 tree lo_index = TREE_OPERAND (index, 0);
4818 tree hi_index = TREE_OPERAND (index, 1);
4819 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4820 struct nesting *loop;
4821 HOST_WIDE_INT lo, hi, count;
4822 tree position;
4824 /* If the range is constant and "small", unroll the loop. */
4825 if (const_bounds_p
4826 && host_integerp (lo_index, 0)
4827 && host_integerp (hi_index, 0)
4828 && (lo = tree_low_cst (lo_index, 0),
4829 hi = tree_low_cst (hi_index, 0),
4830 count = hi - lo + 1,
4831 (GET_CODE (target) != MEM
4832 || count <= 2
4833 || (host_integerp (TYPE_SIZE (elttype), 1)
4834 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4835 <= 40 * 8)))))
4837 lo -= minelt; hi -= minelt;
4838 for (; lo <= hi; lo++)
4840 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4842 if (GET_CODE (target) == MEM
4843 && !MEM_KEEP_ALIAS_SET_P (target)
4844 && TREE_CODE (type) == ARRAY_TYPE
4845 && TYPE_NONALIASED_COMPONENT (type))
4847 target = copy_rtx (target);
4848 MEM_KEEP_ALIAS_SET_P (target) = 1;
4851 store_constructor_field
4852 (target, bitsize, bitpos, mode, value, type, cleared,
4853 get_alias_set (elttype));
4856 else
4858 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4859 loop_top = gen_label_rtx ();
4860 loop_end = gen_label_rtx ();
4862 unsignedp = TREE_UNSIGNED (domain);
4864 index = build_decl (VAR_DECL, NULL_TREE, domain);
4866 index_r
4867 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4868 &unsignedp, 0));
4869 SET_DECL_RTL (index, index_r);
4870 if (TREE_CODE (value) == SAVE_EXPR
4871 && SAVE_EXPR_RTL (value) == 0)
4873 /* Make sure value gets expanded once before the
4874 loop. */
4875 expand_expr (value, const0_rtx, VOIDmode, 0);
4876 emit_queue ();
4878 store_expr (lo_index, index_r, 0);
4879 loop = expand_start_loop (0);
4881 /* Assign value to element index. */
4882 position
4883 = convert (ssizetype,
4884 fold (build (MINUS_EXPR, TREE_TYPE (index),
4885 index, TYPE_MIN_VALUE (domain))));
4886 position = size_binop (MULT_EXPR, position,
4887 convert (ssizetype,
4888 TYPE_SIZE_UNIT (elttype)));
4890 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4891 xtarget = offset_address (target, pos_rtx,
4892 highest_pow2_factor (position));
4893 xtarget = adjust_address (xtarget, mode, 0);
4894 if (TREE_CODE (value) == CONSTRUCTOR)
4895 store_constructor (value, xtarget, cleared,
4896 bitsize / BITS_PER_UNIT);
4897 else
4898 store_expr (value, xtarget, 0);
4900 expand_exit_loop_if_false (loop,
4901 build (LT_EXPR, integer_type_node,
4902 index, hi_index));
4904 expand_increment (build (PREINCREMENT_EXPR,
4905 TREE_TYPE (index),
4906 index, integer_one_node), 0, 0);
4907 expand_end_loop ();
4908 emit_label (loop_end);
4911 else if ((index != 0 && ! host_integerp (index, 0))
4912 || ! host_integerp (TYPE_SIZE (elttype), 1))
4914 tree position;
4916 if (index == 0)
4917 index = ssize_int (1);
4919 if (minelt)
4920 index = convert (ssizetype,
4921 fold (build (MINUS_EXPR, index,
4922 TYPE_MIN_VALUE (domain))));
4924 position = size_binop (MULT_EXPR, index,
4925 convert (ssizetype,
4926 TYPE_SIZE_UNIT (elttype)));
4927 xtarget = offset_address (target,
4928 expand_expr (position, 0, VOIDmode, 0),
4929 highest_pow2_factor (position));
4930 xtarget = adjust_address (xtarget, mode, 0);
4931 store_expr (value, xtarget, 0);
4933 else
4935 if (index != 0)
4936 bitpos = ((tree_low_cst (index, 0) - minelt)
4937 * tree_low_cst (TYPE_SIZE (elttype), 1));
4938 else
4939 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4941 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4942 && TREE_CODE (type) == ARRAY_TYPE
4943 && TYPE_NONALIASED_COMPONENT (type))
4945 target = copy_rtx (target);
4946 MEM_KEEP_ALIAS_SET_P (target) = 1;
4949 store_constructor_field (target, bitsize, bitpos, mode, value,
4950 type, cleared, get_alias_set (elttype));
4956 /* Set constructor assignments. */
4957 else if (TREE_CODE (type) == SET_TYPE)
4959 tree elt = CONSTRUCTOR_ELTS (exp);
4960 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4961 tree domain = TYPE_DOMAIN (type);
4962 tree domain_min, domain_max, bitlength;
4964 /* The default implementation strategy is to extract the constant
4965 parts of the constructor, use that to initialize the target,
4966 and then "or" in whatever non-constant ranges we need in addition.
4968 If a large set is all zero or all ones, it is
4969 probably better to set it using memset (if available) or bzero.
4970 Also, if a large set has just a single range, it may be
4971 better to first clear the whole set (using bzero/memset)
4972 and then set the bits we want. */
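      /* An illustrative element list (hypothetical): a set value such as
         [1, 3..5] would arrive as a single-bit element (TREE_PURPOSE
         NULL, TREE_VALUE 1) followed by a range element (TREE_PURPOSE 3,
         TREE_VALUE 5), matching the element layout described further
         below.  */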
4974 /* Check for all zeros. */
4975 if (elt == NULL_TREE && size > 0)
4977 if (!cleared)
4978 clear_storage (target, GEN_INT (size));
4979 return;
4982 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4983 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4984 bitlength = size_binop (PLUS_EXPR,
4985 size_diffop (domain_max, domain_min),
4986 ssize_int (1));
4988 nbits = tree_low_cst (bitlength, 1);
4990 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4991 are "complicated" (more than one range), initialize (the
4992 constant parts) by copying from a constant. */
4993 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4994 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4996 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4997 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4998 char *bit_buffer = (char *) alloca (nbits);
4999 HOST_WIDE_INT word = 0;
5000 unsigned int bit_pos = 0;
5001 unsigned int ibit = 0;
5002 unsigned int offset = 0; /* In bytes from beginning of set. */
5004 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5005 for (;;)
5007 if (bit_buffer[ibit])
5009 if (BYTES_BIG_ENDIAN)
5010 word |= (1 << (set_word_size - 1 - bit_pos));
5011 else
5012 word |= 1 << bit_pos;
5015 bit_pos++; ibit++;
5016 if (bit_pos >= set_word_size || ibit == nbits)
5018 if (word != 0 || ! cleared)
5020 rtx datum = GEN_INT (word);
5021 rtx to_rtx;
5023 /* The assumption here is that it is safe to use
5024 XEXP if the set is multi-word, but not if
5025 it's single-word. */
5026 if (GET_CODE (target) == MEM)
5027 to_rtx = adjust_address (target, mode, offset);
5028 else if (offset == 0)
5029 to_rtx = target;
5030 else
5031 abort ();
5032 emit_move_insn (to_rtx, datum);
5035 if (ibit == nbits)
5036 break;
5037 word = 0;
5038 bit_pos = 0;
5039 offset += set_word_size / BITS_PER_UNIT;
5043 else if (!cleared)
5044 /* Don't bother clearing storage if the set is all ones. */
5045 if (TREE_CHAIN (elt) != NULL_TREE
5046 || (TREE_PURPOSE (elt) == NULL_TREE
5047 ? nbits != 1
5048 : ( ! host_integerp (TREE_VALUE (elt), 0)
5049 || ! host_integerp (TREE_PURPOSE (elt), 0)
5050 || (tree_low_cst (TREE_VALUE (elt), 0)
5051 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5052 != (HOST_WIDE_INT) nbits))))
5053 clear_storage (target, expr_size (exp));
5055 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5057 /* Start of range of element or NULL. */
5058 tree startbit = TREE_PURPOSE (elt);
5059 /* End of range of element, or element value. */
5060 tree endbit = TREE_VALUE (elt);
5061 #ifdef TARGET_MEM_FUNCTIONS
5062 HOST_WIDE_INT startb, endb;
5063 #endif
5064 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5066 bitlength_rtx = expand_expr (bitlength,
5067 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5069 /* Handle non-range tuple element like [ expr ]. */
5070 if (startbit == NULL_TREE)
5072 startbit = save_expr (endbit);
5073 endbit = startbit;
5076 startbit = convert (sizetype, startbit);
5077 endbit = convert (sizetype, endbit);
5078 if (! integer_zerop (domain_min))
5080 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5081 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5083 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5084 EXPAND_CONST_ADDRESS);
5085 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5086 EXPAND_CONST_ADDRESS);
5088 if (REG_P (target))
5090 targetx
5091 = assign_temp
5092 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5093 (GET_MODE (target), 0),
5094 TYPE_QUAL_CONST)),
5095 0, 1, 1);
5096 emit_move_insn (targetx, target);
5099 else if (GET_CODE (target) == MEM)
5100 targetx = target;
5101 else
5102 abort ();
5104 #ifdef TARGET_MEM_FUNCTIONS
5105 /* Optimization: If startbit and endbit are
5106 constants divisible by BITS_PER_UNIT,
5107 call memset instead. */
5108 if (TREE_CODE (startbit) == INTEGER_CST
5109 && TREE_CODE (endbit) == INTEGER_CST
5110 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5111 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5113 emit_library_call (memset_libfunc, LCT_NORMAL,
5114 VOIDmode, 3,
5115 plus_constant (XEXP (targetx, 0),
5116 startb / BITS_PER_UNIT),
5117 Pmode,
5118 constm1_rtx, TYPE_MODE (integer_type_node),
5119 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5120 TYPE_MODE (sizetype));
5122 else
5123 #endif
5124 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5125 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5126 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5127 startbit_rtx, TYPE_MODE (sizetype),
5128 endbit_rtx, TYPE_MODE (sizetype));
5130 if (REG_P (target))
5131 emit_move_insn (target, targetx);
5135 else
5136 abort ();
5139 /* Store the value of EXP (an expression tree)
5140 into a subfield of TARGET which has mode MODE and occupies
5141 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5142 If MODE is VOIDmode, it means that we are storing into a bit-field.
5144 If VALUE_MODE is VOIDmode, return nothing in particular.
5145 UNSIGNEDP is not used in this case.
5147 Otherwise, return an rtx for the value stored. This rtx
5148 has mode VALUE_MODE if that is convenient to do.
5149 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5151 TYPE is the type of the underlying object.
5153 ALIAS_SET is the alias set for the destination. This value will
5154 (in general) be different from that for TARGET, since TARGET is a
5155 reference to the containing structure. */
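/* A usage sketch (modeled loosely on the caller in expand_assignment
   above, simplified): to store FROM into a BITSIZE-bit field of TO_RTX
   at bit position BITPOS when no value is wanted back,

       store_field (to_rtx, bitsize, bitpos, mode1, from, VOIDmode,
                    unsignedp, TREE_TYPE (tem), get_alias_set (to));

   passing VOIDmode as VALUE_MODE so that nothing in particular is
   returned.  */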
5157 static rtx
5158 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5159 alias_set)
5160 rtx target;
5161 HOST_WIDE_INT bitsize;
5162 HOST_WIDE_INT bitpos;
5163 enum machine_mode mode;
5164 tree exp;
5165 enum machine_mode value_mode;
5166 int unsignedp;
5167 tree type;
5168 int alias_set;
5170 HOST_WIDE_INT width_mask = 0;
5172 if (TREE_CODE (exp) == ERROR_MARK)
5173 return const0_rtx;
5175 /* If we have nothing to store, do nothing unless the expression has
5176 side-effects. */
5177 if (bitsize == 0)
5178 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5179 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5180 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5182 /* If we are storing into an unaligned field of an aligned union that is
5183 in a register, we may have the mode of TARGET being an integer mode but
5184 MODE == BLKmode. In that case, get an aligned object whose size and
5185 alignment are the same as TARGET and store TARGET into it (we can avoid
5186 the store if the field being stored is the entire width of TARGET). Then
5187 call ourselves recursively to store the field into a BLKmode version of
5188 that object. Finally, load from the object into TARGET. This is not
5189 very efficient in general, but should only be slightly more expensive
5190 than the otherwise-required unaligned accesses. Perhaps this can be
5191 cleaned up later. */
5193 if (mode == BLKmode
5194 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5196 rtx object
5197 = assign_temp
5198 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5199 0, 1, 1);
5200 rtx blk_object = adjust_address (object, BLKmode, 0);
5202 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5203 emit_move_insn (object, target);
5205 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5206 alias_set);
5208 emit_move_insn (target, object);
5210 /* We want to return the BLKmode version of the data. */
5211 return blk_object;
5214 if (GET_CODE (target) == CONCAT)
5216 /* We're storing into a struct containing a single __complex. */
5218 if (bitpos != 0)
5219 abort ();
5220 return store_expr (exp, target, 0);
5223 /* If the structure is in a register or if the component
5224 is a bit field, we cannot use addressing to access it.
5225 Use bit-field techniques or SUBREG to store in it. */
5227 if (mode == VOIDmode
5228 || (mode != BLKmode && ! direct_store[(int) mode]
5229 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5230 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5231 || GET_CODE (target) == REG
5232 || GET_CODE (target) == SUBREG
5233 /* If the field isn't aligned enough to store as an ordinary memref,
5234 store it as a bit field. */
5235 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5236 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5237 || bitpos % GET_MODE_ALIGNMENT (mode)))
5238 /* If the RHS and field are a constant size and the size of the
5239 RHS isn't the same size as the bitfield, we must use bitfield
5240 operations. */
5241 || (bitsize >= 0
5242 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5243 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5245 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5247 /* If BITSIZE is narrower than the size of the type of EXP
5248 we will be narrowing TEMP. Normally, what's wanted are the
5249 low-order bits. However, if EXP's type is a record and this is
5250 a big-endian machine, we want the upper BITSIZE bits. */
5251 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5252 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5253 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5254 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5255 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5256 - bitsize),
5257 temp, 1);
5259 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5260 MODE. */
5261 if (mode != VOIDmode && mode != BLKmode
5262 && mode != TYPE_MODE (TREE_TYPE (exp)))
5263 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5265 /* If the modes of TARGET and TEMP are both BLKmode, both
5266 must be in memory and BITPOS must be aligned on a byte
5267 boundary. If so, we simply do a block copy. */
5268 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5270 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5271 || bitpos % BITS_PER_UNIT != 0)
5272 abort ();
5274 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5275 emit_block_move (target, temp,
5276 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5277 / BITS_PER_UNIT));
5279 return value_mode == VOIDmode ? const0_rtx : target;
5282 /* Store the value in the bitfield. */
5283 store_bit_field (target, bitsize, bitpos, mode, temp,
5284 int_size_in_bytes (type));
5286 if (value_mode != VOIDmode)
5288 /* The caller wants an rtx for the value.
5289 If possible, avoid refetching from the bitfield itself. */
5290 if (width_mask != 0
5291 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5293 tree count;
5294 enum machine_mode tmode;
5296 tmode = GET_MODE (temp);
5297 if (tmode == VOIDmode)
5298 tmode = value_mode;
5300 if (unsignedp)
5301 return expand_and (tmode, temp,
5302 gen_int_mode (width_mask, tmode),
5303 NULL_RTX);
5305 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5306 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5307 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5310 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5311 NULL_RTX, value_mode, VOIDmode,
5312 int_size_in_bytes (type));
5314 return const0_rtx;
5316 else
5318 rtx addr = XEXP (target, 0);
5319 rtx to_rtx = target;
5321 /* If a value is wanted, it must be the lhs;
5322 so make the address stable for multiple use. */
5324 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5325 && ! CONSTANT_ADDRESS_P (addr)
5326 /* A frame-pointer reference is already stable. */
5327 && ! (GET_CODE (addr) == PLUS
5328 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5329 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5330 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5331 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5333 /* Now build a reference to just the desired component. */
5335 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5337 if (to_rtx == target)
5338 to_rtx = copy_rtx (to_rtx);
5340 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5341 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5342 set_mem_alias_set (to_rtx, alias_set);
5344 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5348 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5349 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5350 codes and find the ultimate containing object, which we return.
5352 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5353 bit position, and *PUNSIGNEDP to the signedness of the field.
5354 If the position of the field is variable, we store a tree
5355 giving the variable offset (in units) in *POFFSET.
5356 This offset is in addition to the bit position.
5357 If the position is not variable, we store 0 in *POFFSET.
5359 If any of the extraction expressions is volatile,
5360 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5362 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5363 is a mode that can be used to access the field. In that case, *PBITSIZE
5364 is redundant.
5366 If the field describes a variable-sized object, *PMODE is set to
5367 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5368 this case, but the address of the object can be found. */
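/* A usage sketch (modeled on the caller in expand_assignment above):

       HOST_WIDE_INT bitsize, bitpos;
       tree offset;
       enum machine_mode mode1;
       int unsignedp, volatilep = 0;
       tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
                                       &mode1, &unsignedp, &volatilep);

   TEM is then the ultimate containing object, and the other variables
   describe the referenced field as documented above.  */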
5370 tree
5371 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5372 punsignedp, pvolatilep)
5373 tree exp;
5374 HOST_WIDE_INT *pbitsize;
5375 HOST_WIDE_INT *pbitpos;
5376 tree *poffset;
5377 enum machine_mode *pmode;
5378 int *punsignedp;
5379 int *pvolatilep;
5381 tree size_tree = 0;
5382 enum machine_mode mode = VOIDmode;
5383 tree offset = size_zero_node;
5384 tree bit_offset = bitsize_zero_node;
5385 tree placeholder_ptr = 0;
5386 tree tem;
5388 /* First get the mode, signedness, and size. We do this from just the
5389 outermost expression. */
5390 if (TREE_CODE (exp) == COMPONENT_REF)
5392 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5393 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5394 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5396 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5398 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5400 size_tree = TREE_OPERAND (exp, 1);
5401 *punsignedp = TREE_UNSIGNED (exp);
5403 else
5405 mode = TYPE_MODE (TREE_TYPE (exp));
5406 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5408 if (mode == BLKmode)
5409 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5410 else
5411 *pbitsize = GET_MODE_BITSIZE (mode);
5414 if (size_tree != 0)
5416 if (! host_integerp (size_tree, 1))
5417 mode = BLKmode, *pbitsize = -1;
5418 else
5419 *pbitsize = tree_low_cst (size_tree, 1);
5422 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5423 and find the ultimate containing object. */
5424 while (1)
5426 if (TREE_CODE (exp) == BIT_FIELD_REF)
5427 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5428 else if (TREE_CODE (exp) == COMPONENT_REF)
5430 tree field = TREE_OPERAND (exp, 1);
5431 tree this_offset = DECL_FIELD_OFFSET (field);
5433 /* If this field hasn't been filled in yet, don't go
5434 past it. This should only happen when folding expressions
5435 made during type construction. */
5436 if (this_offset == 0)
5437 break;
5438 else if (! TREE_CONSTANT (this_offset)
5439 && contains_placeholder_p (this_offset))
5440 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5442 offset = size_binop (PLUS_EXPR, offset, this_offset);
5443 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5444 DECL_FIELD_BIT_OFFSET (field));
5446 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5449 else if (TREE_CODE (exp) == ARRAY_REF
5450 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5452 tree index = TREE_OPERAND (exp, 1);
5453 tree array = TREE_OPERAND (exp, 0);
5454 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5455 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5456 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5458 /* We assume all arrays have sizes that are a multiple of a byte.
5459 First subtract the lower bound, if any, in the type of the
5460 index, then convert to sizetype and multiply by the size of the
5461 array element. */
5462 if (low_bound != 0 && ! integer_zerop (low_bound))
5463 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5464 index, low_bound));
5466 /* If the index has a self-referential type, pass it to a
5467 WITH_RECORD_EXPR; if the component size does, pass our
5468 component to one. */
5469 if (! TREE_CONSTANT (index)
5470 && contains_placeholder_p (index))
5471 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5472 if (! TREE_CONSTANT (unit_size)
5473 && contains_placeholder_p (unit_size))
5474 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5476 offset = size_binop (PLUS_EXPR, offset,
5477 size_binop (MULT_EXPR,
5478 convert (sizetype, index),
5479 unit_size));
5482 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5484 tree new = find_placeholder (exp, &placeholder_ptr);
5486 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5487 We might have been called from tree optimization where we
5488 haven't set up an object yet. */
5489 if (new == 0)
5490 break;
5491 else
5492 exp = new;
5494 continue;
5496 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5497 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5498 && ! ((TREE_CODE (exp) == NOP_EXPR
5499 || TREE_CODE (exp) == CONVERT_EXPR)
5500 && (TYPE_MODE (TREE_TYPE (exp))
5501 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5502 break;
5504 /* If any reference in the chain is volatile, the effect is volatile. */
5505 if (TREE_THIS_VOLATILE (exp))
5506 *pvolatilep = 1;
5508 exp = TREE_OPERAND (exp, 0);
5511 /* If OFFSET is constant, see if we can return the whole thing as a
5512 constant bit position. Otherwise, split it up. */
5513 if (host_integerp (offset, 0)
5514 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5515 bitsize_unit_node))
5516 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5517 && host_integerp (tem, 0))
5518 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5519 else
5520 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5522 *pmode = mode;
5523 return exp;
5526 /* Return 1 if T is an expression that get_inner_reference handles. */
5529 handled_component_p (t)
5530 tree t;
5532 switch (TREE_CODE (t))
5534 case BIT_FIELD_REF:
5535 case COMPONENT_REF:
5536 case ARRAY_REF:
5537 case ARRAY_RANGE_REF:
5538 case NON_LVALUE_EXPR:
5539 case VIEW_CONVERT_EXPR:
5540 return 1;
5542 case NOP_EXPR:
5543 case CONVERT_EXPR:
5544 return (TYPE_MODE (TREE_TYPE (t))
5545 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5547 default:
5548 return 0;
5552 /* Given an rtx VALUE that may contain additions and multiplications, return
5553 an equivalent value that just refers to a register, memory, or constant.
5554 This is done by generating instructions to perform the arithmetic and
5555 returning a pseudo-register containing the value.
5557 The returned value may be a REG, SUBREG, MEM or constant. */
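/* A usage sketch (hypothetical): given VALUE containing arithmetic such
   as (plus (reg X) (mult (reg Y) (const_int 4))),

       rtx tmp = force_operand (value, NULL_RTX);

   emits insns for the multiply and add and returns a REG, SUBREG, MEM
   or constant holding the result, as described above.  */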
5560 force_operand (value, target)
5561 rtx value, target;
5563 rtx op1, op2;
5564 /* Use subtarget as the target for operand 0 of a binary operation. */
5565 rtx subtarget = get_subtarget (target);
5566 enum rtx_code code = GET_CODE (value);
5568 /* Check for a PIC address load. */
5569 if ((code == PLUS || code == MINUS)
5570 && XEXP (value, 0) == pic_offset_table_rtx
5571 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5572 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5573 || GET_CODE (XEXP (value, 1)) == CONST))
5575 if (!subtarget)
5576 subtarget = gen_reg_rtx (GET_MODE (value));
5577 emit_move_insn (subtarget, value);
5578 return subtarget;
5581 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5583 if (!target)
5584 target = gen_reg_rtx (GET_MODE (value));
5585 convert_move (target, force_operand (XEXP (value, 0), NULL),
5586 code == ZERO_EXTEND);
5587 return target;
5590 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5592 op2 = XEXP (value, 1);
5593 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5594 subtarget = 0;
5595 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5597 code = PLUS;
5598 op2 = negate_rtx (GET_MODE (value), op2);
5601 /* Check for an addition with OP2 a constant integer and our first
5602 operand a PLUS of a virtual register and something else. In that
5603 case, we want to emit the sum of the virtual register and the
5604 constant first and then add the other value. This allows virtual
5605 register instantiation to simply modify the constant rather than
5606 creating another one around this addition. */
5607 if (code == PLUS && GET_CODE (op2) == CONST_INT
5608 && GET_CODE (XEXP (value, 0)) == PLUS
5609 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5610 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5611 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5613 rtx temp = expand_simple_binop (GET_MODE (value), code,
5614 XEXP (XEXP (value, 0), 0), op2,
5615 subtarget, 0, OPTAB_LIB_WIDEN);
5616 return expand_simple_binop (GET_MODE (value), code, temp,
5617 force_operand (XEXP (XEXP (value,
5618 0), 1), 0),
5619 target, 0, OPTAB_LIB_WIDEN);
5622 op1 = force_operand (XEXP (value, 0), subtarget);
5623 op2 = force_operand (op2, NULL_RTX);
5624 switch (code)
5626 case MULT:
5627 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5628 case DIV:
5629 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5630 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5631 target, 1, OPTAB_LIB_WIDEN);
5632 else
5633 return expand_divmod (0,
5634 FLOAT_MODE_P (GET_MODE (value))
5635 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5636 GET_MODE (value), op1, op2, target, 0);
5637 break;
5638 case MOD:
5639 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5640 target, 0);
5641 break;
5642 case UDIV:
5643 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5644 target, 1);
5645 break;
5646 case UMOD:
5647 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5648 target, 1);
5649 break;
5650 case ASHIFTRT:
5651 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5652 target, 0, OPTAB_LIB_WIDEN);
5653 break;
5654 default:
5655 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5656 target, 1, OPTAB_LIB_WIDEN);
5659 if (GET_RTX_CLASS (code) == '1')
5661 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5662 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5665 #ifdef INSN_SCHEDULING
5666 /* On machines that have insn scheduling, we want all memory references to be
5667 explicit, so we need to deal with such paradoxical SUBREGs. */
5668 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5669 && (GET_MODE_SIZE (GET_MODE (value))
5670 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5671 value
5672 = simplify_gen_subreg (GET_MODE (value),
5673 force_reg (GET_MODE (SUBREG_REG (value)),
5674 force_operand (SUBREG_REG (value),
5675 NULL_RTX)),
5676 GET_MODE (SUBREG_REG (value)),
5677 SUBREG_BYTE (value));
5678 #endif
5680 return value;
5683 /* Subroutine of expand_expr: return nonzero iff there is no way that
5684 EXP can reference X, which is being modified. TOP_P is nonzero if this
5685 call is going to be used to determine whether we need a temporary
5686 for EXP, as opposed to a recursive call to this function.
5688 It is always safe for this routine to return zero since it merely
5689 searches for optimization opportunities. */
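/* Roughly speaking: when expanding something like X = A + CALL (...), this
   is what decides whether A may be computed directly into X's rtx or
   whether a fresh temporary is needed because the rest of the expression
   might still read X.  */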
5692 safe_from_p (x, exp, top_p)
5693 rtx x;
5694 tree exp;
5695 int top_p;
5697 rtx exp_rtl = 0;
5698 int i, nops;
5699 static tree save_expr_list;
5701 if (x == 0
5702 /* If EXP has varying size, we MUST use a target since we currently
5703 have no way of allocating temporaries of variable size
5704 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5705 So we assume here that something at a higher level has prevented a
5706 clash. This is somewhat bogus, but the best we can do. Only
5707 do this when X is BLKmode and when we are at the top level. */
5708 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5709 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5710 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5711 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5712 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5713 != INTEGER_CST)
5714 && GET_MODE (x) == BLKmode)
5715 /* If X is in the outgoing argument area, it is always safe. */
5716 || (GET_CODE (x) == MEM
5717 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5718 || (GET_CODE (XEXP (x, 0)) == PLUS
5719 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5720 return 1;
5722 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5723 find the underlying pseudo. */
5724 if (GET_CODE (x) == SUBREG)
5726 x = SUBREG_REG (x);
5727 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5728 return 0;
5731 /* A SAVE_EXPR might appear many times in the expression passed to the
5732 top-level safe_from_p call, and if it has a complex subexpression,
5733 examining it multiple times could result in a combinatorial explosion.
5734 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5735 with optimization took about 28 minutes to compile -- even though it was
5736 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5737 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5738 we have processed. Note that the only test of top_p was above. */
5740 if (top_p)
5742 int rtn;
5743 tree t;
5745 save_expr_list = 0;
5747 rtn = safe_from_p (x, exp, 0);
5749 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5750 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5752 return rtn;
5755 /* Now look at our tree code and possibly recurse. */
5756 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5758 case 'd':
5759 exp_rtl = DECL_RTL_IF_SET (exp);
5760 break;
5762 case 'c':
5763 return 1;
5765 case 'x':
5766 if (TREE_CODE (exp) == TREE_LIST)
5767 return ((TREE_VALUE (exp) == 0
5768 || safe_from_p (x, TREE_VALUE (exp), 0))
5769 && (TREE_CHAIN (exp) == 0
5770 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5771 else if (TREE_CODE (exp) == ERROR_MARK)
5772 return 1; /* An already-visited SAVE_EXPR? */
5773 else
5774 return 0;
5776 case '1':
5777 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5779 case '2':
5780 case '<':
5781 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5782 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5784 case 'e':
5785 case 'r':
5786 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5787 the expression. If it is set, we conflict iff we are that rtx or
5788 both are in memory. Otherwise, we check all operands of the
5789 expression recursively. */
5791 switch (TREE_CODE (exp))
5793 case ADDR_EXPR:
5794 /* If the operand is static or we are static, we can't conflict.
5795 Likewise if we don't conflict with the operand at all. */
5796 if (staticp (TREE_OPERAND (exp, 0))
5797 || TREE_STATIC (exp)
5798 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5799 return 1;
5801 /* Otherwise, the only way this can conflict is if we are taking
5802 the address of a DECL whose address is part of X, which is
5803 very rare. */
5804 exp = TREE_OPERAND (exp, 0);
5805 if (DECL_P (exp))
5807 if (!DECL_RTL_SET_P (exp)
5808 || GET_CODE (DECL_RTL (exp)) != MEM)
5809 return 0;
5810 else
5811 exp_rtl = XEXP (DECL_RTL (exp), 0);
5813 break;
5815 case INDIRECT_REF:
5816 if (GET_CODE (x) == MEM
5817 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5818 get_alias_set (exp)))
5819 return 0;
5820 break;
5822 case CALL_EXPR:
5823 /* Assume that the call will clobber all hard registers and
5824 all of memory. */
5825 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5826 || GET_CODE (x) == MEM)
5827 return 0;
5828 break;
5830 case RTL_EXPR:
5831 /* If a sequence exists, we would have to scan every instruction
5832 in the sequence to see if it was safe. This is probably not
5833 worthwhile. */
5834 if (RTL_EXPR_SEQUENCE (exp))
5835 return 0;
5837 exp_rtl = RTL_EXPR_RTL (exp);
5838 break;
5840 case WITH_CLEANUP_EXPR:
5841 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5842 break;
5844 case CLEANUP_POINT_EXPR:
5845 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5847 case SAVE_EXPR:
5848 exp_rtl = SAVE_EXPR_RTL (exp);
5849 if (exp_rtl)
5850 break;
5852 /* If we've already scanned this, don't do it again. Otherwise,
5853 show we've scanned it and record for clearing the flag if we're
5854 going on. */
5855 if (TREE_PRIVATE (exp))
5856 return 1;
5858 TREE_PRIVATE (exp) = 1;
5859 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5861 TREE_PRIVATE (exp) = 0;
5862 return 0;
5865 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5866 return 1;
5868 case BIND_EXPR:
5869 /* The only operand we look at is operand 1. The rest aren't
5870 part of the expression. */
5871 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5873 case METHOD_CALL_EXPR:
5874 /* This takes an rtx argument, but shouldn't appear here. */
5875 abort ();
5877 default:
5878 break;
5881 /* If we have an rtx, we do not need to scan our operands. */
5882 if (exp_rtl)
5883 break;
5885 nops = first_rtl_op (TREE_CODE (exp));
5886 for (i = 0; i < nops; i++)
5887 if (TREE_OPERAND (exp, i) != 0
5888 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5889 return 0;
5891 /* If this is a language-specific tree code, it may require
5892 special handling. */
5893 if ((unsigned int) TREE_CODE (exp)
5894 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5895 && !(*lang_hooks.safe_from_p) (x, exp))
5896 return 0;
5899 /* If we have an rtl, find any enclosed object. Then see if we conflict
5900 with it. */
5901 if (exp_rtl)
5903 if (GET_CODE (exp_rtl) == SUBREG)
5905 exp_rtl = SUBREG_REG (exp_rtl);
5906 if (GET_CODE (exp_rtl) == REG
5907 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5908 return 0;
5911 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5912 are memory and they conflict. */
5913 return ! (rtx_equal_p (x, exp_rtl)
5914 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5915 && true_dependence (exp_rtl, VOIDmode, x,
5916 rtx_addr_varies_p)));
5919 /* If we reach here, it is safe. */
5920 return 1;
5923 /* Subroutine of expand_expr: return rtx if EXP is a
5924 variable or parameter; else return 0. */
5926 static rtx
5927 var_rtx (exp)
5928 tree exp;
5930 STRIP_NOPS (exp);
5931 switch (TREE_CODE (exp))
5933 case PARM_DECL:
5934 case VAR_DECL:
5935 return DECL_RTL (exp);
5936 default:
5937 return 0;
5941 #ifdef MAX_INTEGER_COMPUTATION_MODE
5943 void
5944 check_max_integer_computation_mode (exp)
5945 tree exp;
5947 enum tree_code code;
5948 enum machine_mode mode;
5950 /* Strip any NOPs that don't change the mode. */
5951 STRIP_NOPS (exp);
5952 code = TREE_CODE (exp);
5954 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5955 if (code == NOP_EXPR
5956 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5957 return;
5959 /* First check the type of the overall operation. We need only look at
5960 unary, binary and relational operations. */
5961 if (TREE_CODE_CLASS (code) == '1'
5962 || TREE_CODE_CLASS (code) == '2'
5963 || TREE_CODE_CLASS (code) == '<')
5965 mode = TYPE_MODE (TREE_TYPE (exp));
5966 if (GET_MODE_CLASS (mode) == MODE_INT
5967 && mode > MAX_INTEGER_COMPUTATION_MODE)
5968 internal_error ("unsupported wide integer operation");
5971 /* Check operand of a unary op. */
5972 if (TREE_CODE_CLASS (code) == '1')
5974 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5975 if (GET_MODE_CLASS (mode) == MODE_INT
5976 && mode > MAX_INTEGER_COMPUTATION_MODE)
5977 internal_error ("unsupported wide integer operation");
5980 /* Check operands of a binary/comparison op. */
5981 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5983 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5984 if (GET_MODE_CLASS (mode) == MODE_INT
5985 && mode > MAX_INTEGER_COMPUTATION_MODE)
5986 internal_error ("unsupported wide integer operation");
5988 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5989 if (GET_MODE_CLASS (mode) == MODE_INT
5990 && mode > MAX_INTEGER_COMPUTATION_MODE)
5991 internal_error ("unsupported wide integer operation");
5994 #endif
5996 /* Return the highest power of two that EXP is known to be a multiple of.
5997 This is used in updating alignment of MEMs in array references. */
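/* Informal examples: for the constant 48 the result is 16 (the lowest set
   bit); for I * 4 it is 4 times whatever factor I has; for I * 4 + 2 the
   PLUS_EXPR case takes the minimum of the operands' factors, giving 2;
   anything unrecognized falls back to the conservative answer 1.  */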
5999 static HOST_WIDE_INT
6000 highest_pow2_factor (exp)
6001 tree exp;
6003 HOST_WIDE_INT c0, c1;
6005 switch (TREE_CODE (exp))
6007 case INTEGER_CST:
6008 /* We can find the lowest bit that's a one. If the low
6009 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6010 We need to handle this case since we can find it in a COND_EXPR,
6011 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6012 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6013 later ICE. */
6014 if (TREE_CONSTANT_OVERFLOW (exp))
6015 return BIGGEST_ALIGNMENT;
6016 else
6018 /* Note: tree_low_cst is intentionally not used here,
6019 we don't care about the upper bits. */
6020 c0 = TREE_INT_CST_LOW (exp);
6021 c0 &= -c0;
6022 return c0 ? c0 : BIGGEST_ALIGNMENT;
6024 break;
6026 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6027 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6028 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6029 return MIN (c0, c1);
6031 case MULT_EXPR:
6032 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6033 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6034 return c0 * c1;
6036 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6037 case CEIL_DIV_EXPR:
6038 if (integer_pow2p (TREE_OPERAND (exp, 1))
6039 && host_integerp (TREE_OPERAND (exp, 1), 1))
6041 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6042 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6043 return MAX (1, c0 / c1);
6045 break;
6047 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6048 case SAVE_EXPR: case WITH_RECORD_EXPR:
6049 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6051 case COMPOUND_EXPR:
6052 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6054 case COND_EXPR:
6055 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6056 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6057 return MIN (c0, c1);
6059 default:
6060 break;
6063 return 1;
6066 /* Similar, except that it is known that the expression must be a multiple
6067 of the alignment of TYPE. */
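/* E.g. for a type with 4-byte alignment, an expression whose only known
   power-of-two factor is 1 is still reported as a multiple of 4.  */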
6069 static HOST_WIDE_INT
6070 highest_pow2_factor_for_type (type, exp)
6071 tree type;
6072 tree exp;
6074 HOST_WIDE_INT type_align, factor;
6076 factor = highest_pow2_factor (exp);
6077 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6078 return MAX (factor, type_align);
6081 /* Return an object on the placeholder list that matches EXP, a
6082 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6083 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6084 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6085 is a location which initially points to a starting location in the
6086 placeholder list (zero means start of the list) and where a pointer into
6087 the placeholder list at which the object is found is placed. */
6089 tree
6090 find_placeholder (exp, plist)
6091 tree exp;
6092 tree *plist;
6094 tree type = TREE_TYPE (exp);
6095 tree placeholder_expr;
6097 for (placeholder_expr
6098 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6099 placeholder_expr != 0;
6100 placeholder_expr = TREE_CHAIN (placeholder_expr))
6102 tree need_type = TYPE_MAIN_VARIANT (type);
6103 tree elt;
6105 /* Find the outermost reference that is of the type we want. If none,
6106 see if any object has a type that is a pointer to the type we
6107 want. */
6108 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6109 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6110 || TREE_CODE (elt) == COND_EXPR)
6111 ? TREE_OPERAND (elt, 1)
6112 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6113 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6114 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6115 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6116 ? TREE_OPERAND (elt, 0) : 0))
6117 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6119 if (plist)
6120 *plist = placeholder_expr;
6121 return elt;
6124 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6126 = ((TREE_CODE (elt) == COMPOUND_EXPR
6127 || TREE_CODE (elt) == COND_EXPR)
6128 ? TREE_OPERAND (elt, 1)
6129 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6130 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6131 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6132 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6133 ? TREE_OPERAND (elt, 0) : 0))
6134 if (POINTER_TYPE_P (TREE_TYPE (elt))
6135 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6136 == need_type))
6138 if (plist)
6139 *plist = placeholder_expr;
6140 return build1 (INDIRECT_REF, need_type, elt);
6144 return 0;
6147 /* expand_expr: generate code for computing expression EXP.
6148 An rtx for the computed value is returned. The value is never null.
6149 In the case of a void EXP, const0_rtx is returned.
6151 The value may be stored in TARGET if TARGET is nonzero.
6152 TARGET is just a suggestion; callers must assume that
6153 the rtx returned may not be the same as TARGET.
6155 If TARGET is CONST0_RTX, it means that the value will be ignored.
6157 If TMODE is not VOIDmode, it suggests generating the
6158 result in mode TMODE. But this is done only when convenient.
6159 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6160 TMODE is just a suggestion; callers must assume that
6161 the rtx returned may not have mode TMODE.
6163 Note that TARGET may have neither TMODE nor MODE. In that case, it
6164 probably will not be used.
6166 If MODIFIER is EXPAND_SUM then when EXP is an addition
6167 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6168 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6169 products as above, or REG or MEM, or constant.
6170 Ordinarily in such cases we would output mul or add instructions
6171 and then return a pseudo reg containing the sum.
6173 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6174 it also marks a label as absolutely required (it can't be dead).
6175 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6176 This is used for outputting expressions used in initializers.
6178 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6179 with a constant address even if that address is not normally legitimate.
6180 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
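/* As an informal illustration of EXPAND_SUM: expanding the address
   arithmetic for A[I] may come back as (plus (reg A') (mult (reg I') (const_int 4)))
   rather than a single pseudo, leaving it to the caller -- typically
   memory_address -- to legitimize the sum.  */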
6183 expand_expr (exp, target, tmode, modifier)
6184 tree exp;
6185 rtx target;
6186 enum machine_mode tmode;
6187 enum expand_modifier modifier;
6189 rtx op0, op1, temp;
6190 tree type = TREE_TYPE (exp);
6191 int unsignedp = TREE_UNSIGNED (type);
6192 enum machine_mode mode;
6193 enum tree_code code = TREE_CODE (exp);
6194 optab this_optab;
6195 rtx subtarget, original_target;
6196 int ignore;
6197 tree context;
6199 /* Handle ERROR_MARK before anybody tries to access its type. */
6200 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6202 op0 = CONST0_RTX (tmode);
6203 if (op0 != 0)
6204 return op0;
6205 return const0_rtx;
6208 mode = TYPE_MODE (type);
6209 /* Use subtarget as the target for operand 0 of a binary operation. */
6210 subtarget = get_subtarget (target);
6211 original_target = target;
6212 ignore = (target == const0_rtx
6213 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6214 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6215 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6216 && TREE_CODE (type) == VOID_TYPE));
6218 /* If we are going to ignore this result, we need only do something
6219 if there is a side-effect somewhere in the expression. If there
6220 is, short-circuit the most common cases here. Note that we must
6221 not call expand_expr with anything but const0_rtx in case this
6222 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6224 if (ignore)
6226 if (! TREE_SIDE_EFFECTS (exp))
6227 return const0_rtx;
6229 /* Ensure we reference a volatile object even if value is ignored, but
6230 don't do this if all we are doing is taking its address. */
6231 if (TREE_THIS_VOLATILE (exp)
6232 && TREE_CODE (exp) != FUNCTION_DECL
6233 && mode != VOIDmode && mode != BLKmode
6234 && modifier != EXPAND_CONST_ADDRESS)
6236 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6237 if (GET_CODE (temp) == MEM)
6238 temp = copy_to_reg (temp);
6239 return const0_rtx;
6242 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6243 || code == INDIRECT_REF || code == BUFFER_REF)
6244 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6245 modifier);
6247 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6248 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6250 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6251 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6252 return const0_rtx;
6254 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6255 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6256 /* If the second operand has no side effects, just evaluate
6257 the first. */
6258 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6259 modifier);
6260 else if (code == BIT_FIELD_REF)
6262 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6263 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6264 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6265 return const0_rtx;
6268 target = 0;
6271 #ifdef MAX_INTEGER_COMPUTATION_MODE
6272 /* Only check stuff here if the mode we want is different from the mode
6273 of the expression; if it's the same, check_max_integer_computation_mode
6274 will handle it. Do we really need to check this stuff at all? */
6276 if (target
6277 && GET_MODE (target) != mode
6278 && TREE_CODE (exp) != INTEGER_CST
6279 && TREE_CODE (exp) != PARM_DECL
6280 && TREE_CODE (exp) != ARRAY_REF
6281 && TREE_CODE (exp) != ARRAY_RANGE_REF
6282 && TREE_CODE (exp) != COMPONENT_REF
6283 && TREE_CODE (exp) != BIT_FIELD_REF
6284 && TREE_CODE (exp) != INDIRECT_REF
6285 && TREE_CODE (exp) != CALL_EXPR
6286 && TREE_CODE (exp) != VAR_DECL
6287 && TREE_CODE (exp) != RTL_EXPR)
6289 enum machine_mode mode = GET_MODE (target);
6291 if (GET_MODE_CLASS (mode) == MODE_INT
6292 && mode > MAX_INTEGER_COMPUTATION_MODE)
6293 internal_error ("unsupported wide integer operation");
6296 if (tmode != mode
6297 && TREE_CODE (exp) != INTEGER_CST
6298 && TREE_CODE (exp) != PARM_DECL
6299 && TREE_CODE (exp) != ARRAY_REF
6300 && TREE_CODE (exp) != ARRAY_RANGE_REF
6301 && TREE_CODE (exp) != COMPONENT_REF
6302 && TREE_CODE (exp) != BIT_FIELD_REF
6303 && TREE_CODE (exp) != INDIRECT_REF
6304 && TREE_CODE (exp) != VAR_DECL
6305 && TREE_CODE (exp) != CALL_EXPR
6306 && TREE_CODE (exp) != RTL_EXPR
6307 && GET_MODE_CLASS (tmode) == MODE_INT
6308 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6309 internal_error ("unsupported wide integer operation");
6311 check_max_integer_computation_mode (exp);
6312 #endif
6314 /* If we will do cse, generate all results into pseudo registers
6315 since 1) that allows cse to find more things
6316 and 2) otherwise cse could produce an insn the machine
6317 cannot support. An exception is a CONSTRUCTOR into a multi-word
6318 MEM: that's much more likely to be most efficient into the MEM. */
6320 if (! cse_not_expected && mode != BLKmode && target
6321 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6322 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6323 target = subtarget;
6325 switch (code)
6327 case LABEL_DECL:
6329 tree function = decl_function_context (exp);
6330 /* Handle using a label in a containing function. */
6331 if (function != current_function_decl
6332 && function != inline_function_decl && function != 0)
6334 struct function *p = find_function_data (function);
6335 p->expr->x_forced_labels
6336 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6337 p->expr->x_forced_labels);
6339 else
6341 if (modifier == EXPAND_INITIALIZER)
6342 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6343 label_rtx (exp),
6344 forced_labels);
6347 temp = gen_rtx_MEM (FUNCTION_MODE,
6348 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6349 if (function != current_function_decl
6350 && function != inline_function_decl && function != 0)
6351 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6352 return temp;
6355 case PARM_DECL:
6356 if (DECL_RTL (exp) == 0)
6358 error_with_decl (exp, "prior parameter's size depends on `%s'");
6359 return CONST0_RTX (mode);
6362 /* ... fall through ... */
6364 case VAR_DECL:
6365 /* If a static var's type was incomplete when the decl was written,
6366 but the type is complete now, lay out the decl now. */
6367 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6368 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6370 rtx value = DECL_RTL_IF_SET (exp);
6372 layout_decl (exp, 0);
6374 /* If the RTL was already set, update its mode and memory
6375 attributes. */
6376 if (value != 0)
6378 PUT_MODE (value, DECL_MODE (exp));
6379 SET_DECL_RTL (exp, 0);
6380 set_mem_attributes (value, exp, 1);
6381 SET_DECL_RTL (exp, value);
6385 /* ... fall through ... */
6387 case FUNCTION_DECL:
6388 case RESULT_DECL:
6389 if (DECL_RTL (exp) == 0)
6390 abort ();
6392 /* Ensure the variable is marked as used even if it doesn't go through
6393 a parser. If it hasn't been used yet, write out an external
6394 definition. */
6395 if (! TREE_USED (exp))
6397 assemble_external (exp);
6398 TREE_USED (exp) = 1;
6401 /* Show we haven't gotten RTL for this yet. */
6402 temp = 0;
6404 /* Handle variables inherited from containing functions. */
6405 context = decl_function_context (exp);
6407 /* We treat inline_function_decl as an alias for the current function
6408 because that is the inline function whose vars, types, etc.
6409 are being merged into the current function.
6410 See expand_inline_function. */
6412 if (context != 0 && context != current_function_decl
6413 && context != inline_function_decl
6414 /* If var is static, we don't need a static chain to access it. */
6415 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6416 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6418 rtx addr;
6420 /* Mark as non-local and addressable. */
6421 DECL_NONLOCAL (exp) = 1;
6422 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6423 abort ();
6424 (*lang_hooks.mark_addressable) (exp);
6425 if (GET_CODE (DECL_RTL (exp)) != MEM)
6426 abort ();
6427 addr = XEXP (DECL_RTL (exp), 0);
6428 if (GET_CODE (addr) == MEM)
6429 addr
6430 = replace_equiv_address (addr,
6431 fix_lexical_addr (XEXP (addr, 0), exp));
6432 else
6433 addr = fix_lexical_addr (addr, exp);
6435 temp = replace_equiv_address (DECL_RTL (exp), addr);
6438 /* This is the case of an array whose size is to be determined
6439 from its initializer, while the initializer is still being parsed.
6440 See expand_decl. */
6442 else if (GET_CODE (DECL_RTL (exp)) == MEM
6443 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6444 temp = validize_mem (DECL_RTL (exp));
6446 /* If DECL_RTL is memory, we are in the normal case and either
6447 the address is not valid or it is not a register and -fforce-addr
6448 is specified, get the address into a register. */
6450 else if (GET_CODE (DECL_RTL (exp)) == MEM
6451 && modifier != EXPAND_CONST_ADDRESS
6452 && modifier != EXPAND_SUM
6453 && modifier != EXPAND_INITIALIZER
6454 && (! memory_address_p (DECL_MODE (exp),
6455 XEXP (DECL_RTL (exp), 0))
6456 || (flag_force_addr
6457 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6458 temp = replace_equiv_address (DECL_RTL (exp),
6459 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6461 /* If we got something, return it. But first, set the alignment
6462 if the address is a register. */
6463 if (temp != 0)
6465 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6466 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6468 return temp;
6471 /* If the mode of DECL_RTL does not match that of the decl, it
6472 must be a promoted value. We return a SUBREG of the wanted mode,
6473 but mark it so that we know that it was already extended. */
6475 if (GET_CODE (DECL_RTL (exp)) == REG
6476 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6478 /* Get the signedness used for this variable. Ensure we get the
6479 same mode we got when the variable was declared. */
6480 if (GET_MODE (DECL_RTL (exp))
6481 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6482 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6483 abort ();
6485 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6486 SUBREG_PROMOTED_VAR_P (temp) = 1;
6487 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6488 return temp;
6491 return DECL_RTL (exp);
6493 case INTEGER_CST:
6494 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6495 TREE_INT_CST_HIGH (exp), mode);
6497 /* ??? If overflow is set, fold will have done an incomplete job,
6498 which can result in (plus xx (const_int 0)), which can get
6499 simplified by validate_replace_rtx during virtual register
6500 instantiation, which can result in unrecognizable insns.
6501 Avoid this by forcing all overflows into registers. */
6502 if (TREE_CONSTANT_OVERFLOW (exp)
6503 && modifier != EXPAND_INITIALIZER)
6504 temp = force_reg (mode, temp);
6506 return temp;
6508 case CONST_DECL:
6509 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6511 case REAL_CST:
6512 /* If optimized, generate immediate CONST_DOUBLE
6513 which will be turned into memory by reload if necessary.
6515 We used to force a register so that loop.c could see it. But
6516 this does not allow gen_* patterns to perform optimizations with
6517 the constants. It also produces two insns in cases like "x = 1.0;".
6518 On most machines, floating-point constants are not permitted in
6519 many insns, so we'd end up copying it to a register in any case.
6521 Now, we do the copying in expand_binop, if appropriate. */
6522 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6523 TYPE_MODE (TREE_TYPE (exp)));
6525 case COMPLEX_CST:
6526 case STRING_CST:
6527 if (! TREE_CST_RTL (exp))
6528 output_constant_def (exp, 1);
6530 /* TREE_CST_RTL probably contains a constant address.
6531 On RISC machines where a constant address isn't valid,
6532 make some insns to get that address into a register. */
6533 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6534 && modifier != EXPAND_CONST_ADDRESS
6535 && modifier != EXPAND_INITIALIZER
6536 && modifier != EXPAND_SUM
6537 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6538 || (flag_force_addr
6539 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6540 return replace_equiv_address (TREE_CST_RTL (exp),
6541 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6542 return TREE_CST_RTL (exp);
6544 case EXPR_WITH_FILE_LOCATION:
6546 rtx to_return;
6547 const char *saved_input_filename = input_filename;
6548 int saved_lineno = lineno;
6549 input_filename = EXPR_WFL_FILENAME (exp);
6550 lineno = EXPR_WFL_LINENO (exp);
6551 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6552 emit_line_note (input_filename, lineno);
6553 /* Possibly avoid switching back and forth here. */
6554 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6555 input_filename = saved_input_filename;
6556 lineno = saved_lineno;
6557 return to_return;
6560 case SAVE_EXPR:
6561 context = decl_function_context (exp);
6563 /* If this SAVE_EXPR was at global context, assume we are an
6564 initialization function and move it into our context. */
6565 if (context == 0)
6566 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6568 /* We treat inline_function_decl as an alias for the current function
6569 because that is the inline function whose vars, types, etc.
6570 are being merged into the current function.
6571 See expand_inline_function. */
6572 if (context == current_function_decl || context == inline_function_decl)
6573 context = 0;
6575 /* If this is non-local, handle it. */
6576 if (context)
6578 /* The following call just exists to abort if the context is
6579 not of a containing function. */
6580 find_function_data (context);
6582 temp = SAVE_EXPR_RTL (exp);
6583 if (temp && GET_CODE (temp) == REG)
6585 put_var_into_stack (exp);
6586 temp = SAVE_EXPR_RTL (exp);
6588 if (temp == 0 || GET_CODE (temp) != MEM)
6589 abort ();
6590 return
6591 replace_equiv_address (temp,
6592 fix_lexical_addr (XEXP (temp, 0), exp));
6594 if (SAVE_EXPR_RTL (exp) == 0)
6596 if (mode == VOIDmode)
6597 temp = const0_rtx;
6598 else
6599 temp = assign_temp (build_qualified_type (type,
6600 (TYPE_QUALS (type)
6601 | TYPE_QUAL_CONST)),
6602 3, 0, 0);
6604 SAVE_EXPR_RTL (exp) = temp;
6605 if (!optimize && GET_CODE (temp) == REG)
6606 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6607 save_expr_regs);
6609 /* If the mode of TEMP does not match that of the expression, it
6610 must be a promoted value. We pass store_expr a SUBREG of the
6611 wanted mode but mark it so that we know that it was already
6612 extended. Note that `unsignedp' was modified above in
6613 this case. */
6615 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6617 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6618 SUBREG_PROMOTED_VAR_P (temp) = 1;
6619 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6622 if (temp == const0_rtx)
6623 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6624 else
6625 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6627 TREE_USED (exp) = 1;
6630 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6631 must be a promoted value. We return a SUBREG of the wanted mode,
6632 but mark it so that we know that it was already extended. */
6634 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6635 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6637 /* Compute the signedness and make the proper SUBREG. */
6638 promote_mode (type, mode, &unsignedp, 0);
6639 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6640 SUBREG_PROMOTED_VAR_P (temp) = 1;
6641 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6642 return temp;
6645 return SAVE_EXPR_RTL (exp);
6647 case UNSAVE_EXPR:
6649 rtx temp;
6650 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6651 TREE_OPERAND (exp, 0)
6652 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6653 return temp;
6656 case PLACEHOLDER_EXPR:
6658 tree old_list = placeholder_list;
6659 tree placeholder_expr = 0;
6661 exp = find_placeholder (exp, &placeholder_expr);
6662 if (exp == 0)
6663 abort ();
6665 placeholder_list = TREE_CHAIN (placeholder_expr);
6666 temp = expand_expr (exp, original_target, tmode, modifier);
6667 placeholder_list = old_list;
6668 return temp;
6671 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6672 abort ();
6674 case WITH_RECORD_EXPR:
6675 /* Put the object on the placeholder list, expand our first operand,
6676 and pop the list. */
6677 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6678 placeholder_list);
6679 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6680 modifier);
6681 placeholder_list = TREE_CHAIN (placeholder_list);
6682 return target;
6684 case GOTO_EXPR:
6685 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6686 expand_goto (TREE_OPERAND (exp, 0));
6687 else
6688 expand_computed_goto (TREE_OPERAND (exp, 0));
6689 return const0_rtx;
6691 case EXIT_EXPR:
6692 expand_exit_loop_if_false (NULL,
6693 invert_truthvalue (TREE_OPERAND (exp, 0)));
6694 return const0_rtx;
6696 case LABELED_BLOCK_EXPR:
6697 if (LABELED_BLOCK_BODY (exp))
6698 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6699 /* Should perhaps use expand_label, but this is simpler and safer. */
6700 do_pending_stack_adjust ();
6701 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6702 return const0_rtx;
6704 case EXIT_BLOCK_EXPR:
6705 if (EXIT_BLOCK_RETURN (exp))
6706 sorry ("returned value in block_exit_expr");
6707 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6708 return const0_rtx;
6710 case LOOP_EXPR:
6711 push_temp_slots ();
6712 expand_start_loop (1);
6713 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6714 expand_end_loop ();
6715 pop_temp_slots ();
6717 return const0_rtx;
6719 case BIND_EXPR:
6721 tree vars = TREE_OPERAND (exp, 0);
6722 int vars_need_expansion = 0;
6724 /* Need to open a binding contour here because
6725 if there are any cleanups they must be contained here. */
6726 expand_start_bindings (2);
6728 /* Mark the corresponding BLOCK for output in its proper place. */
6729 if (TREE_OPERAND (exp, 2) != 0
6730 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6731 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6733 /* If VARS have not yet been expanded, expand them now. */
6734 while (vars)
6736 if (!DECL_RTL_SET_P (vars))
6738 vars_need_expansion = 1;
6739 expand_decl (vars);
6741 expand_decl_init (vars);
6742 vars = TREE_CHAIN (vars);
6745 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6747 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6749 return temp;
6752 case RTL_EXPR:
6753 if (RTL_EXPR_SEQUENCE (exp))
6755 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6756 abort ();
6757 emit_insn (RTL_EXPR_SEQUENCE (exp));
6758 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6760 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6761 free_temps_for_rtl_expr (exp);
6762 return RTL_EXPR_RTL (exp);
6764 case CONSTRUCTOR:
6765 /* If we don't need the result, just ensure we evaluate any
6766 subexpressions. */
6767 if (ignore)
6769 tree elt;
6771 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6772 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6774 return const0_rtx;
6777 /* All elts simple constants => refer to a constant in memory. But
6778 if this is a non-BLKmode mode, let it store a field at a time
6779 since that should make a CONST_INT or CONST_DOUBLE when we
6780 fold. Likewise, if we have a target we can use, it is best to
6781 store directly into the target unless the type is large enough
6782 that memcpy will be used. If we are making an initializer and
6783 all operands are constant, put it in memory as well.
6785 FIXME: Avoid trying to fill vector constructors piecemeal.
6786 Output them with output_constant_def below unless we're sure
6787 they're zeros. This should go away when vector initializers
6788 are treated like VECTOR_CST instead of arrays.
6790 else if ((TREE_STATIC (exp)
6791 && ((mode == BLKmode
6792 && ! (target != 0 && safe_from_p (target, exp, 1)))
6793 || TREE_ADDRESSABLE (exp)
6794 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6795 && (! MOVE_BY_PIECES_P
6796 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6797 TYPE_ALIGN (type)))
6798 && ((TREE_CODE (type) == VECTOR_TYPE
6799 && !is_zeros_p (exp))
6800 || ! mostly_zeros_p (exp)))))
6801 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6803 rtx constructor = output_constant_def (exp, 1);
6805 if (modifier != EXPAND_CONST_ADDRESS
6806 && modifier != EXPAND_INITIALIZER
6807 && modifier != EXPAND_SUM)
6808 constructor = validize_mem (constructor);
6810 return constructor;
6812 else
6814 /* Handle calls that pass values in multiple non-contiguous
6815 locations. The Irix 6 ABI has examples of this. */
6816 if (target == 0 || ! safe_from_p (target, exp, 1)
6817 || GET_CODE (target) == PARALLEL)
6818 target
6819 = assign_temp (build_qualified_type (type,
6820 (TYPE_QUALS (type)
6821 | (TREE_READONLY (exp)
6822 * TYPE_QUAL_CONST))),
6823 0, TREE_ADDRESSABLE (exp), 1);
6825 store_constructor (exp, target, 0,
6826 int_size_in_bytes (TREE_TYPE (exp)));
6827 return target;
6830 case INDIRECT_REF:
6832 tree exp1 = TREE_OPERAND (exp, 0);
6833 tree index;
6834 tree string = string_constant (exp1, &index);
6836 /* Try to optimize reads from const strings. */
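      /* E.g. a single-byte read through *("abc" + 1) can be folded to the
	 constant 'b' instead of emitting a load, as long as we are not
	 expanding for a write.  */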
6837 if (string
6838 && TREE_CODE (string) == STRING_CST
6839 && TREE_CODE (index) == INTEGER_CST
6840 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6841 && GET_MODE_CLASS (mode) == MODE_INT
6842 && GET_MODE_SIZE (mode) == 1
6843 && modifier != EXPAND_WRITE)
6844 return gen_int_mode (TREE_STRING_POINTER (string)
6845 [TREE_INT_CST_LOW (index)], mode);
6847 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6848 op0 = memory_address (mode, op0);
6849 temp = gen_rtx_MEM (mode, op0);
6850 set_mem_attributes (temp, exp, 0);
6852 /* If we are writing to this object and its type is a record with
6853 readonly fields, we must mark it as readonly so it will
6854 conflict with readonly references to those fields. */
6855 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6856 RTX_UNCHANGING_P (temp) = 1;
6858 return temp;
6861 case ARRAY_REF:
6862 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6863 abort ();
6866 tree array = TREE_OPERAND (exp, 0);
6867 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6868 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6869 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6870 HOST_WIDE_INT i;
6872 /* Optimize the special-case of a zero lower bound.
6874 We convert the low_bound to sizetype to avoid some problems
6875 with constant folding. (E.g. suppose the lower bound is 1,
6876 and its mode is QI. Without the conversion, (ARRAY
6877 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6878 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6880 if (! integer_zerop (low_bound))
6881 index = size_diffop (index, convert (sizetype, low_bound));
6883 /* Fold an expression like: "foo"[2].
6884 This is not done in fold so it won't happen inside &.
6885 Don't fold if this is for wide characters since it's too
6886 difficult to do correctly and this is a very rare case. */
6888 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6889 && TREE_CODE (array) == STRING_CST
6890 && TREE_CODE (index) == INTEGER_CST
6891 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6892 && GET_MODE_CLASS (mode) == MODE_INT
6893 && GET_MODE_SIZE (mode) == 1)
6894 return gen_int_mode (TREE_STRING_POINTER (array)
6895 [TREE_INT_CST_LOW (index)], mode);
6897 /* If this is a constant index into a constant array,
6898 just get the value from the array. Handle both the cases when
6899 we have an explicit constructor and when our operand is a variable
6900 that was declared const. */
6902 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6903 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6904 && TREE_CODE (index) == INTEGER_CST
6905 && 0 > compare_tree_int (index,
6906 list_length (CONSTRUCTOR_ELTS
6907 (TREE_OPERAND (exp, 0)))))
6909 tree elem;
6911 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6912 i = TREE_INT_CST_LOW (index);
6913 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6916 if (elem)
6917 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6918 modifier);
6921 else if (optimize >= 1
6922 && modifier != EXPAND_CONST_ADDRESS
6923 && modifier != EXPAND_INITIALIZER
6924 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6925 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6926 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6928 if (TREE_CODE (index) == INTEGER_CST)
6930 tree init = DECL_INITIAL (array);
6932 if (TREE_CODE (init) == CONSTRUCTOR)
6934 tree elem;
6936 for (elem = CONSTRUCTOR_ELTS (init);
6937 (elem
6938 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6939 elem = TREE_CHAIN (elem))
6942 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6943 return expand_expr (fold (TREE_VALUE (elem)), target,
6944 tmode, modifier);
6946 else if (TREE_CODE (init) == STRING_CST
6947 && 0 > compare_tree_int (index,
6948 TREE_STRING_LENGTH (init)))
6950 tree type = TREE_TYPE (TREE_TYPE (init));
6951 enum machine_mode mode = TYPE_MODE (type);
6953 if (GET_MODE_CLASS (mode) == MODE_INT
6954 && GET_MODE_SIZE (mode) == 1)
6955 return gen_int_mode (TREE_STRING_POINTER (init)
6956 [TREE_INT_CST_LOW (index)], mode);
6961 /* Fall through. */
6963 case COMPONENT_REF:
6964 case BIT_FIELD_REF:
6965 case ARRAY_RANGE_REF:
6966 /* If the operand is a CONSTRUCTOR, we can just extract the
6967 appropriate field if it is present. Don't do this if we have
6968 already written the data since we want to refer to that copy
6969 and varasm.c assumes that's what we'll do. */
6970 if (code == COMPONENT_REF
6971 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6972 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6974 tree elt;
6976 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6977 elt = TREE_CHAIN (elt))
6978 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6979 /* We can normally use the value of the field in the
6980 CONSTRUCTOR. However, if this is a bitfield in
6981 an integral mode that we can fit in a HOST_WIDE_INT,
6982 we must mask only the number of bits in the bitfield,
6983 since this is done implicitly by the constructor. If
6984 the bitfield does not meet either of those conditions,
6985 we can't do this optimization. */
6986 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6987 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6988 == MODE_INT)
6989 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6990 <= HOST_BITS_PER_WIDE_INT))))
6992 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6993 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6995 HOST_WIDE_INT bitsize
6996 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6997 enum machine_mode imode
6998 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7000 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7002 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7003 op0 = expand_and (imode, op0, op1, target);
7005 else
7007 tree count
7008 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7011 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7012 target, 0);
7013 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7014 target, 0);
7018 return op0;
7023 enum machine_mode mode1;
7024 HOST_WIDE_INT bitsize, bitpos;
7025 tree offset;
7026 int volatilep = 0;
7027 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7028 &mode1, &unsignedp, &volatilep);
7029 rtx orig_op0;
7031 /* If we got back the original object, something is wrong. Perhaps
7032 we are evaluating an expression too early. In any event, don't
7033 infinitely recurse. */
7034 if (tem == exp)
7035 abort ();
7037 /* If TEM's type is a union of variable size, pass TARGET to the inner
7038 computation, since it will need a temporary and TARGET is known
7039 to suffice. This occurs in unchecked conversion in Ada. */
7041 orig_op0 = op0
7042 = expand_expr (tem,
7043 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7044 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7045 != INTEGER_CST)
7046 ? target : NULL_RTX),
7047 VOIDmode,
7048 (modifier == EXPAND_INITIALIZER
7049 || modifier == EXPAND_CONST_ADDRESS)
7050 ? modifier : EXPAND_NORMAL);
7052 /* If this is a constant, put it into a register if it is a
7053 legitimate constant and OFFSET is 0 and memory if it isn't. */
7054 if (CONSTANT_P (op0))
7056 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7057 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7058 && offset == 0)
7059 op0 = force_reg (mode, op0);
7060 else
7061 op0 = validize_mem (force_const_mem (mode, op0));
7064 if (offset != 0)
7066 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7068 /* If this object is in a register, put it into memory.
7069 This case can't occur in C, but can in Ada if we have
7070 unchecked conversion of an expression from a scalar type to
7071 an array or record type. */
7072 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7073 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7075 /* If the operand is a SAVE_EXPR, we can deal with this by
7076 forcing the SAVE_EXPR into memory. */
7077 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7079 put_var_into_stack (TREE_OPERAND (exp, 0));
7080 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7082 else
7084 tree nt
7085 = build_qualified_type (TREE_TYPE (tem),
7086 (TYPE_QUALS (TREE_TYPE (tem))
7087 | TYPE_QUAL_CONST));
7088 rtx memloc = assign_temp (nt, 1, 1, 1);
7090 emit_move_insn (memloc, op0);
7091 op0 = memloc;
7095 if (GET_CODE (op0) != MEM)
7096 abort ();
7098 #ifdef POINTERS_EXTEND_UNSIGNED
7099 if (GET_MODE (offset_rtx) != Pmode)
7100 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7101 #else
7102 if (GET_MODE (offset_rtx) != ptr_mode)
7103 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7104 #endif
7106 /* A constant address in OP0 can have VOIDmode, we must not try
7107 to call force_reg for that case. Avoid that case. */
7108 if (GET_CODE (op0) == MEM
7109 && GET_MODE (op0) == BLKmode
7110 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7111 && bitsize != 0
7112 && (bitpos % bitsize) == 0
7113 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7114 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7116 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7117 bitpos = 0;
7120 op0 = offset_address (op0, offset_rtx,
7121 highest_pow2_factor (offset));
7124 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7125 record its alignment as BIGGEST_ALIGNMENT. */
7126 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7127 && is_aligning_offset (offset, tem))
7128 set_mem_align (op0, BIGGEST_ALIGNMENT);
7130 /* Don't forget about volatility even if this is a bitfield. */
7131 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7133 if (op0 == orig_op0)
7134 op0 = copy_rtx (op0);
7136 MEM_VOLATILE_P (op0) = 1;
7139 /* The following code doesn't handle CONCAT.
7140 Assume only bitpos == 0 can be used for CONCAT, due to
7141 one-element arrays having the same mode as their element. */
7142 if (GET_CODE (op0) == CONCAT)
7144 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7145 abort ();
7146 return op0;
7149 /* In cases where an aligned union has an unaligned object
7150 as a field, we might be extracting a BLKmode value from
7151 an integer-mode (e.g., SImode) object. Handle this case
7152 by doing the extract into an object as wide as the field
7153 (which we know to be the width of a basic mode), then
7154 storing into memory, and changing the mode to BLKmode. */
7155 if (mode1 == VOIDmode
7156 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7157 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7158 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7159 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7160 && modifier != EXPAND_CONST_ADDRESS
7161 && modifier != EXPAND_INITIALIZER)
7162 /* If the field isn't aligned enough to fetch as a memref,
7163 fetch it as a bit field. */
7164 || (mode1 != BLKmode
7165 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7166 && ((TYPE_ALIGN (TREE_TYPE (tem))
7167 < GET_MODE_ALIGNMENT (mode))
7168 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7169 /* If the type and the field are a constant size and the
7170 size of the type isn't the same size as the bitfield,
7171 we must use bitfield operations. */
7172 || (bitsize >= 0
7173 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7174 == INTEGER_CST)
7175 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7176 bitsize)))
7178 enum machine_mode ext_mode = mode;
7180 if (ext_mode == BLKmode
7181 && ! (target != 0 && GET_CODE (op0) == MEM
7182 && GET_CODE (target) == MEM
7183 && bitpos % BITS_PER_UNIT == 0))
7184 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7186 if (ext_mode == BLKmode)
7188 /* In this case, BITPOS must start at a byte boundary and
7189 TARGET, if specified, must be a MEM. */
7190 if (GET_CODE (op0) != MEM
7191 || (target != 0 && GET_CODE (target) != MEM)
7192 || bitpos % BITS_PER_UNIT != 0)
7193 abort ();
7195 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7196 if (target == 0)
7197 target = assign_temp (type, 0, 1, 1);
7199 emit_block_move (target, op0,
7200 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7201 / BITS_PER_UNIT));
7203 return target;
7206 op0 = validize_mem (op0);
7208 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7209 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7211 op0 = extract_bit_field (op0, bitsize, bitpos,
7212 unsignedp, target, ext_mode, ext_mode,
7213 int_size_in_bytes (TREE_TYPE (tem)));
7215 /* If the result is a record type and BITSIZE is narrower than
7216 the mode of OP0, an integral mode, and this is a big endian
7217 machine, we must put the field into the high-order bits. */
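      /* For example, a 24-bit field extracted into an SImode register on a
	 big-endian target is shifted left by 8 so that its bytes sit at the
	 high-order end, matching how the record lies in memory.  */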
7218 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7219 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7220 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7221 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7222 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7223 - bitsize),
7224 op0, 1);
7226 if (mode == BLKmode)
7228 rtx new = assign_temp (build_qualified_type
7229 ((*lang_hooks.types.type_for_mode)
7230 (ext_mode, 0),
7231 TYPE_QUAL_CONST), 0, 1, 1);
7233 emit_move_insn (new, op0);
7234 op0 = copy_rtx (new);
7235 PUT_MODE (op0, BLKmode);
7236 set_mem_attributes (op0, exp, 1);
7239 return op0;
7242 /* If the result is BLKmode, use that to access the object
7243 now as well. */
7244 if (mode == BLKmode)
7245 mode1 = BLKmode;
7247 /* Get a reference to just this component. */
7248 if (modifier == EXPAND_CONST_ADDRESS
7249 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7250 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7251 else
7252 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7254 if (op0 == orig_op0)
7255 op0 = copy_rtx (op0);
7257 set_mem_attributes (op0, exp, 0);
7258 if (GET_CODE (XEXP (op0, 0)) == REG)
7259 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7261 MEM_VOLATILE_P (op0) |= volatilep;
7262 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7263 || modifier == EXPAND_CONST_ADDRESS
7264 || modifier == EXPAND_INITIALIZER)
7265 return op0;
7266 else if (target == 0)
7267 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7269 convert_move (target, op0, unsignedp);
7270 return target;
7273 case VTABLE_REF:
7275 rtx insn, before = get_last_insn (), vtbl_ref;
7277 /* Evaluate the interior expression. */
7278 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7279 tmode, modifier);
7281 /* Get or create an instruction off which to hang a note. */
7282 if (REG_P (subtarget))
7284 target = subtarget;
7285 insn = get_last_insn ();
7286 if (insn == before)
7287 abort ();
7288 if (! INSN_P (insn))
7289 insn = prev_nonnote_insn (insn);
7291 else
7293 target = gen_reg_rtx (GET_MODE (subtarget));
7294 insn = emit_move_insn (target, subtarget);
7297 /* Collect the data for the note. */
7298 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7299 vtbl_ref = plus_constant (vtbl_ref,
7300 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7301 /* Discard the initial CONST that was added. */
7302 vtbl_ref = XEXP (vtbl_ref, 0);
7304 REG_NOTES (insn)
7305 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7307 return target;
7310 /* Intended for a reference to a buffer of a file-object in Pascal.
7311 But it's not certain that a special tree code will really be
7312 necessary for these. INDIRECT_REF might work for them. */
7313 case BUFFER_REF:
7314 abort ();
7316 case IN_EXPR:
7318 /* Pascal set IN expression.
7320 Algorithm:
7321 rlo = set_low - (set_low%bits_per_word);
7322 the_word = set [ (index - rlo)/bits_per_word ];
7323 bit_index = index % bits_per_word;
7324 bitmask = 1 << bit_index;
7325 return !!(the_word & bitmask); */
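	/* Note that the code below actually indexes the set by bytes
	   (BITS_PER_UNIT and byte_mode) rather than by full words as the
	   sketch above suggests.  */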
7327 tree set = TREE_OPERAND (exp, 0);
7328 tree index = TREE_OPERAND (exp, 1);
7329 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7330 tree set_type = TREE_TYPE (set);
7331 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7332 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7333 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7334 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7335 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7336 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7337 rtx setaddr = XEXP (setval, 0);
7338 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7339 rtx rlow;
7340 rtx diff, quo, rem, addr, bit, result;
7342 /* If domain is empty, answer is no. Likewise if index is constant
7343 and out of bounds. */
7344 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7345 && TREE_CODE (set_low_bound) == INTEGER_CST
7346 && tree_int_cst_lt (set_high_bound, set_low_bound))
7347 || (TREE_CODE (index) == INTEGER_CST
7348 && TREE_CODE (set_low_bound) == INTEGER_CST
7349 && tree_int_cst_lt (index, set_low_bound))
7350 || (TREE_CODE (set_high_bound) == INTEGER_CST
7351 && TREE_CODE (index) == INTEGER_CST
7352 && tree_int_cst_lt (set_high_bound, index))))
7353 return const0_rtx;
7355 if (target == 0)
7356 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7358 /* If we get here, we have to generate the code for both cases
7359 (in range and out of range). */
7361 op0 = gen_label_rtx ();
7362 op1 = gen_label_rtx ();
7364 if (! (GET_CODE (index_val) == CONST_INT
7365 && GET_CODE (lo_r) == CONST_INT))
7366 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7367 GET_MODE (index_val), iunsignedp, op1);
7369 if (! (GET_CODE (index_val) == CONST_INT
7370 && GET_CODE (hi_r) == CONST_INT))
7371 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7372 GET_MODE (index_val), iunsignedp, op1);
7374 /* Calculate the element number of bit zero in the first word
7375 of the set. */
7376 if (GET_CODE (lo_r) == CONST_INT)
7377 rlow = GEN_INT (INTVAL (lo_r)
7378 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7379 else
7380 rlow = expand_binop (index_mode, and_optab, lo_r,
7381 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7382 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7384 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7385 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7387 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7388 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7389 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7390 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7392 addr = memory_address (byte_mode,
7393 expand_binop (index_mode, add_optab, diff,
7394 setaddr, NULL_RTX, iunsignedp,
7395 OPTAB_LIB_WIDEN));
7397 /* Extract the bit we want to examine. */
7398 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7399 gen_rtx_MEM (byte_mode, addr),
7400 make_tree (TREE_TYPE (index), rem),
7401 NULL_RTX, 1);
7402 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7403 GET_MODE (target) == byte_mode ? target : 0,
7404 1, OPTAB_LIB_WIDEN);
7406 if (result != target)
7407 convert_move (target, result, 1);
7409 /* Output the code to handle the out-of-range case. */
7410 emit_jump (op0);
7411 emit_label (op1);
7412 emit_move_insn (target, const0_rtx);
7413 emit_label (op0);
7414 return target;
7417 case WITH_CLEANUP_EXPR:
7418 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7420 WITH_CLEANUP_EXPR_RTL (exp)
7421 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7422 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7423 CLEANUP_EH_ONLY (exp));
7425 /* That's it for this cleanup. */
7426 TREE_OPERAND (exp, 1) = 0;
7428 return WITH_CLEANUP_EXPR_RTL (exp);
7430 case CLEANUP_POINT_EXPR:
7432 /* Start a new binding layer that will keep track of all cleanup
7433 actions to be performed. */
7434 expand_start_bindings (2);
7436 target_temp_slot_level = temp_slot_level;
7438 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7439 /* If we're going to use this value, load it up now. */
7440 if (! ignore)
7441 op0 = force_not_mem (op0);
7442 preserve_temp_slots (op0);
7443 expand_end_bindings (NULL_TREE, 0, 0);
7445 return op0;
7447 case CALL_EXPR:
7448 /* Check for a built-in function. */
7449 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7450 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7451 == FUNCTION_DECL)
7452 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7454 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7455 == BUILT_IN_FRONTEND)
7456 return (*lang_hooks.expand_expr)
7457 (exp, original_target, tmode, modifier);
7458 else
7459 return expand_builtin (exp, target, subtarget, tmode, ignore);
7462 return expand_call (exp, target, ignore);
7464 case NON_LVALUE_EXPR:
7465 case NOP_EXPR:
7466 case CONVERT_EXPR:
7467 case REFERENCE_EXPR:
7468 if (TREE_OPERAND (exp, 0) == error_mark_node)
7469 return const0_rtx;
7471 if (TREE_CODE (type) == UNION_TYPE)
7473 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7475 /* If both input and output are BLKmode, this conversion isn't doing
7476 anything except possibly changing memory attributes. */
7477 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7479 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7480 modifier);
7482 result = copy_rtx (result);
7483 set_mem_attributes (result, exp, 0);
7484 return result;
7487 if (target == 0)
7488 target = assign_temp (type, 0, 1, 1);
7490 if (GET_CODE (target) == MEM)
7491 /* Store data into beginning of memory target. */
7492 store_expr (TREE_OPERAND (exp, 0),
7493 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7495 else if (GET_CODE (target) == REG)
7496 /* Store this field into a union of the proper type. */
7497 store_field (target,
7498 MIN ((int_size_in_bytes (TREE_TYPE
7499 (TREE_OPERAND (exp, 0)))
7500 * BITS_PER_UNIT),
7501 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7502 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7503 VOIDmode, 0, type, 0);
7504 else
7505 abort ();
7507 /* Return the entire union. */
7508 return target;
7511 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7513 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7514 modifier);
7516 /* If the signedness of the conversion differs and OP0 is
7517 a promoted SUBREG, clear that indication since we now
7518 have to do the proper extension. */
7519 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7520 && GET_CODE (op0) == SUBREG)
7521 SUBREG_PROMOTED_VAR_P (op0) = 0;
7523 return op0;
7526 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7527 if (GET_MODE (op0) == mode)
7528 return op0;
7530 /* If OP0 is a constant, just convert it into the proper mode. */
7531 if (CONSTANT_P (op0))
7533 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7534 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7536 if (modifier == EXPAND_INITIALIZER)
7537 return simplify_gen_subreg (mode, op0, inner_mode,
7538 subreg_lowpart_offset (mode,
7539 inner_mode));
7540 else
7541 return convert_modes (mode, inner_mode, op0,
7542 TREE_UNSIGNED (inner_type));
7545 if (modifier == EXPAND_INITIALIZER)
7546 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7548 if (target == 0)
7549 return
7550 convert_to_mode (mode, op0,
7551 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7552 else
7553 convert_move (target, op0,
7554 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7555 return target;
7557 case VIEW_CONVERT_EXPR:
7558 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7560 /* If the input and output modes are both the same, we are done.
7561 Otherwise, if neither mode is BLKmode and both are within a word, we
7562 can use gen_lowpart. If neither is true, make sure the operand is
7563 in memory and convert the MEM to the new mode. */
7564 if (TYPE_MODE (type) == GET_MODE (op0))
7566 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7567 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7568 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7569 op0 = gen_lowpart (TYPE_MODE (type), op0);
7570 else if (GET_CODE (op0) != MEM)
7572 /* If the operand is not a MEM, force it into memory. Since we
7573 are going to be changing the mode of the MEM, don't call
7574 force_const_mem for constants because we don't allow pool
7575 constants to change mode. */
7576 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7578 if (TREE_ADDRESSABLE (exp))
7579 abort ();
7581 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7582 target
7583 = assign_stack_temp_for_type
7584 (TYPE_MODE (inner_type),
7585 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7587 emit_move_insn (target, op0);
7588 op0 = target;
7591 /* At this point, OP0 is in the correct mode. If the output type is such
7592 that the operand is known to be aligned, indicate that it is.
7593 Otherwise, we need only be concerned about alignment for non-BLKmode
7594 results. */
7595 if (GET_CODE (op0) == MEM)
7597 op0 = copy_rtx (op0);
7599 if (TYPE_ALIGN_OK (type))
7600 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7601 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7602 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7604 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7605 HOST_WIDE_INT temp_size
7606 = MAX (int_size_in_bytes (inner_type),
7607 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7608 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7609 temp_size, 0, type);
7610 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7612 if (TREE_ADDRESSABLE (exp))
7613 abort ();
7615 if (GET_MODE (op0) == BLKmode)
7616 emit_block_move (new_with_op0_mode, op0,
7617 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7618 else
7619 emit_move_insn (new_with_op0_mode, op0);
7621 op0 = new;
7624 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7627 return op0;
7629 case PLUS_EXPR:
7630 /* We come here from MINUS_EXPR when the second operand is a
7631 constant. */
7632 plus_expr:
7633 this_optab = ! unsignedp && flag_trapv
7634 && (GET_MODE_CLASS (mode) == MODE_INT)
7635 ? addv_optab : add_optab;
7637 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7638 something else, make sure we add the register to the constant and
7639 then to the other thing. This case can occur during strength
7640 reduction and doing it this way will produce better code if the
7641 frame pointer or argument pointer is eliminated.
7643 fold-const.c will ensure that the constant is always in the inner
7644 PLUS_EXPR, so the only case we need to do anything about is if
7645 sp, ap, or fp is our second argument, in which case we must swap
7646 the innermost first argument and our second argument. */
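/* For instance, (X + C) + FP is rewritten here as (FP + C) + X,
   keeping the constant next to the eliminable register as described
   above.  */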
7648 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7649 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7650 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7651 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7652 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7653 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7655 tree t = TREE_OPERAND (exp, 1);
7657 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7658 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7661 /* If the result is to be ptr_mode and we are adding an integer to
7662 something, we might be forming a constant. So try to use
7663 plus_constant. If it produces a sum and we can't accept it,
7664 use force_operand. This allows P = &ARR[const] to generate
7665 efficient code on machines where a SYMBOL_REF is not a valid
7666 address.
7668 If this is an EXPAND_SUM call, always return the sum. */
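/* E.g. for P = &ARR[2] with 4-byte elements, this path can produce
   (plus (symbol_ref ARR) (const_int 8)) directly via plus_constant,
   rather than forcing the symbol address into a register first.  */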
7669 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7670 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7672 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7673 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7674 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7676 rtx constant_part;
7678 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7679 EXPAND_SUM);
7680 /* Use immed_double_const to ensure that the constant is
7681 truncated according to the mode of OP1, then sign extended
7682 to a HOST_WIDE_INT. Using the constant directly can result
7683 in non-canonical RTL in a 64x32 cross compile. */
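/* For instance, on a 64-bit host targeting a 32-bit machine, a value
   such as 0xffffffff needs to end up as the sign-extended
   (const_int -1) in SImode, not a positive (const_int 0xffffffff);
   the truncate-and-sign-extend above guarantees that.  */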
7684 constant_part
7685 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7686 (HOST_WIDE_INT) 0,
7687 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7688 op1 = plus_constant (op1, INTVAL (constant_part));
7689 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7690 op1 = force_operand (op1, target);
7691 return op1;
7694 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7695 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7696 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7698 rtx constant_part;
7700 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7701 (modifier == EXPAND_INITIALIZER
7702 ? EXPAND_INITIALIZER : EXPAND_SUM));
7703 if (! CONSTANT_P (op0))
7705 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7706 VOIDmode, modifier);
7707 /* Don't go to both_summands if modifier
7708 says it's not right to return a PLUS. */
7709 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7710 goto binop2;
7711 goto both_summands;
7713 /* Use immed_double_const to ensure that the constant is
7714 truncated according to the mode of OP0, then sign extended
7715 to a HOST_WIDE_INT. Using the constant directly can result
7716 in non-canonical RTL in a 64x32 cross compile. */
7717 constant_part
7718 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7719 (HOST_WIDE_INT) 0,
7720 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7721 op0 = plus_constant (op0, INTVAL (constant_part));
7722 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7723 op0 = force_operand (op0, target);
7724 return op0;
7728 /* No sense saving up arithmetic to be done
7729 if it's all in the wrong mode to form part of an address.
7730 And force_operand won't know whether to sign-extend or
7731 zero-extend. */
7732 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7733 || mode != ptr_mode)
7734 goto binop;
7736 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7737 subtarget = 0;
7739 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7740 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7742 both_summands:
7743 /* Make sure any term that's a sum with a constant comes last. */
7744 if (GET_CODE (op0) == PLUS
7745 && CONSTANT_P (XEXP (op0, 1)))
7747 temp = op0;
7748 op0 = op1;
7749 op1 = temp;
7751 /* If adding to a sum including a constant,
7752 associate it to put the constant outside. */
7753 if (GET_CODE (op1) == PLUS
7754 && CONSTANT_P (XEXP (op1, 1)))
7756 rtx constant_term = const0_rtx;
7758 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7759 if (temp != 0)
7760 op0 = temp;
7761 /* Ensure that MULT comes first if there is one. */
7762 else if (GET_CODE (op0) == MULT)
7763 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7764 else
7765 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7767 /* Let's also eliminate constants from op0 if possible. */
7768 op0 = eliminate_constant_term (op0, &constant_term);
7770 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7771 their sum should be a constant. Form it into OP1, since the
7772 result we want will then be OP0 + OP1. */
7774 temp = simplify_binary_operation (PLUS, mode, constant_term,
7775 XEXP (op1, 1));
7776 if (temp != 0)
7777 op1 = temp;
7778 else
7779 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7782 /* Put a constant term last and put a multiplication first. */
7783 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7784 temp = op1, op1 = op0, op0 = temp;
7786 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7787 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7789 case MINUS_EXPR:
7790 /* For initializers, we are allowed to return a MINUS of two
7791 symbolic constants. Here we handle all cases when both operands
7792 are constant. */
7793 /* Handle difference of two symbolic constants,
7794 for the sake of an initializer. */
7795 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7796 && really_constant_p (TREE_OPERAND (exp, 0))
7797 && really_constant_p (TREE_OPERAND (exp, 1)))
7799 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7800 modifier);
7801 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7802 modifier);
7804 /* If the last operand is a CONST_INT, use plus_constant of
7805 the negated constant. Else make the MINUS. */
7806 if (GET_CODE (op1) == CONST_INT)
7807 return plus_constant (op0, - INTVAL (op1));
7808 else
7809 return gen_rtx_MINUS (mode, op0, op1);
7811 /* Convert A - const to A + (-const). */
7812 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7814 tree negated = fold (build1 (NEGATE_EXPR, type,
7815 TREE_OPERAND (exp, 1)));
7817 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7818 /* If we can't negate the constant in TYPE, leave it alone and
7819 expand_binop will negate it for us. We used to try to do it
7820 here in the signed version of TYPE, but that doesn't work
7821 on POINTER_TYPEs. */;
7822 else
7824 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7825 goto plus_expr;
7828 this_optab = ! unsignedp && flag_trapv
7829 && (GET_MODE_CLASS(mode) == MODE_INT)
7830 ? subv_optab : sub_optab;
7831 goto binop;
7833 case MULT_EXPR:
7834 /* If first operand is constant, swap them.
7835 Thus the following special case checks need only
7836 check the second operand. */
7837 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7839 tree t1 = TREE_OPERAND (exp, 0);
7840 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7841 TREE_OPERAND (exp, 1) = t1;
7844 /* Attempt to return something suitable for generating an
7845 indexed address, for machines that support that. */
7847 if (modifier == EXPAND_SUM && mode == ptr_mode
7848 && host_integerp (TREE_OPERAND (exp, 1), 0))
7850 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7851 EXPAND_SUM);
7853 /* If we knew for certain that this is arithmetic for an array
7854 reference, and we knew the bounds of the array, then we could
7855 apply the distributive law across (PLUS X C) for constant C.
7856 Without such knowledge, we risk overflowing the computation
7857 when both X and C are large, but X+C isn't. */
7858 /* ??? Could perhaps special-case EXP being unsigned and C being
7859 positive. In that case we are certain that X+C is no smaller
7860 than X and so the transformed expression will overflow iff the
7861 original would have. */
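/* Example of the risk: with 32-bit arithmetic, X = 0x40000000 and
   C = -0x40000000 give X + C = 0, so (X + C) * 4 is 0, but
   distributing the multiply would compute X * 4, which overflows.  */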
7863 if (GET_CODE (op0) != REG)
7864 op0 = force_operand (op0, NULL_RTX);
7865 if (GET_CODE (op0) != REG)
7866 op0 = copy_to_mode_reg (mode, op0);
7868 return
7869 gen_rtx_MULT (mode, op0,
7870 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7873 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7874 subtarget = 0;
7876 /* Check for multiplying things that have been extended
7877 from a narrower type. If this machine supports multiplying
7878 in that narrower type with a result in the desired type,
7879 do it that way, and avoid the explicit type-conversion. */
7880 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7881 && TREE_CODE (type) == INTEGER_TYPE
7882 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7883 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7884 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7885 && int_fits_type_p (TREE_OPERAND (exp, 1),
7886 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7887 /* Don't use a widening multiply if a shift will do. */
7888 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7889 > HOST_BITS_PER_WIDE_INT)
7890 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7891 ||
7892 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7893 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7894 ==
7895 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7896 /* If both operands are extended, they must either both
7897 be zero-extended or both be sign-extended. */
7898 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7899 ==
7900 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7902 enum machine_mode innermode
7903 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7904 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7905 ? smul_widen_optab : umul_widen_optab);
7906 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7907 ? umul_widen_optab : smul_widen_optab);
7908 if (mode == GET_MODE_WIDER_MODE (innermode))
7910 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7912 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7913 NULL_RTX, VOIDmode, 0);
7914 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7915 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7916 VOIDmode, 0);
7917 else
7918 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7919 NULL_RTX, VOIDmode, 0);
7920 goto binop2;
7922 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7923 && innermode == word_mode)
7925 rtx htem;
7926 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7927 NULL_RTX, VOIDmode, 0);
7928 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7929 op1 = convert_modes (innermode, mode,
7930 expand_expr (TREE_OPERAND (exp, 1),
7931 NULL_RTX, VOIDmode, 0),
7932 unsignedp);
7933 else
7934 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7935 NULL_RTX, VOIDmode, 0);
7936 temp = expand_binop (mode, other_optab, op0, op1, target,
7937 unsignedp, OPTAB_LIB_WIDEN);
7938 htem = expand_mult_highpart_adjust (innermode,
7939 gen_highpart (innermode, temp),
7940 op0, op1,
7941 gen_highpart (innermode, temp),
7942 unsignedp);
7943 emit_move_insn (gen_highpart (innermode, temp), htem);
7944 return temp;
7948 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7949 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7950 return expand_mult (mode, op0, op1, target, unsignedp);
7952 case TRUNC_DIV_EXPR:
7953 case FLOOR_DIV_EXPR:
7954 case CEIL_DIV_EXPR:
7955 case ROUND_DIV_EXPR:
7956 case EXACT_DIV_EXPR:
7957 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7958 subtarget = 0;
7959 /* Possible optimization: compute the dividend with EXPAND_SUM,
7960 then, if the divisor is constant, we can optimize the case
7961 where some terms of the dividend have coefficients divisible by it. */
7962 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7963 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7964 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7966 case RDIV_EXPR:
7967 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal, saving an
7968 expensive divide. If not, combine will rebuild the original
7969 computation. */
7970 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7971 && TREE_CODE (type) == REAL_TYPE
7972 && !real_onep (TREE_OPERAND (exp, 0)))
7973 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7974 build (RDIV_EXPR, type,
7975 build_real (type, dconst1),
7976 TREE_OPERAND (exp, 1))),
7977 target, tmode, unsignedp);
7978 this_optab = sdiv_optab;
7979 goto binop;
7981 case TRUNC_MOD_EXPR:
7982 case FLOOR_MOD_EXPR:
7983 case CEIL_MOD_EXPR:
7984 case ROUND_MOD_EXPR:
7985 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7986 subtarget = 0;
7987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7988 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7989 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7991 case FIX_ROUND_EXPR:
7992 case FIX_FLOOR_EXPR:
7993 case FIX_CEIL_EXPR:
7994 abort (); /* Not used for C. */
7996 case FIX_TRUNC_EXPR:
7997 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7998 if (target == 0)
7999 target = gen_reg_rtx (mode);
8000 expand_fix (target, op0, unsignedp);
8001 return target;
8003 case FLOAT_EXPR:
8004 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8005 if (target == 0)
8006 target = gen_reg_rtx (mode);
8007 /* expand_float can't figure out what to do if FROM has VOIDmode.
8008 So give it the correct mode. With -O, cse will optimize this. */
8009 if (GET_MODE (op0) == VOIDmode)
8010 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8011 op0);
8012 expand_float (target, op0,
8013 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8014 return target;
8016 case NEGATE_EXPR:
8017 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8018 temp = expand_unop (mode,
8019 ! unsignedp && flag_trapv
8020 && (GET_MODE_CLASS(mode) == MODE_INT)
8021 ? negv_optab : neg_optab, op0, target, 0);
8022 if (temp == 0)
8023 abort ();
8024 return temp;
8026 case ABS_EXPR:
8027 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8029 /* Handle complex values specially. */
8030 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8031 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8032 return expand_complex_abs (mode, op0, target, unsignedp);
8034 /* Unsigned abs is simply the operand. Testing here means we don't
8035 risk generating incorrect code below. */
8036 if (TREE_UNSIGNED (type))
8037 return op0;
8039 return expand_abs (mode, op0, target, unsignedp,
8040 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8042 case MAX_EXPR:
8043 case MIN_EXPR:
8044 target = original_target;
8045 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8046 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8047 || GET_MODE (target) != mode
8048 || (GET_CODE (target) == REG
8049 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8050 target = gen_reg_rtx (mode);
8051 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8052 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8054 /* First try to do it with a special MIN or MAX instruction.
8055 If that does not win, use a conditional jump to select the proper
8056 value. */
8057 this_optab = (TREE_UNSIGNED (type)
8058 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8059 : (code == MIN_EXPR ? smin_optab : smax_optab));
8061 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8062 OPTAB_WIDEN);
8063 if (temp != 0)
8064 return temp;
8066 /* At this point, a MEM target is no longer useful; we will get better
8067 code without it. */
8069 if (GET_CODE (target) == MEM)
8070 target = gen_reg_rtx (mode);
8072 if (target != op0)
8073 emit_move_insn (target, op0);
8075 op0 = gen_label_rtx ();
8077 /* If this mode is an integer too wide to compare properly,
8078 compare word by word. Rely on cse to optimize constant cases. */
8079 if (GET_MODE_CLASS (mode) == MODE_INT
8080 && ! can_compare_p (GE, mode, ccp_jump))
8082 if (code == MAX_EXPR)
8083 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8084 target, op1, NULL_RTX, op0);
8085 else
8086 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8087 op1, target, NULL_RTX, op0);
8089 else
8091 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8092 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8093 unsignedp, mode, NULL_RTX, NULL_RTX,
8094 op0);
8096 emit_move_insn (target, op1);
8097 emit_label (op0);
8098 return target;
8100 case BIT_NOT_EXPR:
8101 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8102 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8103 if (temp == 0)
8104 abort ();
8105 return temp;
8107 case FFS_EXPR:
8108 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8109 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8110 if (temp == 0)
8111 abort ();
8112 return temp;
8114 /* ??? Can optimize bitwise operations with one arg constant.
8115 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8116 and (a bitwise1 b) bitwise2 b (etc)
8117 but that is probably not worth while. */
8119 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8120 boolean values when we want in all cases to compute both of them. In
8121 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8122 as actual zero-or-1 values and then bitwise anding. In cases where
8123 there cannot be any side effects, better code would be made by
8124 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8125 how to recognize those cases. */
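/* I.e. for `a && b' expressed as TRUTH_AND_EXPR, both a and b are
   reduced to 0 or 1 and then ANDed; TRUTH_ANDIF_EXPR would instead
   skip evaluating b whenever a is 0.  */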
8127 case TRUTH_AND_EXPR:
8128 case BIT_AND_EXPR:
8129 this_optab = and_optab;
8130 goto binop;
8132 case TRUTH_OR_EXPR:
8133 case BIT_IOR_EXPR:
8134 this_optab = ior_optab;
8135 goto binop;
8137 case TRUTH_XOR_EXPR:
8138 case BIT_XOR_EXPR:
8139 this_optab = xor_optab;
8140 goto binop;
8142 case LSHIFT_EXPR:
8143 case RSHIFT_EXPR:
8144 case LROTATE_EXPR:
8145 case RROTATE_EXPR:
8146 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8147 subtarget = 0;
8148 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8149 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8150 unsignedp);
8152 /* Could determine the answer when only additive constants differ. Also,
8153 the addition of one can be handled by changing the condition. */
8154 case LT_EXPR:
8155 case LE_EXPR:
8156 case GT_EXPR:
8157 case GE_EXPR:
8158 case EQ_EXPR:
8159 case NE_EXPR:
8160 case UNORDERED_EXPR:
8161 case ORDERED_EXPR:
8162 case UNLT_EXPR:
8163 case UNLE_EXPR:
8164 case UNGT_EXPR:
8165 case UNGE_EXPR:
8166 case UNEQ_EXPR:
8167 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8168 if (temp != 0)
8169 return temp;
8171 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8172 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8173 && original_target
8174 && GET_CODE (original_target) == REG
8175 && (GET_MODE (original_target)
8176 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8178 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8179 VOIDmode, 0);
8181 /* If temp is constant, we can just compute the result. */
8182 if (GET_CODE (temp) == CONST_INT)
8184 if (INTVAL (temp) != 0)
8185 emit_move_insn (target, const1_rtx);
8186 else
8187 emit_move_insn (target, const0_rtx);
8189 return target;
8192 if (temp != original_target)
8194 enum machine_mode mode1 = GET_MODE (temp);
8195 if (mode1 == VOIDmode)
8196 mode1 = tmode != VOIDmode ? tmode : mode;
8198 temp = copy_to_mode_reg (mode1, temp);
8201 op1 = gen_label_rtx ();
8202 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8203 GET_MODE (temp), unsignedp, op1);
8204 emit_move_insn (temp, const1_rtx);
8205 emit_label (op1);
8206 return temp;
8209 /* If no set-flag instruction, must generate a conditional
8210 store into a temporary variable. Drop through
8211 and handle this like && and ||. */
8213 case TRUTH_ANDIF_EXPR:
8214 case TRUTH_ORIF_EXPR:
8215 if (! ignore
8216 && (target == 0 || ! safe_from_p (target, exp, 1)
8217 /* Make sure we don't have a hard reg (such as function's return
8218 value) live across basic blocks, if not optimizing. */
8219 || (!optimize && GET_CODE (target) == REG
8220 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8221 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8223 if (target)
8224 emit_clr_insn (target);
8226 op1 = gen_label_rtx ();
8227 jumpifnot (exp, op1);
8229 if (target)
8230 emit_0_to_1_insn (target);
8232 emit_label (op1);
8233 return ignore ? const0_rtx : target;
8235 case TRUTH_NOT_EXPR:
8236 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8237 /* The parser is careful to generate TRUTH_NOT_EXPR
8238 only with operands that are always zero or one. */
8239 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8240 target, 1, OPTAB_LIB_WIDEN);
8241 if (temp == 0)
8242 abort ();
8243 return temp;
8245 case COMPOUND_EXPR:
8246 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8247 emit_queue ();
8248 return expand_expr (TREE_OPERAND (exp, 1),
8249 (ignore ? const0_rtx : target),
8250 VOIDmode, 0);
8252 case COND_EXPR:
8253 /* If we would have a "singleton" (see below) were it not for a
8254 conversion in each arm, bring that conversion back out. */
8255 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8256 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8257 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8258 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8260 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8261 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8263 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8264 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8265 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8266 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8267 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8268 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8269 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8270 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8271 return expand_expr (build1 (NOP_EXPR, type,
8272 build (COND_EXPR, TREE_TYPE (iftrue),
8273 TREE_OPERAND (exp, 0),
8274 iftrue, iffalse)),
8275 target, tmode, modifier);
8279 /* Note that COND_EXPRs whose type is a structure or union
8280 are required to be constructed to contain assignments of
8281 a temporary variable, so that we can evaluate them here
8282 for side effect only. If type is void, we must do likewise. */
8284 /* If an arm of the branch requires a cleanup,
8285 only that cleanup is performed. */
8287 tree singleton = 0;
8288 tree binary_op = 0, unary_op = 0;
8290 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8291 convert it to our mode, if necessary. */
8292 if (integer_onep (TREE_OPERAND (exp, 1))
8293 && integer_zerop (TREE_OPERAND (exp, 2))
8294 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8296 if (ignore)
8298 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8299 modifier);
8300 return const0_rtx;
8303 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8304 if (GET_MODE (op0) == mode)
8305 return op0;
8307 if (target == 0)
8308 target = gen_reg_rtx (mode);
8309 convert_move (target, op0, unsignedp);
8310 return target;
8313 /* Check for X ? A + B : A. If we have this, we can copy A to the
8314 output and conditionally add B. Similarly for unary operations.
8315 Don't do this if X has side-effects because those side effects
8316 might affect A or B and the "?" operation is a sequence point in
8317 ANSI. (operand_equal_p tests for side effects.) */
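/* In the checks below, SINGLETON is the arm that is just A, and
   BINARY_OP or UNARY_OP is the arm that computes something from A;
   e.g. for `x ? i + 3 : i', SINGLETON is `i' and BINARY_OP is `i + 3'.  */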
8319 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8320 && operand_equal_p (TREE_OPERAND (exp, 2),
8321 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8322 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8323 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8324 && operand_equal_p (TREE_OPERAND (exp, 1),
8325 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8326 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8327 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8328 && operand_equal_p (TREE_OPERAND (exp, 2),
8329 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8330 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8331 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8332 && operand_equal_p (TREE_OPERAND (exp, 1),
8333 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8334 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8336 /* If we are not to produce a result, we have no target. Otherwise,
8337 if a target was specified use it; it will not be used as an
8338 intermediate target unless it is safe. If no target, use a
8339 temporary. */
8341 if (ignore)
8342 temp = 0;
8343 else if (original_target
8344 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8345 || (singleton && GET_CODE (original_target) == REG
8346 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8347 && original_target == var_rtx (singleton)))
8348 && GET_MODE (original_target) == mode
8349 #ifdef HAVE_conditional_move
8350 && (! can_conditionally_move_p (mode)
8351 || GET_CODE (original_target) == REG
8352 || TREE_ADDRESSABLE (type))
8353 #endif
8354 && (GET_CODE (original_target) != MEM
8355 || TREE_ADDRESSABLE (type)))
8356 temp = original_target;
8357 else if (TREE_ADDRESSABLE (type))
8358 abort ();
8359 else
8360 temp = assign_temp (type, 0, 0, 1);
8362 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8363 do the test of X as a store-flag operation, do this as
8364 A + ((X != 0) << log C). Similarly for other simple binary
8365 operators. Only do for C == 1 if BRANCH_COST is low. */
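/* E.g. `x ? i + 4 : i' can become `i + ((x != 0) << 2)', replacing
   the branch with a store-flag and a shift.  */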
8366 if (temp && singleton && binary_op
8367 && (TREE_CODE (binary_op) == PLUS_EXPR
8368 || TREE_CODE (binary_op) == MINUS_EXPR
8369 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8370 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8371 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8372 : integer_onep (TREE_OPERAND (binary_op, 1)))
8373 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8375 rtx result;
8376 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8377 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8378 ? addv_optab : add_optab)
8379 : TREE_CODE (binary_op) == MINUS_EXPR
8380 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8381 ? subv_optab : sub_optab)
8382 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8383 : xor_optab);
8385 /* If we had X ? A : A + 1, do this as A + (X == 0).
8387 We have to invert the truth value here and then put it
8388 back later if do_store_flag fails. We cannot simply copy
8389 TREE_OPERAND (exp, 0) to another variable and modify that
8390 because invert_truthvalue can modify the tree pointed to
8391 by its argument. */
8392 if (singleton == TREE_OPERAND (exp, 1))
8393 TREE_OPERAND (exp, 0)
8394 = invert_truthvalue (TREE_OPERAND (exp, 0));
8396 result = do_store_flag (TREE_OPERAND (exp, 0),
8397 (safe_from_p (temp, singleton, 1)
8398 ? temp : NULL_RTX),
8399 mode, BRANCH_COST <= 1);
8401 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8402 result = expand_shift (LSHIFT_EXPR, mode, result,
8403 build_int_2 (tree_log2
8404 (TREE_OPERAND
8405 (binary_op, 1)),
8407 (safe_from_p (temp, singleton, 1)
8408 ? temp : NULL_RTX), 0);
8410 if (result)
8412 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8413 return expand_binop (mode, boptab, op1, result, temp,
8414 unsignedp, OPTAB_LIB_WIDEN);
8416 else if (singleton == TREE_OPERAND (exp, 1))
8417 TREE_OPERAND (exp, 0)
8418 = invert_truthvalue (TREE_OPERAND (exp, 0));
8421 do_pending_stack_adjust ();
8422 NO_DEFER_POP;
8423 op0 = gen_label_rtx ();
8425 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8427 if (temp != 0)
8429 /* If the target conflicts with the other operand of the
8430 binary op, we can't use it. Also, we can't use the target
8431 if it is a hard register, because evaluating the condition
8432 might clobber it. */
8433 if ((binary_op
8434 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8435 || (GET_CODE (temp) == REG
8436 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8437 temp = gen_reg_rtx (mode);
8438 store_expr (singleton, temp, 0);
8440 else
8441 expand_expr (singleton,
8442 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8443 if (singleton == TREE_OPERAND (exp, 1))
8444 jumpif (TREE_OPERAND (exp, 0), op0);
8445 else
8446 jumpifnot (TREE_OPERAND (exp, 0), op0);
8448 start_cleanup_deferral ();
8449 if (binary_op && temp == 0)
8450 /* Just touch the other operand. */
8451 expand_expr (TREE_OPERAND (binary_op, 1),
8452 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8453 else if (binary_op)
8454 store_expr (build (TREE_CODE (binary_op), type,
8455 make_tree (type, temp),
8456 TREE_OPERAND (binary_op, 1)),
8457 temp, 0);
8458 else
8459 store_expr (build1 (TREE_CODE (unary_op), type,
8460 make_tree (type, temp)),
8461 temp, 0);
8462 op1 = op0;
8464 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8465 comparison operator. If we have one of these cases, set the
8466 output to A, branch on A (cse will merge these two references),
8467 then set the output to FOO. */
8468 else if (temp
8469 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8470 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8471 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8472 TREE_OPERAND (exp, 1), 0)
8473 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8474 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8475 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8477 if (GET_CODE (temp) == REG
8478 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8479 temp = gen_reg_rtx (mode);
8480 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8481 jumpif (TREE_OPERAND (exp, 0), op0);
8483 start_cleanup_deferral ();
8484 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8485 op1 = op0;
8487 else if (temp
8488 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8489 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8490 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8491 TREE_OPERAND (exp, 2), 0)
8492 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8493 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8494 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8496 if (GET_CODE (temp) == REG
8497 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8498 temp = gen_reg_rtx (mode);
8499 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8500 jumpifnot (TREE_OPERAND (exp, 0), op0);
8502 start_cleanup_deferral ();
8503 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8504 op1 = op0;
8506 else
8508 op1 = gen_label_rtx ();
8509 jumpifnot (TREE_OPERAND (exp, 0), op0);
8511 start_cleanup_deferral ();
8513 /* One branch of the cond can be void, if it never returns. For
8514 example A ? throw : E */
8515 if (temp != 0
8516 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8517 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8518 else
8519 expand_expr (TREE_OPERAND (exp, 1),
8520 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8521 end_cleanup_deferral ();
8522 emit_queue ();
8523 emit_jump_insn (gen_jump (op1));
8524 emit_barrier ();
8525 emit_label (op0);
8526 start_cleanup_deferral ();
8527 if (temp != 0
8528 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8529 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8530 else
8531 expand_expr (TREE_OPERAND (exp, 2),
8532 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8535 end_cleanup_deferral ();
8537 emit_queue ();
8538 emit_label (op1);
8539 OK_DEFER_POP;
8541 return temp;
8544 case TARGET_EXPR:
8546 /* Something needs to be initialized, but we didn't know
8547 where that thing was when building the tree. For example,
8548 it could be the return value of a function, or a parameter
8549 to a function which lives on the stack, or a temporary
8550 variable which must be passed by reference.
8552 We guarantee that the expression will either be constructed
8553 or copied into our original target. */
8555 tree slot = TREE_OPERAND (exp, 0);
8556 tree cleanups = NULL_TREE;
8557 tree exp1;
8559 if (TREE_CODE (slot) != VAR_DECL)
8560 abort ();
8562 if (! ignore)
8563 target = original_target;
8565 /* Set this here so that if we get a target that refers to a
8566 register variable that's already been used, put_reg_into_stack
8567 knows that it should fix up those uses. */
8568 TREE_USED (slot) = 1;
8570 if (target == 0)
8572 if (DECL_RTL_SET_P (slot))
8574 target = DECL_RTL (slot);
8575 /* If we have already expanded the slot, don't do
8576 it again. (mrs) */
8577 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8578 return target;
8580 else
8582 target = assign_temp (type, 2, 0, 1);
8583 /* All temp slots at this level must not conflict. */
8584 preserve_temp_slots (target);
8585 SET_DECL_RTL (slot, target);
8586 if (TREE_ADDRESSABLE (slot))
8587 put_var_into_stack (slot);
8589 /* Since SLOT is not known to the called function
8590 to belong to its stack frame, we must build an explicit
8591 cleanup. This case occurs when we must build up a reference
8592 to pass the reference as an argument. In this case,
8593 it is very likely that such a reference need not be
8594 built here. */
8596 if (TREE_OPERAND (exp, 2) == 0)
8597 TREE_OPERAND (exp, 2)
8598 = (*lang_hooks.maybe_build_cleanup) (slot);
8599 cleanups = TREE_OPERAND (exp, 2);
8602 else
8604 /* This case does occur, when expanding a parameter which
8605 needs to be constructed on the stack. The target
8606 is the actual stack address that we want to initialize.
8607 The function we call will perform the cleanup in this case. */
8609 /* If we have already assigned it space, use that space,
8610 not the target that we were passed in, as our target
8611 parameter is only a hint. */
8612 if (DECL_RTL_SET_P (slot))
8614 target = DECL_RTL (slot);
8615 /* If we have already expanded the slot, don't do
8616 it again. (mrs) */
8617 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8618 return target;
8620 else
8622 SET_DECL_RTL (slot, target);
8623 /* If we must have an addressable slot, then make sure that
8624 the RTL that we just stored in slot is OK. */
8625 if (TREE_ADDRESSABLE (slot))
8626 put_var_into_stack (slot);
8630 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8631 /* Mark it as expanded. */
8632 TREE_OPERAND (exp, 1) = NULL_TREE;
8634 store_expr (exp1, target, 0);
8636 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8638 return target;
8641 case INIT_EXPR:
8643 tree lhs = TREE_OPERAND (exp, 0);
8644 tree rhs = TREE_OPERAND (exp, 1);
8646 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8647 return temp;
8650 case MODIFY_EXPR:
8652 /* If lhs is complex, expand calls in rhs before computing it.
8653 That's so we don't compute a pointer and save it over a
8654 call. If lhs is simple, compute it first so we can give it
8655 as a target if the rhs is just a call. This avoids an
8656 extra temp and copy and that prevents a partial-subsumption
8657 which makes bad code. Actually we could treat
8658 component_ref's of vars like vars. */
8660 tree lhs = TREE_OPERAND (exp, 0);
8661 tree rhs = TREE_OPERAND (exp, 1);
8663 temp = 0;
8665 /* Check for |= or &= of a bitfield of size one into another bitfield
8666 of size 1. In this case, (unless we need the result of the
8667 assignment) we can do this more efficiently with a
8668 test followed by an assignment, if necessary.
8670 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8671 things change so we do, this code should be enhanced to
8672 support it. */
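/* In other words, for one-bit fields `s.a |= s.b' can be emitted as
   `if (s.b) s.a = 1;' and `s.a &= s.b' as `if (!s.b) s.a = 0;', which
   is what the jump-and-assign sequence below produces.  */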
8673 if (ignore
8674 && TREE_CODE (lhs) == COMPONENT_REF
8675 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8676 || TREE_CODE (rhs) == BIT_AND_EXPR)
8677 && TREE_OPERAND (rhs, 0) == lhs
8678 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8679 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8680 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8682 rtx label = gen_label_rtx ();
8684 do_jump (TREE_OPERAND (rhs, 1),
8685 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8686 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8687 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8688 (TREE_CODE (rhs) == BIT_IOR_EXPR
8689 ? integer_one_node
8690 : integer_zero_node)),
8691 0, 0);
8692 do_pending_stack_adjust ();
8693 emit_label (label);
8694 return const0_rtx;
8697 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8699 return temp;
8702 case RETURN_EXPR:
8703 if (!TREE_OPERAND (exp, 0))
8704 expand_null_return ();
8705 else
8706 expand_return (TREE_OPERAND (exp, 0));
8707 return const0_rtx;
8709 case PREINCREMENT_EXPR:
8710 case PREDECREMENT_EXPR:
8711 return expand_increment (exp, 0, ignore);
8713 case POSTINCREMENT_EXPR:
8714 case POSTDECREMENT_EXPR:
8715 /* Faster to treat as pre-increment if result is not used. */
8716 return expand_increment (exp, ! ignore, ignore);
8718 case ADDR_EXPR:
8719 /* Are we taking the address of a nested function? */
8720 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8721 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8722 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8723 && ! TREE_STATIC (exp))
8725 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8726 op0 = force_operand (op0, target);
8728 /* If we are taking the address of something erroneous, just
8729 return a zero. */
8730 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8731 return const0_rtx;
8732 /* If we are taking the address of a constant and are at the
8733 top level, we have to use output_constant_def since we can't
8734 call force_const_mem at top level. */
8735 else if (cfun == 0
8736 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8737 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8738 == 'c')))
8739 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8740 else
8742 /* We make sure to pass const0_rtx down if we came in with
8743 ignore set, to avoid doing the cleanups twice for something. */
8744 op0 = expand_expr (TREE_OPERAND (exp, 0),
8745 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8746 (modifier == EXPAND_INITIALIZER
8747 ? modifier : EXPAND_CONST_ADDRESS));
8749 /* If we are going to ignore the result, OP0 will have been set
8750 to const0_rtx, so just return it. Don't get confused and
8751 think we are taking the address of the constant. */
8752 if (ignore)
8753 return op0;
8755 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8756 clever and return a REG when given a MEM. */
8757 op0 = protect_from_queue (op0, 1);
8759 /* We would like the object in memory. If it is a constant, we can
8760 have it be statically allocated into memory. For a non-constant,
8761 we need to allocate some memory and store the value into it. */
8763 if (CONSTANT_P (op0))
8764 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8765 op0);
8766 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8767 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8768 || GET_CODE (op0) == PARALLEL)
8770 /* If the operand is a SAVE_EXPR, we can deal with this by
8771 forcing the SAVE_EXPR into memory. */
8772 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8774 put_var_into_stack (TREE_OPERAND (exp, 0));
8775 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8777 else
8779 /* If this object is in a register, it can't be BLKmode. */
8780 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8781 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8783 if (GET_CODE (op0) == PARALLEL)
8784 /* Handle calls that pass values in multiple
8785 non-contiguous locations. The Irix 6 ABI has examples
8786 of this. */
8787 emit_group_store (memloc, op0,
8788 int_size_in_bytes (inner_type));
8789 else
8790 emit_move_insn (memloc, op0);
8792 op0 = memloc;
8796 if (GET_CODE (op0) != MEM)
8797 abort ();
8799 mark_temp_addr_taken (op0);
8800 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8802 op0 = XEXP (op0, 0);
8803 #ifdef POINTERS_EXTEND_UNSIGNED
8804 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8805 && mode == ptr_mode)
8806 op0 = convert_memory_address (ptr_mode, op0);
8807 #endif
8808 return op0;
8811 /* If OP0 is not aligned at least as much as the type requires, we
8812 need to make a temporary, copy OP0 to it, and take the address of
8813 the temporary. We want to use the alignment of the type, not of
8814 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8815 the test for BLKmode means that can't happen. The test for
8816 BLKmode is because we never make mis-aligned MEMs with
8817 non-BLKmode.
8819 We don't need to do this at all if the machine doesn't have
8820 strict alignment. */
8821 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8822 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8823 > MEM_ALIGN (op0))
8824 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8826 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8827 rtx new
8828 = assign_stack_temp_for_type
8829 (TYPE_MODE (inner_type),
8830 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8831 : int_size_in_bytes (inner_type),
8832 1, build_qualified_type (inner_type,
8833 (TYPE_QUALS (inner_type)
8834 | TYPE_QUAL_CONST)));
8836 if (TYPE_ALIGN_OK (inner_type))
8837 abort ();
8839 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8840 op0 = new;
8843 op0 = force_operand (XEXP (op0, 0), target);
8846 if (flag_force_addr
8847 && GET_CODE (op0) != REG
8848 && modifier != EXPAND_CONST_ADDRESS
8849 && modifier != EXPAND_INITIALIZER
8850 && modifier != EXPAND_SUM)
8851 op0 = force_reg (Pmode, op0);
8853 if (GET_CODE (op0) == REG
8854 && ! REG_USERVAR_P (op0))
8855 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8857 #ifdef POINTERS_EXTEND_UNSIGNED
8858 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8859 && mode == ptr_mode)
8860 op0 = convert_memory_address (ptr_mode, op0);
8861 #endif
8863 return op0;
8865 case ENTRY_VALUE_EXPR:
8866 abort ();
8868 /* COMPLEX type for Extended Pascal & Fortran */
8869 case COMPLEX_EXPR:
8871 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8872 rtx insns;
8874 /* Get the rtx code of the operands. */
8875 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8876 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8878 if (! target)
8879 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8881 start_sequence ();
8883 /* Move the real (op0) and imaginary (op1) parts to their location. */
8884 emit_move_insn (gen_realpart (mode, target), op0);
8885 emit_move_insn (gen_imagpart (mode, target), op1);
8887 insns = get_insns ();
8888 end_sequence ();
8890 /* Complex construction should appear as a single unit. */
8891 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8892 each with a separate pseudo as destination.
8893 It's not correct for flow to treat them as a unit. */
8894 if (GET_CODE (target) != CONCAT)
8895 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8896 else
8897 emit_insn (insns);
8899 return target;
8902 case REALPART_EXPR:
8903 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8904 return gen_realpart (mode, op0);
8906 case IMAGPART_EXPR:
8907 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8908 return gen_imagpart (mode, op0);
8910 case CONJ_EXPR:
8912 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8913 rtx imag_t;
8914 rtx insns;
8916 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8918 if (! target)
8919 target = gen_reg_rtx (mode);
8921 start_sequence ();
8923 /* Store the realpart and the negated imagpart to target. */
8924 emit_move_insn (gen_realpart (partmode, target),
8925 gen_realpart (partmode, op0));
8927 imag_t = gen_imagpart (partmode, target);
8928 temp = expand_unop (partmode,
8929 ! unsignedp && flag_trapv
8930 && (GET_MODE_CLASS(partmode) == MODE_INT)
8931 ? negv_optab : neg_optab,
8932 gen_imagpart (partmode, op0), imag_t, 0);
8933 if (temp != imag_t)
8934 emit_move_insn (imag_t, temp);
8936 insns = get_insns ();
8937 end_sequence ();
8939 /* Conjugate should appear as a single unit.
8940 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8941 each with a separate pseudo as destination.
8942 It's not correct for flow to treat them as a unit. */
8943 if (GET_CODE (target) != CONCAT)
8944 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8945 else
8946 emit_insn (insns);
8948 return target;
8951 case TRY_CATCH_EXPR:
8953 tree handler = TREE_OPERAND (exp, 1);
8955 expand_eh_region_start ();
8957 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8959 expand_eh_region_end_cleanup (handler);
8961 return op0;
8964 case TRY_FINALLY_EXPR:
8966 tree try_block = TREE_OPERAND (exp, 0);
8967 tree finally_block = TREE_OPERAND (exp, 1);
8968 rtx finally_label = gen_label_rtx ();
8969 rtx done_label = gen_label_rtx ();
8970 rtx return_link = gen_reg_rtx (Pmode);
8971 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8972 (tree) finally_label, (tree) return_link);
8973 TREE_SIDE_EFFECTS (cleanup) = 1;
8975 /* Start a new binding layer that will keep track of all cleanup
8976 actions to be performed. */
8977 expand_start_bindings (2);
8979 target_temp_slot_level = temp_slot_level;
8981 expand_decl_cleanup (NULL_TREE, cleanup);
8982 op0 = expand_expr (try_block, target, tmode, modifier);
8984 preserve_temp_slots (op0);
8985 expand_end_bindings (NULL_TREE, 0, 0);
8986 emit_jump (done_label);
8987 emit_label (finally_label);
8988 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8989 emit_indirect_jump (return_link);
8990 emit_label (done_label);
8991 return op0;
8994 case GOTO_SUBROUTINE_EXPR:
8996 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8997 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8998 rtx return_address = gen_label_rtx ();
8999 emit_move_insn (return_link,
9000 gen_rtx_LABEL_REF (Pmode, return_address));
9001 emit_jump (subr);
9002 emit_label (return_address);
9003 return const0_rtx;
9006 case VA_ARG_EXPR:
9007 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9009 case EXC_PTR_EXPR:
9010 return get_exception_pointer (cfun);
9012 case FDESC_EXPR:
9013 /* Function descriptors are not valid except as
9014 initialization constants, and should not be expanded. */
9015 abort ();
9017 default:
9018 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9021 /* Here to do an ordinary binary operator, generating an instruction
9022 from the optab already placed in `this_optab'. */
9023 binop:
9024 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9025 subtarget = 0;
9026 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9027 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9028 binop2:
9029 temp = expand_binop (mode, this_optab, op0, op1, target,
9030 unsignedp, OPTAB_LIB_WIDEN);
9031 if (temp == 0)
9032 abort ();
9033 return temp;
9036 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9037 when applied to the address of EXP produces an address known to be
9038 aligned more than BIGGEST_ALIGNMENT. */
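/* The offset we look for has the shape (- (ADDR_EXPR of EXP)) & (ALIGN - 1)
   (the address may also be that of a PLACEHOLDER_EXPR of EXP's type),
   where ALIGN - 1 is larger than BIGGEST_ALIGNMENT; adding such an offset
   to EXP's address rounds that address up to a multiple of ALIGN.  */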
9040 static int
9041 is_aligning_offset (offset, exp)
9042 tree offset;
9043 tree exp;
9045 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9046 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9047 || TREE_CODE (offset) == NOP_EXPR
9048 || TREE_CODE (offset) == CONVERT_EXPR
9049 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9050 offset = TREE_OPERAND (offset, 0);
9052 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9053 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9054 if (TREE_CODE (offset) != BIT_AND_EXPR
9055 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9056 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9057 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9058 return 0;
9060 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9061 It must be NEGATE_EXPR. Then strip any more conversions. */
9062 offset = TREE_OPERAND (offset, 0);
9063 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9064 || TREE_CODE (offset) == NOP_EXPR
9065 || TREE_CODE (offset) == CONVERT_EXPR)
9066 offset = TREE_OPERAND (offset, 0);
9068 if (TREE_CODE (offset) != NEGATE_EXPR)
9069 return 0;
9071 offset = TREE_OPERAND (offset, 0);
9072 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9073 || TREE_CODE (offset) == NOP_EXPR
9074 || TREE_CODE (offset) == CONVERT_EXPR)
9075 offset = TREE_OPERAND (offset, 0);
9077 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9078 whose type is the same as EXP. */
9079 return (TREE_CODE (offset) == ADDR_EXPR
9080 && (TREE_OPERAND (offset, 0) == exp
9081 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9082 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9083 == TREE_TYPE (exp)))));
9086 /* Return the tree node if ARG corresponds to a string constant, or zero
9087 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9088 in bytes within the string that ARG is accessing. The type of the
9089 offset will be `sizetype'. */
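/* Worked example (annotation): for ARG written as "hello" + 2, i.e. a
   PLUS_EXPR of an ADDR_EXPR of the STRING_CST "hello" and the integer 2,
   this returns the STRING_CST and sets *PTR_OFFSET to the sizetype
   constant 2.  The string address may appear as either operand of the
   PLUS_EXPR.  */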
9091 tree
9092 string_constant (arg, ptr_offset)
9093 tree arg;
9094 tree *ptr_offset;
9096 STRIP_NOPS (arg);
9098 if (TREE_CODE (arg) == ADDR_EXPR
9099 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9101 *ptr_offset = size_zero_node;
9102 return TREE_OPERAND (arg, 0);
9104 else if (TREE_CODE (arg) == PLUS_EXPR)
9106 tree arg0 = TREE_OPERAND (arg, 0);
9107 tree arg1 = TREE_OPERAND (arg, 1);
9109 STRIP_NOPS (arg0);
9110 STRIP_NOPS (arg1);
9112 if (TREE_CODE (arg0) == ADDR_EXPR
9113 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9115 *ptr_offset = convert (sizetype, arg1);
9116 return TREE_OPERAND (arg0, 0);
9118 else if (TREE_CODE (arg1) == ADDR_EXPR
9119 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9121 *ptr_offset = convert (sizetype, arg0);
9122 return TREE_OPERAND (arg1, 0);
9126 return 0;
9129 /* Expand code for a post- or pre- increment or decrement
9130 and return the RTX for the result.
9131 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9133 static rtx
9134 expand_increment (exp, post, ignore)
9135 tree exp;
9136 int post, ignore;
9138 rtx op0, op1;
9139 rtx temp, value;
9140 tree incremented = TREE_OPERAND (exp, 0);
9141 optab this_optab = add_optab;
9142 int icode;
9143 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9144 int op0_is_copy = 0;
9145 int single_insn = 0;
9146 /* 1 means we can't store into OP0 directly,
9147 because it is a subreg narrower than a word,
9148 and we don't dare clobber the rest of the word. */
9149 int bad_subreg = 0;
9151 /* Stabilize any component ref that might need to be
9152 evaluated more than once below. */
9153 if (!post
9154 || TREE_CODE (incremented) == BIT_FIELD_REF
9155 || (TREE_CODE (incremented) == COMPONENT_REF
9156 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9157 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9158 incremented = stabilize_reference (incremented);
9159 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9160 ones into save exprs so that they don't accidentally get evaluated
9161 more than once by the code below. */
9162 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9163 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9164 incremented = save_expr (incremented);
9166 /* Compute the operands as RTX.
9167 Note whether OP0 is the actual lvalue or a copy of it:
9168 I believe it is a copy iff it is a register or subreg
9169 and insns were generated in computing it. */
9171 temp = get_last_insn ();
9172 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9174 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9175 in place but instead must do sign- or zero-extension during assignment,
9176 so we copy it into a new register and let the code below use it as
9177 a copy.
9179 Note that we can safely modify this SUBREG since it is known not to be
9180 shared (it was made by the expand_expr call above). */
9182 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9184 if (post)
9185 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9186 else
9187 bad_subreg = 1;
9189 else if (GET_CODE (op0) == SUBREG
9190 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9192 /* We cannot increment this SUBREG in place. If we are
9193 post-incrementing, get a copy of the old value. Otherwise,
9194 just mark that we cannot increment in place. */
9195 if (post)
9196 op0 = copy_to_reg (op0);
9197 else
9198 bad_subreg = 1;
9201 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9202 && temp != get_last_insn ());
9203 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9205 /* Decide whether incrementing or decrementing. */
9206 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9207 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9208 this_optab = sub_optab;
9210 /* Convert decrement by a constant into a negative increment. */
9211 if (this_optab == sub_optab
9212 && GET_CODE (op1) == CONST_INT)
9214 op1 = GEN_INT (-INTVAL (op1));
9215 this_optab = add_optab;
9218 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9219 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9221 /* For a preincrement, see if we can do this with a single instruction. */
9222 if (!post)
9224 icode = (int) this_optab->handlers[(int) mode].insn_code;
9225 if (icode != (int) CODE_FOR_nothing
9226 /* Make sure that OP0 is valid for operands 0 and 1
9227 of the insn we want to queue. */
9228 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9229 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9230 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9231 single_insn = 1;
9234 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9235 then we cannot just increment OP0. We must therefore contrive to
9236 increment the original value. Then, for postincrement, we can return
9237 OP0 since it is a copy of the old value. For preincrement, expand here
9238 unless we can do it with a single insn.
9240 Likewise if storing directly into OP0 would clobber high bits
9241 we need to preserve (bad_subreg). */
9242 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9244 /* This is the easiest way to increment the value wherever it is.
9245 Problems with multiple evaluation of INCREMENTED are prevented
9246 because either (1) it is a component_ref or preincrement,
9247 in which case it was stabilized above, or (2) it is an array_ref
9248 with constant index in an array in a register, which is
9249 safe to reevaluate. */
9250 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9251 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9252 ? MINUS_EXPR : PLUS_EXPR),
9253 TREE_TYPE (exp),
9254 incremented,
9255 TREE_OPERAND (exp, 1));
9257 while (TREE_CODE (incremented) == NOP_EXPR
9258 || TREE_CODE (incremented) == CONVERT_EXPR)
9260 newexp = convert (TREE_TYPE (incremented), newexp);
9261 incremented = TREE_OPERAND (incremented, 0);
9264 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9265 return post ? op0 : temp;
9268 if (post)
9270 /* We have a true reference to the value in OP0.
9271 If there is an insn to add or subtract in this mode, queue it.
9272 Queueing the increment insn avoids the register shuffling
9273 that often results if we must increment now and first save
9274 the old value for subsequent use. */
9276 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9277 op0 = stabilize (op0);
9278 #endif
9280 icode = (int) this_optab->handlers[(int) mode].insn_code;
9281 if (icode != (int) CODE_FOR_nothing
9282 /* Make sure that OP0 is valid for operands 0 and 1
9283 of the insn we want to queue. */
9284 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9285 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9287 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9288 op1 = force_reg (mode, op1);
9290 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9292 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9294 rtx addr = (general_operand (XEXP (op0, 0), mode)
9295 ? force_reg (Pmode, XEXP (op0, 0))
9296 : copy_to_reg (XEXP (op0, 0)));
9297 rtx temp, result;
9299 op0 = replace_equiv_address (op0, addr);
9300 temp = force_reg (GET_MODE (op0), op0);
9301 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9302 op1 = force_reg (mode, op1);
9304 /* The increment queue is LIFO, thus we have to `queue'
9305 the instructions in reverse order. */
9306 enqueue_insn (op0, gen_move_insn (op0, temp));
9307 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9308 return result;
9312 /* Preincrement, or we can't increment with one simple insn. */
9313 if (post)
9314 /* Save a copy of the value before inc or dec, to return it later. */
9315 temp = value = copy_to_reg (op0);
9316 else
9317 /* Arrange to return the incremented value. */
9318 /* Copy the rtx because expand_binop will protect from the queue,
9319 and the results of that would be invalid for us to return
9320 if our caller does emit_queue before using our result. */
9321 temp = copy_rtx (value = op0);
9323 /* Increment however we can. */
9324 op1 = expand_binop (mode, this_optab, value, op1, op0,
9325 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9327 /* Make sure the value is stored into OP0. */
9328 if (op1 != op0)
9329 emit_move_insn (op0, op1);
9331 return temp;
9334 /* At the start of a function, record that we have no previously-pushed
9335 arguments waiting to be popped. */
9337 void
9338 init_pending_stack_adjust ()
9340 pending_stack_adjust = 0;
9343 /* When exiting from function, if safe, clear out any pending stack adjust
9344 so the adjustment won't get done.
9346 Note, if the current function calls alloca, then it must have a
9347 frame pointer regardless of the value of flag_omit_frame_pointer. */
9349 void
9350 clear_pending_stack_adjust ()
9352 #ifdef EXIT_IGNORE_STACK
9353 if (optimize > 0
9354 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9355 && EXIT_IGNORE_STACK
9356 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9357 && ! flag_inline_functions)
9359 stack_pointer_delta -= pending_stack_adjust,
9360 pending_stack_adjust = 0;
9362 #endif
9365 /* Pop any previously-pushed arguments that have not been popped yet. */
9367 void
9368 do_pending_stack_adjust ()
9370 if (inhibit_defer_pop == 0)
9372 if (pending_stack_adjust != 0)
9373 adjust_stack (GEN_INT (pending_stack_adjust));
9374 pending_stack_adjust = 0;
9378 /* Expand conditional expressions. */
9380 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9381 LABEL is an rtx of code CODE_LABEL, in this function and all the
9382 functions here. */
9384 void
9385 jumpifnot (exp, label)
9386 tree exp;
9387 rtx label;
9389 do_jump (exp, label, NULL_RTX);
9392 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9394 void
9395 jumpif (exp, label)
9396 tree exp;
9397 rtx label;
9399 do_jump (exp, NULL_RTX, label);
9402 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9403 the result is zero, or IF_TRUE_LABEL if the result is one.
9404 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9405 meaning fall through in that case.
9407 do_jump always does any pending stack adjust except when it does not
9408 actually perform a jump. An example where there is no jump
9409 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9411 This function is responsible for optimizing cases such as
9412 &&, || and comparison operators in EXP. */
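/* Example (annotation): for EXP of the form `a && b' with both labels
   supplied, the TRUTH_ANDIF_EXPR case below first emits
   do_jump (a, if_false_label, NULL_RTX), which branches out as soon as
   A is false, and then do_jump (b, if_false_label, if_true_label), so
   B is only evaluated when A is nonzero, as C requires.  */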
9414 void
9415 do_jump (exp, if_false_label, if_true_label)
9416 tree exp;
9417 rtx if_false_label, if_true_label;
9419 enum tree_code code = TREE_CODE (exp);
9420 /* Some cases need to create a label to jump to
9421 in order to properly fall through.
9422 These cases set DROP_THROUGH_LABEL nonzero. */
9423 rtx drop_through_label = 0;
9424 rtx temp;
9425 int i;
9426 tree type;
9427 enum machine_mode mode;
9429 #ifdef MAX_INTEGER_COMPUTATION_MODE
9430 check_max_integer_computation_mode (exp);
9431 #endif
9433 emit_queue ();
9435 switch (code)
9437 case ERROR_MARK:
9438 break;
9440 case INTEGER_CST:
9441 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9442 if (temp)
9443 emit_jump (temp);
9444 break;
9446 #if 0
9447 /* This is not true with #pragma weak */
9448 case ADDR_EXPR:
9449 /* The address of something can never be zero. */
9450 if (if_true_label)
9451 emit_jump (if_true_label);
9452 break;
9453 #endif
9455 case NOP_EXPR:
9456 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9457 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9458 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9459 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9460 goto normal;
9461 case CONVERT_EXPR:
9462 /* If we are narrowing the operand, we have to do the compare in the
9463 narrower mode. */
9464 if ((TYPE_PRECISION (TREE_TYPE (exp))
9465 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9466 goto normal;
9467 case NON_LVALUE_EXPR:
9468 case REFERENCE_EXPR:
9469 case ABS_EXPR:
9470 case NEGATE_EXPR:
9471 case LROTATE_EXPR:
9472 case RROTATE_EXPR:
9473 /* These cannot change zero->non-zero or vice versa. */
9474 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9475 break;
9477 case WITH_RECORD_EXPR:
9478 /* Put the object on the placeholder list, recurse through our first
9479 operand, and pop the list. */
9480 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9481 placeholder_list);
9482 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9483 placeholder_list = TREE_CHAIN (placeholder_list);
9484 break;
9486 #if 0
9487 /* This is never less insns than evaluating the PLUS_EXPR followed by
9488 a test and can be longer if the test is eliminated. */
9489 case PLUS_EXPR:
9490 /* Reduce to minus. */
9491 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9492 TREE_OPERAND (exp, 0),
9493 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9494 TREE_OPERAND (exp, 1))));
9495 /* Process as MINUS. */
9496 #endif
9498 case MINUS_EXPR:
9499 /* Non-zero iff operands of minus differ. */
9500 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9501 TREE_OPERAND (exp, 0),
9502 TREE_OPERAND (exp, 1)),
9503 NE, NE, if_false_label, if_true_label);
9504 break;
9506 case BIT_AND_EXPR:
9507 /* If we are AND'ing with a small constant, do this comparison in the
9508 smallest type that fits. If the machine doesn't have comparisons
9509 that small, it will be converted back to the wider comparison.
9510 This helps if we are testing the sign bit of a narrower object.
9511 combine can't do this for us because it can't know whether a
9512 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
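/* Example (annotation): a test such as `(x & 0x80) != 0' on an SImode X
   depends only on the low byte, so tree_floor_log2 gives 7, mode_for_size
   asks for an 8-bit integer mode, and the whole test is redone below as a
   QImode comparison when the target supports one.  */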
9514 if (! SLOW_BYTE_ACCESS
9515 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9516 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9517 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9518 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9519 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9520 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9521 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9522 != CODE_FOR_nothing))
9524 do_jump (convert (type, exp), if_false_label, if_true_label);
9525 break;
9527 goto normal;
9529 case TRUTH_NOT_EXPR:
9530 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9531 break;
9533 case TRUTH_ANDIF_EXPR:
9534 if (if_false_label == 0)
9535 if_false_label = drop_through_label = gen_label_rtx ();
9536 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9537 start_cleanup_deferral ();
9538 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9539 end_cleanup_deferral ();
9540 break;
9542 case TRUTH_ORIF_EXPR:
9543 if (if_true_label == 0)
9544 if_true_label = drop_through_label = gen_label_rtx ();
9545 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9546 start_cleanup_deferral ();
9547 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9548 end_cleanup_deferral ();
9549 break;
9551 case COMPOUND_EXPR:
9552 push_temp_slots ();
9553 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9554 preserve_temp_slots (NULL_RTX);
9555 free_temp_slots ();
9556 pop_temp_slots ();
9557 emit_queue ();
9558 do_pending_stack_adjust ();
9559 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9560 break;
9562 case COMPONENT_REF:
9563 case BIT_FIELD_REF:
9564 case ARRAY_REF:
9565 case ARRAY_RANGE_REF:
9567 HOST_WIDE_INT bitsize, bitpos;
9568 int unsignedp;
9569 enum machine_mode mode;
9570 tree type;
9571 tree offset;
9572 int volatilep = 0;
9574 /* Get description of this reference. We don't actually care
9575 about the underlying object here. */
9576 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9577 &unsignedp, &volatilep);
9579 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9580 if (! SLOW_BYTE_ACCESS
9581 && type != 0 && bitsize >= 0
9582 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9583 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9584 != CODE_FOR_nothing))
9586 do_jump (convert (type, exp), if_false_label, if_true_label);
9587 break;
9589 goto normal;
9592 case COND_EXPR:
9593 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9594 if (integer_onep (TREE_OPERAND (exp, 1))
9595 && integer_zerop (TREE_OPERAND (exp, 2)))
9596 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9598 else if (integer_zerop (TREE_OPERAND (exp, 1))
9599 && integer_onep (TREE_OPERAND (exp, 2)))
9600 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9602 else
9604 rtx label1 = gen_label_rtx ();
9605 drop_through_label = gen_label_rtx ();
9607 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9609 start_cleanup_deferral ();
9610 /* Now the THEN-expression. */
9611 do_jump (TREE_OPERAND (exp, 1),
9612 if_false_label ? if_false_label : drop_through_label,
9613 if_true_label ? if_true_label : drop_through_label);
9614 /* In case the do_jump just above never jumps. */
9615 do_pending_stack_adjust ();
9616 emit_label (label1);
9618 /* Now the ELSE-expression. */
9619 do_jump (TREE_OPERAND (exp, 2),
9620 if_false_label ? if_false_label : drop_through_label,
9621 if_true_label ? if_true_label : drop_through_label);
9622 end_cleanup_deferral ();
9624 break;
9626 case EQ_EXPR:
9628 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9630 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9631 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9633 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9634 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9635 do_jump
9636 (fold
9637 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9638 fold (build (EQ_EXPR, TREE_TYPE (exp),
9639 fold (build1 (REALPART_EXPR,
9640 TREE_TYPE (inner_type),
9641 exp0)),
9642 fold (build1 (REALPART_EXPR,
9643 TREE_TYPE (inner_type),
9644 exp1)))),
9645 fold (build (EQ_EXPR, TREE_TYPE (exp),
9646 fold (build1 (IMAGPART_EXPR,
9647 TREE_TYPE (inner_type),
9648 exp0)),
9649 fold (build1 (IMAGPART_EXPR,
9650 TREE_TYPE (inner_type),
9651 exp1)))))),
9652 if_false_label, if_true_label);
9655 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9656 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9658 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9659 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9660 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9661 else
9662 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9663 break;
9666 case NE_EXPR:
9668 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9670 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9671 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9673 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9674 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9675 do_jump
9676 (fold
9677 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9678 fold (build (NE_EXPR, TREE_TYPE (exp),
9679 fold (build1 (REALPART_EXPR,
9680 TREE_TYPE (inner_type),
9681 exp0)),
9682 fold (build1 (REALPART_EXPR,
9683 TREE_TYPE (inner_type),
9684 exp1)))),
9685 fold (build (NE_EXPR, TREE_TYPE (exp),
9686 fold (build1 (IMAGPART_EXPR,
9687 TREE_TYPE (inner_type),
9688 exp0)),
9689 fold (build1 (IMAGPART_EXPR,
9690 TREE_TYPE (inner_type),
9691 exp1)))))),
9692 if_false_label, if_true_label);
9695 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9696 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9698 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9699 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9700 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9701 else
9702 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9703 break;
9706 case LT_EXPR:
9707 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9708 if (GET_MODE_CLASS (mode) == MODE_INT
9709 && ! can_compare_p (LT, mode, ccp_jump))
9710 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9711 else
9712 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9713 break;
9715 case LE_EXPR:
9716 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9717 if (GET_MODE_CLASS (mode) == MODE_INT
9718 && ! can_compare_p (LE, mode, ccp_jump))
9719 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9720 else
9721 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9722 break;
9724 case GT_EXPR:
9725 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9726 if (GET_MODE_CLASS (mode) == MODE_INT
9727 && ! can_compare_p (GT, mode, ccp_jump))
9728 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9729 else
9730 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9731 break;
9733 case GE_EXPR:
9734 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9735 if (GET_MODE_CLASS (mode) == MODE_INT
9736 && ! can_compare_p (GE, mode, ccp_jump))
9737 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9738 else
9739 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9740 break;
9742 case UNORDERED_EXPR:
9743 case ORDERED_EXPR:
9745 enum rtx_code cmp, rcmp;
9746 int do_rev;
9748 if (code == UNORDERED_EXPR)
9749 cmp = UNORDERED, rcmp = ORDERED;
9750 else
9751 cmp = ORDERED, rcmp = UNORDERED;
9752 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9754 do_rev = 0;
9755 if (! can_compare_p (cmp, mode, ccp_jump)
9756 && (can_compare_p (rcmp, mode, ccp_jump)
9757 /* If the target doesn't provide either UNORDERED or ORDERED
9758 comparisons, canonicalize on UNORDERED for the library. */
9759 || rcmp == UNORDERED))
9760 do_rev = 1;
9762 if (! do_rev)
9763 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9764 else
9765 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9767 break;
9770 enum rtx_code rcode1;
9771 enum tree_code tcode2;
9773 case UNLT_EXPR:
9774 rcode1 = UNLT;
9775 tcode2 = LT_EXPR;
9776 goto unordered_bcc;
9777 case UNLE_EXPR:
9778 rcode1 = UNLE;
9779 tcode2 = LE_EXPR;
9780 goto unordered_bcc;
9781 case UNGT_EXPR:
9782 rcode1 = UNGT;
9783 tcode2 = GT_EXPR;
9784 goto unordered_bcc;
9785 case UNGE_EXPR:
9786 rcode1 = UNGE;
9787 tcode2 = GE_EXPR;
9788 goto unordered_bcc;
9789 case UNEQ_EXPR:
9790 rcode1 = UNEQ;
9791 tcode2 = EQ_EXPR;
9792 goto unordered_bcc;
9794 unordered_bcc:
9795 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9796 if (can_compare_p (rcode1, mode, ccp_jump))
9797 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9798 if_true_label);
9799 else
9801 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9802 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9803 tree cmp0, cmp1;
9805 /* If the target doesn't support combined unordered
9806 compares, decompose into UNORDERED + comparison. */
9807 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9808 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9809 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9810 do_jump (exp, if_false_label, if_true_label);
9813 break;
9815 /* Special case:
9816 __builtin_expect (<test>, 0) and
9817 __builtin_expect (<test>, 1)
9819 We need to do this here, so that <test> is not converted to a SCC
9820 operation on machines that use condition code registers and COMPARE
9821 like the PowerPC, and then the jump is done based on whether the SCC
9822 operation produced a 1 or 0. */
9823 case CALL_EXPR:
9824 /* Check for a built-in function. */
9825 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9827 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9828 tree arglist = TREE_OPERAND (exp, 1);
9830 if (TREE_CODE (fndecl) == FUNCTION_DECL
9831 && DECL_BUILT_IN (fndecl)
9832 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9833 && arglist != NULL_TREE
9834 && TREE_CHAIN (arglist) != NULL_TREE)
9836 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9837 if_true_label);
9839 if (seq != NULL_RTX)
9841 emit_insn (seq);
9842 return;
9846 /* fall through and generate the normal code. */
9848 default:
9849 normal:
9850 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9851 #if 0
9852 /* This is not needed any more and causes poor code since it causes
9853 comparisons and tests from non-SI objects to have different code
9854 sequences. */
9855 /* Copy to register to avoid generating bad insns by cse
9856 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9857 if (!cse_not_expected && GET_CODE (temp) == MEM)
9858 temp = copy_to_reg (temp);
9859 #endif
9860 do_pending_stack_adjust ();
9861 /* Do any postincrements in the expression that was tested. */
9862 emit_queue ();
9864 if (GET_CODE (temp) == CONST_INT
9865 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9866 || GET_CODE (temp) == LABEL_REF)
9868 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9869 if (target)
9870 emit_jump (target);
9872 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9873 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9874 /* Note swapping the labels gives us not-equal. */
9875 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9876 else if (GET_MODE (temp) != VOIDmode)
9877 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9878 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9879 GET_MODE (temp), NULL_RTX,
9880 if_false_label, if_true_label);
9881 else
9882 abort ();
9885 if (drop_through_label)
9887 /* If do_jump produces code that might be jumped around,
9888 do any stack adjusts from that code, before the place
9889 where control merges in. */
9890 do_pending_stack_adjust ();
9891 emit_label (drop_through_label);
9895 /* Given a comparison expression EXP for values too wide to be compared
9896 with one insn, test the comparison and jump to the appropriate label.
9897 The code of EXP is ignored; we always test GT if SWAP is 0,
9898 and LT if SWAP is 1. */
9900 static void
9901 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9902 tree exp;
9903 int swap;
9904 rtx if_false_label, if_true_label;
9906 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9907 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9908 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9909 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9911 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9914 /* Compare OP0 with OP1, word at a time, in mode MODE.
9915 UNSIGNEDP says to do unsigned comparison.
9916 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
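/* Example (annotation; high()/low() are just shorthand for the subwords
   fetched with operand_subword_force): a signed DImode `a > b' on a
   32-bit target is expanded by the loop below roughly as

       if (high(a) >  high(b))  goto if_true_label;    -- signed compare
       if (high(a) != high(b))  goto if_false_label;
       if (low(a)  >  low(b))   goto if_true_label;    -- unsigned compare
       goto if_false_label;

   Only the most significant word is compared with the signedness of the
   original operands; all lower words are compared unsigned.  */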
9918 void
9919 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9920 enum machine_mode mode;
9921 int unsignedp;
9922 rtx op0, op1;
9923 rtx if_false_label, if_true_label;
9925 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9926 rtx drop_through_label = 0;
9927 int i;
9929 if (! if_true_label || ! if_false_label)
9930 drop_through_label = gen_label_rtx ();
9931 if (! if_true_label)
9932 if_true_label = drop_through_label;
9933 if (! if_false_label)
9934 if_false_label = drop_through_label;
9936 /* Compare a word at a time, high order first. */
9937 for (i = 0; i < nwords; i++)
9939 rtx op0_word, op1_word;
9941 if (WORDS_BIG_ENDIAN)
9943 op0_word = operand_subword_force (op0, i, mode);
9944 op1_word = operand_subword_force (op1, i, mode);
9946 else
9948 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9949 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9952 /* All but high-order word must be compared as unsigned. */
9953 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9954 (unsignedp || i > 0), word_mode, NULL_RTX,
9955 NULL_RTX, if_true_label);
9957 /* Consider lower words only if these are equal. */
9958 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9959 NULL_RTX, NULL_RTX, if_false_label);
9962 if (if_false_label)
9963 emit_jump (if_false_label);
9964 if (drop_through_label)
9965 emit_label (drop_through_label);
9968 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9969 with one insn, test the comparison and jump to the appropriate label. */
9971 static void
9972 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9973 tree exp;
9974 rtx if_false_label, if_true_label;
9976 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9977 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9978 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9979 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9980 int i;
9981 rtx drop_through_label = 0;
9983 if (! if_false_label)
9984 drop_through_label = if_false_label = gen_label_rtx ();
9986 for (i = 0; i < nwords; i++)
9987 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9988 operand_subword_force (op1, i, mode),
9989 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9990 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9992 if (if_true_label)
9993 emit_jump (if_true_label);
9994 if (drop_through_label)
9995 emit_label (drop_through_label);
9998 /* Jump according to whether OP0 is 0.
9999 We assume that OP0 has an integer mode that is too wide
10000 for the available compare insns. */
10002 void
10003 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10004 rtx op0;
10005 rtx if_false_label, if_true_label;
10007 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10008 rtx part;
10009 int i;
10010 rtx drop_through_label = 0;
10012 /* The fastest way of doing this comparison on almost any machine is to
10013 "or" all the words and compare the result. If all have to be loaded
10014 from memory and this is a very wide item, it's possible this may
10015 be slower, but that's highly unlikely. */
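/* Example (annotation; w0..w3 are shorthand for operand_subword_force
   results): for a TImode OP0 on a 32-bit target this computes
   part = w0 | w1 | w2 | w3 and then a single word_mode comparison of
   PART against zero replaces four separate compare-and-branch pairs.  */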
10017 part = gen_reg_rtx (word_mode);
10018 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10019 for (i = 1; i < nwords && part != 0; i++)
10020 part = expand_binop (word_mode, ior_optab, part,
10021 operand_subword_force (op0, i, GET_MODE (op0)),
10022 part, 1, OPTAB_WIDEN);
10024 if (part != 0)
10026 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10027 NULL_RTX, if_false_label, if_true_label);
10029 return;
10032 /* If we couldn't do the "or" simply, do this with a series of compares. */
10033 if (! if_false_label)
10034 drop_through_label = if_false_label = gen_label_rtx ();
10036 for (i = 0; i < nwords; i++)
10037 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10038 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10039 if_false_label, NULL_RTX);
10041 if (if_true_label)
10042 emit_jump (if_true_label);
10044 if (drop_through_label)
10045 emit_label (drop_through_label);
10048 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10049 (including code to compute the values to be compared)
10050 and set (CC0) according to the result.
10051 The decision as to signed or unsigned comparison must be made by the caller.
10053 We force a stack adjustment unless there are currently
10054 things pushed on the stack that aren't yet used.
10056 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10057 compared. */
10059 rtx
10060 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10061 rtx op0, op1;
10062 enum rtx_code code;
10063 int unsignedp;
10064 enum machine_mode mode;
10065 rtx size;
10067 enum rtx_code ucode;
10068 rtx tem;
10070 /* If one operand is constant, make it the second one. Only do this
10071 if the other operand is not constant as well. */
10073 if (swap_commutative_operands_p (op0, op1))
10075 tem = op0;
10076 op0 = op1;
10077 op1 = tem;
10078 code = swap_condition (code);
10081 if (flag_force_mem)
10083 op0 = force_not_mem (op0);
10084 op1 = force_not_mem (op1);
10087 do_pending_stack_adjust ();
10089 ucode = unsignedp ? unsigned_condition (code) : code;
10090 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10091 return tem;
10093 #if 0
10094 /* There's no need to do this now that combine.c can eliminate lots of
10095 sign extensions. This can be less efficient in certain cases on other
10096 machines. */
10098 /* If this is a signed equality comparison, we can do it as an
10099 unsigned comparison since zero-extension is cheaper than sign
10100 extension and comparisons with zero are done as unsigned. This is
10101 the case even on machines that can do fast sign extension, since
10102 zero-extension is easier to combine with other operations than
10103 sign-extension is. If we are comparing against a constant, we must
10104 convert it to what it would look like unsigned. */
10105 if ((code == EQ || code == NE) && ! unsignedp
10106 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10108 if (GET_CODE (op1) == CONST_INT
10109 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10110 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10111 unsignedp = 1;
10113 #endif
10115 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10117 #if HAVE_cc0
10118 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10119 #else
10120 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10121 #endif
10124 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10125 The decision as to signed or unsigned comparison must be made by the caller.
10127 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10128 compared. */
10130 void
10131 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10132 if_false_label, if_true_label)
10133 rtx op0, op1;
10134 enum rtx_code code;
10135 int unsignedp;
10136 enum machine_mode mode;
10137 rtx size;
10138 rtx if_false_label, if_true_label;
10140 enum rtx_code ucode;
10141 rtx tem;
10142 int dummy_true_label = 0;
10144 /* Reverse the comparison if that is safe and we want to jump if it is
10145 false. */
10146 if (! if_true_label && ! FLOAT_MODE_P (mode))
10148 if_true_label = if_false_label;
10149 if_false_label = 0;
10150 code = reverse_condition (code);
10153 /* If one operand is constant, make it the second one. Only do this
10154 if the other operand is not constant as well. */
10156 if (swap_commutative_operands_p (op0, op1))
10158 tem = op0;
10159 op0 = op1;
10160 op1 = tem;
10161 code = swap_condition (code);
10164 if (flag_force_mem)
10166 op0 = force_not_mem (op0);
10167 op1 = force_not_mem (op1);
10170 do_pending_stack_adjust ();
10172 ucode = unsignedp ? unsigned_condition (code) : code;
10173 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10175 if (tem == const_true_rtx)
10177 if (if_true_label)
10178 emit_jump (if_true_label);
10180 else
10182 if (if_false_label)
10183 emit_jump (if_false_label);
10185 return;
10188 #if 0
10189 /* There's no need to do this now that combine.c can eliminate lots of
10190 sign extensions. This can be less efficient in certain cases on other
10191 machines. */
10193 /* If this is a signed equality comparison, we can do it as an
10194 unsigned comparison since zero-extension is cheaper than sign
10195 extension and comparisons with zero are done as unsigned. This is
10196 the case even on machines that can do fast sign extension, since
10197 zero-extension is easier to combine with other operations than
10198 sign-extension is. If we are comparing against a constant, we must
10199 convert it to what it would look like unsigned. */
10200 if ((code == EQ || code == NE) && ! unsignedp
10201 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10203 if (GET_CODE (op1) == CONST_INT
10204 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10205 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10206 unsignedp = 1;
10208 #endif
10210 if (! if_true_label)
10212 dummy_true_label = 1;
10213 if_true_label = gen_label_rtx ();
10216 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10217 if_true_label);
10219 if (if_false_label)
10220 emit_jump (if_false_label);
10221 if (dummy_true_label)
10222 emit_label (if_true_label);
10225 /* Generate code for a comparison expression EXP (including code to compute
10226 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10227 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10228 generated code will drop through.
10229 SIGNED_CODE should be the rtx operation for this comparison for
10230 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10232 We force a stack adjustment unless there are currently
10233 things pushed on the stack that aren't yet used. */
10235 static void
10236 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10237 if_true_label)
10238 tree exp;
10239 enum rtx_code signed_code, unsigned_code;
10240 rtx if_false_label, if_true_label;
10242 rtx op0, op1;
10243 tree type;
10244 enum machine_mode mode;
10245 int unsignedp;
10246 enum rtx_code code;
10248 /* Don't crash if the comparison was erroneous. */
10249 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10250 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10251 return;
10253 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10254 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10255 return;
10257 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10258 mode = TYPE_MODE (type);
10259 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10260 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10261 || (GET_MODE_BITSIZE (mode)
10262 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10263 1)))))))
10265 /* op0 might have been replaced by promoted constant, in which
10266 case the type of second argument should be used. */
10267 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10268 mode = TYPE_MODE (type);
10270 unsignedp = TREE_UNSIGNED (type);
10271 code = unsignedp ? unsigned_code : signed_code;
10273 #ifdef HAVE_canonicalize_funcptr_for_compare
10274 /* If function pointers need to be "canonicalized" before they can
10275 be reliably compared, then canonicalize them. */
10276 if (HAVE_canonicalize_funcptr_for_compare
10277 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10278 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10279 == FUNCTION_TYPE))
10281 rtx new_op0 = gen_reg_rtx (mode);
10283 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10284 op0 = new_op0;
10287 if (HAVE_canonicalize_funcptr_for_compare
10288 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10289 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10290 == FUNCTION_TYPE))
10292 rtx new_op1 = gen_reg_rtx (mode);
10294 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10295 op1 = new_op1;
10297 #endif
10299 /* Do any postincrements in the expression that was tested. */
10300 emit_queue ();
10302 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10303 ((mode == BLKmode)
10304 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10305 if_false_label, if_true_label);
10308 /* Generate code to calculate EXP using a store-flag instruction
10309 and return an rtx for the result. EXP is either a comparison
10310 or a TRUTH_NOT_EXPR whose operand is a comparison.
10312 If TARGET is nonzero, store the result there if convenient.
10314 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10315 cheap.
10317 Return zero if there is no suitable set-flag instruction
10318 available on this machine.
10320 Once expand_expr has been called on the arguments of the comparison,
10321 we are committed to doing the store flag, since it is not safe to
10322 re-evaluate the expression. We emit the store-flag insn by calling
10323 emit_store_flag, but only expand the arguments if we have a reason
10324 to believe that emit_store_flag will be successful. If we think that
10325 it will, but it isn't, we have to simulate the store-flag with a
10326 set/jump/set sequence. */
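/* Annotation: the set/jump/set fallback mentioned above has the shape

       target = 1;
       if (op0 <cond> op1) goto label;
       target = 0;
     label:

   with the two constants exchanged when INVERT is set; the tail of this
   function emits this sequence when emit_store_flag fails.  */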
10328 static rtx
10329 do_store_flag (exp, target, mode, only_cheap)
10330 tree exp;
10331 rtx target;
10332 enum machine_mode mode;
10333 int only_cheap;
10335 enum rtx_code code;
10336 tree arg0, arg1, type;
10337 tree tem;
10338 enum machine_mode operand_mode;
10339 int invert = 0;
10340 int unsignedp;
10341 rtx op0, op1;
10342 enum insn_code icode;
10343 rtx subtarget = target;
10344 rtx result, label;
10346 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10347 result at the end. We can't simply invert the test since it would
10348 have already been inverted if it were valid. This case occurs for
10349 some floating-point comparisons. */
10351 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10352 invert = 1, exp = TREE_OPERAND (exp, 0);
10354 arg0 = TREE_OPERAND (exp, 0);
10355 arg1 = TREE_OPERAND (exp, 1);
10357 /* Don't crash if the comparison was erroneous. */
10358 if (arg0 == error_mark_node || arg1 == error_mark_node)
10359 return const0_rtx;
10361 type = TREE_TYPE (arg0);
10362 operand_mode = TYPE_MODE (type);
10363 unsignedp = TREE_UNSIGNED (type);
10365 /* We won't bother with BLKmode store-flag operations because it would mean
10366 passing a lot of information to emit_store_flag. */
10367 if (operand_mode == BLKmode)
10368 return 0;
10370 /* We won't bother with store-flag operations involving function pointers
10371 when function pointers must be canonicalized before comparisons. */
10372 #ifdef HAVE_canonicalize_funcptr_for_compare
10373 if (HAVE_canonicalize_funcptr_for_compare
10374 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10375 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10376 == FUNCTION_TYPE))
10377 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10378 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10379 == FUNCTION_TYPE))))
10380 return 0;
10381 #endif
10383 STRIP_NOPS (arg0);
10384 STRIP_NOPS (arg1);
10386 /* Get the rtx comparison code to use. We know that EXP is a comparison
10387 operation of some type. Some comparisons against 1 and -1 can be
10388 converted to comparisons with zero. Do so here so that the tests
10389 below will be aware that we have a comparison with zero. These
10390 tests will not catch constants in the first operand, but constants
10391 are rarely passed as the first operand. */
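/* Examples of the conversions performed below (annotation): `x < 1'
   becomes `x <= 0', a signed `x <= -1' becomes `x < 0', `x >= 1'
   becomes `x > 0', and a signed `x > -1' becomes `x >= 0'.  */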
10393 switch (TREE_CODE (exp))
10395 case EQ_EXPR:
10396 code = EQ;
10397 break;
10398 case NE_EXPR:
10399 code = NE;
10400 break;
10401 case LT_EXPR:
10402 if (integer_onep (arg1))
10403 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10404 else
10405 code = unsignedp ? LTU : LT;
10406 break;
10407 case LE_EXPR:
10408 if (! unsignedp && integer_all_onesp (arg1))
10409 arg1 = integer_zero_node, code = LT;
10410 else
10411 code = unsignedp ? LEU : LE;
10412 break;
10413 case GT_EXPR:
10414 if (! unsignedp && integer_all_onesp (arg1))
10415 arg1 = integer_zero_node, code = GE;
10416 else
10417 code = unsignedp ? GTU : GT;
10418 break;
10419 case GE_EXPR:
10420 if (integer_onep (arg1))
10421 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10422 else
10423 code = unsignedp ? GEU : GE;
10424 break;
10426 case UNORDERED_EXPR:
10427 code = UNORDERED;
10428 break;
10429 case ORDERED_EXPR:
10430 code = ORDERED;
10431 break;
10432 case UNLT_EXPR:
10433 code = UNLT;
10434 break;
10435 case UNLE_EXPR:
10436 code = UNLE;
10437 break;
10438 case UNGT_EXPR:
10439 code = UNGT;
10440 break;
10441 case UNGE_EXPR:
10442 code = UNGE;
10443 break;
10444 case UNEQ_EXPR:
10445 code = UNEQ;
10446 break;
10448 default:
10449 abort ();
10452 /* Put a constant second. */
10453 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10455 tem = arg0; arg0 = arg1; arg1 = tem;
10456 code = swap_condition (code);
10459 /* If this is an equality or inequality test of a single bit, we can
10460 do this by shifting the bit being tested to the low-order bit and
10461 masking the result with the constant 1. If the condition was EQ,
10462 we xor it with 1. This does not require an scc insn and is faster
10463 than an scc insn even if we have it. */
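/* Worked example (annotation): for `(x & 8) != 0' the code below emits
   `(x >> 3) & 1', and for `(x & 8) == 0' it emits `((x >> 3) ^ 1) & 1';
   the AND is applied last so it can combine with surrounding code.  When
   the bit tested is the sign bit of its type, the shift is done unsigned
   and the final AND is omitted entirely.  */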
10465 if ((code == NE || code == EQ)
10466 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10467 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10469 tree inner = TREE_OPERAND (arg0, 0);
10470 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10471 int ops_unsignedp;
10473 /* If INNER is a right shift of a constant and it plus BITNUM does
10474 not overflow, adjust BITNUM and INNER. */
10476 if (TREE_CODE (inner) == RSHIFT_EXPR
10477 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10478 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10479 && bitnum < TYPE_PRECISION (type)
10480 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10481 bitnum - TYPE_PRECISION (type)))
10483 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10484 inner = TREE_OPERAND (inner, 0);
10487 /* If we are going to be able to omit the AND below, we must do our
10488 operations as unsigned. If we must use the AND, we have a choice.
10489 Normally unsigned is faster, but for some machines signed is. */
10490 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10491 #ifdef LOAD_EXTEND_OP
10492 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10493 #else
10495 #endif
10498 if (! get_subtarget (subtarget)
10499 || GET_MODE (subtarget) != operand_mode
10500 || ! safe_from_p (subtarget, inner, 1))
10501 subtarget = 0;
10503 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10505 if (bitnum != 0)
10506 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10507 size_int (bitnum), subtarget, ops_unsignedp);
10509 if (GET_MODE (op0) != mode)
10510 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10512 if ((code == EQ && ! invert) || (code == NE && invert))
10513 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10514 ops_unsignedp, OPTAB_LIB_WIDEN);
10516 /* Put the AND last so it can combine with more things. */
10517 if (bitnum != TYPE_PRECISION (type) - 1)
10518 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10520 return op0;
10523 /* Now see if we are likely to be able to do this. Return if not. */
10524 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10525 return 0;
10527 icode = setcc_gen_code[(int) code];
10528 if (icode == CODE_FOR_nothing
10529 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10531 /* We can only do this if it is one of the special cases that
10532 can be handled without an scc insn. */
10533 if ((code == LT && integer_zerop (arg1))
10534 || (! only_cheap && code == GE && integer_zerop (arg1)))
10536 else if (BRANCH_COST >= 0
10537 && ! only_cheap && (code == NE || code == EQ)
10538 && TREE_CODE (type) != REAL_TYPE
10539 && ((abs_optab->handlers[(int) operand_mode].insn_code
10540 != CODE_FOR_nothing)
10541 || (ffs_optab->handlers[(int) operand_mode].insn_code
10542 != CODE_FOR_nothing)))
10544 else
10545 return 0;
10548 if (! get_subtarget (target)
10549 || GET_MODE (subtarget) != operand_mode
10550 || ! safe_from_p (subtarget, arg1, 1))
10551 subtarget = 0;
10553 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10554 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10556 if (target == 0)
10557 target = gen_reg_rtx (mode);
10559 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10560 because, if the emit_store_flag does anything it will succeed and
10561 OP0 and OP1 will not be used subsequently. */
10563 result = emit_store_flag (target, code,
10564 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10565 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10566 operand_mode, unsignedp, 1);
10568 if (result)
10570 if (invert)
10571 result = expand_binop (mode, xor_optab, result, const1_rtx,
10572 result, 0, OPTAB_LIB_WIDEN);
10573 return result;
10576 /* If this failed, we have to do this with set/compare/jump/set code. */
10577 if (GET_CODE (target) != REG
10578 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10579 target = gen_reg_rtx (GET_MODE (target));
10581 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10582 result = compare_from_rtx (op0, op1, code, unsignedp,
10583 operand_mode, NULL_RTX);
10584 if (GET_CODE (result) == CONST_INT)
10585 return (((result == const0_rtx && ! invert)
10586 || (result != const0_rtx && invert))
10587 ? const0_rtx : const1_rtx);
10589 /* The code of RESULT may not match CODE if compare_from_rtx
10590 decided to swap its operands and reverse the original code.
10592 We know that compare_from_rtx returns either a CONST_INT or
10593 a new comparison code, so it is safe to just extract the
10594 code from RESULT. */
10595 code = GET_CODE (result);
10597 label = gen_label_rtx ();
10598 if (bcc_gen_fctn[(int) code] == 0)
10599 abort ();
10601 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10602 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10603 emit_label (label);
10605 return target;
10609 /* Stubs in case we haven't got a casesi insn. */
10610 #ifndef HAVE_casesi
10611 # define HAVE_casesi 0
10612 # define gen_casesi(a, b, c, d, e) (0)
10613 # define CODE_FOR_casesi CODE_FOR_nothing
10614 #endif
10616 /* If the machine does not have a case insn that compares the bounds,
10617 this means extra overhead for dispatch tables, which raises the
10618 threshold for using them. */
10619 #ifndef CASE_VALUES_THRESHOLD
10620 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10621 #endif /* CASE_VALUES_THRESHOLD */
10623 unsigned int
10624 case_values_threshold ()
10626 return CASE_VALUES_THRESHOLD;
10629 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10630 0 otherwise (i.e. if there is no casesi instruction). */
10631 int
10632 try_casesi (index_type, index_expr, minval, range,
10633 table_label, default_label)
10634 tree index_type, index_expr, minval, range;
10635 rtx table_label ATTRIBUTE_UNUSED;
10636 rtx default_label;
10638 enum machine_mode index_mode = SImode;
10639 int index_bits = GET_MODE_BITSIZE (index_mode);
10640 rtx op1, op2, index;
10641 enum machine_mode op_mode;
10643 if (! HAVE_casesi)
10644 return 0;
10646 /* Convert the index to SImode. */
10647 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10649 enum machine_mode omode = TYPE_MODE (index_type);
10650 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10652 /* We must handle the endpoints in the original mode. */
10653 index_expr = build (MINUS_EXPR, index_type,
10654 index_expr, minval);
10655 minval = integer_zero_node;
10656 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10657 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10658 omode, 1, default_label);
10659 /* Now we can safely truncate. */
10660 index = convert_to_mode (index_mode, index, 0);
10662 else
10664 if (TYPE_MODE (index_type) != index_mode)
10666 index_expr = convert ((*lang_hooks.types.type_for_size)
10667 (index_bits, 0), index_expr);
10668 index_type = TREE_TYPE (index_expr);
10671 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10673 emit_queue ();
10674 index = protect_from_queue (index, 0);
10675 do_pending_stack_adjust ();
10677 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10678 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10679 (index, op_mode))
10680 index = copy_to_mode_reg (op_mode, index);
10682 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10684 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10685 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10686 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10687 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10688 (op1, op_mode))
10689 op1 = copy_to_mode_reg (op_mode, op1);
10691 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10693 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10694 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10695 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10696 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10697 (op2, op_mode))
10698 op2 = copy_to_mode_reg (op_mode, op2);
10700 emit_jump_insn (gen_casesi (index, op1, op2,
10701 table_label, default_label));
10702 return 1;
10705 /* Attempt to generate a tablejump instruction; same concept. */
10706 #ifndef HAVE_tablejump
10707 #define HAVE_tablejump 0
10708 #define gen_tablejump(x, y) (0)
10709 #endif
10711 /* Subroutine of the next function.
10713 INDEX is the value being switched on, with the lowest value
10714 in the table already subtracted.
10715 MODE is its expected mode (needed if INDEX is constant).
10716 RANGE is the length of the jump table.
10717 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10719 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10720 index value is out of range. */
10722 static void
10723 do_tablejump (index, mode, range, table_label, default_label)
10724 rtx index, range, table_label, default_label;
10725 enum machine_mode mode;
10727 rtx temp, vector;
10729 /* Do an unsigned comparison (in the proper mode) between the index
10730 expression and the value which represents the length of the range.
10731 Since we just finished subtracting the lower bound of the range
10732 from the index expression, this comparison allows us to simultaneously
10733 check that the original index expression value is both greater than
10734 or equal to the minimum value of the range and less than or equal to
10735 the maximum value of the range. */
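/* Annotation: concretely, for a switch whose case values span MIN..MAX
   (placeholders), INDEX has already had MIN subtracted and RANGE is
   MAX - MIN, so the single unsigned test

       if ((unsigned) (index - MIN) > (unsigned) (MAX - MIN)) goto default;

   rejects both original values below MIN (which wrap around to large
   unsigned values) and values above MAX.  */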
10737 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10738 default_label);
10740 /* If index is in range, it must fit in Pmode.
10741 Convert to Pmode so we can index with it. */
10742 if (mode != Pmode)
10743 index = convert_to_mode (Pmode, index, 1);
10745 /* Don't let a MEM slip thru, because then INDEX that comes
10746 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10747 and break_out_memory_refs will go to work on it and mess it up. */
10748 #ifdef PIC_CASE_VECTOR_ADDRESS
10749 if (flag_pic && GET_CODE (index) != REG)
10750 index = copy_to_mode_reg (Pmode, index);
10751 #endif
10753 /* If flag_force_addr were to affect this address
10754 it could interfere with the tricky assumptions made
10755 about addresses that contain label-refs,
10756 which may be valid only very near the tablejump itself. */
10757 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10758 GET_MODE_SIZE, because this indicates how large insns are. The other
10759 uses should all be Pmode, because they are addresses. This code
10760 could fail if addresses and insns are not the same size. */
10761 index = gen_rtx_PLUS (Pmode,
10762 gen_rtx_MULT (Pmode, index,
10763 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10764 gen_rtx_LABEL_REF (Pmode, table_label));
10765 #ifdef PIC_CASE_VECTOR_ADDRESS
10766 if (flag_pic)
10767 index = PIC_CASE_VECTOR_ADDRESS (index);
10768 else
10769 #endif
10770 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10771 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10772 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10773 RTX_UNCHANGING_P (vector) = 1;
10774 convert_move (temp, vector, 0);
10776 emit_jump_insn (gen_tablejump (temp, table_label));
10778 /* If we are generating PIC code or if the table is PC-relative, the
10779 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10780 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10781 emit_barrier ();
10784 int
10785 try_tablejump (index_type, index_expr, minval, range,
10786 table_label, default_label)
10787 tree index_type, index_expr, minval, range;
10788 rtx table_label, default_label;
10790 rtx index;
10792 if (! HAVE_tablejump)
10793 return 0;
10795 index_expr = fold (build (MINUS_EXPR, index_type,
10796 convert (index_type, index_expr),
10797 convert (index_type, minval)));
10798 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10799 emit_queue ();
10800 index = protect_from_queue (index, 0);
10801 do_pending_stack_adjust ();
10803 do_tablejump (index, TYPE_MODE (index_type),
10804 convert_modes (TYPE_MODE (index_type),
10805 TYPE_MODE (TREE_TYPE (range)),
10806 expand_expr (range, NULL_RTX,
10807 VOIDmode, 0),
10808 TREE_UNSIGNED (TREE_TYPE (range))),
10809 table_label, default_label);
10810 return 1;
10813 /* Nonzero if the mode is a valid vector mode for this architecture.
10814 This returns nonzero even if there is no hardware support for the
10815 vector mode, but we can emulate with narrower modes. */
10817 int
10818 vector_mode_valid_p (mode)
10819 enum machine_mode mode;
10821 enum mode_class class = GET_MODE_CLASS (mode);
10822 enum machine_mode innermode;
10824 /* Doh! What's going on? */
10825 if (class != MODE_VECTOR_INT
10826 && class != MODE_VECTOR_FLOAT)
10827 return 0;
10829 /* Hardware support. Woo hoo! */
10830 if (VECTOR_MODE_SUPPORTED_P (mode))
10831 return 1;
10833 innermode = GET_MODE_INNER (mode);
10835 /* We should probably return 1 if requesting V4DI when we have no DI
10836 but do have V2DI, but this case is probably very unlikely. */
10838 /* If we have support for the inner mode, we can safely emulate it.
10839 We may not have V2DI, but we can emulate it with a pair of DIs. */
10840 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10843 #include "gt-expr.h"