gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "intl.h"
45 #include "tm_p.h"
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
53 #ifdef PUSH_ROUNDING
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first. */
57 #endif
59 #endif
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
74 /* Hook called by safe_from_p for language-specific tree codes. It is
75 up to the language front-end to install a hook if it has any such
76 codes that safe_from_p needs to know about. Since safe_from_p will
77 recursively explore the TREE_OPERANDs of an expression, this hook
78 should not reexamine those pieces. This routine may recursively
79 call safe_from_p; it should always pass `0' as the TOP_P
80 parameter. */
81 int (*lang_safe_from_p) PARAMS ((rtx, tree));
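#if 0
/* Illustrative sketch only, not part of this file: a front end that
   defines language-specific tree codes might install the hook roughly
   like this.  The tree-code handling and the function names below are
   hypothetical.  The hook need not re-examine TREE_OPERANDs, since
   safe_from_p has already walked them.  */

static int
example_lang_safe_from_p (x, exp)
     rtx x ATTRIBUTE_UNUSED;
     tree exp ATTRIBUTE_UNUSED;
{
  /* A real front end would inspect EXP here and return 0 if it could
     clobber X; this sketch just declares everything safe.  */
  return 1;
}

void
example_lang_init ()
{
  lang_safe_from_p = example_lang_safe_from_p;
}
#endif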
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
91 /* Don't check memory usage, since code is being emitted to check memory
92 usage. Used when current_function_check_memory_usage is true, to avoid
93 infinite recursion. */
94 static int in_check_memory_usage;
96 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
97 static tree placeholder_list = 0;
99 /* This structure is used by move_by_pieces to describe the move to
100 be performed. */
101 struct move_by_pieces
103 rtx to;
104 rtx to_addr;
105 int autinc_to;
106 int explicit_inc_to;
107 rtx from;
108 rtx from_addr;
109 int autinc_from;
110 int explicit_inc_from;
111 unsigned HOST_WIDE_INT len;
112 HOST_WIDE_INT offset;
113 int reverse;
116 /* This structure is used by store_by_pieces to describe the clear to
117 be performed. */
119 struct store_by_pieces
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
128 PTR constfundata;
129 int reverse;
132 extern struct obstack permanent_obstack;
134 static rtx get_push_address PARAMS ((int));
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
138 PARAMS ((unsigned HOST_WIDE_INT,
139 unsigned int));
140 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
141 struct move_by_pieces *));
142 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
145 unsigned int));
146 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
147 unsigned int));
148 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
149 enum machine_mode,
150 struct store_by_pieces *));
151 static rtx get_subtarget PARAMS ((rtx));
152 static int is_zeros_p PARAMS ((tree));
153 static int mostly_zeros_p PARAMS ((tree));
154 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, unsigned int, int,
157 int));
158 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
159 HOST_WIDE_INT));
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int,
163 unsigned int, HOST_WIDE_INT, int));
164 static enum memory_use_mode
165 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
166 static tree save_noncopied_parts PARAMS ((tree, tree));
167 static tree init_noncopied_parts PARAMS ((tree, tree));
168 static int fixed_type_p PARAMS ((tree));
169 static rtx var_rtx PARAMS ((tree));
170 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
171 static rtx expand_increment PARAMS ((tree, int, int));
172 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
173 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
174 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
175 rtx, rtx));
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
177 #ifdef PUSH_ROUNDING
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
179 #endif
180 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
182 /* Record for each mode whether we can move a register directly to or
183 from an object of that mode in memory. If we can't, we won't try
184 to use that mode directly when accessing a field of that mode. */
186 static char direct_load[NUM_MACHINE_MODES];
187 static char direct_store[NUM_MACHINE_MODES];
189 /* If a memory-to-memory move would take MOVE_RATIO or more simple
190 move-instruction sequences, we will do a movstr or libcall instead. */
192 #ifndef MOVE_RATIO
193 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
194 #define MOVE_RATIO 2
195 #else
196 /* If we are optimizing for space (-Os), cut down the default move ratio. */
197 #define MOVE_RATIO (optimize_size ? 3 : 15)
198 #endif
199 #endif
201 /* This macro is used to determine whether move_by_pieces should be called
202 to perform a structure copy. */
203 #ifndef MOVE_BY_PIECES_P
204 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
205 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
206 #endif
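#if 0
/* Illustrative sketch only, not part of this file: this mirrors how
   emit_block_move, further below, consults the heuristic.  A copy with
   a small constant size whose estimated insn count is below MOVE_RATIO
   is expanded inline as scalar moves; anything else goes through a
   movstr pattern or a library call.  */

static void
example_block_copy (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P (INTVAL (size), align))
    /* Cheap enough: expand inline as a few scalar moves.  */
    move_by_pieces (x, y, INTVAL (size), align);
  else
    /* Otherwise let emit_block_move try a movstr pattern and, failing
       that, a call to memcpy/bcopy.  */
    emit_block_move (x, y, size, align);
}
#endif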
208 /* This array records the insn_code of insns to perform block moves. */
209 enum insn_code movstr_optab[NUM_MACHINE_MODES];
211 /* This array records the insn_code of insns to perform block clears. */
212 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
214 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
216 #ifndef SLOW_UNALIGNED_ACCESS
217 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
218 #endif
220 /* This is run once per compilation to set up which modes can be used
221 directly in memory and to initialize the block move optab. */
223 void
224 init_expr_once ()
226 rtx insn, pat;
227 enum machine_mode mode;
228 int num_clobbers;
229 rtx mem, mem1;
231 start_sequence ();
233 /* Try indexing by frame ptr and try by stack ptr.
234 It is known that on the Convex the stack ptr isn't a valid index.
235 With luck, one or the other is valid on any machine. */
236 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
237 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
239 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
240 pat = PATTERN (insn);
242 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
243 mode = (enum machine_mode) ((int) mode + 1))
245 int regno;
246 rtx reg;
248 direct_load[(int) mode] = direct_store[(int) mode] = 0;
249 PUT_MODE (mem, mode);
250 PUT_MODE (mem1, mode);
252 /* See if there is some register that can be used in this mode and
253 directly loaded or stored from memory. */
255 if (mode != VOIDmode && mode != BLKmode)
256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
257 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
258 regno++)
260 if (! HARD_REGNO_MODE_OK (regno, mode))
261 continue;
263 reg = gen_rtx_REG (mode, regno);
265 SET_SRC (pat) = mem;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = mem1;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem1;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
287 end_sequence ();
290 /* This is run at the start of compiling a function. */
292 void
293 init_expr ()
295 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
297 pending_chain = 0;
298 pending_stack_adjust = 0;
299 stack_pointer_delta = 0;
300 inhibit_defer_pop = 0;
301 saveregs_value = 0;
302 apply_args_value = 0;
303 forced_labels = 0;
306 void
307 mark_expr_status (p)
308 struct expr_status *p;
310 if (p == NULL)
311 return;
313 ggc_mark_rtx (p->x_saveregs_value);
314 ggc_mark_rtx (p->x_apply_args_value);
315 ggc_mark_rtx (p->x_forced_labels);
318 void
319 free_expr_status (f)
320 struct function *f;
322 free (f->expr);
323 f->expr = NULL;
326 /* Small sanity check that the queue is empty at the end of a function. */
328 void
329 finish_expr_for_function ()
331 if (pending_chain)
332 abort ();
335 /* Manage the queue of increment instructions to be output
336 for POSTINCREMENT_EXPR expressions, etc. */
338 /* Queue up to increment (or change) VAR later. BODY says how:
339 BODY should be the same thing you would pass to emit_insn
340 to increment right away. It will go to emit_insn later on.
342 The value is a QUEUED expression to be used in place of VAR
343 where you want to guarantee the pre-incrementation value of VAR. */
345 static rtx
346 enqueue_insn (var, body)
347 rtx var, body;
349 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
350 body, pending_chain);
351 return pending_chain;
354 /* Use protect_from_queue to convert a QUEUED expression
355 into something that you can put immediately into an instruction.
356 If the queued incrementation has not happened yet,
357 protect_from_queue returns the variable itself.
358 If the incrementation has happened, protect_from_queue returns a temp
359 that contains a copy of the old value of the variable.
361 Any time an rtx which might possibly be a QUEUED is to be put
362 into an instruction, it must be passed through protect_from_queue first.
363 QUEUED expressions are not meaningful in instructions.
365 Do not pass a value through protect_from_queue and then hold
366 on to it for a while before putting it in an instruction!
367 If the queue is flushed in between, incorrect code will result. */
369 rtx
370 protect_from_queue (x, modify)
371 register rtx x;
372 int modify;
374 register RTX_CODE code = GET_CODE (x);
376 #if 0 /* A QUEUED can hang around after the queue is forced out. */
377 /* Shortcut for most common case. */
378 if (pending_chain == 0)
379 return x;
380 #endif
382 if (code != QUEUED)
384 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
385 use of autoincrement. Make a copy of the contents of the memory
386 location rather than a copy of the address, but not if the value is
387 of mode BLKmode. Don't modify X in place since it might be
388 shared. */
389 if (code == MEM && GET_MODE (x) != BLKmode
390 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
392 rtx y = XEXP (x, 0);
393 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
395 if (QUEUED_INSN (y))
397 rtx temp = gen_reg_rtx (GET_MODE (x));
399 emit_insn_before (gen_move_insn (temp, new),
400 QUEUED_INSN (y));
401 return temp;
404 /* Copy the address into a pseudo, so that the returned value
405 remains correct across calls to emit_queue. */
406 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
409 /* Otherwise, recursively protect the subexpressions of all
410 the kinds of rtx's that can contain a QUEUED. */
411 if (code == MEM)
413 rtx tem = protect_from_queue (XEXP (x, 0), 0);
414 if (tem != XEXP (x, 0))
416 x = copy_rtx (x);
417 XEXP (x, 0) = tem;
420 else if (code == PLUS || code == MULT)
422 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
423 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
424 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
426 x = copy_rtx (x);
427 XEXP (x, 0) = new0;
428 XEXP (x, 1) = new1;
431 return x;
433 /* If the increment has not happened, use the variable itself. Copy it
434 into a new pseudo so that the value remains correct across calls to
435 emit_queue. */
436 if (QUEUED_INSN (x) == 0)
437 return copy_to_reg (QUEUED_VAR (x));
438 /* If the increment has happened and a pre-increment copy exists,
439 use that copy. */
440 if (QUEUED_COPY (x) != 0)
441 return QUEUED_COPY (x);
442 /* The increment has happened but we haven't set up a pre-increment copy.
443 Set one up now, and use it. */
444 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
445 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
446 QUEUED_INSN (x));
447 return QUEUED_COPY (x);
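#if 0
/* Illustrative sketch only, not part of this file, of the contract
   described above: every operand that might contain a QUEUED goes
   through protect_from_queue immediately before being placed in an
   insn (convert_move and emit_block_move below follow this pattern),
   and the caller eventually flushes the pending increments with
   emit_queue.  */

static void
example_protected_move (to, from)
     rtx to, from;
{
  to = protect_from_queue (to, 1);	/* TO will be written.  */
  from = protect_from_queue (from, 0);	/* FROM is only read.  */
  emit_move_insn (to, from);
}
#endif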
450 /* Return nonzero if X contains a QUEUED expression:
451 if it contains anything that will be altered by a queued increment.
452 We handle only combinations of MEM, PLUS, MINUS and MULT operators
453 since memory addresses generally contain only those. */
455 int
456 queued_subexp_p (x)
457 rtx x;
459 register enum rtx_code code = GET_CODE (x);
460 switch (code)
462 case QUEUED:
463 return 1;
464 case MEM:
465 return queued_subexp_p (XEXP (x, 0));
466 case MULT:
467 case PLUS:
468 case MINUS:
469 return (queued_subexp_p (XEXP (x, 0))
470 || queued_subexp_p (XEXP (x, 1)));
471 default:
472 return 0;
476 /* Perform all the pending incrementations. */
478 void
479 emit_queue ()
481 register rtx p;
482 while ((p = pending_chain))
484 rtx body = QUEUED_BODY (p);
486 if (GET_CODE (body) == SEQUENCE)
488 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
489 emit_insn (QUEUED_BODY (p));
491 else
492 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
493 pending_chain = QUEUED_NEXT (p);
497 /* Copy data from FROM to TO, where the machine modes are not the same.
498 Both modes may be integer, or both may be floating.
499 UNSIGNEDP should be nonzero if FROM is an unsigned type.
500 This causes zero-extension instead of sign-extension. */
502 void
503 convert_move (to, from, unsignedp)
504 register rtx to, from;
505 int unsignedp;
507 enum machine_mode to_mode = GET_MODE (to);
508 enum machine_mode from_mode = GET_MODE (from);
509 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
510 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
511 enum insn_code code;
512 rtx libcall;
514 /* rtx code for making an equivalent value. */
515 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
517 to = protect_from_queue (to, 1);
518 from = protect_from_queue (from, 0);
520 if (to_real != from_real)
521 abort ();
523 /* If FROM is a SUBREG that indicates that we have already done at least
524 the required extension, strip it. We don't handle such SUBREGs as
525 TO here. */
527 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
528 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
529 >= GET_MODE_SIZE (to_mode))
530 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
531 from = gen_lowpart (to_mode, from), from_mode = to_mode;
533 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
534 abort ();
536 if (to_mode == from_mode
537 || (from_mode == VOIDmode && CONSTANT_P (from)))
539 emit_move_insn (to, from);
540 return;
543 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
545 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
546 abort ();
548 if (VECTOR_MODE_P (to_mode))
549 from = gen_rtx_SUBREG (to_mode, from, 0);
550 else
551 to = gen_rtx_SUBREG (from_mode, to, 0);
553 emit_move_insn (to, from);
554 return;
557 if (to_real != from_real)
558 abort ();
560 if (to_real)
562 rtx value, insns;
564 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
566 /* Try converting directly if the insn is supported. */
567 if ((code = can_extend_p (to_mode, from_mode, 0))
568 != CODE_FOR_nothing)
570 emit_unop_insn (code, to, from, UNKNOWN);
571 return;
575 #ifdef HAVE_trunchfqf2
576 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
579 return;
581 #endif
582 #ifdef HAVE_trunctqfqf2
583 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
586 return;
588 #endif
589 #ifdef HAVE_truncsfqf2
590 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
593 return;
595 #endif
596 #ifdef HAVE_truncdfqf2
597 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
599 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
600 return;
602 #endif
603 #ifdef HAVE_truncxfqf2
604 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
606 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
607 return;
609 #endif
610 #ifdef HAVE_trunctfqf2
611 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
613 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
614 return;
616 #endif
618 #ifdef HAVE_trunctqfhf2
619 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
622 return;
624 #endif
625 #ifdef HAVE_truncsfhf2
626 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
629 return;
631 #endif
632 #ifdef HAVE_truncdfhf2
633 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
635 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
636 return;
638 #endif
639 #ifdef HAVE_truncxfhf2
640 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
642 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
643 return;
645 #endif
646 #ifdef HAVE_trunctfhf2
647 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
649 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
650 return;
652 #endif
654 #ifdef HAVE_truncsftqf2
655 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
658 return;
660 #endif
661 #ifdef HAVE_truncdftqf2
662 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
664 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
665 return;
667 #endif
668 #ifdef HAVE_truncxftqf2
669 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
671 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
672 return;
674 #endif
675 #ifdef HAVE_trunctftqf2
676 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
678 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
679 return;
681 #endif
683 #ifdef HAVE_truncdfsf2
684 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
686 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
687 return;
689 #endif
690 #ifdef HAVE_truncxfsf2
691 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
693 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
694 return;
696 #endif
697 #ifdef HAVE_trunctfsf2
698 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
700 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
701 return;
703 #endif
704 #ifdef HAVE_truncxfdf2
705 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
707 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
708 return;
710 #endif
711 #ifdef HAVE_trunctfdf2
712 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
714 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
715 return;
717 #endif
719 libcall = (rtx) 0;
720 switch (from_mode)
722 case SFmode:
723 switch (to_mode)
725 case DFmode:
726 libcall = extendsfdf2_libfunc;
727 break;
729 case XFmode:
730 libcall = extendsfxf2_libfunc;
731 break;
733 case TFmode:
734 libcall = extendsftf2_libfunc;
735 break;
737 default:
738 break;
740 break;
742 case DFmode:
743 switch (to_mode)
745 case SFmode:
746 libcall = truncdfsf2_libfunc;
747 break;
749 case XFmode:
750 libcall = extenddfxf2_libfunc;
751 break;
753 case TFmode:
754 libcall = extenddftf2_libfunc;
755 break;
757 default:
758 break;
760 break;
762 case XFmode:
763 switch (to_mode)
765 case SFmode:
766 libcall = truncxfsf2_libfunc;
767 break;
769 case DFmode:
770 libcall = truncxfdf2_libfunc;
771 break;
773 default:
774 break;
776 break;
778 case TFmode:
779 switch (to_mode)
781 case SFmode:
782 libcall = trunctfsf2_libfunc;
783 break;
785 case DFmode:
786 libcall = trunctfdf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 default:
795 break;
798 if (libcall == (rtx) 0)
799 /* This conversion is not implemented yet. */
800 abort ();
802 start_sequence ();
803 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
804 1, from, from_mode);
805 insns = get_insns ();
806 end_sequence ();
807 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
808 from));
809 return;
812 /* Now both modes are integers. */
814 /* Handle expanding beyond a word. */
815 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
816 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
818 rtx insns;
819 rtx lowpart;
820 rtx fill_value;
821 rtx lowfrom;
822 int i;
823 enum machine_mode lowpart_mode;
824 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
826 /* Try converting directly if the insn is supported. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
830 /* If FROM is a SUBREG, put it into a register. Do this
831 so that we always generate the same set of insns for
832 better cse'ing; if an intermediate assignment occurred,
833 we won't be doing the operation directly on the SUBREG. */
834 if (optimize > 0 && GET_CODE (from) == SUBREG)
835 from = force_reg (from_mode, from);
836 emit_unop_insn (code, to, from, equiv_code);
837 return;
839 /* Next, try converting via full word. */
840 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
841 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
842 != CODE_FOR_nothing))
844 if (GET_CODE (to) == REG)
845 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
846 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
847 emit_unop_insn (code, to,
848 gen_lowpart (word_mode, to), equiv_code);
849 return;
852 /* No special multiword conversion insn; do it by hand. */
853 start_sequence ();
855 /* Since we will turn this into a no conflict block, we must ensure
856 that the source does not overlap the target. */
858 if (reg_overlap_mentioned_p (to, from))
859 from = force_reg (from_mode, from);
861 /* Get a copy of FROM widened to a word, if necessary. */
862 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
863 lowpart_mode = word_mode;
864 else
865 lowpart_mode = from_mode;
867 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
869 lowpart = gen_lowpart (lowpart_mode, to);
870 emit_move_insn (lowpart, lowfrom);
872 /* Compute the value to put in each remaining word. */
873 if (unsignedp)
874 fill_value = const0_rtx;
875 else
877 #ifdef HAVE_slt
878 if (HAVE_slt
879 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
880 && STORE_FLAG_VALUE == -1)
882 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
883 lowpart_mode, 0, 0);
884 fill_value = gen_reg_rtx (word_mode);
885 emit_insn (gen_slt (fill_value));
887 else
888 #endif
890 fill_value
891 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
892 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
893 NULL_RTX, 0);
894 fill_value = convert_to_mode (word_mode, fill_value, 1);
898 /* Fill the remaining words. */
899 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
901 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
902 rtx subword = operand_subword (to, index, 1, to_mode);
904 if (subword == 0)
905 abort ();
907 if (fill_value != subword)
908 emit_move_insn (subword, fill_value);
911 insns = get_insns ();
912 end_sequence ();
914 emit_no_conflict_block (insns, to, from, NULL_RTX,
915 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
916 return;
919 /* Truncating multi-word to a word or less. */
920 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
921 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 convert_move (to, gen_lowpart (word_mode, from), 0);
931 return;
934 /* Handle pointer conversion. */ /* SPEE 900220. */
935 if (to_mode == PQImode)
937 if (from_mode != QImode)
938 from = convert_to_mode (QImode, from, unsignedp);
940 #ifdef HAVE_truncqipqi2
941 if (HAVE_truncqipqi2)
943 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
944 return;
946 #endif /* HAVE_truncqipqi2 */
947 abort ();
950 if (from_mode == PQImode)
952 if (to_mode != QImode)
954 from = convert_to_mode (QImode, from, unsignedp);
955 from_mode = QImode;
957 else
959 #ifdef HAVE_extendpqiqi2
960 if (HAVE_extendpqiqi2)
962 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
963 return;
965 #endif /* HAVE_extendpqiqi2 */
966 abort ();
970 if (to_mode == PSImode)
972 if (from_mode != SImode)
973 from = convert_to_mode (SImode, from, unsignedp);
975 #ifdef HAVE_truncsipsi2
976 if (HAVE_truncsipsi2)
978 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
979 return;
981 #endif /* HAVE_truncsipsi2 */
982 abort ();
985 if (from_mode == PSImode)
987 if (to_mode != SImode)
989 from = convert_to_mode (SImode, from, unsignedp);
990 from_mode = SImode;
992 else
994 #ifdef HAVE_extendpsisi2
995 if (! unsignedp && HAVE_extendpsisi2)
997 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
998 return;
1000 #endif /* HAVE_extendpsisi2 */
1001 #ifdef HAVE_zero_extendpsisi2
1002 if (unsignedp && HAVE_zero_extendpsisi2)
1004 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1005 return;
1007 #endif /* HAVE_zero_extendpsisi2 */
1008 abort ();
1012 if (to_mode == PDImode)
1014 if (from_mode != DImode)
1015 from = convert_to_mode (DImode, from, unsignedp);
1017 #ifdef HAVE_truncdipdi2
1018 if (HAVE_truncdipdi2)
1020 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1021 return;
1023 #endif /* HAVE_truncdipdi2 */
1024 abort ();
1027 if (from_mode == PDImode)
1029 if (to_mode != DImode)
1031 from = convert_to_mode (DImode, from, unsignedp);
1032 from_mode = DImode;
1034 else
1036 #ifdef HAVE_extendpdidi2
1037 if (HAVE_extendpdidi2)
1039 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1040 return;
1042 #endif /* HAVE_extendpdidi2 */
1043 abort ();
1047 /* Now follow all the conversions between integers
1048 no more than a word long. */
1050 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1051 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1052 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1053 GET_MODE_BITSIZE (from_mode)))
1055 if (!((GET_CODE (from) == MEM
1056 && ! MEM_VOLATILE_P (from)
1057 && direct_load[(int) to_mode]
1058 && ! mode_dependent_address_p (XEXP (from, 0)))
1059 || GET_CODE (from) == REG
1060 || GET_CODE (from) == SUBREG))
1061 from = force_reg (from_mode, from);
1062 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1063 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1064 from = copy_to_reg (from);
1065 emit_move_insn (to, gen_lowpart (to_mode, from));
1066 return;
1069 /* Handle extension. */
1070 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1072 /* Convert directly if that works. */
1073 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1074 != CODE_FOR_nothing)
1076 emit_unop_insn (code, to, from, equiv_code);
1077 return;
1079 else
1081 enum machine_mode intermediate;
1082 rtx tmp;
1083 tree shift_amount;
1085 /* Search for a mode to convert via. */
1086 for (intermediate = from_mode; intermediate != VOIDmode;
1087 intermediate = GET_MODE_WIDER_MODE (intermediate))
1088 if (((can_extend_p (to_mode, intermediate, unsignedp)
1089 != CODE_FOR_nothing)
1090 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1091 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1092 GET_MODE_BITSIZE (intermediate))))
1093 && (can_extend_p (intermediate, from_mode, unsignedp)
1094 != CODE_FOR_nothing))
1096 convert_move (to, convert_to_mode (intermediate, from,
1097 unsignedp), unsignedp);
1098 return;
1101 /* No suitable intermediate mode.
1102 Generate what we need with shifts. */
1103 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1104 - GET_MODE_BITSIZE (from_mode), 0);
1105 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1106 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1107 to, unsignedp);
1108 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1109 to, unsignedp);
1110 if (tmp != to)
1111 emit_move_insn (to, tmp);
1112 return;
1116 /* Support special truncate insns for certain modes. */
1118 if (from_mode == DImode && to_mode == SImode)
1120 #ifdef HAVE_truncdisi2
1121 if (HAVE_truncdisi2)
1123 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1124 return;
1126 #endif
1127 convert_move (to, force_reg (from_mode, from), unsignedp);
1128 return;
1131 if (from_mode == DImode && to_mode == HImode)
1133 #ifdef HAVE_truncdihi2
1134 if (HAVE_truncdihi2)
1136 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1137 return;
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1144 if (from_mode == DImode && to_mode == QImode)
1146 #ifdef HAVE_truncdiqi2
1147 if (HAVE_truncdiqi2)
1149 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1150 return;
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1157 if (from_mode == SImode && to_mode == HImode)
1159 #ifdef HAVE_truncsihi2
1160 if (HAVE_truncsihi2)
1162 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1163 return;
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1170 if (from_mode == SImode && to_mode == QImode)
1172 #ifdef HAVE_truncsiqi2
1173 if (HAVE_truncsiqi2)
1175 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1176 return;
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1183 if (from_mode == HImode && to_mode == QImode)
1185 #ifdef HAVE_trunchiqi2
1186 if (HAVE_trunchiqi2)
1188 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1189 return;
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1196 if (from_mode == TImode && to_mode == DImode)
1198 #ifdef HAVE_trunctidi2
1199 if (HAVE_trunctidi2)
1201 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1202 return;
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1209 if (from_mode == TImode && to_mode == SImode)
1211 #ifdef HAVE_trunctisi2
1212 if (HAVE_trunctisi2)
1214 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1215 return;
1217 #endif
1218 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 return;
1222 if (from_mode == TImode && to_mode == HImode)
1224 #ifdef HAVE_trunctihi2
1225 if (HAVE_trunctihi2)
1227 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1228 return;
1230 #endif
1231 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 return;
1235 if (from_mode == TImode && to_mode == QImode)
1237 #ifdef HAVE_trunctiqi2
1238 if (HAVE_trunctiqi2)
1240 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1241 return;
1243 #endif
1244 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 return;
1248 /* Handle truncation of volatile memrefs, and so on;
1249 the things that couldn't be truncated directly,
1250 and for which there was no special instruction. */
1251 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1253 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1254 emit_move_insn (to, temp);
1255 return;
1258 /* Mode combination is not recognized. */
1259 abort ();
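#if 0
/* Illustrative call of convert_move, a sketch only and not part of this
   file: sign-extend a QImode pseudo into an SImode pseudo.  Depending
   on the target this emits an extendqisi2 insn, converts via an
   intermediate mode, or falls back to the shift sequence above.  */

static void
example_widen ()
{
  rtx narrow = gen_reg_rtx (QImode);
  rtx wide = gen_reg_rtx (SImode);

  convert_move (wide, narrow, 0);	/* UNSIGNEDP == 0: sign extend.  */
}
#endif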
1262 /* Return an rtx for a value that would result
1263 from converting X to mode MODE.
1264 Both X and MODE may be floating, or both integer.
1265 UNSIGNEDP is nonzero if X is an unsigned value.
1266 This can be done by referring to a part of X in place
1267 or by copying to a new temporary with conversion.
1269 This function *must not* call protect_from_queue
1270 except when putting X into an insn (in which case convert_move does it). */
1272 rtx
1273 convert_to_mode (mode, x, unsignedp)
1274 enum machine_mode mode;
1275 rtx x;
1276 int unsignedp;
1278 return convert_modes (mode, VOIDmode, x, unsignedp);
1281 /* Return an rtx for a value that would result
1282 from converting X from mode OLDMODE to mode MODE.
1283 Both modes may be floating, or both integer.
1284 UNSIGNEDP is nonzero if X is an unsigned value.
1286 This can be done by referring to a part of X in place
1287 or by copying to a new temporary with conversion.
1289 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1291 This function *must not* call protect_from_queue
1292 except when putting X into an insn (in which case convert_move does it). */
1294 rtx
1295 convert_modes (mode, oldmode, x, unsignedp)
1296 enum machine_mode mode, oldmode;
1297 rtx x;
1298 int unsignedp;
1300 register rtx temp;
1302 /* If FROM is a SUBREG that indicates that we have already done at least
1303 the required extension, strip it. */
1305 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1306 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1307 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1308 x = gen_lowpart (mode, x);
1310 if (GET_MODE (x) != VOIDmode)
1311 oldmode = GET_MODE (x);
1313 if (mode == oldmode)
1314 return x;
1316 /* There is one case that we must handle specially: If we are converting
1317 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1318 we are to interpret the constant as unsigned, gen_lowpart will do
1319 the wrong thing if the constant appears negative. What we want to do is
1320 make the high-order word of the constant zero, not all ones. */
1322 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1323 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1324 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1326 HOST_WIDE_INT val = INTVAL (x);
1328 if (oldmode != VOIDmode
1329 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1331 int width = GET_MODE_BITSIZE (oldmode);
1333 /* We need to zero extend VAL. */
1334 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1337 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1340 /* We can do this with a gen_lowpart if both desired and current modes
1341 are integer, and this is either a constant integer, a register, or a
1342 non-volatile MEM. Except for the constant case where MODE is no
1343 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1345 if ((GET_CODE (x) == CONST_INT
1346 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1347 || (GET_MODE_CLASS (mode) == MODE_INT
1348 && GET_MODE_CLASS (oldmode) == MODE_INT
1349 && (GET_CODE (x) == CONST_DOUBLE
1350 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1351 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1352 && direct_load[(int) mode])
1353 || (GET_CODE (x) == REG
1354 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1355 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1357 /* ?? If we don't know OLDMODE, we have to assume here that
1358 X does not need sign- or zero-extension. This may not be
1359 the case, but it's the best we can do. */
1360 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1361 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1363 HOST_WIDE_INT val = INTVAL (x);
1364 int width = GET_MODE_BITSIZE (oldmode);
1366 /* We must sign or zero-extend in this case. Start by
1367 zero-extending, then sign extend if we need to. */
1368 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1369 if (! unsignedp
1370 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1371 val |= (HOST_WIDE_INT) (-1) << width;
1373 return GEN_INT (trunc_int_for_mode (val, mode));
1376 return gen_lowpart (mode, x);
1379 temp = gen_reg_rtx (mode);
1380 convert_move (temp, x, unsignedp);
1381 return temp;
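/* Worked example of the CONST_INT handling above (a sketch, not part of
   this file): with UNSIGNEDP nonzero the QImode constant -1 is masked to
   the width of OLDMODE, while with UNSIGNEDP zero the sign bit is
   propagated instead:

     convert_modes (HImode, QImode, GEN_INT (-1), 1)  => (const_int 255)
     convert_modes (HImode, QImode, GEN_INT (-1), 0)  => (const_int -1)  */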
1384 /* This macro is used to determine what the largest unit size that
1385 move_by_pieces can use is. */
1387 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1388 move efficiently, as opposed to MOVE_MAX which is the maximum
1389 number of bytes we can move with a single instruction. */
1391 #ifndef MOVE_MAX_PIECES
1392 #define MOVE_MAX_PIECES MOVE_MAX
1393 #endif
1395 /* Generate several move instructions to copy LEN bytes from block FROM to
1396 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1397 and TO through protect_from_queue before calling.
1399 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1400 used to push FROM to the stack.
1402 ALIGN is maximum alignment we can assume. */
1404 void
1405 move_by_pieces (to, from, len, align)
1406 rtx to, from;
1407 unsigned HOST_WIDE_INT len;
1408 unsigned int align;
1410 struct move_by_pieces data;
1411 rtx to_addr, from_addr = XEXP (from, 0);
1412 unsigned int max_size = MOVE_MAX_PIECES + 1;
1413 enum machine_mode mode = VOIDmode, tmode;
1414 enum insn_code icode;
1416 data.offset = 0;
1417 data.from_addr = from_addr;
1418 if (to)
1420 to_addr = XEXP (to, 0);
1421 data.to = to;
1422 data.autinc_to
1423 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1424 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1425 data.reverse
1426 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1428 else
1430 to_addr = NULL_RTX;
1431 data.to = NULL_RTX;
1432 data.autinc_to = 1;
1433 #ifdef STACK_GROWS_DOWNWARD
1434 data.reverse = 1;
1435 #else
1436 data.reverse = 0;
1437 #endif
1439 data.to_addr = to_addr;
1440 data.from = from;
1441 data.autinc_from
1442 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1443 || GET_CODE (from_addr) == POST_INC
1444 || GET_CODE (from_addr) == POST_DEC);
1446 data.explicit_inc_from = 0;
1447 data.explicit_inc_to = 0;
1448 if (data.reverse) data.offset = len;
1449 data.len = len;
1451 /* If copying requires more than two move insns,
1452 copy addresses to registers (to make displacements shorter)
1453 and use post-increment if available. */
1454 if (!(data.autinc_from && data.autinc_to)
1455 && move_by_pieces_ninsns (len, align) > 2)
1457 /* Find the mode of the largest move... */
1458 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1459 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1460 if (GET_MODE_SIZE (tmode) < max_size)
1461 mode = tmode;
1463 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1465 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1466 data.autinc_from = 1;
1467 data.explicit_inc_from = -1;
1469 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1471 data.from_addr = copy_addr_to_reg (from_addr);
1472 data.autinc_from = 1;
1473 data.explicit_inc_from = 1;
1475 if (!data.autinc_from && CONSTANT_P (from_addr))
1476 data.from_addr = copy_addr_to_reg (from_addr);
1477 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1479 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1480 data.autinc_to = 1;
1481 data.explicit_inc_to = -1;
1483 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1485 data.to_addr = copy_addr_to_reg (to_addr);
1486 data.autinc_to = 1;
1487 data.explicit_inc_to = 1;
1489 if (!data.autinc_to && CONSTANT_P (to_addr))
1490 data.to_addr = copy_addr_to_reg (to_addr);
1493 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1494 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1495 align = MOVE_MAX * BITS_PER_UNIT;
1497 /* First move what we can in the largest integer mode, then go to
1498 successively smaller modes. */
1500 while (max_size > 1)
1502 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1503 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1504 if (GET_MODE_SIZE (tmode) < max_size)
1505 mode = tmode;
1507 if (mode == VOIDmode)
1508 break;
1510 icode = mov_optab->handlers[(int) mode].insn_code;
1511 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1512 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1514 max_size = GET_MODE_SIZE (mode);
1517 /* The code above should have handled everything. */
1518 if (data.len > 0)
1519 abort ();
1522 /* Return number of insns required to move L bytes by pieces.
1523 ALIGN (in bits) is maximum alignment we can assume. */
1525 static unsigned HOST_WIDE_INT
1526 move_by_pieces_ninsns (l, align)
1527 unsigned HOST_WIDE_INT l;
1528 unsigned int align;
1530 unsigned HOST_WIDE_INT n_insns = 0;
1531 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1533 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1534 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1535 align = MOVE_MAX * BITS_PER_UNIT;
1537 while (max_size > 1)
1539 enum machine_mode mode = VOIDmode, tmode;
1540 enum insn_code icode;
1542 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1543 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1544 if (GET_MODE_SIZE (tmode) < max_size)
1545 mode = tmode;
1547 if (mode == VOIDmode)
1548 break;
1550 icode = mov_optab->handlers[(int) mode].insn_code;
1551 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1552 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1554 max_size = GET_MODE_SIZE (mode);
1557 if (l)
1558 abort ();
1559 return n_insns;
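/* Worked example, a sketch not part of this file, assuming MOVE_MAX is 4,
   mov patterns exist for QImode through SImode, and the alignment test
   above passes: for L == 7 the loop counts one 4-byte move, one 2-byte
   move and one 1-byte move, so move_by_pieces_ninsns returns 3.
   MOVE_BY_PIECES_P then compares that count against MOVE_RATIO.  */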
1562 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1563 with move instructions for mode MODE. GENFUN is the gen_... function
1564 to make a move insn for that mode. DATA has all the other info. */
1566 static void
1567 move_by_pieces_1 (genfun, mode, data)
1568 rtx (*genfun) PARAMS ((rtx, ...));
1569 enum machine_mode mode;
1570 struct move_by_pieces *data;
1572 unsigned int size = GET_MODE_SIZE (mode);
1573 rtx to1 = NULL_RTX, from1;
1575 while (data->len >= size)
1577 if (data->reverse)
1578 data->offset -= size;
1580 if (data->to)
1582 if (data->autinc_to)
1584 to1 = replace_equiv_address (data->to, data->to_addr);
1585 to1 = adjust_address (to1, mode, 0);
1587 else
1588 to1 = adjust_address (data->to, mode, data->offset);
1591 if (data->autinc_from)
1593 from1 = replace_equiv_address (data->from, data->from_addr);
1594 from1 = adjust_address (from1, mode, 0);
1596 else
1597 from1 = adjust_address (data->from, mode, data->offset);
1599 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1600 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1601 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1602 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1604 if (data->to)
1605 emit_insn ((*genfun) (to1, from1));
1606 else
1608 #ifdef PUSH_ROUNDING
1609 emit_single_push_insn (mode, from1, NULL);
1610 #else
1611 abort ();
1612 #endif
1615 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1616 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1617 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1618 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1620 if (! data->reverse)
1621 data->offset += size;
1623 data->len -= size;
1627 /* Emit code to move a block Y to a block X.
1628 This may be done with string-move instructions,
1629 with multiple scalar move instructions, or with a library call.
1631 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1632 with mode BLKmode.
1633 SIZE is an rtx that says how long they are.
1634 ALIGN is the maximum alignment we can assume they have.
1636 Return the address of the new block, if memcpy is called and returns it,
1637 0 otherwise. */
1639 rtx
1640 emit_block_move (x, y, size, align)
1641 rtx x, y;
1642 rtx size;
1643 unsigned int align;
1645 rtx retval = 0;
1646 #ifdef TARGET_MEM_FUNCTIONS
1647 static tree fn;
1648 tree call_expr, arg_list;
1649 #endif
1651 if (GET_MODE (x) != BLKmode)
1652 abort ();
1654 if (GET_MODE (y) != BLKmode)
1655 abort ();
1657 x = protect_from_queue (x, 1);
1658 y = protect_from_queue (y, 0);
1659 size = protect_from_queue (size, 0);
1661 if (GET_CODE (x) != MEM)
1662 abort ();
1663 if (GET_CODE (y) != MEM)
1664 abort ();
1665 if (size == 0)
1666 abort ();
1668 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1669 move_by_pieces (x, y, INTVAL (size), align);
1670 else
1672 /* Try the most limited insn first, because there's no point
1673 including more than one in the machine description unless
1674 the more limited one has some advantage. */
1676 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1677 enum machine_mode mode;
1679 /* Since this is a move insn, we don't care about volatility. */
1680 volatile_ok = 1;
1682 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1683 mode = GET_MODE_WIDER_MODE (mode))
1685 enum insn_code code = movstr_optab[(int) mode];
1686 insn_operand_predicate_fn pred;
1688 if (code != CODE_FOR_nothing
1689 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1690 here because if SIZE is less than the mode mask, as it is
1691 returned by the macro, it will definitely be less than the
1692 actual mode mask. */
1693 && ((GET_CODE (size) == CONST_INT
1694 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1695 <= (GET_MODE_MASK (mode) >> 1)))
1696 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1697 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1698 || (*pred) (x, BLKmode))
1699 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1700 || (*pred) (y, BLKmode))
1701 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1702 || (*pred) (opalign, VOIDmode)))
1704 rtx op2;
1705 rtx last = get_last_insn ();
1706 rtx pat;
1708 op2 = convert_to_mode (mode, size, 1);
1709 pred = insn_data[(int) code].operand[2].predicate;
1710 if (pred != 0 && ! (*pred) (op2, mode))
1711 op2 = copy_to_mode_reg (mode, op2);
1713 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1714 if (pat)
1716 emit_insn (pat);
1717 volatile_ok = 0;
1718 return 0;
1720 else
1721 delete_insns_since (last);
1725 volatile_ok = 0;
1727 /* X, Y, or SIZE may have been passed through protect_from_queue.
1729 It is unsafe to save the value generated by protect_from_queue
1730 and reuse it later. Consider what happens if emit_queue is
1731 called before the return value from protect_from_queue is used.
1733 Expansion of the CALL_EXPR below will call emit_queue before
1734 we are finished emitting RTL for argument setup. So if we are
1735 not careful we could get the wrong value for an argument.
1737 To avoid this problem we go ahead and emit code to copy X, Y &
1738 SIZE into new pseudos. We can then place those new pseudos
1739 into an RTL_EXPR and use them later, even after a call to
1740 emit_queue.
1742 Note this is not strictly needed for library calls since they
1743 do not call emit_queue before loading their arguments. However,
1744 we may need to have library calls call emit_queue in the future
1745 since failing to do so could cause problems for targets which
1746 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1747 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1748 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1750 #ifdef TARGET_MEM_FUNCTIONS
1751 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1752 #else
1753 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1754 TREE_UNSIGNED (integer_type_node));
1755 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1756 #endif
1758 #ifdef TARGET_MEM_FUNCTIONS
1759 /* It is incorrect to use the libcall calling conventions to call
1760 memcpy in this context.
1762 This could be a user call to memcpy and the user may wish to
1763 examine the return value from memcpy.
1765 For targets where libcalls and normal calls have different conventions
1766 for returning pointers, we could end up generating incorrect code.
1768 So instead of using a libcall sequence we build up a suitable
1769 CALL_EXPR and expand the call in the normal fashion. */
1770 if (fn == NULL_TREE)
1772 tree fntype;
1774 /* This was copied from except.c; I don't know if all this is
1775 necessary in this context or not. */
1776 fn = get_identifier ("memcpy");
1777 fntype = build_pointer_type (void_type_node);
1778 fntype = build_function_type (fntype, NULL_TREE);
1779 fn = build_decl (FUNCTION_DECL, fn, fntype);
1780 ggc_add_tree_root (&fn, 1);
1781 DECL_EXTERNAL (fn) = 1;
1782 TREE_PUBLIC (fn) = 1;
1783 DECL_ARTIFICIAL (fn) = 1;
1784 TREE_NOTHROW (fn) = 1;
1785 make_decl_rtl (fn, NULL);
1786 assemble_external (fn);
1789 /* We need to make an argument list for the function call.
1791 memcpy has three arguments, the first two are void * addresses and
1792 the last is a size_t byte count for the copy. */
1793 arg_list
1794 = build_tree_list (NULL_TREE,
1795 make_tree (build_pointer_type (void_type_node), x));
1796 TREE_CHAIN (arg_list)
1797 = build_tree_list (NULL_TREE,
1798 make_tree (build_pointer_type (void_type_node), y));
1799 TREE_CHAIN (TREE_CHAIN (arg_list))
1800 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1801 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1803 /* Now we have to build up the CALL_EXPR itself. */
1804 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1805 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1806 call_expr, arg_list, NULL_TREE);
1807 TREE_SIDE_EFFECTS (call_expr) = 1;
1809 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1810 #else
1811 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1812 VOIDmode, 3, y, Pmode, x, Pmode,
1813 convert_to_mode (TYPE_MODE (integer_type_node), size,
1814 TREE_UNSIGNED (integer_type_node)),
1815 TYPE_MODE (integer_type_node));
1816 #endif
1819 return retval;
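/* At the C level, the CALL_EXPR built above amounts to an ordinary call
   (a sketch, not part of this file; the variable names are hypothetical):

     memcpy ((void *) x_addr, (void *) y_addr, (size_t) nbytes);

   It is expanded through the normal call path rather than as a libcall so
   that the pointer return-value convention matches a user's own call to
   memcpy, as explained in the comment above.  */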
1822 /* Copy all or part of a value X into registers starting at REGNO.
1823 The number of registers to be filled is NREGS. */
1825 void
1826 move_block_to_reg (regno, x, nregs, mode)
1827 int regno;
1828 rtx x;
1829 int nregs;
1830 enum machine_mode mode;
1832 int i;
1833 #ifdef HAVE_load_multiple
1834 rtx pat;
1835 rtx last;
1836 #endif
1838 if (nregs == 0)
1839 return;
1841 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1842 x = validize_mem (force_const_mem (mode, x));
1844 /* See if the machine can do this with a load multiple insn. */
1845 #ifdef HAVE_load_multiple
1846 if (HAVE_load_multiple)
1848 last = get_last_insn ();
1849 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1850 GEN_INT (nregs));
1851 if (pat)
1853 emit_insn (pat);
1854 return;
1856 else
1857 delete_insns_since (last);
1859 #endif
1861 for (i = 0; i < nregs; i++)
1862 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1863 operand_subword_force (x, i, mode));
1866 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1867 The number of registers to be filled is NREGS. SIZE indicates the number
1868 of bytes in the object X. */
1870 void
1871 move_block_from_reg (regno, x, nregs, size)
1872 int regno;
1873 rtx x;
1874 int nregs;
1875 int size;
1877 int i;
1878 #ifdef HAVE_store_multiple
1879 rtx pat;
1880 rtx last;
1881 #endif
1882 enum machine_mode mode;
1884 if (nregs == 0)
1885 return;
1887 /* If SIZE is that of a mode no bigger than a word, just use that
1888 mode's store operation. */
1889 if (size <= UNITS_PER_WORD
1890 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1892 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1893 return;
1896 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1897 to the left before storing to memory. Note that the previous test
1898 doesn't handle all cases (e.g. SIZE == 3). */
1899 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1901 rtx tem = operand_subword (x, 0, 1, BLKmode);
1902 rtx shift;
1904 if (tem == 0)
1905 abort ();
1907 shift = expand_shift (LSHIFT_EXPR, word_mode,
1908 gen_rtx_REG (word_mode, regno),
1909 build_int_2 ((UNITS_PER_WORD - size)
1910 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1911 emit_move_insn (tem, shift);
1912 return;
1915 /* See if the machine can do this with a store multiple insn. */
1916 #ifdef HAVE_store_multiple
1917 if (HAVE_store_multiple)
1919 last = get_last_insn ();
1920 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1921 GEN_INT (nregs));
1922 if (pat)
1924 emit_insn (pat);
1925 return;
1927 else
1928 delete_insns_since (last);
1930 #endif
1932 for (i = 0; i < nregs; i++)
1934 rtx tem = operand_subword (x, i, 1, BLKmode);
1936 if (tem == 0)
1937 abort ();
1939 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1943 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1944 registers represented by a PARALLEL. SSIZE represents the total size of
1945 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1946 SRC in bits. */
1947 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1948 the balance will be in what would be the low-order memory addresses, i.e.
1949 left justified for big endian, right justified for little endian. This
1950 happens to be true for the targets currently using this support. If this
1951 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1952 would be needed. */
1954 void
1955 emit_group_load (dst, orig_src, ssize, align)
1956 rtx dst, orig_src;
1957 unsigned int align;
1958 int ssize;
1960 rtx *tmps, src;
1961 int start, i;
1963 if (GET_CODE (dst) != PARALLEL)
1964 abort ();
1966 /* Check for a NULL entry, used to indicate that the parameter goes
1967 both on the stack and in registers. */
1968 if (XEXP (XVECEXP (dst, 0, 0), 0))
1969 start = 0;
1970 else
1971 start = 1;
1973 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1975 /* Process the pieces. */
1976 for (i = start; i < XVECLEN (dst, 0); i++)
1978 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1979 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1980 unsigned int bytelen = GET_MODE_SIZE (mode);
1981 int shift = 0;
1983 /* Handle trailing fragments that run over the size of the struct. */
1984 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1986 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1987 bytelen = ssize - bytepos;
1988 if (bytelen <= 0)
1989 abort ();
1992 /* If we won't be loading directly from memory, protect the real source
1993 from strange tricks we might play; but make sure that the source can
1994 be loaded directly into the destination. */
1995 src = orig_src;
1996 if (GET_CODE (orig_src) != MEM
1997 && (!CONSTANT_P (orig_src)
1998 || (GET_MODE (orig_src) != mode
1999 && GET_MODE (orig_src) != VOIDmode)))
2001 if (GET_MODE (orig_src) == VOIDmode)
2002 src = gen_reg_rtx (mode);
2003 else
2004 src = gen_reg_rtx (GET_MODE (orig_src));
2005 emit_move_insn (src, orig_src);
2008 /* Optimize the access just a bit. */
2009 if (GET_CODE (src) == MEM
2010 && align >= GET_MODE_ALIGNMENT (mode)
2011 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2012 && bytelen == GET_MODE_SIZE (mode))
2014 tmps[i] = gen_reg_rtx (mode);
2015 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2017 else if (GET_CODE (src) == CONCAT)
2019 if (bytepos == 0
2020 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2021 tmps[i] = XEXP (src, 0);
2022 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2023 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2024 tmps[i] = XEXP (src, 1);
2025 else
2026 abort ();
2028 else if (CONSTANT_P (src)
2029 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2030 tmps[i] = src;
2031 else
2032 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2033 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2034 mode, mode, align, ssize);
2036 if (BYTES_BIG_ENDIAN && shift)
2037 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2038 tmps[i], 0, OPTAB_WIDEN);
2041 emit_queue ();
2043 /* Copy the extracted pieces into the proper (probable) hard regs. */
2044 for (i = start; i < XVECLEN (dst, 0); i++)
2045 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
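
/* Illustrative sketch, not built as part of this file: one way a caller
   might construct the PARALLEL destination that emit_group_load expects.
   The hard register numbers (3 and 4), the use of word_mode, and the
   alignment are hypothetical placeholders for what a target's argument or
   return-value machinery would really supply.  */
#if 0
static void
example_load_into_two_regs (src)
     rtx src;			/* a BLKmode MEM of 2 * UNITS_PER_WORD bytes */
{
  /* Each PARALLEL element is an EXPR_LIST pairing a destination register
     with its byte offset within SRC.  */
  rtx dst
    = gen_rtx_PARALLEL (VOIDmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (word_mode, 3),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (word_mode, 4),
						      GEN_INT (UNITS_PER_WORD))));

  emit_group_load (dst, src, 2 * UNITS_PER_WORD, BITS_PER_WORD);
}
#endif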
2048 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2049 registers represented by a PARALLEL. SSIZE represents the total size of
2050 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2052 void
2053 emit_group_store (orig_dst, src, ssize, align)
2054 rtx orig_dst, src;
2055 int ssize;
2056 unsigned int align;
2058 rtx *tmps, dst;
2059 int start, i;
2061 if (GET_CODE (src) != PARALLEL)
2062 abort ();
2064 /* Check for a NULL entry, used to indicate that the parameter goes
2065 both on the stack and in registers. */
2066 if (XEXP (XVECEXP (src, 0, 0), 0))
2067 start = 0;
2068 else
2069 start = 1;
2071 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2073 /* Copy the (probable) hard regs into pseudos. */
2074 for (i = start; i < XVECLEN (src, 0); i++)
2076 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2077 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2078 emit_move_insn (tmps[i], reg);
2080 emit_queue ();
2082 /* If we won't be storing directly into memory, protect the real destination
2083 from strange tricks we might play. */
2084 dst = orig_dst;
2085 if (GET_CODE (dst) == PARALLEL)
2087 rtx temp;
2089 /* We can get a PARALLEL dst if there is a conditional expression in
2090 a return statement. In that case, the dst and src are the same,
2091 so no action is necessary. */
2092 if (rtx_equal_p (dst, src))
2093 return;
2095 /* It is unclear if we can ever reach here, but we may as well handle
2096 it. Allocate a temporary, and split this into a store/load to/from
2097 the temporary. */
2099 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2100 emit_group_store (temp, src, ssize, align);
2101 emit_group_load (dst, temp, ssize, align);
2102 return;
2104 else if (GET_CODE (dst) != MEM)
2106 dst = gen_reg_rtx (GET_MODE (orig_dst));
2107 /* Make life a bit easier for combine. */
2108 emit_move_insn (dst, const0_rtx);
2111 /* Process the pieces. */
2112 for (i = start; i < XVECLEN (src, 0); i++)
2114 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2115 enum machine_mode mode = GET_MODE (tmps[i]);
2116 unsigned int bytelen = GET_MODE_SIZE (mode);
2118 /* Handle trailing fragments that run over the size of the struct. */
2119 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2121 if (BYTES_BIG_ENDIAN)
2123 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2124 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2125 tmps[i], 0, OPTAB_WIDEN);
2127 bytelen = ssize - bytepos;
2130 /* Optimize the access just a bit. */
2131 if (GET_CODE (dst) == MEM
2132 && align >= GET_MODE_ALIGNMENT (mode)
2133 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2134 && bytelen == GET_MODE_SIZE (mode))
2135 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2136 else
2137 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2138 mode, tmps[i], align, ssize);
2141 emit_queue ();
2143 /* Copy from the pseudo into the (probable) hard reg. */
2144 if (GET_CODE (dst) == REG)
2145 emit_move_insn (orig_dst, dst);
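
/* Illustrative sketch, not built as part of this file: the mirror image of
   the previous sketch, storing a value described by such a PARALLEL (for
   example a multi-register return value) into a BLKmode stack temporary.
   The size and alignment are hypothetical.  */
#if 0
static void
example_store_from_parallel (src)
     rtx src;			/* a PARALLEL as built in the previous sketch */
{
  rtx mem = assign_stack_temp (BLKmode, 2 * UNITS_PER_WORD, 0);

  emit_group_store (mem, src, 2 * UNITS_PER_WORD, BITS_PER_WORD);
}
#endif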
2148 /* Generate code to copy a BLKmode object of TYPE out of a
2149 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2150 is null, a stack temporary is created. TGTBLK is returned.
2152 The primary purpose of this routine is to handle functions
2153 that return BLKmode structures in registers. Some machines
2154 (the PA for example) want to return all small structures
2155 in registers regardless of the structure's alignment. */
2158 copy_blkmode_from_reg (tgtblk, srcreg, type)
2159 rtx tgtblk;
2160 rtx srcreg;
2161 tree type;
2163 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2164 rtx src = NULL, dst = NULL;
2165 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2166 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2168 if (tgtblk == 0)
2170 tgtblk = assign_temp (build_qualified_type (type,
2171 (TYPE_QUALS (type)
2172 | TYPE_QUAL_CONST)),
2173 0, 1, 1);
2174 preserve_temp_slots (tgtblk);
2177 /* This code assumes srcreg is at least a full word. If it isn't,
2178 copy it into a new pseudo which is a full word. */
2179 if (GET_MODE (srcreg) != BLKmode
2180 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2181 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2183 /* Structures whose size is not a multiple of a word are aligned
2184 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2185 machine, this means we must skip the empty high order bytes when
2186 calculating the bit offset. */
2187 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2188 big_endian_correction
2189 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
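  /* Worked example (illustrative): with UNITS_PER_WORD == 4 and a 6-byte
     structure, bytes % UNITS_PER_WORD == 2, so big_endian_correction is
     32 - 2 * 8 == 16.  The high-order 16 bits of the first source word are
     the empty padding, and the extraction loop below therefore starts with
     xbitpos == 16.  */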
2191 /* Copy the structure BITSIZE bits at a time.
2193 We could probably emit more efficient code for machines which do not use
2194 strict alignment, but it doesn't seem worth the effort at the current
2195 time. */
2196 for (bitpos = 0, xbitpos = big_endian_correction;
2197 bitpos < bytes * BITS_PER_UNIT;
2198 bitpos += bitsize, xbitpos += bitsize)
2200 /* We need a new source operand each time xbitpos is on a
2201 word boundary and when xbitpos == big_endian_correction
2202 (the first time through). */
2203 if (xbitpos % BITS_PER_WORD == 0
2204 || xbitpos == big_endian_correction)
2205 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2206 GET_MODE (srcreg));
2208 /* We need a new destination operand each time bitpos is on
2209 a word boundary. */
2210 if (bitpos % BITS_PER_WORD == 0)
2211 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2213 /* Use xbitpos for the source extraction (right justified) and
2214 bitpos for the destination store (left justified). */
2215 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2216 extract_bit_field (src, bitsize,
2217 xbitpos % BITS_PER_WORD, 1,
2218 NULL_RTX, word_mode, word_mode,
2219 bitsize, BITS_PER_WORD),
2220 bitsize, BITS_PER_WORD);
2223 return tgtblk;
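
/* Illustrative sketch, not built as part of this file: how call-expansion
   code might use copy_blkmode_from_reg to spill a structure returned in
   registers.  TYPE and RETREG are whatever the caller already has; passing
   a null TGTBLK lets the routine allocate the stack temporary itself.  */
#if 0
static rtx
example_copy_struct_return (type, retreg)
     tree type;
     rtx retreg;
{
  return copy_blkmode_from_reg (NULL_RTX, retreg, type);
}
#endif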
2226 /* Add a USE expression for REG to the (possibly empty) list pointed
2227 to by CALL_FUSAGE. REG must denote a hard register. */
2229 void
2230 use_reg (call_fusage, reg)
2231 rtx *call_fusage, reg;
2233 if (GET_CODE (reg) != REG
2234 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2235 abort ();
2237 *call_fusage
2238 = gen_rtx_EXPR_LIST (VOIDmode,
2239 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2242 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2243 starting at REGNO. All of these registers must be hard registers. */
2245 void
2246 use_regs (call_fusage, regno, nregs)
2247 rtx *call_fusage;
2248 int regno;
2249 int nregs;
2251 int i;
2253 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2254 abort ();
2256 for (i = 0; i < nregs; i++)
2257 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2260 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2261 PARALLEL REGS. This is for calls that pass values in multiple
2262 non-contiguous locations. The Irix 6 ABI has examples of this. */
2264 void
2265 use_group_regs (call_fusage, regs)
2266 rtx *call_fusage;
2267 rtx regs;
2269 int i;
2271 for (i = 0; i < XVECLEN (regs, 0); i++)
2273 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2275 /* A NULL entry means the parameter goes both on the stack and in
2276 registers. This can also be a MEM for targets that pass values
2277 partially on the stack and partially in registers. */
2278 if (reg != 0 && GET_CODE (reg) == REG)
2279 use_reg (call_fusage, reg);
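
/* Illustrative sketch, not built as part of this file: collecting the USE
   expressions for a call's argument registers.  The hard register numbers
   (3, 4 and 5) are hypothetical placeholders for whatever FUNCTION_ARG
   returned; the resulting list would become the call's
   CALL_INSN_FUNCTION_USAGE.  */
#if 0
static rtx
example_collect_fusage ()
{
  rtx call_fusage = NULL_RTX;

  /* One argument passed in hard register 3.  */
  use_reg (&call_fusage, gen_rtx_REG (word_mode, 3));

  /* Another argument passed in two consecutive hard registers, 4 and 5.  */
  use_regs (&call_fusage, 4, 2);

  return call_fusage;
}
#endif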
2285 can_store_by_pieces (len, constfun, constfundata, align)
2286 unsigned HOST_WIDE_INT len;
2287 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2288 PTR constfundata;
2289 unsigned int align;
2291 unsigned HOST_WIDE_INT max_size, l;
2292 HOST_WIDE_INT offset = 0;
2293 enum machine_mode mode, tmode;
2294 enum insn_code icode;
2295 int reverse;
2296 rtx cst;
2298 if (! MOVE_BY_PIECES_P (len, align))
2299 return 0;
2301 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2302 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2303 align = MOVE_MAX * BITS_PER_UNIT;
2305 /* We would first store what we can in the largest integer mode, then go to
2306 successively smaller modes. */
2308 for (reverse = 0;
2309 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2310 reverse++)
2312 l = len;
2313 mode = VOIDmode;
2314 max_size = MOVE_MAX_PIECES + 1;
2315 while (max_size > 1)
2317 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2318 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2319 if (GET_MODE_SIZE (tmode) < max_size)
2320 mode = tmode;
2322 if (mode == VOIDmode)
2323 break;
2325 icode = mov_optab->handlers[(int) mode].insn_code;
2326 if (icode != CODE_FOR_nothing
2327 && align >= GET_MODE_ALIGNMENT (mode))
2329 unsigned int size = GET_MODE_SIZE (mode);
2331 while (l >= size)
2333 if (reverse)
2334 offset -= size;
2336 cst = (*constfun) (constfundata, offset, mode);
2337 if (!LEGITIMATE_CONSTANT_P (cst))
2338 return 0;
2340 if (!reverse)
2341 offset += size;
2343 l -= size;
2347 max_size = GET_MODE_SIZE (mode);
2350 /* The code above should have handled everything. */
2351 if (l != 0)
2352 abort ();
2355 return 1;
2358 /* Generate several move instructions to store LEN bytes generated by
2359 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2360 pointer which will be passed as argument in every CONSTFUN call.
2361 ALIGN is maximum alignment we can assume. */
2363 void
2364 store_by_pieces (to, len, constfun, constfundata, align)
2365 rtx to;
2366 unsigned HOST_WIDE_INT len;
2367 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2368 PTR constfundata;
2369 unsigned int align;
2371 struct store_by_pieces data;
2373 if (! MOVE_BY_PIECES_P (len, align))
2374 abort ();
2375 to = protect_from_queue (to, 1);
2376 data.constfun = constfun;
2377 data.constfundata = constfundata;
2378 data.len = len;
2379 data.to = to;
2380 store_by_pieces_1 (&data, align);
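
/* Illustrative sketch, not built as part of this file: a CONSTFUN callback
   and its use with can_store_by_pieces/store_by_pieces to fill a block with
   a repeated byte.  The names and the fill value are hypothetical; only the
   CONSTFUN calling convention is taken from this file.  */
#if 0
static rtx
example_fill_byte_1 (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* Build a CONST_INT whose every byte is the fill value.  */
  unsigned HOST_WIDE_INT c = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | c;

  return GEN_INT (trunc_int_for_mode (val, mode));
}

static void
example_fill (to, len, align)
     rtx to;			/* a BLKmode MEM */
     unsigned HOST_WIDE_INT len;
     unsigned int align;	/* in bits */
{
  unsigned char c = 0xaa;	/* hypothetical fill byte */

  if (can_store_by_pieces (len, example_fill_byte_1, (PTR) &c, align))
    store_by_pieces (to, len, example_fill_byte_1, (PTR) &c, align);
}
#endif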
2383 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2384 rtx with BLKmode). The caller must pass TO through protect_from_queue
2385 before calling. ALIGN is maximum alignment we can assume. */
2387 static void
2388 clear_by_pieces (to, len, align)
2389 rtx to;
2390 unsigned HOST_WIDE_INT len;
2391 unsigned int align;
2393 struct store_by_pieces data;
2395 data.constfun = clear_by_pieces_1;
2396 data.constfundata = NULL;
2397 data.len = len;
2398 data.to = to;
2399 store_by_pieces_1 (&data, align);
2402 /* Callback routine for clear_by_pieces.
2403 Return const0_rtx unconditionally. */
2405 static rtx
2406 clear_by_pieces_1 (data, offset, mode)
2407 PTR data ATTRIBUTE_UNUSED;
2408 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2409 enum machine_mode mode ATTRIBUTE_UNUSED;
2411 return const0_rtx;
2414 /* Subroutine of clear_by_pieces and store_by_pieces.
2415 Generate several move instructions to store LEN bytes of block TO. (A MEM
2416 rtx with BLKmode). The caller must pass TO through protect_from_queue
2417 before calling. ALIGN is maximum alignment we can assume. */
2419 static void
2420 store_by_pieces_1 (data, align)
2421 struct store_by_pieces *data;
2422 unsigned int align;
2424 rtx to_addr = XEXP (data->to, 0);
2425 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2426 enum machine_mode mode = VOIDmode, tmode;
2427 enum insn_code icode;
2429 data->offset = 0;
2430 data->to_addr = to_addr;
2431 data->autinc_to
2432 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2433 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2435 data->explicit_inc_to = 0;
2436 data->reverse
2437 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2438 if (data->reverse)
2439 data->offset = data->len;
2441 /* If storing requires more than two move insns,
2442 copy addresses to registers (to make displacements shorter)
2443 and use post-increment if available. */
2444 if (!data->autinc_to
2445 && move_by_pieces_ninsns (data->len, align) > 2)
2447 /* Determine the main mode we'll be using. */
2448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2450 if (GET_MODE_SIZE (tmode) < max_size)
2451 mode = tmode;
2453 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2455 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2456 data->autinc_to = 1;
2457 data->explicit_inc_to = -1;
2460 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2461 && ! data->autinc_to)
2463 data->to_addr = copy_addr_to_reg (to_addr);
2464 data->autinc_to = 1;
2465 data->explicit_inc_to = 1;
2468 if ( !data->autinc_to && CONSTANT_P (to_addr))
2469 data->to_addr = copy_addr_to_reg (to_addr);
2472 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2473 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2474 align = MOVE_MAX * BITS_PER_UNIT;
2476 /* First store what we can in the largest integer mode, then go to
2477 successively smaller modes. */
2479 while (max_size > 1)
2481 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2482 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2483 if (GET_MODE_SIZE (tmode) < max_size)
2484 mode = tmode;
2486 if (mode == VOIDmode)
2487 break;
2489 icode = mov_optab->handlers[(int) mode].insn_code;
2490 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2491 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2493 max_size = GET_MODE_SIZE (mode);
2496 /* The code above should have handled everything. */
2497 if (data->len != 0)
2498 abort ();
2501 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2502 with move instructions for mode MODE. GENFUN is the gen_... function
2503 to make a move insn for that mode. DATA has all the other info. */
2505 static void
2506 store_by_pieces_2 (genfun, mode, data)
2507 rtx (*genfun) PARAMS ((rtx, ...));
2508 enum machine_mode mode;
2509 struct store_by_pieces *data;
2511 unsigned int size = GET_MODE_SIZE (mode);
2512 rtx to1, cst;
2514 while (data->len >= size)
2516 if (data->reverse)
2517 data->offset -= size;
2519 if (data->autinc_to)
2521 to1 = replace_equiv_address (data->to, data->to_addr);
2522 to1 = adjust_address (to1, mode, 0);
2524 else
2525 to1 = adjust_address (data->to, mode, data->offset);
2527 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2528 emit_insn (gen_add2_insn (data->to_addr,
2529 GEN_INT (-(HOST_WIDE_INT) size)));
2531 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2532 emit_insn ((*genfun) (to1, cst));
2534 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2535 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2537 if (! data->reverse)
2538 data->offset += size;
2540 data->len -= size;
2544 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2545 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2547 If we call a function that returns the length of the block, return it. */
2550 clear_storage (object, size, align)
2551 rtx object;
2552 rtx size;
2553 unsigned int align;
2555 #ifdef TARGET_MEM_FUNCTIONS
2556 static tree fn;
2557 tree call_expr, arg_list;
2558 #endif
2559 rtx retval = 0;
2561 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2562 just move a zero. Otherwise, do this a piece at a time. */
2563 if (GET_MODE (object) != BLKmode
2564 && GET_CODE (size) == CONST_INT
2565 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2566 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2567 else
2569 object = protect_from_queue (object, 1);
2570 size = protect_from_queue (size, 0);
2572 if (GET_CODE (size) == CONST_INT
2573 && MOVE_BY_PIECES_P (INTVAL (size), align))
2574 clear_by_pieces (object, INTVAL (size), align);
2575 else
2577 /* Try the most limited insn first, because there's no point
2578 including more than one in the machine description unless
2579 the more limited one has some advantage. */
2581 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2582 enum machine_mode mode;
2584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2585 mode = GET_MODE_WIDER_MODE (mode))
2587 enum insn_code code = clrstr_optab[(int) mode];
2588 insn_operand_predicate_fn pred;
2590 if (code != CODE_FOR_nothing
2591 /* We don't need MODE to be narrower than
2592 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2593 the mode mask, as it is returned by the macro, it will
2594 definitely be less than the actual mode mask. */
2595 && ((GET_CODE (size) == CONST_INT
2596 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2597 <= (GET_MODE_MASK (mode) >> 1)))
2598 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2599 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2600 || (*pred) (object, BLKmode))
2601 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2602 || (*pred) (opalign, VOIDmode)))
2604 rtx op1;
2605 rtx last = get_last_insn ();
2606 rtx pat;
2608 op1 = convert_to_mode (mode, size, 1);
2609 pred = insn_data[(int) code].operand[1].predicate;
2610 if (pred != 0 && ! (*pred) (op1, mode))
2611 op1 = copy_to_mode_reg (mode, op1);
2613 pat = GEN_FCN ((int) code) (object, op1, opalign);
2614 if (pat)
2616 emit_insn (pat);
2617 return 0;
2619 else
2620 delete_insns_since (last);
2624 /* OBJECT or SIZE may have been passed through protect_from_queue.
2626 It is unsafe to save the value generated by protect_from_queue
2627 and reuse it later. Consider what happens if emit_queue is
2628 called before the return value from protect_from_queue is used.
2630 Expansion of the CALL_EXPR below will call emit_queue before
2631 we are finished emitting RTL for argument setup. So if we are
2632 not careful we could get the wrong value for an argument.
2634 To avoid this problem we go ahead and emit code to copy OBJECT
2635 and SIZE into new pseudos. We can then place those new pseudos
2636 into an RTL_EXPR and use them later, even after a call to
2637 emit_queue.
2639 Note this is not strictly needed for library calls since they
2640 do not call emit_queue before loading their arguments. However,
2641 we may need to have library calls call emit_queue in the future
2642 since failing to do so could cause problems for targets which
2643 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2644 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2646 #ifdef TARGET_MEM_FUNCTIONS
2647 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2648 #else
2649 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2650 TREE_UNSIGNED (integer_type_node));
2651 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2652 #endif
2654 #ifdef TARGET_MEM_FUNCTIONS
2655 /* It is incorrect to use the libcall calling conventions to call
2656 memset in this context.
2658 This could be a user call to memset and the user may wish to
2659 examine the return value from memset.
2661 For targets where libcalls and normal calls have different
2662 conventions for returning pointers, we could end up generating
2663 incorrect code.
2665 So instead of using a libcall sequence we build up a suitable
2666 CALL_EXPR and expand the call in the normal fashion. */
2667 if (fn == NULL_TREE)
2669 tree fntype;
2671 /* This was copied from except.c; I don't know if all this is
2672 necessary in this context or not. */
2673 fn = get_identifier ("memset");
2674 fntype = build_pointer_type (void_type_node);
2675 fntype = build_function_type (fntype, NULL_TREE);
2676 fn = build_decl (FUNCTION_DECL, fn, fntype);
2677 ggc_add_tree_root (&fn, 1);
2678 DECL_EXTERNAL (fn) = 1;
2679 TREE_PUBLIC (fn) = 1;
2680 DECL_ARTIFICIAL (fn) = 1;
2681 TREE_NOTHROW (fn) = 1;
2682 make_decl_rtl (fn, NULL);
2683 assemble_external (fn);
2686 /* We need to make an argument list for the function call.
2688 memset has three arguments: the first is a void * address, the
2689 second an integer with the initialization value, and the last a
2690 size_t byte count for the copy. */
2691 arg_list
2692 = build_tree_list (NULL_TREE,
2693 make_tree (build_pointer_type (void_type_node),
2694 object));
2695 TREE_CHAIN (arg_list)
2696 = build_tree_list (NULL_TREE,
2697 make_tree (integer_type_node, const0_rtx));
2698 TREE_CHAIN (TREE_CHAIN (arg_list))
2699 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2700 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2702 /* Now we have to build up the CALL_EXPR itself. */
2703 call_expr = build1 (ADDR_EXPR,
2704 build_pointer_type (TREE_TYPE (fn)), fn);
2705 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2706 call_expr, arg_list, NULL_TREE);
2707 TREE_SIDE_EFFECTS (call_expr) = 1;
2709 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2710 #else
2711 emit_library_call (bzero_libfunc, LCT_NORMAL,
2712 VOIDmode, 2, object, Pmode, size,
2713 TYPE_MODE (integer_type_node));
2714 #endif
2718 return retval;
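
/* Illustrative sketch, not built as part of this file: zeroing a freshly
   allocated BLKmode stack temporary.  The 64-byte size and the word
   alignment are hypothetical.  */
#if 0
static rtx
example_clear_temp ()
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);

  clear_storage (mem, GEN_INT (64), BITS_PER_WORD);
  return mem;
}
#endif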
2721 /* Generate code to copy Y into X.
2722 Both Y and X must have the same mode, except that
2723 Y can be a constant with VOIDmode.
2724 This mode cannot be BLKmode; use emit_block_move for that.
2726 Return the last instruction emitted. */
2729 emit_move_insn (x, y)
2730 rtx x, y;
2732 enum machine_mode mode = GET_MODE (x);
2733 rtx y_cst = NULL_RTX;
2734 rtx last_insn;
2736 x = protect_from_queue (x, 1);
2737 y = protect_from_queue (y, 0);
2739 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2740 abort ();
2742 /* Never force constant_p_rtx to memory. */
2743 if (GET_CODE (y) == CONSTANT_P_RTX)
2745 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2747 y_cst = y;
2748 y = force_const_mem (mode, y);
2751 /* If X or Y are memory references, verify that their addresses are valid
2752 for the machine. */
2753 if (GET_CODE (x) == MEM
2754 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2755 && ! push_operand (x, GET_MODE (x)))
2756 || (flag_force_addr
2757 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2758 x = validize_mem (x);
2760 if (GET_CODE (y) == MEM
2761 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2762 || (flag_force_addr
2763 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2764 y = validize_mem (y);
2766 if (mode == BLKmode)
2767 abort ();
2769 last_insn = emit_move_insn_1 (x, y);
2771 if (y_cst && GET_CODE (x) == REG)
2772 REG_NOTES (last_insn)
2773 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2775 return last_insn;
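
/* Illustrative sketch, not built as part of this file: the usual pattern of
   loading a constant into a fresh pseudo and copying it into memory.  MEM
   is assumed to be a valid SImode memory reference supplied by the caller.  */
#if 0
static void
example_move_constant (mem)
     rtx mem;			/* an SImode MEM */
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
  emit_move_insn (mem, reg);
}
#endif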
2778 /* Low level part of emit_move_insn.
2779 Called just like emit_move_insn, but assumes X and Y
2780 are basically valid. */
2783 emit_move_insn_1 (x, y)
2784 rtx x, y;
2786 enum machine_mode mode = GET_MODE (x);
2787 enum machine_mode submode;
2788 enum mode_class class = GET_MODE_CLASS (mode);
2789 unsigned int i;
2791 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2792 abort ();
2794 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2795 return
2796 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2798 /* Expand complex moves by moving real part and imag part, if possible. */
2799 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2800 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2801 * BITS_PER_UNIT),
2802 (class == MODE_COMPLEX_INT
2803 ? MODE_INT : MODE_FLOAT),
2805 && (mov_optab->handlers[(int) submode].insn_code
2806 != CODE_FOR_nothing))
2808 /* Don't split destination if it is a stack push. */
2809 int stack = push_operand (x, GET_MODE (x));
2811 #ifdef PUSH_ROUNDING
2812 /* In case we output to the stack, but the size is smaller than what the
2813 machine can push exactly, we need to use move instructions. */
2814 if (stack
2815 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2817 rtx temp;
2818 int offset1, offset2;
2820 /* Do not use anti_adjust_stack, since we don't want to update
2821 stack_pointer_delta. */
2822 temp = expand_binop (Pmode,
2823 #ifdef STACK_GROWS_DOWNWARD
2824 sub_optab,
2825 #else
2826 add_optab,
2827 #endif
2828 stack_pointer_rtx,
2829 GEN_INT
2830 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2831 stack_pointer_rtx,
2833 OPTAB_LIB_WIDEN);
2834 if (temp != stack_pointer_rtx)
2835 emit_move_insn (stack_pointer_rtx, temp);
2836 #ifdef STACK_GROWS_DOWNWARD
2837 offset1 = 0;
2838 offset2 = GET_MODE_SIZE (submode);
2839 #else
2840 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2841 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2842 + GET_MODE_SIZE (submode));
2843 #endif
2844 emit_move_insn (change_address (x, submode,
2845 gen_rtx_PLUS (Pmode,
2846 stack_pointer_rtx,
2847 GEN_INT (offset1))),
2848 gen_realpart (submode, y));
2849 emit_move_insn (change_address (x, submode,
2850 gen_rtx_PLUS (Pmode,
2851 stack_pointer_rtx,
2852 GEN_INT (offset2))),
2853 gen_imagpart (submode, y));
2855 else
2856 #endif
2857 /* If this is a stack push, push the highpart first, so it
2858 will be in the argument order.
2860 In that case, change_address is used only to convert
2861 the mode, not to change the address. */
2862 if (stack)
2864 /* Note that the real part always precedes the imag part in memory
2865 regardless of machine's endianness. */
2866 #ifdef STACK_GROWS_DOWNWARD
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_imagpart (submode, y)));
2870 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2871 (gen_rtx_MEM (submode, XEXP (x, 0)),
2872 gen_realpart (submode, y)));
2873 #else
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_realpart (submode, y)));
2877 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2878 (gen_rtx_MEM (submode, XEXP (x, 0)),
2879 gen_imagpart (submode, y)));
2880 #endif
2882 else
2884 rtx realpart_x, realpart_y;
2885 rtx imagpart_x, imagpart_y;
2887 /* If this is a complex value with each part being smaller than a
2888 word, the usual calling sequence will likely pack the pieces into
2889 a single register. Unfortunately, SUBREG of hard registers only
2890 deals in terms of words, so we have a problem converting input
2891 arguments to the CONCAT of two registers that is used elsewhere
2892 for complex values. If this is before reload, we can copy it into
2893 memory and reload. FIXME, we should see about using extract and
2894 insert on integer registers, but complex short and complex char
2895 variables should be rarely used. */
2896 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2897 && (reload_in_progress | reload_completed) == 0)
2899 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2900 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2902 if (packed_dest_p || packed_src_p)
2904 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2905 ? MODE_FLOAT : MODE_INT);
2907 enum machine_mode reg_mode
2908 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2910 if (reg_mode != BLKmode)
2912 rtx mem = assign_stack_temp (reg_mode,
2913 GET_MODE_SIZE (mode), 0);
2914 rtx cmem = adjust_address (mem, mode, 0);
2916 cfun->cannot_inline
2917 = N_("function using short complex types cannot be inline");
2919 if (packed_dest_p)
2921 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2922 emit_move_insn_1 (cmem, y);
2923 return emit_move_insn_1 (sreg, mem);
2925 else
2927 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2928 emit_move_insn_1 (mem, sreg);
2929 return emit_move_insn_1 (x, cmem);
2935 realpart_x = gen_realpart (submode, x);
2936 realpart_y = gen_realpart (submode, y);
2937 imagpart_x = gen_imagpart (submode, x);
2938 imagpart_y = gen_imagpart (submode, y);
2940 /* Show the output dies here. This is necessary for SUBREGs
2941 of pseudos since we cannot track their lifetimes correctly;
2942 hard regs shouldn't appear here except as return values.
2943 We never want to emit such a clobber after reload. */
2944 if (x != y
2945 && ! (reload_in_progress || reload_completed)
2946 && (GET_CODE (realpart_x) == SUBREG
2947 || GET_CODE (imagpart_x) == SUBREG))
2949 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2952 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2953 (realpart_x, realpart_y));
2954 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2955 (imagpart_x, imagpart_y));
2958 return get_last_insn ();
2961 /* This will handle any multi-word mode that lacks a move_insn pattern.
2962 However, you will get better code if you define such patterns,
2963 even if they must turn into multiple assembler instructions. */
2964 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2966 rtx last_insn = 0;
2967 rtx seq, inner;
2968 int need_clobber;
2970 #ifdef PUSH_ROUNDING
2972 /* If X is a push on the stack, do the push now and replace
2973 X with a reference to the stack pointer. */
2974 if (push_operand (x, GET_MODE (x)))
2976 rtx temp;
2977 enum rtx_code code;
2979 /* Do not use anti_adjust_stack, since we don't want to update
2980 stack_pointer_delta. */
2981 temp = expand_binop (Pmode,
2982 #ifdef STACK_GROWS_DOWNWARD
2983 sub_optab,
2984 #else
2985 add_optab,
2986 #endif
2987 stack_pointer_rtx,
2988 GEN_INT
2989 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2990 stack_pointer_rtx,
2992 OPTAB_LIB_WIDEN);
2993 if (temp != stack_pointer_rtx)
2994 emit_move_insn (stack_pointer_rtx, temp);
2996 code = GET_CODE (XEXP (x, 0));
2997 /* Just hope that small offsets off SP are OK. */
2998 if (code == POST_INC)
2999 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3000 GEN_INT (-(HOST_WIDE_INT)
3001 GET_MODE_SIZE (GET_MODE (x))));
3002 else if (code == POST_DEC)
3003 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3004 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3005 else
3006 temp = stack_pointer_rtx;
3008 x = change_address (x, VOIDmode, temp);
3010 #endif
3012 /* If we are in reload, see if either operand is a MEM whose address
3013 is scheduled for replacement. */
3014 if (reload_in_progress && GET_CODE (x) == MEM
3015 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3016 x = replace_equiv_address_nv (x, inner);
3017 if (reload_in_progress && GET_CODE (y) == MEM
3018 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3019 y = replace_equiv_address_nv (y, inner);
3021 start_sequence ();
3023 need_clobber = 0;
3024 for (i = 0;
3025 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3026 i++)
3028 rtx xpart = operand_subword (x, i, 1, mode);
3029 rtx ypart = operand_subword (y, i, 1, mode);
3031 /* If we can't get a part of Y, put Y into memory if it is a
3032 constant. Otherwise, force it into a register. If we still
3033 can't get a part of Y, abort. */
3034 if (ypart == 0 && CONSTANT_P (y))
3036 y = force_const_mem (mode, y);
3037 ypart = operand_subword (y, i, 1, mode);
3039 else if (ypart == 0)
3040 ypart = operand_subword_force (y, i, mode);
3042 if (xpart == 0 || ypart == 0)
3043 abort ();
3045 need_clobber |= (GET_CODE (xpart) == SUBREG);
3047 last_insn = emit_move_insn (xpart, ypart);
3050 seq = gen_sequence ();
3051 end_sequence ();
3053 /* Show the output dies here. This is necessary for SUBREGs
3054 of pseudos since we cannot track their lifetimes correctly;
3055 hard regs shouldn't appear here except as return values.
3056 We never want to emit such a clobber after reload. */
3057 if (x != y
3058 && ! (reload_in_progress || reload_completed)
3059 && need_clobber != 0)
3061 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3064 emit_insn (seq);
3066 return last_insn;
3068 else
3069 abort ();
3072 /* Pushing data onto the stack. */
3074 /* Push a block of length SIZE (perhaps variable)
3075 and return an rtx to address the beginning of the block.
3076 Note that it is not possible for the value returned to be a QUEUED.
3077 The value may be virtual_outgoing_args_rtx.
3079 EXTRA is the number of bytes of padding to push in addition to SIZE.
3080 BELOW nonzero means this padding comes at low addresses;
3081 otherwise, the padding comes at high addresses. */
3084 push_block (size, extra, below)
3085 rtx size;
3086 int extra, below;
3088 register rtx temp;
3090 size = convert_modes (Pmode, ptr_mode, size, 1);
3091 if (CONSTANT_P (size))
3092 anti_adjust_stack (plus_constant (size, extra));
3093 else if (GET_CODE (size) == REG && extra == 0)
3094 anti_adjust_stack (size);
3095 else
3097 temp = copy_to_mode_reg (Pmode, size);
3098 if (extra != 0)
3099 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3100 temp, 0, OPTAB_LIB_WIDEN);
3101 anti_adjust_stack (temp);
3104 #ifndef STACK_GROWS_DOWNWARD
3105 #ifdef ARGS_GROW_DOWNWARD
3106 if (!ACCUMULATE_OUTGOING_ARGS)
3107 #else
3108 if (0)
3109 #endif
3110 #else
3111 if (1)
3112 #endif
3114 /* Return the lowest stack address when STACK or ARGS grow downward and
3115 we are not accumulating outgoing arguments (the c4x port uses such
3116 conventions). */
3117 temp = virtual_outgoing_args_rtx;
3118 if (extra != 0 && below)
3119 temp = plus_constant (temp, extra);
3121 else
3123 if (GET_CODE (size) == CONST_INT)
3124 temp = plus_constant (virtual_outgoing_args_rtx,
3125 -INTVAL (size) - (below ? 0 : extra));
3126 else if (extra != 0 && !below)
3127 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3128 negate_rtx (Pmode, plus_constant (size, extra)));
3129 else
3130 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3131 negate_rtx (Pmode, size));
3134 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
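
/* Illustrative sketch, not built as part of this file: allocating stack
   space for a variable-sized block and wrapping the returned address in a
   MEM.  SIZE is a ptr_mode rtx computed by the caller; no extra padding is
   requested here.  */
#if 0
static rtx
example_push_variable_block (size)
     rtx size;
{
  rtx addr = push_block (size, 0, 0);

  return gen_rtx_MEM (BLKmode, addr);
}
#endif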
3138 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3139 block of SIZE bytes. */
3141 static rtx
3142 get_push_address (size)
3143 int size;
3145 register rtx temp;
3147 if (STACK_PUSH_CODE == POST_DEC)
3148 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3149 else if (STACK_PUSH_CODE == POST_INC)
3150 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3151 else
3152 temp = stack_pointer_rtx;
3154 return copy_to_reg (temp);
3157 #ifdef PUSH_ROUNDING
3159 /* Emit single push insn. */
3161 static void
3162 emit_single_push_insn (mode, x, type)
3163 rtx x;
3164 enum machine_mode mode;
3165 tree type;
3167 rtx dest_addr;
3168 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3169 rtx dest;
3170 enum insn_code icode;
3171 insn_operand_predicate_fn pred;
3173 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3174 /* If there is a push pattern, use it. Otherwise try the old way of
3175 throwing a MEM representing the push operation to the move expander. */
3176 icode = push_optab->handlers[(int) mode].insn_code;
3177 if (icode != CODE_FOR_nothing)
3179 if (((pred = insn_data[(int) icode].operand[0].predicate)
3180 && !((*pred) (x, mode))))
3181 x = force_reg (mode, x);
3182 emit_insn (GEN_FCN (icode) (x));
3183 return;
3185 if (GET_MODE_SIZE (mode) == rounded_size)
3186 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3187 else
3189 #ifdef STACK_GROWS_DOWNWARD
3190 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3191 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3192 #else
3193 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3194 GEN_INT (rounded_size));
3195 #endif
3196 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3199 dest = gen_rtx_MEM (mode, dest_addr);
3201 if (type != 0)
3203 set_mem_attributes (dest, type, 1);
3204 /* Function incoming arguments may overlap with sibling call
3205 outgoing arguments and we cannot allow reordering of reads
3206 from function arguments with stores to outgoing arguments
3207 of sibling calls. */
3208 set_mem_alias_set (dest, 0);
3210 emit_move_insn (dest, x);
3212 #endif
3214 /* Generate code to push X onto the stack, assuming it has mode MODE and
3215 type TYPE.
3216 MODE is redundant except when X is a CONST_INT (since they don't
3217 carry mode info).
3218 SIZE is an rtx for the size of data to be copied (in bytes),
3219 needed only if X is BLKmode.
3221 ALIGN (in bits) is maximum alignment we can assume.
3223 If PARTIAL and REG are both nonzero, then copy that many of the first
3224 words of X into registers starting with REG, and push the rest of X.
3225 The amount of space pushed is decreased by PARTIAL words,
3226 rounded *down* to a multiple of PARM_BOUNDARY.
3227 REG must be a hard register in this case.
3228 If REG is zero but PARTIAL is not, take all other actions for an
3229 argument partially in registers, but do not actually load any
3230 registers.
3232 EXTRA is the amount in bytes of extra space to leave next to this arg.
3233 This is ignored if an argument block has already been allocated.
3235 On a machine that lacks real push insns, ARGS_ADDR is the address of
3236 the bottom of the argument block for this call. We use indexing off there
3237 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3238 argument block has not been preallocated.
3240 ARGS_SO_FAR is the size of args previously pushed for this call.
3242 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3243 for arguments passed in registers. If nonzero, it will be the number
3244 of bytes required. */
3246 void
3247 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3248 args_addr, args_so_far, reg_parm_stack_space,
3249 alignment_pad)
3250 register rtx x;
3251 enum machine_mode mode;
3252 tree type;
3253 rtx size;
3254 unsigned int align;
3255 int partial;
3256 rtx reg;
3257 int extra;
3258 rtx args_addr;
3259 rtx args_so_far;
3260 int reg_parm_stack_space;
3261 rtx alignment_pad;
3263 rtx xinner;
3264 enum direction stack_direction
3265 #ifdef STACK_GROWS_DOWNWARD
3266 = downward;
3267 #else
3268 = upward;
3269 #endif
3271 /* Decide where to pad the argument: `downward' for below,
3272 `upward' for above, or `none' for don't pad it.
3273 Default is below for small data on big-endian machines; else above. */
3274 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3276 /* Invert direction if stack is post-update. */
3277 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3278 if (where_pad != none)
3279 where_pad = (where_pad == downward ? upward : downward);
3281 xinner = x = protect_from_queue (x, 0);
3283 if (mode == BLKmode)
3285 /* Copy a block into the stack, entirely or partially. */
3287 register rtx temp;
3288 int used = partial * UNITS_PER_WORD;
3289 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3290 int skip;
3292 if (size == 0)
3293 abort ();
3295 used -= offset;
3297 /* USED is now the # of bytes we need not copy to the stack
3298 because registers will take care of them. */
3300 if (partial != 0)
3301 xinner = adjust_address (xinner, BLKmode, used);
3303 /* If the partial register-part of the arg counts in its stack size,
3304 skip the part of stack space corresponding to the registers.
3305 Otherwise, start copying to the beginning of the stack space,
3306 by setting SKIP to 0. */
3307 skip = (reg_parm_stack_space == 0) ? 0 : used;
3309 #ifdef PUSH_ROUNDING
3310 /* Do it with several push insns if that doesn't take lots of insns
3311 and if there is no difficulty with push insns that skip bytes
3312 on the stack for alignment purposes. */
3313 if (args_addr == 0
3314 && PUSH_ARGS
3315 && GET_CODE (size) == CONST_INT
3316 && skip == 0
3317 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3318 /* Here we avoid the case of a structure whose weak alignment
3319 forces many pushes of a small amount of data,
3320 and such small pushes do rounding that causes trouble. */
3321 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3322 || align >= BIGGEST_ALIGNMENT
3323 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3324 == (align / BITS_PER_UNIT)))
3325 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3327 /* Push padding now if padding above and stack grows down,
3328 or if padding below and stack grows up.
3329 But if space already allocated, this has already been done. */
3330 if (extra && args_addr == 0
3331 && where_pad != none && where_pad != stack_direction)
3332 anti_adjust_stack (GEN_INT (extra));
3334 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3336 if (current_function_check_memory_usage && ! in_check_memory_usage)
3338 rtx temp;
3340 in_check_memory_usage = 1;
3341 temp = get_push_address (INTVAL (size) - used);
3342 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3343 emit_library_call (chkr_copy_bitmap_libfunc,
3344 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3345 Pmode, XEXP (xinner, 0), Pmode,
3346 GEN_INT (INTVAL (size) - used),
3347 TYPE_MODE (sizetype));
3348 else
3349 emit_library_call (chkr_set_right_libfunc,
3350 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3351 Pmode, GEN_INT (INTVAL (size) - used),
3352 TYPE_MODE (sizetype),
3353 GEN_INT (MEMORY_USE_RW),
3354 TYPE_MODE (integer_type_node));
3355 in_check_memory_usage = 0;
3358 else
3359 #endif /* PUSH_ROUNDING */
3361 rtx target;
3363 /* Otherwise make space on the stack and copy the data
3364 to the address of that space. */
3366 /* Deduct words put into registers from the size we must copy. */
3367 if (partial != 0)
3369 if (GET_CODE (size) == CONST_INT)
3370 size = GEN_INT (INTVAL (size) - used);
3371 else
3372 size = expand_binop (GET_MODE (size), sub_optab, size,
3373 GEN_INT (used), NULL_RTX, 0,
3374 OPTAB_LIB_WIDEN);
3377 /* Get the address of the stack space.
3378 In this case, we do not deal with EXTRA separately.
3379 A single stack adjust will do. */
3380 if (! args_addr)
3382 temp = push_block (size, extra, where_pad == downward);
3383 extra = 0;
3385 else if (GET_CODE (args_so_far) == CONST_INT)
3386 temp = memory_address (BLKmode,
3387 plus_constant (args_addr,
3388 skip + INTVAL (args_so_far)));
3389 else
3390 temp = memory_address (BLKmode,
3391 plus_constant (gen_rtx_PLUS (Pmode,
3392 args_addr,
3393 args_so_far),
3394 skip));
3395 if (current_function_check_memory_usage && ! in_check_memory_usage)
3397 in_check_memory_usage = 1;
3398 target = copy_to_reg (temp);
3399 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3400 emit_library_call (chkr_copy_bitmap_libfunc,
3401 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3402 target, Pmode,
3403 XEXP (xinner, 0), Pmode,
3404 size, TYPE_MODE (sizetype));
3405 else
3406 emit_library_call (chkr_set_right_libfunc,
3407 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3408 target, Pmode,
3409 size, TYPE_MODE (sizetype),
3410 GEN_INT (MEMORY_USE_RW),
3411 TYPE_MODE (integer_type_node));
3412 in_check_memory_usage = 0;
3415 target = gen_rtx_MEM (BLKmode, temp);
3417 if (type != 0)
3419 set_mem_attributes (target, type, 1);
3420 /* Function incoming arguments may overlap with sibling call
3421 outgoing arguments and we cannot allow reordering of reads
3422 from function arguments with stores to outgoing arguments
3423 of sibling calls. */
3424 set_mem_alias_set (target, 0);
3427 /* TEMP is the address of the block. Copy the data there. */
3428 if (GET_CODE (size) == CONST_INT
3429 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3431 move_by_pieces (target, xinner, INTVAL (size), align);
3432 goto ret;
3434 else
3436 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3437 enum machine_mode mode;
3439 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3440 mode != VOIDmode;
3441 mode = GET_MODE_WIDER_MODE (mode))
3443 enum insn_code code = movstr_optab[(int) mode];
3444 insn_operand_predicate_fn pred;
3446 if (code != CODE_FOR_nothing
3447 && ((GET_CODE (size) == CONST_INT
3448 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3449 <= (GET_MODE_MASK (mode) >> 1)))
3450 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3451 && (!(pred = insn_data[(int) code].operand[0].predicate)
3452 || ((*pred) (target, BLKmode)))
3453 && (!(pred = insn_data[(int) code].operand[1].predicate)
3454 || ((*pred) (xinner, BLKmode)))
3455 && (!(pred = insn_data[(int) code].operand[3].predicate)
3456 || ((*pred) (opalign, VOIDmode))))
3458 rtx op2 = convert_to_mode (mode, size, 1);
3459 rtx last = get_last_insn ();
3460 rtx pat;
3462 pred = insn_data[(int) code].operand[2].predicate;
3463 if (pred != 0 && ! (*pred) (op2, mode))
3464 op2 = copy_to_mode_reg (mode, op2);
3466 pat = GEN_FCN ((int) code) (target, xinner,
3467 op2, opalign);
3468 if (pat)
3470 emit_insn (pat);
3471 goto ret;
3473 else
3474 delete_insns_since (last);
3479 if (!ACCUMULATE_OUTGOING_ARGS)
3481 /* If the source is referenced relative to the stack pointer,
3482 copy it to another register to stabilize it. We do not need
3483 to do this if we know that we won't be changing sp. */
3485 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3486 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3487 temp = copy_to_reg (temp);
3490 /* Make inhibit_defer_pop nonzero around the library call
3491 to force it to pop the bcopy-arguments right away. */
3492 NO_DEFER_POP;
3493 #ifdef TARGET_MEM_FUNCTIONS
3494 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3495 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3496 convert_to_mode (TYPE_MODE (sizetype),
3497 size, TREE_UNSIGNED (sizetype)),
3498 TYPE_MODE (sizetype));
3499 #else
3500 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3501 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3502 convert_to_mode (TYPE_MODE (integer_type_node),
3503 size,
3504 TREE_UNSIGNED (integer_type_node)),
3505 TYPE_MODE (integer_type_node));
3506 #endif
3507 OK_DEFER_POP;
3510 else if (partial > 0)
3512 /* Scalar partly in registers. */
3514 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3515 int i;
3516 int not_stack;
3517 /* # words of start of argument
3518 that we must make space for but need not store. */
3519 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3520 int args_offset = INTVAL (args_so_far);
3521 int skip;
3523 /* Push padding now if padding above and stack grows down,
3524 or if padding below and stack grows up.
3525 But if space already allocated, this has already been done. */
3526 if (extra && args_addr == 0
3527 && where_pad != none && where_pad != stack_direction)
3528 anti_adjust_stack (GEN_INT (extra));
3530 /* If we make space by pushing it, we might as well push
3531 the real data. Otherwise, we can leave OFFSET nonzero
3532 and leave the space uninitialized. */
3533 if (args_addr == 0)
3534 offset = 0;
3536 /* Now NOT_STACK gets the number of words that we don't need to
3537 allocate on the stack. */
3538 not_stack = partial - offset;
3540 /* If the partial register-part of the arg counts in its stack size,
3541 skip the part of stack space corresponding to the registers.
3542 Otherwise, start copying to the beginning of the stack space,
3543 by setting SKIP to 0. */
3544 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3546 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3547 x = validize_mem (force_const_mem (mode, x));
3549 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3550 SUBREGs of such registers are not allowed. */
3551 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3552 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3553 x = copy_to_reg (x);
3555 /* Loop over all the words allocated on the stack for this arg. */
3556 /* We can do it by words, because any scalar bigger than a word
3557 has a size that is a multiple of a word. */
3558 #ifndef PUSH_ARGS_REVERSED
3559 for (i = not_stack; i < size; i++)
3560 #else
3561 for (i = size - 1; i >= not_stack; i--)
3562 #endif
3563 if (i >= not_stack + offset)
3564 emit_push_insn (operand_subword_force (x, i, mode),
3565 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3566 0, args_addr,
3567 GEN_INT (args_offset + ((i - not_stack + skip)
3568 * UNITS_PER_WORD)),
3569 reg_parm_stack_space, alignment_pad);
3571 else
3573 rtx addr;
3574 rtx target = NULL_RTX;
3575 rtx dest;
3577 /* Push padding now if padding above and stack grows down,
3578 or if padding below and stack grows up.
3579 But if space already allocated, this has already been done. */
3580 if (extra && args_addr == 0
3581 && where_pad != none && where_pad != stack_direction)
3582 anti_adjust_stack (GEN_INT (extra));
3584 #ifdef PUSH_ROUNDING
3585 if (args_addr == 0 && PUSH_ARGS)
3586 emit_single_push_insn (mode, x, type);
3587 else
3588 #endif
3590 if (GET_CODE (args_so_far) == CONST_INT)
3591 addr
3592 = memory_address (mode,
3593 plus_constant (args_addr,
3594 INTVAL (args_so_far)));
3595 else
3596 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3597 args_so_far));
3598 target = addr;
3599 dest = gen_rtx_MEM (mode, addr);
3600 if (type != 0)
3602 set_mem_attributes (dest, type, 1);
3603 /* Function incoming arguments may overlap with sibling call
3604 outgoing arguments and we cannot allow reordering of reads
3605 from function arguments with stores to outgoing arguments
3606 of sibling calls. */
3607 set_mem_alias_set (dest, 0);
3610 emit_move_insn (dest, x);
3614 if (current_function_check_memory_usage && ! in_check_memory_usage)
3616 in_check_memory_usage = 1;
3617 if (target == 0)
3618 target = get_push_address (GET_MODE_SIZE (mode));
3620 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3621 emit_library_call (chkr_copy_bitmap_libfunc,
3622 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3623 Pmode, XEXP (x, 0), Pmode,
3624 GEN_INT (GET_MODE_SIZE (mode)),
3625 TYPE_MODE (sizetype));
3626 else
3627 emit_library_call (chkr_set_right_libfunc,
3628 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3629 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3630 TYPE_MODE (sizetype),
3631 GEN_INT (MEMORY_USE_RW),
3632 TYPE_MODE (integer_type_node));
3633 in_check_memory_usage = 0;
3637 ret:
3638 /* If part should go in registers, copy that part
3639 into the appropriate registers. Do this now, at the end,
3640 since mem-to-mem copies above may do function calls. */
3641 if (partial > 0 && reg != 0)
3643 /* Handle calls that pass values in multiple non-contiguous locations.
3644 The Irix 6 ABI has examples of this. */
3645 if (GET_CODE (reg) == PARALLEL)
3646 emit_group_load (reg, x, -1, align); /* ??? size? */
3647 else
3648 move_block_to_reg (REGNO (reg), x, partial, mode);
3651 if (extra && args_addr == 0 && where_pad == stack_direction)
3652 anti_adjust_stack (GEN_INT (extra));
3654 if (alignment_pad && args_addr == 0)
3655 anti_adjust_stack (alignment_pad);
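
/* Illustrative sketch, not built as part of this file: pushing a single
   word_mode argument with true push insns -- no preallocated argument
   block, no partial-register split, no extra padding.  The PARM_BOUNDARY
   alignment and the const0_rtx ARGS_SO_FAR are simplifying assumptions.  */
#if 0
static void
example_push_word_arg (val)
     rtx val;
{
  emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX,
		  PARM_BOUNDARY, 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx, 0, NULL_RTX);
}
#endif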
3658 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3659 operations. */
3661 static rtx
3662 get_subtarget (x)
3663 rtx x;
3665 return ((x == 0
3666 /* Only registers can be subtargets. */
3667 || GET_CODE (x) != REG
3668 /* If the register is readonly, it can't be set more than once. */
3669 || RTX_UNCHANGING_P (x)
3670 /* Don't use hard regs to avoid extending their life. */
3671 || REGNO (x) < FIRST_PSEUDO_REGISTER
3672 /* Avoid subtargets inside loops,
3673 since they hide some invariant expressions. */
3674 || preserve_subexpressions_p ())
3675 ? 0 : x);
3678 /* Expand an assignment that stores the value of FROM into TO.
3679 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3680 (This may contain a QUEUED rtx;
3681 if the value is constant, this rtx is a constant.)
3682 Otherwise, the returned value is NULL_RTX.
3684 SUGGEST_REG is no longer actually used.
3685 It used to mean: copy the value through a register
3686 and return that register, if that is possible.
3687 We now use WANT_VALUE to decide whether to do this. */
3690 expand_assignment (to, from, want_value, suggest_reg)
3691 tree to, from;
3692 int want_value;
3693 int suggest_reg ATTRIBUTE_UNUSED;
3695 register rtx to_rtx = 0;
3696 rtx result;
3698 /* Don't crash if the lhs of the assignment was erroneous. */
3700 if (TREE_CODE (to) == ERROR_MARK)
3702 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3703 return want_value ? result : NULL_RTX;
3706 /* Assignment of a structure component needs special treatment
3707 if the structure component's rtx is not simply a MEM.
3708 Assignment of an array element at a constant index, and assignment of
3709 an array element in an unaligned packed structure field, have the same
3710 problem. */
3712 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3713 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3715 enum machine_mode mode1;
3716 HOST_WIDE_INT bitsize, bitpos;
3717 tree offset;
3718 int unsignedp;
3719 int volatilep = 0;
3720 tree tem;
3721 unsigned int alignment;
3723 push_temp_slots ();
3724 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3725 &unsignedp, &volatilep, &alignment);
3727 /* If we are going to use store_bit_field and extract_bit_field,
3728 make sure to_rtx will be safe for multiple use. */
3730 if (mode1 == VOIDmode && want_value)
3731 tem = stabilize_reference (tem);
3733 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3734 if (offset != 0)
3736 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3738 if (GET_CODE (to_rtx) != MEM)
3739 abort ();
3741 if (GET_MODE (offset_rtx) != ptr_mode)
3743 #ifdef POINTERS_EXTEND_UNSIGNED
3744 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3745 #else
3746 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3747 #endif
3750 /* A constant address in TO_RTX can have VOIDmode; we must not try
3751 to call force_reg for that case, so avoid it here. */
3752 if (GET_CODE (to_rtx) == MEM
3753 && GET_MODE (to_rtx) == BLKmode
3754 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3755 && bitsize
3756 && (bitpos % bitsize) == 0
3757 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3758 && alignment == GET_MODE_ALIGNMENT (mode1))
3760 rtx temp
3761 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3763 if (GET_CODE (XEXP (temp, 0)) == REG)
3764 to_rtx = temp;
3765 else
3766 to_rtx = (replace_equiv_address
3767 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3768 XEXP (temp, 0))));
3769 bitpos = 0;
3772 to_rtx = change_address (to_rtx, VOIDmode,
3773 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3774 force_reg (ptr_mode,
3775 offset_rtx)));
3778 if (volatilep)
3780 if (GET_CODE (to_rtx) == MEM)
3782 /* When the offset is zero, to_rtx is the address of the
3783 structure we are storing into, and hence may be shared.
3784 We must make a new MEM before setting the volatile bit. */
3785 if (offset == 0)
3786 to_rtx = copy_rtx (to_rtx);
3788 MEM_VOLATILE_P (to_rtx) = 1;
3790 #if 0 /* This was turned off because, when a field is volatile
3791 in an object which is not volatile, the object may be in a register,
3792 and then we would abort over here. */
3793 else
3794 abort ();
3795 #endif
3798 if (TREE_CODE (to) == COMPONENT_REF
3799 && TREE_READONLY (TREE_OPERAND (to, 1)))
3801 if (offset == 0)
3802 to_rtx = copy_rtx (to_rtx);
3804 RTX_UNCHANGING_P (to_rtx) = 1;
3807 /* Check the access. */
3808 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3810 rtx to_addr;
3811 int size;
3812 int best_mode_size;
3813 enum machine_mode best_mode;
3815 best_mode = get_best_mode (bitsize, bitpos,
3816 TYPE_ALIGN (TREE_TYPE (tem)),
3817 mode1, volatilep);
3818 if (best_mode == VOIDmode)
3819 best_mode = QImode;
3821 best_mode_size = GET_MODE_BITSIZE (best_mode);
3822 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3823 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3824 size *= GET_MODE_SIZE (best_mode);
3826 /* Check the access right of the pointer. */
3827 in_check_memory_usage = 1;
3828 if (size)
3829 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3830 VOIDmode, 3, to_addr, Pmode,
3831 GEN_INT (size), TYPE_MODE (sizetype),
3832 GEN_INT (MEMORY_USE_WO),
3833 TYPE_MODE (integer_type_node));
3834 in_check_memory_usage = 0;
3837 /* If this is a varying-length object, we must get the address of
3838 the source and do an explicit block move. */
3839 if (bitsize < 0)
3841 unsigned int from_align;
3842 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3843 rtx inner_to_rtx
3844 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3846 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3847 MIN (alignment, from_align));
3848 free_temp_slots ();
3849 pop_temp_slots ();
3850 return to_rtx;
3852 else
3854 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3855 (want_value
3856 /* Spurious cast for HPUX compiler. */
3857 ? ((enum machine_mode)
3858 TYPE_MODE (TREE_TYPE (to)))
3859 : VOIDmode),
3860 unsignedp,
3861 alignment,
3862 int_size_in_bytes (TREE_TYPE (tem)),
3863 get_alias_set (to));
3865 preserve_temp_slots (result);
3866 free_temp_slots ();
3867 pop_temp_slots ();
3869 /* If the value is meaningful, convert RESULT to the proper mode.
3870 Otherwise, return nothing. */
3871 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3872 TYPE_MODE (TREE_TYPE (from)),
3873 result,
3874 TREE_UNSIGNED (TREE_TYPE (to)))
3875 : NULL_RTX);
3879 /* If the rhs is a function call and its value is not an aggregate,
3880 call the function before we start to compute the lhs.
3881 This is needed for correct code for cases such as
3882 val = setjmp (buf) on machines where reference to val
3883 requires loading up part of an address in a separate insn.
3885 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3886 since it might be a promoted variable where the zero- or sign- extension
3887 needs to be done. Handling this in the normal way is safe because no
3888 computation is done before the call. */
3889 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3890 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3891 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3892 && GET_CODE (DECL_RTL (to)) == REG))
3894 rtx value;
3896 push_temp_slots ();
3897 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3898 if (to_rtx == 0)
3899 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3901 /* Handle calls that return values in multiple non-contiguous locations.
3902 The Irix 6 ABI has examples of this. */
3903 if (GET_CODE (to_rtx) == PARALLEL)
3904 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3905 TYPE_ALIGN (TREE_TYPE (from)));
3906 else if (GET_MODE (to_rtx) == BLKmode)
3907 emit_block_move (to_rtx, value, expr_size (from),
3908 TYPE_ALIGN (TREE_TYPE (from)));
3909 else
3911 #ifdef POINTERS_EXTEND_UNSIGNED
3912 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3913 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3914 value = convert_memory_address (GET_MODE (to_rtx), value);
3915 #endif
3916 emit_move_insn (to_rtx, value);
3918 preserve_temp_slots (to_rtx);
3919 free_temp_slots ();
3920 pop_temp_slots ();
3921 return want_value ? to_rtx : NULL_RTX;
3924 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3925 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3927 if (to_rtx == 0)
3929 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3930 if (GET_CODE (to_rtx) == MEM)
3931 set_mem_alias_set (to_rtx, get_alias_set (to));
3934 /* Don't move directly into a return register. */
3935 if (TREE_CODE (to) == RESULT_DECL
3936 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3938 rtx temp;
3940 push_temp_slots ();
3941 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3943 if (GET_CODE (to_rtx) == PARALLEL)
3944 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3945 TYPE_ALIGN (TREE_TYPE (from)));
3946 else
3947 emit_move_insn (to_rtx, temp);
3949 preserve_temp_slots (to_rtx);
3950 free_temp_slots ();
3951 pop_temp_slots ();
3952 return want_value ? to_rtx : NULL_RTX;
3955 /* In case we are returning the contents of an object which overlaps
3956 the place the value is being stored, use a safe function when copying
3957 a value through a pointer into a structure value return block. */
3958 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3959 && current_function_returns_struct
3960 && !current_function_returns_pcc_struct)
3962 rtx from_rtx, size;
3964 push_temp_slots ();
3965 size = expr_size (from);
3966 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3967 EXPAND_MEMORY_USE_DONT);
3969 /* Copy the rights of the bitmap. */
3970 if (current_function_check_memory_usage)
3971 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3972 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3973 XEXP (from_rtx, 0), Pmode,
3974 convert_to_mode (TYPE_MODE (sizetype),
3975 size, TREE_UNSIGNED (sizetype)),
3976 TYPE_MODE (sizetype));
3978 #ifdef TARGET_MEM_FUNCTIONS
3979 emit_library_call (memmove_libfunc, LCT_NORMAL,
3980 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3981 XEXP (from_rtx, 0), Pmode,
3982 convert_to_mode (TYPE_MODE (sizetype),
3983 size, TREE_UNSIGNED (sizetype)),
3984 TYPE_MODE (sizetype));
3985 #else
3986 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3987 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3988 XEXP (to_rtx, 0), Pmode,
3989 convert_to_mode (TYPE_MODE (integer_type_node),
3990 size, TREE_UNSIGNED (integer_type_node)),
3991 TYPE_MODE (integer_type_node));
3992 #endif
3994 preserve_temp_slots (to_rtx);
3995 free_temp_slots ();
3996 pop_temp_slots ();
3997 return want_value ? to_rtx : NULL_RTX;
4000 /* Compute FROM and store the value in the rtx we got. */
4002 push_temp_slots ();
4003 result = store_expr (from, to_rtx, want_value);
4004 preserve_temp_slots (result);
4005 free_temp_slots ();
4006 pop_temp_slots ();
4007 return want_value ? result : NULL_RTX;
4010 /* Generate code for computing expression EXP,
4011 and storing the value into TARGET.
4012 TARGET may contain a QUEUED rtx.
4014 If WANT_VALUE is nonzero, return a copy of the value
4015 not in TARGET, so that we can be sure to use the proper
4016 value in a containing expression even if TARGET has something
4017 else stored in it. If possible, we copy the value through a pseudo
4018 and return that pseudo. Or, if the value is constant, we try to
4019 return the constant. In some cases, we return a pseudo
4020 copied *from* TARGET.
4022 If the mode is BLKmode then we may return TARGET itself.
4023 It turns out that in BLKmode it doesn't cause a problem,
4024 because C has no operators that could combine two different
4025 assignments into the same BLKmode object with different values
4026 with no sequence point. Will other languages need this to
4027 be more thorough?
4029 If WANT_VALUE is 0, we return NULL, to make sure
4030 to catch quickly any cases where the caller uses the value
4031 and fails to set WANT_VALUE. */
4033 rtx
4034 store_expr (exp, target, want_value)
4035 register tree exp;
4036 register rtx target;
4037 int want_value;
4039 register rtx temp;
4040 int dont_return_target = 0;
4041 int dont_store_target = 0;
4043 if (TREE_CODE (exp) == COMPOUND_EXPR)
4045 /* Perform first part of compound expression, then assign from second
4046 part. */
4047 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4048 emit_queue ();
4049 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4051 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4053 /* For conditional expression, get safe form of the target. Then
4054 test the condition, doing the appropriate assignment on either
4055 side. This avoids the creation of unnecessary temporaries.
4056 For non-BLKmode, it is more efficient not to do this. */
4058 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4060 emit_queue ();
4061 target = protect_from_queue (target, 1);
4063 do_pending_stack_adjust ();
4064 NO_DEFER_POP;
4065 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4066 start_cleanup_deferral ();
4067 store_expr (TREE_OPERAND (exp, 1), target, 0);
4068 end_cleanup_deferral ();
4069 emit_queue ();
4070 emit_jump_insn (gen_jump (lab2));
4071 emit_barrier ();
4072 emit_label (lab1);
4073 start_cleanup_deferral ();
4074 store_expr (TREE_OPERAND (exp, 2), target, 0);
4075 end_cleanup_deferral ();
4076 emit_queue ();
4077 emit_label (lab2);
4078 OK_DEFER_POP;
4080 return want_value ? target : NULL_RTX;
4082 else if (queued_subexp_p (target))
4083 /* If target contains a postincrement, let's not risk
4084 using it as the place to generate the rhs. */
4086 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4088 /* Expand EXP into a new pseudo. */
4089 temp = gen_reg_rtx (GET_MODE (target));
4090 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4092 else
4093 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4095 /* If target is volatile, ANSI requires accessing the value
4096 *from* the target, if it is accessed. So make that happen.
4097 In no case return the target itself. */
4098 if (! MEM_VOLATILE_P (target) && want_value)
4099 dont_return_target = 1;
4101 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4102 && GET_MODE (target) != BLKmode)
4103 /* If target is in memory and caller wants value in a register instead,
4104 arrange that. Pass TARGET as target for expand_expr so that,
4105 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4106 We know expand_expr will not use the target in that case.
4107 Don't do this if TARGET is volatile because we are supposed
4108 to write it and then read it. */
4110 temp = expand_expr (exp, target, GET_MODE (target), 0);
4111 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4113 /* If TEMP is already in the desired TARGET, only copy it from
4114 memory and don't store it there again. */
4115 if (temp == target
4116 || (rtx_equal_p (temp, target)
4117 && ! side_effects_p (temp) && ! side_effects_p (target)))
4118 dont_store_target = 1;
4119 temp = copy_to_reg (temp);
4121 dont_return_target = 1;
4123 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4124 /* If this is a scalar in a register that is stored in a wider mode
4125 than the declared mode, compute the result into its declared mode
4126 and then convert to the wider mode. Our value is the computed
4127 expression. */
4129 /* If we don't want a value, we can do the conversion inside EXP,
4130 which will often result in some optimizations. Do the conversion
4131 in two steps: first change the signedness, if needed, then
4132 the extend. But don't do this if the type of EXP is a subtype
4133 of something else since then the conversion might involve
4134 more than just converting modes. */
4135 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4136 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4138 if (TREE_UNSIGNED (TREE_TYPE (exp))
4139 != SUBREG_PROMOTED_UNSIGNED_P (target))
4140 exp
4141 = convert
4142 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4143 TREE_TYPE (exp)),
4144 exp);
4146 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4147 SUBREG_PROMOTED_UNSIGNED_P (target)),
4148 exp);
4151 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4153 /* If TEMP is a volatile MEM and we want a result value, make
4154 the access now so it gets done only once. Likewise if
4155 it contains TARGET. */
4156 if (GET_CODE (temp) == MEM && want_value
4157 && (MEM_VOLATILE_P (temp)
4158 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4159 temp = copy_to_reg (temp);
4161 /* If TEMP is a VOIDmode constant, use convert_modes to make
4162 sure that we properly convert it. */
4163 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4164 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4165 TYPE_MODE (TREE_TYPE (exp)), temp,
4166 SUBREG_PROMOTED_UNSIGNED_P (target));
4168 convert_move (SUBREG_REG (target), temp,
4169 SUBREG_PROMOTED_UNSIGNED_P (target));
4171 /* If we promoted a constant, change the mode back down to match
4172 target. Otherwise, the caller might get confused by a result whose
4173 mode is larger than expected. */
4175 if (want_value && GET_MODE (temp) != GET_MODE (target)
4176 && GET_MODE (temp) != VOIDmode)
4178 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4179 SUBREG_PROMOTED_VAR_P (temp) = 1;
4180 SUBREG_PROMOTED_UNSIGNED_P (temp)
4181 = SUBREG_PROMOTED_UNSIGNED_P (target);
4184 return want_value ? temp : NULL_RTX;
4186 else
4188 temp = expand_expr (exp, target, GET_MODE (target), 0);
4189 /* Return TARGET if it's a specified hardware register.
4190 If TARGET is a volatile mem ref, either return TARGET
4191 or return a reg copied *from* TARGET; ANSI requires this.
4193 Otherwise, if TEMP is not TARGET, return TEMP
4194 if it is constant (for efficiency),
4195 or if we really want the correct value. */
4196 if (!(target && GET_CODE (target) == REG
4197 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4198 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4199 && ! rtx_equal_p (temp, target)
4200 && (CONSTANT_P (temp) || want_value))
4201 dont_return_target = 1;
4204 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4205 the same as that of TARGET, adjust the constant. This is needed, for
4206 example, in case it is a CONST_DOUBLE and we want only a word-sized
4207 value. */
4208 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4209 && TREE_CODE (exp) != ERROR_MARK
4210 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4211 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4212 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4214 if (current_function_check_memory_usage
4215 && GET_CODE (target) == MEM
4216 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4218 in_check_memory_usage = 1;
4219 if (GET_CODE (temp) == MEM)
4220 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4221 VOIDmode, 3, XEXP (target, 0), Pmode,
4222 XEXP (temp, 0), Pmode,
4223 expr_size (exp), TYPE_MODE (sizetype));
4224 else
4225 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4226 VOIDmode, 3, XEXP (target, 0), Pmode,
4227 expr_size (exp), TYPE_MODE (sizetype),
4228 GEN_INT (MEMORY_USE_WO),
4229 TYPE_MODE (integer_type_node));
4230 in_check_memory_usage = 0;
4233 /* If value was not generated in the target, store it there.
4234 Convert the value to TARGET's type first if necessary.  */
4235 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4236 one or both of them are volatile memory refs, we have to distinguish
4237 two cases:
4238 - expand_expr has used TARGET. In this case, we must not generate
4239 another copy. This can be detected by TARGET being equal according
4240 to == .
4241 - expand_expr has not used TARGET - that means that the source just
4242 happens to have the same RTX form. Since temp will have been created
4243 by expand_expr, it will compare unequal according to == .
4244 We must generate a copy in this case, to reach the correct number
4245 of volatile memory references. */
4247 if ((! rtx_equal_p (temp, target)
4248 || (temp != target && (side_effects_p (temp)
4249 || side_effects_p (target))))
4250 && TREE_CODE (exp) != ERROR_MARK
4251 && ! dont_store_target)
4253 target = protect_from_queue (target, 1);
4254 if (GET_MODE (temp) != GET_MODE (target)
4255 && GET_MODE (temp) != VOIDmode)
4257 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4258 if (dont_return_target)
4260 /* In this case, we will return TEMP,
4261 so make sure it has the proper mode.
4262 But don't forget to store the value into TARGET. */
4263 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4264 emit_move_insn (target, temp);
4266 else
4267 convert_move (target, temp, unsignedp);
4270 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4272 /* Handle copying a string constant into an array.
4273 The string constant may be shorter than the array.
4274 So copy just the string's actual length, and clear the rest. */
4275 rtx size;
4276 rtx addr;
4278 /* Get the size of the data type of the string,
4279 which is actually the size of the target. */
4280 size = expr_size (exp);
4281 if (GET_CODE (size) == CONST_INT
4282 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4283 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4284 else
4286 /* Compute the size of the data to copy from the string. */
4287 tree copy_size
4288 = size_binop (MIN_EXPR,
4289 make_tree (sizetype, size),
4290 size_int (TREE_STRING_LENGTH (exp)));
4291 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4292 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4293 VOIDmode, 0);
4294 rtx label = 0;
4296 /* Copy that much. */
4297 emit_block_move (target, temp, copy_size_rtx,
4298 TYPE_ALIGN (TREE_TYPE (exp)));
4300 /* Figure out how much is left in TARGET that we have to clear.
4301 Do all calculations in ptr_mode. */
4303 addr = XEXP (target, 0);
4304 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4306 if (GET_CODE (copy_size_rtx) == CONST_INT)
4308 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4309 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
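/* INTVAL (copy_size_rtx) & - INTVAL (copy_size_rtx) isolates the lowest
   set bit of the copy size, i.e. the largest power of two dividing it;
   only that much alignment is still known for the remaining bytes.  */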
4310 align = MIN (align,
4311 (unsigned int) (BITS_PER_UNIT
4312 * (INTVAL (copy_size_rtx)
4313 & - INTVAL (copy_size_rtx))));
4315 else
4317 addr = force_reg (ptr_mode, addr);
4318 addr = expand_binop (ptr_mode, add_optab, addr,
4319 copy_size_rtx, NULL_RTX, 0,
4320 OPTAB_LIB_WIDEN);
4322 size = expand_binop (ptr_mode, sub_optab, size,
4323 copy_size_rtx, NULL_RTX, 0,
4324 OPTAB_LIB_WIDEN);
4326 align = BITS_PER_UNIT;
4327 label = gen_label_rtx ();
4328 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4329 GET_MODE (size), 0, 0, label);
4331 align = MIN (align, expr_align (copy_size));
4333 if (size != const0_rtx)
4335 rtx dest = gen_rtx_MEM (BLKmode, addr);
4337 MEM_COPY_ATTRIBUTES (dest, target);
4339 /* Be sure we can write on ADDR. */
4340 in_check_memory_usage = 1;
4341 if (current_function_check_memory_usage)
4342 emit_library_call (chkr_check_addr_libfunc,
4343 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4344 addr, Pmode,
4345 size, TYPE_MODE (sizetype),
4346 GEN_INT (MEMORY_USE_WO),
4347 TYPE_MODE (integer_type_node));
4348 in_check_memory_usage = 0;
4349 clear_storage (dest, size, align);
4352 if (label)
4353 emit_label (label);
4356 /* Handle calls that return values in multiple non-contiguous locations.
4357 The Irix 6 ABI has examples of this. */
4358 else if (GET_CODE (target) == PARALLEL)
4359 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4360 TYPE_ALIGN (TREE_TYPE (exp)));
4361 else if (GET_MODE (temp) == BLKmode)
4362 emit_block_move (target, temp, expr_size (exp),
4363 TYPE_ALIGN (TREE_TYPE (exp)));
4364 else
4365 emit_move_insn (target, temp);
4368 /* If we don't want a value, return NULL_RTX. */
4369 if (! want_value)
4370 return NULL_RTX;
4372 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4373 ??? The latter test doesn't seem to make sense. */
4374 else if (dont_return_target && GET_CODE (temp) != MEM)
4375 return temp;
4377 /* Return TARGET itself if it is a hard register. */
4378 else if (want_value && GET_MODE (target) != BLKmode
4379 && ! (GET_CODE (target) == REG
4380 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4381 return copy_to_reg (target);
4383 else
4384 return target;
4387 /* Return 1 if EXP just contains zeros. */
4389 static int
4390 is_zeros_p (exp)
4391 tree exp;
4393 tree elt;
4395 switch (TREE_CODE (exp))
4397 case CONVERT_EXPR:
4398 case NOP_EXPR:
4399 case NON_LVALUE_EXPR:
4400 return is_zeros_p (TREE_OPERAND (exp, 0));
4402 case INTEGER_CST:
4403 return integer_zerop (exp);
4405 case COMPLEX_CST:
4406 return
4407 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4409 case REAL_CST:
4410 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4412 case CONSTRUCTOR:
4413 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4414 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4415 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4416 if (! is_zeros_p (TREE_VALUE (elt)))
4417 return 0;
4419 return 1;
4421 default:
4422 return 0;
4426 /* Return 1 if EXP contains mostly (3/4) zeros. */
4428 static int
4429 mostly_zeros_p (exp)
4430 tree exp;
4432 if (TREE_CODE (exp) == CONSTRUCTOR)
4434 int elts = 0, zeros = 0;
4435 tree elt = CONSTRUCTOR_ELTS (exp);
4436 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4438 /* If there are no ranges of true bits, it is all zero. */
4439 return elt == NULL_TREE;
4441 for (; elt; elt = TREE_CHAIN (elt))
4443 /* We do not handle the case where the index is a RANGE_EXPR,
4444 so the statistic will be somewhat inaccurate.
4445 We do make a more accurate count in store_constructor itself,
4446 so since this function is only used for nested array elements,
4447 this should be close enough. */
4448 if (mostly_zeros_p (TREE_VALUE (elt)))
4449 zeros++;
4450 elts++;
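/* This is zeros / elts >= 3/4, rewritten in integer arithmetic.  */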
4453 return 4 * zeros >= 3 * elts;
4456 return is_zeros_p (exp);
4459 /* Helper function for store_constructor.
4460 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4461 TYPE is the type of the CONSTRUCTOR, not the element type.
4462 ALIGN and CLEARED are as for store_constructor.
4463 ALIAS_SET is the alias set to use for any stores.
4465 This provides a recursive shortcut back to store_constructor when it isn't
4466 necessary to go through store_field. This is so that we can pass through
4467 the cleared field to let store_constructor know that we may not have to
4468 clear a substructure if the outer structure has already been cleared. */
4470 static void
4471 store_constructor_field (target, bitsize, bitpos,
4472 mode, exp, type, align, cleared, alias_set)
4473 rtx target;
4474 unsigned HOST_WIDE_INT bitsize;
4475 HOST_WIDE_INT bitpos;
4476 enum machine_mode mode;
4477 tree exp, type;
4478 unsigned int align;
4479 int cleared;
4480 int alias_set;
4482 if (TREE_CODE (exp) == CONSTRUCTOR
4483 && bitpos % BITS_PER_UNIT == 0
4484 /* If we have a non-zero bitpos for a register target, then we just
4485 let store_field do the bitfield handling. This is unlikely to
4486 generate unnecessary clear instructions anyway.  */
4487 && (bitpos == 0 || GET_CODE (target) == MEM))
4489 if (bitpos != 0)
4490 target
4491 = adjust_address (target,
4492 GET_MODE (target) == BLKmode
4493 || 0 != (bitpos
4494 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4495 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4498 /* Show the alignment may no longer be what it was and update the alias
4499 set, if required. */
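/* BITPOS & - BITPOS is the largest power of two dividing BITPOS, so it
   bounds the alignment that can still be guaranteed after the offset.  */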
4500 if (bitpos != 0)
4501 align = MIN (align, (unsigned int) bitpos & - bitpos);
4502 if (GET_CODE (target) == MEM)
4503 set_mem_alias_set (target, alias_set);
4505 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4507 else
4508 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4509 int_size_in_bytes (type), alias_set);
4512 /* Store the value of constructor EXP into the rtx TARGET.
4513 TARGET is either a REG or a MEM.
4514 ALIGN is the maximum known alignment for TARGET.
4515 CLEARED is true if TARGET is known to have been zero'd.
4516 SIZE is the number of bytes of TARGET we are allowed to modify: this
4517 may not be the same as the size of EXP if we are assigning to a field
4518 which has been packed to exclude padding bits. */
4520 static void
4521 store_constructor (exp, target, align, cleared, size)
4522 tree exp;
4523 rtx target;
4524 unsigned int align;
4525 int cleared;
4526 HOST_WIDE_INT size;
4528 tree type = TREE_TYPE (exp);
4529 #ifdef WORD_REGISTER_OPERATIONS
4530 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4531 #endif
4533 /* We know our target cannot conflict, since safe_from_p has been called. */
4534 #if 0
4535 /* Don't try copying piece by piece into a hard register
4536 since that is vulnerable to being clobbered by EXP.
4537 Instead, construct in a pseudo register and then copy it all. */
4538 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4540 rtx temp = gen_reg_rtx (GET_MODE (target));
4541 store_constructor (exp, temp, align, cleared, size);
4542 emit_move_insn (target, temp);
4543 return;
4545 #endif
4547 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4548 || TREE_CODE (type) == QUAL_UNION_TYPE)
4550 register tree elt;
4552 /* Inform later passes that the whole union value is dead. */
4553 if ((TREE_CODE (type) == UNION_TYPE
4554 || TREE_CODE (type) == QUAL_UNION_TYPE)
4555 && ! cleared)
4557 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4559 /* If the constructor is empty, clear the union. */
4560 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4561 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4564 /* If we are building a static constructor into a register,
4565 set the initial value as zero so we can fold the value into
4566 a constant. But if more than one register is involved,
4567 this probably loses. */
4568 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4569 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4571 if (! cleared)
4572 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4574 cleared = 1;
4577 /* If the constructor has fewer fields than the structure
4578 or if we are initializing the structure to mostly zeros,
4579 clear the whole structure first. Don't do this if TARGET is a
4580 register whose mode size isn't equal to SIZE since clear_storage
4581 can't handle this case. */
4582 else if (size > 0
4583 && ((list_length (CONSTRUCTOR_ELTS (exp))
4584 != fields_length (type))
4585 || mostly_zeros_p (exp))
4586 && (GET_CODE (target) != REG
4587 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4589 if (! cleared)
4590 clear_storage (target, GEN_INT (size), align);
4592 cleared = 1;
4594 else if (! cleared)
4595 /* Inform later passes that the old value is dead. */
4596 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4598 /* Store each element of the constructor into
4599 the corresponding field of TARGET. */
4601 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4603 register tree field = TREE_PURPOSE (elt);
4604 #ifdef WORD_REGISTER_OPERATIONS
4605 tree value = TREE_VALUE (elt);
4606 #endif
4607 register enum machine_mode mode;
4608 HOST_WIDE_INT bitsize;
4609 HOST_WIDE_INT bitpos = 0;
4610 int unsignedp;
4611 tree offset;
4612 rtx to_rtx = target;
4614 /* Just ignore missing fields.
4615 We cleared the whole structure, above,
4616 if any fields are missing. */
4617 if (field == 0)
4618 continue;
4620 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4621 continue;
4623 if (host_integerp (DECL_SIZE (field), 1))
4624 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4625 else
4626 bitsize = -1;
4628 unsignedp = TREE_UNSIGNED (field);
4629 mode = DECL_MODE (field);
4630 if (DECL_BIT_FIELD (field))
4631 mode = VOIDmode;
4633 offset = DECL_FIELD_OFFSET (field);
4634 if (host_integerp (offset, 0)
4635 && host_integerp (bit_position (field), 0))
4637 bitpos = int_bit_position (field);
4638 offset = 0;
4640 else
4641 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4643 if (offset)
4645 rtx offset_rtx;
4647 if (contains_placeholder_p (offset))
4648 offset = build (WITH_RECORD_EXPR, sizetype,
4649 offset, make_tree (TREE_TYPE (exp), target));
4651 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4652 if (GET_CODE (to_rtx) != MEM)
4653 abort ();
4655 if (GET_MODE (offset_rtx) != ptr_mode)
4657 #ifdef POINTERS_EXTEND_UNSIGNED
4658 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4659 #else
4660 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4661 #endif
4664 to_rtx
4665 = change_address (to_rtx, VOIDmode,
4666 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4667 force_reg (ptr_mode,
4668 offset_rtx)));
4669 align = DECL_OFFSET_ALIGN (field);
4672 if (TREE_READONLY (field))
4674 if (GET_CODE (to_rtx) == MEM)
4675 to_rtx = copy_rtx (to_rtx);
4677 RTX_UNCHANGING_P (to_rtx) = 1;
4680 #ifdef WORD_REGISTER_OPERATIONS
4681 /* If this initializes a field that is smaller than a word, at the
4682 start of a word, try to widen it to a full word.
4683 This special case allows us to output C++ member function
4684 initializations in a form that the optimizers can understand. */
4685 if (GET_CODE (target) == REG
4686 && bitsize < BITS_PER_WORD
4687 && bitpos % BITS_PER_WORD == 0
4688 && GET_MODE_CLASS (mode) == MODE_INT
4689 && TREE_CODE (value) == INTEGER_CST
4690 && exp_size >= 0
4691 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4693 tree type = TREE_TYPE (value);
4694 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4696 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4697 value = convert (type, value);
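/* On big-endian machines the field occupies the most significant end of
   the word, so shift the value up into place before storing the word.  */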
4699 if (BYTES_BIG_ENDIAN)
4700 value
4701 = fold (build (LSHIFT_EXPR, type, value,
4702 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4703 bitsize = BITS_PER_WORD;
4704 mode = word_mode;
4706 #endif
4707 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4708 TREE_VALUE (elt), type, align, cleared,
4709 (DECL_NONADDRESSABLE_P (field)
4710 && GET_CODE (to_rtx) == MEM)
4711 ? MEM_ALIAS_SET (to_rtx)
4712 : get_alias_set (TREE_TYPE (field)));
4715 else if (TREE_CODE (type) == ARRAY_TYPE)
4717 register tree elt;
4718 register int i;
4719 int need_to_clear;
4720 tree domain = TYPE_DOMAIN (type);
4721 tree elttype = TREE_TYPE (type);
4722 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4723 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4724 HOST_WIDE_INT minelt = 0;
4725 HOST_WIDE_INT maxelt = 0;
4727 /* If we have constant bounds for the range of the type, get them. */
4728 if (const_bounds_p)
4730 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4731 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4734 /* If the constructor has fewer elements than the array,
4735 clear the whole array first. Similarly if this is
4736 a static constructor of a non-BLKmode object.  */
4737 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4738 need_to_clear = 1;
4739 else
4741 HOST_WIDE_INT count = 0, zero_count = 0;
4742 need_to_clear = ! const_bounds_p;
4744 /* This loop is a more accurate version of the loop in
4745 mostly_zeros_p (it handles RANGE_EXPR in an index).
4746 It is also needed to check for missing elements. */
4747 for (elt = CONSTRUCTOR_ELTS (exp);
4748 elt != NULL_TREE && ! need_to_clear;
4749 elt = TREE_CHAIN (elt))
4751 tree index = TREE_PURPOSE (elt);
4752 HOST_WIDE_INT this_node_count;
4754 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4756 tree lo_index = TREE_OPERAND (index, 0);
4757 tree hi_index = TREE_OPERAND (index, 1);
4759 if (! host_integerp (lo_index, 1)
4760 || ! host_integerp (hi_index, 1))
4762 need_to_clear = 1;
4763 break;
4766 this_node_count = (tree_low_cst (hi_index, 1)
4767 - tree_low_cst (lo_index, 1) + 1);
4769 else
4770 this_node_count = 1;
4772 count += this_node_count;
4773 if (mostly_zeros_p (TREE_VALUE (elt)))
4774 zero_count += this_node_count;
4777 /* Clear the entire array first if there are any missing elements,
4778 or if the incidence of zero elements is >= 75%. */
4779 if (! need_to_clear
4780 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4781 need_to_clear = 1;
4784 if (need_to_clear && size > 0)
4786 if (! cleared)
4787 clear_storage (target, GEN_INT (size), align);
4788 cleared = 1;
4790 else if (REG_P (target))
4791 /* Inform later passes that the old value is dead. */
4792 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4794 /* Store each element of the constructor into
4795 the corresponding element of TARGET, determined
4796 by counting the elements. */
4797 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4798 elt;
4799 elt = TREE_CHAIN (elt), i++)
4801 register enum machine_mode mode;
4802 HOST_WIDE_INT bitsize;
4803 HOST_WIDE_INT bitpos;
4804 int unsignedp;
4805 tree value = TREE_VALUE (elt);
4806 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4807 tree index = TREE_PURPOSE (elt);
4808 rtx xtarget = target;
4810 if (cleared && is_zeros_p (value))
4811 continue;
4813 unsignedp = TREE_UNSIGNED (elttype);
4814 mode = TYPE_MODE (elttype);
4815 if (mode == BLKmode)
4816 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4817 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4818 : -1);
4819 else
4820 bitsize = GET_MODE_BITSIZE (mode);
4822 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4824 tree lo_index = TREE_OPERAND (index, 0);
4825 tree hi_index = TREE_OPERAND (index, 1);
4826 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4827 struct nesting *loop;
4828 HOST_WIDE_INT lo, hi, count;
4829 tree position;
4831 /* If the range is constant and "small", unroll the loop. */
4832 if (const_bounds_p
4833 && host_integerp (lo_index, 0)
4834 && host_integerp (hi_index, 0)
4835 && (lo = tree_low_cst (lo_index, 0),
4836 hi = tree_low_cst (hi_index, 0),
4837 count = hi - lo + 1,
4838 (GET_CODE (target) != MEM
4839 || count <= 2
4840 || (host_integerp (TYPE_SIZE (elttype), 1)
4841 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4842 <= 40 * 8)))))
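/* Rebase the constant range to a zero origin so LO and HI directly
   index elements of TARGET.  */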
4844 lo -= minelt; hi -= minelt;
4845 for (; lo <= hi; lo++)
4847 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4848 store_constructor_field
4849 (target, bitsize, bitpos, mode, value, type, align,
4850 cleared,
4851 TYPE_NONALIASED_COMPONENT (type)
4852 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4855 else
4857 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4858 loop_top = gen_label_rtx ();
4859 loop_end = gen_label_rtx ();
4861 unsignedp = TREE_UNSIGNED (domain);
4863 index = build_decl (VAR_DECL, NULL_TREE, domain);
4865 index_r
4866 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4867 &unsignedp, 0));
4868 SET_DECL_RTL (index, index_r);
4869 if (TREE_CODE (value) == SAVE_EXPR
4870 && SAVE_EXPR_RTL (value) == 0)
4872 /* Make sure value gets expanded once before the
4873 loop. */
4874 expand_expr (value, const0_rtx, VOIDmode, 0);
4875 emit_queue ();
4877 store_expr (lo_index, index_r, 0);
4878 loop = expand_start_loop (0);
4880 /* Assign value to element index. */
4881 position
4882 = convert (ssizetype,
4883 fold (build (MINUS_EXPR, TREE_TYPE (index),
4884 index, TYPE_MIN_VALUE (domain))));
4885 position = size_binop (MULT_EXPR, position,
4886 convert (ssizetype,
4887 TYPE_SIZE_UNIT (elttype)));
4889 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4890 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4891 xtarget = change_address (target, mode, addr);
4892 if (TREE_CODE (value) == CONSTRUCTOR)
4893 store_constructor (value, xtarget, align, cleared,
4894 bitsize / BITS_PER_UNIT);
4895 else
4896 store_expr (value, xtarget, 0);
4898 expand_exit_loop_if_false (loop,
4899 build (LT_EXPR, integer_type_node,
4900 index, hi_index));
4902 expand_increment (build (PREINCREMENT_EXPR,
4903 TREE_TYPE (index),
4904 index, integer_one_node), 0, 0);
4905 expand_end_loop ();
4906 emit_label (loop_end);
4909 else if ((index != 0 && ! host_integerp (index, 0))
4910 || ! host_integerp (TYPE_SIZE (elttype), 1))
4912 rtx pos_rtx, addr;
4913 tree position;
4915 if (index == 0)
4916 index = ssize_int (1);
4918 if (minelt)
4919 index = convert (ssizetype,
4920 fold (build (MINUS_EXPR, index,
4921 TYPE_MIN_VALUE (domain))));
4923 position = size_binop (MULT_EXPR, index,
4924 convert (ssizetype,
4925 TYPE_SIZE_UNIT (elttype)));
4926 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4927 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4928 xtarget = change_address (target, mode, addr);
4929 store_expr (value, xtarget, 0);
4931 else
4933 if (index != 0)
4934 bitpos = ((tree_low_cst (index, 0) - minelt)
4935 * tree_low_cst (TYPE_SIZE (elttype), 1));
4936 else
4937 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4939 store_constructor_field (target, bitsize, bitpos, mode, value,
4940 type, align, cleared,
4941 TYPE_NONALIASED_COMPONENT (type)
4942 && GET_CODE (target) == MEM
4943 ? MEM_ALIAS_SET (target) :
4944 get_alias_set (elttype));
4950 /* Set constructor assignments. */
4951 else if (TREE_CODE (type) == SET_TYPE)
4953 tree elt = CONSTRUCTOR_ELTS (exp);
4954 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4955 tree domain = TYPE_DOMAIN (type);
4956 tree domain_min, domain_max, bitlength;
4958 /* The default implementation strategy is to extract the constant
4959 parts of the constructor, use that to initialize the target,
4960 and then "or" in whatever non-constant ranges we need in addition.
4962 If a large set is all zero or all ones, it is
4963 probably better to set it using memset (if available) or bzero.
4964 Also, if a large set has just a single range, it may also be
4965 better to first clear the whole set (using
4966 bzero/memset), and then set the bits we want.  */
4968 /* Check for all zeros. */
4969 if (elt == NULL_TREE && size > 0)
4971 if (!cleared)
4972 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4973 return;
4976 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4977 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4978 bitlength = size_binop (PLUS_EXPR,
4979 size_diffop (domain_max, domain_min),
4980 ssize_int (1));
4982 nbits = tree_low_cst (bitlength, 1);
4984 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4985 are "complicated" (more than one range), initialize (the
4986 constant parts) by copying from a constant. */
4987 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4988 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4990 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4991 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4992 char *bit_buffer = (char *) alloca (nbits);
4993 HOST_WIDE_INT word = 0;
4994 unsigned int bit_pos = 0;
4995 unsigned int ibit = 0;
4996 unsigned int offset = 0; /* In bytes from beginning of set. */
4998 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4999 for (;;)
5001 if (bit_buffer[ibit])
5003 if (BYTES_BIG_ENDIAN)
5004 word |= (1 << (set_word_size - 1 - bit_pos));
5005 else
5006 word |= 1 << bit_pos;
5009 bit_pos++; ibit++;
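/* Flush the accumulated word to the target whenever it fills up, and
   once more after the last bit has been processed.  */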
5010 if (bit_pos >= set_word_size || ibit == nbits)
5012 if (word != 0 || ! cleared)
5014 rtx datum = GEN_INT (word);
5015 rtx to_rtx;
5017 /* The assumption here is that it is safe to use
5018 XEXP if the set is multi-word, but not if
5019 it's single-word. */
5020 if (GET_CODE (target) == MEM)
5021 to_rtx = adjust_address (target, mode, offset);
5022 else if (offset == 0)
5023 to_rtx = target;
5024 else
5025 abort ();
5026 emit_move_insn (to_rtx, datum);
5029 if (ibit == nbits)
5030 break;
5031 word = 0;
5032 bit_pos = 0;
5033 offset += set_word_size / BITS_PER_UNIT;
5037 else if (!cleared)
5038 /* Don't bother clearing storage if the set is all ones. */
5039 if (TREE_CHAIN (elt) != NULL_TREE
5040 || (TREE_PURPOSE (elt) == NULL_TREE
5041 ? nbits != 1
5042 : ( ! host_integerp (TREE_VALUE (elt), 0)
5043 || ! host_integerp (TREE_PURPOSE (elt), 0)
5044 || (tree_low_cst (TREE_VALUE (elt), 0)
5045 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5046 != (HOST_WIDE_INT) nbits))))
5047 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5049 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5051 /* Start of range of element or NULL. */
5052 tree startbit = TREE_PURPOSE (elt);
5053 /* End of range of element, or element value. */
5054 tree endbit = TREE_VALUE (elt);
5055 #ifdef TARGET_MEM_FUNCTIONS
5056 HOST_WIDE_INT startb, endb;
5057 #endif
5058 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5060 bitlength_rtx = expand_expr (bitlength,
5061 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5063 /* Handle non-range tuple element like [ expr ]. */
5064 if (startbit == NULL_TREE)
5066 startbit = save_expr (endbit);
5067 endbit = startbit;
5070 startbit = convert (sizetype, startbit);
5071 endbit = convert (sizetype, endbit);
5072 if (! integer_zerop (domain_min))
5074 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5075 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5077 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5078 EXPAND_CONST_ADDRESS);
5079 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5080 EXPAND_CONST_ADDRESS);
5082 if (REG_P (target))
5084 targetx
5085 = assign_temp
5086 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5087 TYPE_QUAL_CONST)),
5088 0, 1, 1);
5089 emit_move_insn (targetx, target);
5092 else if (GET_CODE (target) == MEM)
5093 targetx = target;
5094 else
5095 abort ();
5097 #ifdef TARGET_MEM_FUNCTIONS
5098 /* Optimization: If startbit and endbit are
5099 constants divisible by BITS_PER_UNIT,
5100 call memset instead. */
5101 if (TREE_CODE (startbit) == INTEGER_CST
5102 && TREE_CODE (endbit) == INTEGER_CST
5103 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5104 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5106 emit_library_call (memset_libfunc, LCT_NORMAL,
5107 VOIDmode, 3,
5108 plus_constant (XEXP (targetx, 0),
5109 startb / BITS_PER_UNIT),
5110 Pmode,
5111 constm1_rtx, TYPE_MODE (integer_type_node),
5112 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5113 TYPE_MODE (sizetype));
5115 else
5116 #endif
5117 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5118 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5119 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5120 startbit_rtx, TYPE_MODE (sizetype),
5121 endbit_rtx, TYPE_MODE (sizetype));
5123 if (REG_P (target))
5124 emit_move_insn (target, targetx);
5128 else
5129 abort ();
5132 /* Store the value of EXP (an expression tree)
5133 into a subfield of TARGET which has mode MODE and occupies
5134 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5135 If MODE is VOIDmode, it means that we are storing into a bit-field.
5137 If VALUE_MODE is VOIDmode, return nothing in particular.
5138 UNSIGNEDP is not used in this case.
5140 Otherwise, return an rtx for the value stored. This rtx
5141 has mode VALUE_MODE if that is convenient to do.
5142 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5144 ALIGN is the alignment that TARGET is known to have.
5145 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5147 ALIAS_SET is the alias set for the destination. This value will
5148 (in general) be different from that for TARGET, since TARGET is a
5149 reference to the containing structure. */
5151 static rtx
5152 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5153 unsignedp, align, total_size, alias_set)
5154 rtx target;
5155 HOST_WIDE_INT bitsize;
5156 HOST_WIDE_INT bitpos;
5157 enum machine_mode mode;
5158 tree exp;
5159 enum machine_mode value_mode;
5160 int unsignedp;
5161 unsigned int align;
5162 HOST_WIDE_INT total_size;
5163 int alias_set;
5165 HOST_WIDE_INT width_mask = 0;
5167 if (TREE_CODE (exp) == ERROR_MARK)
5168 return const0_rtx;
5170 /* If we have nothing to store, do nothing unless the expression has
5171 side-effects. */
5172 if (bitsize == 0)
5173 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5175 if (bitsize < HOST_BITS_PER_WIDE_INT)
5176 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5178 /* If we are storing into an unaligned field of an aligned union that is
5179 in a register, we may have the mode of TARGET being an integer mode but
5180 MODE == BLKmode. In that case, get an aligned object whose size and
5181 alignment are the same as TARGET and store TARGET into it (we can avoid
5182 the store if the field being stored is the entire width of TARGET). Then
5183 call ourselves recursively to store the field into a BLKmode version of
5184 that object. Finally, load from the object into TARGET. This is not
5185 very efficient in general, but should only be slightly more expensive
5186 than the otherwise-required unaligned accesses. Perhaps this can be
5187 cleaned up later. */
5189 if (mode == BLKmode
5190 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5192 rtx object
5193 = assign_temp
5194 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5195 TYPE_QUAL_CONST),
5196 0, 1, 1);
5197 rtx blk_object = copy_rtx (object);
5199 PUT_MODE (blk_object, BLKmode);
5201 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5202 emit_move_insn (object, target);
5204 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5205 align, total_size, alias_set);
5207 /* Even though we aren't returning target, we need to
5208 give it the updated value. */
5209 emit_move_insn (target, object);
5211 return blk_object;
5214 if (GET_CODE (target) == CONCAT)
5216 /* We're storing into a struct containing a single __complex. */
5218 if (bitpos != 0)
5219 abort ();
5220 return store_expr (exp, target, 0);
5223 /* If the structure is in a register or if the component
5224 is a bit field, we cannot use addressing to access it.
5225 Use bit-field techniques or SUBREG to store in it. */
5227 if (mode == VOIDmode
5228 || (mode != BLKmode && ! direct_store[(int) mode]
5229 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5230 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5231 || GET_CODE (target) == REG
5232 || GET_CODE (target) == SUBREG
5233 /* If the field isn't aligned enough to store as an ordinary memref,
5234 store it as a bit field. */
5235 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5236 && (align < GET_MODE_ALIGNMENT (mode)
5237 || bitpos % GET_MODE_ALIGNMENT (mode)))
5238 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5239 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5240 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5241 /* If the RHS and field are a constant size and the size of the
5242 RHS isn't the same size as the bitfield, we must use bitfield
5243 operations. */
5244 || (bitsize >= 0
5245 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5246 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5248 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5250 /* If BITSIZE is narrower than the size of the type of EXP
5251 we will be narrowing TEMP. Normally, what's wanted are the
5252 low-order bits. However, if EXP's type is a record and this is
5253 big-endian machine, we want the upper BITSIZE bits. */
5254 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5255 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5256 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5257 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5258 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5259 - bitsize),
5260 temp, 1);
5262 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5263 MODE. */
5264 if (mode != VOIDmode && mode != BLKmode
5265 && mode != TYPE_MODE (TREE_TYPE (exp)))
5266 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5268 /* If the modes of TARGET and TEMP are both BLKmode, both
5269 must be in memory and BITPOS must be aligned on a byte
5270 boundary. If so, we simply do a block copy. */
5271 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5273 unsigned int exp_align = expr_align (exp);
5275 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5276 || bitpos % BITS_PER_UNIT != 0)
5277 abort ();
5279 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5281 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5282 align = MIN (exp_align, align);
5284 /* Find an alignment that is consistent with the bit position. */
5285 while ((bitpos % align) != 0)
5286 align >>= 1;
5288 emit_block_move (target, temp,
5289 bitsize == -1 ? expr_size (exp)
5290 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5291 / BITS_PER_UNIT),
5292 align);
5294 return value_mode == VOIDmode ? const0_rtx : target;
5297 /* Store the value in the bitfield. */
5298 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5299 if (value_mode != VOIDmode)
5301 /* The caller wants an rtx for the value. */
5302 /* If possible, avoid refetching from the bitfield itself. */
5303 if (width_mask != 0
5304 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5306 tree count;
5307 enum machine_mode tmode;
5309 if (unsignedp)
5310 return expand_and (temp,
5311 GEN_INT
5312 (trunc_int_for_mode
5313 (width_mask,
5314 GET_MODE (temp) == VOIDmode
5315 ? value_mode
5316 : GET_MODE (temp))), NULL_RTX);
5317 tmode = GET_MODE (temp);
5318 if (tmode == VOIDmode)
5319 tmode = value_mode;
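/* For a signed field, shift left so the field's top bit becomes the
   sign bit of TMODE, then shift back; the arithmetic right shift
   sign-extends the BITSIZE-bit value.  */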
5320 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5321 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5322 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5324 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5325 NULL_RTX, value_mode, 0, align,
5326 total_size);
5328 return const0_rtx;
5330 else
5332 rtx addr = XEXP (target, 0);
5333 rtx to_rtx;
5335 /* If a value is wanted, it must be the lhs;
5336 so make the address stable for multiple use. */
5338 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5339 && ! CONSTANT_ADDRESS_P (addr)
5340 /* A frame-pointer reference is already stable. */
5341 && ! (GET_CODE (addr) == PLUS
5342 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5343 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5344 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5345 target = replace_equiv_address (target, copy_to_reg (addr));
5347 /* Now build a reference to just the desired component. */
5349 to_rtx = copy_rtx (adjust_address (target, mode,
5350 bitpos / BITS_PER_UNIT));
5352 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5353 /* If the address of the structure varies, then it might be on
5354 the stack. And, stack slots may be shared across scopes.
5355 So, two different structures, of different types, can end up
5356 at the same location. We will give the structures alias set
5357 zero; here we must be careful not to give non-zero alias sets
5358 to their fields. */
5359 set_mem_alias_set (to_rtx,
5360 rtx_varies_p (addr, /*for_alias=*/0)
5361 ? 0 : alias_set);
5363 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5367 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5368 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5369 codes and find the ultimate containing object, which we return.
5371 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5372 bit position, and *PUNSIGNEDP to the signedness of the field.
5373 If the position of the field is variable, we store a tree
5374 giving the variable offset (in units) in *POFFSET.
5375 This offset is in addition to the bit position.
5376 If the position is not variable, we store 0 in *POFFSET.
5377 We set *PALIGNMENT to the alignment of the address that will be
5378 computed. This is the alignment of the thing we return if *POFFSET
5379 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5381 If any of the extraction expressions is volatile,
5382 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5384 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5385 is a mode that can be used to access the field. In that case, *PBITSIZE
5386 is redundant.
5388 If the field describes a variable-sized object, *PMODE is set to
5389 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5390 this case, but the address of the object can be found. */
5392 tree
5393 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5394 punsignedp, pvolatilep, palignment)
5395 tree exp;
5396 HOST_WIDE_INT *pbitsize;
5397 HOST_WIDE_INT *pbitpos;
5398 tree *poffset;
5399 enum machine_mode *pmode;
5400 int *punsignedp;
5401 int *pvolatilep;
5402 unsigned int *palignment;
5404 tree size_tree = 0;
5405 enum machine_mode mode = VOIDmode;
5406 tree offset = size_zero_node;
5407 tree bit_offset = bitsize_zero_node;
5408 unsigned int alignment = BIGGEST_ALIGNMENT;
5409 tree tem;
5411 /* First get the mode, signedness, and size. We do this from just the
5412 outermost expression. */
5413 if (TREE_CODE (exp) == COMPONENT_REF)
5415 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5416 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5417 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5419 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5421 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5423 size_tree = TREE_OPERAND (exp, 1);
5424 *punsignedp = TREE_UNSIGNED (exp);
5426 else
5428 mode = TYPE_MODE (TREE_TYPE (exp));
5429 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5431 if (mode == BLKmode)
5432 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5433 else
5434 *pbitsize = GET_MODE_BITSIZE (mode);
5437 if (size_tree != 0)
5439 if (! host_integerp (size_tree, 1))
5440 mode = BLKmode, *pbitsize = -1;
5441 else
5442 *pbitsize = tree_low_cst (size_tree, 1);
5445 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5446 and find the ultimate containing object. */
5447 while (1)
5449 if (TREE_CODE (exp) == BIT_FIELD_REF)
5450 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5451 else if (TREE_CODE (exp) == COMPONENT_REF)
5453 tree field = TREE_OPERAND (exp, 1);
5454 tree this_offset = DECL_FIELD_OFFSET (field);
5456 /* If this field hasn't been filled in yet, don't go
5457 past it. This should only happen when folding expressions
5458 made during type construction. */
5459 if (this_offset == 0)
5460 break;
5461 else if (! TREE_CONSTANT (this_offset)
5462 && contains_placeholder_p (this_offset))
5463 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5465 offset = size_binop (PLUS_EXPR, offset, this_offset);
5466 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5467 DECL_FIELD_BIT_OFFSET (field));
5469 if (! host_integerp (offset, 0))
5470 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5473 else if (TREE_CODE (exp) == ARRAY_REF
5474 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5476 tree index = TREE_OPERAND (exp, 1);
5477 tree array = TREE_OPERAND (exp, 0);
5478 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5479 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5480 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5482 /* We assume all arrays have sizes that are a multiple of a byte.
5483 First subtract the lower bound, if any, in the type of the
5484 index, then convert to sizetype and multiply by the size of the
5485 array element. */
5486 if (low_bound != 0 && ! integer_zerop (low_bound))
5487 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5488 index, low_bound));
5490 /* If the index has a self-referential type, pass it to a
5491 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5492 component to one. */
5493 if (! TREE_CONSTANT (index)
5494 && contains_placeholder_p (index))
5495 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5496 if (! TREE_CONSTANT (unit_size)
5497 && contains_placeholder_p (unit_size))
5498 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5500 offset = size_binop (PLUS_EXPR, offset,
5501 size_binop (MULT_EXPR,
5502 convert (sizetype, index),
5503 unit_size));
5506 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5507 && ! ((TREE_CODE (exp) == NOP_EXPR
5508 || TREE_CODE (exp) == CONVERT_EXPR)
5509 && (TYPE_MODE (TREE_TYPE (exp))
5510 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5511 break;
5513 /* If any reference in the chain is volatile, the effect is volatile. */
5514 if (TREE_THIS_VOLATILE (exp))
5515 *pvolatilep = 1;
5517 /* If the offset is non-constant already, then we can't assume any
5518 alignment more than the alignment here. */
5519 if (! TREE_CONSTANT (offset))
5520 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5522 exp = TREE_OPERAND (exp, 0);
5525 if (DECL_P (exp))
5526 alignment = MIN (alignment, DECL_ALIGN (exp));
5527 else if (TREE_TYPE (exp) != 0)
5528 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5530 /* If OFFSET is constant, see if we can return the whole thing as a
5531 constant bit position. Otherwise, split it up. */
5532 if (host_integerp (offset, 0)
5533 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5534 bitsize_unit_node))
5535 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5536 && host_integerp (tem, 0))
5537 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5538 else
5539 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5541 *pmode = mode;
5542 *palignment = alignment;
5543 return exp;
5546 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5548 static enum memory_use_mode
5549 get_memory_usage_from_modifier (modifier)
5550 enum expand_modifier modifier;
5552 switch (modifier)
5554 case EXPAND_NORMAL:
5555 case EXPAND_SUM:
5556 return MEMORY_USE_RO;
5557 break;
5558 case EXPAND_MEMORY_USE_WO:
5559 return MEMORY_USE_WO;
5560 break;
5561 case EXPAND_MEMORY_USE_RW:
5562 return MEMORY_USE_RW;
5563 break;
5564 case EXPAND_MEMORY_USE_DONT:
5565 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5566 MEMORY_USE_DONT, because they are modifiers to a call of
5567 expand_expr in the ADDR_EXPR case of expand_expr. */
5568 case EXPAND_CONST_ADDRESS:
5569 case EXPAND_INITIALIZER:
5570 return MEMORY_USE_DONT;
5571 case EXPAND_MEMORY_USE_BAD:
5572 default:
5573 abort ();
5577 /* Given an rtx VALUE that may contain additions and multiplications, return
5578 an equivalent value that just refers to a register, memory, or constant.
5579 This is done by generating instructions to perform the arithmetic and
5580 returning a pseudo-register containing the value.
5582 The returned value may be a REG, SUBREG, MEM or constant. */
5585 force_operand (value, target)
5586 rtx value, target;
5588 register optab binoptab = 0;
5589 /* Use a temporary to force order of execution of calls to
5590 `force_operand'. */
5591 rtx tmp;
5592 register rtx op2;
5593 /* Use subtarget as the target for operand 0 of a binary operation. */
5594 register rtx subtarget = get_subtarget (target);
5596 /* Check for a PIC address load. */
5597 if (flag_pic
5598 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5599 && XEXP (value, 0) == pic_offset_table_rtx
5600 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5601 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5602 || GET_CODE (XEXP (value, 1)) == CONST))
5604 if (!subtarget)
5605 subtarget = gen_reg_rtx (GET_MODE (value));
5606 emit_move_insn (subtarget, value);
5607 return subtarget;
5610 if (GET_CODE (value) == PLUS)
5611 binoptab = add_optab;
5612 else if (GET_CODE (value) == MINUS)
5613 binoptab = sub_optab;
5614 else if (GET_CODE (value) == MULT)
5616 op2 = XEXP (value, 1);
5617 if (!CONSTANT_P (op2)
5618 && !(GET_CODE (op2) == REG && op2 != subtarget))
5619 subtarget = 0;
5620 tmp = force_operand (XEXP (value, 0), subtarget);
5621 return expand_mult (GET_MODE (value), tmp,
5622 force_operand (op2, NULL_RTX),
5623 target, 1);
5626 if (binoptab)
5628 op2 = XEXP (value, 1);
5629 if (!CONSTANT_P (op2)
5630 && !(GET_CODE (op2) == REG && op2 != subtarget))
5631 subtarget = 0;
5632 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5634 binoptab = add_optab;
5635 op2 = negate_rtx (GET_MODE (value), op2);
5638 /* Check for an addition with OP2 a constant integer and our first
5639 operand a PLUS of a virtual register and something else. In that
5640 case, we want to emit the sum of the virtual register and the
5641 constant first and then add the other value. This allows virtual
5642 register instantiation to simply modify the constant rather than
5643 creating another one around this addition. */
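/* For instance (illustrative only, register numbers made up): with
   VALUE = (plus (plus (reg virtual-stack-vars) (reg 117)) (const_int 4))
   we first form virtual-stack-vars + 4, letting instantiation later
   fold the 4 into the frame offset, and only then add (reg 117). */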
5644 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5645 && GET_CODE (XEXP (value, 0)) == PLUS
5646 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5647 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5648 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5650 rtx temp = expand_binop (GET_MODE (value), binoptab,
5651 XEXP (XEXP (value, 0), 0), op2,
5652 subtarget, 0, OPTAB_LIB_WIDEN);
5653 return expand_binop (GET_MODE (value), binoptab, temp,
5654 force_operand (XEXP (XEXP (value, 0), 1), 0),
5655 target, 0, OPTAB_LIB_WIDEN);
5658 tmp = force_operand (XEXP (value, 0), subtarget);
5659 return expand_binop (GET_MODE (value), binoptab, tmp,
5660 force_operand (op2, NULL_RTX),
5661 target, 0, OPTAB_LIB_WIDEN);
5662 /* We give UNSIGNEDP = 0 to expand_binop
5663 because the only operations we are expanding here are signed ones. */
5665 return value;
5668 /* Subroutine of expand_expr:
5669 save the non-copied parts (LIST) of an expr (LHS), and return a list
5670 which can restore these values to their previous values,
5671 should something modify their storage. */
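/* Concretely (a sketch of the data structure only): each element of the
   chain built below is a TREE_LIST whose TREE_PURPOSE is a COMPONENT_REF
   of LHS naming one non-copied part and whose TREE_VALUE is an RTL_EXPR
   wrapping a temporary into which the part's current contents have been
   stored. */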
5673 static tree
5674 save_noncopied_parts (lhs, list)
5675 tree lhs;
5676 tree list;
5678 tree tail;
5679 tree parts = 0;
5681 for (tail = list; tail; tail = TREE_CHAIN (tail))
5682 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5683 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5684 else
5686 tree part = TREE_VALUE (tail);
5687 tree part_type = TREE_TYPE (part);
5688 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5689 rtx target
5690 = assign_temp (build_qualified_type (part_type,
5691 (TYPE_QUALS (part_type)
5692 | TYPE_QUAL_CONST)),
5693 0, 1, 1);
5695 parts = tree_cons (to_be_saved,
5696 build (RTL_EXPR, part_type, NULL_TREE,
5697 (tree) validize_mem (target)),
5698 parts);
5699 store_expr (TREE_PURPOSE (parts),
5700 RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5702 return parts;
5705 /* Subroutine of expand_expr:
5706 record the non-copied parts (LIST) of an expr (LHS), and return a list
5707 which specifies the initial values of these parts. */
5709 static tree
5710 init_noncopied_parts (lhs, list)
5711 tree lhs;
5712 tree list;
5714 tree tail;
5715 tree parts = 0;
5717 for (tail = list; tail; tail = TREE_CHAIN (tail))
5718 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5719 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5720 else if (TREE_PURPOSE (tail))
5722 tree part = TREE_VALUE (tail);
5723 tree part_type = TREE_TYPE (part);
5724 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5725 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5727 return parts;
5730 /* Subroutine of expand_expr: return nonzero iff there is no way that
5731 EXP can reference X, which is being modified. TOP_P is nonzero if this
5732 call is going to be used to determine whether we need a temporary
5733 for EXP, as opposed to a recursive call to this function.
5735 It is always safe for this routine to return zero since it merely
5736 searches for optimization opportunities. */
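/* To give a flavour of the contract (a sketch only): if X is a MEM being
   assigned to and EXP contains a CALL_EXPR, we conservatively return 0,
   since the call is assumed to clobber all of memory; if EXP is a
   constant we can always return 1. */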
5739 safe_from_p (x, exp, top_p)
5740 rtx x;
5741 tree exp;
5742 int top_p;
5744 rtx exp_rtl = 0;
5745 int i, nops;
5746 static tree save_expr_list;
5748 if (x == 0
5749 /* If EXP has varying size, we MUST use a target since we currently
5750 have no way of allocating temporaries of variable size
5751 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5752 So we assume here that something at a higher level has prevented a
5753 clash. This is somewhat bogus, but the best we can do. Only
5754 do this when X is BLKmode and when we are at the top level. */
5755 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5756 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5757 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5758 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5759 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5760 != INTEGER_CST)
5761 && GET_MODE (x) == BLKmode)
5762 /* If X is in the outgoing argument area, it is always safe. */
5763 || (GET_CODE (x) == MEM
5764 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5765 || (GET_CODE (XEXP (x, 0)) == PLUS
5766 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5767 return 1;
5769 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5770 find the underlying pseudo. */
5771 if (GET_CODE (x) == SUBREG)
5773 x = SUBREG_REG (x);
5774 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5775 return 0;
5778 /* A SAVE_EXPR might appear many times in the expression passed to the
5779 top-level safe_from_p call, and if it has a complex subexpression,
5780 examining it multiple times could result in a combinatorial explosion.
5781 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5782 with optimization took about 28 minutes to compile -- even though it was
5783 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5784 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5785 we have processed. Note that the only test of top_p was above. */
5787 if (top_p)
5789 int rtn;
5790 tree t;
5792 save_expr_list = 0;
5794 rtn = safe_from_p (x, exp, 0);
5796 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5797 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5799 return rtn;
5802 /* Now look at our tree code and possibly recurse. */
5803 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5805 case 'd':
5806 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5807 break;
5809 case 'c':
5810 return 1;
5812 case 'x':
5813 if (TREE_CODE (exp) == TREE_LIST)
5814 return ((TREE_VALUE (exp) == 0
5815 || safe_from_p (x, TREE_VALUE (exp), 0))
5816 && (TREE_CHAIN (exp) == 0
5817 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5818 else if (TREE_CODE (exp) == ERROR_MARK)
5819 return 1; /* An already-visited SAVE_EXPR? */
5820 else
5821 return 0;
5823 case '1':
5824 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5826 case '2':
5827 case '<':
5828 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5829 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5831 case 'e':
5832 case 'r':
5833 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5834 the expression. If it is set, we conflict iff we are that rtx or
5835 both are in memory. Otherwise, we check all operands of the
5836 expression recursively. */
5838 switch (TREE_CODE (exp))
5840 case ADDR_EXPR:
5841 return (staticp (TREE_OPERAND (exp, 0))
5842 || TREE_STATIC (exp)
5843 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5845 case INDIRECT_REF:
5846 if (GET_CODE (x) == MEM
5847 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5848 get_alias_set (exp)))
5849 return 0;
5850 break;
5852 case CALL_EXPR:
5853 /* Assume that the call will clobber all hard registers and
5854 all of memory. */
5855 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5856 || GET_CODE (x) == MEM)
5857 return 0;
5858 break;
5860 case RTL_EXPR:
5861 /* If a sequence exists, we would have to scan every instruction
5862 in the sequence to see if it was safe. This is probably not
5863 worthwhile. */
5864 if (RTL_EXPR_SEQUENCE (exp))
5865 return 0;
5867 exp_rtl = RTL_EXPR_RTL (exp);
5868 break;
5870 case WITH_CLEANUP_EXPR:
5871 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5872 break;
5874 case CLEANUP_POINT_EXPR:
5875 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5877 case SAVE_EXPR:
5878 exp_rtl = SAVE_EXPR_RTL (exp);
5879 if (exp_rtl)
5880 break;
5882 /* If we've already scanned this, don't do it again. Otherwise,
5883 show we've scanned it and record for clearing the flag if we're
5884 going on. */
5885 if (TREE_PRIVATE (exp))
5886 return 1;
5888 TREE_PRIVATE (exp) = 1;
5889 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5891 TREE_PRIVATE (exp) = 0;
5892 return 0;
5895 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5896 return 1;
5898 case BIND_EXPR:
5899 /* The only operand we look at is operand 1. The rest aren't
5900 part of the expression. */
5901 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5903 case METHOD_CALL_EXPR:
5904 /* This takes a rtx argument, but shouldn't appear here. */
5905 abort ();
5907 default:
5908 break;
5911 /* If we have an rtx, we do not need to scan our operands. */
5912 if (exp_rtl)
5913 break;
5915 nops = first_rtl_op (TREE_CODE (exp));
5916 for (i = 0; i < nops; i++)
5917 if (TREE_OPERAND (exp, i) != 0
5918 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5919 return 0;
5921 /* If this is a language-specific tree code, it may require
5922 special handling. */
5923 if ((unsigned int) TREE_CODE (exp)
5924 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5925 && lang_safe_from_p
5926 && !(*lang_safe_from_p) (x, exp))
5927 return 0;
5930 /* If we have an rtl, find any enclosed object. Then see if we conflict
5931 with it. */
5932 if (exp_rtl)
5934 if (GET_CODE (exp_rtl) == SUBREG)
5936 exp_rtl = SUBREG_REG (exp_rtl);
5937 if (GET_CODE (exp_rtl) == REG
5938 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5939 return 0;
5942 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5943 are memory and they conflict. */
5944 return ! (rtx_equal_p (x, exp_rtl)
5945 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5946 && true_dependence (exp_rtl, GET_MODE (x), x,
5947 rtx_addr_varies_p)));
5950 /* If we reach here, it is safe. */
5951 return 1;
5954 /* Subroutine of expand_expr: return nonzero iff EXP is an
5955 expression whose type is statically determinable. */
5957 static int
5958 fixed_type_p (exp)
5959 tree exp;
5961 if (TREE_CODE (exp) == PARM_DECL
5962 || TREE_CODE (exp) == VAR_DECL
5963 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5964 || TREE_CODE (exp) == COMPONENT_REF
5965 || TREE_CODE (exp) == ARRAY_REF)
5966 return 1;
5967 return 0;
5970 /* Subroutine of expand_expr: return rtx if EXP is a
5971 variable or parameter; else return 0. */
5973 static rtx
5974 var_rtx (exp)
5975 tree exp;
5977 STRIP_NOPS (exp);
5978 switch (TREE_CODE (exp))
5980 case PARM_DECL:
5981 case VAR_DECL:
5982 return DECL_RTL (exp);
5983 default:
5984 return 0;
5988 #ifdef MAX_INTEGER_COMPUTATION_MODE
5990 void
5991 check_max_integer_computation_mode (exp)
5992 tree exp;
5994 enum tree_code code;
5995 enum machine_mode mode;
5997 /* Strip any NOPs that don't change the mode. */
5998 STRIP_NOPS (exp);
5999 code = TREE_CODE (exp);
6001 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6002 if (code == NOP_EXPR
6003 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6004 return;
6006 /* First check the type of the overall operation. We need only look at
6007 unary, binary and relational operations. */
6008 if (TREE_CODE_CLASS (code) == '1'
6009 || TREE_CODE_CLASS (code) == '2'
6010 || TREE_CODE_CLASS (code) == '<')
6012 mode = TYPE_MODE (TREE_TYPE (exp));
6013 if (GET_MODE_CLASS (mode) == MODE_INT
6014 && mode > MAX_INTEGER_COMPUTATION_MODE)
6015 internal_error ("unsupported wide integer operation");
6018 /* Check operand of a unary op. */
6019 if (TREE_CODE_CLASS (code) == '1')
6021 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6022 if (GET_MODE_CLASS (mode) == MODE_INT
6023 && mode > MAX_INTEGER_COMPUTATION_MODE)
6024 internal_error ("unsupported wide integer operation");
6027 /* Check operands of a binary/comparison op. */
6028 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6030 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6031 if (GET_MODE_CLASS (mode) == MODE_INT
6032 && mode > MAX_INTEGER_COMPUTATION_MODE)
6033 internal_error ("unsupported wide integer operation");
6035 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6036 if (GET_MODE_CLASS (mode) == MODE_INT
6037 && mode > MAX_INTEGER_COMPUTATION_MODE)
6038 internal_error ("unsupported wide integer operation");
6041 #endif
6043 /* expand_expr: generate code for computing expression EXP.
6044 An rtx for the computed value is returned. The value is never null.
6045 In the case of a void EXP, const0_rtx is returned.
6047 The value may be stored in TARGET if TARGET is nonzero.
6048 TARGET is just a suggestion; callers must assume that
6049 the rtx returned may not be the same as TARGET.
6051 If TARGET is CONST0_RTX, it means that the value will be ignored.
6053 If TMODE is not VOIDmode, it suggests generating the
6054 result in mode TMODE. But this is done only when convenient.
6055 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6056 TMODE is just a suggestion; callers must assume that
6057 the rtx returned may not have mode TMODE.
6059 Note that TARGET may have neither TMODE nor MODE. In that case, it
6060 probably will not be used.
6062 If MODIFIER is EXPAND_SUM then when EXP is an addition
6063 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6064 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6065 products as above, or REG or MEM, or constant.
6066 Ordinarily in such cases we would output mul or add instructions
6067 and then return a pseudo reg containing the sum.
6069 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6070 it also marks a label as absolutely required (it can't be dead).
6071 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6072 This is used for outputting expressions used in initializers.
6074 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6075 with a constant address even if that address is not normally legitimate.
6076 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
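/* As an informal illustration of EXPAND_SUM (register numbers are
   invented): expanding the address arithmetic for something like a[i]
   with 4-byte elements might yield

       (plus:SI (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4)))

   rather than a single pseudo holding the sum, leaving the caller to
   decide how much of the expression to keep symbolic. */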
6079 expand_expr (exp, target, tmode, modifier)
6080 register tree exp;
6081 rtx target;
6082 enum machine_mode tmode;
6083 enum expand_modifier modifier;
6085 register rtx op0, op1, temp;
6086 tree type = TREE_TYPE (exp);
6087 int unsignedp = TREE_UNSIGNED (type);
6088 register enum machine_mode mode;
6089 register enum tree_code code = TREE_CODE (exp);
6090 optab this_optab;
6091 rtx subtarget, original_target;
6092 int ignore;
6093 tree context;
6094 /* Used by check-memory-usage to make modifier read only. */
6095 enum expand_modifier ro_modifier;
6097 /* Handle ERROR_MARK before anybody tries to access its type. */
6098 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6100 op0 = CONST0_RTX (tmode);
6101 if (op0 != 0)
6102 return op0;
6103 return const0_rtx;
6106 mode = TYPE_MODE (type);
6107 /* Use subtarget as the target for operand 0 of a binary operation. */
6108 subtarget = get_subtarget (target);
6109 original_target = target;
6110 ignore = (target == const0_rtx
6111 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6112 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6113 || code == COND_EXPR)
6114 && TREE_CODE (type) == VOID_TYPE));
6116 /* Make a read-only version of the modifier. */
6117 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6118 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6119 ro_modifier = modifier;
6120 else
6121 ro_modifier = EXPAND_NORMAL;
6123 /* If we are going to ignore this result, we need only do something
6124 if there is a side-effect somewhere in the expression. If there
6125 is, short-circuit the most common cases here. Note that we must
6126 not call expand_expr with anything but const0_rtx in case this
6127 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6129 if (ignore)
6131 if (! TREE_SIDE_EFFECTS (exp))
6132 return const0_rtx;
6134 /* Ensure we reference a volatile object even if value is ignored, but
6135 don't do this if all we are doing is taking its address. */
6136 if (TREE_THIS_VOLATILE (exp)
6137 && TREE_CODE (exp) != FUNCTION_DECL
6138 && mode != VOIDmode && mode != BLKmode
6139 && modifier != EXPAND_CONST_ADDRESS)
6141 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6142 if (GET_CODE (temp) == MEM)
6143 temp = copy_to_reg (temp);
6144 return const0_rtx;
6147 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6148 || code == INDIRECT_REF || code == BUFFER_REF)
6149 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6150 VOIDmode, ro_modifier);
6151 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6152 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6154 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6155 ro_modifier);
6156 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6157 ro_modifier);
6158 return const0_rtx;
6160 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6161 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6162 /* If the second operand has no side effects, just evaluate
6163 the first. */
6164 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6165 VOIDmode, ro_modifier);
6166 else if (code == BIT_FIELD_REF)
6168 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6169 ro_modifier);
6170 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6171 ro_modifier);
6172 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6173 ro_modifier);
6174 return const0_rtx;
6177 target = 0;
6180 #ifdef MAX_INTEGER_COMPUTATION_MODE
6181 /* Only check stuff here if the mode we want is different from the mode
6182 of the expression; if it's the same, check_max_integer_computation_mode
6183 will handle it. Do we really need to check this stuff at all? */
6185 if (target
6186 && GET_MODE (target) != mode
6187 && TREE_CODE (exp) != INTEGER_CST
6188 && TREE_CODE (exp) != PARM_DECL
6189 && TREE_CODE (exp) != ARRAY_REF
6190 && TREE_CODE (exp) != ARRAY_RANGE_REF
6191 && TREE_CODE (exp) != COMPONENT_REF
6192 && TREE_CODE (exp) != BIT_FIELD_REF
6193 && TREE_CODE (exp) != INDIRECT_REF
6194 && TREE_CODE (exp) != CALL_EXPR
6195 && TREE_CODE (exp) != VAR_DECL
6196 && TREE_CODE (exp) != RTL_EXPR)
6198 enum machine_mode mode = GET_MODE (target);
6200 if (GET_MODE_CLASS (mode) == MODE_INT
6201 && mode > MAX_INTEGER_COMPUTATION_MODE)
6202 internal_error ("unsupported wide integer operation");
6205 if (tmode != mode
6206 && TREE_CODE (exp) != INTEGER_CST
6207 && TREE_CODE (exp) != PARM_DECL
6208 && TREE_CODE (exp) != ARRAY_REF
6209 && TREE_CODE (exp) != ARRAY_RANGE_REF
6210 && TREE_CODE (exp) != COMPONENT_REF
6211 && TREE_CODE (exp) != BIT_FIELD_REF
6212 && TREE_CODE (exp) != INDIRECT_REF
6213 && TREE_CODE (exp) != VAR_DECL
6214 && TREE_CODE (exp) != CALL_EXPR
6215 && TREE_CODE (exp) != RTL_EXPR
6216 && GET_MODE_CLASS (tmode) == MODE_INT
6217 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6218 internal_error ("unsupported wide integer operation");
6220 check_max_integer_computation_mode (exp);
6221 #endif
6223 /* If we will do cse, generate all results into pseudo registers
6224 since 1) that allows cse to find more things
6225 and 2) otherwise cse could produce an insn the machine
6226 cannot support. */
6228 if (! cse_not_expected && mode != BLKmode && target
6229 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6230 target = subtarget;
6232 switch (code)
6234 case LABEL_DECL:
6236 tree function = decl_function_context (exp);
6237 /* Handle using a label in a containing function. */
6238 if (function != current_function_decl
6239 && function != inline_function_decl && function != 0)
6241 struct function *p = find_function_data (function);
6242 p->expr->x_forced_labels
6243 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6244 p->expr->x_forced_labels);
6246 else
6248 if (modifier == EXPAND_INITIALIZER)
6249 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6250 label_rtx (exp),
6251 forced_labels);
6254 temp = gen_rtx_MEM (FUNCTION_MODE,
6255 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6256 if (function != current_function_decl
6257 && function != inline_function_decl && function != 0)
6258 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6259 return temp;
6262 case PARM_DECL:
6263 if (DECL_RTL (exp) == 0)
6265 error_with_decl (exp, "prior parameter's size depends on `%s'");
6266 return CONST0_RTX (mode);
6269 /* ... fall through ... */
6271 case VAR_DECL:
6272 /* If a static var's type was incomplete when the decl was written,
6273 but the type is complete now, lay out the decl now. */
6274 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6275 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6277 layout_decl (exp, 0);
6278 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6281 /* Although static-storage variables start off initialized, according to
6282 ANSI C, a memcpy could overwrite them with uninitialized values. So
6283 we check them too. This also lets us check for read-only variables
6284 accessed via a non-const declaration, in case it won't be detected
6285 any other way (e.g., in an embedded system or OS kernel without
6286 memory protection).
6288 Aggregates are not checked here; they're handled elsewhere. */
6289 if (cfun && current_function_check_memory_usage
6290 && code == VAR_DECL
6291 && GET_CODE (DECL_RTL (exp)) == MEM
6292 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6294 enum memory_use_mode memory_usage;
6295 memory_usage = get_memory_usage_from_modifier (modifier);
6297 in_check_memory_usage = 1;
6298 if (memory_usage != MEMORY_USE_DONT)
6299 emit_library_call (chkr_check_addr_libfunc,
6300 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6301 XEXP (DECL_RTL (exp), 0), Pmode,
6302 GEN_INT (int_size_in_bytes (type)),
6303 TYPE_MODE (sizetype),
6304 GEN_INT (memory_usage),
6305 TYPE_MODE (integer_type_node));
6306 in_check_memory_usage = 0;
6309 /* ... fall through ... */
6311 case FUNCTION_DECL:
6312 case RESULT_DECL:
6313 if (DECL_RTL (exp) == 0)
6314 abort ();
6316 /* Ensure variable marked as used even if it doesn't go through
6317 a parser. If it hasn't been used yet, write out an external
6318 definition. */
6319 if (! TREE_USED (exp))
6321 assemble_external (exp);
6322 TREE_USED (exp) = 1;
6325 /* Show we haven't gotten RTL for this yet. */
6326 temp = 0;
6328 /* Handle variables inherited from containing functions. */
6329 context = decl_function_context (exp);
6331 /* We treat inline_function_decl as an alias for the current function
6332 because that is the inline function whose vars, types, etc.
6333 are being merged into the current function.
6334 See expand_inline_function. */
6336 if (context != 0 && context != current_function_decl
6337 && context != inline_function_decl
6338 /* If var is static, we don't need a static chain to access it. */
6339 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6340 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6342 rtx addr;
6344 /* Mark as non-local and addressable. */
6345 DECL_NONLOCAL (exp) = 1;
6346 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6347 abort ();
6348 mark_addressable (exp);
6349 if (GET_CODE (DECL_RTL (exp)) != MEM)
6350 abort ();
6351 addr = XEXP (DECL_RTL (exp), 0);
6352 if (GET_CODE (addr) == MEM)
6353 addr
6354 = replace_equiv_address (addr,
6355 fix_lexical_addr (XEXP (addr, 0), exp));
6356 else
6357 addr = fix_lexical_addr (addr, exp);
6359 temp = replace_equiv_address (DECL_RTL (exp), addr);
6362 /* This is the case of an array whose size is to be determined
6363 from its initializer, while the initializer is still being parsed.
6364 See expand_decl. */
6366 else if (GET_CODE (DECL_RTL (exp)) == MEM
6367 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6368 temp = validize_mem (DECL_RTL (exp));
6370 /* If DECL_RTL is memory, we are in the normal case; if either the
6371 address is not valid or it is not a register and -fforce-addr is
6372 specified, get the address into a register. */
6374 else if (GET_CODE (DECL_RTL (exp)) == MEM
6375 && modifier != EXPAND_CONST_ADDRESS
6376 && modifier != EXPAND_SUM
6377 && modifier != EXPAND_INITIALIZER
6378 && (! memory_address_p (DECL_MODE (exp),
6379 XEXP (DECL_RTL (exp), 0))
6380 || (flag_force_addr
6381 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6382 temp = replace_equiv_address (DECL_RTL (exp),
6383 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6385 /* If we got something, return it. But first, set the alignment
6386 if the address is a register. */
6387 if (temp != 0)
6389 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6390 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6392 return temp;
6395 /* If the mode of DECL_RTL does not match that of the decl, it
6396 must be a promoted value. We return a SUBREG of the wanted mode,
6397 but mark it so that we know that it was already extended. */
6399 if (GET_CODE (DECL_RTL (exp)) == REG
6400 && GET_MODE (DECL_RTL (exp)) != mode)
6402 /* Get the signedness used for this variable. Ensure we get the
6403 same mode we got when the variable was declared. */
6404 if (GET_MODE (DECL_RTL (exp))
6405 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6406 abort ();
6408 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6409 SUBREG_PROMOTED_VAR_P (temp) = 1;
6410 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6411 return temp;
6414 return DECL_RTL (exp);
6416 case INTEGER_CST:
6417 return immed_double_const (TREE_INT_CST_LOW (exp),
6418 TREE_INT_CST_HIGH (exp), mode);
6420 case CONST_DECL:
6421 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6422 EXPAND_MEMORY_USE_BAD);
6424 case REAL_CST:
6425 /* If optimized, generate immediate CONST_DOUBLE
6426 which will be turned into memory by reload if necessary.
6428 We used to force a register so that loop.c could see it. But
6429 this does not allow gen_* patterns to perform optimizations with
6430 the constants. It also produces two insns in cases like "x = 1.0;".
6431 On most machines, floating-point constants are not permitted in
6432 many insns, so we'd end up copying it to a register in any case.
6434 Now, we do the copying in expand_binop, if appropriate. */
6435 return immed_real_const (exp);
6437 case COMPLEX_CST:
6438 case STRING_CST:
6439 if (! TREE_CST_RTL (exp))
6440 output_constant_def (exp, 1);
6442 /* TREE_CST_RTL probably contains a constant address.
6443 On RISC machines where a constant address isn't valid,
6444 make some insns to get that address into a register. */
6445 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6446 && modifier != EXPAND_CONST_ADDRESS
6447 && modifier != EXPAND_INITIALIZER
6448 && modifier != EXPAND_SUM
6449 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6450 || (flag_force_addr
6451 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6452 return replace_equiv_address (TREE_CST_RTL (exp),
6453 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6454 return TREE_CST_RTL (exp);
6456 case EXPR_WITH_FILE_LOCATION:
6458 rtx to_return;
6459 const char *saved_input_filename = input_filename;
6460 int saved_lineno = lineno;
6461 input_filename = EXPR_WFL_FILENAME (exp);
6462 lineno = EXPR_WFL_LINENO (exp);
6463 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6464 emit_line_note (input_filename, lineno);
6465 /* Possibly avoid switching back and forth here. */
6466 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6467 input_filename = saved_input_filename;
6468 lineno = saved_lineno;
6469 return to_return;
6472 case SAVE_EXPR:
6473 context = decl_function_context (exp);
6475 /* If this SAVE_EXPR was at global context, assume we are an
6476 initialization function and move it into our context. */
6477 if (context == 0)
6478 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6480 /* We treat inline_function_decl as an alias for the current function
6481 because that is the inline function whose vars, types, etc.
6482 are being merged into the current function.
6483 See expand_inline_function. */
6484 if (context == current_function_decl || context == inline_function_decl)
6485 context = 0;
6487 /* If this is non-local, handle it. */
6488 if (context)
6490 /* The following call just exists to abort if the context is
6491 not of a containing function. */
6492 find_function_data (context);
6494 temp = SAVE_EXPR_RTL (exp);
6495 if (temp && GET_CODE (temp) == REG)
6497 put_var_into_stack (exp);
6498 temp = SAVE_EXPR_RTL (exp);
6500 if (temp == 0 || GET_CODE (temp) != MEM)
6501 abort ();
6502 return
6503 replace_equiv_address (temp,
6504 fix_lexical_addr (XEXP (temp, 0), exp));
6506 if (SAVE_EXPR_RTL (exp) == 0)
6508 if (mode == VOIDmode)
6509 temp = const0_rtx;
6510 else
6511 temp = assign_temp (build_qualified_type (type,
6512 (TYPE_QUALS (type)
6513 | TYPE_QUAL_CONST)),
6514 3, 0, 0);
6516 SAVE_EXPR_RTL (exp) = temp;
6517 if (!optimize && GET_CODE (temp) == REG)
6518 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6519 save_expr_regs);
6521 /* If the mode of TEMP does not match that of the expression, it
6522 must be a promoted value. We pass store_expr a SUBREG of the
6523 wanted mode but mark it so that we know that it was already
6524 extended. Note that `unsignedp' was modified above in
6525 this case. */
6527 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6529 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6530 SUBREG_PROMOTED_VAR_P (temp) = 1;
6531 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6534 if (temp == const0_rtx)
6535 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6536 EXPAND_MEMORY_USE_BAD);
6537 else
6538 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6540 TREE_USED (exp) = 1;
6543 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6544 must be a promoted value. We return a SUBREG of the wanted mode,
6545 but mark it so that we know that it was already extended. */
6547 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6548 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6550 /* Compute the signedness and make the proper SUBREG. */
6551 promote_mode (type, mode, &unsignedp, 0);
6552 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6553 SUBREG_PROMOTED_VAR_P (temp) = 1;
6554 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6555 return temp;
6558 return SAVE_EXPR_RTL (exp);
6560 case UNSAVE_EXPR:
6562 rtx temp;
6563 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6564 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6565 return temp;
6568 case PLACEHOLDER_EXPR:
6570 tree placeholder_expr;
6572 /* If there is an object on the head of the placeholder list,
6573 see if some object in it is of type TYPE or a pointer to it. For
6574 further information, see tree.def. */
6575 for (placeholder_expr = placeholder_list;
6576 placeholder_expr != 0;
6577 placeholder_expr = TREE_CHAIN (placeholder_expr))
6579 tree need_type = TYPE_MAIN_VARIANT (type);
6580 tree object = 0;
6581 tree old_list = placeholder_list;
6582 tree elt;
6584 /* Find the outermost reference that is of the type we want.
6585 If none, see if any object has a type that is a pointer to
6586 the type we want. */
6587 for (elt = TREE_PURPOSE (placeholder_expr);
6588 elt != 0 && object == 0;
6590 = ((TREE_CODE (elt) == COMPOUND_EXPR
6591 || TREE_CODE (elt) == COND_EXPR)
6592 ? TREE_OPERAND (elt, 1)
6593 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6594 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6595 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6596 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6597 ? TREE_OPERAND (elt, 0) : 0))
6598 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6599 object = elt;
6601 for (elt = TREE_PURPOSE (placeholder_expr);
6602 elt != 0 && object == 0;
6604 = ((TREE_CODE (elt) == COMPOUND_EXPR
6605 || TREE_CODE (elt) == COND_EXPR)
6606 ? TREE_OPERAND (elt, 1)
6607 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6608 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6609 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6610 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6611 ? TREE_OPERAND (elt, 0) : 0))
6612 if (POINTER_TYPE_P (TREE_TYPE (elt))
6613 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6614 == need_type))
6615 object = build1 (INDIRECT_REF, need_type, elt);
6617 if (object != 0)
6619 /* Expand this object skipping the list entries before
6620 it was found in case it is also a PLACEHOLDER_EXPR.
6621 In that case, we want to translate it using subsequent
6622 entries. */
6623 placeholder_list = TREE_CHAIN (placeholder_expr);
6624 temp = expand_expr (object, original_target, tmode,
6625 ro_modifier);
6626 placeholder_list = old_list;
6627 return temp;
6632 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6633 abort ();
6635 case WITH_RECORD_EXPR:
6636 /* Put the object on the placeholder list, expand our first operand,
6637 and pop the list. */
6638 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6639 placeholder_list);
6640 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6641 tmode, ro_modifier);
6642 placeholder_list = TREE_CHAIN (placeholder_list);
6643 return target;
6645 case GOTO_EXPR:
6646 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6647 expand_goto (TREE_OPERAND (exp, 0));
6648 else
6649 expand_computed_goto (TREE_OPERAND (exp, 0));
6650 return const0_rtx;
6652 case EXIT_EXPR:
6653 expand_exit_loop_if_false (NULL,
6654 invert_truthvalue (TREE_OPERAND (exp, 0)));
6655 return const0_rtx;
6657 case LABELED_BLOCK_EXPR:
6658 if (LABELED_BLOCK_BODY (exp))
6659 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6660 /* Should perhaps use expand_label, but this is simpler and safer. */
6661 do_pending_stack_adjust ();
6662 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6663 return const0_rtx;
6665 case EXIT_BLOCK_EXPR:
6666 if (EXIT_BLOCK_RETURN (exp))
6667 sorry ("returned value in block_exit_expr");
6668 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6669 return const0_rtx;
6671 case LOOP_EXPR:
6672 push_temp_slots ();
6673 expand_start_loop (1);
6674 expand_expr_stmt (TREE_OPERAND (exp, 0));
6675 expand_end_loop ();
6676 pop_temp_slots ();
6678 return const0_rtx;
6680 case BIND_EXPR:
6682 tree vars = TREE_OPERAND (exp, 0);
6683 int vars_need_expansion = 0;
6685 /* Need to open a binding contour here because
6686 if there are any cleanups they must be contained here. */
6687 expand_start_bindings (2);
6689 /* Mark the corresponding BLOCK for output in its proper place. */
6690 if (TREE_OPERAND (exp, 2) != 0
6691 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6692 insert_block (TREE_OPERAND (exp, 2));
6694 /* If VARS have not yet been expanded, expand them now. */
6695 while (vars)
6697 if (!DECL_RTL_SET_P (vars))
6699 vars_need_expansion = 1;
6700 expand_decl (vars);
6702 expand_decl_init (vars);
6703 vars = TREE_CHAIN (vars);
6706 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6708 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6710 return temp;
6713 case RTL_EXPR:
6714 if (RTL_EXPR_SEQUENCE (exp))
6716 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6717 abort ();
6718 emit_insns (RTL_EXPR_SEQUENCE (exp));
6719 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6721 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6722 free_temps_for_rtl_expr (exp);
6723 return RTL_EXPR_RTL (exp);
6725 case CONSTRUCTOR:
6726 /* If we don't need the result, just ensure we evaluate any
6727 subexpressions. */
6728 if (ignore)
6730 tree elt;
6731 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6732 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6733 EXPAND_MEMORY_USE_BAD);
6734 return const0_rtx;
6737 /* All elts simple constants => refer to a constant in memory. But
6738 if this is a non-BLKmode mode, let it store a field at a time
6739 since that should make a CONST_INT or CONST_DOUBLE when we
6740 fold. Likewise, if we have a target we can use, it is best to
6741 store directly into the target unless the type is large enough
6742 that memcpy will be used. If we are making an initializer and
6743 all operands are constant, put it in memory as well. */
6744 else if ((TREE_STATIC (exp)
6745 && ((mode == BLKmode
6746 && ! (target != 0 && safe_from_p (target, exp, 1)))
6747 || TREE_ADDRESSABLE (exp)
6748 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6749 && (! MOVE_BY_PIECES_P
6750 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6751 TYPE_ALIGN (type)))
6752 && ! mostly_zeros_p (exp))))
6753 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6755 rtx constructor = output_constant_def (exp, 1);
6757 if (modifier != EXPAND_CONST_ADDRESS
6758 && modifier != EXPAND_INITIALIZER
6759 && modifier != EXPAND_SUM)
6760 constructor = validize_mem (constructor);
6762 return constructor;
6764 else
6766 /* Handle calls that pass values in multiple non-contiguous
6767 locations. The Irix 6 ABI has examples of this. */
6768 if (target == 0 || ! safe_from_p (target, exp, 1)
6769 || GET_CODE (target) == PARALLEL)
6770 target
6771 = assign_temp (build_qualified_type (type,
6772 (TYPE_QUALS (type)
6773 | (TREE_READONLY (exp)
6774 * TYPE_QUAL_CONST))),
6775 TREE_ADDRESSABLE (exp), 1, 1);
6777 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6778 int_size_in_bytes (TREE_TYPE (exp)));
6779 return target;
6782 case INDIRECT_REF:
6784 tree exp1 = TREE_OPERAND (exp, 0);
6785 tree index;
6786 tree string = string_constant (exp1, &index);
6788 /* Try to optimize reads from const strings. */
6789 if (string
6790 && TREE_CODE (string) == STRING_CST
6791 && TREE_CODE (index) == INTEGER_CST
6792 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6793 && GET_MODE_CLASS (mode) == MODE_INT
6794 && GET_MODE_SIZE (mode) == 1
6795 && modifier != EXPAND_MEMORY_USE_WO)
6796 return
6797 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6799 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6800 op0 = memory_address (mode, op0);
6802 if (cfun && current_function_check_memory_usage
6803 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6805 enum memory_use_mode memory_usage;
6806 memory_usage = get_memory_usage_from_modifier (modifier);
6808 if (memory_usage != MEMORY_USE_DONT)
6810 in_check_memory_usage = 1;
6811 emit_library_call (chkr_check_addr_libfunc,
6812 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6813 Pmode, GEN_INT (int_size_in_bytes (type)),
6814 TYPE_MODE (sizetype),
6815 GEN_INT (memory_usage),
6816 TYPE_MODE (integer_type_node));
6817 in_check_memory_usage = 0;
6821 temp = gen_rtx_MEM (mode, op0);
6822 set_mem_attributes (temp, exp, 0);
6824 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6825 here, because, in C and C++, the fact that a location is accessed
6826 through a pointer to const does not mean that the value there can
6827 never change. Languages where it can never change should
6828 also set TREE_STATIC. */
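/* E.g. in C, `const int *p' only forbids writes through `p' itself;
   the object `*p' may still change through another lvalue, so the
   read-only-ness of this reference alone proves nothing. */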
6829 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6831 /* If we are writing to this object and its type is a record with
6832 readonly fields, we must mark it as readonly so it will
6833 conflict with readonly references to those fields. */
6834 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6835 RTX_UNCHANGING_P (temp) = 1;
6837 return temp;
6840 case ARRAY_REF:
6841 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6842 abort ();
6845 tree array = TREE_OPERAND (exp, 0);
6846 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6847 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6848 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6849 HOST_WIDE_INT i;
6851 /* Optimize the special case of a zero lower bound.
6853 We convert the low_bound to sizetype to avoid some problems
6854 with constant folding. (E.g. suppose the lower bound is 1,
6855 and its mode is QI. Without the conversion, (ARRAY
6856 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6857 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6859 if (! integer_zerop (low_bound))
6860 index = size_diffop (index, convert (sizetype, low_bound));
6862 /* Fold an expression like: "foo"[2].
6863 This is not done in fold so it won't happen inside &.
6864 Don't fold if this is for wide characters since it's too
6865 difficult to do correctly and this is a very rare case. */
6867 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6868 && TREE_CODE (array) == STRING_CST
6869 && TREE_CODE (index) == INTEGER_CST
6870 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6871 && GET_MODE_CLASS (mode) == MODE_INT
6872 && GET_MODE_SIZE (mode) == 1)
6873 return
6874 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6876 /* If this is a constant index into a constant array,
6877 just get the value from the array. Handle both the cases when
6878 we have an explicit constructor and when our operand is a variable
6879 that was declared const. */
6881 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6882 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6883 && TREE_CODE (index) == INTEGER_CST
6884 && 0 > compare_tree_int (index,
6885 list_length (CONSTRUCTOR_ELTS
6886 (TREE_OPERAND (exp, 0)))))
6888 tree elem;
6890 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6891 i = TREE_INT_CST_LOW (index);
6892 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6895 if (elem)
6896 return expand_expr (fold (TREE_VALUE (elem)), target,
6897 tmode, ro_modifier);
6900 else if (optimize >= 1
6901 && modifier != EXPAND_CONST_ADDRESS
6902 && modifier != EXPAND_INITIALIZER
6903 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6904 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6905 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6907 if (TREE_CODE (index) == INTEGER_CST)
6909 tree init = DECL_INITIAL (array);
6911 if (TREE_CODE (init) == CONSTRUCTOR)
6913 tree elem;
6915 for (elem = CONSTRUCTOR_ELTS (init);
6916 (elem
6917 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6918 elem = TREE_CHAIN (elem))
6921 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6922 return expand_expr (fold (TREE_VALUE (elem)), target,
6923 tmode, ro_modifier);
6925 else if (TREE_CODE (init) == STRING_CST
6926 && 0 > compare_tree_int (index,
6927 TREE_STRING_LENGTH (init)))
6929 tree type = TREE_TYPE (TREE_TYPE (init));
6930 enum machine_mode mode = TYPE_MODE (type);
6932 if (GET_MODE_CLASS (mode) == MODE_INT
6933 && GET_MODE_SIZE (mode) == 1)
6934 return (GEN_INT
6935 (TREE_STRING_POINTER
6936 (init)[TREE_INT_CST_LOW (index)]));
6941 /* Fall through. */
6943 case COMPONENT_REF:
6944 case BIT_FIELD_REF:
6945 case ARRAY_RANGE_REF:
6946 /* If the operand is a CONSTRUCTOR, we can just extract the
6947 appropriate field if it is present. Don't do this if we have
6948 already written the data since we want to refer to that copy
6949 and varasm.c assumes that's what we'll do. */
6950 if (code == COMPONENT_REF
6951 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6952 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6954 tree elt;
6956 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6957 elt = TREE_CHAIN (elt))
6958 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6959 /* We can normally use the value of the field in the
6960 CONSTRUCTOR. However, if this is a bitfield in
6961 an integral mode that we can fit in a HOST_WIDE_INT,
6962 we must mask only the number of bits in the bitfield,
6963 since this is done implicitly by the constructor. If
6964 the bitfield does not meet either of those conditions,
6965 we can't do this optimization. */
6966 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6967 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6968 == MODE_INT)
6969 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6970 <= HOST_BITS_PER_WIDE_INT))))
6972 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6973 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6975 HOST_WIDE_INT bitsize
6976 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6978 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6980 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6981 op0 = expand_and (op0, op1, target);
6983 else
6985 enum machine_mode imode
6986 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6987 tree count
6988 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6991 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6992 target, 0);
6993 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6994 target, 0);
6998 return op0;
7003 enum machine_mode mode1;
7004 HOST_WIDE_INT bitsize, bitpos;
7005 tree offset;
7006 int volatilep = 0;
7007 unsigned int alignment;
7008 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7009 &mode1, &unsignedp, &volatilep,
7010 &alignment);
7012 /* If we got back the original object, something is wrong. Perhaps
7013 we are evaluating an expression too early. In any event, don't
7014 infinitely recurse. */
7015 if (tem == exp)
7016 abort ();
7018 /* If TEM's type is a union of variable size, pass TARGET to the inner
7019 computation, since it will need a temporary and TARGET is known
7020 to have to do. This occurs in unchecked conversion in Ada. */
7022 op0 = expand_expr (tem,
7023 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7024 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7025 != INTEGER_CST)
7026 ? target : NULL_RTX),
7027 VOIDmode,
7028 (modifier == EXPAND_INITIALIZER
7029 || modifier == EXPAND_CONST_ADDRESS)
7030 ? modifier : EXPAND_NORMAL);
7032 /* If this is a constant, put it into a register if it is a
7033 legitimate constant and OFFSET is 0; otherwise put it into memory. */
7034 if (CONSTANT_P (op0))
7036 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7037 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7038 && offset == 0)
7039 op0 = force_reg (mode, op0);
7040 else
7041 op0 = validize_mem (force_const_mem (mode, op0));
7044 if (offset != 0)
7046 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7048 /* If this object is in a register, put it into memory.
7049 This case can't occur in C, but can in Ada if we have
7050 unchecked conversion of an expression from a scalar type to
7051 an array or record type. */
7052 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7053 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7055 /* If the operand is a SAVE_EXPR, we can deal with this by
7056 forcing the SAVE_EXPR into memory. */
7057 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7059 put_var_into_stack (TREE_OPERAND (exp, 0));
7060 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7062 else
7064 tree nt
7065 = build_qualified_type (TREE_TYPE (tem),
7066 (TYPE_QUALS (TREE_TYPE (tem))
7067 | TYPE_QUAL_CONST));
7068 rtx memloc = assign_temp (nt, 1, 1, 1);
7070 mark_temp_addr_taken (memloc);
7071 emit_move_insn (memloc, op0);
7072 op0 = memloc;
7076 if (GET_CODE (op0) != MEM)
7077 abort ();
7079 if (GET_MODE (offset_rtx) != ptr_mode)
7081 #ifdef POINTERS_EXTEND_UNSIGNED
7082 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7083 #else
7084 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7085 #endif
7088 /* A constant address in OP0 can have VOIDmode; we must not try
7089 to call force_reg in that case, so avoid it. */
7090 if (GET_CODE (op0) == MEM
7091 && GET_MODE (op0) == BLKmode
7092 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7093 && bitsize != 0
7094 && (bitpos % bitsize) == 0
7095 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7096 && alignment == GET_MODE_ALIGNMENT (mode1))
7098 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7100 if (GET_CODE (XEXP (temp, 0)) == REG)
7101 op0 = temp;
7102 else
7103 op0 = (replace_equiv_address
7104 (op0,
7105 force_reg (GET_MODE (XEXP (temp, 0)),
7106 XEXP (temp, 0))));
7107 bitpos = 0;
7110 op0 = change_address (op0, VOIDmode,
7111 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7112 force_reg (ptr_mode,
7113 offset_rtx)));
7116 /* Don't forget about volatility even if this is a bitfield. */
7117 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7119 op0 = copy_rtx (op0);
7120 MEM_VOLATILE_P (op0) = 1;
7123 /* Check the access. */
7124 if (cfun != 0 && current_function_check_memory_usage
7125 && GET_CODE (op0) == MEM)
7127 enum memory_use_mode memory_usage;
7128 memory_usage = get_memory_usage_from_modifier (modifier);
7130 if (memory_usage != MEMORY_USE_DONT)
7132 rtx to;
7133 int size;
7135 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7136 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7138 /* Check the access right of the pointer. */
7139 in_check_memory_usage = 1;
7140 if (size > BITS_PER_UNIT)
7141 emit_library_call (chkr_check_addr_libfunc,
7142 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7143 Pmode, GEN_INT (size / BITS_PER_UNIT),
7144 TYPE_MODE (sizetype),
7145 GEN_INT (memory_usage),
7146 TYPE_MODE (integer_type_node));
7147 in_check_memory_usage = 0;
7151 /* In cases where an aligned union has an unaligned object
7152 as a field, we might be extracting a BLKmode value from
7153 an integer-mode (e.g., SImode) object. Handle this case
7154 by doing the extract into an object as wide as the field
7155 (which we know to be the width of a basic mode), then
7156 storing into memory, and changing the mode to BLKmode. */
7157 if (mode1 == VOIDmode
7158 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7159 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7160 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7161 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7162 && modifier != EXPAND_CONST_ADDRESS
7163 && modifier != EXPAND_INITIALIZER)
7164 /* If the field isn't aligned enough to fetch as a memref,
7165 fetch it as a bit field. */
7166 || (mode1 != BLKmode
7167 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7168 && ((TYPE_ALIGN (TREE_TYPE (tem))
7169 < GET_MODE_ALIGNMENT (mode))
7170 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7171 /* If the type and the field are a constant size and the
7172 size of the type isn't the same size as the bitfield,
7173 we must use bitfield operations. */
7174 || (bitsize >= 0
7175 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7176 == INTEGER_CST)
7177 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7178 bitsize))
7179 || (mode == BLKmode
7180 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7181 && (TYPE_ALIGN (type) > alignment
7182 || bitpos % TYPE_ALIGN (type) != 0)))
7184 enum machine_mode ext_mode = mode;
7186 if (ext_mode == BLKmode
7187 && ! (target != 0 && GET_CODE (op0) == MEM
7188 && GET_CODE (target) == MEM
7189 && bitpos % BITS_PER_UNIT == 0))
7190 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7192 if (ext_mode == BLKmode)
7194 /* In this case, BITPOS must start at a byte boundary and
7195 TARGET, if specified, must be a MEM. */
7196 if (GET_CODE (op0) != MEM
7197 || (target != 0 && GET_CODE (target) != MEM)
7198 || bitpos % BITS_PER_UNIT != 0)
7199 abort ();
7201 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7202 if (target == 0)
7203 target = assign_temp (type, 0, 1, 1);
7205 emit_block_move (target, op0,
7206 bitsize == -1 ? expr_size (exp)
7207 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7208 / BITS_PER_UNIT),
7209 BITS_PER_UNIT);
7211 return target;
7214 op0 = validize_mem (op0);
7216 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7217 mark_reg_pointer (XEXP (op0, 0), alignment);
7219 op0 = extract_bit_field (op0, bitsize, bitpos,
7220 unsignedp, target, ext_mode, ext_mode,
7221 alignment,
7222 int_size_in_bytes (TREE_TYPE (tem)));
7224 /* If the result is a record type and BITSIZE is narrower than
7225 the mode of OP0, an integral mode, and this is a big endian
7226 machine, we must put the field into the high-order bits. */
7227 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7228 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7229 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7230 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7231 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7232 - bitsize),
7233 op0, 1);
7235 if (mode == BLKmode)
7237 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7238 TYPE_QUAL_CONST);
7239 rtx new = assign_temp (nt, 0, 1, 1);
7241 emit_move_insn (new, op0);
7242 op0 = copy_rtx (new);
7243 PUT_MODE (op0, BLKmode);
7246 return op0;
7249 /* If the result is BLKmode, use that to access the object
7250 now as well. */
7251 if (mode == BLKmode)
7252 mode1 = BLKmode;
7254 /* Get a reference to just this component. */
7255 if (modifier == EXPAND_CONST_ADDRESS
7256 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7257 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7258 else
7259 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7261 set_mem_attributes (op0, exp, 0);
7262 if (GET_CODE (XEXP (op0, 0)) == REG)
7263 mark_reg_pointer (XEXP (op0, 0), alignment);
7265 MEM_VOLATILE_P (op0) |= volatilep;
7266 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7267 || modifier == EXPAND_CONST_ADDRESS
7268 || modifier == EXPAND_INITIALIZER)
7269 return op0;
7270 else if (target == 0)
7271 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7273 convert_move (target, op0, unsignedp);
7274 return target;
7277 /* Intended for a reference to a buffer of a file-object in Pascal.
7278 But it's not certain that a special tree code will really be
7279 necessary for these. INDIRECT_REF might work for them. */
7280 case BUFFER_REF:
7281 abort ();
7283 case IN_EXPR:
7285 /* Pascal set IN expression.
7287 Algorithm:
7288 rlo = set_low - (set_low%bits_per_word);
7289 the_word = set [ (index - rlo)/bits_per_word ];
7290 bit_index = index % bits_per_word;
7291 bitmask = 1 << bit_index;
7292 return !!(the_word & bitmask); */
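/* A concrete instance of that recipe (purely illustrative, taking
   bits_per_word == 32): for a set whose low bound is 3 and a test with
   index == 69,

       rlo       = 3 - (3 % 32)    = 0
       the_word  = set[(69-0)/32]  = set[2]
       bit_index = 69 % 32         = 5
       bitmask   = 1 << 5

   and the result is !!(set[2] & bitmask). */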
7294 tree set = TREE_OPERAND (exp, 0);
7295 tree index = TREE_OPERAND (exp, 1);
7296 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7297 tree set_type = TREE_TYPE (set);
7298 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7299 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7300 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7301 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7302 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7303 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7304 rtx setaddr = XEXP (setval, 0);
7305 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7306 rtx rlow;
7307 rtx diff, quo, rem, addr, bit, result;
7309 /* If domain is empty, answer is no. Likewise if index is constant
7310 and out of bounds. */
7311 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7312 && TREE_CODE (set_low_bound) == INTEGER_CST
7313 && tree_int_cst_lt (set_high_bound, set_low_bound))
7314 || (TREE_CODE (index) == INTEGER_CST
7315 && TREE_CODE (set_low_bound) == INTEGER_CST
7316 && tree_int_cst_lt (index, set_low_bound))
7317 || (TREE_CODE (set_high_bound) == INTEGER_CST
7318 && TREE_CODE (index) == INTEGER_CST
7319 && tree_int_cst_lt (set_high_bound, index))))
7320 return const0_rtx;
7322 if (target == 0)
7323 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7325 /* If we get here, we have to generate the code for both cases
7326 (in range and out of range). */
7328 op0 = gen_label_rtx ();
7329 op1 = gen_label_rtx ();
7331 if (! (GET_CODE (index_val) == CONST_INT
7332 && GET_CODE (lo_r) == CONST_INT))
7334 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7335 GET_MODE (index_val), iunsignedp, 0, op1);
7338 if (! (GET_CODE (index_val) == CONST_INT
7339 && GET_CODE (hi_r) == CONST_INT))
7341 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7342 GET_MODE (index_val), iunsignedp, 0, op1);
7345 /* Calculate the element number of bit zero in the first word
7346 of the set. */
7347 if (GET_CODE (lo_r) == CONST_INT)
7348 rlow = GEN_INT (INTVAL (lo_r)
7349 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7350 else
7351 rlow = expand_binop (index_mode, and_optab, lo_r,
7352 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7353 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7355 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7356 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7358 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7359 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7360 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7361 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7363 addr = memory_address (byte_mode,
7364 expand_binop (index_mode, add_optab, diff,
7365 setaddr, NULL_RTX, iunsignedp,
7366 OPTAB_LIB_WIDEN));
7368 /* Extract the bit we want to examine. */
7369 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7370 gen_rtx_MEM (byte_mode, addr),
7371 make_tree (TREE_TYPE (index), rem),
7372 NULL_RTX, 1);
7373 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7374 GET_MODE (target) == byte_mode ? target : 0,
7375 1, OPTAB_LIB_WIDEN);
7377 if (result != target)
7378 convert_move (target, result, 1);
7380 /* Output the code to handle the out-of-range case. */
7381 emit_jump (op0);
7382 emit_label (op1);
7383 emit_move_insn (target, const0_rtx);
7384 emit_label (op0);
7385 return target;
7388 case WITH_CLEANUP_EXPR:
7389 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7391 WITH_CLEANUP_EXPR_RTL (exp)
7392 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7393 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7395 /* That's it for this cleanup. */
7396 TREE_OPERAND (exp, 1) = 0;
7398 return WITH_CLEANUP_EXPR_RTL (exp);
7400 case CLEANUP_POINT_EXPR:
7402 /* Start a new binding layer that will keep track of all cleanup
7403 actions to be performed. */
7404 expand_start_bindings (2);
7406 target_temp_slot_level = temp_slot_level;
7408 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7409 /* If we're going to use this value, load it up now. */
7410 if (! ignore)
7411 op0 = force_not_mem (op0);
7412 preserve_temp_slots (op0);
7413 expand_end_bindings (NULL_TREE, 0, 0);
7415 return op0;
7417 case CALL_EXPR:
7418 /* Check for a built-in function. */
7419 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7420 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7421 == FUNCTION_DECL)
7422 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7424 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7425 == BUILT_IN_FRONTEND)
7426 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7427 else
7428 return expand_builtin (exp, target, subtarget, tmode, ignore);
7431 return expand_call (exp, target, ignore);
7433 case NON_LVALUE_EXPR:
7434 case NOP_EXPR:
7435 case CONVERT_EXPR:
7436 case REFERENCE_EXPR:
7437 if (TREE_OPERAND (exp, 0) == error_mark_node)
7438 return const0_rtx;
7440 if (TREE_CODE (type) == UNION_TYPE)
7442 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7444 /* If both input and output are BLKmode, this conversion
7445 isn't actually doing anything unless we need to make the
7446 alignment stricter. */
7447 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7448 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7449 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7450 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7451 modifier);
7453 if (target == 0)
7454 target = assign_temp (type, 0, 1, 1);
7456 if (GET_CODE (target) == MEM)
7457 /* Store data into beginning of memory target. */
7458 store_expr (TREE_OPERAND (exp, 0),
7459 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7461 else if (GET_CODE (target) == REG)
7462 /* Store this field into a union of the proper type. */
7463 store_field (target,
7464 MIN ((int_size_in_bytes (TREE_TYPE
7465 (TREE_OPERAND (exp, 0)))
7466 * BITS_PER_UNIT),
7467 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7468 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7469 VOIDmode, 0, BITS_PER_UNIT,
7470 int_size_in_bytes (type), 0);
7471 else
7472 abort ();
7474 /* Return the entire union. */
7475 return target;
7478 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7480 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7481 ro_modifier);
7483 /* If the signedness of the conversion differs and OP0 is
7484 a promoted SUBREG, clear that indication since we now
7485 have to do the proper extension. */
7486 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7487 && GET_CODE (op0) == SUBREG)
7488 SUBREG_PROMOTED_VAR_P (op0) = 0;
7490 return op0;
7493 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7494 if (GET_MODE (op0) == mode)
7495 return op0;
7497 /* If OP0 is a constant, just convert it into the proper mode. */
7498 if (CONSTANT_P (op0))
7499 return
7500 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7501 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7503 if (modifier == EXPAND_INITIALIZER)
7504 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7506 if (target == 0)
7507 return
7508 convert_to_mode (mode, op0,
7509 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7510 else
7511 convert_move (target, op0,
7512 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7513 return target;
7515 case PLUS_EXPR:
7516 /* We come here from MINUS_EXPR when the second operand is a
7517 constant. */
7518 plus_expr:
7519 this_optab = ! unsignedp && flag_trapv
7520 && (GET_MODE_CLASS(mode) == MODE_INT)
7521 ? addv_optab : add_optab;
7523 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7524 something else, make sure we add the register to the constant and
7525 then to the other thing. This case can occur during strength
7526 reduction and doing it this way will produce better code if the
7527 frame pointer or argument pointer is eliminated.
7529 fold-const.c will ensure that the constant is always in the inner
7530 PLUS_EXPR, so the only case we need to do anything about is if
7531 sp, ap, or fp is our second argument, in which case we must swap
7532 the innermost first argument and our second argument. */
7534 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7535 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7536 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7537 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7538 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7539 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7541 tree t = TREE_OPERAND (exp, 1);
7543 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7544 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7547 /* If the result is to be ptr_mode and we are adding an integer to
7548 something, we might be forming a constant. So try to use
7549 plus_constant. If it produces a sum and we can't accept it,
7550 use force_operand. This allows P = &ARR[const] to generate
7551 efficient code on machines where a SYMBOL_REF is not a valid
7552 address.
7554 If this is an EXPAND_SUM call, always return the sum. */
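	/* Rough illustration (array name is hypothetical): for P = &ARR[5]
	   with 4-byte elements, the 20-byte offset is folded into the
	   symbolic address via plus_constant instead of being added at run
	   time; if the folded sum is not an acceptable address,
	   force_operand below fixes it up.  */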
7555 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7556 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7558 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7559 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7560 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7562 rtx constant_part;
7564 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7565 EXPAND_SUM);
7566 /* Use immed_double_const to ensure that the constant is
7567 truncated according to the mode of OP1, then sign extended
7568 to a HOST_WIDE_INT. Using the constant directly can result
7569 in non-canonical RTL in a 64x32 cross compile. */
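	      /* Sketch of the hazard being avoided (assuming a 64-bit
		 HOST_WIDE_INT and a 32-bit operand mode): the tree constant
		 0xffffffff used directly would give (const_int 0xffffffff),
		 while the canonical form for that mode is (const_int -1);
		 truncating to the mode and sign extending restores the
		 canonical value.  */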
7570 constant_part
7571 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7572 (HOST_WIDE_INT) 0,
7573 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7574 op1 = plus_constant (op1, INTVAL (constant_part));
7575 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7576 op1 = force_operand (op1, target);
7577 return op1;
7580 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7581 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7582 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7584 rtx constant_part;
7586 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7587 EXPAND_SUM);
7588 if (! CONSTANT_P (op0))
7590 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7591 VOIDmode, modifier);
7592 /* Don't go to both_summands if modifier
7593 says it's not right to return a PLUS. */
7594 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7595 goto binop2;
7596 goto both_summands;
7598 /* Use immed_double_const to ensure that the constant is
7599 truncated according to the mode of OP1, then sign extended
7600 to a HOST_WIDE_INT. Using the constant directly can result
7601 in non-canonical RTL in a 64x32 cross compile. */
7602 constant_part
7603 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7604 (HOST_WIDE_INT) 0,
7605 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7606 op0 = plus_constant (op0, INTVAL (constant_part));
7607 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7608 op0 = force_operand (op0, target);
7609 return op0;
7613 /* No sense saving up arithmetic to be done
7614 if it's all in the wrong mode to form part of an address.
7615 And force_operand won't know whether to sign-extend or
7616 zero-extend. */
7617 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7618 || mode != ptr_mode)
7619 goto binop;
7621 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7622 subtarget = 0;
7624 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7625 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7627 both_summands:
7628 /* Make sure any term that's a sum with a constant comes last. */
7629 if (GET_CODE (op0) == PLUS
7630 && CONSTANT_P (XEXP (op0, 1)))
7632 temp = op0;
7633 op0 = op1;
7634 op1 = temp;
7636 /* If adding to a sum including a constant,
7637 associate it to put the constant outside. */
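	/* Illustrative case (register names are hypothetical): if OP0 is
	   (reg A) and OP1 is (plus (reg B) (const_int 8)), the code below
	   first combines A with B and then re-forms the constant as the
	   outer OP1, so the final result is roughly
	   (plus (plus (reg B) (reg A)) (const_int 8)).  */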
7638 if (GET_CODE (op1) == PLUS
7639 && CONSTANT_P (XEXP (op1, 1)))
7641 rtx constant_term = const0_rtx;
7643 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7644 if (temp != 0)
7645 op0 = temp;
7646 /* Ensure that MULT comes first if there is one. */
7647 else if (GET_CODE (op0) == MULT)
7648 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7649 else
7650 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7652 /* Let's also eliminate constants from op0 if possible. */
7653 op0 = eliminate_constant_term (op0, &constant_term);
7655 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7656 their sum should be a constant. Form it into OP1, since the
7657 result we want will then be OP0 + OP1. */
7659 temp = simplify_binary_operation (PLUS, mode, constant_term,
7660 XEXP (op1, 1));
7661 if (temp != 0)
7662 op1 = temp;
7663 else
7664 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7667 /* Put a constant term last and put a multiplication first. */
7668 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7669 temp = op1, op1 = op0, op0 = temp;
7671 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7672 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7674 case MINUS_EXPR:
7675 /* For initializers, we are allowed to return a MINUS of two
7676 symbolic constants. Here we handle all cases when both operands
7677 are constant. */
7678 /* Handle difference of two symbolic constants,
7679 for the sake of an initializer. */
7680 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7681 && really_constant_p (TREE_OPERAND (exp, 0))
7682 && really_constant_p (TREE_OPERAND (exp, 1)))
7684 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7685 VOIDmode, ro_modifier);
7686 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7687 VOIDmode, ro_modifier);
7689 /* If the last operand is a CONST_INT, use plus_constant of
7690 the negated constant. Else make the MINUS. */
7691 if (GET_CODE (op1) == CONST_INT)
7692 return plus_constant (op0, - INTVAL (op1));
7693 else
7694 return gen_rtx_MINUS (mode, op0, op1);
7696 /* Convert A - const to A + (-const). */
7697 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7699 tree negated = fold (build1 (NEGATE_EXPR, type,
7700 TREE_OPERAND (exp, 1)));
7702 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7703 /* If we can't negate the constant in TYPE, leave it alone and
7704 expand_binop will negate it for us. We used to try to do it
7705 here in the signed version of TYPE, but that doesn't work
7706 on POINTER_TYPEs. */;
7707 else
7709 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7710 goto plus_expr;
7713 this_optab = ! unsignedp && flag_trapv
7714 && (GET_MODE_CLASS(mode) == MODE_INT)
7715 ? subv_optab : sub_optab;
7716 goto binop;
7718 case MULT_EXPR:
7719 /* If first operand is constant, swap them.
7720 Thus the following special case checks need only
7721 check the second operand. */
7722 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7724 register tree t1 = TREE_OPERAND (exp, 0);
7725 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7726 TREE_OPERAND (exp, 1) = t1;
7729 /* Attempt to return something suitable for generating an
7730 indexed address, for machines that support that. */
7732 if (modifier == EXPAND_SUM && mode == ptr_mode
7733 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7734 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7736 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7737 EXPAND_SUM);
7739 /* Apply distributive law if OP0 is x+c. */
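	  /* E.g. (hypothetical operands): if OP0 expanded to
	     (plus (reg X) (const_int 4)) and the multiplier is 3, this
	     returns (plus (mult (reg X) (const_int 3)) (const_int 12)).  */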
7740 if (GET_CODE (op0) == PLUS
7741 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7742 return
7743 gen_rtx_PLUS
7744 (mode,
7745 gen_rtx_MULT
7746 (mode, XEXP (op0, 0),
7747 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7748 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7749 * INTVAL (XEXP (op0, 1))));
7751 if (GET_CODE (op0) != REG)
7752 op0 = force_operand (op0, NULL_RTX);
7753 if (GET_CODE (op0) != REG)
7754 op0 = copy_to_mode_reg (mode, op0);
7756 return
7757 gen_rtx_MULT (mode, op0,
7758 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7761 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7762 subtarget = 0;
7764 /* Check for multiplying things that have been extended
7765 from a narrower type. If this machine supports multiplying
7766 in that narrower type with a result in the desired type,
7767 do it that way, and avoid the explicit type-conversion. */
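      /* Illustrative case (assuming the target defines a widening multiply
	 pattern such as mulhisi3): for (int) (short) a * (int) (short) b,
	 both operands are narrowed-then-widened, so a single HImode-to-SImode
	 widening multiply can replace widening each operand and doing a full
	 SImode multiply.  */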
7768 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7769 && TREE_CODE (type) == INTEGER_TYPE
7770 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7771 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7772 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7773 && int_fits_type_p (TREE_OPERAND (exp, 1),
7774 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7775 /* Don't use a widening multiply if a shift will do. */
7776 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7777 > HOST_BITS_PER_WIDE_INT)
7778 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7780 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7781 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7783 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7784 /* If both operands are extended, they must either both
7785 be zero-extended or both be sign-extended. */
7786 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7788 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7790 enum machine_mode innermode
7791 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7792 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7793 ? smul_widen_optab : umul_widen_optab);
7794 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7795 ? umul_widen_optab : smul_widen_optab);
7796 if (mode == GET_MODE_WIDER_MODE (innermode))
7798 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7800 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7801 NULL_RTX, VOIDmode, 0);
7802 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7803 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7804 VOIDmode, 0);
7805 else
7806 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7807 NULL_RTX, VOIDmode, 0);
7808 goto binop2;
7810 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7811 && innermode == word_mode)
7813 rtx htem;
7814 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7815 NULL_RTX, VOIDmode, 0);
7816 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7817 op1 = convert_modes (innermode, mode,
7818 expand_expr (TREE_OPERAND (exp, 1),
7819 NULL_RTX, VOIDmode, 0),
7820 unsignedp);
7821 else
7822 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7823 NULL_RTX, VOIDmode, 0);
7824 temp = expand_binop (mode, other_optab, op0, op1, target,
7825 unsignedp, OPTAB_LIB_WIDEN);
7826 htem = expand_mult_highpart_adjust (innermode,
7827 gen_highpart (innermode, temp),
7828 op0, op1,
7829 gen_highpart (innermode, temp),
7830 unsignedp);
7831 emit_move_insn (gen_highpart (innermode, temp), htem);
7832 return temp;
7836 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7837 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7838 return expand_mult (mode, op0, op1, target, unsignedp);
7840 case TRUNC_DIV_EXPR:
7841 case FLOOR_DIV_EXPR:
7842 case CEIL_DIV_EXPR:
7843 case ROUND_DIV_EXPR:
7844 case EXACT_DIV_EXPR:
7845 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7846 subtarget = 0;
7847 /* Possible optimization: compute the dividend with EXPAND_SUM;
7848 then, if the divisor is constant, optimize the case
7849 where some terms of the dividend have coefficients divisible by it. */
7850 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7851 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7852 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7854 case RDIV_EXPR:
7855 /* Emit a/b as a*(1/b). Later, CSE may be able to share the reciprocal,
7856 saving an expensive divide. If not, combine will rebuild the original
7857 computation. */
7858 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7859 && !real_onep (TREE_OPERAND (exp, 0)))
7860 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7861 build (RDIV_EXPR, type,
7862 build_real (type, dconst1),
7863 TREE_OPERAND (exp, 1))),
7864 target, tmode, unsignedp);
7865 this_optab = sdiv_optab;
7866 goto binop;
7868 case TRUNC_MOD_EXPR:
7869 case FLOOR_MOD_EXPR:
7870 case CEIL_MOD_EXPR:
7871 case ROUND_MOD_EXPR:
7872 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7873 subtarget = 0;
7874 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7875 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7876 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7878 case FIX_ROUND_EXPR:
7879 case FIX_FLOOR_EXPR:
7880 case FIX_CEIL_EXPR:
7881 abort (); /* Not used for C. */
7883 case FIX_TRUNC_EXPR:
7884 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7885 if (target == 0)
7886 target = gen_reg_rtx (mode);
7887 expand_fix (target, op0, unsignedp);
7888 return target;
7890 case FLOAT_EXPR:
7891 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7892 if (target == 0)
7893 target = gen_reg_rtx (mode);
7894 /* expand_float can't figure out what to do if FROM has VOIDmode.
7895 So give it the correct mode. With -O, cse will optimize this. */
7896 if (GET_MODE (op0) == VOIDmode)
7897 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7898 op0);
7899 expand_float (target, op0,
7900 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7901 return target;
7903 case NEGATE_EXPR:
7904 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7905 temp = expand_unop (mode,
7906 ! unsignedp && flag_trapv
7907 && (GET_MODE_CLASS(mode) == MODE_INT)
7908 ? negv_optab : neg_optab, op0, target, 0);
7909 if (temp == 0)
7910 abort ();
7911 return temp;
7913 case ABS_EXPR:
7914 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7916 /* Handle complex values specially. */
7917 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7918 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7919 return expand_complex_abs (mode, op0, target, unsignedp);
7921 /* Unsigned abs is simply the operand. Testing here means we don't
7922 risk generating incorrect code below. */
7923 if (TREE_UNSIGNED (type))
7924 return op0;
7926 return expand_abs (mode, op0, target, unsignedp,
7927 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7929 case MAX_EXPR:
7930 case MIN_EXPR:
7931 target = original_target;
7932 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7933 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7934 || GET_MODE (target) != mode
7935 || (GET_CODE (target) == REG
7936 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7937 target = gen_reg_rtx (mode);
7938 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7939 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7941 /* First try to do it with a special MIN or MAX instruction.
7942 If that does not win, use a conditional jump to select the proper
7943 value. */
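      /* When no such instruction exists, the fallback below is roughly:
	     target = op0;
	     if (target >= op1) goto done;   (GE for MAX_EXPR, LE for MIN_EXPR)
	     target = op1;
	   done:
	 with the comparison done word by word for integer modes too wide
	 for can_compare_p.  */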
7944 this_optab = (TREE_UNSIGNED (type)
7945 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7946 : (code == MIN_EXPR ? smin_optab : smax_optab));
7948 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7949 OPTAB_WIDEN);
7950 if (temp != 0)
7951 return temp;
7953 /* At this point, a MEM target is no longer useful; we will get better
7954 code without it. */
7956 if (GET_CODE (target) == MEM)
7957 target = gen_reg_rtx (mode);
7959 if (target != op0)
7960 emit_move_insn (target, op0);
7962 op0 = gen_label_rtx ();
7964 /* If this mode is an integer too wide to compare properly,
7965 compare word by word. Rely on cse to optimize constant cases. */
7966 if (GET_MODE_CLASS (mode) == MODE_INT
7967 && ! can_compare_p (GE, mode, ccp_jump))
7969 if (code == MAX_EXPR)
7970 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7971 target, op1, NULL_RTX, op0);
7972 else
7973 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7974 op1, target, NULL_RTX, op0);
7976 else
7978 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7979 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7980 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7981 op0);
7983 emit_move_insn (target, op1);
7984 emit_label (op0);
7985 return target;
7987 case BIT_NOT_EXPR:
7988 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7989 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7990 if (temp == 0)
7991 abort ();
7992 return temp;
7994 case FFS_EXPR:
7995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7996 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7997 if (temp == 0)
7998 abort ();
7999 return temp;
8001 /* ??? Can optimize bitwise operations with one arg constant.
8002 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8003 and (a bitwise1 b) bitwise2 b (etc)
8004 but that is probably not worthwhile. */
8006 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8007 boolean values when we want in all cases to compute both of them. In
8008 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8009 as actual zero-or-1 values and then bitwise anding. In cases where
8010 there cannot be any side effects, better code would be made by
8011 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8012 how to recognize those cases. */
8014 case TRUTH_AND_EXPR:
8015 case BIT_AND_EXPR:
8016 this_optab = and_optab;
8017 goto binop;
8019 case TRUTH_OR_EXPR:
8020 case BIT_IOR_EXPR:
8021 this_optab = ior_optab;
8022 goto binop;
8024 case TRUTH_XOR_EXPR:
8025 case BIT_XOR_EXPR:
8026 this_optab = xor_optab;
8027 goto binop;
8029 case LSHIFT_EXPR:
8030 case RSHIFT_EXPR:
8031 case LROTATE_EXPR:
8032 case RROTATE_EXPR:
8033 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8034 subtarget = 0;
8035 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8036 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8037 unsignedp);
8039 /* Could determine the answer when only additive constants differ. Also,
8040 the addition of one can be handled by changing the condition. */
8041 case LT_EXPR:
8042 case LE_EXPR:
8043 case GT_EXPR:
8044 case GE_EXPR:
8045 case EQ_EXPR:
8046 case NE_EXPR:
8047 case UNORDERED_EXPR:
8048 case ORDERED_EXPR:
8049 case UNLT_EXPR:
8050 case UNLE_EXPR:
8051 case UNGT_EXPR:
8052 case UNGE_EXPR:
8053 case UNEQ_EXPR:
8054 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8055 if (temp != 0)
8056 return temp;
8058 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8059 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8060 && original_target
8061 && GET_CODE (original_target) == REG
8062 && (GET_MODE (original_target)
8063 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8065 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8066 VOIDmode, 0);
8068 if (temp != original_target)
8069 temp = copy_to_reg (temp);
8071 op1 = gen_label_rtx ();
8072 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8073 GET_MODE (temp), unsignedp, 0, op1);
8074 emit_move_insn (temp, const1_rtx);
8075 emit_label (op1);
8076 return temp;
8079 /* If no set-flag instruction, must generate a conditional
8080 store into a temporary variable. Drop through
8081 and handle this like && and ||. */
8083 case TRUTH_ANDIF_EXPR:
8084 case TRUTH_ORIF_EXPR:
8085 if (! ignore
8086 && (target == 0 || ! safe_from_p (target, exp, 1)
8087 /* Make sure we don't have a hard reg (such as function's return
8088 value) live across basic blocks, if not optimizing. */
8089 || (!optimize && GET_CODE (target) == REG
8090 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8091 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8093 if (target)
8094 emit_clr_insn (target);
8096 op1 = gen_label_rtx ();
8097 jumpifnot (exp, op1);
8099 if (target)
8100 emit_0_to_1_insn (target);
8102 emit_label (op1);
8103 return ignore ? const0_rtx : target;
8105 case TRUTH_NOT_EXPR:
8106 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8107 /* The parser is careful to generate TRUTH_NOT_EXPR
8108 only with operands that are always zero or one. */
8109 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8110 target, 1, OPTAB_LIB_WIDEN);
8111 if (temp == 0)
8112 abort ();
8113 return temp;
8115 case COMPOUND_EXPR:
8116 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8117 emit_queue ();
8118 return expand_expr (TREE_OPERAND (exp, 1),
8119 (ignore ? const0_rtx : target),
8120 VOIDmode, 0);
8122 case COND_EXPR:
8123 /* If we would have a "singleton" (see below) were it not for a
8124 conversion in each arm, bring that conversion back out. */
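      /* Hypothetical example: x ? (int) (a + b) : (int) a is rewritten here
	 as (int) (x ? a + b : a), so the "singleton" optimization below can
	 still recognize the A-plus-B-versus-A shape.  */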
8125 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8126 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8127 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8128 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8130 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8131 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8133 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8134 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8135 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8136 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8137 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8138 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8139 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8140 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8141 return expand_expr (build1 (NOP_EXPR, type,
8142 build (COND_EXPR, TREE_TYPE (iftrue),
8143 TREE_OPERAND (exp, 0),
8144 iftrue, iffalse)),
8145 target, tmode, modifier);
8149 /* Note that COND_EXPRs whose type is a structure or union
8150 are required to be constructed to contain assignments of
8151 a temporary variable, so that we can evaluate them here
8152 for side effect only. If type is void, we must do likewise. */
8154 /* If an arm of the branch requires a cleanup,
8155 only that cleanup is performed. */
8157 tree singleton = 0;
8158 tree binary_op = 0, unary_op = 0;
8160 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8161 convert it to our mode, if necessary. */
8162 if (integer_onep (TREE_OPERAND (exp, 1))
8163 && integer_zerop (TREE_OPERAND (exp, 2))
8164 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8166 if (ignore)
8168 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8169 ro_modifier);
8170 return const0_rtx;
8173 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8174 if (GET_MODE (op0) == mode)
8175 return op0;
8177 if (target == 0)
8178 target = gen_reg_rtx (mode);
8179 convert_move (target, op0, unsignedp);
8180 return target;
8183 /* Check for X ? A + B : A. If we have this, we can copy A to the
8184 output and conditionally add B. Similarly for unary operations.
8185 Don't do this if X has side-effects because those side effects
8186 might affect A or B and the "?" operation is a sequence point in
8187 ANSI. (operand_equal_p tests for side effects.) */
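	 /* Illustrative instance (names are hypothetical): for x ? a + b : a,
	    with x free of side effects, SINGLETON is the plain "a" arm and
	    BINARY_OP is "a + b"; we can store a into the output and add b
	    only when x is true.  */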
8189 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8190 && operand_equal_p (TREE_OPERAND (exp, 2),
8191 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8192 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8193 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8194 && operand_equal_p (TREE_OPERAND (exp, 1),
8195 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8196 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8197 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8198 && operand_equal_p (TREE_OPERAND (exp, 2),
8199 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8200 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8201 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8202 && operand_equal_p (TREE_OPERAND (exp, 1),
8203 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8204 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8206 /* If we are not to produce a result, we have no target. Otherwise,
8207 if a target was specified use it; it will not be used as an
8208 intermediate target unless it is safe. If no target, use a
8209 temporary. */
8211 if (ignore)
8212 temp = 0;
8213 else if (original_target
8214 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8215 || (singleton && GET_CODE (original_target) == REG
8216 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8217 && original_target == var_rtx (singleton)))
8218 && GET_MODE (original_target) == mode
8219 #ifdef HAVE_conditional_move
8220 && (! can_conditionally_move_p (mode)
8221 || GET_CODE (original_target) == REG
8222 || TREE_ADDRESSABLE (type))
8223 #endif
8224 && (GET_CODE (original_target) != MEM
8225 || TREE_ADDRESSABLE (type)))
8226 temp = original_target;
8227 else if (TREE_ADDRESSABLE (type))
8228 abort ();
8229 else
8230 temp = assign_temp (type, 0, 0, 1);
8232 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8233 do the test of X as a store-flag operation, do this as
8234 A + ((X != 0) << log C). Similarly for other simple binary
8235 operators. Only do for C == 1 if BRANCH_COST is low. */
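	 /* Sketch (assuming do_store_flag succeeds for X): x ? a + 4 : a
	    becomes a + ((x != 0) << 2); with C == 1 the shift drops out,
	    leaving a + (x != 0).  */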
8236 if (temp && singleton && binary_op
8237 && (TREE_CODE (binary_op) == PLUS_EXPR
8238 || TREE_CODE (binary_op) == MINUS_EXPR
8239 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8240 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8241 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8242 : integer_onep (TREE_OPERAND (binary_op, 1)))
8243 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8245 rtx result;
8246 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8247 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8248 ? addv_optab : add_optab)
8249 : TREE_CODE (binary_op) == MINUS_EXPR
8250 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8251 ? subv_optab : sub_optab)
8252 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8253 : xor_optab);
8255 /* If we had X ? A : A + 1, do this as A + (X == 0).
8257 We have to invert the truth value here and then put it
8258 back later if do_store_flag fails. We cannot simply copy
8259 TREE_OPERAND (exp, 0) to another variable and modify that
8260 because invert_truthvalue can modify the tree pointed to
8261 by its argument. */
8262 if (singleton == TREE_OPERAND (exp, 1))
8263 TREE_OPERAND (exp, 0)
8264 = invert_truthvalue (TREE_OPERAND (exp, 0));
8266 result = do_store_flag (TREE_OPERAND (exp, 0),
8267 (safe_from_p (temp, singleton, 1)
8268 ? temp : NULL_RTX),
8269 mode, BRANCH_COST <= 1);
8271 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8272 result = expand_shift (LSHIFT_EXPR, mode, result,
8273 build_int_2 (tree_log2
8274 (TREE_OPERAND
8275 (binary_op, 1)),
8277 (safe_from_p (temp, singleton, 1)
8278 ? temp : NULL_RTX), 0);
8280 if (result)
8282 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8283 return expand_binop (mode, boptab, op1, result, temp,
8284 unsignedp, OPTAB_LIB_WIDEN);
8286 else if (singleton == TREE_OPERAND (exp, 1))
8287 TREE_OPERAND (exp, 0)
8288 = invert_truthvalue (TREE_OPERAND (exp, 0));
8291 do_pending_stack_adjust ();
8292 NO_DEFER_POP;
8293 op0 = gen_label_rtx ();
8295 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8297 if (temp != 0)
8299 /* If the target conflicts with the other operand of the
8300 binary op, we can't use it. Also, we can't use the target
8301 if it is a hard register, because evaluating the condition
8302 might clobber it. */
8303 if ((binary_op
8304 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8305 || (GET_CODE (temp) == REG
8306 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8307 temp = gen_reg_rtx (mode);
8308 store_expr (singleton, temp, 0);
8310 else
8311 expand_expr (singleton,
8312 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8313 if (singleton == TREE_OPERAND (exp, 1))
8314 jumpif (TREE_OPERAND (exp, 0), op0);
8315 else
8316 jumpifnot (TREE_OPERAND (exp, 0), op0);
8318 start_cleanup_deferral ();
8319 if (binary_op && temp == 0)
8320 /* Just touch the other operand. */
8321 expand_expr (TREE_OPERAND (binary_op, 1),
8322 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8323 else if (binary_op)
8324 store_expr (build (TREE_CODE (binary_op), type,
8325 make_tree (type, temp),
8326 TREE_OPERAND (binary_op, 1)),
8327 temp, 0);
8328 else
8329 store_expr (build1 (TREE_CODE (unary_op), type,
8330 make_tree (type, temp)),
8331 temp, 0);
8332 op1 = op0;
8334 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8335 comparison operator. If we have one of these cases, set the
8336 output to A, branch on A (cse will merge these two references),
8337 then set the output to FOO. */
8338 else if (temp
8339 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8340 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8341 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8342 TREE_OPERAND (exp, 1), 0)
8343 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8344 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8345 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8347 if (GET_CODE (temp) == REG
8348 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8349 temp = gen_reg_rtx (mode);
8350 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8351 jumpif (TREE_OPERAND (exp, 0), op0);
8353 start_cleanup_deferral ();
8354 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8355 op1 = op0;
8357 else if (temp
8358 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8359 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8360 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8361 TREE_OPERAND (exp, 2), 0)
8362 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8363 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8364 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8366 if (GET_CODE (temp) == REG
8367 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8368 temp = gen_reg_rtx (mode);
8369 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8370 jumpifnot (TREE_OPERAND (exp, 0), op0);
8372 start_cleanup_deferral ();
8373 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8374 op1 = op0;
8376 else
8378 op1 = gen_label_rtx ();
8379 jumpifnot (TREE_OPERAND (exp, 0), op0);
8381 start_cleanup_deferral ();
8383 /* One branch of the cond can be void if it never returns. For
8384 example, A ? throw : E. */
8385 if (temp != 0
8386 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8387 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8388 else
8389 expand_expr (TREE_OPERAND (exp, 1),
8390 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8391 end_cleanup_deferral ();
8392 emit_queue ();
8393 emit_jump_insn (gen_jump (op1));
8394 emit_barrier ();
8395 emit_label (op0);
8396 start_cleanup_deferral ();
8397 if (temp != 0
8398 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8399 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8400 else
8401 expand_expr (TREE_OPERAND (exp, 2),
8402 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8405 end_cleanup_deferral ();
8407 emit_queue ();
8408 emit_label (op1);
8409 OK_DEFER_POP;
8411 return temp;
8414 case TARGET_EXPR:
8416 /* Something needs to be initialized, but we didn't know
8417 where that thing was when building the tree. For example,
8418 it could be the return value of a function, or a parameter
8419 to a function which is laid out on the stack, or a temporary
8420 variable which must be passed by reference.
8422 We guarantee that the expression will either be constructed
8423 or copied into our original target. */
8425 tree slot = TREE_OPERAND (exp, 0);
8426 tree cleanups = NULL_TREE;
8427 tree exp1;
8429 if (TREE_CODE (slot) != VAR_DECL)
8430 abort ();
8432 if (! ignore)
8433 target = original_target;
8435 /* Set this here so that if we get a target that refers to a
8436 register variable that's already been used, put_reg_into_stack
8437 knows that it should fix up those uses. */
8438 TREE_USED (slot) = 1;
8440 if (target == 0)
8442 if (DECL_RTL_SET_P (slot))
8444 target = DECL_RTL (slot);
8445 /* If we have already expanded the slot, don't do
8446 it again. (mrs) */
8447 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8448 return target;
8450 else
8452 target = assign_temp (type, 2, 0, 1);
8453 /* All temp slots at this level must not conflict. */
8454 preserve_temp_slots (target);
8455 SET_DECL_RTL (slot, target);
8456 if (TREE_ADDRESSABLE (slot))
8457 put_var_into_stack (slot);
8459 /* Since SLOT is not known to the called function
8460 to belong to its stack frame, we must build an explicit
8461 cleanup. This case occurs when we must build up a reference
8462 to pass as an argument. In this case,
8463 it is very likely that such a reference need not be
8464 built here. */
8466 if (TREE_OPERAND (exp, 2) == 0)
8467 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8468 cleanups = TREE_OPERAND (exp, 2);
8471 else
8473 /* This case does occur when expanding a parameter that
8474 needs to be constructed on the stack. The target
8475 is the actual stack address that we want to initialize.
8476 The function we call will perform the cleanup in this case. */
8478 /* If we have already assigned it space, use that space,
8479 not the target that we were passed in, as our target
8480 parameter is only a hint. */
8481 if (DECL_RTL_SET_P (slot))
8483 target = DECL_RTL (slot);
8484 /* If we have already expanded the slot, don't do
8485 it again. (mrs) */
8486 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8487 return target;
8489 else
8491 SET_DECL_RTL (slot, target);
8492 /* If we must have an addressable slot, then make sure that
8493 the RTL that we just stored in slot is OK. */
8494 if (TREE_ADDRESSABLE (slot))
8495 put_var_into_stack (slot);
8499 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8500 /* Mark it as expanded. */
8501 TREE_OPERAND (exp, 1) = NULL_TREE;
8503 store_expr (exp1, target, 0);
8505 expand_decl_cleanup (NULL_TREE, cleanups);
8507 return target;
8510 case INIT_EXPR:
8512 tree lhs = TREE_OPERAND (exp, 0);
8513 tree rhs = TREE_OPERAND (exp, 1);
8514 tree noncopied_parts = 0;
8515 tree lhs_type = TREE_TYPE (lhs);
8517 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8518 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8519 noncopied_parts
8520 = init_noncopied_parts (stabilize_reference (lhs),
8521 TYPE_NONCOPIED_PARTS (lhs_type));
8523 while (noncopied_parts != 0)
8525 expand_assignment (TREE_VALUE (noncopied_parts),
8526 TREE_PURPOSE (noncopied_parts), 0, 0);
8527 noncopied_parts = TREE_CHAIN (noncopied_parts);
8529 return temp;
8532 case MODIFY_EXPR:
8534 /* If lhs is complex, expand calls in rhs before computing it.
8535 That's so we don't compute a pointer and save it over a call.
8536 If lhs is simple, compute it first so we can give it as a
8537 target if the rhs is just a call. This avoids an extra temp and copy,
8538 and so prevents a partial subsumption, which makes bad code.
8539 Actually we could treat component_ref's of vars like vars. */
8541 tree lhs = TREE_OPERAND (exp, 0);
8542 tree rhs = TREE_OPERAND (exp, 1);
8543 tree noncopied_parts = 0;
8544 tree lhs_type = TREE_TYPE (lhs);
8546 temp = 0;
8548 /* Check for |= or &= of a bitfield of size one into another bitfield
8549 of size 1. In this case, (unless we need the result of the
8550 assignment) we can do this more efficiently with a
8551 test followed by an assignment, if necessary.
8553 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8554 things change so we do, this code should be enhanced to
8555 support it. */
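	 /* Worked example (field names are hypothetical): for s.a |= s.b with
	    both fields one bit wide and the result unused, the code below
	    emits roughly "if (s.b) s.a = 1;" (and, for the &= case,
	    "if (! s.b) s.a = 0;") instead of a full read-modify-write of the
	    bitfield.  */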
8556 if (ignore
8557 && TREE_CODE (lhs) == COMPONENT_REF
8558 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8559 || TREE_CODE (rhs) == BIT_AND_EXPR)
8560 && TREE_OPERAND (rhs, 0) == lhs
8561 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8562 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8563 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8565 rtx label = gen_label_rtx ();
8567 do_jump (TREE_OPERAND (rhs, 1),
8568 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8569 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8570 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8571 (TREE_CODE (rhs) == BIT_IOR_EXPR
8572 ? integer_one_node
8573 : integer_zero_node)),
8574 0, 0);
8575 do_pending_stack_adjust ();
8576 emit_label (label);
8577 return const0_rtx;
8580 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8581 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8582 noncopied_parts
8583 = save_noncopied_parts (stabilize_reference (lhs),
8584 TYPE_NONCOPIED_PARTS (lhs_type));
8586 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8587 while (noncopied_parts != 0)
8589 expand_assignment (TREE_PURPOSE (noncopied_parts),
8590 TREE_VALUE (noncopied_parts), 0, 0);
8591 noncopied_parts = TREE_CHAIN (noncopied_parts);
8593 return temp;
8596 case RETURN_EXPR:
8597 if (!TREE_OPERAND (exp, 0))
8598 expand_null_return ();
8599 else
8600 expand_return (TREE_OPERAND (exp, 0));
8601 return const0_rtx;
8603 case PREINCREMENT_EXPR:
8604 case PREDECREMENT_EXPR:
8605 return expand_increment (exp, 0, ignore);
8607 case POSTINCREMENT_EXPR:
8608 case POSTDECREMENT_EXPR:
8609 /* Faster to treat as pre-increment if result is not used. */
8610 return expand_increment (exp, ! ignore, ignore);
8612 case ADDR_EXPR:
8613 /* If nonzero, TEMP will be set to the address of something that might
8614 be a MEM corresponding to a stack slot. */
8615 temp = 0;
8617 /* Are we taking the address of a nested function? */
8618 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8619 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8620 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8621 && ! TREE_STATIC (exp))
8623 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8624 op0 = force_operand (op0, target);
8626 /* If we are taking the address of something erroneous, just
8627 return a zero. */
8628 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8629 return const0_rtx;
8630 else
8632 /* We make sure to pass const0_rtx down if we came in with
8633 ignore set, to avoid doing the cleanups twice. */
8634 op0 = expand_expr (TREE_OPERAND (exp, 0),
8635 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8636 (modifier == EXPAND_INITIALIZER
8637 ? modifier : EXPAND_CONST_ADDRESS));
8639 /* If we are going to ignore the result, OP0 will have been set
8640 to const0_rtx, so just return it. Don't get confused and
8641 think we are taking the address of the constant. */
8642 if (ignore)
8643 return op0;
8645 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8646 clever and return a REG when given a MEM. */
8647 op0 = protect_from_queue (op0, 1);
8649 /* We would like the object in memory. If it is a constant, we can
8650 have it be statically allocated into memory. For a non-constant,
8651 we need to allocate some memory and store the value into it. */
8653 if (CONSTANT_P (op0))
8654 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8655 op0);
8656 else if (GET_CODE (op0) == MEM)
8658 mark_temp_addr_taken (op0);
8659 temp = XEXP (op0, 0);
8662 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8663 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8664 || GET_CODE (op0) == PARALLEL)
8666 /* If this object is in a register, it must not
8667 be BLKmode. */
8668 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8669 tree nt = build_qualified_type (inner_type,
8670 (TYPE_QUALS (inner_type)
8671 | TYPE_QUAL_CONST));
8672 rtx memloc = assign_temp (nt, 1, 1, 1);
8674 mark_temp_addr_taken (memloc);
8675 if (GET_CODE (op0) == PARALLEL)
8676 /* Handle calls that pass values in multiple non-contiguous
8677 locations. The Irix 6 ABI has examples of this. */
8678 emit_group_store (memloc, op0,
8679 int_size_in_bytes (inner_type),
8680 TYPE_ALIGN (inner_type));
8681 else
8682 emit_move_insn (memloc, op0);
8683 op0 = memloc;
8686 if (GET_CODE (op0) != MEM)
8687 abort ();
8689 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8691 temp = XEXP (op0, 0);
8692 #ifdef POINTERS_EXTEND_UNSIGNED
8693 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8694 && mode == ptr_mode)
8695 temp = convert_memory_address (ptr_mode, temp);
8696 #endif
8697 return temp;
8700 op0 = force_operand (XEXP (op0, 0), target);
8703 if (flag_force_addr && GET_CODE (op0) != REG)
8704 op0 = force_reg (Pmode, op0);
8706 if (GET_CODE (op0) == REG
8707 && ! REG_USERVAR_P (op0))
8708 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8710 /* If we might have had a temp slot, add an equivalent address
8711 for it. */
8712 if (temp != 0)
8713 update_temp_slot_address (temp, op0);
8715 #ifdef POINTERS_EXTEND_UNSIGNED
8716 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8717 && mode == ptr_mode)
8718 op0 = convert_memory_address (ptr_mode, op0);
8719 #endif
8721 return op0;
8723 case ENTRY_VALUE_EXPR:
8724 abort ();
8726 /* COMPLEX type for Extended Pascal & Fortran */
8727 case COMPLEX_EXPR:
8729 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8730 rtx insns;
8732 /* Get the rtx code of the operands. */
8733 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8734 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8736 if (! target)
8737 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8739 start_sequence ();
8741 /* Move the real (op0) and imaginary (op1) parts to their location. */
8742 emit_move_insn (gen_realpart (mode, target), op0);
8743 emit_move_insn (gen_imagpart (mode, target), op1);
8745 insns = get_insns ();
8746 end_sequence ();
8748 /* Complex construction should appear as a single unit. */
8749 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8750 each with a separate pseudo as destination.
8751 It's not correct for flow to treat them as a unit. */
8752 if (GET_CODE (target) != CONCAT)
8753 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8754 else
8755 emit_insns (insns);
8757 return target;
8760 case REALPART_EXPR:
8761 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8762 return gen_realpart (mode, op0);
8764 case IMAGPART_EXPR:
8765 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8766 return gen_imagpart (mode, op0);
8768 case CONJ_EXPR:
8770 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8771 rtx imag_t;
8772 rtx insns;
8774 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8776 if (! target)
8777 target = gen_reg_rtx (mode);
8779 start_sequence ();
8781 /* Store the realpart and the negated imagpart to target. */
8782 emit_move_insn (gen_realpart (partmode, target),
8783 gen_realpart (partmode, op0));
8785 imag_t = gen_imagpart (partmode, target);
8786 temp = expand_unop (partmode,
8787 ! unsignedp && flag_trapv
8788 && (GET_MODE_CLASS(partmode) == MODE_INT)
8789 ? negv_optab : neg_optab,
8790 gen_imagpart (partmode, op0), imag_t, 0);
8791 if (temp != imag_t)
8792 emit_move_insn (imag_t, temp);
8794 insns = get_insns ();
8795 end_sequence ();
8797 /* Conjugate should appear as a single unit.
8798 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8799 each with a separate pseudo as destination.
8800 It's not correct for flow to treat them as a unit. */
8801 if (GET_CODE (target) != CONCAT)
8802 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8803 else
8804 emit_insns (insns);
8806 return target;
8809 case TRY_CATCH_EXPR:
8811 tree handler = TREE_OPERAND (exp, 1);
8813 expand_eh_region_start ();
8815 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8817 expand_eh_region_end_cleanup (handler);
8819 return op0;
8822 case TRY_FINALLY_EXPR:
8824 tree try_block = TREE_OPERAND (exp, 0);
8825 tree finally_block = TREE_OPERAND (exp, 1);
8826 rtx finally_label = gen_label_rtx ();
8827 rtx done_label = gen_label_rtx ();
8828 rtx return_link = gen_reg_rtx (Pmode);
8829 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8830 (tree) finally_label, (tree) return_link);
8831 TREE_SIDE_EFFECTS (cleanup) = 1;
8833 /* Start a new binding layer that will keep track of all cleanup
8834 actions to be performed. */
8835 expand_start_bindings (2);
8837 target_temp_slot_level = temp_slot_level;
8839 expand_decl_cleanup (NULL_TREE, cleanup);
8840 op0 = expand_expr (try_block, target, tmode, modifier);
8842 preserve_temp_slots (op0);
8843 expand_end_bindings (NULL_TREE, 0, 0);
8844 emit_jump (done_label);
8845 emit_label (finally_label);
8846 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8847 emit_indirect_jump (return_link);
8848 emit_label (done_label);
8849 return op0;
8852 case GOTO_SUBROUTINE_EXPR:
8854 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8855 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8856 rtx return_address = gen_label_rtx ();
8857 emit_move_insn (return_link,
8858 gen_rtx_LABEL_REF (Pmode, return_address));
8859 emit_jump (subr);
8860 emit_label (return_address);
8861 return const0_rtx;
8864 case VA_ARG_EXPR:
8865 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8867 case EXC_PTR_EXPR:
8868 return get_exception_pointer (cfun);
8870 default:
8871 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8874 /* Here to do an ordinary binary operator, generating an instruction
8875 from the optab already placed in `this_optab'. */
8876 binop:
8877 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8878 subtarget = 0;
8879 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8880 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8881 binop2:
8882 temp = expand_binop (mode, this_optab, op0, op1, target,
8883 unsignedp, OPTAB_LIB_WIDEN);
8884 if (temp == 0)
8885 abort ();
8886 return temp;
8889 /* Similar to expand_expr, except that we don't specify a target, target
8890 mode, or modifier and we return the alignment of the inner type. This is
8891 used in cases where it is not necessary to align the result to the
8892 alignment of its type as long as we know the alignment of the result, for
8893 example for comparisons of BLKmode values. */
8895 static rtx
8896 expand_expr_unaligned (exp, palign)
8897 register tree exp;
8898 unsigned int *palign;
8900 register rtx op0;
8901 tree type = TREE_TYPE (exp);
8902 register enum machine_mode mode = TYPE_MODE (type);
8904 /* Default the alignment we return to that of the type. */
8905 *palign = TYPE_ALIGN (type);
8907 /* The only case in which we do anything special is if the resulting mode
8908 is BLKmode. */
8909 if (mode != BLKmode)
8910 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8912 switch (TREE_CODE (exp))
8914 case CONVERT_EXPR:
8915 case NOP_EXPR:
8916 case NON_LVALUE_EXPR:
8917 /* Conversions between BLKmode values don't change the underlying
8918 alignment or value. */
8919 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8920 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8921 break;
8923 case ARRAY_REF:
8924 /* Much of the code for this case is copied directly from expand_expr.
8925 We need to duplicate it here because we will do something different
8926 in the fall-through case, so we need to handle the same exceptions
8927 it does. */
8929 tree array = TREE_OPERAND (exp, 0);
8930 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8931 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8932 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8933 HOST_WIDE_INT i;
8935 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8936 abort ();
8938 /* Optimize the special-case of a zero lower bound.
8940 We convert the low_bound to sizetype to avoid some problems
8941 with constant folding. (E.g. suppose the lower bound is 1,
8942 and its mode is QI. Without the conversion, (ARRAY
8943 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8944 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8946 if (! integer_zerop (low_bound))
8947 index = size_diffop (index, convert (sizetype, low_bound));
8949 /* If this is a constant index into a constant array,
8950 just get the value from the array. Handle both the cases when
8951 we have an explicit constructor and when our operand is a variable
8952 that was declared const. */
8954 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8955 && host_integerp (index, 0)
8956 && 0 > compare_tree_int (index,
8957 list_length (CONSTRUCTOR_ELTS
8958 (TREE_OPERAND (exp, 0)))))
8960 tree elem;
8962 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8963 i = tree_low_cst (index, 0);
8964 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8967 if (elem)
8968 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8971 else if (optimize >= 1
8972 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8973 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8974 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8976 if (TREE_CODE (index) == INTEGER_CST)
8978 tree init = DECL_INITIAL (array);
8980 if (TREE_CODE (init) == CONSTRUCTOR)
8982 tree elem;
8984 for (elem = CONSTRUCTOR_ELTS (init);
8985 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8986 elem = TREE_CHAIN (elem))
8989 if (elem)
8990 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8991 palign);
8996 /* Fall through. */
8998 case COMPONENT_REF:
8999 case BIT_FIELD_REF:
9000 case ARRAY_RANGE_REF:
9001 /* If the operand is a CONSTRUCTOR, we can just extract the
9002 appropriate field if it is present. Don't do this if we have
9003 already written the data since we want to refer to that copy
9004 and varasm.c assumes that's what we'll do. */
9005 if (TREE_CODE (exp) == COMPONENT_REF
9006 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9007 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9009 tree elt;
9011 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9012 elt = TREE_CHAIN (elt))
9013 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9014 /* Note that unlike the case in expand_expr, we know this is
9015 BLKmode and hence not an integer. */
9016 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9020 enum machine_mode mode1;
9021 HOST_WIDE_INT bitsize, bitpos;
9022 tree offset;
9023 int volatilep = 0;
9024 unsigned int alignment;
9025 int unsignedp;
9026 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9027 &mode1, &unsignedp, &volatilep,
9028 &alignment);
9030 /* If we got back the original object, something is wrong. Perhaps
9031 we are evaluating an expression too early. In any event, don't
9032 infinitely recurse. */
9033 if (tem == exp)
9034 abort ();
9036 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9038 /* If this is a constant, put it into a register if it is a
9039 legitimate constant and OFFSET is 0; otherwise put it into memory. */
9040 if (CONSTANT_P (op0))
9042 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9044 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9045 && offset == 0)
9046 op0 = force_reg (inner_mode, op0);
9047 else
9048 op0 = validize_mem (force_const_mem (inner_mode, op0));
9051 if (offset != 0)
9053 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9055 /* If this object is in a register, put it into memory.
9056 This case can't occur in C, but can in Ada if we have
9057 unchecked conversion of an expression from a scalar type to
9058 an array or record type. */
9059 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9060 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9062 tree nt = build_qualified_type (TREE_TYPE (tem),
9063 (TYPE_QUALS (TREE_TYPE (tem))
9064 | TYPE_QUAL_CONST));
9065 rtx memloc = assign_temp (nt, 1, 1, 1);
9067 mark_temp_addr_taken (memloc);
9068 emit_move_insn (memloc, op0);
9069 op0 = memloc;
9072 if (GET_CODE (op0) != MEM)
9073 abort ();
9075 if (GET_MODE (offset_rtx) != ptr_mode)
9077 #ifdef POINTERS_EXTEND_UNSIGNED
9078 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9079 #else
9080 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9081 #endif
9084 op0 = change_address (op0, VOIDmode,
9085 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9086 force_reg (ptr_mode,
9087 offset_rtx)));
9090 /* Don't forget about volatility even if this is a bitfield. */
9091 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9093 op0 = copy_rtx (op0);
9094 MEM_VOLATILE_P (op0) = 1;
9097 /* Check the access. */
9098 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9100 rtx to;
9101 int size;
9103 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9104 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9106 /* Check the access right of the pointer. */
9107 in_check_memory_usage = 1;
9108 if (size > BITS_PER_UNIT)
9109 emit_library_call (chkr_check_addr_libfunc,
9110 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9111 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9112 TYPE_MODE (sizetype),
9113 GEN_INT (MEMORY_USE_RO),
9114 TYPE_MODE (integer_type_node));
9115 in_check_memory_usage = 0;
9118 /* In cases where an aligned union has an unaligned object
9119 as a field, we might be extracting a BLKmode value from
9120 an integer-mode (e.g., SImode) object. Handle this case
9121 by doing the extract into an object as wide as the field
9122 (which we know to be the width of a basic mode), then
9123 storing into memory, and changing the mode to BLKmode.
9124 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9125 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9126 if (mode1 == VOIDmode
9127 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9128 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9129 && (TYPE_ALIGN (type) > alignment
9130 || bitpos % TYPE_ALIGN (type) != 0)))
9132 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9134 if (ext_mode == BLKmode)
9136 /* In this case, BITPOS must start at a byte boundary. */
9137 if (GET_CODE (op0) != MEM
9138 || bitpos % BITS_PER_UNIT != 0)
9139 abort ();
9141 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9143 else
9145 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9146 TYPE_QUAL_CONST);
9147 rtx new = assign_temp (nt, 0, 1, 1);
9149 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9150 unsignedp, NULL_RTX, ext_mode,
9151 ext_mode, alignment,
9152 int_size_in_bytes (TREE_TYPE (tem)));
9154 /* If the result is a record type and BITSIZE is narrower than
9155 the mode of OP0, an integral mode, and this is a big endian
9156 machine, we must put the field into the high-order bits. */
9157 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9158 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9159 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9160 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9161 size_int (GET_MODE_BITSIZE
9162 (GET_MODE (op0))
9163 - bitsize),
9164 op0, 1);
9166 emit_move_insn (new, op0);
9167 op0 = copy_rtx (new);
9168 PUT_MODE (op0, BLKmode);
9171 else
9172 /* Get a reference to just this component. */
9173 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9175 set_mem_alias_set (op0, get_alias_set (exp));
9177 /* Adjust the alignment in case the bit position is not
9178 a multiple of the alignment of the inner object. */
9179 while (bitpos % alignment != 0)
9180 alignment >>= 1;
9182 if (GET_CODE (XEXP (op0, 0)) == REG)
9183 mark_reg_pointer (XEXP (op0, 0), alignment);
9185 MEM_IN_STRUCT_P (op0) = 1;
9186 MEM_VOLATILE_P (op0) |= volatilep;
9188 *palign = alignment;
9189 return op0;
9192 default:
9193 break;
9197 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
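/* A minimal usage sketch of the routine above (hypothetical caller; EXP is
   whatever tree is being expanded).  do_compare_and_jump below uses the
   same pattern.  */
#if 0
  {
    unsigned int align;
    rtx val = expand_expr_unaligned (exp, &align);

    /* VAL is the expanded rtx; ALIGN is the alignment the reference is
       actually known to have.  */
  }
#endif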
9200 /* Return the tree node if ARG corresponds to a string constant or zero
9201 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9202 in bytes within the string that ARG is accessing. The type of the
9203 offset will be `sizetype'. */
9205 tree
9206 string_constant (arg, ptr_offset)
9207 tree arg;
9208 tree *ptr_offset;
9210 STRIP_NOPS (arg);
9212 if (TREE_CODE (arg) == ADDR_EXPR
9213 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9215 *ptr_offset = size_zero_node;
9216 return TREE_OPERAND (arg, 0);
9218 else if (TREE_CODE (arg) == PLUS_EXPR)
9220 tree arg0 = TREE_OPERAND (arg, 0);
9221 tree arg1 = TREE_OPERAND (arg, 1);
9223 STRIP_NOPS (arg0);
9224 STRIP_NOPS (arg1);
9226 if (TREE_CODE (arg0) == ADDR_EXPR
9227 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9229 *ptr_offset = convert (sizetype, arg1);
9230 return TREE_OPERAND (arg0, 0);
9232 else if (TREE_CODE (arg1) == ADDR_EXPR
9233 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9235 *ptr_offset = convert (sizetype, arg0);
9236 return TREE_OPERAND (arg1, 0);
9240 return 0;
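/* A minimal usage sketch (hypothetical caller; ARG would be the tree for an
   expression such as "abc" + i or &"abc"[1]):  */
#if 0
  {
    tree offset;
    tree str = string_constant (arg, &offset);

    if (str != 0)
      {
	/* STR is the STRING_CST node; OFFSET is a sizetype tree giving the
	   byte offset within the string that ARG accesses.  */
      }
  }
#endif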
9243 /* Expand code for a post- or pre- increment or decrement
9244 and return the RTX for the result.
9245 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9247 static rtx
9248 expand_increment (exp, post, ignore)
9249 register tree exp;
9250 int post, ignore;
9252 register rtx op0, op1;
9253 register rtx temp, value;
9254 register tree incremented = TREE_OPERAND (exp, 0);
9255 optab this_optab = add_optab;
9256 int icode;
9257 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9258 int op0_is_copy = 0;
9259 int single_insn = 0;
9260 /* 1 means we can't store into OP0 directly,
9261 because it is a subreg narrower than a word,
9262 and we don't dare clobber the rest of the word. */
9263 int bad_subreg = 0;
9265 /* Stabilize any component ref that might need to be
9266 evaluated more than once below. */
9267 if (!post
9268 || TREE_CODE (incremented) == BIT_FIELD_REF
9269 || (TREE_CODE (incremented) == COMPONENT_REF
9270 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9271 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9272 incremented = stabilize_reference (incremented);
9273 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9274 ones into save exprs so that they don't accidentally get evaluated
9275 more than once by the code below. */
9276 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9277 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9278 incremented = save_expr (incremented);
9280 /* Compute the operands as RTX.
9281 Note whether OP0 is the actual lvalue or a copy of it:
9282 I believe it is a copy iff it is a register or subreg
9283 and insns were generated in computing it. */
9285 temp = get_last_insn ();
9286 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9288 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9289 in place but instead must do sign- or zero-extension during assignment,
9290 so we copy it into a new register and let the code below use it as
9291 a copy.
9293 Note that we can safely modify this SUBREG since it is known not to be
9294 shared (it was made by the expand_expr call above). */
9296 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9298 if (post)
9299 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9300 else
9301 bad_subreg = 1;
9303 else if (GET_CODE (op0) == SUBREG
9304 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9306 /* We cannot increment this SUBREG in place. If we are
9307 post-incrementing, get a copy of the old value. Otherwise,
9308 just mark that we cannot increment in place. */
9309 if (post)
9310 op0 = copy_to_reg (op0);
9311 else
9312 bad_subreg = 1;
9315 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9316 && temp != get_last_insn ());
9317 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9318 EXPAND_MEMORY_USE_BAD);
9320 /* Decide whether incrementing or decrementing. */
9321 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9322 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9323 this_optab = sub_optab;
9325 /* Convert decrement by a constant into a negative increment. */
9326 if (this_optab == sub_optab
9327 && GET_CODE (op1) == CONST_INT)
9329 op1 = GEN_INT (-INTVAL (op1));
9330 this_optab = add_optab;
9333 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9334 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9336 /* For a preincrement, see if we can do this with a single instruction. */
9337 if (!post)
9339 icode = (int) this_optab->handlers[(int) mode].insn_code;
9340 if (icode != (int) CODE_FOR_nothing
9341 /* Make sure that OP0 is valid for operands 0 and 1
9342 of the insn we want to queue. */
9343 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9344 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9345 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9346 single_insn = 1;
9349 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9350 then we cannot just increment OP0. We must therefore contrive to
9351 increment the original value. Then, for postincrement, we can return
9352 OP0 since it is a copy of the old value. For preincrement, expand here
9353 unless we can do it with a single insn.
9355 Likewise if storing directly into OP0 would clobber high bits
9356 we need to preserve (bad_subreg). */
9357 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9359 /* This is the easiest way to increment the value wherever it is.
9360 Problems with multiple evaluation of INCREMENTED are prevented
9361 because either (1) it is a component_ref or preincrement,
9362 in which case it was stabilized above, or (2) it is an array_ref
9363 with constant index in an array in a register, which is
9364 safe to reevaluate. */
9365 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9366 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9367 ? MINUS_EXPR : PLUS_EXPR),
9368 TREE_TYPE (exp),
9369 incremented,
9370 TREE_OPERAND (exp, 1));
9372 while (TREE_CODE (incremented) == NOP_EXPR
9373 || TREE_CODE (incremented) == CONVERT_EXPR)
9375 newexp = convert (TREE_TYPE (incremented), newexp);
9376 incremented = TREE_OPERAND (incremented, 0);
9379 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9380 return post ? op0 : temp;
9383 if (post)
9385 /* We have a true reference to the value in OP0.
9386 If there is an insn to add or subtract in this mode, queue it.
9387 Queueing the increment insn avoids the register shuffling
9388 that often results if we must increment now and first save
9389 the old value for subsequent use. */
9391 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9392 op0 = stabilize (op0);
9393 #endif
9395 icode = (int) this_optab->handlers[(int) mode].insn_code;
9396 if (icode != (int) CODE_FOR_nothing
9397 /* Make sure that OP0 is valid for operands 0 and 1
9398 of the insn we want to queue. */
9399 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9400 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9402 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9403 op1 = force_reg (mode, op1);
9405 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9407 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9409 rtx addr = (general_operand (XEXP (op0, 0), mode)
9410 ? force_reg (Pmode, XEXP (op0, 0))
9411 : copy_to_reg (XEXP (op0, 0)));
9412 rtx temp, result;
9414 op0 = replace_equiv_address (op0, addr);
9415 temp = force_reg (GET_MODE (op0), op0);
9416 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9417 op1 = force_reg (mode, op1);
9419 /* The increment queue is LIFO, thus we have to `queue'
9420 the instructions in reverse order. */
9421 enqueue_insn (op0, gen_move_insn (op0, temp));
9422 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9423 return result;
9427 /* Preincrement, or we can't increment with one simple insn. */
9428 if (post)
9429 /* Save a copy of the value before inc or dec, to return it later. */
9430 temp = value = copy_to_reg (op0);
9431 else
9432 /* Arrange to return the incremented value. */
9433 /* Copy the rtx because expand_binop will protect from the queue,
9434 and the results of that would be invalid for us to return
9435 if our caller does emit_queue before using our result. */
9436 temp = copy_rtx (value = op0);
9438 /* Increment however we can. */
9439 op1 = expand_binop (mode, this_optab, value, op1,
9440 current_function_check_memory_usage ? NULL_RTX : op0,
9441 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9442 /* Make sure the value is stored into OP0. */
9443 if (op1 != op0)
9444 emit_move_insn (op0, op1);
9446 return temp;
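/* A stand-alone sketch in ordinary C (not compiler code) of the ordering the
   post-increment path above arranges for `old = x++': the old value is
   copied out first, and the addition is applied afterwards (possibly from
   the queue).  */
#if 0
static int
post_increment_sketch (int *x)
{
  int old = *x;			/* copy of the old value (copy_to_reg) */

  *x = *x + 1;			/* the queued or immediately emitted add */
  return old;			/* value of the post-increment expression */
}
#endif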
9449 /* At the start of a function, record that we have no previously-pushed
9450 arguments waiting to be popped. */
9452 void
9453 init_pending_stack_adjust ()
9455 pending_stack_adjust = 0;
9458 /* When exiting from a function, if safe, clear out any pending stack adjust
9459 so the adjustment won't get done.
9461 Note, if the current function calls alloca, then it must have a
9462 frame pointer regardless of the value of flag_omit_frame_pointer. */
9464 void
9465 clear_pending_stack_adjust ()
9467 #ifdef EXIT_IGNORE_STACK
9468 if (optimize > 0
9469 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9470 && EXIT_IGNORE_STACK
9471 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9472 && ! flag_inline_functions)
9474 stack_pointer_delta -= pending_stack_adjust,
9475 pending_stack_adjust = 0;
9477 #endif
9480 /* Pop any previously-pushed arguments that have not been popped yet. */
9482 void
9483 do_pending_stack_adjust ()
9485 if (inhibit_defer_pop == 0)
9487 if (pending_stack_adjust != 0)
9488 adjust_stack (GEN_INT (pending_stack_adjust));
9489 pending_stack_adjust = 0;
9493 /* Expand conditional expressions. */
9495 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9496 LABEL is an rtx of code CODE_LABEL, in this function and all the
9497 functions here. */
9499 void
9500 jumpifnot (exp, label)
9501 tree exp;
9502 rtx label;
9504 do_jump (exp, label, NULL_RTX);
9507 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9509 void
9510 jumpif (exp, label)
9511 tree exp;
9512 rtx label;
9514 do_jump (exp, NULL_RTX, label);
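/* A minimal usage sketch for jumpif/jumpifnot (hypothetical caller, e.g.
   statement expansion; COND is the condition tree): branch around a body
   when COND is false.  */
#if 0
  {
    rtx else_label = gen_label_rtx ();

    jumpifnot (cond, else_label);
    /* ... expand the then-part here ... */
    emit_label (else_label);
  }
#endif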
9517 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9518 the result is zero, or IF_TRUE_LABEL if the result is one.
9519 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9520 meaning fall through in that case.
9522 do_jump always does any pending stack adjust except when it does not
9523 actually perform a jump. An example where there is no jump
9524 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9526 This function is responsible for optimizing cases such as
9527 &&, || and comparison operators in EXP. */
9529 void
9530 do_jump (exp, if_false_label, if_true_label)
9531 tree exp;
9532 rtx if_false_label, if_true_label;
9534 register enum tree_code code = TREE_CODE (exp);
9535 /* Some cases need to create a label to jump to
9536 in order to properly fall through.
9537 These cases set DROP_THROUGH_LABEL nonzero. */
9538 rtx drop_through_label = 0;
9539 rtx temp;
9540 int i;
9541 tree type;
9542 enum machine_mode mode;
9544 #ifdef MAX_INTEGER_COMPUTATION_MODE
9545 check_max_integer_computation_mode (exp);
9546 #endif
9548 emit_queue ();
9550 switch (code)
9552 case ERROR_MARK:
9553 break;
9555 case INTEGER_CST:
9556 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9557 if (temp)
9558 emit_jump (temp);
9559 break;
9561 #if 0
9562 /* This is not true with #pragma weak */
9563 case ADDR_EXPR:
9564 /* The address of something can never be zero. */
9565 if (if_true_label)
9566 emit_jump (if_true_label);
9567 break;
9568 #endif
9570 case NOP_EXPR:
9571 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9572 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9573 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9574 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9575 goto normal;
9576 case CONVERT_EXPR:
9577 /* If we are narrowing the operand, we have to do the compare in the
9578 narrower mode. */
9579 if ((TYPE_PRECISION (TREE_TYPE (exp))
9580 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9581 goto normal;
9582 case NON_LVALUE_EXPR:
9583 case REFERENCE_EXPR:
9584 case ABS_EXPR:
9585 case NEGATE_EXPR:
9586 case LROTATE_EXPR:
9587 case RROTATE_EXPR:
9588 /* These cannot change zero->non-zero or vice versa. */
9589 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9590 break;
9592 case WITH_RECORD_EXPR:
9593 /* Put the object on the placeholder list, recurse through our first
9594 operand, and pop the list. */
9595 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9596 placeholder_list);
9597 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9598 placeholder_list = TREE_CHAIN (placeholder_list);
9599 break;
9601 #if 0
9602 /* This is never less insns than evaluating the PLUS_EXPR followed by
9603 a test and can be longer if the test is eliminated. */
9604 case PLUS_EXPR:
9605 /* Reduce to minus. */
9606 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9607 TREE_OPERAND (exp, 0),
9608 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9609 TREE_OPERAND (exp, 1))));
9610 /* Process as MINUS. */
9611 #endif
9613 case MINUS_EXPR:
9614 /* Non-zero iff operands of minus differ. */
9615 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9616 TREE_OPERAND (exp, 0),
9617 TREE_OPERAND (exp, 1)),
9618 NE, NE, if_false_label, if_true_label);
9619 break;
9621 case BIT_AND_EXPR:
9622 /* If we are AND'ing with a small constant, do this comparison in the
9623 smallest type that fits. If the machine doesn't have comparisons
9624 that small, it will be converted back to the wider comparison.
9625 This helps if we are testing the sign bit of a narrower object.
9626 combine can't do this for us because it can't know whether a
9627 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9629 if (! SLOW_BYTE_ACCESS
9630 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9631 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9632 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9633 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9634 && (type = type_for_mode (mode, 1)) != 0
9635 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9636 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9637 != CODE_FOR_nothing))
9639 do_jump (convert (type, exp), if_false_label, if_true_label);
9640 break;
9642 goto normal;
9644 case TRUTH_NOT_EXPR:
9645 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9646 break;
9648 case TRUTH_ANDIF_EXPR:
9649 if (if_false_label == 0)
9650 if_false_label = drop_through_label = gen_label_rtx ();
9651 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9652 start_cleanup_deferral ();
9653 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9654 end_cleanup_deferral ();
9655 break;
9657 case TRUTH_ORIF_EXPR:
9658 if (if_true_label == 0)
9659 if_true_label = drop_through_label = gen_label_rtx ();
9660 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9661 start_cleanup_deferral ();
9662 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9663 end_cleanup_deferral ();
9664 break;
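/* Illustrative shape of the branches emitted by the two cases above for
   `a && b' and `a || b' (labels invented for the sketch):

	a && b:   if (! a) goto false_label;   if (! b) goto false_label;
	a || b:   if (a) goto true_label;      if (b) goto true_label;

   so the second operand is evaluated only when the first one does not
   already decide the result.  */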
9666 case COMPOUND_EXPR:
9667 push_temp_slots ();
9668 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9669 preserve_temp_slots (NULL_RTX);
9670 free_temp_slots ();
9671 pop_temp_slots ();
9672 emit_queue ();
9673 do_pending_stack_adjust ();
9674 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9675 break;
9677 case COMPONENT_REF:
9678 case BIT_FIELD_REF:
9679 case ARRAY_REF:
9680 case ARRAY_RANGE_REF:
9682 HOST_WIDE_INT bitsize, bitpos;
9683 int unsignedp;
9684 enum machine_mode mode;
9685 tree type;
9686 tree offset;
9687 int volatilep = 0;
9688 unsigned int alignment;
9690 /* Get description of this reference. We don't actually care
9691 about the underlying object here. */
9692 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9693 &unsignedp, &volatilep, &alignment);
9695 type = type_for_size (bitsize, unsignedp);
9696 if (! SLOW_BYTE_ACCESS
9697 && type != 0 && bitsize >= 0
9698 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9699 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9700 != CODE_FOR_nothing))
9702 do_jump (convert (type, exp), if_false_label, if_true_label);
9703 break;
9705 goto normal;
9708 case COND_EXPR:
9709 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9710 if (integer_onep (TREE_OPERAND (exp, 1))
9711 && integer_zerop (TREE_OPERAND (exp, 2)))
9712 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9714 else if (integer_zerop (TREE_OPERAND (exp, 1))
9715 && integer_onep (TREE_OPERAND (exp, 2)))
9716 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9718 else
9720 register rtx label1 = gen_label_rtx ();
9721 drop_through_label = gen_label_rtx ();
9723 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9725 start_cleanup_deferral ();
9726 /* Now the THEN-expression. */
9727 do_jump (TREE_OPERAND (exp, 1),
9728 if_false_label ? if_false_label : drop_through_label,
9729 if_true_label ? if_true_label : drop_through_label);
9730 /* In case the do_jump just above never jumps. */
9731 do_pending_stack_adjust ();
9732 emit_label (label1);
9734 /* Now the ELSE-expression. */
9735 do_jump (TREE_OPERAND (exp, 2),
9736 if_false_label ? if_false_label : drop_through_label,
9737 if_true_label ? if_true_label : drop_through_label);
9738 end_cleanup_deferral ();
9740 break;
9742 case EQ_EXPR:
9744 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9746 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9747 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9749 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9750 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9751 do_jump
9752 (fold
9753 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9754 fold (build (EQ_EXPR, TREE_TYPE (exp),
9755 fold (build1 (REALPART_EXPR,
9756 TREE_TYPE (inner_type),
9757 exp0)),
9758 fold (build1 (REALPART_EXPR,
9759 TREE_TYPE (inner_type),
9760 exp1)))),
9761 fold (build (EQ_EXPR, TREE_TYPE (exp),
9762 fold (build1 (IMAGPART_EXPR,
9763 TREE_TYPE (inner_type),
9764 exp0)),
9765 fold (build1 (IMAGPART_EXPR,
9766 TREE_TYPE (inner_type),
9767 exp1)))))),
9768 if_false_label, if_true_label);
9771 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9772 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9774 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9775 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9776 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9777 else
9778 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9779 break;
9782 case NE_EXPR:
9784 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9786 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9787 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9789 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9790 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9791 do_jump
9792 (fold
9793 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9794 fold (build (NE_EXPR, TREE_TYPE (exp),
9795 fold (build1 (REALPART_EXPR,
9796 TREE_TYPE (inner_type),
9797 exp0)),
9798 fold (build1 (REALPART_EXPR,
9799 TREE_TYPE (inner_type),
9800 exp1)))),
9801 fold (build (NE_EXPR, TREE_TYPE (exp),
9802 fold (build1 (IMAGPART_EXPR,
9803 TREE_TYPE (inner_type),
9804 exp0)),
9805 fold (build1 (IMAGPART_EXPR,
9806 TREE_TYPE (inner_type),
9807 exp1)))))),
9808 if_false_label, if_true_label);
9811 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9812 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9814 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9815 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9816 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9817 else
9818 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9819 break;
9822 case LT_EXPR:
9823 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9824 if (GET_MODE_CLASS (mode) == MODE_INT
9825 && ! can_compare_p (LT, mode, ccp_jump))
9826 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9827 else
9828 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9829 break;
9831 case LE_EXPR:
9832 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9833 if (GET_MODE_CLASS (mode) == MODE_INT
9834 && ! can_compare_p (LE, mode, ccp_jump))
9835 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9836 else
9837 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9838 break;
9840 case GT_EXPR:
9841 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9842 if (GET_MODE_CLASS (mode) == MODE_INT
9843 && ! can_compare_p (GT, mode, ccp_jump))
9844 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9845 else
9846 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9847 break;
9849 case GE_EXPR:
9850 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9851 if (GET_MODE_CLASS (mode) == MODE_INT
9852 && ! can_compare_p (GE, mode, ccp_jump))
9853 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9854 else
9855 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9856 break;
9858 case UNORDERED_EXPR:
9859 case ORDERED_EXPR:
9861 enum rtx_code cmp, rcmp;
9862 int do_rev;
9864 if (code == UNORDERED_EXPR)
9865 cmp = UNORDERED, rcmp = ORDERED;
9866 else
9867 cmp = ORDERED, rcmp = UNORDERED;
9868 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9870 do_rev = 0;
9871 if (! can_compare_p (cmp, mode, ccp_jump)
9872 && (can_compare_p (rcmp, mode, ccp_jump)
9873 /* If the target doesn't provide either UNORDERED or ORDERED
9874 comparisons, canonicalize on UNORDERED for the library. */
9875 || rcmp == UNORDERED))
9876 do_rev = 1;
9878 if (! do_rev)
9879 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9880 else
9881 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9883 break;
9886 enum rtx_code rcode1;
9887 enum tree_code tcode2;
9889 case UNLT_EXPR:
9890 rcode1 = UNLT;
9891 tcode2 = LT_EXPR;
9892 goto unordered_bcc;
9893 case UNLE_EXPR:
9894 rcode1 = UNLE;
9895 tcode2 = LE_EXPR;
9896 goto unordered_bcc;
9897 case UNGT_EXPR:
9898 rcode1 = UNGT;
9899 tcode2 = GT_EXPR;
9900 goto unordered_bcc;
9901 case UNGE_EXPR:
9902 rcode1 = UNGE;
9903 tcode2 = GE_EXPR;
9904 goto unordered_bcc;
9905 case UNEQ_EXPR:
9906 rcode1 = UNEQ;
9907 tcode2 = EQ_EXPR;
9908 goto unordered_bcc;
9910 unordered_bcc:
9911 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9912 if (can_compare_p (rcode1, mode, ccp_jump))
9913 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9914 if_true_label);
9915 else
9917 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9918 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9919 tree cmp0, cmp1;
9921 /* If the target doesn't support combined unordered
9922 compares, decompose into UNORDERED + comparison. */
9923 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9924 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9925 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9926 do_jump (exp, if_false_label, if_true_label);
9929 break;
9931 /* Special case:
9932 __builtin_expect (<test>, 0) and
9933 __builtin_expect (<test>, 1)
9935 We need to do this here, so that <test> is not converted to a SCC
9936 operation on machines that use condition code registers and COMPARE
9937 like the PowerPC, and then the jump is done based on whether the SCC
9938 operation produced a 1 or 0. */
9939 case CALL_EXPR:
9940 /* Check for a built-in function. */
9941 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9943 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9944 tree arglist = TREE_OPERAND (exp, 1);
9946 if (TREE_CODE (fndecl) == FUNCTION_DECL
9947 && DECL_BUILT_IN (fndecl)
9948 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9949 && arglist != NULL_TREE
9950 && TREE_CHAIN (arglist) != NULL_TREE)
9952 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9953 if_true_label);
9955 if (seq != NULL_RTX)
9957 emit_insn (seq);
9958 return;
9962 /* fall through and generate the normal code. */
9964 default:
9965 normal:
9966 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9967 #if 0
9968 /* This is not needed any more and causes poor code since it causes
9969 comparisons and tests from non-SI objects to have different code
9970 sequences. */
9971 /* Copy to register to avoid generating bad insns by cse
9972 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9973 if (!cse_not_expected && GET_CODE (temp) == MEM)
9974 temp = copy_to_reg (temp);
9975 #endif
9976 do_pending_stack_adjust ();
9977 /* Do any postincrements in the expression that was tested. */
9978 emit_queue ();
9980 if (GET_CODE (temp) == CONST_INT
9981 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9982 || GET_CODE (temp) == LABEL_REF)
9984 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9985 if (target)
9986 emit_jump (target);
9988 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9989 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9990 /* Note swapping the labels gives us not-equal. */
9991 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9992 else if (GET_MODE (temp) != VOIDmode)
9993 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9994 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9995 GET_MODE (temp), NULL_RTX, 0,
9996 if_false_label, if_true_label);
9997 else
9998 abort ();
10001 if (drop_through_label)
10003 /* If do_jump produces code that might be jumped around,
10004 do any stack adjusts from that code, before the place
10005 where control merges in. */
10006 do_pending_stack_adjust ();
10007 emit_label (drop_through_label);
10011 /* Given a comparison expression EXP for values too wide to be compared
10012 with one insn, test the comparison and jump to the appropriate label.
10013 The code of EXP is ignored; we always test GT if SWAP is 0,
10014 and LT if SWAP is 1. */
10016 static void
10017 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10018 tree exp;
10019 int swap;
10020 rtx if_false_label, if_true_label;
10022 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10023 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10024 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10025 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10027 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10030 /* Compare OP0 with OP1, word at a time, in mode MODE.
10031 UNSIGNEDP says to do unsigned comparison.
10032 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10034 void
10035 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10036 enum machine_mode mode;
10037 int unsignedp;
10038 rtx op0, op1;
10039 rtx if_false_label, if_true_label;
10041 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10042 rtx drop_through_label = 0;
10043 int i;
10045 if (! if_true_label || ! if_false_label)
10046 drop_through_label = gen_label_rtx ();
10047 if (! if_true_label)
10048 if_true_label = drop_through_label;
10049 if (! if_false_label)
10050 if_false_label = drop_through_label;
10052 /* Compare a word at a time, high order first. */
10053 for (i = 0; i < nwords; i++)
10055 rtx op0_word, op1_word;
10057 if (WORDS_BIG_ENDIAN)
10059 op0_word = operand_subword_force (op0, i, mode);
10060 op1_word = operand_subword_force (op1, i, mode);
10062 else
10064 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10065 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10068 /* All but high-order word must be compared as unsigned. */
10069 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10070 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10071 NULL_RTX, if_true_label);
10073 /* Consider lower words only if these are equal. */
10074 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10075 NULL_RTX, 0, NULL_RTX, if_false_label);
10078 if (if_false_label)
10079 emit_jump (if_false_label);
10080 if (drop_through_label)
10081 emit_label (drop_through_label);
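/* A stand-alone sketch in ordinary C (not compiler code) of the sequence
   emitted above for the fully unsigned case, with the words of each value
   stored most-significant first in A[] and B[].  (For a signed comparison
   only the most significant word is compared as signed.)  */
#if 0
static int
wide_greater_sketch (const unsigned long *a, const unsigned long *b,
		     int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (a[i] > b[i])
	return 1;		/* branch to if_true_label */
      if (a[i] != b[i])
	return 0;		/* branch to if_false_label */
      /* The words are equal; the next lower word decides.  */
    }
  return 0;			/* all words equal: not greater */
}
#endif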
10084 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10085 with one insn, test the comparison and jump to the appropriate label. */
10087 static void
10088 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10089 tree exp;
10090 rtx if_false_label, if_true_label;
10092 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10093 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10094 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10095 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10096 int i;
10097 rtx drop_through_label = 0;
10099 if (! if_false_label)
10100 drop_through_label = if_false_label = gen_label_rtx ();
10102 for (i = 0; i < nwords; i++)
10103 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10104 operand_subword_force (op1, i, mode),
10105 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10106 word_mode, NULL_RTX, 0, if_false_label,
10107 NULL_RTX);
10109 if (if_true_label)
10110 emit_jump (if_true_label);
10111 if (drop_through_label)
10112 emit_label (drop_through_label);
10115 /* Jump according to whether OP0 is 0.
10116 We assume that OP0 has an integer mode that is too wide
10117 for the available compare insns. */
10119 void
10120 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10121 rtx op0;
10122 rtx if_false_label, if_true_label;
10124 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10125 rtx part;
10126 int i;
10127 rtx drop_through_label = 0;
10129 /* The fastest way of doing this comparison on almost any machine is to
10130 "or" all the words and compare the result. If all have to be loaded
10131 from memory and this is a very wide item, it's possible this may
10132 be slower, but that's highly unlikely. */
10134 part = gen_reg_rtx (word_mode);
10135 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10136 for (i = 1; i < nwords && part != 0; i++)
10137 part = expand_binop (word_mode, ior_optab, part,
10138 operand_subword_force (op0, i, GET_MODE (op0)),
10139 part, 1, OPTAB_WIDEN);
10141 if (part != 0)
10143 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10144 NULL_RTX, 0, if_false_label, if_true_label);
10146 return;
10149 /* If we couldn't do the "or" simply, do this with a series of compares. */
10150 if (! if_false_label)
10151 drop_through_label = if_false_label = gen_label_rtx ();
10153 for (i = 0; i < nwords; i++)
10154 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10155 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10156 if_false_label, NULL_RTX);
10158 if (if_true_label)
10159 emit_jump (if_true_label);
10161 if (drop_through_label)
10162 emit_label (drop_through_label);
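/* A stand-alone sketch in ordinary C (not compiler code) of the "or all the
   words" test emitted above, with the words of OP0 stored in W[]:  */
#if 0
static int
wide_is_zero_sketch (const unsigned long *w, int nwords)
{
  unsigned long acc = w[0];
  int i;

  for (i = 1; i < nwords; i++)
    acc |= w[i];		/* one IOR per additional word */
  return acc == 0;		/* a single comparison against zero */
}
#endif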
10165 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10166 (including code to compute the values to be compared)
10167 and set (CC0) according to the result.
10168 The decision as to signed or unsigned comparison must be made by the caller.
10170 We force a stack adjustment unless there are currently
10171 things pushed on the stack that aren't yet used.
10173 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10174 compared.
10176 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10177 size of MODE should be used. */
10179 rtx
10180 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10181 register rtx op0, op1;
10182 enum rtx_code code;
10183 int unsignedp;
10184 enum machine_mode mode;
10185 rtx size;
10186 unsigned int align;
10188 rtx tem;
10190 /* If one operand is constant, make it the second one. Only do this
10191 if the other operand is not constant as well. */
10193 if (swap_commutative_operands_p (op0, op1))
10195 tem = op0;
10196 op0 = op1;
10197 op1 = tem;
10198 code = swap_condition (code);
10201 if (flag_force_mem)
10203 op0 = force_not_mem (op0);
10204 op1 = force_not_mem (op1);
10207 do_pending_stack_adjust ();
10209 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10210 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10211 return tem;
10213 #if 0
10214 /* There's no need to do this now that combine.c can eliminate lots of
10215 sign extensions. This can be less efficient in certain cases on other
10216 machines. */
10218 /* If this is a signed equality comparison, we can do it as an
10219 unsigned comparison since zero-extension is cheaper than sign
10220 extension and comparisons with zero are done as unsigned. This is
10221 the case even on machines that can do fast sign extension, since
10222 zero-extension is easier to combine with other operations than
10223 sign-extension is. If we are comparing against a constant, we must
10224 convert it to what it would look like unsigned. */
10225 if ((code == EQ || code == NE) && ! unsignedp
10226 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10228 if (GET_CODE (op1) == CONST_INT
10229 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10230 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10231 unsignedp = 1;
10233 #endif
10235 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10237 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
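/* A minimal usage sketch (essentially what the do_store_flag fallback below
   does; OP0, OP1 and LABEL are assumed to exist already): emit the
   comparison, then branch on the cc0 test it leaves behind.  */
#if 0
  {
    rtx cond = compare_from_rtx (op0, op1, EQ, 0, SImode, NULL_RTX, 0);

    if (GET_CODE (cond) == CONST_INT)
      /* The comparison folded to a constant (const0_rtx or const_true_rtx),
	 so no branch is needed.  */
      ;
    else
      emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (label));
  }
#endif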
10240 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10241 The decision as to signed or unsigned comparison must be made by the caller.
10243 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10244 compared.
10246 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10247 size of MODE should be used. */
10249 void
10250 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10251 if_false_label, if_true_label)
10252 register rtx op0, op1;
10253 enum rtx_code code;
10254 int unsignedp;
10255 enum machine_mode mode;
10256 rtx size;
10257 unsigned int align;
10258 rtx if_false_label, if_true_label;
10260 rtx tem;
10261 int dummy_true_label = 0;
10263 /* Reverse the comparison if that is safe and we want to jump if it is
10264 false. */
10265 if (! if_true_label && ! FLOAT_MODE_P (mode))
10267 if_true_label = if_false_label;
10268 if_false_label = 0;
10269 code = reverse_condition (code);
10272 /* If one operand is constant, make it the second one. Only do this
10273 if the other operand is not constant as well. */
10275 if (swap_commutative_operands_p (op0, op1))
10277 tem = op0;
10278 op0 = op1;
10279 op1 = tem;
10280 code = swap_condition (code);
10283 if (flag_force_mem)
10285 op0 = force_not_mem (op0);
10286 op1 = force_not_mem (op1);
10289 do_pending_stack_adjust ();
10291 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10292 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10294 if (tem == const_true_rtx)
10296 if (if_true_label)
10297 emit_jump (if_true_label);
10299 else
10301 if (if_false_label)
10302 emit_jump (if_false_label);
10304 return;
10307 #if 0
10308 /* There's no need to do this now that combine.c can eliminate lots of
10309 sign extensions. This can be less efficient in certain cases on other
10310 machines. */
10312 /* If this is a signed equality comparison, we can do it as an
10313 unsigned comparison since zero-extension is cheaper than sign
10314 extension and comparisons with zero are done as unsigned. This is
10315 the case even on machines that can do fast sign extension, since
10316 zero-extension is easier to combine with other operations than
10317 sign-extension is. If we are comparing against a constant, we must
10318 convert it to what it would look like unsigned. */
10319 if ((code == EQ || code == NE) && ! unsignedp
10320 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10322 if (GET_CODE (op1) == CONST_INT
10323 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10324 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10325 unsignedp = 1;
10327 #endif
10329 if (! if_true_label)
10331 dummy_true_label = 1;
10332 if_true_label = gen_label_rtx ();
10335 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10336 if_true_label);
10338 if (if_false_label)
10339 emit_jump (if_false_label);
10340 if (dummy_true_label)
10341 emit_label (if_true_label);
10344 /* Generate code for a comparison expression EXP (including code to compute
10345 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10346 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10347 generated code will drop through.
10348 SIGNED_CODE should be the rtx operation for this comparison for
10349 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10351 We force a stack adjustment unless there are currently
10352 things pushed on the stack that aren't yet used. */
10354 static void
10355 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10356 if_true_label)
10357 register tree exp;
10358 enum rtx_code signed_code, unsigned_code;
10359 rtx if_false_label, if_true_label;
10361 unsigned int align0, align1;
10362 register rtx op0, op1;
10363 register tree type;
10364 register enum machine_mode mode;
10365 int unsignedp;
10366 enum rtx_code code;
10368 /* Don't crash if the comparison was erroneous. */
10369 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10370 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10371 return;
10373 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10374 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10375 return;
10377 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10378 mode = TYPE_MODE (type);
10379 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10380 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10381 || (GET_MODE_BITSIZE (mode)
10382 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10383 1)))))))
10385 /* op0 might have been replaced by a promoted constant, in which
10386 case the type of the second argument should be used.  */
10387 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10388 mode = TYPE_MODE (type);
10390 unsignedp = TREE_UNSIGNED (type);
10391 code = unsignedp ? unsigned_code : signed_code;
10393 #ifdef HAVE_canonicalize_funcptr_for_compare
10394 /* If function pointers need to be "canonicalized" before they can
10395 be reliably compared, then canonicalize them. */
10396 if (HAVE_canonicalize_funcptr_for_compare
10397 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10398 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10399 == FUNCTION_TYPE))
10401 rtx new_op0 = gen_reg_rtx (mode);
10403 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10404 op0 = new_op0;
10407 if (HAVE_canonicalize_funcptr_for_compare
10408 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10409 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10410 == FUNCTION_TYPE))
10412 rtx new_op1 = gen_reg_rtx (mode);
10414 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10415 op1 = new_op1;
10417 #endif
10419 /* Do any postincrements in the expression that was tested. */
10420 emit_queue ();
10422 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10423 ((mode == BLKmode)
10424 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10425 MIN (align0, align1),
10426 if_false_label, if_true_label);
10429 /* Generate code to calculate EXP using a store-flag instruction
10430 and return an rtx for the result. EXP is either a comparison
10431 or a TRUTH_NOT_EXPR whose operand is a comparison.
10433 If TARGET is nonzero, store the result there if convenient.
10435 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10436 cheap.
10438 Return zero if there is no suitable set-flag instruction
10439 available on this machine.
10441 Once expand_expr has been called on the arguments of the comparison,
10442 we are committed to doing the store flag, since it is not safe to
10443 re-evaluate the expression. We emit the store-flag insn by calling
10444 emit_store_flag, but only expand the arguments if we have a reason
10445 to believe that emit_store_flag will be successful. If we think that
10446 it will, but it isn't, we have to simulate the store-flag with a
10447 set/jump/set sequence. */
10449 static rtx
10450 do_store_flag (exp, target, mode, only_cheap)
10451 tree exp;
10452 rtx target;
10453 enum machine_mode mode;
10454 int only_cheap;
10456 enum rtx_code code;
10457 tree arg0, arg1, type;
10458 tree tem;
10459 enum machine_mode operand_mode;
10460 int invert = 0;
10461 int unsignedp;
10462 rtx op0, op1;
10463 enum insn_code icode;
10464 rtx subtarget = target;
10465 rtx result, label;
10467 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10468 result at the end. We can't simply invert the test since it would
10469 have already been inverted if it were valid. This case occurs for
10470 some floating-point comparisons. */
10472 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10473 invert = 1, exp = TREE_OPERAND (exp, 0);
10475 arg0 = TREE_OPERAND (exp, 0);
10476 arg1 = TREE_OPERAND (exp, 1);
10478 /* Don't crash if the comparison was erroneous. */
10479 if (arg0 == error_mark_node || arg1 == error_mark_node)
10480 return const0_rtx;
10482 type = TREE_TYPE (arg0);
10483 operand_mode = TYPE_MODE (type);
10484 unsignedp = TREE_UNSIGNED (type);
10486 /* We won't bother with BLKmode store-flag operations because it would mean
10487 passing a lot of information to emit_store_flag. */
10488 if (operand_mode == BLKmode)
10489 return 0;
10491 /* We won't bother with store-flag operations involving function pointers
10492 when function pointers must be canonicalized before comparisons. */
10493 #ifdef HAVE_canonicalize_funcptr_for_compare
10494 if (HAVE_canonicalize_funcptr_for_compare
10495 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10496 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10497 == FUNCTION_TYPE))
10498 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10499 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10500 == FUNCTION_TYPE))))
10501 return 0;
10502 #endif
10504 STRIP_NOPS (arg0);
10505 STRIP_NOPS (arg1);
10507 /* Get the rtx comparison code to use. We know that EXP is a comparison
10508 operation of some type. Some comparisons against 1 and -1 can be
10509 converted to comparisons with zero. Do so here so that the tests
10510 below will be aware that we have a comparison with zero. These
10511 tests will not catch constants in the first operand, but constants
10512 are rarely passed as the first operand. */
10514 switch (TREE_CODE (exp))
10516 case EQ_EXPR:
10517 code = EQ;
10518 break;
10519 case NE_EXPR:
10520 code = NE;
10521 break;
10522 case LT_EXPR:
10523 if (integer_onep (arg1))
10524 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10525 else
10526 code = unsignedp ? LTU : LT;
10527 break;
10528 case LE_EXPR:
10529 if (! unsignedp && integer_all_onesp (arg1))
10530 arg1 = integer_zero_node, code = LT;
10531 else
10532 code = unsignedp ? LEU : LE;
10533 break;
10534 case GT_EXPR:
10535 if (! unsignedp && integer_all_onesp (arg1))
10536 arg1 = integer_zero_node, code = GE;
10537 else
10538 code = unsignedp ? GTU : GT;
10539 break;
10540 case GE_EXPR:
10541 if (integer_onep (arg1))
10542 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10543 else
10544 code = unsignedp ? GEU : GE;
10545 break;
10547 case UNORDERED_EXPR:
10548 code = UNORDERED;
10549 break;
10550 case ORDERED_EXPR:
10551 code = ORDERED;
10552 break;
10553 case UNLT_EXPR:
10554 code = UNLT;
10555 break;
10556 case UNLE_EXPR:
10557 code = UNLE;
10558 break;
10559 case UNGT_EXPR:
10560 code = UNGT;
10561 break;
10562 case UNGE_EXPR:
10563 code = UNGE;
10564 break;
10565 case UNEQ_EXPR:
10566 code = UNEQ;
10567 break;
10569 default:
10570 abort ();
10573 /* Put a constant second. */
10574 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10576 tem = arg0; arg0 = arg1; arg1 = tem;
10577 code = swap_condition (code);
10580 /* If this is an equality or inequality test of a single bit, we can
10581 do this by shifting the bit being tested to the low-order bit and
10582 masking the result with the constant 1. If the condition was EQ,
10583 we xor it with 1. This does not require an scc insn and is faster
10584 than an scc insn even if we have it. */
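/* For instance (values invented for illustration), `(x & 0x20) != 0' is
   computed as `(x >> 5) & 1', and `(x & 0x20) == 0' as
   `((x >> 5) & 1) ^ 1'; no conditional branch or scc instruction is
   needed.  */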
10586 if ((code == NE || code == EQ)
10587 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10588 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10590 tree inner = TREE_OPERAND (arg0, 0);
10591 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10592 int ops_unsignedp;
10594 /* If INNER is a right shift of a constant and it plus BITNUM does
10595 not overflow, adjust BITNUM and INNER. */
10597 if (TREE_CODE (inner) == RSHIFT_EXPR
10598 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10599 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10600 && bitnum < TYPE_PRECISION (type)
10601 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10602 bitnum - TYPE_PRECISION (type)))
10604 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10605 inner = TREE_OPERAND (inner, 0);
10608 /* If we are going to be able to omit the AND below, we must do our
10609 operations as unsigned. If we must use the AND, we have a choice.
10610 Normally unsigned is faster, but for some machines signed is. */
10611 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10612 #ifdef LOAD_EXTEND_OP
10613 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10614 #else
10615 : 1
10616 #endif
10617 );
10619 if (! get_subtarget (subtarget)
10620 || GET_MODE (subtarget) != operand_mode
10621 || ! safe_from_p (subtarget, inner, 1))
10622 subtarget = 0;
10624 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10626 if (bitnum != 0)
10627 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10628 size_int (bitnum), subtarget, ops_unsignedp);
10630 if (GET_MODE (op0) != mode)
10631 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10633 if ((code == EQ && ! invert) || (code == NE && invert))
10634 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10635 ops_unsignedp, OPTAB_LIB_WIDEN);
10637 /* Put the AND last so it can combine with more things. */
10638 if (bitnum != TYPE_PRECISION (type) - 1)
10639 op0 = expand_and (op0, const1_rtx, subtarget);
10641 return op0;
10644 /* Now see if we are likely to be able to do this. Return if not. */
10645 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10646 return 0;
10648 icode = setcc_gen_code[(int) code];
10649 if (icode == CODE_FOR_nothing
10650 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10652 /* We can only do this if it is one of the special cases that
10653 can be handled without an scc insn. */
10654 if ((code == LT && integer_zerop (arg1))
10655 || (! only_cheap && code == GE && integer_zerop (arg1)))
10656 ;
10657 else if (BRANCH_COST >= 0
10658 && ! only_cheap && (code == NE || code == EQ)
10659 && TREE_CODE (type) != REAL_TYPE
10660 && ((abs_optab->handlers[(int) operand_mode].insn_code
10661 != CODE_FOR_nothing)
10662 || (ffs_optab->handlers[(int) operand_mode].insn_code
10663 != CODE_FOR_nothing)))
10664 ;
10665 else
10666 return 0;
10669 if (! get_subtarget (target)
10670 || GET_MODE (subtarget) != operand_mode
10671 || ! safe_from_p (subtarget, arg1, 1))
10672 subtarget = 0;
10674 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10675 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10677 if (target == 0)
10678 target = gen_reg_rtx (mode);
10680 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10681 because, if emit_store_flag does anything, it will succeed and
10682 OP0 and OP1 will not be used subsequently. */
10684 result = emit_store_flag (target, code,
10685 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10686 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10687 operand_mode, unsignedp, 1);
10689 if (result)
10691 if (invert)
10692 result = expand_binop (mode, xor_optab, result, const1_rtx,
10693 result, 0, OPTAB_LIB_WIDEN);
10694 return result;
10697 /* If this failed, we have to do this with set/compare/jump/set code. */
10698 if (GET_CODE (target) != REG
10699 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10700 target = gen_reg_rtx (GET_MODE (target));
10702 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10703 result = compare_from_rtx (op0, op1, code, unsignedp,
10704 operand_mode, NULL_RTX, 0);
10705 if (GET_CODE (result) == CONST_INT)
10706 return (((result == const0_rtx && ! invert)
10707 || (result != const0_rtx && invert))
10708 ? const0_rtx : const1_rtx);
10710 label = gen_label_rtx ();
10711 if (bcc_gen_fctn[(int) code] == 0)
10712 abort ();
10714 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10715 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10716 emit_label (label);
10718 return target;
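/* Illustrative shape (labels invented) of the set/compare/jump/set fallback
   emitted just above when INVERT is not set:

	target = 1;
	compare op0, op1;
	branch-if-CODE to label;
	target = 0;
     label:

   With INVERT set, the two constants are exchanged.  */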
10722 /* Stubs in case we haven't got a casesi insn. */
10723 #ifndef HAVE_casesi
10724 # define HAVE_casesi 0
10725 # define gen_casesi(a, b, c, d, e) (0)
10726 # define CODE_FOR_casesi CODE_FOR_nothing
10727 #endif
10729 /* If the machine does not have a case insn that compares the bounds,
10730 this means extra overhead for dispatch tables, which raises the
10731 threshold for using them. */
10732 #ifndef CASE_VALUES_THRESHOLD
10733 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10734 #endif /* CASE_VALUES_THRESHOLD */
10736 unsigned int
10737 case_values_threshold ()
10739 return CASE_VALUES_THRESHOLD;
10742 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10743 0 otherwise (i.e. if there is no casesi instruction). */
10744 int
10745 try_casesi (index_type, index_expr, minval, range,
10746 table_label, default_label)
10747 tree index_type, index_expr, minval, range;
10748 rtx table_label ATTRIBUTE_UNUSED;
10749 rtx default_label;
10751 enum machine_mode index_mode = SImode;
10752 int index_bits = GET_MODE_BITSIZE (index_mode);
10753 rtx op1, op2, index;
10754 enum machine_mode op_mode;
10756 if (! HAVE_casesi)
10757 return 0;
10759 /* Convert the index to SImode. */
10760 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10762 enum machine_mode omode = TYPE_MODE (index_type);
10763 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10765 /* We must handle the endpoints in the original mode. */
10766 index_expr = build (MINUS_EXPR, index_type,
10767 index_expr, minval);
10768 minval = integer_zero_node;
10769 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10770 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10771 omode, 1, 0, default_label);
10772 /* Now we can safely truncate. */
10773 index = convert_to_mode (index_mode, index, 0);
10775 else
10777 if (TYPE_MODE (index_type) != index_mode)
10779 index_expr = convert (type_for_size (index_bits, 0),
10780 index_expr);
10781 index_type = TREE_TYPE (index_expr);
10784 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10786 emit_queue ();
10787 index = protect_from_queue (index, 0);
10788 do_pending_stack_adjust ();
10790 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10791 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10792 (index, op_mode))
10793 index = copy_to_mode_reg (op_mode, index);
10795 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10797 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10798 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10799 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10800 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10801 (op1, op_mode))
10802 op1 = copy_to_mode_reg (op_mode, op1);
10804 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10806 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10807 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10808 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10809 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10810 (op2, op_mode))
10811 op2 = copy_to_mode_reg (op_mode, op2);
10813 emit_jump_insn (gen_casesi (index, op1, op2,
10814 table_label, default_label));
10815 return 1;
10816 }
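/* Very roughly, for

       switch (i) { case 3: ...  case 10: ...  default: ... }

   a successful expansion hands the target's casesi pattern
   index = i, op1 = 3 (the lowest case value) and op2 = 7 (the span of
   the case values), and the pattern is expected to do the equivalent of

       if ((unsigned) (i - 3) > 7) goto default_label;
       goto *table_label[(unsigned) (i - 3)];

   How the bounds check and the indirect jump are actually done is
   entirely up to the machine description.  */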
10818 /* Attempt to generate a tablejump instruction; same concept. */
10819 #ifndef HAVE_tablejump
10820 #define HAVE_tablejump 0
10821 #define gen_tablejump(x, y) (0)
10822 #endif
10824 /* Subroutine of try_tablejump, below.
10826 INDEX is the value being switched on, with the lowest value
10827 in the table already subtracted.
10828 MODE is its expected mode (needed if INDEX is constant).
10829 RANGE is the highest valid index into the jump table (the number of entries minus one).
10830 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10832 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10833 index value is out of range. */
10835 static void
10836 do_tablejump (index, mode, range, table_label, default_label)
10837 rtx index, range, table_label, default_label;
10838 enum machine_mode mode;
10839 {
10840 register rtx temp, vector;
10842 /* Do an unsigned comparison (in the proper mode) between the index
10843 expression and the value which represents the length of the range.
10844 Since we just finished subtracting the lower bound of the range
10845 from the index expression, this comparison allows us to simultaneously
10846 check that the original index expression value is both greater than
10847 or equal to the minimum value of the range and less than or equal to
10848 the maximum value of the range. */
10850 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10851 0, default_label);
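/* A concrete example: suppose the case values run from 5 to 12, so
   RANGE is 7.  An original index of 3 arrives here as 3 - 5, i.e.
   (unsigned) -2, which is greater than 7, so the GTU branch above goes
   to DEFAULT_LABEL; the in-range values 5 through 12 arrive as 0
   through 7 and fall through to the table jump.  */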
10853 /* If index is in range, it must fit in Pmode.
10854 Convert to Pmode so we can index with it. */
10855 if (mode != Pmode)
10856 index = convert_to_mode (Pmode, index, 1);
10858 /* Don't let a MEM slip through, because then the INDEX that comes
10859 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10860 and break_out_memory_refs will go to work on it and mess it up. */
10861 #ifdef PIC_CASE_VECTOR_ADDRESS
10862 if (flag_pic && GET_CODE (index) != REG)
10863 index = copy_to_mode_reg (Pmode, index);
10864 #endif
10866 /* If flag_force_addr were to affect this address
10867 it could interfere with the tricky assumptions made
10868 about addresses that contain label-refs,
10869 which may be valid only very near the tablejump itself. */
10870 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10871 GET_MODE_SIZE, because this indicates how large insns are. The other
10872 uses should all be Pmode, because they are addresses. This code
10873 could fail if addresses and insns are not the same size. */
10874 index = gen_rtx_PLUS (Pmode,
10875 gen_rtx_MULT (Pmode, index,
10876 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10877 gen_rtx_LABEL_REF (Pmode, table_label));
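/* At this point INDEX has the Pmode form

       (plus (mult index (const_int entry_size)) (label_ref table_label))

   i.e. table_label + index * entry_size, where entry_size is
   GET_MODE_SIZE (CASE_VECTOR_MODE), the width in bytes of one element
   of the dispatch table (4 for an SImode case vector).  */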
10878 #ifdef PIC_CASE_VECTOR_ADDRESS
10879 if (flag_pic)
10880 index = PIC_CASE_VECTOR_ADDRESS (index);
10881 else
10882 #endif
10883 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10884 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10885 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10886 RTX_UNCHANGING_P (vector) = 1;
10887 convert_move (temp, vector, 0);
10889 emit_jump_insn (gen_tablejump (temp, table_label));
10891 /* If we are generating PIC code or if the table is PC-relative, the
10892 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10893 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10894 emit_barrier ();
10895 }
10897 int
10898 try_tablejump (index_type, index_expr, minval, range,
10899 table_label, default_label)
10900 tree index_type, index_expr, minval, range;
10901 rtx table_label, default_label;
10902 {
10903 rtx index;
10905 if (! HAVE_tablejump)
10906 return 0;
10908 index_expr = fold (build (MINUS_EXPR, index_type,
10909 convert (index_type, index_expr),
10910 convert (index_type, minval)));
10911 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10912 emit_queue ();
10913 index = protect_from_queue (index, 0);
10914 do_pending_stack_adjust ();
10916 do_tablejump (index, TYPE_MODE (index_type),
10917 convert_modes (TYPE_MODE (index_type),
10918 TYPE_MODE (TREE_TYPE (range)),
10919 expand_expr (range, NULL_RTX,
10920 VOIDmode, 0),
10921 TREE_UNSIGNED (TREE_TYPE (range))),
10922 table_label, default_label);
10923 return 1;
10924 }
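/* A simplified sketch of how the two entry points above are meant to be
   used by a caller such as the switch-statement expander (hypothetical;
   error handling and the many special cases are omitted):

       if (! try_casesi (index_type, index_expr, minval, range,
                         table_label, default_label)
           && ! try_tablejump (index_type, index_expr, minval, range,
                               table_label, default_label))
         ... fall back to an explicit chain of compares and branches ...
 */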