gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "intl.h"
45 #include "tm_p.h"
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
53 #ifdef PUSH_ROUNDING
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first. */
57 #endif
59 #endif
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
74 /* Hook called by safe_from_p for language-specific tree codes. It is
75 up to the language front-end to install a hook if it has any such
 76 codes that safe_from_p needs to know about. Since safe_from_p will
77 recursively explore the TREE_OPERANDs of an expression, this hook
78 should not reexamine those pieces. This routine may recursively
79 call safe_from_p; it should always pass `0' as the TOP_P
80 parameter. */
81 int (*lang_safe_from_p) PARAMS ((rtx, tree));
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
 91 /* Don't check memory usage, since code is being emitted to check memory
92 usage. Used when current_function_check_memory_usage is true, to avoid
93 infinite recursion. */
94 static int in_check_memory_usage;
96 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
97 static tree placeholder_list = 0;
99 /* This structure is used by move_by_pieces to describe the move to
100 be performed. */
 101 struct move_by_pieces
 102 {
103 rtx to;
104 rtx to_addr;
105 int autinc_to;
106 int explicit_inc_to;
107 rtx from;
108 rtx from_addr;
109 int autinc_from;
110 int explicit_inc_from;
111 unsigned HOST_WIDE_INT len;
112 HOST_WIDE_INT offset;
 113 int reverse;
 114 };
116 /* This structure is used by store_by_pieces to describe the clear to
117 be performed. */
 119 struct store_by_pieces
 120 {
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
128 PTR constfundata;
 129 int reverse;
 130 };
132 extern struct obstack permanent_obstack;
134 static rtx get_push_address PARAMS ((int));
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
138 PARAMS ((unsigned HOST_WIDE_INT,
139 unsigned int));
140 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
141 struct move_by_pieces *));
142 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
145 unsigned int));
146 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
147 unsigned int));
148 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
149 enum machine_mode,
150 struct store_by_pieces *));
151 static rtx get_subtarget PARAMS ((rtx));
152 static int is_zeros_p PARAMS ((tree));
153 static int mostly_zeros_p PARAMS ((tree));
154 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, unsigned int, int,
157 int));
158 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
159 HOST_WIDE_INT));
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int,
163 unsigned int, HOST_WIDE_INT, int));
164 static enum memory_use_mode
165 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
166 static tree save_noncopied_parts PARAMS ((tree, tree));
167 static tree init_noncopied_parts PARAMS ((tree, tree));
168 static int fixed_type_p PARAMS ((tree));
169 static rtx var_rtx PARAMS ((tree));
170 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
171 static rtx expand_increment PARAMS ((tree, int, int));
172 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
173 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
174 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
175 rtx, rtx));
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
177 #ifdef PUSH_ROUNDING
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
179 #endif
181 /* Record for each mode whether we can move a register directly to or
182 from an object of that mode in memory. If we can't, we won't try
183 to use that mode directly when accessing a field of that mode. */
185 static char direct_load[NUM_MACHINE_MODES];
186 static char direct_store[NUM_MACHINE_MODES];
188 /* If a memory-to-memory move would take MOVE_RATIO or more simple
189 move-instruction sequences, we will do a movstr or libcall instead. */
191 #ifndef MOVE_RATIO
192 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
193 #define MOVE_RATIO 2
194 #else
195 /* If we are optimizing for space (-Os), cut down the default move ratio. */
196 #define MOVE_RATIO (optimize_size ? 3 : 15)
197 #endif
198 #endif
200 /* This macro is used to determine whether move_by_pieces should be called
201 to perform a structure copy. */
202 #ifndef MOVE_BY_PIECES_P
203 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
204 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
205 #endif
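/* Illustrative note (not part of the original file): MOVE_RATIO and
   MOVE_BY_PIECES_P are guarded with #ifndef precisely so that target
   headers can override them.  A hypothetical port with cheap word loads
   but an expensive memcpy call might, for example, raise the cutoff in
   its target.h:

       #define MOVE_RATIO (optimize_size ? 3 : 8)

   move_by_pieces_ninsns (SIZE, ALIGN) is then compared against that
   value to choose between inline moves and a movstr pattern or libcall.  */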
207 /* This array records the insn_code of insns to perform block moves. */
208 enum insn_code movstr_optab[NUM_MACHINE_MODES];
210 /* This array records the insn_code of insns to perform block clears. */
211 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
213 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
215 #ifndef SLOW_UNALIGNED_ACCESS
216 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
217 #endif
219 /* This is run once per compilation to set up which modes can be used
220 directly in memory and to initialize the block move optab. */
222 void
223 init_expr_once ()
225 rtx insn, pat;
226 enum machine_mode mode;
227 int num_clobbers;
228 rtx mem, mem1;
230 start_sequence ();
232 /* Try indexing by frame ptr and try by stack ptr.
233 It is known that on the Convex the stack ptr isn't a valid index.
234 With luck, one or the other is valid on any machine. */
235 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
236 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
238 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
239 pat = PATTERN (insn);
241 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
242 mode = (enum machine_mode) ((int) mode + 1))
244 int regno;
245 rtx reg;
247 direct_load[(int) mode] = direct_store[(int) mode] = 0;
248 PUT_MODE (mem, mode);
249 PUT_MODE (mem1, mode);
251 /* See if there is some register that can be used in this mode and
252 directly loaded or stored from memory. */
254 if (mode != VOIDmode && mode != BLKmode)
255 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
256 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
257 regno++)
259 if (! HARD_REGNO_MODE_OK (regno, mode))
260 continue;
262 reg = gen_rtx_REG (mode, regno);
264 SET_SRC (pat) = mem;
265 SET_DEST (pat) = reg;
266 if (recog (pat, insn, &num_clobbers) >= 0)
267 direct_load[(int) mode] = 1;
269 SET_SRC (pat) = mem1;
270 SET_DEST (pat) = reg;
271 if (recog (pat, insn, &num_clobbers) >= 0)
272 direct_load[(int) mode] = 1;
274 SET_SRC (pat) = reg;
275 SET_DEST (pat) = mem;
276 if (recog (pat, insn, &num_clobbers) >= 0)
277 direct_store[(int) mode] = 1;
279 SET_SRC (pat) = reg;
280 SET_DEST (pat) = mem1;
281 if (recog (pat, insn, &num_clobbers) >= 0)
282 direct_store[(int) mode] = 1;
286 end_sequence ();
289 /* This is run at the start of compiling a function. */
291 void
292 init_expr ()
294 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
296 pending_chain = 0;
297 pending_stack_adjust = 0;
298 stack_pointer_delta = 0;
299 inhibit_defer_pop = 0;
300 saveregs_value = 0;
301 apply_args_value = 0;
302 forced_labels = 0;
305 void
306 mark_expr_status (p)
307 struct expr_status *p;
309 if (p == NULL)
310 return;
312 ggc_mark_rtx (p->x_saveregs_value);
313 ggc_mark_rtx (p->x_apply_args_value);
314 ggc_mark_rtx (p->x_forced_labels);
317 void
318 free_expr_status (f)
319 struct function *f;
321 free (f->expr);
322 f->expr = NULL;
325 /* Small sanity check that the queue is empty at the end of a function. */
327 void
328 finish_expr_for_function ()
330 if (pending_chain)
331 abort ();
334 /* Manage the queue of increment instructions to be output
335 for POSTINCREMENT_EXPR expressions, etc. */
337 /* Queue up to increment (or change) VAR later. BODY says how:
338 BODY should be the same thing you would pass to emit_insn
339 to increment right away. It will go to emit_insn later on.
341 The value is a QUEUED expression to be used in place of VAR
342 where you want to guarantee the pre-incrementation value of VAR. */
344 static rtx
345 enqueue_insn (var, body)
346 rtx var, body;
348 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
349 body, pending_chain);
350 return pending_chain;
353 /* Use protect_from_queue to convert a QUEUED expression
354 into something that you can put immediately into an instruction.
355 If the queued incrementation has not happened yet,
356 protect_from_queue returns the variable itself.
357 If the incrementation has happened, protect_from_queue returns a temp
358 that contains a copy of the old value of the variable.
360 Any time an rtx which might possibly be a QUEUED is to be put
361 into an instruction, it must be passed through protect_from_queue first.
362 QUEUED expressions are not meaningful in instructions.
364 Do not pass a value through protect_from_queue and then hold
365 on to it for a while before putting it in an instruction!
366 If the queue is flushed in between, incorrect code will result. */
 368 rtx
 369 protect_from_queue (x, modify)
370 register rtx x;
371 int modify;
373 register RTX_CODE code = GET_CODE (x);
375 #if 0 /* A QUEUED can hang around after the queue is forced out. */
376 /* Shortcut for most common case. */
377 if (pending_chain == 0)
378 return x;
379 #endif
381 if (code != QUEUED)
383 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
384 use of autoincrement. Make a copy of the contents of the memory
385 location rather than a copy of the address, but not if the value is
386 of mode BLKmode. Don't modify X in place since it might be
387 shared. */
388 if (code == MEM && GET_MODE (x) != BLKmode
389 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
391 rtx y = XEXP (x, 0);
392 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
394 if (QUEUED_INSN (y))
396 rtx temp = gen_reg_rtx (GET_MODE (x));
398 emit_insn_before (gen_move_insn (temp, new),
399 QUEUED_INSN (y));
400 return temp;
403 /* Copy the address into a pseudo, so that the returned value
404 remains correct across calls to emit_queue. */
405 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
408 /* Otherwise, recursively protect the subexpressions of all
409 the kinds of rtx's that can contain a QUEUED. */
410 if (code == MEM)
412 rtx tem = protect_from_queue (XEXP (x, 0), 0);
413 if (tem != XEXP (x, 0))
415 x = copy_rtx (x);
416 XEXP (x, 0) = tem;
419 else if (code == PLUS || code == MULT)
421 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
422 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
423 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
425 x = copy_rtx (x);
426 XEXP (x, 0) = new0;
427 XEXP (x, 1) = new1;
430 return x;
432 /* If the increment has not happened, use the variable itself. Copy it
433 into a new pseudo so that the value remains correct across calls to
434 emit_queue. */
435 if (QUEUED_INSN (x) == 0)
436 return copy_to_reg (QUEUED_VAR (x));
437 /* If the increment has happened and a pre-increment copy exists,
438 use that copy. */
439 if (QUEUED_COPY (x) != 0)
440 return QUEUED_COPY (x);
441 /* The increment has happened but we haven't set up a pre-increment copy.
442 Set one up now, and use it. */
443 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
444 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
445 QUEUED_INSN (x));
446 return QUEUED_COPY (x);
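/* Illustrative sketch of the queueing protocol described above; OP and the
   constant are placeholders, not code taken from this file:

       rtx q = enqueue_insn (op, gen_move_insn (op, plus_constant (op, 1)));
           ... Q is a QUEUED rtx standing for the pre-increment value of OP
       rtx val = protect_from_queue (q, 0);
           ... VAL may be placed in an insn: it is OP itself if the queued
           increment has not been emitted yet, or a saved copy if it has
       emit_queue ();
           ... flush the queue, actually emitting the increment

   Any rtx that might contain a QUEUED must pass through protect_from_queue
   before being placed in an insn, and the result must not be held across a
   call to emit_queue, as warned above.  */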
449 /* Return nonzero if X contains a QUEUED expression:
450 if it contains anything that will be altered by a queued increment.
451 We handle only combinations of MEM, PLUS, MINUS and MULT operators
452 since memory addresses generally contain only those. */
 454 int
 455 queued_subexp_p (x)
456 rtx x;
458 register enum rtx_code code = GET_CODE (x);
459 switch (code)
461 case QUEUED:
462 return 1;
463 case MEM:
464 return queued_subexp_p (XEXP (x, 0));
465 case MULT:
466 case PLUS:
467 case MINUS:
468 return (queued_subexp_p (XEXP (x, 0))
469 || queued_subexp_p (XEXP (x, 1)));
470 default:
471 return 0;
475 /* Perform all the pending incrementations. */
477 void
478 emit_queue ()
480 register rtx p;
481 while ((p = pending_chain))
483 rtx body = QUEUED_BODY (p);
485 if (GET_CODE (body) == SEQUENCE)
487 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
488 emit_insn (QUEUED_BODY (p));
490 else
491 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
492 pending_chain = QUEUED_NEXT (p);
496 /* Copy data from FROM to TO, where the machine modes are not the same.
497 Both modes may be integer, or both may be floating.
498 UNSIGNEDP should be nonzero if FROM is an unsigned type.
499 This causes zero-extension instead of sign-extension. */
501 void
502 convert_move (to, from, unsignedp)
503 register rtx to, from;
504 int unsignedp;
506 enum machine_mode to_mode = GET_MODE (to);
507 enum machine_mode from_mode = GET_MODE (from);
508 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
509 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
510 enum insn_code code;
511 rtx libcall;
513 /* rtx code for making an equivalent value. */
514 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
516 to = protect_from_queue (to, 1);
517 from = protect_from_queue (from, 0);
519 if (to_real != from_real)
520 abort ();
522 /* If FROM is a SUBREG that indicates that we have already done at least
523 the required extension, strip it. We don't handle such SUBREGs as
524 TO here. */
526 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
527 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
528 >= GET_MODE_SIZE (to_mode))
529 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
530 from = gen_lowpart (to_mode, from), from_mode = to_mode;
532 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
533 abort ();
535 if (to_mode == from_mode
536 || (from_mode == VOIDmode && CONSTANT_P (from)))
538 emit_move_insn (to, from);
539 return;
542 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
544 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
545 abort ();
547 if (VECTOR_MODE_P (to_mode))
548 from = gen_rtx_SUBREG (to_mode, from, 0);
549 else
550 to = gen_rtx_SUBREG (from_mode, to, 0);
552 emit_move_insn (to, from);
553 return;
556 if (to_real != from_real)
557 abort ();
559 if (to_real)
561 rtx value, insns;
563 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
565 /* Try converting directly if the insn is supported. */
566 if ((code = can_extend_p (to_mode, from_mode, 0))
567 != CODE_FOR_nothing)
569 emit_unop_insn (code, to, from, UNKNOWN);
570 return;
574 #ifdef HAVE_trunchfqf2
575 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
577 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
578 return;
580 #endif
581 #ifdef HAVE_trunctqfqf2
582 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
584 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
585 return;
587 #endif
588 #ifdef HAVE_truncsfqf2
589 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
591 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
592 return;
594 #endif
595 #ifdef HAVE_truncdfqf2
596 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
598 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
599 return;
601 #endif
602 #ifdef HAVE_truncxfqf2
603 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
605 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
606 return;
608 #endif
609 #ifdef HAVE_trunctfqf2
610 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
612 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
613 return;
615 #endif
617 #ifdef HAVE_trunctqfhf2
618 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
620 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
621 return;
623 #endif
624 #ifdef HAVE_truncsfhf2
625 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
627 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
628 return;
630 #endif
631 #ifdef HAVE_truncdfhf2
632 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
634 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
635 return;
637 #endif
638 #ifdef HAVE_truncxfhf2
639 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
641 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
642 return;
644 #endif
645 #ifdef HAVE_trunctfhf2
646 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
648 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
649 return;
651 #endif
653 #ifdef HAVE_truncsftqf2
654 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
656 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
657 return;
659 #endif
660 #ifdef HAVE_truncdftqf2
661 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
663 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
664 return;
666 #endif
667 #ifdef HAVE_truncxftqf2
668 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
670 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
671 return;
673 #endif
674 #ifdef HAVE_trunctftqf2
675 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
677 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
678 return;
680 #endif
682 #ifdef HAVE_truncdfsf2
683 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
685 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
686 return;
688 #endif
689 #ifdef HAVE_truncxfsf2
690 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
692 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
693 return;
695 #endif
696 #ifdef HAVE_trunctfsf2
697 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
699 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
700 return;
702 #endif
703 #ifdef HAVE_truncxfdf2
704 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
706 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
707 return;
709 #endif
710 #ifdef HAVE_trunctfdf2
711 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
713 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
714 return;
716 #endif
718 libcall = (rtx) 0;
719 switch (from_mode)
721 case SFmode:
722 switch (to_mode)
724 case DFmode:
725 libcall = extendsfdf2_libfunc;
726 break;
728 case XFmode:
729 libcall = extendsfxf2_libfunc;
730 break;
732 case TFmode:
733 libcall = extendsftf2_libfunc;
734 break;
736 default:
737 break;
739 break;
741 case DFmode:
742 switch (to_mode)
744 case SFmode:
745 libcall = truncdfsf2_libfunc;
746 break;
748 case XFmode:
749 libcall = extenddfxf2_libfunc;
750 break;
752 case TFmode:
753 libcall = extenddftf2_libfunc;
754 break;
756 default:
757 break;
759 break;
761 case XFmode:
762 switch (to_mode)
764 case SFmode:
765 libcall = truncxfsf2_libfunc;
766 break;
768 case DFmode:
769 libcall = truncxfdf2_libfunc;
770 break;
772 default:
773 break;
775 break;
777 case TFmode:
778 switch (to_mode)
780 case SFmode:
781 libcall = trunctfsf2_libfunc;
782 break;
784 case DFmode:
785 libcall = trunctfdf2_libfunc;
786 break;
788 default:
789 break;
791 break;
793 default:
794 break;
797 if (libcall == (rtx) 0)
798 /* This conversion is not implemented yet. */
799 abort ();
801 start_sequence ();
802 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
803 1, from, from_mode);
804 insns = get_insns ();
805 end_sequence ();
806 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
807 from));
808 return;
811 /* Now both modes are integers. */
813 /* Handle expanding beyond a word. */
814 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
815 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
817 rtx insns;
818 rtx lowpart;
819 rtx fill_value;
820 rtx lowfrom;
821 int i;
822 enum machine_mode lowpart_mode;
823 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
825 /* Try converting directly if the insn is supported. */
826 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
827 != CODE_FOR_nothing)
829 /* If FROM is a SUBREG, put it into a register. Do this
830 so that we always generate the same set of insns for
831 better cse'ing; if an intermediate assignment occurred,
832 we won't be doing the operation directly on the SUBREG. */
833 if (optimize > 0 && GET_CODE (from) == SUBREG)
834 from = force_reg (from_mode, from);
835 emit_unop_insn (code, to, from, equiv_code);
836 return;
838 /* Next, try converting via full word. */
839 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
840 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
841 != CODE_FOR_nothing))
843 if (GET_CODE (to) == REG)
844 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
845 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
846 emit_unop_insn (code, to,
847 gen_lowpart (word_mode, to), equiv_code);
848 return;
851 /* No special multiword conversion insn; do it by hand. */
852 start_sequence ();
854 /* Since we will turn this into a no conflict block, we must ensure
855 that the source does not overlap the target. */
857 if (reg_overlap_mentioned_p (to, from))
858 from = force_reg (from_mode, from);
860 /* Get a copy of FROM widened to a word, if necessary. */
861 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
862 lowpart_mode = word_mode;
863 else
864 lowpart_mode = from_mode;
866 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
868 lowpart = gen_lowpart (lowpart_mode, to);
869 emit_move_insn (lowpart, lowfrom);
871 /* Compute the value to put in each remaining word. */
872 if (unsignedp)
873 fill_value = const0_rtx;
874 else
876 #ifdef HAVE_slt
877 if (HAVE_slt
878 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
879 && STORE_FLAG_VALUE == -1)
881 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
882 lowpart_mode, 0, 0);
883 fill_value = gen_reg_rtx (word_mode);
884 emit_insn (gen_slt (fill_value));
886 else
887 #endif
889 fill_value
890 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
891 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
892 NULL_RTX, 0);
893 fill_value = convert_to_mode (word_mode, fill_value, 1);
897 /* Fill the remaining words. */
898 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
900 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
901 rtx subword = operand_subword (to, index, 1, to_mode);
903 if (subword == 0)
904 abort ();
906 if (fill_value != subword)
907 emit_move_insn (subword, fill_value);
910 insns = get_insns ();
911 end_sequence ();
913 emit_no_conflict_block (insns, to, from, NULL_RTX,
914 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
915 return;
918 /* Truncating multi-word to a word or less. */
919 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
920 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
922 if (!((GET_CODE (from) == MEM
923 && ! MEM_VOLATILE_P (from)
924 && direct_load[(int) to_mode]
925 && ! mode_dependent_address_p (XEXP (from, 0)))
926 || GET_CODE (from) == REG
927 || GET_CODE (from) == SUBREG))
928 from = force_reg (from_mode, from);
929 convert_move (to, gen_lowpart (word_mode, from), 0);
930 return;
933 /* Handle pointer conversion. */ /* SPEE 900220. */
934 if (to_mode == PQImode)
936 if (from_mode != QImode)
937 from = convert_to_mode (QImode, from, unsignedp);
939 #ifdef HAVE_truncqipqi2
940 if (HAVE_truncqipqi2)
942 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
943 return;
945 #endif /* HAVE_truncqipqi2 */
946 abort ();
949 if (from_mode == PQImode)
951 if (to_mode != QImode)
953 from = convert_to_mode (QImode, from, unsignedp);
954 from_mode = QImode;
956 else
958 #ifdef HAVE_extendpqiqi2
959 if (HAVE_extendpqiqi2)
961 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
962 return;
964 #endif /* HAVE_extendpqiqi2 */
965 abort ();
969 if (to_mode == PSImode)
971 if (from_mode != SImode)
972 from = convert_to_mode (SImode, from, unsignedp);
974 #ifdef HAVE_truncsipsi2
975 if (HAVE_truncsipsi2)
977 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
978 return;
980 #endif /* HAVE_truncsipsi2 */
981 abort ();
984 if (from_mode == PSImode)
986 if (to_mode != SImode)
988 from = convert_to_mode (SImode, from, unsignedp);
989 from_mode = SImode;
991 else
993 #ifdef HAVE_extendpsisi2
994 if (! unsignedp && HAVE_extendpsisi2)
996 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
997 return;
999 #endif /* HAVE_extendpsisi2 */
1000 #ifdef HAVE_zero_extendpsisi2
1001 if (unsignedp && HAVE_zero_extendpsisi2)
1003 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1004 return;
1006 #endif /* HAVE_zero_extendpsisi2 */
1007 abort ();
1011 if (to_mode == PDImode)
1013 if (from_mode != DImode)
1014 from = convert_to_mode (DImode, from, unsignedp);
1016 #ifdef HAVE_truncdipdi2
1017 if (HAVE_truncdipdi2)
1019 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1020 return;
1022 #endif /* HAVE_truncdipdi2 */
1023 abort ();
1026 if (from_mode == PDImode)
1028 if (to_mode != DImode)
1030 from = convert_to_mode (DImode, from, unsignedp);
1031 from_mode = DImode;
1033 else
1035 #ifdef HAVE_extendpdidi2
1036 if (HAVE_extendpdidi2)
1038 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1039 return;
1041 #endif /* HAVE_extendpdidi2 */
1042 abort ();
1046 /* Now follow all the conversions between integers
1047 no more than a word long. */
1049 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1050 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1051 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1052 GET_MODE_BITSIZE (from_mode)))
1054 if (!((GET_CODE (from) == MEM
1055 && ! MEM_VOLATILE_P (from)
1056 && direct_load[(int) to_mode]
1057 && ! mode_dependent_address_p (XEXP (from, 0)))
1058 || GET_CODE (from) == REG
1059 || GET_CODE (from) == SUBREG))
1060 from = force_reg (from_mode, from);
1061 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1062 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1063 from = copy_to_reg (from);
1064 emit_move_insn (to, gen_lowpart (to_mode, from));
1065 return;
1068 /* Handle extension. */
1069 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1071 /* Convert directly if that works. */
1072 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1073 != CODE_FOR_nothing)
1075 emit_unop_insn (code, to, from, equiv_code);
1076 return;
1078 else
1080 enum machine_mode intermediate;
1081 rtx tmp;
1082 tree shift_amount;
1084 /* Search for a mode to convert via. */
1085 for (intermediate = from_mode; intermediate != VOIDmode;
1086 intermediate = GET_MODE_WIDER_MODE (intermediate))
1087 if (((can_extend_p (to_mode, intermediate, unsignedp)
1088 != CODE_FOR_nothing)
1089 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1090 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1091 GET_MODE_BITSIZE (intermediate))))
1092 && (can_extend_p (intermediate, from_mode, unsignedp)
1093 != CODE_FOR_nothing))
1095 convert_move (to, convert_to_mode (intermediate, from,
1096 unsignedp), unsignedp);
1097 return;
1100 /* No suitable intermediate mode.
1101 Generate what we need with shifts. */
1102 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1103 - GET_MODE_BITSIZE (from_mode), 0);
1104 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1105 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1106 to, unsignedp);
1107 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1108 to, unsignedp);
1109 if (tmp != to)
1110 emit_move_insn (to, tmp);
1111 return;
1115 /* Support special truncate insns for certain modes. */
1117 if (from_mode == DImode && to_mode == SImode)
1119 #ifdef HAVE_truncdisi2
1120 if (HAVE_truncdisi2)
1122 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1123 return;
1125 #endif
1126 convert_move (to, force_reg (from_mode, from), unsignedp);
1127 return;
1130 if (from_mode == DImode && to_mode == HImode)
1132 #ifdef HAVE_truncdihi2
1133 if (HAVE_truncdihi2)
1135 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1136 return;
1138 #endif
1139 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 return;
1143 if (from_mode == DImode && to_mode == QImode)
1145 #ifdef HAVE_truncdiqi2
1146 if (HAVE_truncdiqi2)
1148 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1149 return;
1151 #endif
1152 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 return;
1156 if (from_mode == SImode && to_mode == HImode)
1158 #ifdef HAVE_truncsihi2
1159 if (HAVE_truncsihi2)
1161 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1162 return;
1164 #endif
1165 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 return;
1169 if (from_mode == SImode && to_mode == QImode)
1171 #ifdef HAVE_truncsiqi2
1172 if (HAVE_truncsiqi2)
1174 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1175 return;
1177 #endif
1178 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 return;
1182 if (from_mode == HImode && to_mode == QImode)
1184 #ifdef HAVE_trunchiqi2
1185 if (HAVE_trunchiqi2)
1187 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1188 return;
1190 #endif
1191 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 return;
1195 if (from_mode == TImode && to_mode == DImode)
1197 #ifdef HAVE_trunctidi2
1198 if (HAVE_trunctidi2)
1200 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1201 return;
1203 #endif
1204 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 return;
1208 if (from_mode == TImode && to_mode == SImode)
1210 #ifdef HAVE_trunctisi2
1211 if (HAVE_trunctisi2)
1213 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1214 return;
1216 #endif
1217 convert_move (to, force_reg (from_mode, from), unsignedp);
1218 return;
1221 if (from_mode == TImode && to_mode == HImode)
1223 #ifdef HAVE_trunctihi2
1224 if (HAVE_trunctihi2)
1226 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1227 return;
1229 #endif
1230 convert_move (to, force_reg (from_mode, from), unsignedp);
1231 return;
1234 if (from_mode == TImode && to_mode == QImode)
1236 #ifdef HAVE_trunctiqi2
1237 if (HAVE_trunctiqi2)
1239 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1240 return;
1242 #endif
1243 convert_move (to, force_reg (from_mode, from), unsignedp);
1244 return;
1247 /* Handle truncation of volatile memrefs, and so on;
1248 the things that couldn't be truncated directly,
1249 and for which there was no special instruction. */
1250 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1252 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1253 emit_move_insn (to, temp);
1254 return;
1257 /* Mode combination is not recognized. */
1258 abort ();
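/* Illustrative sketch (not taken from this file): convert_move is the
   in-place form; TO must already be an rtx of the desired mode.

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 0);

   With UNSIGNEDP == 0 the value of SRC is sign-extended into DST.  Use
   convert_to_mode or convert_modes below when a fresh rtx in the new mode
   is wanted instead of storing into an existing one.  */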
1261 /* Return an rtx for a value that would result
1262 from converting X to mode MODE.
1263 Both X and MODE may be floating, or both integer.
1264 UNSIGNEDP is nonzero if X is an unsigned value.
1265 This can be done by referring to a part of X in place
1266 or by copying to a new temporary with conversion.
1268 This function *must not* call protect_from_queue
1269 except when putting X into an insn (in which case convert_move does it). */
 1271 rtx
 1272 convert_to_mode (mode, x, unsignedp)
1273 enum machine_mode mode;
1274 rtx x;
1275 int unsignedp;
1277 return convert_modes (mode, VOIDmode, x, unsignedp);
1280 /* Return an rtx for a value that would result
1281 from converting X from mode OLDMODE to mode MODE.
1282 Both modes may be floating, or both integer.
1283 UNSIGNEDP is nonzero if X is an unsigned value.
1285 This can be done by referring to a part of X in place
1286 or by copying to a new temporary with conversion.
1288 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1290 This function *must not* call protect_from_queue
1291 except when putting X into an insn (in which case convert_move does it). */
 1293 rtx
 1294 convert_modes (mode, oldmode, x, unsignedp)
1295 enum machine_mode mode, oldmode;
1296 rtx x;
1297 int unsignedp;
1299 register rtx temp;
1301 /* If FROM is a SUBREG that indicates that we have already done at least
1302 the required extension, strip it. */
1304 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1305 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1306 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1307 x = gen_lowpart (mode, x);
1309 if (GET_MODE (x) != VOIDmode)
1310 oldmode = GET_MODE (x);
1312 if (mode == oldmode)
1313 return x;
1315 /* There is one case that we must handle specially: If we are converting
1316 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1317 we are to interpret the constant as unsigned, gen_lowpart will do
 1318 the wrong thing if the constant appears negative. What we want to do is
1319 make the high-order word of the constant zero, not all ones. */
1321 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1322 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1323 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1325 HOST_WIDE_INT val = INTVAL (x);
1327 if (oldmode != VOIDmode
1328 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1330 int width = GET_MODE_BITSIZE (oldmode);
1332 /* We need to zero extend VAL. */
1333 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1336 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1339 /* We can do this with a gen_lowpart if both desired and current modes
1340 are integer, and this is either a constant integer, a register, or a
1341 non-volatile MEM. Except for the constant case where MODE is no
1342 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1344 if ((GET_CODE (x) == CONST_INT
1345 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1346 || (GET_MODE_CLASS (mode) == MODE_INT
1347 && GET_MODE_CLASS (oldmode) == MODE_INT
1348 && (GET_CODE (x) == CONST_DOUBLE
1349 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1350 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1351 && direct_load[(int) mode])
1352 || (GET_CODE (x) == REG
1353 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1354 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1356 /* ?? If we don't know OLDMODE, we have to assume here that
1357 X does not need sign- or zero-extension. This may not be
1358 the case, but it's the best we can do. */
1359 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1360 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1362 HOST_WIDE_INT val = INTVAL (x);
1363 int width = GET_MODE_BITSIZE (oldmode);
1365 /* We must sign or zero-extend in this case. Start by
1366 zero-extending, then sign extend if we need to. */
1367 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1368 if (! unsignedp
1369 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1370 val |= (HOST_WIDE_INT) (-1) << width;
1372 return GEN_INT (trunc_int_for_mode (val, mode));
1375 return gen_lowpart (mode, x);
1378 temp = gen_reg_rtx (mode);
1379 convert_move (temp, x, unsignedp);
1380 return temp;
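/* Illustrative sketch (not taken from this file): widening a QImode value
   to SImode with zero extension via a fresh pseudo.

       rtx byte = gen_reg_rtx (QImode);
       ...
       rtx word = convert_to_mode (SImode, byte, 1);

   Passing UNSIGNEDP == 1 requests zero extension.  convert_to_mode is just
   convert_modes with OLDMODE == VOIDmode; pass OLDMODE explicitly when X
   may be a VOIDmode constant whose original mode matters.  */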
1383 /* This macro is used to determine what the largest unit size that
1384 move_by_pieces can use is. */
1386 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1387 move efficiently, as opposed to MOVE_MAX which is the maximum
1388 number of bytes we can move with a single instruction. */
1390 #ifndef MOVE_MAX_PIECES
1391 #define MOVE_MAX_PIECES MOVE_MAX
1392 #endif
1394 /* Generate several move instructions to copy LEN bytes from block FROM to
1395 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1396 and TO through protect_from_queue before calling.
1398 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1399 used to push FROM to the stack.
1401 ALIGN is maximum alignment we can assume. */
1403 void
1404 move_by_pieces (to, from, len, align)
1405 rtx to, from;
1406 unsigned HOST_WIDE_INT len;
1407 unsigned int align;
1409 struct move_by_pieces data;
1410 rtx to_addr, from_addr = XEXP (from, 0);
1411 unsigned int max_size = MOVE_MAX_PIECES + 1;
1412 enum machine_mode mode = VOIDmode, tmode;
1413 enum insn_code icode;
1415 data.offset = 0;
1416 data.from_addr = from_addr;
1417 if (to)
1419 to_addr = XEXP (to, 0);
1420 data.to = to;
1421 data.autinc_to
1422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1424 data.reverse
1425 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1427 else
1429 to_addr = NULL_RTX;
1430 data.to = NULL_RTX;
1431 data.autinc_to = 1;
1432 #ifdef STACK_GROWS_DOWNWARD
1433 data.reverse = 1;
1434 #else
1435 data.reverse = 0;
1436 #endif
1438 data.to_addr = to_addr;
1439 data.from = from;
1440 data.autinc_from
1441 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1442 || GET_CODE (from_addr) == POST_INC
1443 || GET_CODE (from_addr) == POST_DEC);
1445 data.explicit_inc_from = 0;
1446 data.explicit_inc_to = 0;
1447 if (data.reverse) data.offset = len;
1448 data.len = len;
1450 /* If copying requires more than two move insns,
1451 copy addresses to registers (to make displacements shorter)
1452 and use post-increment if available. */
1453 if (!(data.autinc_from && data.autinc_to)
1454 && move_by_pieces_ninsns (len, align) > 2)
1456 /* Find the mode of the largest move... */
1457 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1458 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1459 if (GET_MODE_SIZE (tmode) < max_size)
1460 mode = tmode;
1462 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1464 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1465 data.autinc_from = 1;
1466 data.explicit_inc_from = -1;
1468 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1470 data.from_addr = copy_addr_to_reg (from_addr);
1471 data.autinc_from = 1;
1472 data.explicit_inc_from = 1;
1474 if (!data.autinc_from && CONSTANT_P (from_addr))
1475 data.from_addr = copy_addr_to_reg (from_addr);
1476 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1478 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1479 data.autinc_to = 1;
1480 data.explicit_inc_to = -1;
1482 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1484 data.to_addr = copy_addr_to_reg (to_addr);
1485 data.autinc_to = 1;
1486 data.explicit_inc_to = 1;
1488 if (!data.autinc_to && CONSTANT_P (to_addr))
1489 data.to_addr = copy_addr_to_reg (to_addr);
1492 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1493 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1494 align = MOVE_MAX * BITS_PER_UNIT;
1496 /* First move what we can in the largest integer mode, then go to
1497 successively smaller modes. */
1499 while (max_size > 1)
1501 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1502 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1503 if (GET_MODE_SIZE (tmode) < max_size)
1504 mode = tmode;
1506 if (mode == VOIDmode)
1507 break;
1509 icode = mov_optab->handlers[(int) mode].insn_code;
1510 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1511 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1513 max_size = GET_MODE_SIZE (mode);
1516 /* The code above should have handled everything. */
1517 if (data.len > 0)
1518 abort ();
1521 /* Return number of insns required to move L bytes by pieces.
1522 ALIGN (in bits) is maximum alignment we can assume. */
1524 static unsigned HOST_WIDE_INT
1525 move_by_pieces_ninsns (l, align)
1526 unsigned HOST_WIDE_INT l;
1527 unsigned int align;
1529 unsigned HOST_WIDE_INT n_insns = 0;
1530 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1532 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1533 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1534 align = MOVE_MAX * BITS_PER_UNIT;
1536 while (max_size > 1)
1538 enum machine_mode mode = VOIDmode, tmode;
1539 enum insn_code icode;
1541 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1542 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1543 if (GET_MODE_SIZE (tmode) < max_size)
1544 mode = tmode;
1546 if (mode == VOIDmode)
1547 break;
1549 icode = mov_optab->handlers[(int) mode].insn_code;
1550 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1551 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1553 max_size = GET_MODE_SIZE (mode);
1556 if (l)
1557 abort ();
1558 return n_insns;
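/* Worked example (illustrative, assuming a typical 32-bit target where
   word_mode is SImode, MOVE_MAX is 4 and SImode/HImode move patterns
   exist): for a word-aligned 10-byte copy the loop above counts two SImode
   moves and one HImode move, so move_by_pieces_ninsns (10, 32) returns 3.
   That is below the default MOVE_RATIO of 15 used when no movstr pattern
   is defined, so MOVE_BY_PIECES_P accepts the copy; at -Os, where the
   cutoff drops to 3, it does not.  */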
1561 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1562 with move instructions for mode MODE. GENFUN is the gen_... function
1563 to make a move insn for that mode. DATA has all the other info. */
1565 static void
1566 move_by_pieces_1 (genfun, mode, data)
1567 rtx (*genfun) PARAMS ((rtx, ...));
1568 enum machine_mode mode;
1569 struct move_by_pieces *data;
1571 unsigned int size = GET_MODE_SIZE (mode);
1572 rtx to1 = NULL_RTX, from1;
1574 while (data->len >= size)
1576 if (data->reverse)
1577 data->offset -= size;
1579 if (data->to)
1581 if (data->autinc_to)
1583 to1 = replace_equiv_address (data->to, data->to_addr);
1584 to1 = adjust_address (to1, mode, 0);
1586 else
1587 to1 = adjust_address (data->to, mode, data->offset);
1590 if (data->autinc_from)
1592 from1 = replace_equiv_address (data->from, data->from_addr);
1593 from1 = adjust_address (from1, mode, 0);
1595 else
1596 from1 = adjust_address (data->from, mode, data->offset);
1598 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1599 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1600 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1601 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1603 if (data->to)
1604 emit_insn ((*genfun) (to1, from1));
1605 else
1607 #ifdef PUSH_ROUNDING
1608 emit_single_push_insn (mode, from1, NULL);
1609 #else
1610 abort ();
1611 #endif
1614 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1615 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1616 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1617 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1619 if (! data->reverse)
1620 data->offset += size;
1622 data->len -= size;
1626 /* Emit code to move a block Y to a block X.
1627 This may be done with string-move instructions,
1628 with multiple scalar move instructions, or with a library call.
1630 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1631 with mode BLKmode.
1632 SIZE is an rtx that says how long they are.
1633 ALIGN is the maximum alignment we can assume they have.
1635 Return the address of the new block, if memcpy is called and returns it,
1636 0 otherwise. */
 1638 rtx
 1639 emit_block_move (x, y, size, align)
1640 rtx x, y;
1641 rtx size;
1642 unsigned int align;
1644 rtx retval = 0;
1645 #ifdef TARGET_MEM_FUNCTIONS
1646 static tree fn;
1647 tree call_expr, arg_list;
1648 #endif
1650 if (GET_MODE (x) != BLKmode)
1651 abort ();
1653 if (GET_MODE (y) != BLKmode)
1654 abort ();
1656 x = protect_from_queue (x, 1);
1657 y = protect_from_queue (y, 0);
1658 size = protect_from_queue (size, 0);
1660 if (GET_CODE (x) != MEM)
1661 abort ();
1662 if (GET_CODE (y) != MEM)
1663 abort ();
1664 if (size == 0)
1665 abort ();
1667 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1668 move_by_pieces (x, y, INTVAL (size), align);
1669 else
1671 /* Try the most limited insn first, because there's no point
1672 including more than one in the machine description unless
1673 the more limited one has some advantage. */
1675 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1676 enum machine_mode mode;
1678 /* Since this is a move insn, we don't care about volatility. */
1679 volatile_ok = 1;
1681 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1682 mode = GET_MODE_WIDER_MODE (mode))
1684 enum insn_code code = movstr_optab[(int) mode];
1685 insn_operand_predicate_fn pred;
1687 if (code != CODE_FOR_nothing
1688 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1689 here because if SIZE is less than the mode mask, as it is
1690 returned by the macro, it will definitely be less than the
1691 actual mode mask. */
1692 && ((GET_CODE (size) == CONST_INT
1693 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1694 <= (GET_MODE_MASK (mode) >> 1)))
1695 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1696 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1697 || (*pred) (x, BLKmode))
1698 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1699 || (*pred) (y, BLKmode))
1700 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1701 || (*pred) (opalign, VOIDmode)))
1703 rtx op2;
1704 rtx last = get_last_insn ();
1705 rtx pat;
1707 op2 = convert_to_mode (mode, size, 1);
1708 pred = insn_data[(int) code].operand[2].predicate;
1709 if (pred != 0 && ! (*pred) (op2, mode))
1710 op2 = copy_to_mode_reg (mode, op2);
1712 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1713 if (pat)
1715 emit_insn (pat);
1716 volatile_ok = 0;
1717 return 0;
1719 else
1720 delete_insns_since (last);
1724 volatile_ok = 0;
1726 /* X, Y, or SIZE may have been passed through protect_from_queue.
1728 It is unsafe to save the value generated by protect_from_queue
1729 and reuse it later. Consider what happens if emit_queue is
1730 called before the return value from protect_from_queue is used.
1732 Expansion of the CALL_EXPR below will call emit_queue before
1733 we are finished emitting RTL for argument setup. So if we are
1734 not careful we could get the wrong value for an argument.
1736 To avoid this problem we go ahead and emit code to copy X, Y &
1737 SIZE into new pseudos. We can then place those new pseudos
1738 into an RTL_EXPR and use them later, even after a call to
1739 emit_queue.
1741 Note this is not strictly needed for library calls since they
1742 do not call emit_queue before loading their arguments. However,
1743 we may need to have library calls call emit_queue in the future
1744 since failing to do so could cause problems for targets which
1745 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1746 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1747 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1749 #ifdef TARGET_MEM_FUNCTIONS
1750 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1751 #else
1752 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1753 TREE_UNSIGNED (integer_type_node));
1754 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1755 #endif
1757 #ifdef TARGET_MEM_FUNCTIONS
1758 /* It is incorrect to use the libcall calling conventions to call
1759 memcpy in this context.
1761 This could be a user call to memcpy and the user may wish to
1762 examine the return value from memcpy.
1764 For targets where libcalls and normal calls have different conventions
1765 for returning pointers, we could end up generating incorrect code.
1767 So instead of using a libcall sequence we build up a suitable
1768 CALL_EXPR and expand the call in the normal fashion. */
1769 if (fn == NULL_TREE)
1771 tree fntype;
1773 /* This was copied from except.c, I don't know if all this is
1774 necessary in this context or not. */
1775 fn = get_identifier ("memcpy");
1776 fntype = build_pointer_type (void_type_node);
1777 fntype = build_function_type (fntype, NULL_TREE);
1778 fn = build_decl (FUNCTION_DECL, fn, fntype);
1779 ggc_add_tree_root (&fn, 1);
1780 DECL_EXTERNAL (fn) = 1;
1781 TREE_PUBLIC (fn) = 1;
1782 DECL_ARTIFICIAL (fn) = 1;
1783 TREE_NOTHROW (fn) = 1;
1784 make_decl_rtl (fn, NULL);
1785 assemble_external (fn);
1788 /* We need to make an argument list for the function call.
1790 memcpy has three arguments, the first two are void * addresses and
1791 the last is a size_t byte count for the copy. */
1792 arg_list
1793 = build_tree_list (NULL_TREE,
1794 make_tree (build_pointer_type (void_type_node), x));
1795 TREE_CHAIN (arg_list)
1796 = build_tree_list (NULL_TREE,
1797 make_tree (build_pointer_type (void_type_node), y));
1798 TREE_CHAIN (TREE_CHAIN (arg_list))
1799 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1800 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1802 /* Now we have to build up the CALL_EXPR itself. */
1803 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1804 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1805 call_expr, arg_list, NULL_TREE);
1806 TREE_SIDE_EFFECTS (call_expr) = 1;
1808 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1809 #else
1810 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1811 VOIDmode, 3, y, Pmode, x, Pmode,
1812 convert_to_mode (TYPE_MODE (integer_type_node), size,
1813 TREE_UNSIGNED (integer_type_node)),
1814 TYPE_MODE (integer_type_node));
1815 #endif
1818 return retval;
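/* Illustrative sketch (not taken from this file): copying a 24-byte
   BLKmode object with known 32-bit alignment.  DST and SRC are BLKmode
   MEMs; SIZE is an rtx and ALIGN is given in bits, as the callers in this
   file pass it.

       emit_block_move (dst, src, GEN_INT (24), 32);

   With a constant size this is normally expanded by move_by_pieces or a
   movstr pattern; otherwise it falls back to the memcpy (or bcopy) call
   built above, whose return value, if any, is what this function returns.  */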
1821 /* Copy all or part of a value X into registers starting at REGNO.
1822 The number of registers to be filled is NREGS. */
1824 void
1825 move_block_to_reg (regno, x, nregs, mode)
1826 int regno;
1827 rtx x;
1828 int nregs;
1829 enum machine_mode mode;
1831 int i;
1832 #ifdef HAVE_load_multiple
1833 rtx pat;
1834 rtx last;
1835 #endif
1837 if (nregs == 0)
1838 return;
1840 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1841 x = validize_mem (force_const_mem (mode, x));
1843 /* See if the machine can do this with a load multiple insn. */
1844 #ifdef HAVE_load_multiple
1845 if (HAVE_load_multiple)
1847 last = get_last_insn ();
1848 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1849 GEN_INT (nregs));
1850 if (pat)
1852 emit_insn (pat);
1853 return;
1855 else
1856 delete_insns_since (last);
1858 #endif
1860 for (i = 0; i < nregs; i++)
1861 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1862 operand_subword_force (x, i, mode));
1865 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1866 The number of registers to be filled is NREGS. SIZE indicates the number
1867 of bytes in the object X. */
1869 void
1870 move_block_from_reg (regno, x, nregs, size)
1871 int regno;
1872 rtx x;
1873 int nregs;
1874 int size;
1876 int i;
1877 #ifdef HAVE_store_multiple
1878 rtx pat;
1879 rtx last;
1880 #endif
1881 enum machine_mode mode;
1883 if (nregs == 0)
1884 return;
1886 /* If SIZE is that of a mode no bigger than a word, just use that
1887 mode's store operation. */
1888 if (size <= UNITS_PER_WORD
1889 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1891 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1892 return;
1895 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1896 to the left before storing to memory. Note that the previous test
1897 doesn't handle all cases (e.g. SIZE == 3). */
1898 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1900 rtx tem = operand_subword (x, 0, 1, BLKmode);
1901 rtx shift;
1903 if (tem == 0)
1904 abort ();
1906 shift = expand_shift (LSHIFT_EXPR, word_mode,
1907 gen_rtx_REG (word_mode, regno),
1908 build_int_2 ((UNITS_PER_WORD - size)
1909 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1910 emit_move_insn (tem, shift);
1911 return;
1914 /* See if the machine can do this with a store multiple insn. */
1915 #ifdef HAVE_store_multiple
1916 if (HAVE_store_multiple)
1918 last = get_last_insn ();
1919 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1920 GEN_INT (nregs));
1921 if (pat)
1923 emit_insn (pat);
1924 return;
1926 else
1927 delete_insns_since (last);
1929 #endif
1931 for (i = 0; i < nregs; i++)
1933 rtx tem = operand_subword (x, i, 1, BLKmode);
1935 if (tem == 0)
1936 abort ();
1938 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1942 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1943 registers represented by a PARALLEL. SSIZE represents the total size of
1944 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1945 SRC in bits. */
 1946 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1947 the balance will be in what would be the low-order memory addresses, i.e.
1948 left justified for big endian, right justified for little endian. This
1949 happens to be true for the targets currently using this support. If this
1950 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1951 would be needed. */
1953 void
1954 emit_group_load (dst, orig_src, ssize, align)
1955 rtx dst, orig_src;
1956 unsigned int align;
1957 int ssize;
1959 rtx *tmps, src;
1960 int start, i;
1962 if (GET_CODE (dst) != PARALLEL)
1963 abort ();
1965 /* Check for a NULL entry, used to indicate that the parameter goes
1966 both on the stack and in registers. */
1967 if (XEXP (XVECEXP (dst, 0, 0), 0))
1968 start = 0;
1969 else
1970 start = 1;
1972 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1974 /* Process the pieces. */
1975 for (i = start; i < XVECLEN (dst, 0); i++)
1977 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1978 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1979 unsigned int bytelen = GET_MODE_SIZE (mode);
1980 int shift = 0;
1982 /* Handle trailing fragments that run over the size of the struct. */
1983 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1985 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1986 bytelen = ssize - bytepos;
1987 if (bytelen <= 0)
1988 abort ();
1991 /* If we won't be loading directly from memory, protect the real source
1992 from strange tricks we might play; but make sure that the source can
1993 be loaded directly into the destination. */
1994 src = orig_src;
1995 if (GET_CODE (orig_src) != MEM
1996 && (!CONSTANT_P (orig_src)
1997 || (GET_MODE (orig_src) != mode
1998 && GET_MODE (orig_src) != VOIDmode)))
2000 if (GET_MODE (orig_src) == VOIDmode)
2001 src = gen_reg_rtx (mode);
2002 else
2003 src = gen_reg_rtx (GET_MODE (orig_src));
2004 emit_move_insn (src, orig_src);
2007 /* Optimize the access just a bit. */
2008 if (GET_CODE (src) == MEM
2009 && align >= GET_MODE_ALIGNMENT (mode)
2010 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2011 && bytelen == GET_MODE_SIZE (mode))
2013 tmps[i] = gen_reg_rtx (mode);
2014 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2016 else if (GET_CODE (src) == CONCAT)
2018 if (bytepos == 0
2019 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2020 tmps[i] = XEXP (src, 0);
2021 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2022 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2023 tmps[i] = XEXP (src, 1);
2024 else
2025 abort ();
2027 else if (CONSTANT_P (src)
2028 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2029 tmps[i] = src;
2030 else
2031 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2032 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2033 mode, mode, align, ssize);
2035 if (BYTES_BIG_ENDIAN && shift)
2036 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2037 tmps[i], 0, OPTAB_WIDEN);
2040 emit_queue ();
2042 /* Copy the extracted pieces into the proper (probable) hard regs. */
2043 for (i = start; i < XVECLEN (dst, 0); i++)
2044 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2047 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2048 registers represented by a PARALLEL. SSIZE represents the total size of
2049 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2051 void
2052 emit_group_store (orig_dst, src, ssize, align)
2053 rtx orig_dst, src;
2054 int ssize;
2055 unsigned int align;
2057 rtx *tmps, dst;
2058 int start, i;
2060 if (GET_CODE (src) != PARALLEL)
2061 abort ();
2063 /* Check for a NULL entry, used to indicate that the parameter goes
2064 both on the stack and in registers. */
2065 if (XEXP (XVECEXP (src, 0, 0), 0))
2066 start = 0;
2067 else
2068 start = 1;
2070 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2072 /* Copy the (probable) hard regs into pseudos. */
2073 for (i = start; i < XVECLEN (src, 0); i++)
2075 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2076 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2077 emit_move_insn (tmps[i], reg);
2079 emit_queue ();
2081 /* If we won't be storing directly into memory, protect the real destination
2082 from strange tricks we might play. */
2083 dst = orig_dst;
2084 if (GET_CODE (dst) == PARALLEL)
2086 rtx temp;
2088 /* We can get a PARALLEL dst if there is a conditional expression in
2089 a return statement. In that case, the dst and src are the same,
2090 so no action is necessary. */
2091 if (rtx_equal_p (dst, src))
2092 return;
2094 /* It is unclear if we can ever reach here, but we may as well handle
2095 it. Allocate a temporary, and split this into a store/load to/from
2096 the temporary. */
2098 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2099 emit_group_store (temp, src, ssize, align);
2100 emit_group_load (dst, temp, ssize, align);
2101 return;
2103 else if (GET_CODE (dst) != MEM)
2105 dst = gen_reg_rtx (GET_MODE (orig_dst));
2106 /* Make life a bit easier for combine. */
2107 emit_move_insn (dst, const0_rtx);
2110 /* Process the pieces. */
2111 for (i = start; i < XVECLEN (src, 0); i++)
2113 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2114 enum machine_mode mode = GET_MODE (tmps[i]);
2115 unsigned int bytelen = GET_MODE_SIZE (mode);
2117 /* Handle trailing fragments that run over the size of the struct. */
2118 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2120 if (BYTES_BIG_ENDIAN)
2122 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2123 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2124 tmps[i], 0, OPTAB_WIDEN);
2126 bytelen = ssize - bytepos;
2129 /* Optimize the access just a bit. */
2130 if (GET_CODE (dst) == MEM
2131 && align >= GET_MODE_ALIGNMENT (mode)
2132 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2133 && bytelen == GET_MODE_SIZE (mode))
2134 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2135 else
2136 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2137 mode, tmps[i], align, ssize);
2140 emit_queue ();
2142 /* Copy from the pseudo into the (probable) hard reg. */
2143 if (GET_CODE (dst) == REG)
2144 emit_move_insn (orig_dst, dst);
2147 /* Generate code to copy a BLKmode object of TYPE out of a
2148 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2149 is null, a stack temporary is created. TGTBLK is returned.
2151 The primary purpose of this routine is to handle functions
2152 that return BLKmode structures in registers. Some machines
2153 (the PA for example) want to return all small structures
2154 in registers regardless of the structure's alignment. */
2157 copy_blkmode_from_reg (tgtblk, srcreg, type)
2158 rtx tgtblk;
2159 rtx srcreg;
2160 tree type;
2162 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2163 rtx src = NULL, dst = NULL;
2164 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2165 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2167 if (tgtblk == 0)
2169 tgtblk = assign_temp (build_qualified_type (type,
2170 (TYPE_QUALS (type)
2171 | TYPE_QUAL_CONST)),
2172 0, 1, 1);
2173 preserve_temp_slots (tgtblk);
2176 /* This code assumes srcreg is at least a full word. If it isn't,
2177 copy it into a new pseudo which is a full word. */
2178 if (GET_MODE (srcreg) != BLKmode
2179 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2180 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2182 /* Structures whose size is not a multiple of a word are aligned
2183 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2184 machine, this means we must skip the empty high order bytes when
2185 calculating the bit offset. */
2186 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2187 big_endian_correction
2188 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
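/* For example (illustrative numbers only), with BITS_PER_WORD == 32,
   UNITS_PER_WORD == 4 and a 6-byte structure, bytes % UNITS_PER_WORD == 2,
   so the correction is 32 - 2 * 8 == 16: the first extraction below starts
   16 bits into the source word, skipping the two unused high-order bytes.  */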
2190 /* Copy the structure BITSIZE bits at a time.
2192 We could probably emit more efficient code for machines which do not use
2193 strict alignment, but it doesn't seem worth the effort at the current
2194 time. */
2195 for (bitpos = 0, xbitpos = big_endian_correction;
2196 bitpos < bytes * BITS_PER_UNIT;
2197 bitpos += bitsize, xbitpos += bitsize)
2199 /* We need a new source operand each time xbitpos is on a
2200 word boundary and when xbitpos == big_endian_correction
2201 (the first time through). */
2202 if (xbitpos % BITS_PER_WORD == 0
2203 || xbitpos == big_endian_correction)
2204 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2205 GET_MODE (srcreg));
2207 /* We need a new destination operand each time bitpos is on
2208 a word boundary. */
2209 if (bitpos % BITS_PER_WORD == 0)
2210 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2212 /* Use xbitpos for the source extraction (right justified) and
2213 bitpos for the destination store (left justified). */
2214 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2215 extract_bit_field (src, bitsize,
2216 xbitpos % BITS_PER_WORD, 1,
2217 NULL_RTX, word_mode, word_mode,
2218 bitsize, BITS_PER_WORD),
2219 bitsize, BITS_PER_WORD);
2222 return tgtblk;
2225 /* Add a USE expression for REG to the (possibly empty) list pointed
2226 to by CALL_FUSAGE. REG must denote a hard register. */
2228 void
2229 use_reg (call_fusage, reg)
2230 rtx *call_fusage, reg;
2232 if (GET_CODE (reg) != REG
2233 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2234 abort ();
2236 *call_fusage
2237 = gen_rtx_EXPR_LIST (VOIDmode,
2238 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2241 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2242 starting at REGNO. All of these registers must be hard registers. */
2244 void
2245 use_regs (call_fusage, regno, nregs)
2246 rtx *call_fusage;
2247 int regno;
2248 int nregs;
2250 int i;
2252 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2253 abort ();
2255 for (i = 0; i < nregs; i++)
2256 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2259 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2260 PARALLEL REGS. This is for calls that pass values in multiple
2261 non-contiguous locations. The Irix 6 ABI has examples of this. */
2263 void
2264 use_group_regs (call_fusage, regs)
2265 rtx *call_fusage;
2266 rtx regs;
2268 int i;
2270 for (i = 0; i < XVECLEN (regs, 0); i++)
2272 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2274 /* A NULL entry means the parameter goes both on the stack and in
2275 registers. This can also be a MEM for targets that pass values
2276 partially on the stack and partially in registers. */
2277 if (reg != 0 && GET_CODE (reg) == REG)
2278 use_reg (call_fusage, reg);
2284 can_store_by_pieces (len, constfun, constfundata, align)
2285 unsigned HOST_WIDE_INT len;
2286 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2287 PTR constfundata;
2288 unsigned int align;
2290 unsigned HOST_WIDE_INT max_size, l;
2291 HOST_WIDE_INT offset = 0;
2292 enum machine_mode mode, tmode;
2293 enum insn_code icode;
2294 int reverse;
2295 rtx cst;
2297 if (! MOVE_BY_PIECES_P (len, align))
2298 return 0;
2300 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2301 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2302 align = MOVE_MAX * BITS_PER_UNIT;
2304 /* We would first store what we can in the largest integer mode, then go to
2305 successively smaller modes. */
2307 for (reverse = 0;
2308 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2309 reverse++)
2311 l = len;
2312 mode = VOIDmode;
2313 max_size = MOVE_MAX_PIECES + 1;
2314 while (max_size > 1)
2316 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2317 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2318 if (GET_MODE_SIZE (tmode) < max_size)
2319 mode = tmode;
2321 if (mode == VOIDmode)
2322 break;
2324 icode = mov_optab->handlers[(int) mode].insn_code;
2325 if (icode != CODE_FOR_nothing
2326 && align >= GET_MODE_ALIGNMENT (mode))
2328 unsigned int size = GET_MODE_SIZE (mode);
2330 while (l >= size)
2332 if (reverse)
2333 offset -= size;
2335 cst = (*constfun) (constfundata, offset, mode);
2336 if (!LEGITIMATE_CONSTANT_P (cst))
2337 return 0;
2339 if (!reverse)
2340 offset += size;
2342 l -= size;
2346 max_size = GET_MODE_SIZE (mode);
2349 /* The code above should have handled everything. */
2350 if (l != 0)
2351 abort ();
2354 return 1;
2357 /* Generate several move instructions to store LEN bytes generated by
2358 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2359 pointer which will be passed as argument in every CONSTFUN call.
2360 ALIGN is maximum alignment we can assume. */
2362 void
2363 store_by_pieces (to, len, constfun, constfundata, align)
2364 rtx to;
2365 unsigned HOST_WIDE_INT len;
2366 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2367 PTR constfundata;
2368 unsigned int align;
2370 struct store_by_pieces data;
2372 if (! MOVE_BY_PIECES_P (len, align))
2373 abort ();
2374 to = protect_from_queue (to, 1);
2375 data.constfun = constfun;
2376 data.constfundata = constfundata;
2377 data.len = len;
2378 data.to = to;
2379 store_by_pieces_1 (&data, align);
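/* Illustrative sketch only, not part of this file: a CONSTFUN callback of
   the shape can_store_by_pieces and store_by_pieces expect.  For each chunk
   it returns a CONST_INT whose bytes all equal the fill byte passed via
   CONSTFUNDATA.  The helper name and the fill-byte scheme are assumptions
   made up for the example; it also assumes the chunk mode fits in a
   HOST_WIDE_INT.  A caller might then write, when
   MOVE_BY_PIECES_P (len, align) holds:
     store_by_pieces (to, len, fill_byte_constfun, &fill_byte, align);  */
#if 0
static rtx
fill_byte_constfun (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Replicate the single byte pointed to by DATA across the chunk.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | *(unsigned char *) data;

  return GEN_INT (trunc_int_for_mode ((HOST_WIDE_INT) val, mode));
}
#endif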
2382 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2383 rtx with BLKmode). The caller must pass TO through protect_from_queue
2384 before calling. ALIGN is maximum alignment we can assume. */
2386 static void
2387 clear_by_pieces (to, len, align)
2388 rtx to;
2389 unsigned HOST_WIDE_INT len;
2390 unsigned int align;
2392 struct store_by_pieces data;
2394 data.constfun = clear_by_pieces_1;
2395 data.constfundata = NULL;
2396 data.len = len;
2397 data.to = to;
2398 store_by_pieces_1 (&data, align);
2401 /* Callback routine for clear_by_pieces.
2402 Return const0_rtx unconditionally. */
2404 static rtx
2405 clear_by_pieces_1 (data, offset, mode)
2406 PTR data ATTRIBUTE_UNUSED;
2407 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2408 enum machine_mode mode ATTRIBUTE_UNUSED;
2410 return const0_rtx;
2413 /* Subroutine of clear_by_pieces and store_by_pieces.
2414 Generate several move instructions to store LEN bytes of block TO. (A MEM
2415 rtx with BLKmode). The caller must pass TO through protect_from_queue
2416 before calling. ALIGN is maximum alignment we can assume. */
2418 static void
2419 store_by_pieces_1 (data, align)
2420 struct store_by_pieces *data;
2421 unsigned int align;
2423 rtx to_addr = XEXP (data->to, 0);
2424 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2425 enum machine_mode mode = VOIDmode, tmode;
2426 enum insn_code icode;
2428 data->offset = 0;
2429 data->to_addr = to_addr;
2430 data->autinc_to
2431 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2432 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2434 data->explicit_inc_to = 0;
2435 data->reverse
2436 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2437 if (data->reverse)
2438 data->offset = data->len;
2440 /* If storing requires more than two move insns,
2441 copy addresses to registers (to make displacements shorter)
2442 and use post-increment if available. */
2443 if (!data->autinc_to
2444 && move_by_pieces_ninsns (data->len, align) > 2)
2446 /* Determine the main mode we'll be using. */
2447 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2448 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2449 if (GET_MODE_SIZE (tmode) < max_size)
2450 mode = tmode;
2452 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2454 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2455 data->autinc_to = 1;
2456 data->explicit_inc_to = -1;
2459 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2460 && ! data->autinc_to)
2462 data->to_addr = copy_addr_to_reg (to_addr);
2463 data->autinc_to = 1;
2464 data->explicit_inc_to = 1;
2467 if ( !data->autinc_to && CONSTANT_P (to_addr))
2468 data->to_addr = copy_addr_to_reg (to_addr);
2471 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2472 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2473 align = MOVE_MAX * BITS_PER_UNIT;
2475 /* First store what we can in the largest integer mode, then go to
2476 successively smaller modes. */
2478 while (max_size > 1)
2480 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2481 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2482 if (GET_MODE_SIZE (tmode) < max_size)
2483 mode = tmode;
2485 if (mode == VOIDmode)
2486 break;
2488 icode = mov_optab->handlers[(int) mode].insn_code;
2489 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2490 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2492 max_size = GET_MODE_SIZE (mode);
2495 /* The code above should have handled everything. */
2496 if (data->len != 0)
2497 abort ();
2500 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2501 with move instructions for mode MODE. GENFUN is the gen_... function
2502 to make a move insn for that mode. DATA has all the other info. */
2504 static void
2505 store_by_pieces_2 (genfun, mode, data)
2506 rtx (*genfun) PARAMS ((rtx, ...));
2507 enum machine_mode mode;
2508 struct store_by_pieces *data;
2510 unsigned int size = GET_MODE_SIZE (mode);
2511 rtx to1, cst;
2513 while (data->len >= size)
2515 if (data->reverse)
2516 data->offset -= size;
2518 if (data->autinc_to)
2520 to1 = replace_equiv_address (data->to, data->to_addr);
2521 to1 = adjust_address (to1, mode, 0);
2523 else
2524 to1 = adjust_address (data->to, mode, data->offset);
2526 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2527 emit_insn (gen_add2_insn (data->to_addr,
2528 GEN_INT (-(HOST_WIDE_INT) size)));
2530 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2531 emit_insn ((*genfun) (to1, cst));
2533 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2534 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2536 if (! data->reverse)
2537 data->offset += size;
2539 data->len -= size;
2543 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2544 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2546 If we call a function that returns the length of the block, return it. */
2549 clear_storage (object, size, align)
2550 rtx object;
2551 rtx size;
2552 unsigned int align;
2554 #ifdef TARGET_MEM_FUNCTIONS
2555 static tree fn;
2556 tree call_expr, arg_list;
2557 #endif
2558 rtx retval = 0;
2560 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2561 just move a zero. Otherwise, do this a piece at a time. */
2562 if (GET_MODE (object) != BLKmode
2563 && GET_CODE (size) == CONST_INT
2564 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2565 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2566 else
2568 object = protect_from_queue (object, 1);
2569 size = protect_from_queue (size, 0);
2571 if (GET_CODE (size) == CONST_INT
2572 && MOVE_BY_PIECES_P (INTVAL (size), align))
2573 clear_by_pieces (object, INTVAL (size), align);
2574 else
2576 /* Try the most limited insn first, because there's no point
2577 including more than one in the machine description unless
2578 the more limited one has some advantage. */
2580 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2581 enum machine_mode mode;
2583 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2584 mode = GET_MODE_WIDER_MODE (mode))
2586 enum insn_code code = clrstr_optab[(int) mode];
2587 insn_operand_predicate_fn pred;
2589 if (code != CODE_FOR_nothing
2590 /* We don't need MODE to be narrower than
2591 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2592 the mode mask, as it is returned by the macro, it will
2593 definitely be less than the actual mode mask. */
2594 && ((GET_CODE (size) == CONST_INT
2595 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2596 <= (GET_MODE_MASK (mode) >> 1)))
2597 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2598 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2599 || (*pred) (object, BLKmode))
2600 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2601 || (*pred) (opalign, VOIDmode)))
2603 rtx op1;
2604 rtx last = get_last_insn ();
2605 rtx pat;
2607 op1 = convert_to_mode (mode, size, 1);
2608 pred = insn_data[(int) code].operand[1].predicate;
2609 if (pred != 0 && ! (*pred) (op1, mode))
2610 op1 = copy_to_mode_reg (mode, op1);
2612 pat = GEN_FCN ((int) code) (object, op1, opalign);
2613 if (pat)
2615 emit_insn (pat);
2616 return 0;
2618 else
2619 delete_insns_since (last);
2623 /* OBJECT or SIZE may have been passed through protect_from_queue.
2625 It is unsafe to save the value generated by protect_from_queue
2626 and reuse it later. Consider what happens if emit_queue is
2627 called before the return value from protect_from_queue is used.
2629 Expansion of the CALL_EXPR below will call emit_queue before
2630 we are finished emitting RTL for argument setup. So if we are
2631 not careful we could get the wrong value for an argument.
2633 To avoid this problem we go ahead and emit code to copy OBJECT
2634 and SIZE into new pseudos. We can then place those new pseudos
2635 into an RTL_EXPR and use them later, even after a call to
2636 emit_queue.
2638 Note this is not strictly needed for library calls since they
2639 do not call emit_queue before loading their arguments. However,
2640 we may need to have library calls call emit_queue in the future
2641 since failing to do so could cause problems for targets which
2642 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2643 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2645 #ifdef TARGET_MEM_FUNCTIONS
2646 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2647 #else
2648 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2649 TREE_UNSIGNED (integer_type_node));
2650 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2651 #endif
2653 #ifdef TARGET_MEM_FUNCTIONS
2654 /* It is incorrect to use the libcall calling conventions to call
2655 memset in this context.
2657 This could be a user call to memset and the user may wish to
2658 examine the return value from memset.
2660 For targets where libcalls and normal calls have different
2661 conventions for returning pointers, we could end up generating
2662 incorrect code.
2664 So instead of using a libcall sequence we build up a suitable
2665 CALL_EXPR and expand the call in the normal fashion. */
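/* In C terms, the code below builds and expands roughly the equivalent of
   the call `memset ((void *) object, 0, size)', so that the normal calling
   conventions (and a usable return value) apply.  */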
2666 if (fn == NULL_TREE)
2668 tree fntype;
2670 /* This was copied from except.c, I don't know if all this is
2671 necessary in this context or not. */
2672 fn = get_identifier ("memset");
2673 fntype = build_pointer_type (void_type_node);
2674 fntype = build_function_type (fntype, NULL_TREE);
2675 fn = build_decl (FUNCTION_DECL, fn, fntype);
2676 ggc_add_tree_root (&fn, 1);
2677 DECL_EXTERNAL (fn) = 1;
2678 TREE_PUBLIC (fn) = 1;
2679 DECL_ARTIFICIAL (fn) = 1;
2680 TREE_NOTHROW (fn) = 1;
2681 make_decl_rtl (fn, NULL);
2682 assemble_external (fn);
2685 /* We need to make an argument list for the function call.
2687 memset has three arguments: the first is a void * address, the
2688 second an integer with the initialization value, and the last is a
2689 size_t byte count for the copy. */
2690 arg_list
2691 = build_tree_list (NULL_TREE,
2692 make_tree (build_pointer_type (void_type_node),
2693 object));
2694 TREE_CHAIN (arg_list)
2695 = build_tree_list (NULL_TREE,
2696 make_tree (integer_type_node, const0_rtx));
2697 TREE_CHAIN (TREE_CHAIN (arg_list))
2698 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2699 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2701 /* Now we have to build up the CALL_EXPR itself. */
2702 call_expr = build1 (ADDR_EXPR,
2703 build_pointer_type (TREE_TYPE (fn)), fn);
2704 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2705 call_expr, arg_list, NULL_TREE);
2706 TREE_SIDE_EFFECTS (call_expr) = 1;
2708 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2709 #else
2710 emit_library_call (bzero_libfunc, LCT_NORMAL,
2711 VOIDmode, 2, object, Pmode, size,
2712 TYPE_MODE (integer_type_node));
2713 #endif
2717 return retval;
2720 /* Generate code to copy Y into X.
2721 Both Y and X must have the same mode, except that
2722 Y can be a constant with VOIDmode.
2723 This mode cannot be BLKmode; use emit_block_move for that.
2725 Return the last instruction emitted. */
2728 emit_move_insn (x, y)
2729 rtx x, y;
2731 enum machine_mode mode = GET_MODE (x);
2732 rtx y_cst = NULL_RTX;
2733 rtx last_insn;
2735 x = protect_from_queue (x, 1);
2736 y = protect_from_queue (y, 0);
2738 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2739 abort ();
2741 /* Never force constant_p_rtx to memory. */
2742 if (GET_CODE (y) == CONSTANT_P_RTX)
2744 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2746 y_cst = y;
2747 y = force_const_mem (mode, y);
2750 /* If X or Y are memory references, verify that their addresses are valid
2751 for the machine. */
2752 if (GET_CODE (x) == MEM
2753 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2754 && ! push_operand (x, GET_MODE (x)))
2755 || (flag_force_addr
2756 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2757 x = validize_mem (x);
2759 if (GET_CODE (y) == MEM
2760 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2761 || (flag_force_addr
2762 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2763 y = validize_mem (y);
2765 if (mode == BLKmode)
2766 abort ();
2768 last_insn = emit_move_insn_1 (x, y);
2770 if (y_cst && GET_CODE (x) == REG)
2771 REG_NOTES (last_insn)
2772 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2774 return last_insn;
2777 /* Low level part of emit_move_insn.
2778 Called just like emit_move_insn, but assumes X and Y
2779 are basically valid. */
2782 emit_move_insn_1 (x, y)
2783 rtx x, y;
2785 enum machine_mode mode = GET_MODE (x);
2786 enum machine_mode submode;
2787 enum mode_class class = GET_MODE_CLASS (mode);
2788 unsigned int i;
2790 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2791 abort ();
2793 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2794 return
2795 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2797 /* Expand complex moves by moving real part and imag part, if possible. */
2798 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2799 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2800 * BITS_PER_UNIT),
2801 (class == MODE_COMPLEX_INT
2802 ? MODE_INT : MODE_FLOAT),
2804 && (mov_optab->handlers[(int) submode].insn_code
2805 != CODE_FOR_nothing))
2807 /* Don't split destination if it is a stack push. */
2808 int stack = push_operand (x, GET_MODE (x));
2810 #ifdef PUSH_ROUNDING
2811 /* In case we output to the stack, but the size is smaller than what the
2812 machine can push exactly, we need to use move instructions. */
2813 if (stack
2814 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2816 rtx temp;
2817 int offset1, offset2;
2819 /* Do not use anti_adjust_stack, since we don't want to update
2820 stack_pointer_delta. */
2821 temp = expand_binop (Pmode,
2822 #ifdef STACK_GROWS_DOWNWARD
2823 sub_optab,
2824 #else
2825 add_optab,
2826 #endif
2827 stack_pointer_rtx,
2828 GEN_INT
2829 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2830 stack_pointer_rtx,
2832 OPTAB_LIB_WIDEN);
2833 if (temp != stack_pointer_rtx)
2834 emit_move_insn (stack_pointer_rtx, temp);
2835 #ifdef STACK_GROWS_DOWNWARD
2836 offset1 = 0;
2837 offset2 = GET_MODE_SIZE (submode);
2838 #else
2839 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2840 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2841 + GET_MODE_SIZE (submode));
2842 #endif
2843 emit_move_insn (change_address (x, submode,
2844 gen_rtx_PLUS (Pmode,
2845 stack_pointer_rtx,
2846 GEN_INT (offset1))),
2847 gen_realpart (submode, y));
2848 emit_move_insn (change_address (x, submode,
2849 gen_rtx_PLUS (Pmode,
2850 stack_pointer_rtx,
2851 GEN_INT (offset2))),
2852 gen_imagpart (submode, y));
2854 else
2855 #endif
2856 /* If this is a stack push, push the highpart first, so it
2857 will be in the argument order.
2859 In that case, change_address is used only to convert
2860 the mode, not to change the address. */
2861 if (stack)
2863 /* Note that the real part always precedes the imag part in memory
2864 regardless of machine's endianness. */
2865 #ifdef STACK_GROWS_DOWNWARD
2866 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2867 (gen_rtx_MEM (submode, XEXP (x, 0)),
2868 gen_imagpart (submode, y)));
2869 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2870 (gen_rtx_MEM (submode, XEXP (x, 0)),
2871 gen_realpart (submode, y)));
2872 #else
2873 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2874 (gen_rtx_MEM (submode, XEXP (x, 0)),
2875 gen_realpart (submode, y)));
2876 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2877 (gen_rtx_MEM (submode, XEXP (x, 0)),
2878 gen_imagpart (submode, y)));
2879 #endif
2881 else
2883 rtx realpart_x, realpart_y;
2884 rtx imagpart_x, imagpart_y;
2886 /* If this is a complex value with each part being smaller than a
2887 word, the usual calling sequence will likely pack the pieces into
2888 a single register. Unfortunately, SUBREG of hard registers only
2889 deals in terms of words, so we have a problem converting input
2890 arguments to the CONCAT of two registers that is used elsewhere
2891 for complex values. If this is before reload, we can copy it into
2892 memory and reload. FIXME, we should see about using extract and
2893 insert on integer registers, but complex short and complex char
2894 variables should be rarely used. */
2895 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2896 && (reload_in_progress | reload_completed) == 0)
2898 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2899 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2901 if (packed_dest_p || packed_src_p)
2903 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2904 ? MODE_FLOAT : MODE_INT);
2906 enum machine_mode reg_mode
2907 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2909 if (reg_mode != BLKmode)
2911 rtx mem = assign_stack_temp (reg_mode,
2912 GET_MODE_SIZE (mode), 0);
2913 rtx cmem = adjust_address (mem, mode, 0);
2915 cfun->cannot_inline
2916 = N_("function using short complex types cannot be inline");
2918 if (packed_dest_p)
2920 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2921 emit_move_insn_1 (cmem, y);
2922 return emit_move_insn_1 (sreg, mem);
2924 else
2926 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2927 emit_move_insn_1 (mem, sreg);
2928 return emit_move_insn_1 (x, cmem);
2934 realpart_x = gen_realpart (submode, x);
2935 realpart_y = gen_realpart (submode, y);
2936 imagpart_x = gen_imagpart (submode, x);
2937 imagpart_y = gen_imagpart (submode, y);
2939 /* Show the output dies here. This is necessary for SUBREGs
2940 of pseudos since we cannot track their lifetimes correctly;
2941 hard regs shouldn't appear here except as return values.
2942 We never want to emit such a clobber after reload. */
2943 if (x != y
2944 && ! (reload_in_progress || reload_completed)
2945 && (GET_CODE (realpart_x) == SUBREG
2946 || GET_CODE (imagpart_x) == SUBREG))
2948 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (realpart_x, realpart_y));
2953 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2954 (imagpart_x, imagpart_y));
2957 return get_last_insn ();
2960 /* This will handle any multi-word mode that lacks a move_insn pattern.
2961 However, you will get better code if you define such patterns,
2962 even if they must turn into multiple assembler instructions. */
2963 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2965 rtx last_insn = 0;
2966 rtx seq, inner;
2967 int need_clobber;
2969 #ifdef PUSH_ROUNDING
2971 /* If X is a push on the stack, do the push now and replace
2972 X with a reference to the stack pointer. */
2973 if (push_operand (x, GET_MODE (x)))
2975 rtx temp;
2976 enum rtx_code code;
2978 /* Do not use anti_adjust_stack, since we don't want to update
2979 stack_pointer_delta. */
2980 temp = expand_binop (Pmode,
2981 #ifdef STACK_GROWS_DOWNWARD
2982 sub_optab,
2983 #else
2984 add_optab,
2985 #endif
2986 stack_pointer_rtx,
2987 GEN_INT
2988 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2989 stack_pointer_rtx,
2991 OPTAB_LIB_WIDEN);
2992 if (temp != stack_pointer_rtx)
2993 emit_move_insn (stack_pointer_rtx, temp);
2995 code = GET_CODE (XEXP (x, 0));
2996 /* Just hope that small offsets off SP are OK. */
2997 if (code == POST_INC)
2998 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2999 GEN_INT (-(HOST_WIDE_INT)
3000 GET_MODE_SIZE (GET_MODE (x))));
3001 else if (code == POST_DEC)
3002 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3003 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3004 else
3005 temp = stack_pointer_rtx;
3007 x = change_address (x, VOIDmode, temp);
3009 #endif
3011 /* If we are in reload, see if either operand is a MEM whose address
3012 is scheduled for replacement. */
3013 if (reload_in_progress && GET_CODE (x) == MEM
3014 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3015 x = replace_equiv_address_nv (x, inner);
3016 if (reload_in_progress && GET_CODE (y) == MEM
3017 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3018 y = replace_equiv_address_nv (y, inner);
3020 start_sequence ();
3022 need_clobber = 0;
3023 for (i = 0;
3024 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3025 i++)
3027 rtx xpart = operand_subword (x, i, 1, mode);
3028 rtx ypart = operand_subword (y, i, 1, mode);
3030 /* If we can't get a part of Y, put Y into memory if it is a
3031 constant. Otherwise, force it into a register. If we still
3032 can't get a part of Y, abort. */
3033 if (ypart == 0 && CONSTANT_P (y))
3035 y = force_const_mem (mode, y);
3036 ypart = operand_subword (y, i, 1, mode);
3038 else if (ypart == 0)
3039 ypart = operand_subword_force (y, i, mode);
3041 if (xpart == 0 || ypart == 0)
3042 abort ();
3044 need_clobber |= (GET_CODE (xpart) == SUBREG);
3046 last_insn = emit_move_insn (xpart, ypart);
3049 seq = gen_sequence ();
3050 end_sequence ();
3052 /* Show the output dies here. This is necessary for SUBREGs
3053 of pseudos since we cannot track their lifetimes correctly;
3054 hard regs shouldn't appear here except as return values.
3055 We never want to emit such a clobber after reload. */
3056 if (x != y
3057 && ! (reload_in_progress || reload_completed)
3058 && need_clobber != 0)
3060 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3063 emit_insn (seq);
3065 return last_insn;
3067 else
3068 abort ();
3071 /* Pushing data onto the stack. */
3073 /* Push a block of length SIZE (perhaps variable)
3074 and return an rtx to address the beginning of the block.
3075 Note that it is not possible for the value returned to be a QUEUED.
3076 The value may be virtual_outgoing_args_rtx.
3078 EXTRA is the number of bytes of padding to push in addition to SIZE.
3079 BELOW nonzero means this padding comes at low addresses;
3080 otherwise, the padding comes at high addresses. */
3083 push_block (size, extra, below)
3084 rtx size;
3085 int extra, below;
3087 register rtx temp;
3089 size = convert_modes (Pmode, ptr_mode, size, 1);
3090 if (CONSTANT_P (size))
3091 anti_adjust_stack (plus_constant (size, extra));
3092 else if (GET_CODE (size) == REG && extra == 0)
3093 anti_adjust_stack (size);
3094 else
3096 temp = copy_to_mode_reg (Pmode, size);
3097 if (extra != 0)
3098 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3099 temp, 0, OPTAB_LIB_WIDEN);
3100 anti_adjust_stack (temp);
3103 #ifndef STACK_GROWS_DOWNWARD
3104 #ifdef ARGS_GROW_DOWNWARD
3105 if (!ACCUMULATE_OUTGOING_ARGS)
3106 #else
3107 if (0)
3108 #endif
3109 #else
3110 if (1)
3111 #endif
3113 /* Return the lowest stack address when STACK or ARGS grow downward and
3114 we are not accumulating outgoing arguments (the c4x port uses such
3115 conventions). */
3116 temp = virtual_outgoing_args_rtx;
3117 if (extra != 0 && below)
3118 temp = plus_constant (temp, extra);
3120 else
3122 if (GET_CODE (size) == CONST_INT)
3123 temp = plus_constant (virtual_outgoing_args_rtx,
3124 -INTVAL (size) - (below ? 0 : extra));
3125 else if (extra != 0 && !below)
3126 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3127 negate_rtx (Pmode, plus_constant (size, extra)));
3128 else
3129 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3130 negate_rtx (Pmode, size));
3133 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3137 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3138 block of SIZE bytes. */
3140 static rtx
3141 get_push_address (size)
3142 int size;
3144 register rtx temp;
3146 if (STACK_PUSH_CODE == POST_DEC)
3147 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3148 else if (STACK_PUSH_CODE == POST_INC)
3149 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3150 else
3151 temp = stack_pointer_rtx;
3153 return copy_to_reg (temp);
3156 #ifdef PUSH_ROUNDING
3158 /* Emit single push insn. */
3160 static void
3161 emit_single_push_insn (mode, x, type)
3162 rtx x;
3163 enum machine_mode mode;
3164 tree type;
3166 rtx dest_addr;
3167 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3168 rtx dest;
3169 enum insn_code icode;
3170 insn_operand_predicate_fn pred;
3172 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3173 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3174 a MEM representing the push operation at the move expander. */
3175 icode = push_optab->handlers[(int) mode].insn_code;
3176 if (icode != CODE_FOR_nothing)
3178 if (((pred = insn_data[(int) icode].operand[0].predicate)
3179 && !((*pred) (x, mode))))
3180 x = force_reg (mode, x);
3181 emit_insn (GEN_FCN (icode) (x));
3182 return;
3184 if (GET_MODE_SIZE (mode) == rounded_size)
3185 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3186 else
3188 #ifdef STACK_GROWS_DOWNWARD
3189 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3190 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3191 #else
3192 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3193 GEN_INT (rounded_size));
3194 #endif
3195 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3198 dest = gen_rtx_MEM (mode, dest_addr);
3200 if (type != 0)
3202 set_mem_attributes (dest, type, 1);
3203 /* Function incoming arguments may overlap with sibling call
3204 outgoing arguments and we cannot allow reordering of reads
3205 from function arguments with stores to outgoing arguments
3206 of sibling calls. */
3207 set_mem_alias_set (dest, 0);
3209 emit_move_insn (dest, x);
3211 #endif
3213 /* Generate code to push X onto the stack, assuming it has mode MODE and
3214 type TYPE.
3215 MODE is redundant except when X is a CONST_INT (since they don't
3216 carry mode info).
3217 SIZE is an rtx for the size of data to be copied (in bytes),
3218 needed only if X is BLKmode.
3220 ALIGN (in bits) is maximum alignment we can assume.
3222 If PARTIAL and REG are both nonzero, then copy that many of the first
3223 words of X into registers starting with REG, and push the rest of X.
3224 The amount of space pushed is decreased by PARTIAL words,
3225 rounded *down* to a multiple of PARM_BOUNDARY.
3226 REG must be a hard register in this case.
3227 If REG is zero but PARTIAL is not, take all other actions for an
3228 argument partially in registers, but do not actually load any
3229 registers.
3231 EXTRA is the amount in bytes of extra space to leave next to this arg.
3232 This is ignored if an argument block has already been allocated.
3234 On a machine that lacks real push insns, ARGS_ADDR is the address of
3235 the bottom of the argument block for this call. We use indexing off there
3236 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3237 argument block has not been preallocated.
3239 ARGS_SO_FAR is the size of args previously pushed for this call.
3241 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3242 for arguments passed in registers. If nonzero, it will be the number
3243 of bytes required. */
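/* Purely illustrative example (numbers made up): with UNITS_PER_WORD == 4,
   PARM_BOUNDARY == 32, REG nonzero and PARTIAL == 2, the first two words of
   X are loaded into REG and REG+1 at the end of this function, only the
   remainder of X is pushed, and the stack space used shrinks by those
   8 bytes.  */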
3245 void
3246 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3247 args_addr, args_so_far, reg_parm_stack_space,
3248 alignment_pad)
3249 register rtx x;
3250 enum machine_mode mode;
3251 tree type;
3252 rtx size;
3253 unsigned int align;
3254 int partial;
3255 rtx reg;
3256 int extra;
3257 rtx args_addr;
3258 rtx args_so_far;
3259 int reg_parm_stack_space;
3260 rtx alignment_pad;
3262 rtx xinner;
3263 enum direction stack_direction
3264 #ifdef STACK_GROWS_DOWNWARD
3265 = downward;
3266 #else
3267 = upward;
3268 #endif
3270 /* Decide where to pad the argument: `downward' for below,
3271 `upward' for above, or `none' for don't pad it.
3272 Default is below for small data on big-endian machines; else above. */
3273 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3275 /* Invert direction if stack is post-update. */
3276 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3277 if (where_pad != none)
3278 where_pad = (where_pad == downward ? upward : downward);
3280 xinner = x = protect_from_queue (x, 0);
3282 if (mode == BLKmode)
3284 /* Copy a block into the stack, entirely or partially. */
3286 register rtx temp;
3287 int used = partial * UNITS_PER_WORD;
3288 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3289 int skip;
3291 if (size == 0)
3292 abort ();
3294 used -= offset;
3296 /* USED is now the # of bytes we need not copy to the stack
3297 because registers will take care of them. */
3299 if (partial != 0)
3300 xinner = adjust_address (xinner, BLKmode, used);
3302 /* If the partial register-part of the arg counts in its stack size,
3303 skip the part of stack space corresponding to the registers.
3304 Otherwise, start copying to the beginning of the stack space,
3305 by setting SKIP to 0. */
3306 skip = (reg_parm_stack_space == 0) ? 0 : used;
3308 #ifdef PUSH_ROUNDING
3309 /* Do it with several push insns if that doesn't take lots of insns
3310 and if there is no difficulty with push insns that skip bytes
3311 on the stack for alignment purposes. */
3312 if (args_addr == 0
3313 && PUSH_ARGS
3314 && GET_CODE (size) == CONST_INT
3315 && skip == 0
3316 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3317 /* Here we avoid the case of a structure whose weak alignment
3318 forces many pushes of a small amount of data,
3319 and such small pushes do rounding that causes trouble. */
3320 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3321 || align >= BIGGEST_ALIGNMENT
3322 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3323 == (align / BITS_PER_UNIT)))
3324 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3326 /* Push padding now if padding above and stack grows down,
3327 or if padding below and stack grows up.
3328 But if space already allocated, this has already been done. */
3329 if (extra && args_addr == 0
3330 && where_pad != none && where_pad != stack_direction)
3331 anti_adjust_stack (GEN_INT (extra));
3333 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3335 if (current_function_check_memory_usage && ! in_check_memory_usage)
3337 rtx temp;
3339 in_check_memory_usage = 1;
3340 temp = get_push_address (INTVAL (size) - used);
3341 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3342 emit_library_call (chkr_copy_bitmap_libfunc,
3343 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3344 Pmode, XEXP (xinner, 0), Pmode,
3345 GEN_INT (INTVAL (size) - used),
3346 TYPE_MODE (sizetype));
3347 else
3348 emit_library_call (chkr_set_right_libfunc,
3349 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3350 Pmode, GEN_INT (INTVAL (size) - used),
3351 TYPE_MODE (sizetype),
3352 GEN_INT (MEMORY_USE_RW),
3353 TYPE_MODE (integer_type_node));
3354 in_check_memory_usage = 0;
3357 else
3358 #endif /* PUSH_ROUNDING */
3360 rtx target;
3362 /* Otherwise make space on the stack and copy the data
3363 to the address of that space. */
3365 /* Deduct words put into registers from the size we must copy. */
3366 if (partial != 0)
3368 if (GET_CODE (size) == CONST_INT)
3369 size = GEN_INT (INTVAL (size) - used);
3370 else
3371 size = expand_binop (GET_MODE (size), sub_optab, size,
3372 GEN_INT (used), NULL_RTX, 0,
3373 OPTAB_LIB_WIDEN);
3376 /* Get the address of the stack space.
3377 In this case, we do not deal with EXTRA separately.
3378 A single stack adjust will do. */
3379 if (! args_addr)
3381 temp = push_block (size, extra, where_pad == downward);
3382 extra = 0;
3384 else if (GET_CODE (args_so_far) == CONST_INT)
3385 temp = memory_address (BLKmode,
3386 plus_constant (args_addr,
3387 skip + INTVAL (args_so_far)));
3388 else
3389 temp = memory_address (BLKmode,
3390 plus_constant (gen_rtx_PLUS (Pmode,
3391 args_addr,
3392 args_so_far),
3393 skip));
3394 if (current_function_check_memory_usage && ! in_check_memory_usage)
3396 in_check_memory_usage = 1;
3397 target = copy_to_reg (temp);
3398 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3399 emit_library_call (chkr_copy_bitmap_libfunc,
3400 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3401 target, Pmode,
3402 XEXP (xinner, 0), Pmode,
3403 size, TYPE_MODE (sizetype));
3404 else
3405 emit_library_call (chkr_set_right_libfunc,
3406 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3407 target, Pmode,
3408 size, TYPE_MODE (sizetype),
3409 GEN_INT (MEMORY_USE_RW),
3410 TYPE_MODE (integer_type_node));
3411 in_check_memory_usage = 0;
3414 target = gen_rtx_MEM (BLKmode, temp);
3416 if (type != 0)
3418 set_mem_attributes (target, type, 1);
3419 /* Function incoming arguments may overlap with sibling call
3420 outgoing arguments and we cannot allow reordering of reads
3421 from function arguments with stores to outgoing arguments
3422 of sibling calls. */
3423 set_mem_alias_set (target, 0);
3426 /* TEMP is the address of the block. Copy the data there. */
3427 if (GET_CODE (size) == CONST_INT
3428 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3430 move_by_pieces (target, xinner, INTVAL (size), align);
3431 goto ret;
3433 else
3435 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3436 enum machine_mode mode;
3438 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3439 mode != VOIDmode;
3440 mode = GET_MODE_WIDER_MODE (mode))
3442 enum insn_code code = movstr_optab[(int) mode];
3443 insn_operand_predicate_fn pred;
3445 if (code != CODE_FOR_nothing
3446 && ((GET_CODE (size) == CONST_INT
3447 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3448 <= (GET_MODE_MASK (mode) >> 1)))
3449 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3450 && (!(pred = insn_data[(int) code].operand[0].predicate)
3451 || ((*pred) (target, BLKmode)))
3452 && (!(pred = insn_data[(int) code].operand[1].predicate)
3453 || ((*pred) (xinner, BLKmode)))
3454 && (!(pred = insn_data[(int) code].operand[3].predicate)
3455 || ((*pred) (opalign, VOIDmode))))
3457 rtx op2 = convert_to_mode (mode, size, 1);
3458 rtx last = get_last_insn ();
3459 rtx pat;
3461 pred = insn_data[(int) code].operand[2].predicate;
3462 if (pred != 0 && ! (*pred) (op2, mode))
3463 op2 = copy_to_mode_reg (mode, op2);
3465 pat = GEN_FCN ((int) code) (target, xinner,
3466 op2, opalign);
3467 if (pat)
3469 emit_insn (pat);
3470 goto ret;
3472 else
3473 delete_insns_since (last);
3478 if (!ACCUMULATE_OUTGOING_ARGS)
3480 /* If the source is referenced relative to the stack pointer,
3481 copy it to another register to stabilize it. We do not need
3482 to do this if we know that we won't be changing sp. */
3484 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3485 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3486 temp = copy_to_reg (temp);
3489 /* Make inhibit_defer_pop nonzero around the library call
3490 to force it to pop the bcopy-arguments right away. */
3491 NO_DEFER_POP;
3492 #ifdef TARGET_MEM_FUNCTIONS
3493 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3494 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3495 convert_to_mode (TYPE_MODE (sizetype),
3496 size, TREE_UNSIGNED (sizetype)),
3497 TYPE_MODE (sizetype));
3498 #else
3499 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3500 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3501 convert_to_mode (TYPE_MODE (integer_type_node),
3502 size,
3503 TREE_UNSIGNED (integer_type_node)),
3504 TYPE_MODE (integer_type_node));
3505 #endif
3506 OK_DEFER_POP;
3509 else if (partial > 0)
3511 /* Scalar partly in registers. */
3513 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3514 int i;
3515 int not_stack;
3516 /* # words of start of argument
3517 that we must make space for but need not store. */
3518 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3519 int args_offset = INTVAL (args_so_far);
3520 int skip;
3522 /* Push padding now if padding above and stack grows down,
3523 or if padding below and stack grows up.
3524 But if space already allocated, this has already been done. */
3525 if (extra && args_addr == 0
3526 && where_pad != none && where_pad != stack_direction)
3527 anti_adjust_stack (GEN_INT (extra));
3529 /* If we make space by pushing it, we might as well push
3530 the real data. Otherwise, we can leave OFFSET nonzero
3531 and leave the space uninitialized. */
3532 if (args_addr == 0)
3533 offset = 0;
3535 /* Now NOT_STACK gets the number of words that we don't need to
3536 allocate on the stack. */
3537 not_stack = partial - offset;
3539 /* If the partial register-part of the arg counts in its stack size,
3540 skip the part of stack space corresponding to the registers.
3541 Otherwise, start copying to the beginning of the stack space,
3542 by setting SKIP to 0. */
3543 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3545 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3546 x = validize_mem (force_const_mem (mode, x));
3548 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3549 SUBREGs of such registers are not allowed. */
3550 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3551 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3552 x = copy_to_reg (x);
3554 /* Loop over all the words allocated on the stack for this arg. */
3555 /* We can do it by words, because any scalar bigger than a word
3556 has a size that is a multiple of a word. */
3557 #ifndef PUSH_ARGS_REVERSED
3558 for (i = not_stack; i < size; i++)
3559 #else
3560 for (i = size - 1; i >= not_stack; i--)
3561 #endif
3562 if (i >= not_stack + offset)
3563 emit_push_insn (operand_subword_force (x, i, mode),
3564 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3565 0, args_addr,
3566 GEN_INT (args_offset + ((i - not_stack + skip)
3567 * UNITS_PER_WORD)),
3568 reg_parm_stack_space, alignment_pad);
3570 else
3572 rtx addr;
3573 rtx target = NULL_RTX;
3574 rtx dest;
3576 /* Push padding now if padding above and stack grows down,
3577 or if padding below and stack grows up.
3578 But if space already allocated, this has already been done. */
3579 if (extra && args_addr == 0
3580 && where_pad != none && where_pad != stack_direction)
3581 anti_adjust_stack (GEN_INT (extra));
3583 #ifdef PUSH_ROUNDING
3584 if (args_addr == 0 && PUSH_ARGS)
3585 emit_single_push_insn (mode, x, type);
3586 else
3587 #endif
3589 if (GET_CODE (args_so_far) == CONST_INT)
3590 addr
3591 = memory_address (mode,
3592 plus_constant (args_addr,
3593 INTVAL (args_so_far)));
3594 else
3595 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3596 args_so_far));
3597 target = addr;
3598 dest = gen_rtx_MEM (mode, addr);
3599 if (type != 0)
3601 set_mem_attributes (dest, type, 1);
3602 /* Function incoming arguments may overlap with sibling call
3603 outgoing arguments and we cannot allow reordering of reads
3604 from function arguments with stores to outgoing arguments
3605 of sibling calls. */
3606 set_mem_alias_set (dest, 0);
3609 emit_move_insn (dest, x);
3613 if (current_function_check_memory_usage && ! in_check_memory_usage)
3615 in_check_memory_usage = 1;
3616 if (target == 0)
3617 target = get_push_address (GET_MODE_SIZE (mode));
3619 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3620 emit_library_call (chkr_copy_bitmap_libfunc,
3621 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3622 Pmode, XEXP (x, 0), Pmode,
3623 GEN_INT (GET_MODE_SIZE (mode)),
3624 TYPE_MODE (sizetype));
3625 else
3626 emit_library_call (chkr_set_right_libfunc,
3627 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3628 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3629 TYPE_MODE (sizetype),
3630 GEN_INT (MEMORY_USE_RW),
3631 TYPE_MODE (integer_type_node));
3632 in_check_memory_usage = 0;
3636 ret:
3637 /* If part should go in registers, copy that part
3638 into the appropriate registers. Do this now, at the end,
3639 since mem-to-mem copies above may do function calls. */
3640 if (partial > 0 && reg != 0)
3642 /* Handle calls that pass values in multiple non-contiguous locations.
3643 The Irix 6 ABI has examples of this. */
3644 if (GET_CODE (reg) == PARALLEL)
3645 emit_group_load (reg, x, -1, align); /* ??? size? */
3646 else
3647 move_block_to_reg (REGNO (reg), x, partial, mode);
3650 if (extra && args_addr == 0 && where_pad == stack_direction)
3651 anti_adjust_stack (GEN_INT (extra));
3653 if (alignment_pad && args_addr == 0)
3654 anti_adjust_stack (alignment_pad);
3657 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3658 operations. */
3660 static rtx
3661 get_subtarget (x)
3662 rtx x;
3664 return ((x == 0
3665 /* Only registers can be subtargets. */
3666 || GET_CODE (x) != REG
3667 /* If the register is readonly, it can't be set more than once. */
3668 || RTX_UNCHANGING_P (x)
3669 /* Don't use hard regs to avoid extending their life. */
3670 || REGNO (x) < FIRST_PSEUDO_REGISTER
3671 /* Avoid subtargets inside loops,
3672 since they hide some invariant expressions. */
3673 || preserve_subexpressions_p ())
3674 ? 0 : x);
3677 /* Expand an assignment that stores the value of FROM into TO.
3678 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3679 (This may contain a QUEUED rtx;
3680 if the value is constant, this rtx is a constant.)
3681 Otherwise, the returned value is NULL_RTX.
3683 SUGGEST_REG is no longer actually used.
3684 It used to mean, copy the value through a register
3685 and return that register, if that is possible.
3686 We now use WANT_VALUE to decide whether to do this. */
3689 expand_assignment (to, from, want_value, suggest_reg)
3690 tree to, from;
3691 int want_value;
3692 int suggest_reg ATTRIBUTE_UNUSED;
3694 register rtx to_rtx = 0;
3695 rtx result;
3697 /* Don't crash if the lhs of the assignment was erroneous. */
3699 if (TREE_CODE (to) == ERROR_MARK)
3701 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3702 return want_value ? result : NULL_RTX;
3705 /* Assignment of a structure component needs special treatment
3706 if the structure component's rtx is not simply a MEM.
3707 Assignment of an array element at a constant index, and assignment of
3708 an array element in an unaligned packed structure field, has the same
3709 problem. */
3711 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3712 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3714 enum machine_mode mode1;
3715 HOST_WIDE_INT bitsize, bitpos;
3716 tree offset;
3717 int unsignedp;
3718 int volatilep = 0;
3719 tree tem;
3720 unsigned int alignment;
3722 push_temp_slots ();
3723 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3724 &unsignedp, &volatilep, &alignment);
3726 /* If we are going to use store_bit_field and extract_bit_field,
3727 make sure to_rtx will be safe for multiple use. */
3729 if (mode1 == VOIDmode && want_value)
3730 tem = stabilize_reference (tem);
3732 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3733 if (offset != 0)
3735 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3737 if (GET_CODE (to_rtx) != MEM)
3738 abort ();
3740 if (GET_MODE (offset_rtx) != ptr_mode)
3742 #ifdef POINTERS_EXTEND_UNSIGNED
3743 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3744 #else
3745 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3746 #endif
3749 /* A constant address in TO_RTX can have VOIDmode; we must not try
3750 to call force_reg in that case. Avoid that case. */
3751 if (GET_CODE (to_rtx) == MEM
3752 && GET_MODE (to_rtx) == BLKmode
3753 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3754 && bitsize
3755 && (bitpos % bitsize) == 0
3756 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3757 && alignment == GET_MODE_ALIGNMENT (mode1))
3759 rtx temp
3760 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3762 if (GET_CODE (XEXP (temp, 0)) == REG)
3763 to_rtx = temp;
3764 else
3765 to_rtx = (replace_equiv_address
3766 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3767 XEXP (temp, 0))));
3768 bitpos = 0;
3771 to_rtx = change_address (to_rtx, VOIDmode,
3772 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3773 force_reg (ptr_mode,
3774 offset_rtx)));
3777 if (volatilep)
3779 if (GET_CODE (to_rtx) == MEM)
3781 /* When the offset is zero, to_rtx is the address of the
3782 structure we are storing into, and hence may be shared.
3783 We must make a new MEM before setting the volatile bit. */
3784 if (offset == 0)
3785 to_rtx = copy_rtx (to_rtx);
3787 MEM_VOLATILE_P (to_rtx) = 1;
3789 #if 0 /* This was turned off because, when a field is volatile
3790 in an object which is not volatile, the object may be in a register,
3791 and then we would abort over here. */
3792 else
3793 abort ();
3794 #endif
3797 if (TREE_CODE (to) == COMPONENT_REF
3798 && TREE_READONLY (TREE_OPERAND (to, 1)))
3800 if (offset == 0)
3801 to_rtx = copy_rtx (to_rtx);
3803 RTX_UNCHANGING_P (to_rtx) = 1;
3806 /* Check the access. */
3807 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3809 rtx to_addr;
3810 int size;
3811 int best_mode_size;
3812 enum machine_mode best_mode;
3814 best_mode = get_best_mode (bitsize, bitpos,
3815 TYPE_ALIGN (TREE_TYPE (tem)),
3816 mode1, volatilep);
3817 if (best_mode == VOIDmode)
3818 best_mode = QImode;
3820 best_mode_size = GET_MODE_BITSIZE (best_mode);
3821 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3822 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3823 size *= GET_MODE_SIZE (best_mode);
3825 /* Check the access right of the pointer. */
3826 in_check_memory_usage = 1;
3827 if (size)
3828 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3829 VOIDmode, 3, to_addr, Pmode,
3830 GEN_INT (size), TYPE_MODE (sizetype),
3831 GEN_INT (MEMORY_USE_WO),
3832 TYPE_MODE (integer_type_node));
3833 in_check_memory_usage = 0;
3836 /* If this is a varying-length object, we must get the address of
3837 the source and do an explicit block move. */
3838 if (bitsize < 0)
3840 unsigned int from_align;
3841 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3842 rtx inner_to_rtx
3843 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3845 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3846 MIN (alignment, from_align));
3847 free_temp_slots ();
3848 pop_temp_slots ();
3849 return to_rtx;
3851 else
3853 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3854 (want_value
3855 /* Spurious cast for HPUX compiler. */
3856 ? ((enum machine_mode)
3857 TYPE_MODE (TREE_TYPE (to)))
3858 : VOIDmode),
3859 unsignedp,
3860 alignment,
3861 int_size_in_bytes (TREE_TYPE (tem)),
3862 get_alias_set (to));
3864 preserve_temp_slots (result);
3865 free_temp_slots ();
3866 pop_temp_slots ();
3868 /* If the value is meaningful, convert RESULT to the proper mode.
3869 Otherwise, return nothing. */
3870 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3871 TYPE_MODE (TREE_TYPE (from)),
3872 result,
3873 TREE_UNSIGNED (TREE_TYPE (to)))
3874 : NULL_RTX);
3878 /* If the rhs is a function call and its value is not an aggregate,
3879 call the function before we start to compute the lhs.
3880 This is needed for correct code for cases such as
3881 val = setjmp (buf) on machines where reference to val
3882 requires loading up part of an address in a separate insn.
3884 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3885 since it might be a promoted variable where the zero- or sign-extension
3886 needs to be done. Handling this in the normal way is safe because no
3887 computation is done before the call. */
3888 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3889 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3890 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3891 && GET_CODE (DECL_RTL (to)) == REG))
3893 rtx value;
3895 push_temp_slots ();
3896 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3897 if (to_rtx == 0)
3898 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3900 /* Handle calls that return values in multiple non-contiguous locations.
3901 The Irix 6 ABI has examples of this. */
3902 if (GET_CODE (to_rtx) == PARALLEL)
3903 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3904 TYPE_ALIGN (TREE_TYPE (from)));
3905 else if (GET_MODE (to_rtx) == BLKmode)
3906 emit_block_move (to_rtx, value, expr_size (from),
3907 TYPE_ALIGN (TREE_TYPE (from)));
3908 else
3910 #ifdef POINTERS_EXTEND_UNSIGNED
3911 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3912 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3913 value = convert_memory_address (GET_MODE (to_rtx), value);
3914 #endif
3915 emit_move_insn (to_rtx, value);
3917 preserve_temp_slots (to_rtx);
3918 free_temp_slots ();
3919 pop_temp_slots ();
3920 return want_value ? to_rtx : NULL_RTX;
3923 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3924 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3926 if (to_rtx == 0)
3928 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3929 if (GET_CODE (to_rtx) == MEM)
3930 set_mem_alias_set (to_rtx, get_alias_set (to));
3933 /* Don't move directly into a return register. */
3934 if (TREE_CODE (to) == RESULT_DECL
3935 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3937 rtx temp;
3939 push_temp_slots ();
3940 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3942 if (GET_CODE (to_rtx) == PARALLEL)
3943 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3944 TYPE_ALIGN (TREE_TYPE (from)));
3945 else
3946 emit_move_insn (to_rtx, temp);
3948 preserve_temp_slots (to_rtx);
3949 free_temp_slots ();
3950 pop_temp_slots ();
3951 return want_value ? to_rtx : NULL_RTX;
3954 /* In case we are returning the contents of an object which overlaps
3955 the place the value is being stored, use a safe function when copying
3956 a value through a pointer into a structure value return block. */
3957 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3958 && current_function_returns_struct
3959 && !current_function_returns_pcc_struct)
3961 rtx from_rtx, size;
3963 push_temp_slots ();
3964 size = expr_size (from);
3965 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3966 EXPAND_MEMORY_USE_DONT);
3968 /* Copy the rights of the bitmap. */
3969 if (current_function_check_memory_usage)
3970 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TREE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
3977 #ifdef TARGET_MEM_FUNCTIONS
3978 emit_library_call (memmove_libfunc, LCT_NORMAL,
3979 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3980 XEXP (from_rtx, 0), Pmode,
3981 convert_to_mode (TYPE_MODE (sizetype),
3982 size, TREE_UNSIGNED (sizetype)),
3983 TYPE_MODE (sizetype));
3984 #else
3985 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3986 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3987 XEXP (to_rtx, 0), Pmode,
3988 convert_to_mode (TYPE_MODE (integer_type_node),
3989 size, TREE_UNSIGNED (integer_type_node)),
3990 TYPE_MODE (integer_type_node));
3991 #endif
3993 preserve_temp_slots (to_rtx);
3994 free_temp_slots ();
3995 pop_temp_slots ();
3996 return want_value ? to_rtx : NULL_RTX;
3999 /* Compute FROM and store the value in the rtx we got. */
4001 push_temp_slots ();
4002 result = store_expr (from, to_rtx, want_value);
4003 preserve_temp_slots (result);
4004 free_temp_slots ();
4005 pop_temp_slots ();
4006 return want_value ? result : NULL_RTX;
4009 /* Generate code for computing expression EXP,
4010 and storing the value into TARGET.
4011 TARGET may contain a QUEUED rtx.
4013 If WANT_VALUE is nonzero, return a copy of the value
4014 not in TARGET, so that we can be sure to use the proper
4015 value in a containing expression even if TARGET has something
4016 else stored in it. If possible, we copy the value through a pseudo
4017 and return that pseudo. Or, if the value is constant, we try to
4018 return the constant. In some cases, we return a pseudo
4019 copied *from* TARGET.
4021 If the mode is BLKmode then we may return TARGET itself.
4022 It turns out that in BLKmode it doesn't cause a problem,
4023 because C has no operators that could combine two different
4024 assignments into the same BLKmode object with different values
4025 with no sequence point. Will other languages need this to
4026 be more thorough?
4028 If WANT_VALUE is 0, we return NULL, to make sure
4029 to catch quickly any cases where the caller uses the value
4030 and fails to set WANT_VALUE. */
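/* A rough illustration of the WANT_VALUE convention (a hypothetical nested
   assignment, not code from this file): for `a = (b = c);' the inner
   assignment is expanded with WANT_VALUE nonzero, so a copy of the stored
   value (usually in a pseudo) is returned and the outer assignment can use
   it without re-reading B.  */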
4032 rtx
4033 store_expr (exp, target, want_value)
4034 register tree exp;
4035 register rtx target;
4036 int want_value;
4038 register rtx temp;
4039 int dont_return_target = 0;
4040 int dont_store_target = 0;
4042 if (TREE_CODE (exp) == COMPOUND_EXPR)
4044 /* Perform first part of compound expression, then assign from second
4045 part. */
4046 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4047 emit_queue ();
4048 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4050 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4052 /* For conditional expression, get safe form of the target. Then
4053 test the condition, doing the appropriate assignment on either
4054 side. This avoids the creation of unnecessary temporaries.
4055 For non-BLKmode, it is more efficient not to do this. */
4057 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4059 emit_queue ();
4060 target = protect_from_queue (target, 1);
4062 do_pending_stack_adjust ();
4063 NO_DEFER_POP;
4064 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4065 start_cleanup_deferral ();
4066 store_expr (TREE_OPERAND (exp, 1), target, 0);
4067 end_cleanup_deferral ();
4068 emit_queue ();
4069 emit_jump_insn (gen_jump (lab2));
4070 emit_barrier ();
4071 emit_label (lab1);
4072 start_cleanup_deferral ();
4073 store_expr (TREE_OPERAND (exp, 2), target, 0);
4074 end_cleanup_deferral ();
4075 emit_queue ();
4076 emit_label (lab2);
4077 OK_DEFER_POP;
4079 return want_value ? target : NULL_RTX;
4081 else if (queued_subexp_p (target))
4082 /* If target contains a postincrement, let's not risk
4083 using it as the place to generate the rhs. */
4085 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4087 /* Expand EXP into a new pseudo. */
4088 temp = gen_reg_rtx (GET_MODE (target));
4089 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4091 else
4092 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4094 /* If target is volatile, ANSI requires accessing the value
4095 *from* the target, if it is accessed. So make that happen.
4096 In no case return the target itself. */
4097 if (! MEM_VOLATILE_P (target) && want_value)
4098 dont_return_target = 1;
4100 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4101 && GET_MODE (target) != BLKmode)
4102 /* If target is in memory and caller wants value in a register instead,
4103 arrange that. Pass TARGET as target for expand_expr so that,
4104 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4105 We know expand_expr will not use the target in that case.
4106 Don't do this if TARGET is volatile because we are supposed
4107 to write it and then read it. */
4109 temp = expand_expr (exp, target, GET_MODE (target), 0);
4110 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4112 /* If TEMP is already in the desired TARGET, only copy it from
4113 memory and don't store it there again. */
4114 if (temp == target
4115 || (rtx_equal_p (temp, target)
4116 && ! side_effects_p (temp) && ! side_effects_p (target)))
4117 dont_store_target = 1;
4118 temp = copy_to_reg (temp);
4120 dont_return_target = 1;
4122 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4123 /* If this is a scalar in a register that is stored in a wider mode
4124 than the declared mode, compute the result into its declared mode
4125 and then convert to the wider mode. Our value is the computed
4126 expression. */
4128 /* If we don't want a value, we can do the conversion inside EXP,
4129 which will often result in some optimizations. Do the conversion
4130 in two steps: first change the signedness, if needed, then
4131 the extend. But don't do this if the type of EXP is a subtype
4132 of something else since then the conversion might involve
4133 more than just converting modes. */
4134 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4135 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4137 if (TREE_UNSIGNED (TREE_TYPE (exp))
4138 != SUBREG_PROMOTED_UNSIGNED_P (target))
4139 exp
4140 = convert
4141 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4142 TREE_TYPE (exp)),
4143 exp);
4145 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4146 SUBREG_PROMOTED_UNSIGNED_P (target)),
4147 exp);
4150 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4152 /* If TEMP is a volatile MEM and we want a result value, make
4153 the access now so it gets done only once. Likewise if
4154 it contains TARGET. */
4155 if (GET_CODE (temp) == MEM && want_value
4156 && (MEM_VOLATILE_P (temp)
4157 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4158 temp = copy_to_reg (temp);
4160 /* If TEMP is a VOIDmode constant, use convert_modes to make
4161 sure that we properly convert it. */
4162 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4163 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4164 TYPE_MODE (TREE_TYPE (exp)), temp,
4165 SUBREG_PROMOTED_UNSIGNED_P (target));
4167 convert_move (SUBREG_REG (target), temp,
4168 SUBREG_PROMOTED_UNSIGNED_P (target));
4170 /* If we promoted a constant, change the mode back down to match
4171 target. Otherwise, the caller might get confused by a result whose
4172 mode is larger than expected. */
4174 if (want_value && GET_MODE (temp) != GET_MODE (target)
4175 && GET_MODE (temp) != VOIDmode)
4177 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4178 SUBREG_PROMOTED_VAR_P (temp) = 1;
4179 SUBREG_PROMOTED_UNSIGNED_P (temp)
4180 = SUBREG_PROMOTED_UNSIGNED_P (target);
4183 return want_value ? temp : NULL_RTX;
4185 else
4187 temp = expand_expr (exp, target, GET_MODE (target), 0);
4188 /* Return TARGET if it's a specified hardware register.
4189 If TARGET is a volatile mem ref, either return TARGET
4190 or return a reg copied *from* TARGET; ANSI requires this.
4192 Otherwise, if TEMP is not TARGET, return TEMP
4193 if it is constant (for efficiency),
4194 or if we really want the correct value. */
4195 if (!(target && GET_CODE (target) == REG
4196 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4197 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4198 && ! rtx_equal_p (temp, target)
4199 && (CONSTANT_P (temp) || want_value))
4200 dont_return_target = 1;
4203 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4204 the same as that of TARGET, adjust the constant. This is needed, for
4205 example, in case it is a CONST_DOUBLE and we want only a word-sized
4206 value. */
4207 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4208 && TREE_CODE (exp) != ERROR_MARK
4209 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4210 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4211 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4213 if (current_function_check_memory_usage
4214 && GET_CODE (target) == MEM
4215 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4217 in_check_memory_usage = 1;
4218 if (GET_CODE (temp) == MEM)
4219 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4220 VOIDmode, 3, XEXP (target, 0), Pmode,
4221 XEXP (temp, 0), Pmode,
4222 expr_size (exp), TYPE_MODE (sizetype));
4223 else
4224 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4225 VOIDmode, 3, XEXP (target, 0), Pmode,
4226 expr_size (exp), TYPE_MODE (sizetype),
4227 GEN_INT (MEMORY_USE_WO),
4228 TYPE_MODE (integer_type_node));
4229 in_check_memory_usage = 0;
4232 /* If value was not generated in the target, store it there.
4233 Convert the value to TARGET's type first if necessary. */
4234 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4235 one or both of them are volatile memory refs, we have to distinguish
4236 two cases:
4237 - expand_expr has used TARGET. In this case, we must not generate
4238 another copy. This can be detected by TARGET being equal according
4239 to == .
4240 - expand_expr has not used TARGET - that means that the source just
4241 happens to have the same RTX form. Since temp will have been created
4242 by expand_expr, it will compare unequal according to == .
4243 We must generate a copy in this case, to reach the correct number
4244 of volatile memory references. */
4246 if ((! rtx_equal_p (temp, target)
4247 || (temp != target && (side_effects_p (temp)
4248 || side_effects_p (target))))
4249 && TREE_CODE (exp) != ERROR_MARK
4250 && ! dont_store_target)
4252 target = protect_from_queue (target, 1);
4253 if (GET_MODE (temp) != GET_MODE (target)
4254 && GET_MODE (temp) != VOIDmode)
4256 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4257 if (dont_return_target)
4259 /* In this case, we will return TEMP,
4260 so make sure it has the proper mode.
4261 But don't forget to store the value into TARGET. */
4262 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4263 emit_move_insn (target, temp);
4265 else
4266 convert_move (target, temp, unsignedp);
4269 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4271 /* Handle copying a string constant into an array.
4272 The string constant may be shorter than the array.
4273 So copy just the string's actual length, and clear the rest. */
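/* A small worked example (illustrative only, not part of the original
   source): for `char buf[8] = "abc";' the string constant is 4 bytes long
   counting the terminating nul, so 4 bytes are block-copied and the
   remaining 4 bytes of BUF are cleared by the code below.  */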
4274 rtx size;
4275 rtx addr;
4277 /* Get the size of the data type of the string,
4278 which is actually the size of the target. */
4279 size = expr_size (exp);
4280 if (GET_CODE (size) == CONST_INT
4281 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4282 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4283 else
4285 /* Compute the size of the data to copy from the string. */
4286 tree copy_size
4287 = size_binop (MIN_EXPR,
4288 make_tree (sizetype, size),
4289 size_int (TREE_STRING_LENGTH (exp)));
4290 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4291 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4292 VOIDmode, 0);
4293 rtx label = 0;
4295 /* Copy that much. */
4296 emit_block_move (target, temp, copy_size_rtx,
4297 TYPE_ALIGN (TREE_TYPE (exp)));
4299 /* Figure out how much is left in TARGET that we have to clear.
4300 Do all calculations in ptr_mode. */
4302 addr = XEXP (target, 0);
4303 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4305 if (GET_CODE (copy_size_rtx) == CONST_INT)
4307 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4308 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4309 align = MIN (align,
4310 (unsigned int) (BITS_PER_UNIT
4311 * (INTVAL (copy_size_rtx)
4312 & - INTVAL (copy_size_rtx))));
4314 else
4316 addr = force_reg (ptr_mode, addr);
4317 addr = expand_binop (ptr_mode, add_optab, addr,
4318 copy_size_rtx, NULL_RTX, 0,
4319 OPTAB_LIB_WIDEN);
4321 size = expand_binop (ptr_mode, sub_optab, size,
4322 copy_size_rtx, NULL_RTX, 0,
4323 OPTAB_LIB_WIDEN);
4325 align = BITS_PER_UNIT;
4326 label = gen_label_rtx ();
4327 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4328 GET_MODE (size), 0, 0, label);
4330 align = MIN (align, expr_align (copy_size));
4332 if (size != const0_rtx)
4334 rtx dest = gen_rtx_MEM (BLKmode, addr);
4336 MEM_COPY_ATTRIBUTES (dest, target);
4338 /* Be sure we can write on ADDR. */
4339 in_check_memory_usage = 1;
4340 if (current_function_check_memory_usage)
4341 emit_library_call (chkr_check_addr_libfunc,
4342 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4343 addr, Pmode,
4344 size, TYPE_MODE (sizetype),
4345 GEN_INT (MEMORY_USE_WO),
4346 TYPE_MODE (integer_type_node));
4347 in_check_memory_usage = 0;
4348 clear_storage (dest, size, align);
4351 if (label)
4352 emit_label (label);
4355 /* Handle calls that return values in multiple non-contiguous locations.
4356 The Irix 6 ABI has examples of this. */
4357 else if (GET_CODE (target) == PARALLEL)
4358 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4359 TYPE_ALIGN (TREE_TYPE (exp)));
4360 else if (GET_MODE (temp) == BLKmode)
4361 emit_block_move (target, temp, expr_size (exp),
4362 TYPE_ALIGN (TREE_TYPE (exp)));
4363 else
4364 emit_move_insn (target, temp);
4367 /* If we don't want a value, return NULL_RTX. */
4368 if (! want_value)
4369 return NULL_RTX;
4371 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4372 ??? The latter test doesn't seem to make sense. */
4373 else if (dont_return_target && GET_CODE (temp) != MEM)
4374 return temp;
4376 /* Return TARGET itself if it is a hard register. */
4377 else if (want_value && GET_MODE (target) != BLKmode
4378 && ! (GET_CODE (target) == REG
4379 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4380 return copy_to_reg (target);
4382 else
4383 return target;
4386 /* Return 1 if EXP just contains zeros. */
4388 static int
4389 is_zeros_p (exp)
4390 tree exp;
4392 tree elt;
4394 switch (TREE_CODE (exp))
4396 case CONVERT_EXPR:
4397 case NOP_EXPR:
4398 case NON_LVALUE_EXPR:
4399 return is_zeros_p (TREE_OPERAND (exp, 0));
4401 case INTEGER_CST:
4402 return integer_zerop (exp);
4404 case COMPLEX_CST:
4405 return
4406 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4408 case REAL_CST:
4409 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4411 case CONSTRUCTOR:
4412 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4413 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4414 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4415 if (! is_zeros_p (TREE_VALUE (elt)))
4416 return 0;
4418 return 1;
4420 default:
4421 return 0;
4425 /* Return 1 if EXP contains mostly (3/4) zeros. */
4427 static int
4428 mostly_zeros_p (exp)
4429 tree exp;
4431 if (TREE_CODE (exp) == CONSTRUCTOR)
4433 int elts = 0, zeros = 0;
4434 tree elt = CONSTRUCTOR_ELTS (exp);
4435 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4437 /* If there are no ranges of true bits, it is all zero. */
4438 return elt == NULL_TREE;
4440 for (; elt; elt = TREE_CHAIN (elt))
4442 /* We do not handle the case where the index is a RANGE_EXPR,
4443 so the statistic will be somewhat inaccurate.
4444 We do make a more accurate count in store_constructor itself,
4445 so since this function is only used for nested array elements,
4446 this should be close enough. */
4447 if (mostly_zeros_p (TREE_VALUE (elt)))
4448 zeros++;
4449 elts++;
4452 return 4 * zeros >= 3 * elts;
4455 return is_zeros_p (exp);
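/* A worked example of the mostly_zeros_p threshold above (illustration
   only): a CONSTRUCTOR with 8 elements counts as mostly zeros when at
   least 6 of them are, since 4 * 6 >= 3 * 8, while 4 * 5 < 3 * 8.  */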
4458 /* Helper function for store_constructor.
4459 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4460 TYPE is the type of the CONSTRUCTOR, not the element type.
4461 ALIGN and CLEARED are as for store_constructor.
4462 ALIAS_SET is the alias set to use for any stores.
4464 This provides a recursive shortcut back to store_constructor when it isn't
4465 necessary to go through store_field. This is so that we can pass through
4466 the cleared field to let store_constructor know that we may not have to
4467 clear a substructure if the outer structure has already been cleared. */
4469 static void
4470 store_constructor_field (target, bitsize, bitpos,
4471 mode, exp, type, align, cleared, alias_set)
4472 rtx target;
4473 unsigned HOST_WIDE_INT bitsize;
4474 HOST_WIDE_INT bitpos;
4475 enum machine_mode mode;
4476 tree exp, type;
4477 unsigned int align;
4478 int cleared;
4479 int alias_set;
4481 if (TREE_CODE (exp) == CONSTRUCTOR
4482 && bitpos % BITS_PER_UNIT == 0
4483 /* If we have a non-zero bitpos for a register target, then we just
4484 let store_field do the bitfield handling. This is unlikely to
4485 generate unnecessary clear instructions anyways. */
4486 && (bitpos == 0 || GET_CODE (target) == MEM))
4488 if (bitpos != 0)
4489 target
4490 = adjust_address (target,
4491 GET_MODE (target) == BLKmode
4492 || 0 != (bitpos
4493 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4494 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4497 /* Show the alignment may no longer be what it was and update the alias
4498 set, if required. */
4499 if (bitpos != 0)
4500 align = MIN (align, (unsigned int) bitpos & - bitpos);
4501 if (GET_CODE (target) == MEM)
4502 set_mem_alias_set (target, alias_set);
4504 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4506 else
4507 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4508 int_size_in_bytes (type), alias_set);
4511 /* Store the value of constructor EXP into the rtx TARGET.
4512 TARGET is either a REG or a MEM.
4513 ALIGN is the maximum known alignment for TARGET.
4514 CLEARED is true if TARGET is known to have been zero'd.
4515 SIZE is the number of bytes of TARGET we are allowed to modify: this
4516 may not be the same as the size of EXP if we are assigning to a field
4517 which has been packed to exclude padding bits. */
4519 static void
4520 store_constructor (exp, target, align, cleared, size)
4521 tree exp;
4522 rtx target;
4523 unsigned int align;
4524 int cleared;
4525 HOST_WIDE_INT size;
4527 tree type = TREE_TYPE (exp);
4528 #ifdef WORD_REGISTER_OPERATIONS
4529 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4530 #endif
4532 /* We know our target cannot conflict, since safe_from_p has been called. */
4533 #if 0
4534 /* Don't try copying piece by piece into a hard register
4535 since that is vulnerable to being clobbered by EXP.
4536 Instead, construct in a pseudo register and then copy it all. */
4537 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4539 rtx temp = gen_reg_rtx (GET_MODE (target));
4540 store_constructor (exp, temp, align, cleared, size);
4541 emit_move_insn (target, temp);
4542 return;
4544 #endif
4546 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4547 || TREE_CODE (type) == QUAL_UNION_TYPE)
4549 register tree elt;
4551 /* Inform later passes that the whole union value is dead. */
4552 if ((TREE_CODE (type) == UNION_TYPE
4553 || TREE_CODE (type) == QUAL_UNION_TYPE)
4554 && ! cleared)
4556 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4558 /* If the constructor is empty, clear the union. */
4559 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4560 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4563 /* If we are building a static constructor into a register,
4564 set the initial value as zero so we can fold the value into
4565 a constant. But if more than one register is involved,
4566 this probably loses. */
4567 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4568 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4570 if (! cleared)
4571 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4573 cleared = 1;
4576 /* If the constructor has fewer fields than the structure
4577 or if we are initializing the structure to mostly zeros,
4578 clear the whole structure first. Don't do this if TARGET is a
4579 register whose mode size isn't equal to SIZE since clear_storage
4580 can't handle this case. */
4581 else if (size > 0
4582 && ((list_length (CONSTRUCTOR_ELTS (exp))
4583 != fields_length (type))
4584 || mostly_zeros_p (exp))
4585 && (GET_CODE (target) != REG
4586 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4588 if (! cleared)
4589 clear_storage (target, GEN_INT (size), align);
4591 cleared = 1;
4593 else if (! cleared)
4594 /* Inform later passes that the old value is dead. */
4595 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4597 /* Store each element of the constructor into
4598 the corresponding field of TARGET. */
4600 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4602 register tree field = TREE_PURPOSE (elt);
4603 #ifdef WORD_REGISTER_OPERATIONS
4604 tree value = TREE_VALUE (elt);
4605 #endif
4606 register enum machine_mode mode;
4607 HOST_WIDE_INT bitsize;
4608 HOST_WIDE_INT bitpos = 0;
4609 int unsignedp;
4610 tree offset;
4611 rtx to_rtx = target;
4613 /* Just ignore missing fields.
4614 We cleared the whole structure, above,
4615 if any fields are missing. */
4616 if (field == 0)
4617 continue;
4619 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4620 continue;
4622 if (host_integerp (DECL_SIZE (field), 1))
4623 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4624 else
4625 bitsize = -1;
4627 unsignedp = TREE_UNSIGNED (field);
4628 mode = DECL_MODE (field);
4629 if (DECL_BIT_FIELD (field))
4630 mode = VOIDmode;
4632 offset = DECL_FIELD_OFFSET (field);
4633 if (host_integerp (offset, 0)
4634 && host_integerp (bit_position (field), 0))
4636 bitpos = int_bit_position (field);
4637 offset = 0;
4639 else
4640 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4642 if (offset)
4644 rtx offset_rtx;
4646 if (contains_placeholder_p (offset))
4647 offset = build (WITH_RECORD_EXPR, sizetype,
4648 offset, make_tree (TREE_TYPE (exp), target));
4650 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4651 if (GET_CODE (to_rtx) != MEM)
4652 abort ();
4654 if (GET_MODE (offset_rtx) != ptr_mode)
4656 #ifdef POINTERS_EXTEND_UNSIGNED
4657 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4658 #else
4659 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4660 #endif
4663 to_rtx
4664 = change_address (to_rtx, VOIDmode,
4665 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4666 force_reg (ptr_mode,
4667 offset_rtx)));
4668 align = DECL_OFFSET_ALIGN (field);
4671 if (TREE_READONLY (field))
4673 if (GET_CODE (to_rtx) == MEM)
4674 to_rtx = copy_rtx (to_rtx);
4676 RTX_UNCHANGING_P (to_rtx) = 1;
4679 #ifdef WORD_REGISTER_OPERATIONS
4680 /* If this initializes a field that is smaller than a word, at the
4681 start of a word, try to widen it to a full word.
4682 This special case allows us to output C++ member function
4683 initializations in a form that the optimizers can understand. */
4684 if (GET_CODE (target) == REG
4685 && bitsize < BITS_PER_WORD
4686 && bitpos % BITS_PER_WORD == 0
4687 && GET_MODE_CLASS (mode) == MODE_INT
4688 && TREE_CODE (value) == INTEGER_CST
4689 && exp_size >= 0
4690 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4692 tree type = TREE_TYPE (value);
4693 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4695 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4696 value = convert (type, value);
4698 if (BYTES_BIG_ENDIAN)
4699 value
4700 = fold (build (LSHIFT_EXPR, type, value,
4701 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4702 bitsize = BITS_PER_WORD;
4703 mode = word_mode;
4705 #endif
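/* Illustration of the widening above (assuming 32-bit words; not part of
   the original source): an 8-bit INTEGER_CST initializer at the start of
   a word in a REG target is converted to a word-sized type and, on a
   BYTES_BIG_ENDIAN target, shifted left by 32 - 8 = 24 bits so that it
   occupies the high-order end of the word; the store below then writes a
   full word in word_mode.  */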
4706 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4707 TREE_VALUE (elt), type, align, cleared,
4708 (DECL_NONADDRESSABLE_P (field)
4709 && GET_CODE (to_rtx) == MEM)
4710 ? MEM_ALIAS_SET (to_rtx)
4711 : get_alias_set (TREE_TYPE (field)));
4714 else if (TREE_CODE (type) == ARRAY_TYPE)
4716 register tree elt;
4717 register int i;
4718 int need_to_clear;
4719 tree domain = TYPE_DOMAIN (type);
4720 tree elttype = TREE_TYPE (type);
4721 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4722 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4723 HOST_WIDE_INT minelt = 0;
4724 HOST_WIDE_INT maxelt = 0;
4726 /* If we have constant bounds for the range of the type, get them. */
4727 if (const_bounds_p)
4729 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4730 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4733 /* If the constructor has fewer elements than the array,
4734 clear the whole array first. Similarly if this is
4735 a static constructor of a non-BLKmode object.
4736 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4737 need_to_clear = 1;
4738 else
4740 HOST_WIDE_INT count = 0, zero_count = 0;
4741 need_to_clear = ! const_bounds_p;
4743 /* This loop is a more accurate version of the loop in
4744 mostly_zeros_p (it handles RANGE_EXPR in an index).
4745 It is also needed to check for missing elements. */
4746 for (elt = CONSTRUCTOR_ELTS (exp);
4747 elt != NULL_TREE && ! need_to_clear;
4748 elt = TREE_CHAIN (elt))
4750 tree index = TREE_PURPOSE (elt);
4751 HOST_WIDE_INT this_node_count;
4753 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4755 tree lo_index = TREE_OPERAND (index, 0);
4756 tree hi_index = TREE_OPERAND (index, 1);
4758 if (! host_integerp (lo_index, 1)
4759 || ! host_integerp (hi_index, 1))
4761 need_to_clear = 1;
4762 break;
4765 this_node_count = (tree_low_cst (hi_index, 1)
4766 - tree_low_cst (lo_index, 1) + 1);
4768 else
4769 this_node_count = 1;
4771 count += this_node_count;
4772 if (mostly_zeros_p (TREE_VALUE (elt)))
4773 zero_count += this_node_count;
4776 /* Clear the entire array first if there are any missing elements,
4777 or if the incidence of zero elements is >= 75%. */
4778 if (! need_to_clear
4779 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4780 need_to_clear = 1;
4783 if (need_to_clear && size > 0)
4785 if (! cleared)
4786 clear_storage (target, GEN_INT (size), align);
4787 cleared = 1;
4789 else if (REG_P (target))
4790 /* Inform later passes that the old value is dead. */
4791 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4793 /* Store each element of the constructor into
4794 the corresponding element of TARGET, determined
4795 by counting the elements. */
4796 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4797 elt;
4798 elt = TREE_CHAIN (elt), i++)
4800 register enum machine_mode mode;
4801 HOST_WIDE_INT bitsize;
4802 HOST_WIDE_INT bitpos;
4803 int unsignedp;
4804 tree value = TREE_VALUE (elt);
4805 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4806 tree index = TREE_PURPOSE (elt);
4807 rtx xtarget = target;
4809 if (cleared && is_zeros_p (value))
4810 continue;
4812 unsignedp = TREE_UNSIGNED (elttype);
4813 mode = TYPE_MODE (elttype);
4814 if (mode == BLKmode)
4815 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4816 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4817 : -1);
4818 else
4819 bitsize = GET_MODE_BITSIZE (mode);
4821 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4823 tree lo_index = TREE_OPERAND (index, 0);
4824 tree hi_index = TREE_OPERAND (index, 1);
4825 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4826 struct nesting *loop;
4827 HOST_WIDE_INT lo, hi, count;
4828 tree position;
4830 /* If the range is constant and "small", unroll the loop. */
4831 if (const_bounds_p
4832 && host_integerp (lo_index, 0)
4833 && host_integerp (hi_index, 0)
4834 && (lo = tree_low_cst (lo_index, 0),
4835 hi = tree_low_cst (hi_index, 0),
4836 count = hi - lo + 1,
4837 (GET_CODE (target) != MEM
4838 || count <= 2
4839 || (host_integerp (TYPE_SIZE (elttype), 1)
4840 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4841 <= 40 * 8)))))
4843 lo -= minelt; hi -= minelt;
4844 for (; lo <= hi; lo++)
4846 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4847 store_constructor_field
4848 (target, bitsize, bitpos, mode, value, type, align,
4849 cleared,
4850 TYPE_NONALIASED_COMPONENT (type)
4851 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4854 else
4856 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4857 loop_top = gen_label_rtx ();
4858 loop_end = gen_label_rtx ();
4860 unsignedp = TREE_UNSIGNED (domain);
4862 index = build_decl (VAR_DECL, NULL_TREE, domain);
4864 index_r
4865 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4866 &unsignedp, 0));
4867 SET_DECL_RTL (index, index_r);
4868 if (TREE_CODE (value) == SAVE_EXPR
4869 && SAVE_EXPR_RTL (value) == 0)
4871 /* Make sure value gets expanded once before the
4872 loop. */
4873 expand_expr (value, const0_rtx, VOIDmode, 0);
4874 emit_queue ();
4876 store_expr (lo_index, index_r, 0);
4877 loop = expand_start_loop (0);
4879 /* Assign value to element index. */
4880 position
4881 = convert (ssizetype,
4882 fold (build (MINUS_EXPR, TREE_TYPE (index),
4883 index, TYPE_MIN_VALUE (domain))));
4884 position = size_binop (MULT_EXPR, position,
4885 convert (ssizetype,
4886 TYPE_SIZE_UNIT (elttype)));
4888 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4889 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4890 xtarget = change_address (target, mode, addr);
4891 if (TREE_CODE (value) == CONSTRUCTOR)
4892 store_constructor (value, xtarget, align, cleared,
4893 bitsize / BITS_PER_UNIT);
4894 else
4895 store_expr (value, xtarget, 0);
4897 expand_exit_loop_if_false (loop,
4898 build (LT_EXPR, integer_type_node,
4899 index, hi_index));
4901 expand_increment (build (PREINCREMENT_EXPR,
4902 TREE_TYPE (index),
4903 index, integer_one_node), 0, 0);
4904 expand_end_loop ();
4905 emit_label (loop_end);
4908 else if ((index != 0 && ! host_integerp (index, 0))
4909 || ! host_integerp (TYPE_SIZE (elttype), 1))
4911 rtx pos_rtx, addr;
4912 tree position;
4914 if (index == 0)
4915 index = ssize_int (1);
4917 if (minelt)
4918 index = convert (ssizetype,
4919 fold (build (MINUS_EXPR, index,
4920 TYPE_MIN_VALUE (domain))));
4922 position = size_binop (MULT_EXPR, index,
4923 convert (ssizetype,
4924 TYPE_SIZE_UNIT (elttype)));
4925 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4926 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4927 xtarget = change_address (target, mode, addr);
4928 store_expr (value, xtarget, 0);
4930 else
4932 if (index != 0)
4933 bitpos = ((tree_low_cst (index, 0) - minelt)
4934 * tree_low_cst (TYPE_SIZE (elttype), 1));
4935 else
4936 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4938 store_constructor_field (target, bitsize, bitpos, mode, value,
4939 type, align, cleared,
4940 TYPE_NONALIASED_COMPONENT (type)
4941 && GET_CODE (target) == MEM
4942 ? MEM_ALIAS_SET (target) :
4943 get_alias_set (elttype));
4949 /* Set constructor assignments. */
4950 else if (TREE_CODE (type) == SET_TYPE)
4952 tree elt = CONSTRUCTOR_ELTS (exp);
4953 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4954 tree domain = TYPE_DOMAIN (type);
4955 tree domain_min, domain_max, bitlength;
4957 /* The default implementation strategy is to extract the constant
4958 parts of the constructor, use that to initialize the target,
4959 and then "or" in whatever non-constant ranges we need in addition.
4961 If a large set is all zero or all ones, it is
4962 probably better to set it using memset (if available) or bzero.
4963 Also, if a large set has just a single range, it may be
4964 better to first clear the whole set (using bzero/memset)
4965 and then set just the bits we want. */
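/* A rough illustration of the constant-word path below (assuming a 32-bit
   set word and !BYTES_BIG_ENDIAN bit numbering; not part of the original
   source): for a set with domain 0..31, the constructor { 3, 10..12 }
   packs into the single word
   (1 << 3) | (1 << 10) | (1 << 11) | (1 << 12) == 0x1c08,
   which is then written with one move insn.  */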
4967 /* Check for all zeros. */
4968 if (elt == NULL_TREE && size > 0)
4970 if (!cleared)
4971 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4972 return;
4975 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4976 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4977 bitlength = size_binop (PLUS_EXPR,
4978 size_diffop (domain_max, domain_min),
4979 ssize_int (1));
4981 nbits = tree_low_cst (bitlength, 1);
4983 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4984 are "complicated" (more than one range), initialize (the
4985 constant parts) by copying from a constant. */
4986 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4987 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4989 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4990 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4991 char *bit_buffer = (char *) alloca (nbits);
4992 HOST_WIDE_INT word = 0;
4993 unsigned int bit_pos = 0;
4994 unsigned int ibit = 0;
4995 unsigned int offset = 0; /* In bytes from beginning of set. */
4997 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4998 for (;;)
5000 if (bit_buffer[ibit])
5002 if (BYTES_BIG_ENDIAN)
5003 word |= (1 << (set_word_size - 1 - bit_pos));
5004 else
5005 word |= 1 << bit_pos;
5008 bit_pos++; ibit++;
5009 if (bit_pos >= set_word_size || ibit == nbits)
5011 if (word != 0 || ! cleared)
5013 rtx datum = GEN_INT (word);
5014 rtx to_rtx;
5016 /* The assumption here is that it is safe to use
5017 XEXP if the set is multi-word, but not if
5018 it's single-word. */
5019 if (GET_CODE (target) == MEM)
5020 to_rtx = adjust_address (target, mode, offset);
5021 else if (offset == 0)
5022 to_rtx = target;
5023 else
5024 abort ();
5025 emit_move_insn (to_rtx, datum);
5028 if (ibit == nbits)
5029 break;
5030 word = 0;
5031 bit_pos = 0;
5032 offset += set_word_size / BITS_PER_UNIT;
5036 else if (!cleared)
5037 /* Don't bother clearing storage if the set is all ones. */
5038 if (TREE_CHAIN (elt) != NULL_TREE
5039 || (TREE_PURPOSE (elt) == NULL_TREE
5040 ? nbits != 1
5041 : ( ! host_integerp (TREE_VALUE (elt), 0)
5042 || ! host_integerp (TREE_PURPOSE (elt), 0)
5043 || (tree_low_cst (TREE_VALUE (elt), 0)
5044 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5045 != (HOST_WIDE_INT) nbits))))
5046 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5048 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5050 /* Start of range of element or NULL. */
5051 tree startbit = TREE_PURPOSE (elt);
5052 /* End of range of element, or element value. */
5053 tree endbit = TREE_VALUE (elt);
5054 #ifdef TARGET_MEM_FUNCTIONS
5055 HOST_WIDE_INT startb, endb;
5056 #endif
5057 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5059 bitlength_rtx = expand_expr (bitlength,
5060 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5062 /* Handle non-range tuple element like [ expr ]. */
5063 if (startbit == NULL_TREE)
5065 startbit = save_expr (endbit);
5066 endbit = startbit;
5069 startbit = convert (sizetype, startbit);
5070 endbit = convert (sizetype, endbit);
5071 if (! integer_zerop (domain_min))
5073 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5074 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5076 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5077 EXPAND_CONST_ADDRESS);
5078 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5079 EXPAND_CONST_ADDRESS);
5081 if (REG_P (target))
5083 targetx
5084 = assign_temp
5085 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5086 TYPE_QUAL_CONST)),
5087 0, 1, 1);
5088 emit_move_insn (targetx, target);
5091 else if (GET_CODE (target) == MEM)
5092 targetx = target;
5093 else
5094 abort ();
5096 #ifdef TARGET_MEM_FUNCTIONS
5097 /* Optimization: If startbit and endbit are
5098 constants divisible by BITS_PER_UNIT,
5099 call memset instead. */
5100 if (TREE_CODE (startbit) == INTEGER_CST
5101 && TREE_CODE (endbit) == INTEGER_CST
5102 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5103 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5105 emit_library_call (memset_libfunc, LCT_NORMAL,
5106 VOIDmode, 3,
5107 plus_constant (XEXP (targetx, 0),
5108 startb / BITS_PER_UNIT),
5109 Pmode,
5110 constm1_rtx, TYPE_MODE (integer_type_node),
5111 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5112 TYPE_MODE (sizetype));
5114 else
5115 #endif
5116 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5117 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5118 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5119 startbit_rtx, TYPE_MODE (sizetype),
5120 endbit_rtx, TYPE_MODE (sizetype));
5122 if (REG_P (target))
5123 emit_move_insn (target, targetx);
5127 else
5128 abort ();
5131 /* Store the value of EXP (an expression tree)
5132 into a subfield of TARGET which has mode MODE and occupies
5133 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5134 If MODE is VOIDmode, it means that we are storing into a bit-field.
5136 If VALUE_MODE is VOIDmode, return nothing in particular.
5137 UNSIGNEDP is not used in this case.
5139 Otherwise, return an rtx for the value stored. This rtx
5140 has mode VALUE_MODE if that is convenient to do.
5141 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5143 ALIGN is the alignment that TARGET is known to have.
5144 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5146 ALIAS_SET is the alias set for the destination. This value will
5147 (in general) be different from that for TARGET, since TARGET is a
5148 reference to the containing structure. */
5150 static rtx
5151 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5152 unsignedp, align, total_size, alias_set)
5153 rtx target;
5154 HOST_WIDE_INT bitsize;
5155 HOST_WIDE_INT bitpos;
5156 enum machine_mode mode;
5157 tree exp;
5158 enum machine_mode value_mode;
5159 int unsignedp;
5160 unsigned int align;
5161 HOST_WIDE_INT total_size;
5162 int alias_set;
5164 HOST_WIDE_INT width_mask = 0;
5166 if (TREE_CODE (exp) == ERROR_MARK)
5167 return const0_rtx;
5169 /* If we have nothing to store, do nothing unless the expression has
5170 side-effects. */
5171 if (bitsize == 0)
5172 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5174 if (bitsize < HOST_BITS_PER_WIDE_INT)
5175 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
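/* For instance (illustration only): a 5-bit field gives
   WIDTH_MASK == ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f.  */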
5177 /* If we are storing into an unaligned field of an aligned union that is
5178 in a register, we may have the mode of TARGET being an integer mode but
5179 MODE == BLKmode. In that case, get an aligned object whose size and
5180 alignment are the same as TARGET and store TARGET into it (we can avoid
5181 the store if the field being stored is the entire width of TARGET). Then
5182 call ourselves recursively to store the field into a BLKmode version of
5183 that object. Finally, load from the object into TARGET. This is not
5184 very efficient in general, but should only be slightly more expensive
5185 than the otherwise-required unaligned accesses. Perhaps this can be
5186 cleaned up later. */
5188 if (mode == BLKmode
5189 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5191 rtx object
5192 = assign_temp
5193 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5194 TYPE_QUAL_CONST),
5195 0, 1, 1);
5196 rtx blk_object = copy_rtx (object);
5198 PUT_MODE (blk_object, BLKmode);
5200 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5201 emit_move_insn (object, target);
5203 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5204 align, total_size, alias_set);
5206 /* Even though we aren't returning target, we need to
5207 give it the updated value. */
5208 emit_move_insn (target, object);
5210 return blk_object;
5213 if (GET_CODE (target) == CONCAT)
5215 /* We're storing into a struct containing a single __complex. */
5217 if (bitpos != 0)
5218 abort ();
5219 return store_expr (exp, target, 0);
5222 /* If the structure is in a register or if the component
5223 is a bit field, we cannot use addressing to access it.
5224 Use bit-field techniques or SUBREG to store in it. */
5226 if (mode == VOIDmode
5227 || (mode != BLKmode && ! direct_store[(int) mode]
5228 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5229 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5230 || GET_CODE (target) == REG
5231 || GET_CODE (target) == SUBREG
5232 /* If the field isn't aligned enough to store as an ordinary memref,
5233 store it as a bit field. */
5234 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5235 && (align < GET_MODE_ALIGNMENT (mode)
5236 || bitpos % GET_MODE_ALIGNMENT (mode)))
5237 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5238 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5239 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5240 /* If the RHS and field are a constant size and the size of the
5241 RHS isn't the same size as the bitfield, we must use bitfield
5242 operations. */
5243 || (bitsize >= 0
5244 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5245 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5247 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5249 /* If BITSIZE is narrower than the size of the type of EXP
5250 we will be narrowing TEMP. Normally, what's wanted are the
5251 low-order bits. However, if EXP's type is a record and this is
5252 a big-endian machine, we want the upper BITSIZE bits.
5253 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5254 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5255 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5256 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5257 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5258 - bitsize),
5259 temp, 1);
5261 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5262 MODE. */
5263 if (mode != VOIDmode && mode != BLKmode
5264 && mode != TYPE_MODE (TREE_TYPE (exp)))
5265 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5267 /* If the modes of TARGET and TEMP are both BLKmode, both
5268 must be in memory and BITPOS must be aligned on a byte
5269 boundary. If so, we simply do a block copy. */
5270 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5272 unsigned int exp_align = expr_align (exp);
5274 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5275 || bitpos % BITS_PER_UNIT != 0)
5276 abort ();
5278 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5280 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5281 align = MIN (exp_align, align);
5283 /* Find an alignment that is consistent with the bit position. */
5284 while ((bitpos % align) != 0)
5285 align >>= 1;
5287 emit_block_move (target, temp,
5288 bitsize == -1 ? expr_size (exp)
5289 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5290 / BITS_PER_UNIT),
5291 align);
5293 return value_mode == VOIDmode ? const0_rtx : target;
5296 /* Store the value in the bitfield. */
5297 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5298 if (value_mode != VOIDmode)
5300 /* The caller wants an rtx for the value. */
5301 /* If possible, avoid refetching from the bitfield itself. */
5302 if (width_mask != 0
5303 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5305 tree count;
5306 enum machine_mode tmode;
5308 if (unsignedp)
5309 return expand_and (temp,
5310 GEN_INT
5311 (trunc_int_for_mode
5312 (width_mask,
5313 GET_MODE (temp) == VOIDmode
5314 ? value_mode
5315 : GET_MODE (temp))), NULL_RTX);
5316 tmode = GET_MODE (temp);
5317 if (tmode == VOIDmode)
5318 tmode = value_mode;
5319 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5320 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5321 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
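/* Illustration of the shift pair above (not part of the original source):
   for a signed 9-bit field whose TEMP is SImode, COUNT is 32 - 9 = 23, so
   the value is shifted left 23 bits and then arithmetically right 23 bits,
   sign-extending the field into the full word.  */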
5323 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5324 NULL_RTX, value_mode, 0, align,
5325 total_size);
5327 return const0_rtx;
5329 else
5331 rtx addr = XEXP (target, 0);
5332 rtx to_rtx;
5334 /* If a value is wanted, it must be the lhs;
5335 so make the address stable for multiple use. */
5337 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5338 && ! CONSTANT_ADDRESS_P (addr)
5339 /* A frame-pointer reference is already stable. */
5340 && ! (GET_CODE (addr) == PLUS
5341 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5342 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5343 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5344 target = replace_equiv_address (target, copy_to_reg (addr));
5346 /* Now build a reference to just the desired component. */
5348 to_rtx = copy_rtx (adjust_address (target, mode,
5349 bitpos / BITS_PER_UNIT));
5351 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5352 /* If the address of the structure varies, then it might be on
5353 the stack. And, stack slots may be shared across scopes.
5354 So, two different structures, of different types, can end up
5355 at the same location. We will give the structures alias set
5356 zero; here we must be careful not to give non-zero alias sets
5357 to their fields. */
5358 set_mem_alias_set (to_rtx,
5359 rtx_varies_p (addr, /*for_alias=*/0)
5360 ? 0 : alias_set);
5362 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5366 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5367 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5368 codes and find the ultimate containing object, which we return.
5370 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5371 bit position, and *PUNSIGNEDP to the signedness of the field.
5372 If the position of the field is variable, we store a tree
5373 giving the variable offset (in units) in *POFFSET.
5374 This offset is in addition to the bit position.
5375 If the position is not variable, we store 0 in *POFFSET.
5376 We set *PALIGNMENT to the alignment of the address that will be
5377 computed. This is the alignment of the thing we return if *POFFSET
5378 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5380 If any of the extraction expressions is volatile,
5381 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5383 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5384 is a mode that can be used to access the field. In that case, *PBITSIZE
5385 is redundant.
5387 If the field describes a variable-sized object, *PMODE is set to
5388 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5389 this case, but the address of the object can be found. */
5391 tree
5392 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5393 punsignedp, pvolatilep, palignment)
5394 tree exp;
5395 HOST_WIDE_INT *pbitsize;
5396 HOST_WIDE_INT *pbitpos;
5397 tree *poffset;
5398 enum machine_mode *pmode;
5399 int *punsignedp;
5400 int *pvolatilep;
5401 unsigned int *palignment;
5403 tree size_tree = 0;
5404 enum machine_mode mode = VOIDmode;
5405 tree offset = size_zero_node;
5406 tree bit_offset = bitsize_zero_node;
5407 unsigned int alignment = BIGGEST_ALIGNMENT;
5408 tree tem;
5410 /* First get the mode, signedness, and size. We do this from just the
5411 outermost expression. */
5412 if (TREE_CODE (exp) == COMPONENT_REF)
5414 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5415 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5416 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5418 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5420 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5422 size_tree = TREE_OPERAND (exp, 1);
5423 *punsignedp = TREE_UNSIGNED (exp);
5425 else
5427 mode = TYPE_MODE (TREE_TYPE (exp));
5428 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5430 if (mode == BLKmode)
5431 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5432 else
5433 *pbitsize = GET_MODE_BITSIZE (mode);
5436 if (size_tree != 0)
5438 if (! host_integerp (size_tree, 1))
5439 mode = BLKmode, *pbitsize = -1;
5440 else
5441 *pbitsize = tree_low_cst (size_tree, 1);
5444 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5445 and find the ultimate containing object. */
5446 while (1)
5448 if (TREE_CODE (exp) == BIT_FIELD_REF)
5449 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5450 else if (TREE_CODE (exp) == COMPONENT_REF)
5452 tree field = TREE_OPERAND (exp, 1);
5453 tree this_offset = DECL_FIELD_OFFSET (field);
5455 /* If this field hasn't been filled in yet, don't go
5456 past it. This should only happen when folding expressions
5457 made during type construction. */
5458 if (this_offset == 0)
5459 break;
5460 else if (! TREE_CONSTANT (this_offset)
5461 && contains_placeholder_p (this_offset))
5462 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5464 offset = size_binop (PLUS_EXPR, offset, this_offset);
5465 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5466 DECL_FIELD_BIT_OFFSET (field));
5468 if (! host_integerp (offset, 0))
5469 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5472 else if (TREE_CODE (exp) == ARRAY_REF
5473 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5475 tree index = TREE_OPERAND (exp, 1);
5476 tree array = TREE_OPERAND (exp, 0);
5477 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5478 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5479 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5481 /* We assume all arrays have sizes that are a multiple of a byte.
5482 First subtract the lower bound, if any, in the type of the
5483 index, then convert to sizetype and multiply by the size of the
5484 array element. */
5485 if (low_bound != 0 && ! integer_zerop (low_bound))
5486 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5487 index, low_bound));
5489 /* If the index has a self-referential type, pass it to a
5490 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5491 component to one. */
5492 if (! TREE_CONSTANT (index)
5493 && contains_placeholder_p (index))
5494 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5495 if (! TREE_CONSTANT (unit_size)
5496 && contains_placeholder_p (unit_size))
5497 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5499 offset = size_binop (PLUS_EXPR, offset,
5500 size_binop (MULT_EXPR,
5501 convert (sizetype, index),
5502 unit_size));
5505 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5506 && ! ((TREE_CODE (exp) == NOP_EXPR
5507 || TREE_CODE (exp) == CONVERT_EXPR)
5508 && (TYPE_MODE (TREE_TYPE (exp))
5509 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5510 break;
5512 /* If any reference in the chain is volatile, the effect is volatile. */
5513 if (TREE_THIS_VOLATILE (exp))
5514 *pvolatilep = 1;
5516 /* If the offset is non-constant already, then we can't assume any
5517 alignment more than the alignment here. */
5518 if (! TREE_CONSTANT (offset))
5519 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5521 exp = TREE_OPERAND (exp, 0);
5524 if (DECL_P (exp))
5525 alignment = MIN (alignment, DECL_ALIGN (exp));
5526 else if (TREE_TYPE (exp) != 0)
5527 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5529 /* If OFFSET is constant, see if we can return the whole thing as a
5530 constant bit position. Otherwise, split it up. */
5531 if (host_integerp (offset, 0)
5532 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5533 bitsize_unit_node))
5534 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5535 && host_integerp (tem, 0))
5536 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5537 else
5538 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5540 *pmode = mode;
5541 *palignment = alignment;
5542 return exp;
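/* A sketch of a typical get_inner_reference call (hypothetical local
   variables, mirroring the way callers in this file use it):

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     unsigned int alignment;

     tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                &unsignedp, &volatilep, &alignment);

   For a COMPONENT_REF whose field starts 35 bits into the containing
   object and has no variable offset, this sets *PBITPOS to 35 and
   *POFFSET to 0.  */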
5545 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5547 static enum memory_use_mode
5548 get_memory_usage_from_modifier (modifier)
5549 enum expand_modifier modifier;
5551 switch (modifier)
5553 case EXPAND_NORMAL:
5554 case EXPAND_SUM:
5555 return MEMORY_USE_RO;
5556 break;
5557 case EXPAND_MEMORY_USE_WO:
5558 return MEMORY_USE_WO;
5559 break;
5560 case EXPAND_MEMORY_USE_RW:
5561 return MEMORY_USE_RW;
5562 break;
5563 case EXPAND_MEMORY_USE_DONT:
5564 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5565 MEMORY_USE_DONT, because they are modifiers to a call of
5566 expand_expr in the ADDR_EXPR case of expand_expr. */
5567 case EXPAND_CONST_ADDRESS:
5568 case EXPAND_INITIALIZER:
5569 return MEMORY_USE_DONT;
5570 case EXPAND_MEMORY_USE_BAD:
5571 default:
5572 abort ();
5576 /* Given an rtx VALUE that may contain additions and multiplications, return
5577 an equivalent value that just refers to a register, memory, or constant.
5578 This is done by generating instructions to perform the arithmetic and
5579 returning a pseudo-register containing the value.
5581 The returned value may be a REG, SUBREG, MEM or constant. */
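/* Hypothetical example (not from the original source): given
   (plus (reg 60) (mult (reg 61) (reg 62))), force_operand emits the
   multiplication and the addition and returns a pseudo register holding
   the sum.  */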
5583 rtx
5584 force_operand (value, target)
5585 rtx value, target;
5587 register optab binoptab = 0;
5588 /* Use a temporary to force order of execution of calls to
5589 `force_operand'. */
5590 rtx tmp;
5591 register rtx op2;
5592 /* Use subtarget as the target for operand 0 of a binary operation. */
5593 register rtx subtarget = get_subtarget (target);
5595 /* Check for a PIC address load. */
5596 if (flag_pic
5597 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5598 && XEXP (value, 0) == pic_offset_table_rtx
5599 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5600 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5601 || GET_CODE (XEXP (value, 1)) == CONST))
5603 if (!subtarget)
5604 subtarget = gen_reg_rtx (GET_MODE (value));
5605 emit_move_insn (subtarget, value);
5606 return subtarget;
5609 if (GET_CODE (value) == PLUS)
5610 binoptab = add_optab;
5611 else if (GET_CODE (value) == MINUS)
5612 binoptab = sub_optab;
5613 else if (GET_CODE (value) == MULT)
5615 op2 = XEXP (value, 1);
5616 if (!CONSTANT_P (op2)
5617 && !(GET_CODE (op2) == REG && op2 != subtarget))
5618 subtarget = 0;
5619 tmp = force_operand (XEXP (value, 0), subtarget);
5620 return expand_mult (GET_MODE (value), tmp,
5621 force_operand (op2, NULL_RTX),
5622 target, 1);
5625 if (binoptab)
5627 op2 = XEXP (value, 1);
5628 if (!CONSTANT_P (op2)
5629 && !(GET_CODE (op2) == REG && op2 != subtarget))
5630 subtarget = 0;
5631 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5633 binoptab = add_optab;
5634 op2 = negate_rtx (GET_MODE (value), op2);
5637 /* Check for an addition with OP2 a constant integer and our first
5638 operand a PLUS of a virtual register and something else. In that
5639 case, we want to emit the sum of the virtual register and the
5640 constant first and then add the other value. This allows virtual
5641 register instantiation to simply modify the constant rather than
5642 creating another one around this addition. */
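/* For instance, for (plus (plus (virtual-stack-vars) (reg 101)) (const_int 8))
   we first form virtual-stack-vars + 8, which instantiation can later fold
   into a single frame-pointer offset, and only then add in (reg 101).
   (Register 101 is purely illustrative.)  */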
5643 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5644 && GET_CODE (XEXP (value, 0)) == PLUS
5645 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5646 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5647 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5649 rtx temp = expand_binop (GET_MODE (value), binoptab,
5650 XEXP (XEXP (value, 0), 0), op2,
5651 subtarget, 0, OPTAB_LIB_WIDEN);
5652 return expand_binop (GET_MODE (value), binoptab, temp,
5653 force_operand (XEXP (XEXP (value, 0), 1), 0),
5654 target, 0, OPTAB_LIB_WIDEN);
5657 tmp = force_operand (XEXP (value, 0), subtarget);
5658 return expand_binop (GET_MODE (value), binoptab, tmp,
5659 force_operand (op2, NULL_RTX),
5660 target, 0, OPTAB_LIB_WIDEN);
5661 /* We give UNSIGNEDP = 0 to expand_binop
5662 because the only operations we are expanding here are signed ones. */
5664 return value;
5667 /* Subroutine of expand_expr:
5668 save the non-copied parts (LIST) of an expr (LHS), and return a list
5669 which can restore these values to their previous values,
5670 should something modify their storage. */
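/* In the list built below, each entry's TREE_PURPOSE is a COMPONENT_REF
   naming one part of LHS and its TREE_VALUE is an RTL_EXPR whose RTL is a
   fresh temporary into which that part's current value is copied.  */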
5672 static tree
5673 save_noncopied_parts (lhs, list)
5674 tree lhs;
5675 tree list;
5677 tree tail;
5678 tree parts = 0;
5680 for (tail = list; tail; tail = TREE_CHAIN (tail))
5681 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5682 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5683 else
5685 tree part = TREE_VALUE (tail);
5686 tree part_type = TREE_TYPE (part);
5687 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5688 rtx target
5689 = assign_temp (build_qualified_type (part_type,
5690 (TYPE_QUALS (part_type)
5691 | TYPE_QUAL_CONST)),
5692 0, 1, 1);
5694 parts = tree_cons (to_be_saved,
5695 build (RTL_EXPR, part_type, NULL_TREE,
5696 (tree) validize_mem (target)),
5697 parts);
5698 store_expr (TREE_PURPOSE (parts),
5699 RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5701 return parts;
5704 /* Subroutine of expand_expr:
5705 record the non-copied parts (LIST) of an expr (LHS), and return a list
5706 which specifies the initial values of these parts. */
5708 static tree
5709 init_noncopied_parts (lhs, list)
5710 tree lhs;
5711 tree list;
5713 tree tail;
5714 tree parts = 0;
5716 for (tail = list; tail; tail = TREE_CHAIN (tail))
5717 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5718 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5719 else if (TREE_PURPOSE (tail))
5721 tree part = TREE_VALUE (tail);
5722 tree part_type = TREE_TYPE (part);
5723 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5724 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5726 return parts;
5729 /* Subroutine of expand_expr: return nonzero iff there is no way that
5730 EXP can reference X, which is being modified. TOP_P is nonzero if this
5731 call is going to be used to determine whether we need a temporary
5732 for EXP, as opposed to a recursive call to this function.
5734 It is always safe for this routine to return zero since it merely
5735 searches for optimization opportunities. */
5738 safe_from_p (x, exp, top_p)
5739 rtx x;
5740 tree exp;
5741 int top_p;
5743 rtx exp_rtl = 0;
5744 int i, nops;
5745 static tree save_expr_list;
5747 if (x == 0
5748 /* If EXP has varying size, we MUST use a target since we currently
5749 have no way of allocating temporaries of variable size
5750 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5751 So we assume here that something at a higher level has prevented a
5752 clash. This is somewhat bogus, but the best we can do. Only
5753 do this when X is BLKmode and when we are at the top level. */
5754 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5755 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5756 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5757 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5758 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5759 != INTEGER_CST)
5760 && GET_MODE (x) == BLKmode)
5761 /* If X is in the outgoing argument area, it is always safe. */
5762 || (GET_CODE (x) == MEM
5763 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5764 || (GET_CODE (XEXP (x, 0)) == PLUS
5765 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5766 return 1;
5768 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5769 find the underlying pseudo. */
5770 if (GET_CODE (x) == SUBREG)
5772 x = SUBREG_REG (x);
5773 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5774 return 0;
5777 /* A SAVE_EXPR might appear many times in the expression passed to the
5778 top-level safe_from_p call, and if it has a complex subexpression,
5779 examining it multiple times could result in a combinatorial explosion.
5780 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5781 with optimization took about 28 minutes to compile -- even though it was
5782 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5783 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5784 we have processed. Note that the only test of top_p was above. */
5786 if (top_p)
5788 int rtn;
5789 tree t;
5791 save_expr_list = 0;
5793 rtn = safe_from_p (x, exp, 0);
5795 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5796 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5798 return rtn;
5801 /* Now look at our tree code and possibly recurse. */
5802 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5804 case 'd':
5805 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5806 break;
5808 case 'c':
5809 return 1;
5811 case 'x':
5812 if (TREE_CODE (exp) == TREE_LIST)
5813 return ((TREE_VALUE (exp) == 0
5814 || safe_from_p (x, TREE_VALUE (exp), 0))
5815 && (TREE_CHAIN (exp) == 0
5816 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5817 else if (TREE_CODE (exp) == ERROR_MARK)
5818 return 1; /* An already-visited SAVE_EXPR? */
5819 else
5820 return 0;
5822 case '1':
5823 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5825 case '2':
5826 case '<':
5827 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5828 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5830 case 'e':
5831 case 'r':
5832 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5833 the expression. If it is set, we conflict iff we are that rtx or
5834 both are in memory. Otherwise, we check all operands of the
5835 expression recursively. */
5837 switch (TREE_CODE (exp))
5839 case ADDR_EXPR:
5840 return (staticp (TREE_OPERAND (exp, 0))
5841 || TREE_STATIC (exp)
5842 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5844 case INDIRECT_REF:
5845 if (GET_CODE (x) == MEM
5846 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5847 get_alias_set (exp)))
5848 return 0;
5849 break;
5851 case CALL_EXPR:
5852 /* Assume that the call will clobber all hard registers and
5853 all of memory. */
5854 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5855 || GET_CODE (x) == MEM)
5856 return 0;
5857 break;
5859 case RTL_EXPR:
5860 /* If a sequence exists, we would have to scan every instruction
5861 in the sequence to see if it was safe. This is probably not
5862 worthwhile. */
5863 if (RTL_EXPR_SEQUENCE (exp))
5864 return 0;
5866 exp_rtl = RTL_EXPR_RTL (exp);
5867 break;
5869 case WITH_CLEANUP_EXPR:
5870 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5871 break;
5873 case CLEANUP_POINT_EXPR:
5874 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5876 case SAVE_EXPR:
5877 exp_rtl = SAVE_EXPR_RTL (exp);
5878 if (exp_rtl)
5879 break;
5881 /* If we've already scanned this, don't do it again. Otherwise,
5882 mark it as scanned and record it so the flag can be cleared
5883 once the top-level call finishes. */
5884 if (TREE_PRIVATE (exp))
5885 return 1;
5887 TREE_PRIVATE (exp) = 1;
5888 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5890 TREE_PRIVATE (exp) = 0;
5891 return 0;
5894 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5895 return 1;
5897 case BIND_EXPR:
5898 /* The only operand we look at is operand 1. The rest aren't
5899 part of the expression. */
5900 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5902 case METHOD_CALL_EXPR:
5903 /* This takes an rtx argument, but shouldn't appear here. */
5904 abort ();
5906 default:
5907 break;
5910 /* If we have an rtx, we do not need to scan our operands. */
5911 if (exp_rtl)
5912 break;
5914 nops = first_rtl_op (TREE_CODE (exp));
5915 for (i = 0; i < nops; i++)
5916 if (TREE_OPERAND (exp, i) != 0
5917 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5918 return 0;
5920 /* If this is a language-specific tree code, it may require
5921 special handling. */
5922 if ((unsigned int) TREE_CODE (exp)
5923 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5924 && lang_safe_from_p
5925 && !(*lang_safe_from_p) (x, exp))
5926 return 0;
5929 /* If we have an rtl, find any enclosed object. Then see if we conflict
5930 with it. */
5931 if (exp_rtl)
5933 if (GET_CODE (exp_rtl) == SUBREG)
5935 exp_rtl = SUBREG_REG (exp_rtl);
5936 if (GET_CODE (exp_rtl) == REG
5937 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5938 return 0;
5941 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5942 are memory and they conflict. */
5943 return ! (rtx_equal_p (x, exp_rtl)
5944 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5945 && true_dependence (exp_rtl, GET_MODE (x), x,
5946 rtx_addr_varies_p)));
5949 /* If we reach here, it is safe. */
5950 return 1;
5953 /* Subroutine of expand_expr: return nonzero iff EXP is an
5954 expression whose type is statically determinable. */
5956 static int
5957 fixed_type_p (exp)
5958 tree exp;
5960 if (TREE_CODE (exp) == PARM_DECL
5961 || TREE_CODE (exp) == VAR_DECL
5962 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5963 || TREE_CODE (exp) == COMPONENT_REF
5964 || TREE_CODE (exp) == ARRAY_REF)
5965 return 1;
5966 return 0;
5969 /* Subroutine of expand_expr: return rtx if EXP is a
5970 variable or parameter; else return 0. */
5972 static rtx
5973 var_rtx (exp)
5974 tree exp;
5976 STRIP_NOPS (exp);
5977 switch (TREE_CODE (exp))
5979 case PARM_DECL:
5980 case VAR_DECL:
5981 return DECL_RTL (exp);
5982 default:
5983 return 0;
5987 #ifdef MAX_INTEGER_COMPUTATION_MODE
5989 void
5990 check_max_integer_computation_mode (exp)
5991 tree exp;
5993 enum tree_code code;
5994 enum machine_mode mode;
5996 /* Strip any NOPs that don't change the mode. */
5997 STRIP_NOPS (exp);
5998 code = TREE_CODE (exp);
6000 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6001 if (code == NOP_EXPR
6002 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6003 return;
6005 /* First check the type of the overall operation. We need only look at
6006 unary, binary and relational operations. */
6007 if (TREE_CODE_CLASS (code) == '1'
6008 || TREE_CODE_CLASS (code) == '2'
6009 || TREE_CODE_CLASS (code) == '<')
6011 mode = TYPE_MODE (TREE_TYPE (exp));
6012 if (GET_MODE_CLASS (mode) == MODE_INT
6013 && mode > MAX_INTEGER_COMPUTATION_MODE)
6014 internal_error ("unsupported wide integer operation");
6017 /* Check operand of a unary op. */
6018 if (TREE_CODE_CLASS (code) == '1')
6020 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6021 if (GET_MODE_CLASS (mode) == MODE_INT
6022 && mode > MAX_INTEGER_COMPUTATION_MODE)
6023 internal_error ("unsupported wide integer operation");
6026 /* Check operands of a binary/comparison op. */
6027 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6029 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6030 if (GET_MODE_CLASS (mode) == MODE_INT
6031 && mode > MAX_INTEGER_COMPUTATION_MODE)
6032 internal_error ("unsupported wide integer operation");
6034 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6035 if (GET_MODE_CLASS (mode) == MODE_INT
6036 && mode > MAX_INTEGER_COMPUTATION_MODE)
6037 internal_error ("unsupported wide integer operation");
6040 #endif
6042 /* expand_expr: generate code for computing expression EXP.
6043 An rtx for the computed value is returned. The value is never null.
6044 In the case of a void EXP, const0_rtx is returned.
6046 The value may be stored in TARGET if TARGET is nonzero.
6047 TARGET is just a suggestion; callers must assume that
6048 the rtx returned may not be the same as TARGET.
6050 If TARGET is CONST0_RTX, it means that the value will be ignored.
6052 If TMODE is not VOIDmode, it suggests generating the
6053 result in mode TMODE. But this is done only when convenient.
6054 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6055 TMODE is just a suggestion; callers must assume that
6056 the rtx returned may not have mode TMODE.
6058 Note that TARGET may have neither TMODE nor MODE. In that case, it
6059 probably will not be used.
6061 If MODIFIER is EXPAND_SUM then when EXP is an addition
6062 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6063 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6064 products as above, or REG or MEM, or constant.
6065 Ordinarily in such cases we would output mul or add instructions
6066 and then return a pseudo reg containing the sum.
6068 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6069 it also marks a label as absolutely required (it can't be dead).
6070 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6071 This is used for outputting expressions used in initializers.
6073 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6074 with a constant address even if that address is not normally legitimate.
6075 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
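/* As an illustration of EXPAND_SUM: expanding the C expression `a + 4' this
   way may simply return (plus (reg) (const_int 4)) without emitting an add
   insn, leaving the caller free to fold the sum into an address.  */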
6078 expand_expr (exp, target, tmode, modifier)
6079 register tree exp;
6080 rtx target;
6081 enum machine_mode tmode;
6082 enum expand_modifier modifier;
6084 register rtx op0, op1, temp;
6085 tree type = TREE_TYPE (exp);
6086 int unsignedp = TREE_UNSIGNED (type);
6087 register enum machine_mode mode;
6088 register enum tree_code code = TREE_CODE (exp);
6089 optab this_optab;
6090 rtx subtarget, original_target;
6091 int ignore;
6092 tree context;
6093 /* Used by check-memory-usage to make modifier read only. */
6094 enum expand_modifier ro_modifier;
6096 /* Handle ERROR_MARK before anybody tries to access its type. */
6097 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6099 op0 = CONST0_RTX (tmode);
6100 if (op0 != 0)
6101 return op0;
6102 return const0_rtx;
6105 mode = TYPE_MODE (type);
6106 /* Use subtarget as the target for operand 0 of a binary operation. */
6107 subtarget = get_subtarget (target);
6108 original_target = target;
6109 ignore = (target == const0_rtx
6110 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6111 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6112 || code == COND_EXPR)
6113 && TREE_CODE (type) == VOID_TYPE));
6115 /* Make a read-only version of the modifier. */
6116 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6117 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6118 ro_modifier = modifier;
6119 else
6120 ro_modifier = EXPAND_NORMAL;
6122 /* If we are going to ignore this result, we need only do something
6123 if there is a side-effect somewhere in the expression. If there
6124 is, short-circuit the most common cases here. Note that we must
6125 not call expand_expr with anything but const0_rtx in case this
6126 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
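/* For instance, if the value of `f () + g ()' is ignored, the code below
   expands both calls for their side effects and returns const0_rtx without
   computing the sum.  */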
6128 if (ignore)
6130 if (! TREE_SIDE_EFFECTS (exp))
6131 return const0_rtx;
6133 /* Ensure we reference a volatile object even if value is ignored, but
6134 don't do this if all we are doing is taking its address. */
6135 if (TREE_THIS_VOLATILE (exp)
6136 && TREE_CODE (exp) != FUNCTION_DECL
6137 && mode != VOIDmode && mode != BLKmode
6138 && modifier != EXPAND_CONST_ADDRESS)
6140 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6141 if (GET_CODE (temp) == MEM)
6142 temp = copy_to_reg (temp);
6143 return const0_rtx;
6146 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6147 || code == INDIRECT_REF || code == BUFFER_REF)
6148 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6149 VOIDmode, ro_modifier);
6150 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6151 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6153 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6154 ro_modifier);
6155 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6156 ro_modifier);
6157 return const0_rtx;
6159 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6160 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6161 /* If the second operand has no side effects, just evaluate
6162 the first. */
6163 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6164 VOIDmode, ro_modifier);
6165 else if (code == BIT_FIELD_REF)
6167 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6168 ro_modifier);
6169 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6170 ro_modifier);
6171 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6172 ro_modifier);
6173 return const0_rtx;
6176 target = 0;
6179 #ifdef MAX_INTEGER_COMPUTATION_MODE
6180 /* Only check stuff here if the mode we want is different from the mode
6181 of the expression; if it's the same, check_max_integer_computation_mode
6182 will handle it. Do we really need to check this stuff at all? */
6184 if (target
6185 && GET_MODE (target) != mode
6186 && TREE_CODE (exp) != INTEGER_CST
6187 && TREE_CODE (exp) != PARM_DECL
6188 && TREE_CODE (exp) != ARRAY_REF
6189 && TREE_CODE (exp) != ARRAY_RANGE_REF
6190 && TREE_CODE (exp) != COMPONENT_REF
6191 && TREE_CODE (exp) != BIT_FIELD_REF
6192 && TREE_CODE (exp) != INDIRECT_REF
6193 && TREE_CODE (exp) != CALL_EXPR
6194 && TREE_CODE (exp) != VAR_DECL
6195 && TREE_CODE (exp) != RTL_EXPR)
6197 enum machine_mode mode = GET_MODE (target);
6199 if (GET_MODE_CLASS (mode) == MODE_INT
6200 && mode > MAX_INTEGER_COMPUTATION_MODE)
6201 internal_error ("unsupported wide integer operation");
6204 if (tmode != mode
6205 && TREE_CODE (exp) != INTEGER_CST
6206 && TREE_CODE (exp) != PARM_DECL
6207 && TREE_CODE (exp) != ARRAY_REF
6208 && TREE_CODE (exp) != ARRAY_RANGE_REF
6209 && TREE_CODE (exp) != COMPONENT_REF
6210 && TREE_CODE (exp) != BIT_FIELD_REF
6211 && TREE_CODE (exp) != INDIRECT_REF
6212 && TREE_CODE (exp) != VAR_DECL
6213 && TREE_CODE (exp) != CALL_EXPR
6214 && TREE_CODE (exp) != RTL_EXPR
6215 && GET_MODE_CLASS (tmode) == MODE_INT
6216 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6217 internal_error ("unsupported wide integer operation");
6219 check_max_integer_computation_mode (exp);
6220 #endif
6222 /* If will do cse, generate all results into pseudo registers
6223 since 1) that allows cse to find more things
6224 and 2) otherwise cse could produce an insn the machine
6225 cannot support. */
6227 if (! cse_not_expected && mode != BLKmode && target
6228 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6229 target = subtarget;
6231 switch (code)
6233 case LABEL_DECL:
6235 tree function = decl_function_context (exp);
6236 /* Handle using a label in a containing function. */
6237 if (function != current_function_decl
6238 && function != inline_function_decl && function != 0)
6240 struct function *p = find_function_data (function);
6241 p->expr->x_forced_labels
6242 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6243 p->expr->x_forced_labels);
6245 else
6247 if (modifier == EXPAND_INITIALIZER)
6248 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6249 label_rtx (exp),
6250 forced_labels);
6253 temp = gen_rtx_MEM (FUNCTION_MODE,
6254 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6255 if (function != current_function_decl
6256 && function != inline_function_decl && function != 0)
6257 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6258 return temp;
6261 case PARM_DECL:
6262 if (DECL_RTL (exp) == 0)
6264 error_with_decl (exp, "prior parameter's size depends on `%s'");
6265 return CONST0_RTX (mode);
6268 /* ... fall through ... */
6270 case VAR_DECL:
6271 /* If a static var's type was incomplete when the decl was written,
6272 but the type is complete now, lay out the decl now. */
6273 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6274 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6276 layout_decl (exp, 0);
6277 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6280 /* Although static-storage variables start off initialized, according to
6281 ANSI C, a memcpy could overwrite them with uninitialized values. So
6282 we check them too. This also lets us check for read-only variables
6283 accessed via a non-const declaration, in case it won't be detected
6284 any other way (e.g., in an embedded system or OS kernel without
6285 memory protection).
6287 Aggregates are not checked here; they're handled elsewhere. */
6288 if (cfun && current_function_check_memory_usage
6289 && code == VAR_DECL
6290 && GET_CODE (DECL_RTL (exp)) == MEM
6291 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6293 enum memory_use_mode memory_usage;
6294 memory_usage = get_memory_usage_from_modifier (modifier);
6296 in_check_memory_usage = 1;
6297 if (memory_usage != MEMORY_USE_DONT)
6298 emit_library_call (chkr_check_addr_libfunc,
6299 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6300 XEXP (DECL_RTL (exp), 0), Pmode,
6301 GEN_INT (int_size_in_bytes (type)),
6302 TYPE_MODE (sizetype),
6303 GEN_INT (memory_usage),
6304 TYPE_MODE (integer_type_node));
6305 in_check_memory_usage = 0;
6308 /* ... fall through ... */
6310 case FUNCTION_DECL:
6311 case RESULT_DECL:
6312 if (DECL_RTL (exp) == 0)
6313 abort ();
6315 /* Ensure the variable is marked as used even if it doesn't go through
6316 a parser. If it hasn't been used yet, write out an external
6317 definition. */
6318 if (! TREE_USED (exp))
6320 assemble_external (exp);
6321 TREE_USED (exp) = 1;
6324 /* Show we haven't gotten RTL for this yet. */
6325 temp = 0;
6327 /* Handle variables inherited from containing functions. */
6328 context = decl_function_context (exp);
6330 /* We treat inline_function_decl as an alias for the current function
6331 because that is the inline function whose vars, types, etc.
6332 are being merged into the current function.
6333 See expand_inline_function. */
6335 if (context != 0 && context != current_function_decl
6336 && context != inline_function_decl
6337 /* If var is static, we don't need a static chain to access it. */
6338 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6339 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6341 rtx addr;
6343 /* Mark as non-local and addressable. */
6344 DECL_NONLOCAL (exp) = 1;
6345 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6346 abort ();
6347 mark_addressable (exp);
6348 if (GET_CODE (DECL_RTL (exp)) != MEM)
6349 abort ();
6350 addr = XEXP (DECL_RTL (exp), 0);
6351 if (GET_CODE (addr) == MEM)
6352 addr
6353 = replace_equiv_address (addr,
6354 fix_lexical_addr (XEXP (addr, 0), exp));
6355 else
6356 addr = fix_lexical_addr (addr, exp);
6358 temp = replace_equiv_address (DECL_RTL (exp), addr);
6361 /* This is the case of an array whose size is to be determined
6362 from its initializer, while the initializer is still being parsed.
6363 See expand_decl. */
6365 else if (GET_CODE (DECL_RTL (exp)) == MEM
6366 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6367 temp = validize_mem (DECL_RTL (exp));
6369 /* If DECL_RTL is memory, we are in the normal case and either
6370 the address is not valid or it is not a register and -fforce-addr
6371 is specified, get the address into a register. */
6373 else if (GET_CODE (DECL_RTL (exp)) == MEM
6374 && modifier != EXPAND_CONST_ADDRESS
6375 && modifier != EXPAND_SUM
6376 && modifier != EXPAND_INITIALIZER
6377 && (! memory_address_p (DECL_MODE (exp),
6378 XEXP (DECL_RTL (exp), 0))
6379 || (flag_force_addr
6380 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6381 temp = replace_equiv_address (DECL_RTL (exp),
6382 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6384 /* If we got something, return it. But first, set the alignment
6385 if the address is a register. */
6386 if (temp != 0)
6388 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6389 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6391 return temp;
6394 /* If the mode of DECL_RTL does not match that of the decl, it
6395 must be a promoted value. We return a SUBREG of the wanted mode,
6396 but mark it so that we know that it was already extended. */
6398 if (GET_CODE (DECL_RTL (exp)) == REG
6399 && GET_MODE (DECL_RTL (exp)) != mode)
6401 /* Get the signedness used for this variable. Ensure we get the
6402 same mode we got when the variable was declared. */
6403 if (GET_MODE (DECL_RTL (exp))
6404 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6405 abort ();
6407 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6408 SUBREG_PROMOTED_VAR_P (temp) = 1;
6409 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6410 return temp;
6413 return DECL_RTL (exp);
6415 case INTEGER_CST:
6416 return immed_double_const (TREE_INT_CST_LOW (exp),
6417 TREE_INT_CST_HIGH (exp), mode);
6419 case CONST_DECL:
6420 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6421 EXPAND_MEMORY_USE_BAD);
6423 case REAL_CST:
6424 /* If optimized, generate immediate CONST_DOUBLE
6425 which will be turned into memory by reload if necessary.
6427 We used to force a register so that loop.c could see it. But
6428 this does not allow gen_* patterns to perform optimizations with
6429 the constants. It also produces two insns in cases like "x = 1.0;".
6430 On most machines, floating-point constants are not permitted in
6431 many insns, so we'd end up copying it to a register in any case.
6433 Now, we do the copying in expand_binop, if appropriate. */
6434 return immed_real_const (exp);
6436 case COMPLEX_CST:
6437 case STRING_CST:
6438 if (! TREE_CST_RTL (exp))
6439 output_constant_def (exp, 1);
6441 /* TREE_CST_RTL probably contains a constant address.
6442 On RISC machines where a constant address isn't valid,
6443 make some insns to get that address into a register. */
6444 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6445 && modifier != EXPAND_CONST_ADDRESS
6446 && modifier != EXPAND_INITIALIZER
6447 && modifier != EXPAND_SUM
6448 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6449 || (flag_force_addr
6450 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6451 return replace_equiv_address (TREE_CST_RTL (exp),
6452 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6453 return TREE_CST_RTL (exp);
6455 case EXPR_WITH_FILE_LOCATION:
6457 rtx to_return;
6458 const char *saved_input_filename = input_filename;
6459 int saved_lineno = lineno;
6460 input_filename = EXPR_WFL_FILENAME (exp);
6461 lineno = EXPR_WFL_LINENO (exp);
6462 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6463 emit_line_note (input_filename, lineno);
6464 /* Possibly avoid switching back and forth here. */
6465 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6466 input_filename = saved_input_filename;
6467 lineno = saved_lineno;
6468 return to_return;
6471 case SAVE_EXPR:
6472 context = decl_function_context (exp);
6474 /* If this SAVE_EXPR was at global context, assume we are an
6475 initialization function and move it into our context. */
6476 if (context == 0)
6477 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6479 /* We treat inline_function_decl as an alias for the current function
6480 because that is the inline function whose vars, types, etc.
6481 are being merged into the current function.
6482 See expand_inline_function. */
6483 if (context == current_function_decl || context == inline_function_decl)
6484 context = 0;
6486 /* If this is non-local, handle it. */
6487 if (context)
6489 /* The following call just exists to abort if the context is
6490 not of a containing function. */
6491 find_function_data (context);
6493 temp = SAVE_EXPR_RTL (exp);
6494 if (temp && GET_CODE (temp) == REG)
6496 put_var_into_stack (exp);
6497 temp = SAVE_EXPR_RTL (exp);
6499 if (temp == 0 || GET_CODE (temp) != MEM)
6500 abort ();
6501 return
6502 replace_equiv_address (temp,
6503 fix_lexical_addr (XEXP (temp, 0), exp));
6505 if (SAVE_EXPR_RTL (exp) == 0)
6507 if (mode == VOIDmode)
6508 temp = const0_rtx;
6509 else
6510 temp = assign_temp (build_qualified_type (type,
6511 (TYPE_QUALS (type)
6512 | TYPE_QUAL_CONST)),
6513 3, 0, 0);
6515 SAVE_EXPR_RTL (exp) = temp;
6516 if (!optimize && GET_CODE (temp) == REG)
6517 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6518 save_expr_regs);
6520 /* If the mode of TEMP does not match that of the expression, it
6521 must be a promoted value. We pass store_expr a SUBREG of the
6522 wanted mode but mark it so that we know that it was already
6523 extended. Note that `unsignedp' was modified above in
6524 this case. */
6526 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6528 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6529 SUBREG_PROMOTED_VAR_P (temp) = 1;
6530 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6533 if (temp == const0_rtx)
6534 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6535 EXPAND_MEMORY_USE_BAD);
6536 else
6537 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6539 TREE_USED (exp) = 1;
6542 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6543 must be a promoted value. We return a SUBREG of the wanted mode,
6544 but mark it so that we know that it was already extended. */
6546 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6547 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6549 /* Compute the signedness and make the proper SUBREG. */
6550 promote_mode (type, mode, &unsignedp, 0);
6551 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6552 SUBREG_PROMOTED_VAR_P (temp) = 1;
6553 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6554 return temp;
6557 return SAVE_EXPR_RTL (exp);
6559 case UNSAVE_EXPR:
6561 rtx temp;
6562 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6563 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6564 return temp;
6567 case PLACEHOLDER_EXPR:
6569 tree placeholder_expr;
6571 /* If there is an object on the head of the placeholder list,
6572 see if any object in it is of type TYPE or a pointer to it. For
6573 further information, see tree.def. */
6574 for (placeholder_expr = placeholder_list;
6575 placeholder_expr != 0;
6576 placeholder_expr = TREE_CHAIN (placeholder_expr))
6578 tree need_type = TYPE_MAIN_VARIANT (type);
6579 tree object = 0;
6580 tree old_list = placeholder_list;
6581 tree elt;
6583 /* Find the outermost reference that is of the type we want.
6584 If none, see if any object has a type that is a pointer to
6585 the type we want. */
6586 for (elt = TREE_PURPOSE (placeholder_expr);
6587 elt != 0 && object == 0;
6589 = ((TREE_CODE (elt) == COMPOUND_EXPR
6590 || TREE_CODE (elt) == COND_EXPR)
6591 ? TREE_OPERAND (elt, 1)
6592 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6593 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6594 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6595 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6596 ? TREE_OPERAND (elt, 0) : 0))
6597 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6598 object = elt;
6600 for (elt = TREE_PURPOSE (placeholder_expr);
6601 elt != 0 && object == 0;
6603 = ((TREE_CODE (elt) == COMPOUND_EXPR
6604 || TREE_CODE (elt) == COND_EXPR)
6605 ? TREE_OPERAND (elt, 1)
6606 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6607 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6608 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6609 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6610 ? TREE_OPERAND (elt, 0) : 0))
6611 if (POINTER_TYPE_P (TREE_TYPE (elt))
6612 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6613 == need_type))
6614 object = build1 (INDIRECT_REF, need_type, elt);
6616 if (object != 0)
6618 /* Expand this object, skipping the list entries before
6619 the one where it was found, in case it is also a PLACEHOLDER_EXPR.
6620 In that case, we want to translate it using subsequent
6621 entries. */
6622 placeholder_list = TREE_CHAIN (placeholder_expr);
6623 temp = expand_expr (object, original_target, tmode,
6624 ro_modifier);
6625 placeholder_list = old_list;
6626 return temp;
6631 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6632 abort ();
6634 case WITH_RECORD_EXPR:
6635 /* Put the object on the placeholder list, expand our first operand,
6636 and pop the list. */
6637 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6638 placeholder_list);
6639 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6640 tmode, ro_modifier);
6641 placeholder_list = TREE_CHAIN (placeholder_list);
6642 return target;
6644 case GOTO_EXPR:
6645 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6646 expand_goto (TREE_OPERAND (exp, 0));
6647 else
6648 expand_computed_goto (TREE_OPERAND (exp, 0));
6649 return const0_rtx;
6651 case EXIT_EXPR:
6652 expand_exit_loop_if_false (NULL,
6653 invert_truthvalue (TREE_OPERAND (exp, 0)));
6654 return const0_rtx;
6656 case LABELED_BLOCK_EXPR:
6657 if (LABELED_BLOCK_BODY (exp))
6658 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6659 /* Should perhaps use expand_label, but this is simpler and safer. */
6660 do_pending_stack_adjust ();
6661 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6662 return const0_rtx;
6664 case EXIT_BLOCK_EXPR:
6665 if (EXIT_BLOCK_RETURN (exp))
6666 sorry ("returned value in block_exit_expr");
6667 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6668 return const0_rtx;
6670 case LOOP_EXPR:
6671 push_temp_slots ();
6672 expand_start_loop (1);
6673 expand_expr_stmt (TREE_OPERAND (exp, 0));
6674 expand_end_loop ();
6675 pop_temp_slots ();
6677 return const0_rtx;
6679 case BIND_EXPR:
6681 tree vars = TREE_OPERAND (exp, 0);
6682 int vars_need_expansion = 0;
6684 /* Need to open a binding contour here because
6685 if there are any cleanups they must be contained here. */
6686 expand_start_bindings (2);
6688 /* Mark the corresponding BLOCK for output in its proper place. */
6689 if (TREE_OPERAND (exp, 2) != 0
6690 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6691 insert_block (TREE_OPERAND (exp, 2));
6693 /* If VARS have not yet been expanded, expand them now. */
6694 while (vars)
6696 if (!DECL_RTL_SET_P (vars))
6698 vars_need_expansion = 1;
6699 expand_decl (vars);
6701 expand_decl_init (vars);
6702 vars = TREE_CHAIN (vars);
6705 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6707 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6709 return temp;
6712 case RTL_EXPR:
6713 if (RTL_EXPR_SEQUENCE (exp))
6715 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6716 abort ();
6717 emit_insns (RTL_EXPR_SEQUENCE (exp));
6718 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6720 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6721 free_temps_for_rtl_expr (exp);
6722 return RTL_EXPR_RTL (exp);
6724 case CONSTRUCTOR:
6725 /* If we don't need the result, just ensure we evaluate any
6726 subexpressions. */
6727 if (ignore)
6729 tree elt;
6730 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6731 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6732 EXPAND_MEMORY_USE_BAD);
6733 return const0_rtx;
6736 /* All elts simple constants => refer to a constant in memory. But
6737 if this is a non-BLKmode mode, let it store a field at a time
6738 since that should make a CONST_INT or CONST_DOUBLE when we
6739 fold. Likewise, if we have a target we can use, it is best to
6740 store directly into the target unless the type is large enough
6741 that memcpy will be used. If we are making an initializer and
6742 all operands are constant, put it in memory as well. */
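/* For example, a large static aggregate initializer whose elements are all
   constants is emitted once as initialized data and referenced by address,
   rather than being built up with stores at run time.  */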
6743 else if ((TREE_STATIC (exp)
6744 && ((mode == BLKmode
6745 && ! (target != 0 && safe_from_p (target, exp, 1)))
6746 || TREE_ADDRESSABLE (exp)
6747 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6748 && (! MOVE_BY_PIECES_P
6749 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6750 TYPE_ALIGN (type)))
6751 && ! mostly_zeros_p (exp))))
6752 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6754 rtx constructor = output_constant_def (exp, 1);
6756 if (modifier != EXPAND_CONST_ADDRESS
6757 && modifier != EXPAND_INITIALIZER
6758 && modifier != EXPAND_SUM)
6759 constructor = validize_mem (constructor);
6761 return constructor;
6763 else
6765 /* Handle calls that pass values in multiple non-contiguous
6766 locations. The Irix 6 ABI has examples of this. */
6767 if (target == 0 || ! safe_from_p (target, exp, 1)
6768 || GET_CODE (target) == PARALLEL)
6769 target
6770 = assign_temp (build_qualified_type (type,
6771 (TYPE_QUALS (type)
6772 | (TREE_READONLY (exp)
6773 * TYPE_QUAL_CONST))),
6774 TREE_ADDRESSABLE (exp), 1, 1);
6776 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6777 int_size_in_bytes (TREE_TYPE (exp)));
6778 return target;
6781 case INDIRECT_REF:
6783 tree exp1 = TREE_OPERAND (exp, 0);
6784 tree index;
6785 tree string = string_constant (exp1, &index);
6787 /* Try to optimize reads from const strings. */
6788 if (string
6789 && TREE_CODE (string) == STRING_CST
6790 && TREE_CODE (index) == INTEGER_CST
6791 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6792 && GET_MODE_CLASS (mode) == MODE_INT
6793 && GET_MODE_SIZE (mode) == 1
6794 && modifier != EXPAND_MEMORY_USE_WO)
6795 return
6796 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6798 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6799 op0 = memory_address (mode, op0);
6801 if (cfun && current_function_check_memory_usage
6802 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6804 enum memory_use_mode memory_usage;
6805 memory_usage = get_memory_usage_from_modifier (modifier);
6807 if (memory_usage != MEMORY_USE_DONT)
6809 in_check_memory_usage = 1;
6810 emit_library_call (chkr_check_addr_libfunc,
6811 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6812 Pmode, GEN_INT (int_size_in_bytes (type)),
6813 TYPE_MODE (sizetype),
6814 GEN_INT (memory_usage),
6815 TYPE_MODE (integer_type_node));
6816 in_check_memory_usage = 0;
6820 temp = gen_rtx_MEM (mode, op0);
6821 set_mem_attributes (temp, exp, 0);
6823 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6824 here, because, in C and C++, the fact that a location is accessed
6825 through a pointer to const does not mean that the value there can
6826 never change. Languages where it can never change should
6827 also set TREE_STATIC. */
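/* E.g. for a C `const int *p', the object `*p' may still be modified through
   some other lvalue, so the read-only-ness of this reference alone is not
   enough to mark the MEM as unchanging.  */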
6828 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6830 /* If we are writing to this object and its type is a record with
6831 readonly fields, we must mark it as readonly so it will
6832 conflict with readonly references to those fields. */
6833 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6834 RTX_UNCHANGING_P (temp) = 1;
6836 return temp;
6839 case ARRAY_REF:
6840 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6841 abort ();
6844 tree array = TREE_OPERAND (exp, 0);
6845 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6846 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6847 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6848 HOST_WIDE_INT i;
6850 /* Optimize the special case of a zero lower bound.
6852 We convert the low_bound to sizetype to avoid some problems
6853 with constant folding. (E.g. suppose the lower bound is 1,
6854 and its mode is QI. Without the conversion, (ARRAY
6855 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6856 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6858 if (! integer_zerop (low_bound))
6859 index = size_diffop (index, convert (sizetype, low_bound));
6861 /* Fold an expression like: "foo"[2].
6862 This is not done in fold so it won't happen inside &.
6863 Don't fold if this is for wide characters since it's too
6864 difficult to do correctly and this is a very rare case. */
6866 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6867 && TREE_CODE (array) == STRING_CST
6868 && TREE_CODE (index) == INTEGER_CST
6869 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6870 && GET_MODE_CLASS (mode) == MODE_INT
6871 && GET_MODE_SIZE (mode) == 1)
6872 return
6873 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6875 /* If this is a constant index into a constant array,
6876 just get the value from the array. Handle both the cases when
6877 we have an explicit constructor and when our operand is a variable
6878 that was declared const. */
6880 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6881 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6882 && TREE_CODE (index) == INTEGER_CST
6883 && 0 > compare_tree_int (index,
6884 list_length (CONSTRUCTOR_ELTS
6885 (TREE_OPERAND (exp, 0)))))
6887 tree elem;
6889 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6890 i = TREE_INT_CST_LOW (index);
6891 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6894 if (elem)
6895 return expand_expr (fold (TREE_VALUE (elem)), target,
6896 tmode, ro_modifier);
6899 else if (optimize >= 1
6900 && modifier != EXPAND_CONST_ADDRESS
6901 && modifier != EXPAND_INITIALIZER
6902 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6903 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6904 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6906 if (TREE_CODE (index) == INTEGER_CST)
6908 tree init = DECL_INITIAL (array);
6910 if (TREE_CODE (init) == CONSTRUCTOR)
6912 tree elem;
6914 for (elem = CONSTRUCTOR_ELTS (init);
6915 (elem
6916 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6917 elem = TREE_CHAIN (elem))
6920 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6921 return expand_expr (fold (TREE_VALUE (elem)), target,
6922 tmode, ro_modifier);
6924 else if (TREE_CODE (init) == STRING_CST
6925 && 0 > compare_tree_int (index,
6926 TREE_STRING_LENGTH (init)))
6928 tree type = TREE_TYPE (TREE_TYPE (init));
6929 enum machine_mode mode = TYPE_MODE (type);
6931 if (GET_MODE_CLASS (mode) == MODE_INT
6932 && GET_MODE_SIZE (mode) == 1)
6933 return (GEN_INT
6934 (TREE_STRING_POINTER
6935 (init)[TREE_INT_CST_LOW (index)]));
6940 /* Fall through. */
6942 case COMPONENT_REF:
6943 case BIT_FIELD_REF:
6944 case ARRAY_RANGE_REF:
6945 /* If the operand is a CONSTRUCTOR, we can just extract the
6946 appropriate field if it is present. Don't do this if we have
6947 already written the data since we want to refer to that copy
6948 and varasm.c assumes that's what we'll do. */
6949 if (code == COMPONENT_REF
6950 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6951 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6953 tree elt;
6955 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6956 elt = TREE_CHAIN (elt))
6957 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6958 /* We can normally use the value of the field in the
6959 CONSTRUCTOR. However, if this is a bitfield in
6960 an integral mode that we can fit in a HOST_WIDE_INT,
6961 we must mask only the number of bits in the bitfield,
6962 since this is done implicitly by the constructor. If
6963 the bitfield does not meet either of those conditions,
6964 we can't do this optimization. */
6965 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6966 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6967 == MODE_INT)
6968 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6969 <= HOST_BITS_PER_WIDE_INT))))
6971 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6972 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6974 HOST_WIDE_INT bitsize
6975 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6977 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6979 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6980 op0 = expand_and (op0, op1, target);
6982 else
6984 enum machine_mode imode
6985 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6986 tree count
6987 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6990 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6991 target, 0);
6992 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6993 target, 0);
6997 return op0;
7002 enum machine_mode mode1;
7003 HOST_WIDE_INT bitsize, bitpos;
7004 tree offset;
7005 int volatilep = 0;
7006 unsigned int alignment;
7007 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7008 &mode1, &unsignedp, &volatilep,
7009 &alignment);
7011 /* If we got back the original object, something is wrong. Perhaps
7012 we are evaluating an expression too early. In any event, don't
7013 infinitely recurse. */
7014 if (tem == exp)
7015 abort ();
7017 /* If TEM's type is a union of variable size, pass TARGET to the inner
7018 computation, since it will need a temporary and TARGET is known
7019 to suffice. This occurs in unchecked conversion in Ada. */
7021 op0 = expand_expr (tem,
7022 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7023 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7024 != INTEGER_CST)
7025 ? target : NULL_RTX),
7026 VOIDmode,
7027 (modifier == EXPAND_INITIALIZER
7028 || modifier == EXPAND_CONST_ADDRESS)
7029 ? modifier : EXPAND_NORMAL);
7031 /* If this is a constant, put it into a register if it is a
7032 legitimate constant and OFFSET is 0 and memory if it isn't. */
7033 if (CONSTANT_P (op0))
7035 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7036 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7037 && offset == 0)
7038 op0 = force_reg (mode, op0);
7039 else
7040 op0 = validize_mem (force_const_mem (mode, op0));
7043 if (offset != 0)
7045 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7047 /* If this object is in a register, put it into memory.
7048 This case can't occur in C, but can in Ada if we have
7049 unchecked conversion of an expression from a scalar type to
7050 an array or record type. */
7051 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7052 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7054 /* If the operand is a SAVE_EXPR, we can deal with this by
7055 forcing the SAVE_EXPR into memory. */
7056 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7058 put_var_into_stack (TREE_OPERAND (exp, 0));
7059 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7061 else
7063 tree nt
7064 = build_qualified_type (TREE_TYPE (tem),
7065 (TYPE_QUALS (TREE_TYPE (tem))
7066 | TYPE_QUAL_CONST));
7067 rtx memloc = assign_temp (nt, 1, 1, 1);
7069 mark_temp_addr_taken (memloc);
7070 emit_move_insn (memloc, op0);
7071 op0 = memloc;
7075 if (GET_CODE (op0) != MEM)
7076 abort ();
7078 if (GET_MODE (offset_rtx) != ptr_mode)
7080 #ifdef POINTERS_EXTEND_UNSIGNED
7081 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7082 #else
7083 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7084 #endif
7087 /* A constant address in OP0 can have VOIDmode; we must not try
7088 to call force_reg for that case, so avoid it. */
7089 if (GET_CODE (op0) == MEM
7090 && GET_MODE (op0) == BLKmode
7091 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7092 && bitsize != 0
7093 && (bitpos % bitsize) == 0
7094 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7095 && alignment == GET_MODE_ALIGNMENT (mode1))
7097 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7099 if (GET_CODE (XEXP (temp, 0)) == REG)
7100 op0 = temp;
7101 else
7102 op0 = (replace_equiv_address
7103 (op0,
7104 force_reg (GET_MODE (XEXP (temp, 0)),
7105 XEXP (temp, 0))));
7106 bitpos = 0;
7109 op0 = change_address (op0, VOIDmode,
7110 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7111 force_reg (ptr_mode,
7112 offset_rtx)));
7115 /* Don't forget about volatility even if this is a bitfield. */
7116 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7118 op0 = copy_rtx (op0);
7119 MEM_VOLATILE_P (op0) = 1;
7122 /* Check the access. */
7123 if (cfun != 0 && current_function_check_memory_usage
7124 && GET_CODE (op0) == MEM)
7126 enum memory_use_mode memory_usage;
7127 memory_usage = get_memory_usage_from_modifier (modifier);
7129 if (memory_usage != MEMORY_USE_DONT)
7131 rtx to;
7132 int size;
7134 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7135 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7137 /* Check the access right of the pointer. */
7138 in_check_memory_usage = 1;
7139 if (size > BITS_PER_UNIT)
7140 emit_library_call (chkr_check_addr_libfunc,
7141 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7142 Pmode, GEN_INT (size / BITS_PER_UNIT),
7143 TYPE_MODE (sizetype),
7144 GEN_INT (memory_usage),
7145 TYPE_MODE (integer_type_node));
7146 in_check_memory_usage = 0;
7150 /* In cases where an aligned union has an unaligned object
7151 as a field, we might be extracting a BLKmode value from
7152 an integer-mode (e.g., SImode) object. Handle this case
7153 by doing the extract into an object as wide as the field
7154 (which we know to be the width of a basic mode), then
7155 storing into memory, and changing the mode to BLKmode. */
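/* For instance, when pulling a BLKmode field out of an SImode union member,
   the code below extracts it with extract_bit_field into an integer-mode
   temporary, copies that temporary to memory and returns a BLKmode MEM.  */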
7156 if (mode1 == VOIDmode
7157 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7158 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7159 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7160 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7161 && modifier != EXPAND_CONST_ADDRESS
7162 && modifier != EXPAND_INITIALIZER)
7163 /* If the field isn't aligned enough to fetch as a memref,
7164 fetch it as a bit field. */
7165 || (mode1 != BLKmode
7166 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7167 && ((TYPE_ALIGN (TREE_TYPE (tem))
7168 < GET_MODE_ALIGNMENT (mode))
7169 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7170 /* If the type and the field are a constant size and the
7171 size of the type isn't the same size as the bitfield,
7172 we must use bitfield operations. */
7173 || (bitsize >= 0
7174 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7175 == INTEGER_CST)
7176 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7177 bitsize))
7178 || (mode == BLKmode
7179 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7180 && (TYPE_ALIGN (type) > alignment
7181 || bitpos % TYPE_ALIGN (type) != 0)))
7183 enum machine_mode ext_mode = mode;
7185 if (ext_mode == BLKmode
7186 && ! (target != 0 && GET_CODE (op0) == MEM
7187 && GET_CODE (target) == MEM
7188 && bitpos % BITS_PER_UNIT == 0))
7189 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7191 if (ext_mode == BLKmode)
7193 /* In this case, BITPOS must start at a byte boundary and
7194 TARGET, if specified, must be a MEM. */
7195 if (GET_CODE (op0) != MEM
7196 || (target != 0 && GET_CODE (target) != MEM)
7197 || bitpos % BITS_PER_UNIT != 0)
7198 abort ();
7200 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7201 if (target == 0)
7202 target = assign_temp (type, 0, 1, 1);
7204 emit_block_move (target, op0,
7205 bitsize == -1 ? expr_size (exp)
7206 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7207 / BITS_PER_UNIT),
7208 BITS_PER_UNIT);
7210 return target;
7213 op0 = validize_mem (op0);
7215 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7216 mark_reg_pointer (XEXP (op0, 0), alignment);
7218 op0 = extract_bit_field (op0, bitsize, bitpos,
7219 unsignedp, target, ext_mode, ext_mode,
7220 alignment,
7221 int_size_in_bytes (TREE_TYPE (tem)));
7223 /* If the result is a record type and BITSIZE is narrower than
7224 the mode of OP0, an integral mode, and this is a big endian
7225 machine, we must put the field into the high-order bits. */
7226 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7227 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7228 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7229 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7230 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7231 - bitsize),
7232 op0, 1);
7234 if (mode == BLKmode)
7236 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7237 TYPE_QUAL_CONST);
7238 rtx new = assign_temp (nt, 0, 1, 1);
7240 emit_move_insn (new, op0);
7241 op0 = copy_rtx (new);
7242 PUT_MODE (op0, BLKmode);
7245 return op0;
7248 /* If the result is BLKmode, use that to access the object
7249 now as well. */
7250 if (mode == BLKmode)
7251 mode1 = BLKmode;
7253 /* Get a reference to just this component. */
7254 if (modifier == EXPAND_CONST_ADDRESS
7255 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7256 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7257 else
7258 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7260 set_mem_attributes (op0, exp, 0);
7261 if (GET_CODE (XEXP (op0, 0)) == REG)
7262 mark_reg_pointer (XEXP (op0, 0), alignment);
7264 MEM_VOLATILE_P (op0) |= volatilep;
7265 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7266 || modifier == EXPAND_CONST_ADDRESS
7267 || modifier == EXPAND_INITIALIZER)
7268 return op0;
7269 else if (target == 0)
7270 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7272 convert_move (target, op0, unsignedp);
7273 return target;
7276 /* Intended for a reference to a buffer of a file-object in Pascal.
7277 But it's not certain that a special tree code will really be
7278 necessary for these. INDIRECT_REF might work for them. */
7279 case BUFFER_REF:
7280 abort ();
7282 case IN_EXPR:
7284 /* Pascal set IN expression.
7286 Algorithm:
7287 rlo = set_low - (set_low%bits_per_word);
7288 the_word = set [ (index - rlo)/bits_per_word ];
7289 bit_index = index % bits_per_word;
7290 bitmask = 1 << bit_index;
7291 return !!(the_word & bitmask); */
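/* Working the pseudo-code above with set_low = 0, index = 13 and 8 bits per
   word: the_word = set[(13 - 0) / 8] = set[1], bit_index = 13 % 8 = 5,
   bitmask = 1 << 5 = 0x20, so the result is !!(set[1] & 0x20).  */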
7293 tree set = TREE_OPERAND (exp, 0);
7294 tree index = TREE_OPERAND (exp, 1);
7295 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7296 tree set_type = TREE_TYPE (set);
7297 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7298 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7299 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7300 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7301 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7302 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7303 rtx setaddr = XEXP (setval, 0);
7304 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7305 rtx rlow;
7306 rtx diff, quo, rem, addr, bit, result;
7308 /* If domain is empty, answer is no. Likewise if index is constant
7309 and out of bounds. */
7310 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7311 && TREE_CODE (set_low_bound) == INTEGER_CST
7312 && tree_int_cst_lt (set_high_bound, set_low_bound))
7313 || (TREE_CODE (index) == INTEGER_CST
7314 && TREE_CODE (set_low_bound) == INTEGER_CST
7315 && tree_int_cst_lt (index, set_low_bound))
7316 || (TREE_CODE (set_high_bound) == INTEGER_CST
7317 && TREE_CODE (index) == INTEGER_CST
7318 && tree_int_cst_lt (set_high_bound, index))))
7319 return const0_rtx;
7321 if (target == 0)
7322 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7324 /* If we get here, we have to generate the code for both cases
7325 (in range and out of range). */
7327 op0 = gen_label_rtx ();
7328 op1 = gen_label_rtx ();
7330 if (! (GET_CODE (index_val) == CONST_INT
7331 && GET_CODE (lo_r) == CONST_INT))
7333 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7334 GET_MODE (index_val), iunsignedp, 0, op1);
7337 if (! (GET_CODE (index_val) == CONST_INT
7338 && GET_CODE (hi_r) == CONST_INT))
7340 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7341 GET_MODE (index_val), iunsignedp, 0, op1);
7344 /* Calculate the element number of bit zero in the first word
7345 of the set. */
7346 if (GET_CODE (lo_r) == CONST_INT)
7347 rlow = GEN_INT (INTVAL (lo_r)
7348 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7349 else
7350 rlow = expand_binop (index_mode, and_optab, lo_r,
7351 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7352 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7354 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7355 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7357 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7358 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7359 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7360 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7362 addr = memory_address (byte_mode,
7363 expand_binop (index_mode, add_optab, diff,
7364 setaddr, NULL_RTX, iunsignedp,
7365 OPTAB_LIB_WIDEN));
7367 /* Extract the bit we want to examine. */
7368 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7369 gen_rtx_MEM (byte_mode, addr),
7370 make_tree (TREE_TYPE (index), rem),
7371 NULL_RTX, 1);
7372 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7373 GET_MODE (target) == byte_mode ? target : 0,
7374 1, OPTAB_LIB_WIDEN);
7376 if (result != target)
7377 convert_move (target, result, 1);
7379 /* Output the code to handle the out-of-range case. */
7380 emit_jump (op0);
7381 emit_label (op1);
7382 emit_move_insn (target, const0_rtx);
7383 emit_label (op0);
7384 return target;
7387 case WITH_CLEANUP_EXPR:
7388 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7390 WITH_CLEANUP_EXPR_RTL (exp)
7391 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7392 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7394 /* That's it for this cleanup. */
7395 TREE_OPERAND (exp, 1) = 0;
7397 return WITH_CLEANUP_EXPR_RTL (exp);
7399 case CLEANUP_POINT_EXPR:
7401 /* Start a new binding layer that will keep track of all cleanup
7402 actions to be performed. */
7403 expand_start_bindings (2);
7405 target_temp_slot_level = temp_slot_level;
7407 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7408 /* If we're going to use this value, load it up now. */
7409 if (! ignore)
7410 op0 = force_not_mem (op0);
7411 preserve_temp_slots (op0);
7412 expand_end_bindings (NULL_TREE, 0, 0);
7414 return op0;
7416 case CALL_EXPR:
7417 /* Check for a built-in function. */
7418 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7419 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7420 == FUNCTION_DECL)
7421 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7423 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7424 == BUILT_IN_FRONTEND)
7425 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7426 else
7427 return expand_builtin (exp, target, subtarget, tmode, ignore);
7430 return expand_call (exp, target, ignore);
7432 case NON_LVALUE_EXPR:
7433 case NOP_EXPR:
7434 case CONVERT_EXPR:
7435 case REFERENCE_EXPR:
7436 if (TREE_OPERAND (exp, 0) == error_mark_node)
7437 return const0_rtx;
7439 if (TREE_CODE (type) == UNION_TYPE)
7441 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7443 /* If both input and output are BLKmode, this conversion
7444 isn't actually doing anything unless we need to make the
7445 alignment stricter. */
7446 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7447 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7448 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7449 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7450 modifier);
7452 if (target == 0)
7453 target = assign_temp (type, 0, 1, 1);
7455 if (GET_CODE (target) == MEM)
7456 /* Store data into beginning of memory target. */
7457 store_expr (TREE_OPERAND (exp, 0),
7458 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7460 else if (GET_CODE (target) == REG)
7461 /* Store this field into a union of the proper type. */
7462 store_field (target,
7463 MIN ((int_size_in_bytes (TREE_TYPE
7464 (TREE_OPERAND (exp, 0)))
7465 * BITS_PER_UNIT),
7466 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7467 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7468 VOIDmode, 0, BITS_PER_UNIT,
7469 int_size_in_bytes (type), 0);
7470 else
7471 abort ();
7473 /* Return the entire union. */
7474 return target;
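/* For illustration only: when a conversion's result type is a union,
   e.g. converting an `int' value to `union u { int i; double d; }',
   the operand is simply stored into the first bytes of a temporary of
   the union type (store_expr for a MEM target, store_field for a REG
   target) and the whole temporary is the value of the conversion.  */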
7477 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7479 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7480 ro_modifier);
7482 /* If the signedness of the conversion differs and OP0 is
7483 a promoted SUBREG, clear that indication since we now
7484 have to do the proper extension. */
7485 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7486 && GET_CODE (op0) == SUBREG)
7487 SUBREG_PROMOTED_VAR_P (op0) = 0;
7489 return op0;
7492 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7493 if (GET_MODE (op0) == mode)
7494 return op0;
7496 /* If OP0 is a constant, just convert it into the proper mode. */
7497 if (CONSTANT_P (op0))
7498 return
7499 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7500 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7502 if (modifier == EXPAND_INITIALIZER)
7503 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7505 if (target == 0)
7506 return
7507 convert_to_mode (mode, op0,
7508 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7509 else
7510 convert_move (target, op0,
7511 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7512 return target;
7514 case PLUS_EXPR:
7515 /* We come here from MINUS_EXPR when the second operand is a
7516 constant. */
7517 plus_expr:
7518 this_optab = ! unsignedp && flag_trapv
7519 && (GET_MODE_CLASS(mode) == MODE_INT)
7520 ? addv_optab : add_optab;
7522 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7523 something else, make sure we add the register to the constant and
7524 then to the other thing. This case can occur during strength
7525 reduction and doing it this way will produce better code if the
7526 frame pointer or argument pointer is eliminated.
7528 fold-const.c will ensure that the constant is always in the inner
7529 PLUS_EXPR, so the only case we need to do anything about is if
7530 sp, ap, or fp is our second argument, in which case we must swap
7531 the innermost first argument and our second argument. */
7533 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7534 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7535 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7536 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7537 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7538 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7540 tree t = TREE_OPERAND (exp, 1);
7542 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7543 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7546 /* If the result is to be ptr_mode and we are adding an integer to
7547 something, we might be forming a constant. So try to use
7548 plus_constant. If it produces a sum and we can't accept it,
7549 use force_operand. This allows P = &ARR[const] to generate
7550 efficient code on machines where a SYMBOL_REF is not a valid
7551 address.
7553 If this is an EXPAND_SUM call, always return the sum. */
7554 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7555 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7557 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7558 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7559 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7561 rtx constant_part;
7563 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7564 EXPAND_SUM);
7565 /* Use immed_double_const to ensure that the constant is
7566 truncated according to the mode of OP1, then sign extended
7567 to a HOST_WIDE_INT. Using the constant directly can result
7568 in non-canonical RTL in a 64x32 cross compile. */
7569 constant_part
7570 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7571 (HOST_WIDE_INT) 0,
7572 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7573 op1 = plus_constant (op1, INTVAL (constant_part));
7574 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7575 op1 = force_operand (op1, target);
7576 return op1;
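/* A hedged example of the canonicalization above: on a 64-bit host
   compiling for a 32-bit target, a tree constant whose low word is
   0x80000000 must become (const_int -2147483648) -- sign-extended from
   SImode to the host-wide integer -- rather than (const_int 0x80000000).
   immed_double_const performs that truncate-and-sign-extend, so
   plus_constant then sees a canonical CONST_INT.  */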
7579 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7580 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7581 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7583 rtx constant_part;
7585 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7586 EXPAND_SUM);
7587 if (! CONSTANT_P (op0))
7589 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7590 VOIDmode, modifier);
7591 /* Don't go to both_summands if modifier
7592 says it's not right to return a PLUS. */
7593 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7594 goto binop2;
7595 goto both_summands;
7597 /* Use immed_double_const to ensure that the constant is
7598 truncated according to the mode of OP0, then sign extended
7599 to a HOST_WIDE_INT. Using the constant directly can result
7600 in non-canonical RTL in a 64x32 cross compile. */
7601 constant_part
7602 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7603 (HOST_WIDE_INT) 0,
7604 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7605 op0 = plus_constant (op0, INTVAL (constant_part));
7606 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7607 op0 = force_operand (op0, target);
7608 return op0;
7612 /* No sense saving up arithmetic to be done
7613 if it's all in the wrong mode to form part of an address.
7614 And force_operand won't know whether to sign-extend or
7615 zero-extend. */
7616 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7617 || mode != ptr_mode)
7618 goto binop;
7620 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7621 subtarget = 0;
7623 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7624 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7626 both_summands:
7627 /* Make sure any term that's a sum with a constant comes last. */
7628 if (GET_CODE (op0) == PLUS
7629 && CONSTANT_P (XEXP (op0, 1)))
7631 temp = op0;
7632 op0 = op1;
7633 op1 = temp;
7635 /* If adding to a sum including a constant,
7636 associate it to put the constant outside. */
7637 if (GET_CODE (op1) == PLUS
7638 && CONSTANT_P (XEXP (op1, 1)))
7640 rtx constant_term = const0_rtx;
7642 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7643 if (temp != 0)
7644 op0 = temp;
7645 /* Ensure that MULT comes first if there is one. */
7646 else if (GET_CODE (op0) == MULT)
7647 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7648 else
7649 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7651 /* Let's also eliminate constants from op0 if possible. */
7652 op0 = eliminate_constant_term (op0, &constant_term);
7654 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7655 their sum should be a constant. Form it into OP1, since the
7656 result we want will then be OP0 + OP1. */
7658 temp = simplify_binary_operation (PLUS, mode, constant_term,
7659 XEXP (op1, 1));
7660 if (temp != 0)
7661 op1 = temp;
7662 else
7663 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7666 /* Put a constant term last and put a multiplication first. */
7667 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7668 temp = op1, op1 = op0, op0 = temp;
7670 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7671 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
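/* Illustrative sketch of the canonical sum built here: something like

       (plus (plus (mult (reg) (const_int 4)) (reg)) (const_int 8))

   i.e. any MULT term first, other terms next, and the accumulated
   constant last -- the shape that memory-address recognizers generally
   expect.  */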
7673 case MINUS_EXPR:
7674 /* For initializers, we are allowed to return a MINUS of two
7675 symbolic constants. Here we handle all cases when both operands
7676 are constant. */
7677 /* Handle difference of two symbolic constants,
7678 for the sake of an initializer. */
7679 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7680 && really_constant_p (TREE_OPERAND (exp, 0))
7681 && really_constant_p (TREE_OPERAND (exp, 1)))
7683 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7684 VOIDmode, ro_modifier);
7685 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7686 VOIDmode, ro_modifier);
7688 /* If the last operand is a CONST_INT, use plus_constant of
7689 the negated constant. Else make the MINUS. */
7690 if (GET_CODE (op1) == CONST_INT)
7691 return plus_constant (op0, - INTVAL (op1));
7692 else
7693 return gen_rtx_MINUS (mode, op0, op1);
7695 /* Convert A - const to A + (-const). */
7696 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7698 tree negated = fold (build1 (NEGATE_EXPR, type,
7699 TREE_OPERAND (exp, 1)));
7701 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7702 /* If we can't negate the constant in TYPE, leave it alone and
7703 expand_binop will negate it for us. We used to try to do it
7704 here in the signed version of TYPE, but that doesn't work
7705 on POINTER_TYPEs. */;
7706 else
7708 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7709 goto plus_expr;
7712 this_optab = ! unsignedp && flag_trapv
7713 && (GET_MODE_CLASS(mode) == MODE_INT)
7714 ? subv_optab : sub_optab;
7715 goto binop;
7717 case MULT_EXPR:
7718 /* If first operand is constant, swap them.
7719 Thus the following special case checks need only
7720 check the second operand. */
7721 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7723 register tree t1 = TREE_OPERAND (exp, 0);
7724 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7725 TREE_OPERAND (exp, 1) = t1;
7728 /* Attempt to return something suitable for generating an
7729 indexed address, for machines that support that. */
7731 if (modifier == EXPAND_SUM && mode == ptr_mode
7732 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7733 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7735 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7736 EXPAND_SUM);
7738 /* Apply distributive law if OP0 is x+c. */
7739 if (GET_CODE (op0) == PLUS
7740 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7741 return
7742 gen_rtx_PLUS
7743 (mode,
7744 gen_rtx_MULT
7745 (mode, XEXP (op0, 0),
7746 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7747 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7748 * INTVAL (XEXP (op0, 1))));
7750 if (GET_CODE (op0) != REG)
7751 op0 = force_operand (op0, NULL_RTX);
7752 if (GET_CODE (op0) != REG)
7753 op0 = copy_to_mode_reg (mode, op0);
7755 return
7756 gen_rtx_MULT (mode, op0,
7757 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
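/* For illustration (assuming ptr_mode arithmetic): expanding
   (x + 3) * 4 under EXPAND_SUM distributes to

       (plus (mult (reg x) (const_int 4)) (const_int 12))

   which an indexed addressing mode can often absorb directly; without
   an inner constant the result is just (mult (reg) (const_int 4)).  */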
7760 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7761 subtarget = 0;
7763 /* Check for multiplying things that have been extended
7764 from a narrower type. If this machine supports multiplying
7765 in that narrower type with a result in the desired type,
7766 do it that way, and avoid the explicit type-conversion. */
7767 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7768 && TREE_CODE (type) == INTEGER_TYPE
7769 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7770 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7771 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7772 && int_fits_type_p (TREE_OPERAND (exp, 1),
7773 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7774 /* Don't use a widening multiply if a shift will do. */
7775 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7776 > HOST_BITS_PER_WIDE_INT)
7777 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7778 ||
7779 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7780 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7781 ==
7782 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7783 /* If both operands are extended, they must either both
7784 be zero-extended or both be sign-extended. */
7785 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7786 ==
7787 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7789 enum machine_mode innermode
7790 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7791 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7792 ? smul_widen_optab : umul_widen_optab);
7793 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7794 ? umul_widen_optab : smul_widen_optab);
7795 if (mode == GET_MODE_WIDER_MODE (innermode))
7797 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7799 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7800 NULL_RTX, VOIDmode, 0);
7801 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7802 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7803 VOIDmode, 0);
7804 else
7805 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7806 NULL_RTX, VOIDmode, 0);
7807 goto binop2;
7809 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7810 && innermode == word_mode)
7812 rtx htem;
7813 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7814 NULL_RTX, VOIDmode, 0);
7815 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7816 op1 = convert_modes (innermode, mode,
7817 expand_expr (TREE_OPERAND (exp, 1),
7818 NULL_RTX, VOIDmode, 0),
7819 unsignedp);
7820 else
7821 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7822 NULL_RTX, VOIDmode, 0);
7823 temp = expand_binop (mode, other_optab, op0, op1, target,
7824 unsignedp, OPTAB_LIB_WIDEN);
7825 htem = expand_mult_highpart_adjust (innermode,
7826 gen_highpart (innermode, temp),
7827 op0, op1,
7828 gen_highpart (innermode, temp),
7829 unsignedp);
7830 emit_move_insn (gen_highpart (innermode, temp), htem);
7831 return temp;
7835 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7836 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7837 return expand_mult (mode, op0, op1, target, unsignedp);
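/* A rough example of the widening-multiply case above: for
   (int) (short) a * (int) (short) b on a target that provides a signed
   HImode x HImode -> SImode multiply (a "mulhisi3"-style pattern), the
   product is emitted directly in the widening pattern instead of first
   extending both operands to SImode.  When only the opposite-signedness
   widening multiply exists and the narrow mode is word_mode, the
   result's high part is patched up afterwards via
   expand_mult_highpart_adjust.  */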
7839 case TRUNC_DIV_EXPR:
7840 case FLOOR_DIV_EXPR:
7841 case CEIL_DIV_EXPR:
7842 case ROUND_DIV_EXPR:
7843 case EXACT_DIV_EXPR:
7844 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7845 subtarget = 0;
7846 /* Possible optimization: compute the dividend with EXPAND_SUM
7847 then if the divisor is constant can optimize the case
7848 where some terms of the dividend have coeffs divisible by it. */
7849 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7850 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7851 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7853 case RDIV_EXPR:
7854 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7855 saving an expensive divide. If not, combine will rebuild the
7856 original computation. */
7857 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7858 && !real_onep (TREE_OPERAND (exp, 0)))
7859 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7860 build (RDIV_EXPR, type,
7861 build_real (type, dconst1),
7862 TREE_OPERAND (exp, 1))),
7863 target, tmode, unsignedp);
7864 this_optab = flodiv_optab;
7865 goto binop;
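/* Roughly what the transformation above buys: with
   -funsafe-math-optimizations, x/y and z/y in the same stretch of code
   both become multiplications by the one reciprocal (1/y) once CSE
   merges the two reciprocal computations -- one divide plus two
   multiplies instead of two divides.  If nothing is shared, combine is
   expected to fold x * (1/y) back into x/y.  */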
7867 case TRUNC_MOD_EXPR:
7868 case FLOOR_MOD_EXPR:
7869 case CEIL_MOD_EXPR:
7870 case ROUND_MOD_EXPR:
7871 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7872 subtarget = 0;
7873 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7874 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7875 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7877 case FIX_ROUND_EXPR:
7878 case FIX_FLOOR_EXPR:
7879 case FIX_CEIL_EXPR:
7880 abort (); /* Not used for C. */
7882 case FIX_TRUNC_EXPR:
7883 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7884 if (target == 0)
7885 target = gen_reg_rtx (mode);
7886 expand_fix (target, op0, unsignedp);
7887 return target;
7889 case FLOAT_EXPR:
7890 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7891 if (target == 0)
7892 target = gen_reg_rtx (mode);
7893 /* expand_float can't figure out what to do if FROM has VOIDmode.
7894 So give it the correct mode. With -O, cse will optimize this. */
7895 if (GET_MODE (op0) == VOIDmode)
7896 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7897 op0);
7898 expand_float (target, op0,
7899 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7900 return target;
7902 case NEGATE_EXPR:
7903 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7904 temp = expand_unop (mode,
7905 ! unsignedp && flag_trapv
7906 && (GET_MODE_CLASS(mode) == MODE_INT)
7907 ? negv_optab : neg_optab, op0, target, 0);
7908 if (temp == 0)
7909 abort ();
7910 return temp;
7912 case ABS_EXPR:
7913 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7915 /* Handle complex values specially. */
7916 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7917 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7918 return expand_complex_abs (mode, op0, target, unsignedp);
7920 /* Unsigned abs is simply the operand. Testing here means we don't
7921 risk generating incorrect code below. */
7922 if (TREE_UNSIGNED (type))
7923 return op0;
7925 return expand_abs (mode, op0, target, unsignedp,
7926 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7928 case MAX_EXPR:
7929 case MIN_EXPR:
7930 target = original_target;
7931 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7932 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7933 || GET_MODE (target) != mode
7934 || (GET_CODE (target) == REG
7935 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7936 target = gen_reg_rtx (mode);
7937 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7938 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7940 /* First try to do it with a special MIN or MAX instruction.
7941 If that does not win, use a conditional jump to select the proper
7942 value. */
7943 this_optab = (TREE_UNSIGNED (type)
7944 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7945 : (code == MIN_EXPR ? smin_optab : smax_optab));
7947 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7948 OPTAB_WIDEN);
7949 if (temp != 0)
7950 return temp;
7952 /* At this point, a MEM target is no longer useful; we will get better
7953 code without it. */
7955 if (GET_CODE (target) == MEM)
7956 target = gen_reg_rtx (mode);
7958 if (target != op0)
7959 emit_move_insn (target, op0);
7961 op0 = gen_label_rtx ();
7963 /* If this mode is an integer too wide to compare properly,
7964 compare word by word. Rely on cse to optimize constant cases. */
7965 if (GET_MODE_CLASS (mode) == MODE_INT
7966 && ! can_compare_p (GE, mode, ccp_jump))
7968 if (code == MAX_EXPR)
7969 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7970 target, op1, NULL_RTX, op0);
7971 else
7972 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7973 op1, target, NULL_RTX, op0);
7975 else
7977 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7978 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7979 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7980 op0);
7982 emit_move_insn (target, op1);
7983 emit_label (op0);
7984 return target;
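/* For illustration, the fallback for MAX_EXPR emits roughly

       target = a;
       if (a >= b) goto done;
       target = b;
     done:

   with <= in place of >= for MIN_EXPR, and a word-by-word comparison
   when the integer mode is too wide for a direct compare.  */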
7986 case BIT_NOT_EXPR:
7987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7988 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7989 if (temp == 0)
7990 abort ();
7991 return temp;
7993 case FFS_EXPR:
7994 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7995 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7996 if (temp == 0)
7997 abort ();
7998 return temp;
8000 /* ??? Can optimize bitwise operations with one arg constant.
8001 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8002 and (a bitwise1 b) bitwise2 b (etc)
8003 but that is probably not worthwhile. */
8005 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8006 boolean values when we want in all cases to compute both of them. In
8007 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8008 as actual zero-or-1 values and then bitwise anding. In cases where
8009 there cannot be any side effects, better code would be made by
8010 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8011 how to recognize those cases. */
8013 case TRUTH_AND_EXPR:
8014 case BIT_AND_EXPR:
8015 this_optab = and_optab;
8016 goto binop;
8018 case TRUTH_OR_EXPR:
8019 case BIT_IOR_EXPR:
8020 this_optab = ior_optab;
8021 goto binop;
8023 case TRUTH_XOR_EXPR:
8024 case BIT_XOR_EXPR:
8025 this_optab = xor_optab;
8026 goto binop;
8028 case LSHIFT_EXPR:
8029 case RSHIFT_EXPR:
8030 case LROTATE_EXPR:
8031 case RROTATE_EXPR:
8032 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8033 subtarget = 0;
8034 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8035 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8036 unsignedp);
8038 /* Could determine the answer when only additive constants differ. Also,
8039 the addition of one can be handled by changing the condition. */
8040 case LT_EXPR:
8041 case LE_EXPR:
8042 case GT_EXPR:
8043 case GE_EXPR:
8044 case EQ_EXPR:
8045 case NE_EXPR:
8046 case UNORDERED_EXPR:
8047 case ORDERED_EXPR:
8048 case UNLT_EXPR:
8049 case UNLE_EXPR:
8050 case UNGT_EXPR:
8051 case UNGE_EXPR:
8052 case UNEQ_EXPR:
8053 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8054 if (temp != 0)
8055 return temp;
8057 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8058 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8059 && original_target
8060 && GET_CODE (original_target) == REG
8061 && (GET_MODE (original_target)
8062 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8064 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8065 VOIDmode, 0);
8067 if (temp != original_target)
8068 temp = copy_to_reg (temp);
8070 op1 = gen_label_rtx ();
8071 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8072 GET_MODE (temp), unsignedp, 0, op1);
8073 emit_move_insn (temp, const1_rtx);
8074 emit_label (op1);
8075 return temp;
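/* Sketch of the special case just above for `foo != 0' when no
   store-flag instruction applies:

       temp = foo;
       if (temp == 0) goto L;
       temp = 1;
     L:

   leaving 0 or 1 in TEMP (the loaded value of foo doubles as the
   initial zero result).  */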
8078 /* If no set-flag instruction, must generate a conditional
8079 store into a temporary variable. Drop through
8080 and handle this like && and ||. */
8082 case TRUTH_ANDIF_EXPR:
8083 case TRUTH_ORIF_EXPR:
8084 if (! ignore
8085 && (target == 0 || ! safe_from_p (target, exp, 1)
8086 /* Make sure we don't have a hard reg (such as function's return
8087 value) live across basic blocks, if not optimizing. */
8088 || (!optimize && GET_CODE (target) == REG
8089 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8090 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8092 if (target)
8093 emit_clr_insn (target);
8095 op1 = gen_label_rtx ();
8096 jumpifnot (exp, op1);
8098 if (target)
8099 emit_0_to_1_insn (target);
8101 emit_label (op1);
8102 return ignore ? const0_rtx : target;
8104 case TRUTH_NOT_EXPR:
8105 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8106 /* The parser is careful to generate TRUTH_NOT_EXPR
8107 only with operands that are always zero or one. */
8108 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8109 target, 1, OPTAB_LIB_WIDEN);
8110 if (temp == 0)
8111 abort ();
8112 return temp;
8114 case COMPOUND_EXPR:
8115 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8116 emit_queue ();
8117 return expand_expr (TREE_OPERAND (exp, 1),
8118 (ignore ? const0_rtx : target),
8119 VOIDmode, 0);
8121 case COND_EXPR:
8122 /* If we would have a "singleton" (see below) were it not for a
8123 conversion in each arm, bring that conversion back out. */
8124 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8125 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8126 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8127 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8129 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8130 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8132 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8133 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8134 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8135 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8136 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8137 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8138 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8139 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8140 return expand_expr (build1 (NOP_EXPR, type,
8141 build (COND_EXPR, TREE_TYPE (iftrue),
8142 TREE_OPERAND (exp, 0),
8143 iftrue, iffalse)),
8144 target, tmode, modifier);
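/* For example (illustrative only): a tree of the form

       x ? (long) (a + 1) : (long) a

   is rewritten here as (long) (x ? a + 1 : a), so that the "singleton"
   handling below can see the A-plus-constant shape through the
   conversions.  */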
8148 /* Note that COND_EXPRs whose type is a structure or union
8149 are required to be constructed to contain assignments of
8150 a temporary variable, so that we can evaluate them here
8151 for side effect only. If type is void, we must do likewise. */
8153 /* If an arm of the branch requires a cleanup,
8154 only that cleanup is performed. */
8156 tree singleton = 0;
8157 tree binary_op = 0, unary_op = 0;
8159 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8160 convert it to our mode, if necessary. */
8161 if (integer_onep (TREE_OPERAND (exp, 1))
8162 && integer_zerop (TREE_OPERAND (exp, 2))
8163 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8165 if (ignore)
8167 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8168 ro_modifier);
8169 return const0_rtx;
8172 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8173 if (GET_MODE (op0) == mode)
8174 return op0;
8176 if (target == 0)
8177 target = gen_reg_rtx (mode);
8178 convert_move (target, op0, unsignedp);
8179 return target;
8182 /* Check for X ? A + B : A. If we have this, we can copy A to the
8183 output and conditionally add B. Similarly for unary operations.
8184 Don't do this if X has side-effects because those side effects
8185 might affect A or B and the "?" operation is a sequence point in
8186 ANSI. (operand_equal_p tests for side effects.) */
8188 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8189 && operand_equal_p (TREE_OPERAND (exp, 2),
8190 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8191 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8192 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8193 && operand_equal_p (TREE_OPERAND (exp, 1),
8194 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8195 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8196 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8197 && operand_equal_p (TREE_OPERAND (exp, 2),
8198 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8199 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8200 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8201 && operand_equal_p (TREE_OPERAND (exp, 1),
8202 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8203 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8205 /* If we are not to produce a result, we have no target. Otherwise,
8206 if a target was specified use it; it will not be used as an
8207 intermediate target unless it is safe. If no target, use a
8208 temporary. */
8210 if (ignore)
8211 temp = 0;
8212 else if (original_target
8213 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8214 || (singleton && GET_CODE (original_target) == REG
8215 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8216 && original_target == var_rtx (singleton)))
8217 && GET_MODE (original_target) == mode
8218 #ifdef HAVE_conditional_move
8219 && (! can_conditionally_move_p (mode)
8220 || GET_CODE (original_target) == REG
8221 || TREE_ADDRESSABLE (type))
8222 #endif
8223 && (GET_CODE (original_target) != MEM
8224 || TREE_ADDRESSABLE (type)))
8225 temp = original_target;
8226 else if (TREE_ADDRESSABLE (type))
8227 abort ();
8228 else
8229 temp = assign_temp (type, 0, 0, 1);
8231 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8232 do the test of X as a store-flag operation, do this as
8233 A + ((X != 0) << log C). Similarly for other simple binary
8234 operators. Only do for C == 1 if BRANCH_COST is low. */
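/* Worked example (illustrative): X ? A + 4 : A becomes
   A + ((X != 0) << 2); X ? A : A + 4 first inverts the condition and
   then does the same.  The analogous rewrite is used for -, | and ^,
   and for C == 1 no shift is needed at all.  */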
8235 if (temp && singleton && binary_op
8236 && (TREE_CODE (binary_op) == PLUS_EXPR
8237 || TREE_CODE (binary_op) == MINUS_EXPR
8238 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8239 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8240 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8241 : integer_onep (TREE_OPERAND (binary_op, 1)))
8242 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8244 rtx result;
8245 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8246 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8247 ? addv_optab : add_optab)
8248 : TREE_CODE (binary_op) == MINUS_EXPR
8249 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8250 ? subv_optab : sub_optab)
8251 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8252 : xor_optab);
8254 /* If we had X ? A : A + 1, do this as A + (X == 0).
8256 We have to invert the truth value here and then put it
8257 back later if do_store_flag fails. We cannot simply copy
8258 TREE_OPERAND (exp, 0) to another variable and modify that
8259 because invert_truthvalue can modify the tree pointed to
8260 by its argument. */
8261 if (singleton == TREE_OPERAND (exp, 1))
8262 TREE_OPERAND (exp, 0)
8263 = invert_truthvalue (TREE_OPERAND (exp, 0));
8265 result = do_store_flag (TREE_OPERAND (exp, 0),
8266 (safe_from_p (temp, singleton, 1)
8267 ? temp : NULL_RTX),
8268 mode, BRANCH_COST <= 1);
8270 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8271 result = expand_shift (LSHIFT_EXPR, mode, result,
8272 build_int_2 (tree_log2
8273 (TREE_OPERAND
8274 (binary_op, 1)),
8275 0),
8276 (safe_from_p (temp, singleton, 1)
8277 ? temp : NULL_RTX), 0);
8279 if (result)
8281 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8282 return expand_binop (mode, boptab, op1, result, temp,
8283 unsignedp, OPTAB_LIB_WIDEN);
8285 else if (singleton == TREE_OPERAND (exp, 1))
8286 TREE_OPERAND (exp, 0)
8287 = invert_truthvalue (TREE_OPERAND (exp, 0));
8290 do_pending_stack_adjust ();
8291 NO_DEFER_POP;
8292 op0 = gen_label_rtx ();
8294 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8296 if (temp != 0)
8298 /* If the target conflicts with the other operand of the
8299 binary op, we can't use it. Also, we can't use the target
8300 if it is a hard register, because evaluating the condition
8301 might clobber it. */
8302 if ((binary_op
8303 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8304 || (GET_CODE (temp) == REG
8305 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8306 temp = gen_reg_rtx (mode);
8307 store_expr (singleton, temp, 0);
8309 else
8310 expand_expr (singleton,
8311 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8312 if (singleton == TREE_OPERAND (exp, 1))
8313 jumpif (TREE_OPERAND (exp, 0), op0);
8314 else
8315 jumpifnot (TREE_OPERAND (exp, 0), op0);
8317 start_cleanup_deferral ();
8318 if (binary_op && temp == 0)
8319 /* Just touch the other operand. */
8320 expand_expr (TREE_OPERAND (binary_op, 1),
8321 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8322 else if (binary_op)
8323 store_expr (build (TREE_CODE (binary_op), type,
8324 make_tree (type, temp),
8325 TREE_OPERAND (binary_op, 1)),
8326 temp, 0);
8327 else
8328 store_expr (build1 (TREE_CODE (unary_op), type,
8329 make_tree (type, temp)),
8330 temp, 0);
8331 op1 = op0;
8333 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8334 comparison operator. If we have one of these cases, set the
8335 output to A, branch on A (cse will merge these two references),
8336 then set the output to FOO. */
8337 else if (temp
8338 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8339 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8340 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8341 TREE_OPERAND (exp, 1), 0)
8342 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8343 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8344 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8346 if (GET_CODE (temp) == REG
8347 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8348 temp = gen_reg_rtx (mode);
8349 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8350 jumpif (TREE_OPERAND (exp, 0), op0);
8352 start_cleanup_deferral ();
8353 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8354 op1 = op0;
8356 else if (temp
8357 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8358 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8359 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8360 TREE_OPERAND (exp, 2), 0)
8361 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8362 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8363 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8365 if (GET_CODE (temp) == REG
8366 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8367 temp = gen_reg_rtx (mode);
8368 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8369 jumpifnot (TREE_OPERAND (exp, 0), op0);
8371 start_cleanup_deferral ();
8372 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8373 op1 = op0;
8375 else
8377 op1 = gen_label_rtx ();
8378 jumpifnot (TREE_OPERAND (exp, 0), op0);
8380 start_cleanup_deferral ();
8382 /* One branch of the cond can be void, if it never returns. For
8383 example A ? throw : E */
8384 if (temp != 0
8385 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8386 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8387 else
8388 expand_expr (TREE_OPERAND (exp, 1),
8389 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8390 end_cleanup_deferral ();
8391 emit_queue ();
8392 emit_jump_insn (gen_jump (op1));
8393 emit_barrier ();
8394 emit_label (op0);
8395 start_cleanup_deferral ();
8396 if (temp != 0
8397 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8398 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8399 else
8400 expand_expr (TREE_OPERAND (exp, 2),
8401 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8404 end_cleanup_deferral ();
8406 emit_queue ();
8407 emit_label (op1);
8408 OK_DEFER_POP;
8410 return temp;
8413 case TARGET_EXPR:
8415 /* Something needs to be initialized, but we didn't know
8416 where that thing was when building the tree. For example,
8417 it could be the return value of a function, or a parameter
8418 to a function which is laid down in the stack, or a temporary
8419 variable which must be passed by reference.
8421 We guarantee that the expression will either be constructed
8422 or copied into our original target. */
8424 tree slot = TREE_OPERAND (exp, 0);
8425 tree cleanups = NULL_TREE;
8426 tree exp1;
8428 if (TREE_CODE (slot) != VAR_DECL)
8429 abort ();
8431 if (! ignore)
8432 target = original_target;
8434 /* Set this here so that if we get a target that refers to a
8435 register variable that's already been used, put_reg_into_stack
8436 knows that it should fix up those uses. */
8437 TREE_USED (slot) = 1;
8439 if (target == 0)
8441 if (DECL_RTL_SET_P (slot))
8443 target = DECL_RTL (slot);
8444 /* If we have already expanded the slot, don't do
8445 it again. (mrs) */
8446 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8447 return target;
8449 else
8451 target = assign_temp (type, 2, 0, 1);
8452 /* All temp slots at this level must not conflict. */
8453 preserve_temp_slots (target);
8454 SET_DECL_RTL (slot, target);
8455 if (TREE_ADDRESSABLE (slot))
8456 put_var_into_stack (slot);
8458 /* Since SLOT is not known to the called function
8459 to belong to its stack frame, we must build an explicit
8460 cleanup. This case occurs when we must build up a reference
8461 to pass as an argument. In this case,
8462 it is very likely that such a reference need not be
8463 built here. */
8465 if (TREE_OPERAND (exp, 2) == 0)
8466 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8467 cleanups = TREE_OPERAND (exp, 2);
8470 else
8472 /* This case does occur, when expanding a parameter which
8473 needs to be constructed on the stack. The target
8474 is the actual stack address that we want to initialize.
8475 The function we call will perform the cleanup in this case. */
8477 /* If we have already assigned it space, use that space,
8478 not the target that we were passed in, as our target
8479 parameter is only a hint. */
8480 if (DECL_RTL_SET_P (slot))
8482 target = DECL_RTL (slot);
8483 /* If we have already expanded the slot, don't do
8484 it again. (mrs) */
8485 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8486 return target;
8488 else
8490 SET_DECL_RTL (slot, target);
8491 /* If we must have an addressable slot, then make sure that
8492 the RTL that we just stored in slot is OK. */
8493 if (TREE_ADDRESSABLE (slot))
8494 put_var_into_stack (slot);
8498 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8499 /* Mark it as expanded. */
8500 TREE_OPERAND (exp, 1) = NULL_TREE;
8502 store_expr (exp1, target, 0);
8504 expand_decl_cleanup (NULL_TREE, cleanups);
8506 return target;
8509 case INIT_EXPR:
8511 tree lhs = TREE_OPERAND (exp, 0);
8512 tree rhs = TREE_OPERAND (exp, 1);
8513 tree noncopied_parts = 0;
8514 tree lhs_type = TREE_TYPE (lhs);
8516 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8517 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8518 noncopied_parts
8519 = init_noncopied_parts (stabilize_reference (lhs),
8520 TYPE_NONCOPIED_PARTS (lhs_type));
8522 while (noncopied_parts != 0)
8524 expand_assignment (TREE_VALUE (noncopied_parts),
8525 TREE_PURPOSE (noncopied_parts), 0, 0);
8526 noncopied_parts = TREE_CHAIN (noncopied_parts);
8528 return temp;
8531 case MODIFY_EXPR:
8533 /* If lhs is complex, expand calls in rhs before computing it.
8534 That's so we don't compute a pointer and save it over a call.
8535 If lhs is simple, compute it first so we can give it as a
8536 target if the rhs is just a call. This avoids an extra temp and copy
8537 and that prevents a partial-subsumption which makes bad code.
8538 Actually we could treat component_ref's of vars like vars. */
8540 tree lhs = TREE_OPERAND (exp, 0);
8541 tree rhs = TREE_OPERAND (exp, 1);
8542 tree noncopied_parts = 0;
8543 tree lhs_type = TREE_TYPE (lhs);
8545 temp = 0;
8547 /* Check for |= or &= of a bitfield of size one into another bitfield
8548 of size 1. In this case, (unless we need the result of the
8549 assignment) we can do this more efficiently with a
8550 test followed by an assignment, if necessary.
8552 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8553 things change so we do, this code should be enhanced to
8554 support it. */
8555 if (ignore
8556 && TREE_CODE (lhs) == COMPONENT_REF
8557 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8558 || TREE_CODE (rhs) == BIT_AND_EXPR)
8559 && TREE_OPERAND (rhs, 0) == lhs
8560 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8561 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8562 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8564 rtx label = gen_label_rtx ();
8566 do_jump (TREE_OPERAND (rhs, 1),
8567 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8568 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8569 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8570 (TREE_CODE (rhs) == BIT_IOR_EXPR
8571 ? integer_one_node
8572 : integer_zero_node)),
8573 0, 0);
8574 do_pending_stack_adjust ();
8575 emit_label (label);
8576 return const0_rtx;
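/* For illustration: with one-bit fields, and the result of the
   assignment unused,

       a.x |= b.y;   becomes   if (b.y) a.x = 1;
       a.x &= b.y;   becomes   if (! b.y) a.x = 0;

   a single test plus a conditional store instead of a read-modify-write
   of the destination bitfield.  */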
8579 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8580 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8581 noncopied_parts
8582 = save_noncopied_parts (stabilize_reference (lhs),
8583 TYPE_NONCOPIED_PARTS (lhs_type));
8585 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8586 while (noncopied_parts != 0)
8588 expand_assignment (TREE_PURPOSE (noncopied_parts),
8589 TREE_VALUE (noncopied_parts), 0, 0);
8590 noncopied_parts = TREE_CHAIN (noncopied_parts);
8592 return temp;
8595 case RETURN_EXPR:
8596 if (!TREE_OPERAND (exp, 0))
8597 expand_null_return ();
8598 else
8599 expand_return (TREE_OPERAND (exp, 0));
8600 return const0_rtx;
8602 case PREINCREMENT_EXPR:
8603 case PREDECREMENT_EXPR:
8604 return expand_increment (exp, 0, ignore);
8606 case POSTINCREMENT_EXPR:
8607 case POSTDECREMENT_EXPR:
8608 /* Faster to treat as pre-increment if result is not used. */
8609 return expand_increment (exp, ! ignore, ignore);
8611 case ADDR_EXPR:
8612 /* If nonzero, TEMP will be set to the address of something that might
8613 be a MEM corresponding to a stack slot. */
8614 temp = 0;
8616 /* Are we taking the address of a nested function? */
8617 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8618 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8619 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8620 && ! TREE_STATIC (exp))
8622 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8623 op0 = force_operand (op0, target);
8625 /* If we are taking the address of something erroneous, just
8626 return a zero. */
8627 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8628 return const0_rtx;
8629 else
8631 /* We make sure to pass const0_rtx down if we came in with
8632 ignore set, to avoid doing the cleanups twice for something. */
8633 op0 = expand_expr (TREE_OPERAND (exp, 0),
8634 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8635 (modifier == EXPAND_INITIALIZER
8636 ? modifier : EXPAND_CONST_ADDRESS));
8638 /* If we are going to ignore the result, OP0 will have been set
8639 to const0_rtx, so just return it. Don't get confused and
8640 think we are taking the address of the constant. */
8641 if (ignore)
8642 return op0;
8644 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8645 clever and return a REG when given a MEM. */
8646 op0 = protect_from_queue (op0, 1);
8648 /* We would like the object in memory. If it is a constant, we can
8649 have it be statically allocated into memory. For a non-constant,
8650 we need to allocate some memory and store the value into it. */
8652 if (CONSTANT_P (op0))
8653 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8654 op0);
8655 else if (GET_CODE (op0) == MEM)
8657 mark_temp_addr_taken (op0);
8658 temp = XEXP (op0, 0);
8661 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8662 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8663 || GET_CODE (op0) == PARALLEL)
8665 /* If this object is in a register, it must not
8666 be BLKmode. */
8667 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8668 tree nt = build_qualified_type (inner_type,
8669 (TYPE_QUALS (inner_type)
8670 | TYPE_QUAL_CONST));
8671 rtx memloc = assign_temp (nt, 1, 1, 1);
8673 mark_temp_addr_taken (memloc);
8674 if (GET_CODE (op0) == PARALLEL)
8675 /* Handle calls that pass values in multiple non-contiguous
8676 locations. The Irix 6 ABI has examples of this. */
8677 emit_group_store (memloc, op0,
8678 int_size_in_bytes (inner_type),
8679 TYPE_ALIGN (inner_type));
8680 else
8681 emit_move_insn (memloc, op0);
8682 op0 = memloc;
8685 if (GET_CODE (op0) != MEM)
8686 abort ();
8688 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8690 temp = XEXP (op0, 0);
8691 #ifdef POINTERS_EXTEND_UNSIGNED
8692 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8693 && mode == ptr_mode)
8694 temp = convert_memory_address (ptr_mode, temp);
8695 #endif
8696 return temp;
8699 op0 = force_operand (XEXP (op0, 0), target);
8702 if (flag_force_addr && GET_CODE (op0) != REG)
8703 op0 = force_reg (Pmode, op0);
8705 if (GET_CODE (op0) == REG
8706 && ! REG_USERVAR_P (op0))
8707 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8709 /* If we might have had a temp slot, add an equivalent address
8710 for it. */
8711 if (temp != 0)
8712 update_temp_slot_address (temp, op0);
8714 #ifdef POINTERS_EXTEND_UNSIGNED
8715 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8716 && mode == ptr_mode)
8717 op0 = convert_memory_address (ptr_mode, op0);
8718 #endif
8720 return op0;
8722 case ENTRY_VALUE_EXPR:
8723 abort ();
8725 /* COMPLEX type for Extended Pascal & Fortran */
8726 case COMPLEX_EXPR:
8728 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8729 rtx insns;
8731 /* Get the rtx code of the operands. */
8732 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8733 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8735 if (! target)
8736 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8738 start_sequence ();
8740 /* Move the real (op0) and imaginary (op1) parts to their location. */
8741 emit_move_insn (gen_realpart (mode, target), op0);
8742 emit_move_insn (gen_imagpart (mode, target), op1);
8744 insns = get_insns ();
8745 end_sequence ();
8747 /* Complex construction should appear as a single unit. */
8748 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8749 each with a separate pseudo as destination.
8750 It's not correct for flow to treat them as a unit. */
8751 if (GET_CODE (target) != CONCAT)
8752 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8753 else
8754 emit_insns (insns);
8756 return target;
8759 case REALPART_EXPR:
8760 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8761 return gen_realpart (mode, op0);
8763 case IMAGPART_EXPR:
8764 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8765 return gen_imagpart (mode, op0);
8767 case CONJ_EXPR:
8769 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8770 rtx imag_t;
8771 rtx insns;
8773 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8775 if (! target)
8776 target = gen_reg_rtx (mode);
8778 start_sequence ();
8780 /* Store the realpart and the negated imagpart to target. */
8781 emit_move_insn (gen_realpart (partmode, target),
8782 gen_realpart (partmode, op0));
8784 imag_t = gen_imagpart (partmode, target);
8785 temp = expand_unop (partmode,
8786 ! unsignedp && flag_trapv
8787 && (GET_MODE_CLASS(partmode) == MODE_INT)
8788 ? negv_optab : neg_optab,
8789 gen_imagpart (partmode, op0), imag_t, 0);
8790 if (temp != imag_t)
8791 emit_move_insn (imag_t, temp);
8793 insns = get_insns ();
8794 end_sequence ();
8796 /* Conjugate should appear as a single unit.
8797 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8798 each with a separate pseudo as destination.
8799 It's not correct for flow to treat them as a unit. */
8800 if (GET_CODE (target) != CONCAT)
8801 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8802 else
8803 emit_insns (insns);
8805 return target;
8808 case TRY_CATCH_EXPR:
8810 tree handler = TREE_OPERAND (exp, 1);
8812 expand_eh_region_start ();
8814 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8816 expand_eh_region_end_cleanup (handler);
8818 return op0;
8821 case TRY_FINALLY_EXPR:
8823 tree try_block = TREE_OPERAND (exp, 0);
8824 tree finally_block = TREE_OPERAND (exp, 1);
8825 rtx finally_label = gen_label_rtx ();
8826 rtx done_label = gen_label_rtx ();
8827 rtx return_link = gen_reg_rtx (Pmode);
8828 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8829 (tree) finally_label, (tree) return_link);
8830 TREE_SIDE_EFFECTS (cleanup) = 1;
8832 /* Start a new binding layer that will keep track of all cleanup
8833 actions to be performed. */
8834 expand_start_bindings (2);
8836 target_temp_slot_level = temp_slot_level;
8838 expand_decl_cleanup (NULL_TREE, cleanup);
8839 op0 = expand_expr (try_block, target, tmode, modifier);
8841 preserve_temp_slots (op0);
8842 expand_end_bindings (NULL_TREE, 0, 0);
8843 emit_jump (done_label);
8844 emit_label (finally_label);
8845 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8846 emit_indirect_jump (return_link);
8847 emit_label (done_label);
8848 return op0;
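/* Rough shape of the code emitted above, written with GNU C computed
   gotos purely as an illustration:

       <try body>
       return_link = &&resume; goto finally;   (the cleanup)
     resume:
       goto done;
     finally:
       <finally body>
       goto *return_link;
     done:

   Any other exit from the try body runs the same cleanup, so it too
   loads RETURN_LINK and jumps to FINALLY_LABEL before leaving.  */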
8851 case GOTO_SUBROUTINE_EXPR:
8853 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8854 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8855 rtx return_address = gen_label_rtx ();
8856 emit_move_insn (return_link,
8857 gen_rtx_LABEL_REF (Pmode, return_address));
8858 emit_jump (subr);
8859 emit_label (return_address);
8860 return const0_rtx;
8863 case VA_ARG_EXPR:
8864 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8866 case EXC_PTR_EXPR:
8867 return get_exception_pointer (cfun);
8869 default:
8870 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8873 /* Here to do an ordinary binary operator, generating an instruction
8874 from the optab already placed in `this_optab'. */
8875 binop:
8876 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8877 subtarget = 0;
8878 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8879 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8880 binop2:
8881 temp = expand_binop (mode, this_optab, op0, op1, target,
8882 unsignedp, OPTAB_LIB_WIDEN);
8883 if (temp == 0)
8884 abort ();
8885 return temp;
8888 /* Similar to expand_expr, except that we don't specify a target, target
8889 mode, or modifier and we return the alignment of the inner type. This is
8890 used in cases where it is not necessary to align the result to the
8891 alignment of its type as long as we know the alignment of the result, for
8892 example for comparisons of BLKmode values. */
8894 static rtx
8895 expand_expr_unaligned (exp, palign)
8896 register tree exp;
8897 unsigned int *palign;
8899 register rtx op0;
8900 tree type = TREE_TYPE (exp);
8901 register enum machine_mode mode = TYPE_MODE (type);
8903 /* Default the alignment we return to that of the type. */
8904 *palign = TYPE_ALIGN (type);
8906 /* The only case in which we do anything special is if the resulting mode
8907 is BLKmode. */
8908 if (mode != BLKmode)
8909 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8911 switch (TREE_CODE (exp))
8913 case CONVERT_EXPR:
8914 case NOP_EXPR:
8915 case NON_LVALUE_EXPR:
8916 /* Conversions between BLKmode values don't change the underlying
8917 alignment or value. */
8918 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8919 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8920 break;
8922 case ARRAY_REF:
8923 /* Much of the code for this case is copied directly from expand_expr.
8924 We need to duplicate it here because we will do something different
8925 in the fall-through case, so we need to handle the same exceptions
8926 it does. */
8928 tree array = TREE_OPERAND (exp, 0);
8929 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8930 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8931 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8932 HOST_WIDE_INT i;
8934 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8935 abort ();
8937 /* Optimize the special-case of a zero lower bound.
8939 We convert the low_bound to sizetype to avoid some problems
8940 with constant folding. (E.g. suppose the lower bound is 1,
8941 and its mode is QI. Without the conversion, (ARRAY
8942 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8943 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8945 if (! integer_zerop (low_bound))
8946 index = size_diffop (index, convert (sizetype, low_bound));
8948 /* If this is a constant index into a constant array,
8949 just get the value from the array. Handle both the cases when
8950 we have an explicit constructor and when our operand is a variable
8951 that was declared const. */
8953 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8954 && host_integerp (index, 0)
8955 && 0 > compare_tree_int (index,
8956 list_length (CONSTRUCTOR_ELTS
8957 (TREE_OPERAND (exp, 0)))))
8959 tree elem;
8961 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8962 i = tree_low_cst (index, 0);
8963 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8966 if (elem)
8967 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8970 else if (optimize >= 1
8971 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8972 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8973 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8975 if (TREE_CODE (index) == INTEGER_CST)
8977 tree init = DECL_INITIAL (array);
8979 if (TREE_CODE (init) == CONSTRUCTOR)
8981 tree elem;
8983 for (elem = CONSTRUCTOR_ELTS (init);
8984 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8985 elem = TREE_CHAIN (elem))
8988 if (elem)
8989 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8990 palign);
8995 /* Fall through. */
8997 case COMPONENT_REF:
8998 case BIT_FIELD_REF:
8999 case ARRAY_RANGE_REF:
9000 /* If the operand is a CONSTRUCTOR, we can just extract the
9001 appropriate field if it is present. Don't do this if we have
9002 already written the data since we want to refer to that copy
9003 and varasm.c assumes that's what we'll do. */
9004 if (TREE_CODE (exp) == COMPONENT_REF
9005 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9006 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9008 tree elt;
9010 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9011 elt = TREE_CHAIN (elt))
9012 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9013 /* Note that unlike the case in expand_expr, we know this is
9014 BLKmode and hence not an integer. */
9015 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9019 enum machine_mode mode1;
9020 HOST_WIDE_INT bitsize, bitpos;
9021 tree offset;
9022 int volatilep = 0;
9023 unsigned int alignment;
9024 int unsignedp;
9025 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9026 &mode1, &unsignedp, &volatilep,
9027 &alignment);
9029 /* If we got back the original object, something is wrong. Perhaps
9030 we are evaluating an expression too early. In any event, don't
9031 infinitely recurse. */
9032 if (tem == exp)
9033 abort ();
9035 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9037 /* If this is a constant, put it into a register if it is a
9038 legitimate constant and OFFSET is 0 and memory if it isn't. */
9039 if (CONSTANT_P (op0))
9041 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9043 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9044 && offset == 0)
9045 op0 = force_reg (inner_mode, op0);
9046 else
9047 op0 = validize_mem (force_const_mem (inner_mode, op0));
9050 if (offset != 0)
9052 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9054 /* If this object is in a register, put it into memory.
9055 This case can't occur in C, but can in Ada if we have
9056 unchecked conversion of an expression from a scalar type to
9057 an array or record type. */
9058 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9059 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9061 tree nt = build_qualified_type (TREE_TYPE (tem),
9062 (TYPE_QUALS (TREE_TYPE (tem))
9063 | TYPE_QUAL_CONST));
9064 rtx memloc = assign_temp (nt, 1, 1, 1);
9066 mark_temp_addr_taken (memloc);
9067 emit_move_insn (memloc, op0);
9068 op0 = memloc;
9071 if (GET_CODE (op0) != MEM)
9072 abort ();
9074 if (GET_MODE (offset_rtx) != ptr_mode)
9076 #ifdef POINTERS_EXTEND_UNSIGNED
9077 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9078 #else
9079 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9080 #endif
9083 op0 = change_address (op0, VOIDmode,
9084 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9085 force_reg (ptr_mode,
9086 offset_rtx)));
9089 /* Don't forget about volatility even if this is a bitfield. */
9090 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9092 op0 = copy_rtx (op0);
9093 MEM_VOLATILE_P (op0) = 1;
9096 /* Check the access. */
9097 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9099 rtx to;
9100 int size;
9102 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9103 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9105 /* Check the access right of the pointer. */
9106 in_check_memory_usage = 1;
9107 if (size > BITS_PER_UNIT)
9108 emit_library_call (chkr_check_addr_libfunc,
9109 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9110 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9111 TYPE_MODE (sizetype),
9112 GEN_INT (MEMORY_USE_RO),
9113 TYPE_MODE (integer_type_node));
9114 in_check_memory_usage = 0;
9117 /* In cases where an aligned union has an unaligned object
9118 as a field, we might be extracting a BLKmode value from
9119 an integer-mode (e.g., SImode) object. Handle this case
9120 by doing the extract into an object as wide as the field
9121 (which we know to be the width of a basic mode), then
9122 storing into memory, and changing the mode to BLKmode.
9123 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9124 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9125 if (mode1 == VOIDmode
9126 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9127 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9128 && (TYPE_ALIGN (type) > alignment
9129 || bitpos % TYPE_ALIGN (type) != 0)))
9131 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9133 if (ext_mode == BLKmode)
9135 /* In this case, BITPOS must start at a byte boundary. */
9136 if (GET_CODE (op0) != MEM
9137 || bitpos % BITS_PER_UNIT != 0)
9138 abort ();
9140 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9142 else
9144 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9145 TYPE_QUAL_CONST);
9146 rtx new = assign_temp (nt, 0, 1, 1);
9148 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9149 unsignedp, NULL_RTX, ext_mode,
9150 ext_mode, alignment,
9151 int_size_in_bytes (TREE_TYPE (tem)));
9153 /* If the result is a record type and BITSIZE is narrower than
9154 the mode of OP0, an integral mode, and this is a big endian
9155 machine, we must put the field into the high-order bits. */
9156 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9157 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9158 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9159 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9160 size_int (GET_MODE_BITSIZE
9161 (GET_MODE (op0))
9162 - bitsize),
9163 op0, 1);
9165 emit_move_insn (new, op0);
9166 op0 = copy_rtx (new);
9167 PUT_MODE (op0, BLKmode);
9170 else
9171 /* Get a reference to just this component. */
9172 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9174 set_mem_alias_set (op0, get_alias_set (exp));
9176 /* Adjust the alignment in case the bit position is not
9177 a multiple of the alignment of the inner object. */
9178 while (bitpos % alignment != 0)
9179 alignment >>= 1;
9181 if (GET_CODE (XEXP (op0, 0)) == REG)
9182 mark_reg_pointer (XEXP (op0, 0), alignment);
9184 MEM_IN_STRUCT_P (op0) = 1;
9185 MEM_VOLATILE_P (op0) |= volatilep;
9187 *palign = alignment;
9188 return op0;
9191 default:
9192 break;
9196 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9199 /* Return the tree node if ARG corresponds to a string constant or zero
9200 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9201 in bytes within the string that ARG is accessing. The type of the
9202 offset will be `sizetype'. */
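/* Worked example (illustrative only; the exact trees depend on the
   front end): for an argument like "abcdef" + 3, ARG is a PLUS_EXPR
   whose first operand is an ADDR_EXPR of the STRING_CST "abcdef" and
   whose second operand is 3.  The code below returns that STRING_CST
   and sets *PTR_OFFSET to the offset 3 converted to `sizetype'.  */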
9204 tree
9205 string_constant (arg, ptr_offset)
9206 tree arg;
9207 tree *ptr_offset;
9209 STRIP_NOPS (arg);
9211 if (TREE_CODE (arg) == ADDR_EXPR
9212 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9214 *ptr_offset = size_zero_node;
9215 return TREE_OPERAND (arg, 0);
9217 else if (TREE_CODE (arg) == PLUS_EXPR)
9219 tree arg0 = TREE_OPERAND (arg, 0);
9220 tree arg1 = TREE_OPERAND (arg, 1);
9222 STRIP_NOPS (arg0);
9223 STRIP_NOPS (arg1);
9225 if (TREE_CODE (arg0) == ADDR_EXPR
9226 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9228 *ptr_offset = convert (sizetype, arg1);
9229 return TREE_OPERAND (arg0, 0);
9231 else if (TREE_CODE (arg1) == ADDR_EXPR
9232 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9234 *ptr_offset = convert (sizetype, arg0);
9235 return TREE_OPERAND (arg1, 0);
9239 return 0;
9242 /* Expand code for a post- or pre- increment or decrement
9243 and return the RTX for the result.
9244 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
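/* Rough illustration of the contract: for the C expression `i++' (a
   POSTINCREMENT_EXPR, so POST is 1) the rtx returned holds the old
   value of `i', while for `++i' (POST is 0) the rtx returned reflects
   the incremented value.  IGNORE is nonzero when the caller will not
   use the value at all, e.g. `i++;' as a statement expression.  */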
9246 static rtx
9247 expand_increment (exp, post, ignore)
9248 register tree exp;
9249 int post, ignore;
9251 register rtx op0, op1;
9252 register rtx temp, value;
9253 register tree incremented = TREE_OPERAND (exp, 0);
9254 optab this_optab = add_optab;
9255 int icode;
9256 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9257 int op0_is_copy = 0;
9258 int single_insn = 0;
9259 /* 1 means we can't store into OP0 directly,
9260 because it is a subreg narrower than a word,
9261 and we don't dare clobber the rest of the word. */
9262 int bad_subreg = 0;
9264 /* Stabilize any component ref that might need to be
9265 evaluated more than once below. */
9266 if (!post
9267 || TREE_CODE (incremented) == BIT_FIELD_REF
9268 || (TREE_CODE (incremented) == COMPONENT_REF
9269 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9270 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9271 incremented = stabilize_reference (incremented);
9272 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9273 ones into save exprs so that they don't accidentally get evaluated
9274 more than once by the code below. */
9275 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9276 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9277 incremented = save_expr (incremented);
9279 /* Compute the operands as RTX.
9280 Note whether OP0 is the actual lvalue or a copy of it:
9281 I believe it is a copy iff it is a register or subreg
9282 and insns were generated in computing it. */
9284 temp = get_last_insn ();
9285 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9287 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9288 in place but instead must do sign- or zero-extension during assignment,
9289 so we copy it into a new register and let the code below use it as
9290 a copy.
9292 Note that we can safely modify this SUBREG since it is known not to be
9293 shared (it was made by the expand_expr call above). */
9295 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9297 if (post)
9298 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9299 else
9300 bad_subreg = 1;
9302 else if (GET_CODE (op0) == SUBREG
9303 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9305 /* We cannot increment this SUBREG in place. If we are
9306 post-incrementing, get a copy of the old value. Otherwise,
9307 just mark that we cannot increment in place. */
9308 if (post)
9309 op0 = copy_to_reg (op0);
9310 else
9311 bad_subreg = 1;
9314 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9315 && temp != get_last_insn ());
9316 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9317 EXPAND_MEMORY_USE_BAD);
9319 /* Decide whether incrementing or decrementing. */
9320 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9321 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9322 this_optab = sub_optab;
9324 /* Convert decrement by a constant into a negative increment. */
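/* E.g. a decrement by the constant 4 is rewritten here as an addition
   of -4 before the optab handler is looked up below.  */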
9325 if (this_optab == sub_optab
9326 && GET_CODE (op1) == CONST_INT)
9328 op1 = GEN_INT (-INTVAL (op1));
9329 this_optab = add_optab;
9332 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9333 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9335 /* For a preincrement, see if we can do this with a single instruction. */
9336 if (!post)
9338 icode = (int) this_optab->handlers[(int) mode].insn_code;
9339 if (icode != (int) CODE_FOR_nothing
9340 /* Make sure that OP0 is valid for operands 0 and 1
9341 of the insn we want to queue. */
9342 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9343 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9344 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9345 single_insn = 1;
9348 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9349 then we cannot just increment OP0. We must therefore contrive to
9350 increment the original value. Then, for postincrement, we can return
9351 OP0 since it is a copy of the old value. For preincrement, expand here
9352 unless we can do it with a single insn.
9354 Likewise if storing directly into OP0 would clobber high bits
9355 we need to preserve (bad_subreg). */
9356 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9358 /* This is the easiest way to increment the value wherever it is.
9359 Problems with multiple evaluation of INCREMENTED are prevented
9360 because either (1) it is a component_ref or preincrement,
9361 in which case it was stabilized above, or (2) it is an array_ref
9362 with constant index in an array in a register, which is
9363 safe to reevaluate. */
9364 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9365 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9366 ? MINUS_EXPR : PLUS_EXPR),
9367 TREE_TYPE (exp),
9368 incremented,
9369 TREE_OPERAND (exp, 1));
9371 while (TREE_CODE (incremented) == NOP_EXPR
9372 || TREE_CODE (incremented) == CONVERT_EXPR)
9374 newexp = convert (TREE_TYPE (incremented), newexp);
9375 incremented = TREE_OPERAND (incremented, 0);
9378 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9379 return post ? op0 : temp;
9382 if (post)
9384 /* We have a true reference to the value in OP0.
9385 If there is an insn to add or subtract in this mode, queue it.
9386 Queueing the increment insn avoids the register shuffling
9387 that often results if we must increment now and first save
9388 the old value for subsequent use. */
9390 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9391 op0 = stabilize (op0);
9392 #endif
9394 icode = (int) this_optab->handlers[(int) mode].insn_code;
9395 if (icode != (int) CODE_FOR_nothing
9396 /* Make sure that OP0 is valid for operands 0 and 1
9397 of the insn we want to queue. */
9398 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9399 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9401 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9402 op1 = force_reg (mode, op1);
9404 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9406 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9408 rtx addr = (general_operand (XEXP (op0, 0), mode)
9409 ? force_reg (Pmode, XEXP (op0, 0))
9410 : copy_to_reg (XEXP (op0, 0)));
9411 rtx temp, result;
9413 op0 = replace_equiv_address (op0, addr);
9414 temp = force_reg (GET_MODE (op0), op0);
9415 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9416 op1 = force_reg (mode, op1);
9418 /* The increment queue is LIFO, thus we have to `queue'
9419 the instructions in reverse order. */
9420 enqueue_insn (op0, gen_move_insn (op0, temp));
9421 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9422 return result;
9426 /* Preincrement, or we can't increment with one simple insn. */
9427 if (post)
9428 /* Save a copy of the value before inc or dec, to return it later. */
9429 temp = value = copy_to_reg (op0);
9430 else
9431 /* Arrange to return the incremented value. */
9432 /* Copy the rtx because expand_binop will protect from the queue,
9433 and the results of that would be invalid for us to return
9434 if our caller does emit_queue before using our result. */
9435 temp = copy_rtx (value = op0);
9437 /* Increment however we can. */
9438 op1 = expand_binop (mode, this_optab, value, op1,
9439 current_function_check_memory_usage ? NULL_RTX : op0,
9440 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9441 /* Make sure the value is stored into OP0. */
9442 if (op1 != op0)
9443 emit_move_insn (op0, op1);
9445 return temp;
9448 /* At the start of a function, record that we have no previously-pushed
9449 arguments waiting to be popped. */
9451 void
9452 init_pending_stack_adjust ()
9454 pending_stack_adjust = 0;
9457 /* When exiting from function, if safe, clear out any pending stack adjust
9458 so the adjustment won't get done.
9460 Note, if the current function calls alloca, then it must have a
9461 frame pointer regardless of the value of flag_omit_frame_pointer. */
9463 void
9464 clear_pending_stack_adjust ()
9466 #ifdef EXIT_IGNORE_STACK
9467 if (optimize > 0
9468 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9469 && EXIT_IGNORE_STACK
9470 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9471 && ! flag_inline_functions)
9473 stack_pointer_delta -= pending_stack_adjust,
9474 pending_stack_adjust = 0;
9476 #endif
9479 /* Pop any previously-pushed arguments that have not been popped yet. */
9481 void
9482 do_pending_stack_adjust ()
9484 if (inhibit_defer_pop == 0)
9486 if (pending_stack_adjust != 0)
9487 adjust_stack (GEN_INT (pending_stack_adjust));
9488 pending_stack_adjust = 0;
9492 /* Expand conditional expressions. */
9494 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9495 LABEL is an rtx of code CODE_LABEL, in this function and all the
9496 functions here. */
9498 void
9499 jumpifnot (exp, label)
9500 tree exp;
9501 rtx label;
9503 do_jump (exp, label, NULL_RTX);
9506 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9508 void
9509 jumpif (exp, label)
9510 tree exp;
9511 rtx label;
9513 do_jump (exp, NULL_RTX, label);
9516 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9517 the result is zero, or IF_TRUE_LABEL if the result is one.
9518 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9519 meaning fall through in that case.
9521 do_jump always does any pending stack adjust except when it does not
9522 actually perform a jump. An example where there is no jump
9523 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9525 This function is responsible for optimizing cases such as
9526 &&, || and comparison operators in EXP. */
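/* A small illustration of the drop-through convention: for
   `if (a && b) ... else ...' the TRUTH_ANDIF_EXPR case below jumps to
   IF_FALSE_LABEL as soon as `a' is found to be zero and only evaluates
   `b' on the path where `a' was nonzero; if the caller passed
   IF_FALSE_LABEL as zero, a drop-through label is created so control
   simply falls through instead of jumping.  */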
9528 void
9529 do_jump (exp, if_false_label, if_true_label)
9530 tree exp;
9531 rtx if_false_label, if_true_label;
9533 register enum tree_code code = TREE_CODE (exp);
9534 /* Some cases need to create a label to jump to
9535 in order to properly fall through.
9536 These cases set DROP_THROUGH_LABEL nonzero. */
9537 rtx drop_through_label = 0;
9538 rtx temp;
9539 int i;
9540 tree type;
9541 enum machine_mode mode;
9543 #ifdef MAX_INTEGER_COMPUTATION_MODE
9544 check_max_integer_computation_mode (exp);
9545 #endif
9547 emit_queue ();
9549 switch (code)
9551 case ERROR_MARK:
9552 break;
9554 case INTEGER_CST:
9555 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9556 if (temp)
9557 emit_jump (temp);
9558 break;
9560 #if 0
9561 /* This is not true with #pragma weak */
9562 case ADDR_EXPR:
9563 /* The address of something can never be zero. */
9564 if (if_true_label)
9565 emit_jump (if_true_label);
9566 break;
9567 #endif
9569 case NOP_EXPR:
9570 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9571 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9572 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9573 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9574 goto normal;
9575 case CONVERT_EXPR:
9576 /* If we are narrowing the operand, we have to do the compare in the
9577 narrower mode. */
9578 if ((TYPE_PRECISION (TREE_TYPE (exp))
9579 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9580 goto normal;
9581 case NON_LVALUE_EXPR:
9582 case REFERENCE_EXPR:
9583 case ABS_EXPR:
9584 case NEGATE_EXPR:
9585 case LROTATE_EXPR:
9586 case RROTATE_EXPR:
9587 /* These cannot change zero->non-zero or vice versa. */
9588 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9589 break;
9591 case WITH_RECORD_EXPR:
9592 /* Put the object on the placeholder list, recurse through our first
9593 operand, and pop the list. */
9594 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9595 placeholder_list);
9596 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9597 placeholder_list = TREE_CHAIN (placeholder_list);
9598 break;
9600 #if 0
9601 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9602 a test and can be longer if the test is eliminated. */
9603 case PLUS_EXPR:
9604 /* Reduce to minus. */
9605 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9606 TREE_OPERAND (exp, 0),
9607 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9608 TREE_OPERAND (exp, 1))));
9609 /* Process as MINUS. */
9610 #endif
9612 case MINUS_EXPR:
9613 /* Non-zero iff operands of minus differ. */
9614 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9615 TREE_OPERAND (exp, 0),
9616 TREE_OPERAND (exp, 1)),
9617 NE, NE, if_false_label, if_true_label);
9618 break;
9620 case BIT_AND_EXPR:
9621 /* If we are AND'ing with a small constant, do this comparison in the
9622 smallest type that fits. If the machine doesn't have comparisons
9623 that small, it will be converted back to the wider comparison.
9624 This helps if we are testing the sign bit of a narrower object.
9625 combine can't do this for us because it can't know whether a
9626 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
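/* For instance (assuming the target has QImode comparisons), a test
   like `(x & 0x80) != 0' on a 32-bit `x' has tree_floor_log2 (0x80)
   == 7, so MODE becomes QImode and the jump is done on the value
   converted to an 8-bit type, which lets the sign bit of the narrower
   object be tested directly.  */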
9628 if (! SLOW_BYTE_ACCESS
9629 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9630 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9631 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9632 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9633 && (type = type_for_mode (mode, 1)) != 0
9634 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9635 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9636 != CODE_FOR_nothing))
9638 do_jump (convert (type, exp), if_false_label, if_true_label);
9639 break;
9641 goto normal;
9643 case TRUTH_NOT_EXPR:
9644 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9645 break;
9647 case TRUTH_ANDIF_EXPR:
9648 if (if_false_label == 0)
9649 if_false_label = drop_through_label = gen_label_rtx ();
9650 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9651 start_cleanup_deferral ();
9652 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9653 end_cleanup_deferral ();
9654 break;
9656 case TRUTH_ORIF_EXPR:
9657 if (if_true_label == 0)
9658 if_true_label = drop_through_label = gen_label_rtx ();
9659 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9660 start_cleanup_deferral ();
9661 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9662 end_cleanup_deferral ();
9663 break;
9665 case COMPOUND_EXPR:
9666 push_temp_slots ();
9667 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9668 preserve_temp_slots (NULL_RTX);
9669 free_temp_slots ();
9670 pop_temp_slots ();
9671 emit_queue ();
9672 do_pending_stack_adjust ();
9673 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9674 break;
9676 case COMPONENT_REF:
9677 case BIT_FIELD_REF:
9678 case ARRAY_REF:
9679 case ARRAY_RANGE_REF:
9681 HOST_WIDE_INT bitsize, bitpos;
9682 int unsignedp;
9683 enum machine_mode mode;
9684 tree type;
9685 tree offset;
9686 int volatilep = 0;
9687 unsigned int alignment;
9689 /* Get description of this reference. We don't actually care
9690 about the underlying object here. */
9691 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9692 &unsignedp, &volatilep, &alignment);
9694 type = type_for_size (bitsize, unsignedp);
9695 if (! SLOW_BYTE_ACCESS
9696 && type != 0 && bitsize >= 0
9697 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9698 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9699 != CODE_FOR_nothing))
9701 do_jump (convert (type, exp), if_false_label, if_true_label);
9702 break;
9704 goto normal;
9707 case COND_EXPR:
9708 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9709 if (integer_onep (TREE_OPERAND (exp, 1))
9710 && integer_zerop (TREE_OPERAND (exp, 2)))
9711 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9713 else if (integer_zerop (TREE_OPERAND (exp, 1))
9714 && integer_onep (TREE_OPERAND (exp, 2)))
9715 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9717 else
9719 register rtx label1 = gen_label_rtx ();
9720 drop_through_label = gen_label_rtx ();
9722 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9724 start_cleanup_deferral ();
9725 /* Now the THEN-expression. */
9726 do_jump (TREE_OPERAND (exp, 1),
9727 if_false_label ? if_false_label : drop_through_label,
9728 if_true_label ? if_true_label : drop_through_label);
9729 /* In case the do_jump just above never jumps. */
9730 do_pending_stack_adjust ();
9731 emit_label (label1);
9733 /* Now the ELSE-expression. */
9734 do_jump (TREE_OPERAND (exp, 2),
9735 if_false_label ? if_false_label : drop_through_label,
9736 if_true_label ? if_true_label : drop_through_label);
9737 end_cleanup_deferral ();
9739 break;
9741 case EQ_EXPR:
9743 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9745 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9746 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9748 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9749 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9750 do_jump
9751 (fold
9752 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9753 fold (build (EQ_EXPR, TREE_TYPE (exp),
9754 fold (build1 (REALPART_EXPR,
9755 TREE_TYPE (inner_type),
9756 exp0)),
9757 fold (build1 (REALPART_EXPR,
9758 TREE_TYPE (inner_type),
9759 exp1)))),
9760 fold (build (EQ_EXPR, TREE_TYPE (exp),
9761 fold (build1 (IMAGPART_EXPR,
9762 TREE_TYPE (inner_type),
9763 exp0)),
9764 fold (build1 (IMAGPART_EXPR,
9765 TREE_TYPE (inner_type),
9766 exp1)))))),
9767 if_false_label, if_true_label);
9770 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9771 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9773 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9774 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9775 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9776 else
9777 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9778 break;
9781 case NE_EXPR:
9783 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9785 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9786 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9788 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9789 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9790 do_jump
9791 (fold
9792 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9793 fold (build (NE_EXPR, TREE_TYPE (exp),
9794 fold (build1 (REALPART_EXPR,
9795 TREE_TYPE (inner_type),
9796 exp0)),
9797 fold (build1 (REALPART_EXPR,
9798 TREE_TYPE (inner_type),
9799 exp1)))),
9800 fold (build (NE_EXPR, TREE_TYPE (exp),
9801 fold (build1 (IMAGPART_EXPR,
9802 TREE_TYPE (inner_type),
9803 exp0)),
9804 fold (build1 (IMAGPART_EXPR,
9805 TREE_TYPE (inner_type),
9806 exp1)))))),
9807 if_false_label, if_true_label);
9810 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9811 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9813 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9814 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9815 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9816 else
9817 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9818 break;
9821 case LT_EXPR:
9822 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9823 if (GET_MODE_CLASS (mode) == MODE_INT
9824 && ! can_compare_p (LT, mode, ccp_jump))
9825 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9826 else
9827 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9828 break;
9830 case LE_EXPR:
9831 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9832 if (GET_MODE_CLASS (mode) == MODE_INT
9833 && ! can_compare_p (LE, mode, ccp_jump))
9834 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9835 else
9836 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9837 break;
9839 case GT_EXPR:
9840 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9841 if (GET_MODE_CLASS (mode) == MODE_INT
9842 && ! can_compare_p (GT, mode, ccp_jump))
9843 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9844 else
9845 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9846 break;
9848 case GE_EXPR:
9849 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9850 if (GET_MODE_CLASS (mode) == MODE_INT
9851 && ! can_compare_p (GE, mode, ccp_jump))
9852 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9853 else
9854 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9855 break;
9857 case UNORDERED_EXPR:
9858 case ORDERED_EXPR:
9860 enum rtx_code cmp, rcmp;
9861 int do_rev;
9863 if (code == UNORDERED_EXPR)
9864 cmp = UNORDERED, rcmp = ORDERED;
9865 else
9866 cmp = ORDERED, rcmp = UNORDERED;
9867 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9869 do_rev = 0;
9870 if (! can_compare_p (cmp, mode, ccp_jump)
9871 && (can_compare_p (rcmp, mode, ccp_jump)
9872 /* If the target doesn't provide either UNORDERED or ORDERED
9873 comparisons, canonicalize on UNORDERED for the library. */
9874 || rcmp == UNORDERED))
9875 do_rev = 1;
9877 if (! do_rev)
9878 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9879 else
9880 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9882 break;
9885 enum rtx_code rcode1;
9886 enum tree_code tcode2;
9888 case UNLT_EXPR:
9889 rcode1 = UNLT;
9890 tcode2 = LT_EXPR;
9891 goto unordered_bcc;
9892 case UNLE_EXPR:
9893 rcode1 = UNLE;
9894 tcode2 = LE_EXPR;
9895 goto unordered_bcc;
9896 case UNGT_EXPR:
9897 rcode1 = UNGT;
9898 tcode2 = GT_EXPR;
9899 goto unordered_bcc;
9900 case UNGE_EXPR:
9901 rcode1 = UNGE;
9902 tcode2 = GE_EXPR;
9903 goto unordered_bcc;
9904 case UNEQ_EXPR:
9905 rcode1 = UNEQ;
9906 tcode2 = EQ_EXPR;
9907 goto unordered_bcc;
9909 unordered_bcc:
9910 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9911 if (can_compare_p (rcode1, mode, ccp_jump))
9912 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9913 if_true_label);
9914 else
9916 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9917 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9918 tree cmp0, cmp1;
9920 /* If the target doesn't support combined unordered
9921 compares, decompose into UNORDERED + comparison. */
9922 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9923 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9924 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9925 do_jump (exp, if_false_label, if_true_label);
9928 break;
9930 /* Special case:
9931 __builtin_expect (<test>, 0) and
9932 __builtin_expect (<test>, 1)
9934 We need to do this here, so that <test> is not converted to an SCC
9935 operation on machines that use condition code registers and COMPARE
9936 like the PowerPC, and then the jump is done based on whether the SCC
9937 operation produced a 1 or 0. */
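/* For example, `if (__builtin_expect (p != 0, 1))' reaches the
   CALL_EXPR case below with the test as the first argument and the
   expected value 1 as the second; expand_builtin_expect_jump then gets
   a chance to emit the branch directly instead of first materializing
   a 0/1 value and branching on that.  */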
9938 case CALL_EXPR:
9939 /* Check for a built-in function. */
9940 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9942 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9943 tree arglist = TREE_OPERAND (exp, 1);
9945 if (TREE_CODE (fndecl) == FUNCTION_DECL
9946 && DECL_BUILT_IN (fndecl)
9947 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9948 && arglist != NULL_TREE
9949 && TREE_CHAIN (arglist) != NULL_TREE)
9951 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9952 if_true_label);
9954 if (seq != NULL_RTX)
9956 emit_insn (seq);
9957 return;
9961 /* fall through and generate the normal code. */
9963 default:
9964 normal:
9965 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9966 #if 0
9967 /* This is not needed any more and causes poor code since it causes
9968 comparisons and tests from non-SI objects to have different code
9969 sequences. */
9970 /* Copy to register to avoid generating bad insns by cse
9971 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9972 if (!cse_not_expected && GET_CODE (temp) == MEM)
9973 temp = copy_to_reg (temp);
9974 #endif
9975 do_pending_stack_adjust ();
9976 /* Do any postincrements in the expression that was tested. */
9977 emit_queue ();
9979 if (GET_CODE (temp) == CONST_INT
9980 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9981 || GET_CODE (temp) == LABEL_REF)
9983 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9984 if (target)
9985 emit_jump (target);
9987 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9988 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9989 /* Note swapping the labels gives us not-equal. */
9990 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9991 else if (GET_MODE (temp) != VOIDmode)
9992 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9993 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9994 GET_MODE (temp), NULL_RTX, 0,
9995 if_false_label, if_true_label);
9996 else
9997 abort ();
10000 if (drop_through_label)
10002 /* If do_jump produces code that might be jumped around,
10003 do any stack adjusts from that code, before the place
10004 where control merges in. */
10005 do_pending_stack_adjust ();
10006 emit_label (drop_through_label);
10010 /* Given a comparison expression EXP for values too wide to be compared
10011 with one insn, test the comparison and jump to the appropriate label.
10012 The code of EXP is ignored; we always test GT if SWAP is 0,
10013 and LT if SWAP is 1. */
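/* For instance, when a DImode `a < b' cannot be compared with one insn
   on a 32-bit target, the LT_EXPR case above calls this with
   SWAP == 1: the operands are expanded in exchanged order, so the
   word-by-word test performed by do_jump_by_parts_greater_rtx is still
   a GT test.  */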
10015 static void
10016 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10017 tree exp;
10018 int swap;
10019 rtx if_false_label, if_true_label;
10021 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10022 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10023 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10024 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10026 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10029 /* Compare OP0 with OP1, word at a time, in mode MODE.
10030 UNSIGNEDP says to do unsigned comparison.
10031 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10033 void
10034 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10035 enum machine_mode mode;
10036 int unsignedp;
10037 rtx op0, op1;
10038 rtx if_false_label, if_true_label;
10040 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10041 rtx drop_through_label = 0;
10042 int i;
10044 if (! if_true_label || ! if_false_label)
10045 drop_through_label = gen_label_rtx ();
10046 if (! if_true_label)
10047 if_true_label = drop_through_label;
10048 if (! if_false_label)
10049 if_false_label = drop_through_label;
10051 /* Compare a word at a time, high order first. */
10052 for (i = 0; i < nwords; i++)
10054 rtx op0_word, op1_word;
10056 if (WORDS_BIG_ENDIAN)
10058 op0_word = operand_subword_force (op0, i, mode);
10059 op1_word = operand_subword_force (op1, i, mode);
10061 else
10063 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10064 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10067 /* All but the high-order word must be compared as unsigned. */
10068 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10069 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10070 NULL_RTX, if_true_label);
10072 /* Consider lower words only if these are equal. */
10073 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10074 NULL_RTX, 0, NULL_RTX, if_false_label);
10077 if (if_false_label)
10078 emit_jump (if_false_label);
10079 if (drop_through_label)
10080 emit_label (drop_through_label);
10083 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10084 with one insn, test the comparison and jump to the appropriate label. */
10086 static void
10087 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10088 tree exp;
10089 rtx if_false_label, if_true_label;
10091 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10092 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10093 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10094 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10095 int i;
10096 rtx drop_through_label = 0;
10098 if (! if_false_label)
10099 drop_through_label = if_false_label = gen_label_rtx ();
10101 for (i = 0; i < nwords; i++)
10102 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10103 operand_subword_force (op1, i, mode),
10104 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10105 word_mode, NULL_RTX, 0, if_false_label,
10106 NULL_RTX);
10108 if (if_true_label)
10109 emit_jump (if_true_label);
10110 if (drop_through_label)
10111 emit_label (drop_through_label);
10114 /* Jump according to whether OP0 is 0.
10115 We assume that OP0 has an integer mode that is too wide
10116 for the available compare insns. */
10118 void
10119 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10120 rtx op0;
10121 rtx if_false_label, if_true_label;
10123 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10124 rtx part;
10125 int i;
10126 rtx drop_through_label = 0;
10128 /* The fastest way of doing this comparison on almost any machine is to
10129 "or" all the words and compare the result. If all have to be loaded
10130 from memory and this is a very wide item, it's possible this may
10131 be slower, but that's highly unlikely. */
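/* For instance, a DImode OP0 on a 32-bit target is handled by OR-ing
   its two SImode words into PART and comparing PART against zero once,
   rather than branching on each word separately.  */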
10133 part = gen_reg_rtx (word_mode);
10134 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10135 for (i = 1; i < nwords && part != 0; i++)
10136 part = expand_binop (word_mode, ior_optab, part,
10137 operand_subword_force (op0, i, GET_MODE (op0)),
10138 part, 1, OPTAB_WIDEN);
10140 if (part != 0)
10142 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10143 NULL_RTX, 0, if_false_label, if_true_label);
10145 return;
10148 /* If we couldn't do the "or" simply, do this with a series of compares. */
10149 if (! if_false_label)
10150 drop_through_label = if_false_label = gen_label_rtx ();
10152 for (i = 0; i < nwords; i++)
10153 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10154 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10155 if_false_label, NULL_RTX);
10157 if (if_true_label)
10158 emit_jump (if_true_label);
10160 if (drop_through_label)
10161 emit_label (drop_through_label);
10164 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10165 (including code to compute the values to be compared)
10166 and set (CC0) according to the result.
10167 The decision as to signed or unsigned comparison must be made by the caller.
10169 We force a stack adjustment unless there are currently
10170 things pushed on the stack that aren't yet used.
10172 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10173 compared.
10175 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10176 size of MODE should be used. */
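/* The value returned is a comparison rtx such as (gt (cc0) (const_int 0)),
   suitable for use as the condition of a conditional branch; the
   compare itself has already been emitted by emit_cmp_insn below.  */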
10178 rtx
10179 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10180 register rtx op0, op1;
10181 enum rtx_code code;
10182 int unsignedp;
10183 enum machine_mode mode;
10184 rtx size;
10185 unsigned int align;
10187 rtx tem;
10189 /* If one operand is constant, make it the second one. Only do this
10190 if the other operand is not constant as well. */
10192 if (swap_commutative_operands_p (op0, op1))
10194 tem = op0;
10195 op0 = op1;
10196 op1 = tem;
10197 code = swap_condition (code);
10200 if (flag_force_mem)
10202 op0 = force_not_mem (op0);
10203 op1 = force_not_mem (op1);
10206 do_pending_stack_adjust ();
10208 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10209 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10210 return tem;
10212 #if 0
10213 /* There's no need to do this now that combine.c can eliminate lots of
10214 sign extensions. This can be less efficient in certain cases on other
10215 machines. */
10217 /* If this is a signed equality comparison, we can do it as an
10218 unsigned comparison since zero-extension is cheaper than sign
10219 extension and comparisons with zero are done as unsigned. This is
10220 the case even on machines that can do fast sign extension, since
10221 zero-extension is easier to combine with other operations than
10222 sign-extension is. If we are comparing against a constant, we must
10223 convert it to what it would look like unsigned. */
10224 if ((code == EQ || code == NE) && ! unsignedp
10225 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10227 if (GET_CODE (op1) == CONST_INT
10228 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10229 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10230 unsignedp = 1;
10232 #endif
10234 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10236 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10239 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10240 The decision as to signed or unsigned comparison must be made by the caller.
10242 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10243 compared.
10245 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10246 size of MODE should be used. */
10248 void
10249 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10250 if_false_label, if_true_label)
10251 register rtx op0, op1;
10252 enum rtx_code code;
10253 int unsignedp;
10254 enum machine_mode mode;
10255 rtx size;
10256 unsigned int align;
10257 rtx if_false_label, if_true_label;
10259 rtx tem;
10260 int dummy_true_label = 0;
10262 /* Reverse the comparison if that is safe and we want to jump if it is
10263 false. */
10264 if (! if_true_label && ! FLOAT_MODE_P (mode))
10266 if_true_label = if_false_label;
10267 if_false_label = 0;
10268 code = reverse_condition (code);
10271 /* If one operand is constant, make it the second one. Only do this
10272 if the other operand is not constant as well. */
10274 if (swap_commutative_operands_p (op0, op1))
10276 tem = op0;
10277 op0 = op1;
10278 op1 = tem;
10279 code = swap_condition (code);
10282 if (flag_force_mem)
10284 op0 = force_not_mem (op0);
10285 op1 = force_not_mem (op1);
10288 do_pending_stack_adjust ();
10290 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10291 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10293 if (tem == const_true_rtx)
10295 if (if_true_label)
10296 emit_jump (if_true_label);
10298 else
10300 if (if_false_label)
10301 emit_jump (if_false_label);
10303 return;
10306 #if 0
10307 /* There's no need to do this now that combine.c can eliminate lots of
10308 sign extensions. This can be less efficient in certain cases on other
10309 machines. */
10311 /* If this is a signed equality comparison, we can do it as an
10312 unsigned comparison since zero-extension is cheaper than sign
10313 extension and comparisons with zero are done as unsigned. This is
10314 the case even on machines that can do fast sign extension, since
10315 zero-extension is easier to combine with other operations than
10316 sign-extension is. If we are comparing against a constant, we must
10317 convert it to what it would look like unsigned. */
10318 if ((code == EQ || code == NE) && ! unsignedp
10319 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10321 if (GET_CODE (op1) == CONST_INT
10322 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10323 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10324 unsignedp = 1;
10326 #endif
10328 if (! if_true_label)
10330 dummy_true_label = 1;
10331 if_true_label = gen_label_rtx ();
10334 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10335 if_true_label);
10337 if (if_false_label)
10338 emit_jump (if_false_label);
10339 if (dummy_true_label)
10340 emit_label (if_true_label);
10343 /* Generate code for a comparison expression EXP (including code to compute
10344 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10345 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10346 generated code will drop through.
10347 SIGNED_CODE should be the rtx operation for this comparison for
10348 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10350 We force a stack adjustment unless there are currently
10351 things pushed on the stack that aren't yet used. */
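/* Illustration: for `a < b' where both operands have an unsigned type,
   SIGNED_CODE is LT and UNSIGNED_CODE is LTU, and the test actually
   emitted below is LTU because TREE_UNSIGNED of the operand type is
   set.  */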
10353 static void
10354 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10355 if_true_label)
10356 register tree exp;
10357 enum rtx_code signed_code, unsigned_code;
10358 rtx if_false_label, if_true_label;
10360 unsigned int align0, align1;
10361 register rtx op0, op1;
10362 register tree type;
10363 register enum machine_mode mode;
10364 int unsignedp;
10365 enum rtx_code code;
10367 /* Don't crash if the comparison was erroneous. */
10368 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10369 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10370 return;
10372 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10373 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10374 return;
10376 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10377 mode = TYPE_MODE (type);
10378 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10379 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10380 || (GET_MODE_BITSIZE (mode)
10381 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10382 1)))))))
10384 /* op0 might have been replaced by promoted constant, in which
10385 case the type of second argument should be used. */
10386 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10387 mode = TYPE_MODE (type);
10389 unsignedp = TREE_UNSIGNED (type);
10390 code = unsignedp ? unsigned_code : signed_code;
10392 #ifdef HAVE_canonicalize_funcptr_for_compare
10393 /* If function pointers need to be "canonicalized" before they can
10394 be reliably compared, then canonicalize them. */
10395 if (HAVE_canonicalize_funcptr_for_compare
10396 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10397 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10398 == FUNCTION_TYPE))
10400 rtx new_op0 = gen_reg_rtx (mode);
10402 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10403 op0 = new_op0;
10406 if (HAVE_canonicalize_funcptr_for_compare
10407 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10408 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10409 == FUNCTION_TYPE))
10411 rtx new_op1 = gen_reg_rtx (mode);
10413 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10414 op1 = new_op1;
10416 #endif
10418 /* Do any postincrements in the expression that was tested. */
10419 emit_queue ();
10421 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10422 ((mode == BLKmode)
10423 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10424 MIN (align0, align1),
10425 if_false_label, if_true_label);
10428 /* Generate code to calculate EXP using a store-flag instruction
10429 and return an rtx for the result. EXP is either a comparison
10430 or a TRUTH_NOT_EXPR whose operand is a comparison.
10432 If TARGET is nonzero, store the result there if convenient.
10434 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10435 cheap.
10437 Return zero if there is no suitable set-flag instruction
10438 available on this machine.
10440 Once expand_expr has been called on the arguments of the comparison,
10441 we are committed to doing the store flag, since it is not safe to
10442 re-evaluate the expression. We emit the store-flag insn by calling
10443 emit_store_flag, but only expand the arguments if we have a reason
10444 to believe that emit_store_flag will be successful. If we think that
10445 it will, but it isn't, we have to simulate the store-flag with a
10446 set/jump/set sequence. */
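/* Sketch of the usual outcome: for `x == 0' with an available scc
   pattern, emit_store_flag leaves 1 in TARGET when `x' is zero and 0
   otherwise; when EXP was wrapped in a TRUTH_NOT_EXPR, the result is
   XOR-ed with 1 afterwards to invert it.  */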
10448 static rtx
10449 do_store_flag (exp, target, mode, only_cheap)
10450 tree exp;
10451 rtx target;
10452 enum machine_mode mode;
10453 int only_cheap;
10455 enum rtx_code code;
10456 tree arg0, arg1, type;
10457 tree tem;
10458 enum machine_mode operand_mode;
10459 int invert = 0;
10460 int unsignedp;
10461 rtx op0, op1;
10462 enum insn_code icode;
10463 rtx subtarget = target;
10464 rtx result, label;
10466 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10467 result at the end. We can't simply invert the test since it would
10468 have already been inverted if it were valid. This case occurs for
10469 some floating-point comparisons. */
10471 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10472 invert = 1, exp = TREE_OPERAND (exp, 0);
10474 arg0 = TREE_OPERAND (exp, 0);
10475 arg1 = TREE_OPERAND (exp, 1);
10477 /* Don't crash if the comparison was erroneous. */
10478 if (arg0 == error_mark_node || arg1 == error_mark_node)
10479 return const0_rtx;
10481 type = TREE_TYPE (arg0);
10482 operand_mode = TYPE_MODE (type);
10483 unsignedp = TREE_UNSIGNED (type);
10485 /* We won't bother with BLKmode store-flag operations because it would mean
10486 passing a lot of information to emit_store_flag. */
10487 if (operand_mode == BLKmode)
10488 return 0;
10490 /* We won't bother with store-flag operations involving function pointers
10491 when function pointers must be canonicalized before comparisons. */
10492 #ifdef HAVE_canonicalize_funcptr_for_compare
10493 if (HAVE_canonicalize_funcptr_for_compare
10494 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10495 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10496 == FUNCTION_TYPE))
10497 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10498 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10499 == FUNCTION_TYPE))))
10500 return 0;
10501 #endif
10503 STRIP_NOPS (arg0);
10504 STRIP_NOPS (arg1);
10506 /* Get the rtx comparison code to use. We know that EXP is a comparison
10507 operation of some type. Some comparisons against 1 and -1 can be
10508 converted to comparisons with zero. Do so here so that the tests
10509 below will be aware that we have a comparison with zero. These
10510 tests will not catch constants in the first operand, but constants
10511 are rarely passed as the first operand. */
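/* For example, an unsigned `x < 1' is rewritten below as `x <= 0'
   (LEU against integer_zero_node), and a signed `x > -1' becomes
   `x >= 0', so the single-bit and sign tests that follow only have to
   recognize comparisons with zero.  */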
10513 switch (TREE_CODE (exp))
10515 case EQ_EXPR:
10516 code = EQ;
10517 break;
10518 case NE_EXPR:
10519 code = NE;
10520 break;
10521 case LT_EXPR:
10522 if (integer_onep (arg1))
10523 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10524 else
10525 code = unsignedp ? LTU : LT;
10526 break;
10527 case LE_EXPR:
10528 if (! unsignedp && integer_all_onesp (arg1))
10529 arg1 = integer_zero_node, code = LT;
10530 else
10531 code = unsignedp ? LEU : LE;
10532 break;
10533 case GT_EXPR:
10534 if (! unsignedp && integer_all_onesp (arg1))
10535 arg1 = integer_zero_node, code = GE;
10536 else
10537 code = unsignedp ? GTU : GT;
10538 break;
10539 case GE_EXPR:
10540 if (integer_onep (arg1))
10541 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10542 else
10543 code = unsignedp ? GEU : GE;
10544 break;
10546 case UNORDERED_EXPR:
10547 code = UNORDERED;
10548 break;
10549 case ORDERED_EXPR:
10550 code = ORDERED;
10551 break;
10552 case UNLT_EXPR:
10553 code = UNLT;
10554 break;
10555 case UNLE_EXPR:
10556 code = UNLE;
10557 break;
10558 case UNGT_EXPR:
10559 code = UNGT;
10560 break;
10561 case UNGE_EXPR:
10562 code = UNGE;
10563 break;
10564 case UNEQ_EXPR:
10565 code = UNEQ;
10566 break;
10568 default:
10569 abort ();
10572 /* Put a constant second. */
10573 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10575 tem = arg0; arg0 = arg1; arg1 = tem;
10576 code = swap_condition (code);
10579 /* If this is an equality or inequality test of a single bit, we can
10580 do this by shifting the bit being tested to the low-order bit and
10581 masking the result with the constant 1. If the condition was EQ,
10582 we xor it with 1. This does not require an scc insn and is faster
10583 than an scc insn even if we have it. */
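/* Concretely, `(x & 8) != 0' has BITNUM == 3, so the code below
   computes (x >> 3) & 1; for the EQ form the intermediate value is
   XOR-ed with 1 before the final masking.  */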
10585 if ((code == NE || code == EQ)
10586 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10587 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10589 tree inner = TREE_OPERAND (arg0, 0);
10590 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10591 int ops_unsignedp;
10593 /* If INNER is a right shift of a constant and it plus BITNUM does
10594 not overflow, adjust BITNUM and INNER. */
10596 if (TREE_CODE (inner) == RSHIFT_EXPR
10597 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10598 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10599 && bitnum < TYPE_PRECISION (type)
10600 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10601 bitnum - TYPE_PRECISION (type)))
10603 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10604 inner = TREE_OPERAND (inner, 0);
10607 /* If we are going to be able to omit the AND below, we must do our
10608 operations as unsigned. If we must use the AND, we have a choice.
10609 Normally unsigned is faster, but for some machines signed is. */
10610 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10611 #ifdef LOAD_EXTEND_OP
10612 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10613 #else
10614 : 1
10615 #endif
10616 );
10618 if (! get_subtarget (subtarget)
10619 || GET_MODE (subtarget) != operand_mode
10620 || ! safe_from_p (subtarget, inner, 1))
10621 subtarget = 0;
10623 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10625 if (bitnum != 0)
10626 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10627 size_int (bitnum), subtarget, ops_unsignedp);
10629 if (GET_MODE (op0) != mode)
10630 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10632 if ((code == EQ && ! invert) || (code == NE && invert))
10633 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10634 ops_unsignedp, OPTAB_LIB_WIDEN);
10636 /* Put the AND last so it can combine with more things. */
10637 if (bitnum != TYPE_PRECISION (type) - 1)
10638 op0 = expand_and (op0, const1_rtx, subtarget);
10640 return op0;
10643 /* Now see if we are likely to be able to do this. Return if not. */
10644 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10645 return 0;
10647 icode = setcc_gen_code[(int) code];
10648 if (icode == CODE_FOR_nothing
10649 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10651 /* We can only do this if it is one of the special cases that
10652 can be handled without an scc insn. */
10653 if ((code == LT && integer_zerop (arg1))
10654 || (! only_cheap && code == GE && integer_zerop (arg1)))
10656 else if (BRANCH_COST >= 0
10657 && ! only_cheap && (code == NE || code == EQ)
10658 && TREE_CODE (type) != REAL_TYPE
10659 && ((abs_optab->handlers[(int) operand_mode].insn_code
10660 != CODE_FOR_nothing)
10661 || (ffs_optab->handlers[(int) operand_mode].insn_code
10662 != CODE_FOR_nothing)))
10664 else
10665 return 0;
10668 if (! get_subtarget (target)
10669 || GET_MODE (subtarget) != operand_mode
10670 || ! safe_from_p (subtarget, arg1, 1))
10671 subtarget = 0;
10673 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10674 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10676 if (target == 0)
10677 target = gen_reg_rtx (mode);
10679 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10680 because, if the emit_store_flag does anything it will succeed and
10681 OP0 and OP1 will not be used subsequently. */
10683 result = emit_store_flag (target, code,
10684 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10685 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10686 operand_mode, unsignedp, 1);
10688 if (result)
10690 if (invert)
10691 result = expand_binop (mode, xor_optab, result, const1_rtx,
10692 result, 0, OPTAB_LIB_WIDEN);
10693 return result;
10696 /* If this failed, we have to do this with set/compare/jump/set code. */
10697 if (GET_CODE (target) != REG
10698 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10699 target = gen_reg_rtx (GET_MODE (target));
10701 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10702 result = compare_from_rtx (op0, op1, code, unsignedp,
10703 operand_mode, NULL_RTX, 0);
10704 if (GET_CODE (result) == CONST_INT)
10705 return (((result == const0_rtx && ! invert)
10706 || (result != const0_rtx && invert))
10707 ? const0_rtx : const1_rtx);
10709 label = gen_label_rtx ();
10710 if (bcc_gen_fctn[(int) code] == 0)
10711 abort ();
10713 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10714 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10715 emit_label (label);
10717 return target;
10720 /* Generate a tablejump instruction (used for switch statements). */
10722 #ifdef HAVE_tablejump
10724 /* INDEX is the value being switched on, with the lowest value
10725 in the table already subtracted.
10726 MODE is its expected mode (needed if INDEX is constant).
10727 RANGE is the length of the jump table.
10728 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10730 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10731 index value is out of range. */
10733 void
10734 do_tablejump (index, mode, range, table_label, default_label)
10735 rtx index, range, table_label, default_label;
10736 enum machine_mode mode;
10738 register rtx temp, vector;
10740 /* Do an unsigned comparison (in the proper mode) between the index
10741 expression and the value which represents the length of the range.
10742 Since we just finished subtracting the lower bound of the range
10743 from the index expression, this comparison allows us to simultaneously
10744 check that the original index expression value is both greater than
10745 or equal to the minimum value of the range and less than or equal to
10746 the maximum value of the range. */
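/* E.g. for `switch (i)' with case values 5 through 10, the caller
   passes INDEX as i - 5; values of i below 5 wrap around to very large
   unsigned numbers, so the single GTU comparison against RANGE rejects
   both i < 5 and i > 10 with one branch to DEFAULT_LABEL.  */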
10748 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10749 0, default_label);
10751 /* If index is in range, it must fit in Pmode.
10752 Convert to Pmode so we can index with it. */
10753 if (mode != Pmode)
10754 index = convert_to_mode (Pmode, index, 1);
10756 /* Don't let a MEM slip through, because then INDEX that comes
10757 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10758 and break_out_memory_refs will go to work on it and mess it up. */
10759 #ifdef PIC_CASE_VECTOR_ADDRESS
10760 if (flag_pic && GET_CODE (index) != REG)
10761 index = copy_to_mode_reg (Pmode, index);
10762 #endif
10764 /* If flag_force_addr were to affect this address
10765 it could interfere with the tricky assumptions made
10766 about addresses that contain label-refs,
10767 which may be valid only very near the tablejump itself. */
10768 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10769 GET_MODE_SIZE, because this indicates how large insns are. The other
10770 uses should all be Pmode, because they are addresses. This code
10771 could fail if addresses and insns are not the same size. */
10772 index = gen_rtx_PLUS (Pmode,
10773 gen_rtx_MULT (Pmode, index,
10774 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10775 gen_rtx_LABEL_REF (Pmode, table_label));
10776 #ifdef PIC_CASE_VECTOR_ADDRESS
10777 if (flag_pic)
10778 index = PIC_CASE_VECTOR_ADDRESS (index);
10779 else
10780 #endif
10781 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10782 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10783 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10784 RTX_UNCHANGING_P (vector) = 1;
10785 convert_move (temp, vector, 0);
10787 emit_jump_insn (gen_tablejump (temp, table_label));
10789 /* If we are generating PIC code or if the table is PC-relative, the
10790 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10791 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10792 emit_barrier ();
10795 #endif /* HAVE_tablejump */