[official-gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "intl.h"
43 #include "tm_p.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
51 #ifdef PUSH_ROUNDING
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
55 #endif
57 #endif
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
62 #else
63 #define STACK_PUSH_CODE PRE_INC
64 #endif
65 #endif
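/* Illustrative sketch, added annotation -- not part of the original file.
   STACK_PUSH_CODE is the address form a single push uses: on a machine
   whose stack grows downward, pushing a word amounts to storing through a
   pre-decremented stack pointer.  A minimal sketch:  */
#if 0
static void
example_push_word (value)
     rtx value;
{
  rtx dst = gen_rtx_MEM (word_mode,
                         gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
  emit_move_insn (dst, value);
}
#endif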
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
70 #endif
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
74 codes that safe_from_p needs to know about. Since safe_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
78 parameter. */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
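/* Illustrative sketch, added annotation -- not part of the original file.
   A front end that has language-specific tree codes installs the hook once
   at initialization; the names below are hypothetical.  */
#if 0
static int
example_lang_safe_from_p (x, exp)
     rtx x ATTRIBUTE_UNUSED;
     tree exp ATTRIBUTE_UNUSED;
{
  /* Examine only language-specific codes; safe_from_p itself walks the
     ordinary TREE_OPERANDs.  Return nonzero when EXP cannot clobber X.  */
  return 1;
}

static void
example_lang_init ()
{
  lang_safe_from_p = example_lang_safe_from_p;
}
#endif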
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* Don't check memory usage, since code is being emitted to check memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
114 /* This structure is used by store_by_pieces to describe the clear to
115 be performed. */
117 struct store_by_pieces
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
126 PTR constfundata;
127 int reverse;
130 extern struct obstack permanent_obstack;
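/* Illustrative sketch, added annotation -- not part of the original file.
   The constfun callback in struct store_by_pieces above receives the opaque
   constfundata pointer, a byte offset into the destination, and a mode, and
   returns the rtx to store for that piece.  A callback used for clearing
   can ignore its arguments and return zero in the requested mode:  */
#if 0
static rtx
example_clear_constfun (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  return CONST0_RTX (mode);
}
#endif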
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
155 int));
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
157 HOST_WIDE_INT));
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
184 /* If a memory-to-memory move would take MOVE_RATIO or more simple
185 move-instruction sequences, we will do a movstr or libcall instead. */
187 #ifndef MOVE_RATIO
188 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
189 #define MOVE_RATIO 2
190 #else
191 /* If we are optimizing for space (-Os), cut down the default move ratio. */
192 #define MOVE_RATIO (optimize_size ? 3 : 15)
193 #endif
194 #endif
196 /* This macro is used to determine whether move_by_pieces should be called
197 to perform a structure copy. */
198 #ifndef MOVE_BY_PIECES_P
199 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
201 #endif
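/* Added annotation -- not part of the original file.  This is the test
   emit_block_move makes below before open-coding a copy (X, Y, SIZE and
   ALIGN named as in that function): a constant length whose estimated cost
   is under MOVE_RATIO goes through move_by_pieces, everything else falls
   back to a movstr pattern or a library call.  */
#if 0
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
#endif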
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
206 /* This array records the insn_code of insns to perform block clears. */
207 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
209 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
211 #ifndef SLOW_UNALIGNED_ACCESS
212 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
213 #endif
215 /* This is run once per compilation to set up which modes can be used
216 directly in memory and to initialize the block move optab. */
218 void
219 init_expr_once ()
221 rtx insn, pat;
222 enum machine_mode mode;
223 int num_clobbers;
224 rtx mem, mem1;
226 start_sequence ();
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
235 pat = PATTERN (insn);
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
240 int regno;
241 rtx reg;
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
253 regno++)
255 if (! HARD_REGNO_MODE_OK (regno, mode))
256 continue;
258 reg = gen_rtx_REG (mode, regno);
260 SET_SRC (pat) = mem;
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = reg;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
282 end_sequence ();
285 /* This is run at the start of compiling a function. */
287 void
288 init_expr ()
290 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
292 pending_chain = 0;
293 pending_stack_adjust = 0;
294 stack_pointer_delta = 0;
295 inhibit_defer_pop = 0;
296 saveregs_value = 0;
297 apply_args_value = 0;
298 forced_labels = 0;
301 void
302 mark_expr_status (p)
303 struct expr_status *p;
305 if (p == NULL)
306 return;
308 ggc_mark_rtx (p->x_saveregs_value);
309 ggc_mark_rtx (p->x_apply_args_value);
310 ggc_mark_rtx (p->x_forced_labels);
313 void
314 free_expr_status (f)
315 struct function *f;
317 free (f->expr);
318 f->expr = NULL;
321 /* Small sanity check that the queue is empty at the end of a function. */
323 void
324 finish_expr_for_function ()
326 if (pending_chain)
327 abort ();
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
333 /* Queue up to increment (or change) VAR later. BODY says how:
334 BODY should be the same thing you would pass to emit_insn
335 to increment right away. It will go to emit_insn later on.
337 The value is a QUEUED expression to be used in place of VAR
338 where you want to guarantee the pre-incrementation value of VAR. */
340 static rtx
341 enqueue_insn (var, body)
342 rtx var, body;
344 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
345 body, pending_chain);
346 return pending_chain;
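/* Illustrative sketch, added annotation -- not part of the original file.
   A caller queues an increment of VAR (a hypothetical rtx here) and keeps
   the returned QUEUED rtx, which stands for VAR's pre-increment value; the
   increment itself is emitted later by emit_queue.  */
#if 0
  rtx old_value = enqueue_insn (var,
                                gen_move_insn (var, plus_constant (var, 1)));
#endif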
349 /* Use protect_from_queue to convert a QUEUED expression
350 into something that you can put immediately into an instruction.
351 If the queued incrementation has not happened yet,
352 protect_from_queue returns the variable itself.
353 If the incrementation has happened, protect_from_queue returns a temp
354 that contains a copy of the old value of the variable.
356 Any time an rtx which might possibly be a QUEUED is to be put
357 into an instruction, it must be passed through protect_from_queue first.
358 QUEUED expressions are not meaningful in instructions.
360 Do not pass a value through protect_from_queue and then hold
361 on to it for a while before putting it in an instruction!
362 If the queue is flushed in between, incorrect code will result. */
364 rtx
365 protect_from_queue (x, modify)
366 register rtx x;
367 int modify;
369 register RTX_CODE code = GET_CODE (x);
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain == 0)
374 return x;
375 #endif
377 if (code != QUEUED)
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382 of mode BLKmode. Don't modify X in place since it might be
383 shared. */
384 if (code == MEM && GET_MODE (x) != BLKmode
385 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
387 register rtx y = XEXP (x, 0);
388 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
390 MEM_COPY_ATTRIBUTES (new, x);
392 if (QUEUED_INSN (y))
394 register rtx temp = gen_reg_rtx (GET_MODE (new));
395 emit_insn_before (gen_move_insn (temp, new),
396 QUEUED_INSN (y));
397 return temp;
399 return new;
401 /* Otherwise, recursively protect the subexpressions of all
402 the kinds of rtx's that can contain a QUEUED. */
403 if (code == MEM)
405 rtx tem = protect_from_queue (XEXP (x, 0), 0);
406 if (tem != XEXP (x, 0))
408 x = copy_rtx (x);
409 XEXP (x, 0) = tem;
412 else if (code == PLUS || code == MULT)
414 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
415 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
416 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
418 x = copy_rtx (x);
419 XEXP (x, 0) = new0;
420 XEXP (x, 1) = new1;
423 return x;
425 /* If the increment has not happened, use the variable itself. */
426 if (QUEUED_INSN (x) == 0)
427 return QUEUED_VAR (x);
428 /* If the increment has happened and a pre-increment copy exists,
429 use that copy. */
430 if (QUEUED_COPY (x) != 0)
431 return QUEUED_COPY (x);
432 /* The increment has happened but we haven't set up a pre-increment copy.
433 Set one up now, and use it. */
434 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
435 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
436 QUEUED_INSN (x));
437 return QUEUED_COPY (x);
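/* Added annotation -- not part of the original file.  The required calling
   pattern, as convert_move uses below (TO and FROM named as there): pass
   each operand through protect_from_queue immediately before putting it in
   an insn, with MODIFY == 1 for operands that are stored into and 0 for
   operands that are only read, and do not cache the result across a
   possible emit_queue.  */
#if 0
  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  emit_move_insn (to, from);
#endif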
440 /* Return nonzero if X contains a QUEUED expression:
441 if it contains anything that will be altered by a queued increment.
442 We handle only combinations of MEM, PLUS, MINUS and MULT operators
443 since memory addresses generally contain only those. */
445 int
446 queued_subexp_p (x)
447 rtx x;
449 register enum rtx_code code = GET_CODE (x);
450 switch (code)
452 case QUEUED:
453 return 1;
454 case MEM:
455 return queued_subexp_p (XEXP (x, 0));
456 case MULT:
457 case PLUS:
458 case MINUS:
459 return (queued_subexp_p (XEXP (x, 0))
460 || queued_subexp_p (XEXP (x, 1)));
461 default:
462 return 0;
466 /* Perform all the pending incrementations. */
468 void
469 emit_queue ()
471 register rtx p;
472 while ((p = pending_chain))
474 rtx body = QUEUED_BODY (p);
476 if (GET_CODE (body) == SEQUENCE)
478 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
479 emit_insn (QUEUED_BODY (p));
481 else
482 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
483 pending_chain = QUEUED_NEXT (p);
487 /* Copy data from FROM to TO, where the machine modes are not the same.
488 Both modes may be integer, or both may be floating.
489 UNSIGNEDP should be nonzero if FROM is an unsigned type.
490 This causes zero-extension instead of sign-extension. */
492 void
493 convert_move (to, from, unsignedp)
494 register rtx to, from;
495 int unsignedp;
497 enum machine_mode to_mode = GET_MODE (to);
498 enum machine_mode from_mode = GET_MODE (from);
499 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
500 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
501 enum insn_code code;
502 rtx libcall;
504 /* rtx code for making an equivalent value. */
505 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
507 to = protect_from_queue (to, 1);
508 from = protect_from_queue (from, 0);
510 if (to_real != from_real)
511 abort ();
513 /* If FROM is a SUBREG that indicates that we have already done at least
514 the required extension, strip it. We don't handle such SUBREGs as
515 TO here. */
517 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
518 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
519 >= GET_MODE_SIZE (to_mode))
520 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
521 from = gen_lowpart (to_mode, from), from_mode = to_mode;
523 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
524 abort ();
526 if (to_mode == from_mode
527 || (from_mode == VOIDmode && CONSTANT_P (from)))
529 emit_move_insn (to, from);
530 return;
533 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
535 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
536 abort ();
538 if (VECTOR_MODE_P (to_mode))
539 from = gen_rtx_SUBREG (to_mode, from, 0);
540 else
541 to = gen_rtx_SUBREG (from_mode, to, 0);
543 emit_move_insn (to, from);
544 return;
547 if (to_real != from_real)
548 abort ();
550 if (to_real)
552 rtx value, insns;
554 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
556 /* Try converting directly if the insn is supported. */
557 if ((code = can_extend_p (to_mode, from_mode, 0))
558 != CODE_FOR_nothing)
560 emit_unop_insn (code, to, from, UNKNOWN);
561 return;
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
569 return;
571 #endif
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
576 return;
578 #endif
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
583 return;
585 #endif
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
590 return;
592 #endif
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
597 return;
599 #endif
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
603 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
604 return;
606 #endif
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
611 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
612 return;
614 #endif
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
618 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
619 return;
621 #endif
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
625 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
626 return;
628 #endif
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
632 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
633 return;
635 #endif
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
639 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
640 return;
642 #endif
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
647 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
648 return;
650 #endif
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
654 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
655 return;
657 #endif
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
661 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
662 return;
664 #endif
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
668 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
669 return;
671 #endif
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
676 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
677 return;
679 #endif
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
683 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
684 return;
686 #endif
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
690 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
691 return;
693 #endif
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
697 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
698 return;
700 #endif
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
704 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
705 return;
707 #endif
709 libcall = (rtx) 0;
710 switch (from_mode)
712 case SFmode:
713 switch (to_mode)
715 case DFmode:
716 libcall = extendsfdf2_libfunc;
717 break;
719 case XFmode:
720 libcall = extendsfxf2_libfunc;
721 break;
723 case TFmode:
724 libcall = extendsftf2_libfunc;
725 break;
727 default:
728 break;
730 break;
732 case DFmode:
733 switch (to_mode)
735 case SFmode:
736 libcall = truncdfsf2_libfunc;
737 break;
739 case XFmode:
740 libcall = extenddfxf2_libfunc;
741 break;
743 case TFmode:
744 libcall = extenddftf2_libfunc;
745 break;
747 default:
748 break;
750 break;
752 case XFmode:
753 switch (to_mode)
755 case SFmode:
756 libcall = truncxfsf2_libfunc;
757 break;
759 case DFmode:
760 libcall = truncxfdf2_libfunc;
761 break;
763 default:
764 break;
766 break;
768 case TFmode:
769 switch (to_mode)
771 case SFmode:
772 libcall = trunctfsf2_libfunc;
773 break;
775 case DFmode:
776 libcall = trunctfdf2_libfunc;
777 break;
779 default:
780 break;
782 break;
784 default:
785 break;
788 if (libcall == (rtx) 0)
789 /* This conversion is not implemented yet. */
790 abort ();
792 start_sequence ();
793 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
794 1, from, from_mode);
795 insns = get_insns ();
796 end_sequence ();
797 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
798 from));
799 return;
802 /* Now both modes are integers. */
804 /* Handle expanding beyond a word. */
805 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
806 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
808 rtx insns;
809 rtx lowpart;
810 rtx fill_value;
811 rtx lowfrom;
812 int i;
813 enum machine_mode lowpart_mode;
814 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
816 /* Try converting directly if the insn is supported. */
817 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
818 != CODE_FOR_nothing)
820 /* If FROM is a SUBREG, put it into a register. Do this
821 so that we always generate the same set of insns for
822 better cse'ing; if an intermediate assignment occurred,
823 we won't be doing the operation directly on the SUBREG. */
824 if (optimize > 0 && GET_CODE (from) == SUBREG)
825 from = force_reg (from_mode, from);
826 emit_unop_insn (code, to, from, equiv_code);
827 return;
829 /* Next, try converting via full word. */
830 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
831 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
832 != CODE_FOR_nothing))
834 if (GET_CODE (to) == REG)
835 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
836 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
837 emit_unop_insn (code, to,
838 gen_lowpart (word_mode, to), equiv_code);
839 return;
842 /* No special multiword conversion insn; do it by hand. */
843 start_sequence ();
845 /* Since we will turn this into a no conflict block, we must ensure
846 that the source does not overlap the target. */
848 if (reg_overlap_mentioned_p (to, from))
849 from = force_reg (from_mode, from);
851 /* Get a copy of FROM widened to a word, if necessary. */
852 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
853 lowpart_mode = word_mode;
854 else
855 lowpart_mode = from_mode;
857 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
859 lowpart = gen_lowpart (lowpart_mode, to);
860 emit_move_insn (lowpart, lowfrom);
862 /* Compute the value to put in each remaining word. */
863 if (unsignedp)
864 fill_value = const0_rtx;
865 else
867 #ifdef HAVE_slt
868 if (HAVE_slt
869 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
870 && STORE_FLAG_VALUE == -1)
872 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
873 lowpart_mode, 0, 0);
874 fill_value = gen_reg_rtx (word_mode);
875 emit_insn (gen_slt (fill_value));
877 else
878 #endif
880 fill_value
881 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
882 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
883 NULL_RTX, 0);
884 fill_value = convert_to_mode (word_mode, fill_value, 1);
888 /* Fill the remaining words. */
889 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
891 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
892 rtx subword = operand_subword (to, index, 1, to_mode);
894 if (subword == 0)
895 abort ();
897 if (fill_value != subword)
898 emit_move_insn (subword, fill_value);
901 insns = get_insns ();
902 end_sequence ();
904 emit_no_conflict_block (insns, to, from, NULL_RTX,
905 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
906 return;
909 /* Truncating multi-word to a word or less. */
910 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
911 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
913 if (!((GET_CODE (from) == MEM
914 && ! MEM_VOLATILE_P (from)
915 && direct_load[(int) to_mode]
916 && ! mode_dependent_address_p (XEXP (from, 0)))
917 || GET_CODE (from) == REG
918 || GET_CODE (from) == SUBREG))
919 from = force_reg (from_mode, from);
920 convert_move (to, gen_lowpart (word_mode, from), 0);
921 return;
924 /* Handle pointer conversion. */ /* SPEE 900220. */
925 if (to_mode == PQImode)
927 if (from_mode != QImode)
928 from = convert_to_mode (QImode, from, unsignedp);
930 #ifdef HAVE_truncqipqi2
931 if (HAVE_truncqipqi2)
933 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
934 return;
936 #endif /* HAVE_truncqipqi2 */
937 abort ();
940 if (from_mode == PQImode)
942 if (to_mode != QImode)
944 from = convert_to_mode (QImode, from, unsignedp);
945 from_mode = QImode;
947 else
949 #ifdef HAVE_extendpqiqi2
950 if (HAVE_extendpqiqi2)
952 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
953 return;
955 #endif /* HAVE_extendpqiqi2 */
956 abort ();
960 if (to_mode == PSImode)
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
969 return;
971 #endif /* HAVE_truncsipsi2 */
972 abort ();
975 if (from_mode == PSImode)
977 if (to_mode != SImode)
979 from = convert_to_mode (SImode, from, unsignedp);
980 from_mode = SImode;
982 else
984 #ifdef HAVE_extendpsisi2
985 if (! unsignedp && HAVE_extendpsisi2)
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
988 return;
990 #endif /* HAVE_extendpsisi2 */
991 #ifdef HAVE_zero_extendpsisi2
992 if (unsignedp && HAVE_zero_extendpsisi2)
994 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
995 return;
997 #endif /* HAVE_zero_extendpsisi2 */
998 abort ();
1002 if (to_mode == PDImode)
1004 if (from_mode != DImode)
1005 from = convert_to_mode (DImode, from, unsignedp);
1007 #ifdef HAVE_truncdipdi2
1008 if (HAVE_truncdipdi2)
1010 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1011 return;
1013 #endif /* HAVE_truncdipdi2 */
1014 abort ();
1017 if (from_mode == PDImode)
1019 if (to_mode != DImode)
1021 from = convert_to_mode (DImode, from, unsignedp);
1022 from_mode = DImode;
1024 else
1026 #ifdef HAVE_extendpdidi2
1027 if (HAVE_extendpdidi2)
1029 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1030 return;
1032 #endif /* HAVE_extendpdidi2 */
1033 abort ();
1037 /* Now follow all the conversions between integers
1038 no more than a word long. */
1040 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1041 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1042 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1043 GET_MODE_BITSIZE (from_mode)))
1045 if (!((GET_CODE (from) == MEM
1046 && ! MEM_VOLATILE_P (from)
1047 && direct_load[(int) to_mode]
1048 && ! mode_dependent_address_p (XEXP (from, 0)))
1049 || GET_CODE (from) == REG
1050 || GET_CODE (from) == SUBREG))
1051 from = force_reg (from_mode, from);
1052 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1053 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1054 from = copy_to_reg (from);
1055 emit_move_insn (to, gen_lowpart (to_mode, from));
1056 return;
1059 /* Handle extension. */
1060 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1062 /* Convert directly if that works. */
1063 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1064 != CODE_FOR_nothing)
1066 emit_unop_insn (code, to, from, equiv_code);
1067 return;
1069 else
1071 enum machine_mode intermediate;
1072 rtx tmp;
1073 tree shift_amount;
1075 /* Search for a mode to convert via. */
1076 for (intermediate = from_mode; intermediate != VOIDmode;
1077 intermediate = GET_MODE_WIDER_MODE (intermediate))
1078 if (((can_extend_p (to_mode, intermediate, unsignedp)
1079 != CODE_FOR_nothing)
1080 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1081 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1082 GET_MODE_BITSIZE (intermediate))))
1083 && (can_extend_p (intermediate, from_mode, unsignedp)
1084 != CODE_FOR_nothing))
1086 convert_move (to, convert_to_mode (intermediate, from,
1087 unsignedp), unsignedp);
1088 return;
1091 /* No suitable intermediate mode.
1092 Generate what we need with shifts. */
1093 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1094 - GET_MODE_BITSIZE (from_mode), 0);
1095 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1096 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1097 to, unsignedp);
1098 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1099 to, unsignedp);
1100 if (tmp != to)
1101 emit_move_insn (to, tmp);
1102 return;
1106 /* Support special truncate insns for certain modes. */
1108 if (from_mode == DImode && to_mode == SImode)
1110 #ifdef HAVE_truncdisi2
1111 if (HAVE_truncdisi2)
1113 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1114 return;
1116 #endif
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1118 return;
1121 if (from_mode == DImode && to_mode == HImode)
1123 #ifdef HAVE_truncdihi2
1124 if (HAVE_truncdihi2)
1126 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1127 return;
1129 #endif
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1131 return;
1134 if (from_mode == DImode && to_mode == QImode)
1136 #ifdef HAVE_truncdiqi2
1137 if (HAVE_truncdiqi2)
1139 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1140 return;
1142 #endif
1143 convert_move (to, force_reg (from_mode, from), unsignedp);
1144 return;
1147 if (from_mode == SImode && to_mode == HImode)
1149 #ifdef HAVE_truncsihi2
1150 if (HAVE_truncsihi2)
1152 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1153 return;
1155 #endif
1156 convert_move (to, force_reg (from_mode, from), unsignedp);
1157 return;
1160 if (from_mode == SImode && to_mode == QImode)
1162 #ifdef HAVE_truncsiqi2
1163 if (HAVE_truncsiqi2)
1165 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1166 return;
1168 #endif
1169 convert_move (to, force_reg (from_mode, from), unsignedp);
1170 return;
1173 if (from_mode == HImode && to_mode == QImode)
1175 #ifdef HAVE_trunchiqi2
1176 if (HAVE_trunchiqi2)
1178 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1179 return;
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1186 if (from_mode == TImode && to_mode == DImode)
1188 #ifdef HAVE_trunctidi2
1189 if (HAVE_trunctidi2)
1191 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1192 return;
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1199 if (from_mode == TImode && to_mode == SImode)
1201 #ifdef HAVE_trunctisi2
1202 if (HAVE_trunctisi2)
1204 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1205 return;
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1212 if (from_mode == TImode && to_mode == HImode)
1214 #ifdef HAVE_trunctihi2
1215 if (HAVE_trunctihi2)
1217 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1218 return;
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1225 if (from_mode == TImode && to_mode == QImode)
1227 #ifdef HAVE_trunctiqi2
1228 if (HAVE_trunctiqi2)
1230 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1231 return;
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1238 /* Handle truncation of volatile memrefs, and so on;
1239 the things that couldn't be truncated directly,
1240 and for which there was no special instruction. */
1241 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1243 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1244 emit_move_insn (to, temp);
1245 return;
1248 /* Mode combination is not recognized. */
1249 abort ();
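/* Illustrative sketch, added annotation -- not part of the original file.
   A typical widening use of convert_move: NARROW is a hypothetical HImode
   value, and UNSIGNEDP selects zero versus sign extension; the modes
   themselves are taken from the operands.  */
#if 0
  rtx wide = gen_reg_rtx (SImode);
  convert_move (wide, narrow, 0);       /* 0 => sign extend.  */
#endif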
1252 /* Return an rtx for a value that would result
1253 from converting X to mode MODE.
1254 Both X and MODE may be floating, or both integer.
1255 UNSIGNEDP is nonzero if X is an unsigned value.
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1259 This function *must not* call protect_from_queue
1260 except when putting X into an insn (in which case convert_move does it). */
1262 rtx
1263 convert_to_mode (mode, x, unsignedp)
1264 enum machine_mode mode;
1265 rtx x;
1266 int unsignedp;
1268 return convert_modes (mode, VOIDmode, x, unsignedp);
1271 /* Return an rtx for a value that would result
1272 from converting X from mode OLDMODE to mode MODE.
1273 Both modes may be floating, or both integer.
1274 UNSIGNEDP is nonzero if X is an unsigned value.
1276 This can be done by referring to a part of X in place
1277 or by copying to a new temporary with conversion.
1279 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1281 This function *must not* call protect_from_queue
1282 except when putting X into an insn (in which case convert_move does it). */
1284 rtx
1285 convert_modes (mode, oldmode, x, unsignedp)
1286 enum machine_mode mode, oldmode;
1287 rtx x;
1288 int unsignedp;
1290 register rtx temp;
1292 /* If FROM is a SUBREG that indicates that we have already done at least
1293 the required extension, strip it. */
1295 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1296 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1297 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1298 x = gen_lowpart (mode, x);
1300 if (GET_MODE (x) != VOIDmode)
1301 oldmode = GET_MODE (x);
1303 if (mode == oldmode)
1304 return x;
1306 /* There is one case that we must handle specially: If we are converting
1307 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1308 we are to interpret the constant as unsigned, gen_lowpart will do
1309 the wrong thing if the constant appears negative. What we want to do is
1310 make the high-order word of the constant zero, not all ones. */
1312 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1313 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1314 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1316 HOST_WIDE_INT val = INTVAL (x);
1318 if (oldmode != VOIDmode
1319 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1321 int width = GET_MODE_BITSIZE (oldmode);
1323 /* We need to zero extend VAL. */
1324 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1327 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1330 /* We can do this with a gen_lowpart if both desired and current modes
1331 are integer, and this is either a constant integer, a register, or a
1332 non-volatile MEM. Except for the constant case where MODE is no
1333 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1335 if ((GET_CODE (x) == CONST_INT
1336 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1337 || (GET_MODE_CLASS (mode) == MODE_INT
1338 && GET_MODE_CLASS (oldmode) == MODE_INT
1339 && (GET_CODE (x) == CONST_DOUBLE
1340 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1341 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1342 && direct_load[(int) mode])
1343 || (GET_CODE (x) == REG
1344 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1345 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1347 /* ?? If we don't know OLDMODE, we have to assume here that
1348 X does not need sign- or zero-extension. This may not be
1349 the case, but it's the best we can do. */
1350 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1351 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1353 HOST_WIDE_INT val = INTVAL (x);
1354 int width = GET_MODE_BITSIZE (oldmode);
1356 /* We must sign or zero-extend in this case. Start by
1357 zero-extending, then sign extend if we need to. */
1358 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1359 if (! unsignedp
1360 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1361 val |= (HOST_WIDE_INT) (-1) << width;
1363 return GEN_INT (trunc_int_for_mode (val, mode));
1366 return gen_lowpart (mode, x);
1369 temp = gen_reg_rtx (mode);
1370 convert_move (temp, x, unsignedp);
1371 return temp;
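/* Worked example, added annotation -- not part of the original file.
   For the CONST_INT path above: convert_modes (SImode, QImode,
   GEN_INT (255), unsignedp) first masks VAL to the 8-bit value 0xff.
   With UNSIGNEDP nonzero the result is GEN_INT (255); with UNSIGNEDP zero,
   bit 7 is set, so the high bits are filled in and the result is
   GEN_INT (-1), i.e. the value sign-extended from QImode.  */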
1374 /* This macro is used to determine what the largest unit size that
1375 move_by_pieces can use is. */
1377 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1378 move efficiently, as opposed to MOVE_MAX which is the maximum
1379 number of bytes we can move with a single instruction. */
1381 #ifndef MOVE_MAX_PIECES
1382 #define MOVE_MAX_PIECES MOVE_MAX
1383 #endif
1385 /* Generate several move instructions to copy LEN bytes
1386 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1387 The caller must pass FROM and TO
1388 through protect_from_queue before calling.
1390 When TO is NULL, emit_single_push_insn is used to push FROM onto
1391 the stack.
1393 ALIGN is maximum alignment we can assume. */
1395 void
1396 move_by_pieces (to, from, len, align)
1397 rtx to, from;
1398 unsigned HOST_WIDE_INT len;
1399 unsigned int align;
1401 struct move_by_pieces data;
1402 rtx to_addr, from_addr = XEXP (from, 0);
1403 unsigned int max_size = MOVE_MAX_PIECES + 1;
1404 enum machine_mode mode = VOIDmode, tmode;
1405 enum insn_code icode;
1407 data.offset = 0;
1408 data.from_addr = from_addr;
1409 if (to)
1411 to_addr = XEXP (to, 0);
1412 data.to = to;
1413 data.autinc_to
1414 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1415 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1416 data.reverse
1417 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1419 else
1421 to_addr = NULL_RTX;
1422 data.to = NULL_RTX;
1423 data.autinc_to = 1;
1424 #ifdef STACK_GROWS_DOWNWARD
1425 data.reverse = 1;
1426 #else
1427 data.reverse = 0;
1428 #endif
1430 data.to_addr = to_addr;
1431 data.from = from;
1432 data.autinc_from
1433 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1434 || GET_CODE (from_addr) == POST_INC
1435 || GET_CODE (from_addr) == POST_DEC);
1437 data.explicit_inc_from = 0;
1438 data.explicit_inc_to = 0;
1439 if (data.reverse) data.offset = len;
1440 data.len = len;
1442 /* If copying requires more than two move insns,
1443 copy addresses to registers (to make displacements shorter)
1444 and use post-increment if available. */
1445 if (!(data.autinc_from && data.autinc_to)
1446 && move_by_pieces_ninsns (len, align) > 2)
1448 /* Find the mode of the largest move... */
1449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1451 if (GET_MODE_SIZE (tmode) < max_size)
1452 mode = tmode;
1454 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1456 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1457 data.autinc_from = 1;
1458 data.explicit_inc_from = -1;
1460 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1462 data.from_addr = copy_addr_to_reg (from_addr);
1463 data.autinc_from = 1;
1464 data.explicit_inc_from = 1;
1466 if (!data.autinc_from && CONSTANT_P (from_addr))
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1470 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1471 data.autinc_to = 1;
1472 data.explicit_inc_to = -1;
1474 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1476 data.to_addr = copy_addr_to_reg (to_addr);
1477 data.autinc_to = 1;
1478 data.explicit_inc_to = 1;
1480 if (!data.autinc_to && CONSTANT_P (to_addr))
1481 data.to_addr = copy_addr_to_reg (to_addr);
1484 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1485 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1486 align = MOVE_MAX * BITS_PER_UNIT;
1488 /* First move what we can in the largest integer mode, then go to
1489 successively smaller modes. */
1491 while (max_size > 1)
1493 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1494 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1495 if (GET_MODE_SIZE (tmode) < max_size)
1496 mode = tmode;
1498 if (mode == VOIDmode)
1499 break;
1501 icode = mov_optab->handlers[(int) mode].insn_code;
1502 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1503 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1505 max_size = GET_MODE_SIZE (mode);
1508 /* The code above should have handled everything. */
1509 if (data.len > 0)
1510 abort ();
1513 /* Return number of insns required to move L bytes by pieces.
1514 ALIGN (in bits) is maximum alignment we can assume. */
1516 static unsigned HOST_WIDE_INT
1517 move_by_pieces_ninsns (l, align)
1518 unsigned HOST_WIDE_INT l;
1519 unsigned int align;
1521 unsigned HOST_WIDE_INT n_insns = 0;
1522 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1524 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1525 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1526 align = MOVE_MAX * BITS_PER_UNIT;
1528 while (max_size > 1)
1530 enum machine_mode mode = VOIDmode, tmode;
1531 enum insn_code icode;
1533 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1534 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1535 if (GET_MODE_SIZE (tmode) < max_size)
1536 mode = tmode;
1538 if (mode == VOIDmode)
1539 break;
1541 icode = mov_optab->handlers[(int) mode].insn_code;
1542 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1543 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1545 max_size = GET_MODE_SIZE (mode);
1548 if (l)
1549 abort ();
1550 return n_insns;
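/* Worked example, added annotation -- not part of the original file.
   Assume full alignment, word_mode == SImode and MOVE_MAX == 4.  For
   L == 10 the loop above counts 10/4 = 2 SImode moves (leaving 2 bytes)
   plus 2/2 = 1 HImode move, so the function returns 3.  Since 3 is below
   the default MOVE_RATIO of 15 (the non-movstr, non -Os default above),
   MOVE_BY_PIECES_P accepts the copy and it is open-coded.  */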
1553 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1554 with move instructions for mode MODE. GENFUN is the gen_... function
1555 to make a move insn for that mode. DATA has all the other info. */
1557 static void
1558 move_by_pieces_1 (genfun, mode, data)
1559 rtx (*genfun) PARAMS ((rtx, ...));
1560 enum machine_mode mode;
1561 struct move_by_pieces *data;
1563 unsigned int size = GET_MODE_SIZE (mode);
1564 rtx to1, from1;
1566 while (data->len >= size)
1568 if (data->reverse)
1569 data->offset -= size;
1571 if (data->to)
1573 if (data->autinc_to)
1575 to1 = gen_rtx_MEM (mode, data->to_addr);
1576 MEM_COPY_ATTRIBUTES (to1, data->to);
1578 else
1579 to1 = change_address (data->to, mode,
1580 plus_constant (data->to_addr, data->offset));
1583 if (data->autinc_from)
1585 from1 = gen_rtx_MEM (mode, data->from_addr);
1586 MEM_COPY_ATTRIBUTES (from1, data->from);
1588 else
1589 from1 = change_address (data->from, mode,
1590 plus_constant (data->from_addr, data->offset));
1592 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1593 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1594 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1595 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1597 if (data->to)
1598 emit_insn ((*genfun) (to1, from1));
1599 else
1600 emit_single_push_insn (mode, from1, NULL);
1602 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1603 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1604 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1605 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1607 if (! data->reverse)
1608 data->offset += size;
1610 data->len -= size;
1614 /* Emit code to move a block Y to a block X.
1615 This may be done with string-move instructions,
1616 with multiple scalar move instructions, or with a library call.
1618 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1619 with mode BLKmode.
1620 SIZE is an rtx that says how long they are.
1621 ALIGN is the maximum alignment we can assume they have.
1623 Return the address of the new block, if memcpy is called and returns it,
1624 0 otherwise. */
1626 rtx
1627 emit_block_move (x, y, size, align)
1628 rtx x, y;
1629 rtx size;
1630 unsigned int align;
1632 rtx retval = 0;
1633 #ifdef TARGET_MEM_FUNCTIONS
1634 static tree fn;
1635 tree call_expr, arg_list;
1636 #endif
1638 if (GET_MODE (x) != BLKmode)
1639 abort ();
1641 if (GET_MODE (y) != BLKmode)
1642 abort ();
1644 x = protect_from_queue (x, 1);
1645 y = protect_from_queue (y, 0);
1646 size = protect_from_queue (size, 0);
1648 if (GET_CODE (x) != MEM)
1649 abort ();
1650 if (GET_CODE (y) != MEM)
1651 abort ();
1652 if (size == 0)
1653 abort ();
1655 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1656 move_by_pieces (x, y, INTVAL (size), align);
1657 else
1659 /* Try the most limited insn first, because there's no point
1660 including more than one in the machine description unless
1661 the more limited one has some advantage. */
1663 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1664 enum machine_mode mode;
1666 /* Since this is a move insn, we don't care about volatility. */
1667 volatile_ok = 1;
1669 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1670 mode = GET_MODE_WIDER_MODE (mode))
1672 enum insn_code code = movstr_optab[(int) mode];
1673 insn_operand_predicate_fn pred;
1675 if (code != CODE_FOR_nothing
1676 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1677 here because if SIZE is less than the mode mask, as it is
1678 returned by the macro, it will definitely be less than the
1679 actual mode mask. */
1680 && ((GET_CODE (size) == CONST_INT
1681 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1682 <= (GET_MODE_MASK (mode) >> 1)))
1683 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1684 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1685 || (*pred) (x, BLKmode))
1686 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1687 || (*pred) (y, BLKmode))
1688 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1689 || (*pred) (opalign, VOIDmode)))
1691 rtx op2;
1692 rtx last = get_last_insn ();
1693 rtx pat;
1695 op2 = convert_to_mode (mode, size, 1);
1696 pred = insn_data[(int) code].operand[2].predicate;
1697 if (pred != 0 && ! (*pred) (op2, mode))
1698 op2 = copy_to_mode_reg (mode, op2);
1700 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1701 if (pat)
1703 emit_insn (pat);
1704 volatile_ok = 0;
1705 return 0;
1707 else
1708 delete_insns_since (last);
1712 volatile_ok = 0;
1714 /* X, Y, or SIZE may have been passed through protect_from_queue.
1716 It is unsafe to save the value generated by protect_from_queue
1717 and reuse it later. Consider what happens if emit_queue is
1718 called before the return value from protect_from_queue is used.
1720 Expansion of the CALL_EXPR below will call emit_queue before
1721 we are finished emitting RTL for argument setup. So if we are
1722 not careful we could get the wrong value for an argument.
1724 To avoid this problem we go ahead and emit code to copy X, Y &
1725 SIZE into new pseudos. We can then place those new pseudos
1726 into an RTL_EXPR and use them later, even after a call to
1727 emit_queue.
1729 Note this is not strictly needed for library calls since they
1730 do not call emit_queue before loading their arguments. However,
1731 we may need to have library calls call emit_queue in the future
1732 since failing to do so could cause problems for targets which
1733 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1734 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1735 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1737 #ifdef TARGET_MEM_FUNCTIONS
1738 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1739 #else
1740 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1741 TREE_UNSIGNED (integer_type_node));
1742 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1743 #endif
1745 #ifdef TARGET_MEM_FUNCTIONS
1746 /* It is incorrect to use the libcall calling conventions to call
1747 memcpy in this context.
1749 This could be a user call to memcpy and the user may wish to
1750 examine the return value from memcpy.
1752 For targets where libcalls and normal calls have different conventions
1753 for returning pointers, we could end up generating incorrect code.
1755 So instead of using a libcall sequence we build up a suitable
1756 CALL_EXPR and expand the call in the normal fashion. */
1757 if (fn == NULL_TREE)
1759 tree fntype;
1761 /* This was copied from except.c, I don't know if all this is
1762 necessary in this context or not. */
1763 fn = get_identifier ("memcpy");
1764 fntype = build_pointer_type (void_type_node);
1765 fntype = build_function_type (fntype, NULL_TREE);
1766 fn = build_decl (FUNCTION_DECL, fn, fntype);
1767 ggc_add_tree_root (&fn, 1);
1768 DECL_EXTERNAL (fn) = 1;
1769 TREE_PUBLIC (fn) = 1;
1770 DECL_ARTIFICIAL (fn) = 1;
1771 make_decl_rtl (fn, NULL);
1772 assemble_external (fn);
1775 /* We need to make an argument list for the function call.
1777 memcpy has three arguments, the first two are void * addresses and
1778 the last is a size_t byte count for the copy. */
1779 arg_list
1780 = build_tree_list (NULL_TREE,
1781 make_tree (build_pointer_type (void_type_node), x));
1782 TREE_CHAIN (arg_list)
1783 = build_tree_list (NULL_TREE,
1784 make_tree (build_pointer_type (void_type_node), y));
1785 TREE_CHAIN (TREE_CHAIN (arg_list))
1786 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1787 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1789 /* Now we have to build up the CALL_EXPR itself. */
1790 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1791 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1792 call_expr, arg_list, NULL_TREE);
1793 TREE_SIDE_EFFECTS (call_expr) = 1;
1795 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1796 #else
1797 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1798 VOIDmode, 3, y, Pmode, x, Pmode,
1799 convert_to_mode (TYPE_MODE (integer_type_node), size,
1800 TREE_UNSIGNED (integer_type_node)),
1801 TYPE_MODE (integer_type_node));
1802 #endif
1805 return retval;
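/* Illustrative sketch, added annotation -- not part of the original file.
   DEST_MEM and SRC_MEM are hypothetical BLKmode MEMs; SIZE is an rtx byte
   count and ALIGN is in bits.  A small constant size like this one ends up
   in move_by_pieces above.  */
#if 0
  emit_block_move (dest_mem, src_mem, GEN_INT (16), 32);
#endif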
1808 /* Copy all or part of a value X into registers starting at REGNO.
1809 The number of registers to be filled is NREGS. */
1811 void
1812 move_block_to_reg (regno, x, nregs, mode)
1813 int regno;
1814 rtx x;
1815 int nregs;
1816 enum machine_mode mode;
1818 int i;
1819 #ifdef HAVE_load_multiple
1820 rtx pat;
1821 rtx last;
1822 #endif
1824 if (nregs == 0)
1825 return;
1827 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1828 x = validize_mem (force_const_mem (mode, x));
1830 /* See if the machine can do this with a load multiple insn. */
1831 #ifdef HAVE_load_multiple
1832 if (HAVE_load_multiple)
1834 last = get_last_insn ();
1835 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1836 GEN_INT (nregs));
1837 if (pat)
1839 emit_insn (pat);
1840 return;
1842 else
1843 delete_insns_since (last);
1845 #endif
1847 for (i = 0; i < nregs; i++)
1848 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1849 operand_subword_force (x, i, mode));
1852 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853 The number of registers to be filled is NREGS. SIZE indicates the number
1854 of bytes in the object X. */
1856 void
1857 move_block_from_reg (regno, x, nregs, size)
1858 int regno;
1859 rtx x;
1860 int nregs;
1861 int size;
1863 int i;
1864 #ifdef HAVE_store_multiple
1865 rtx pat;
1866 rtx last;
1867 #endif
1868 enum machine_mode mode;
1870 if (nregs == 0)
1871 return;
1873 /* If SIZE is that of a mode no bigger than a word, just use that
1874 mode's store operation. */
1875 if (size <= UNITS_PER_WORD
1876 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1878 emit_move_insn (change_address (x, mode, NULL),
1879 gen_rtx_REG (mode, regno));
1880 return;
1883 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884 to the left before storing to memory. Note that the previous test
1885 doesn't handle all cases (e.g. SIZE == 3). */
1886 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1888 rtx tem = operand_subword (x, 0, 1, BLKmode);
1889 rtx shift;
1891 if (tem == 0)
1892 abort ();
1894 shift = expand_shift (LSHIFT_EXPR, word_mode,
1895 gen_rtx_REG (word_mode, regno),
1896 build_int_2 ((UNITS_PER_WORD - size)
1897 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1898 emit_move_insn (tem, shift);
1899 return;
1902 /* See if the machine can do this with a store multiple insn. */
1903 #ifdef HAVE_store_multiple
1904 if (HAVE_store_multiple)
1906 last = get_last_insn ();
1907 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1908 GEN_INT (nregs));
1909 if (pat)
1911 emit_insn (pat);
1912 return;
1914 else
1915 delete_insns_since (last);
1917 #endif
1919 for (i = 0; i < nregs; i++)
1921 rtx tem = operand_subword (x, i, 1, BLKmode);
1923 if (tem == 0)
1924 abort ();
1926 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1930 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1931 registers represented by a PARALLEL. SSIZE represents the total size of
1932 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1933 SRC in bits. */
1934 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1935 the balance will be in what would be the low-order memory addresses, i.e.
1936 left justified for big endian, right justified for little endian. This
1937 happens to be true for the targets currently using this support. If this
1938 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1939 would be needed. */
1941 void
1942 emit_group_load (dst, orig_src, ssize, align)
1943 rtx dst, orig_src;
1944 unsigned int align;
1945 int ssize;
1947 rtx *tmps, src;
1948 int start, i;
1950 if (GET_CODE (dst) != PARALLEL)
1951 abort ();
1953 /* Check for a NULL entry, used to indicate that the parameter goes
1954 both on the stack and in registers. */
1955 if (XEXP (XVECEXP (dst, 0, 0), 0))
1956 start = 0;
1957 else
1958 start = 1;
1960 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1962 /* Process the pieces. */
1963 for (i = start; i < XVECLEN (dst, 0); i++)
1965 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967 unsigned int bytelen = GET_MODE_SIZE (mode);
1968 int shift = 0;
1970 /* Handle trailing fragments that run over the size of the struct. */
1971 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1973 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974 bytelen = ssize - bytepos;
1975 if (bytelen <= 0)
1976 abort ();
1979 /* If we won't be loading directly from memory, protect the real source
1980 from strange tricks we might play; but make sure that the source can
1981 be loaded directly into the destination. */
1982 src = orig_src;
1983 if (GET_CODE (orig_src) != MEM
1984 && (!CONSTANT_P (orig_src)
1985 || (GET_MODE (orig_src) != mode
1986 && GET_MODE (orig_src) != VOIDmode)))
1988 if (GET_MODE (orig_src) == VOIDmode)
1989 src = gen_reg_rtx (mode);
1990 else
1991 src = gen_reg_rtx (GET_MODE (orig_src));
1992 emit_move_insn (src, orig_src);
1995 /* Optimize the access just a bit. */
1996 if (GET_CODE (src) == MEM
1997 && align >= GET_MODE_ALIGNMENT (mode)
1998 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1999 && bytelen == GET_MODE_SIZE (mode))
2001 tmps[i] = gen_reg_rtx (mode);
2002 emit_move_insn (tmps[i],
2003 change_address (src, mode,
2004 plus_constant (XEXP (src, 0),
2005 bytepos)));
2007 else if (GET_CODE (src) == CONCAT)
2009 if (bytepos == 0
2010 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2011 tmps[i] = XEXP (src, 0);
2012 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2013 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2014 tmps[i] = XEXP (src, 1);
2015 else
2016 abort ();
2018 else if (CONSTANT_P (src)
2019 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2020 tmps[i] = src;
2021 else
2022 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2023 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2024 mode, mode, align, ssize);
2026 if (BYTES_BIG_ENDIAN && shift)
2027 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2028 tmps[i], 0, OPTAB_WIDEN);
2031 emit_queue ();
2033 /* Copy the extracted pieces into the proper (probable) hard regs. */
2034 for (i = start; i < XVECLEN (dst, 0); i++)
2035 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
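/* Illustrative sketch, not part of the original file: the trailing-fragment
   arithmetic used in the loop above, reduced to plain integers.  When a
   register piece of MODESIZE bytes starting at BYTEPOS would run past a
   struct of SSIZE bytes, BYTELEN is clipped and, on big-endian targets, the
   loaded value is shifted left so the data lands in the low-order addresses.
   The helper name is hypothetical and BITS_PER_UNIT is assumed to be 8.  */

static int
group_load_fragment_shift (int ssize, int bytepos, int modesize, int *bytelen)
{
  int shift = 0;

  *bytelen = modesize;
  if (ssize >= 0 && bytepos + *bytelen > ssize)
    {
      /* Same computation as in emit_group_load above.  */
      shift = (*bytelen - (ssize - bytepos)) * 8;
      *bytelen = ssize - bytepos;
    }

  /* Example: a 6-byte struct loaded as two 4-byte pieces gives
     bytepos 0 -> bytelen 4, shift 0 and bytepos 4 -> bytelen 2, shift 16.  */
  return shift;
}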
2038 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2039 registers represented by a PARALLEL. SSIZE represents the total size of
2040 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2042 void
2043 emit_group_store (orig_dst, src, ssize, align)
2044 rtx orig_dst, src;
2045 int ssize;
2046 unsigned int align;
2048 rtx *tmps, dst;
2049 int start, i;
2051 if (GET_CODE (src) != PARALLEL)
2052 abort ();
2054 /* Check for a NULL entry, used to indicate that the parameter goes
2055 both on the stack and in registers. */
2056 if (XEXP (XVECEXP (src, 0, 0), 0))
2057 start = 0;
2058 else
2059 start = 1;
2061 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2063 /* Copy the (probable) hard regs into pseudos. */
2064 for (i = start; i < XVECLEN (src, 0); i++)
2066 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2067 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2068 emit_move_insn (tmps[i], reg);
2070 emit_queue ();
2072 /* If we won't be storing directly into memory, protect the real destination
2073 from strange tricks we might play. */
2074 dst = orig_dst;
2075 if (GET_CODE (dst) == PARALLEL)
2077 rtx temp;
2079 /* We can get a PARALLEL dst if there is a conditional expression in
2080 a return statement. In that case, the dst and src are the same,
2081 so no action is necessary. */
2082 if (rtx_equal_p (dst, src))
2083 return;
2085 /* It is unclear if we can ever reach here, but we may as well handle
2086 it. Allocate a temporary, and split this into a store/load to/from
2087 the temporary. */
2089 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2090 emit_group_store (temp, src, ssize, align);
2091 emit_group_load (dst, temp, ssize, align);
2092 return;
2094 else if (GET_CODE (dst) != MEM)
2096 dst = gen_reg_rtx (GET_MODE (orig_dst));
2097 /* Make life a bit easier for combine. */
2098 emit_move_insn (dst, const0_rtx);
2101 /* Process the pieces. */
2102 for (i = start; i < XVECLEN (src, 0); i++)
2104 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2105 enum machine_mode mode = GET_MODE (tmps[i]);
2106 unsigned int bytelen = GET_MODE_SIZE (mode);
2108 /* Handle trailing fragments that run over the size of the struct. */
2109 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2111 if (BYTES_BIG_ENDIAN)
2113 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2114 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2115 tmps[i], 0, OPTAB_WIDEN);
2117 bytelen = ssize - bytepos;
2120 /* Optimize the access just a bit. */
2121 if (GET_CODE (dst) == MEM
2122 && align >= GET_MODE_ALIGNMENT (mode)
2123 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2124 && bytelen == GET_MODE_SIZE (mode))
2125 emit_move_insn (change_address (dst, mode,
2126 plus_constant (XEXP (dst, 0),
2127 bytepos)),
2128 tmps[i]);
2129 else
2130 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2131 mode, tmps[i], align, ssize);
2134 emit_queue ();
2136 /* Copy from the pseudo into the (probable) hard reg. */
2137 if (GET_CODE (dst) == REG)
2138 emit_move_insn (orig_dst, dst);
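/* Conceptual sketch, not part of the original file: the PARALLEL handled by
   emit_group_load and emit_group_store is essentially a list of (register,
   byte offset) pairs describing where each piece of the block lives.  The
   plain-C model below (hypothetical types, no rtl) shows the scatter that
   emit_group_store performs when it writes each pseudo back at its bytepos.  */

struct group_piece_model
{
  int regno;     /* which register holds the piece (model only) */
  int bytepos;   /* byte offset of the piece within the block */
  int bytelen;   /* size of the piece in bytes */
};

static void
model_group_store (unsigned char *dst, int ssize,
                   unsigned char regs[][8],
                   const struct group_piece_model *pieces, int npieces)
{
  int i, j;

  for (i = 0; i < npieces; i++)
    {
      int len = pieces[i].bytelen;

      /* Trailing fragments are clipped just as in the real code.  */
      if (ssize >= 0 && pieces[i].bytepos + len > ssize)
        len = ssize - pieces[i].bytepos;

      for (j = 0; j < len; j++)
        dst[pieces[i].bytepos + j] = regs[pieces[i].regno][j];
    }
}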
2141 /* Generate code to copy a BLKmode object of TYPE out of a
2142 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2143 is null, a stack temporary is created. TGTBLK is returned.
2145 The primary purpose of this routine is to handle functions
2146 that return BLKmode structures in registers. Some machines
2147 (the PA for example) want to return all small structures
2148 in registers regardless of the structure's alignment. */
2151 copy_blkmode_from_reg (tgtblk, srcreg, type)
2152 rtx tgtblk;
2153 rtx srcreg;
2154 tree type;
2156 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2157 rtx src = NULL, dst = NULL;
2158 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2159 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2161 if (tgtblk == 0)
2163 tgtblk = assign_temp (build_qualified_type (type,
2164 (TYPE_QUALS (type)
2165 | TYPE_QUAL_CONST)),
2166 0, 1, 1);
2167 preserve_temp_slots (tgtblk);
2170 /* This code assumes srcreg is at least a full word. If it isn't,
2171 copy it into a new pseudo which is a full word. */
2172 if (GET_MODE (srcreg) != BLKmode
2173 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2174 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2176 /* Structures whose size is not a multiple of a word are aligned
2177 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2178 machine, this means we must skip the empty high order bytes when
2179 calculating the bit offset. */
2180 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2181 big_endian_correction
2182 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2184 /* Copy the structure BITSIZE bits at a time.
2186 We could probably emit more efficient code for machines which do not use
2187 strict alignment, but it doesn't seem worth the effort at the current
2188 time. */
2189 for (bitpos = 0, xbitpos = big_endian_correction;
2190 bitpos < bytes * BITS_PER_UNIT;
2191 bitpos += bitsize, xbitpos += bitsize)
2193 /* We need a new source operand each time xbitpos is on a
2194 word boundary and when xbitpos == big_endian_correction
2195 (the first time through). */
2196 if (xbitpos % BITS_PER_WORD == 0
2197 || xbitpos == big_endian_correction)
2198 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2200 /* We need a new destination operand each time bitpos is on
2201 a word boundary. */
2202 if (bitpos % BITS_PER_WORD == 0)
2203 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2205 /* Use xbitpos for the source extraction (right justified) and
2206 bitpos for the destination store (left justified). */
2207 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2208 extract_bit_field (src, bitsize,
2209 xbitpos % BITS_PER_WORD, 1,
2210 NULL_RTX, word_mode, word_mode,
2211 bitsize, BITS_PER_WORD),
2212 bitsize, BITS_PER_WORD);
2215 return tgtblk;
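/* Worked example, not part of the original file: the big_endian_correction
   computed above, for a hypothetical target with UNITS_PER_WORD == 4 and
   BITS_PER_WORD == 32.  A 5-byte structure is right-justified in its final
   register word, so extraction must skip 32 - (5 % 4) * 8 = 24 empty
   high-order bits; a size that is a whole number of words needs no
   correction.  The helper name and parameters are hypothetical.  */

static unsigned int
big_endian_correction_example (unsigned int bytes)
{
  const unsigned int units_per_word = 4;   /* assumed for the example */
  const unsigned int bits_per_word = 32;
  const unsigned int bits_per_unit = 8;

  if (bytes % units_per_word == 0)
    return 0;
  /* big_endian_correction_example (5) == 24, (6) == 16, (7) == 8.  */
  return bits_per_word - (bytes % units_per_word) * bits_per_unit;
}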
2218 /* Add a USE expression for REG to the (possibly empty) list pointed
2219 to by CALL_FUSAGE. REG must denote a hard register. */
2221 void
2222 use_reg (call_fusage, reg)
2223 rtx *call_fusage, reg;
2225 if (GET_CODE (reg) != REG
2226 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2227 abort ();
2229 *call_fusage
2230 = gen_rtx_EXPR_LIST (VOIDmode,
2231 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2234 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2235 starting at REGNO. All of these registers must be hard registers. */
2237 void
2238 use_regs (call_fusage, regno, nregs)
2239 rtx *call_fusage;
2240 int regno;
2241 int nregs;
2243 int i;
2245 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2246 abort ();
2248 for (i = 0; i < nregs; i++)
2249 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2252 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2253 PARALLEL REGS. This is for calls that pass values in multiple
2254 non-contiguous locations. The Irix 6 ABI has examples of this. */
2256 void
2257 use_group_regs (call_fusage, regs)
2258 rtx *call_fusage;
2259 rtx regs;
2261 int i;
2263 for (i = 0; i < XVECLEN (regs, 0); i++)
2265 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2267 /* A NULL entry means the parameter goes both on the stack and in
2268 registers. This can also be a MEM for targets that pass values
2269 partially on the stack and partially in registers. */
2270 if (reg != 0 && GET_CODE (reg) == REG)
2271 use_reg (call_fusage, reg);
2277 can_store_by_pieces (len, constfun, constfundata, align)
2278 unsigned HOST_WIDE_INT len;
2279 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2280 PTR constfundata;
2281 unsigned int align;
2283 unsigned HOST_WIDE_INT max_size, l;
2284 HOST_WIDE_INT offset = 0;
2285 enum machine_mode mode, tmode;
2286 enum insn_code icode;
2287 int reverse;
2288 rtx cst;
2290 if (! MOVE_BY_PIECES_P (len, align))
2291 return 0;
2293 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2294 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2295 align = MOVE_MAX * BITS_PER_UNIT;
2297 /* We would first store what we can in the largest integer mode, then go to
2298 successively smaller modes. */
2300 for (reverse = 0;
2301 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2302 reverse++)
2304 l = len;
2305 mode = VOIDmode;
2306 max_size = MOVE_MAX_PIECES + 1;
2307 while (max_size > 1)
2309 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2310 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2311 if (GET_MODE_SIZE (tmode) < max_size)
2312 mode = tmode;
2314 if (mode == VOIDmode)
2315 break;
2317 icode = mov_optab->handlers[(int) mode].insn_code;
2318 if (icode != CODE_FOR_nothing
2319 && align >= GET_MODE_ALIGNMENT (mode))
2321 unsigned int size = GET_MODE_SIZE (mode);
2323 while (l >= size)
2325 if (reverse)
2326 offset -= size;
2328 cst = (*constfun) (constfundata, offset, mode);
2329 if (!LEGITIMATE_CONSTANT_P (cst))
2330 return 0;
2332 if (!reverse)
2333 offset += size;
2335 l -= size;
2339 max_size = GET_MODE_SIZE (mode);
2342 /* The code above should have handled everything. */
2343 if (l != 0)
2344 abort ();
2347 return 1;
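/* Illustrative sketch, not part of the original file: the mode-walking loop
   above always selects the widest integer mode narrower than max_size, so a
   constant length is consumed greedily in power-of-two chunks (alignment and
   LEGITIMATE_CONSTANT_P checks omitted here).  With 8-byte pieces as the
   widest mode, a 23-byte block decomposes as 8 + 8 + 4 + 2 + 1.  The helper
   below is hypothetical.  */

static int
store_by_pieces_chunks (unsigned long len, unsigned int max_piece,
                        unsigned int *chunks, int max_chunks)
{
  int n = 0;
  unsigned int size;

  for (size = max_piece; size >= 1 && len > 0; size /= 2)
    while (len >= size && n < max_chunks)
      {
        chunks[n++] = size;
        len -= size;
      }

  return n;   /* number of stores that would be emitted */
}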
2350 /* Generate several move instructions to store LEN bytes generated by
2351 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2352 pointer which will be passed as argument in every CONSTFUN call.
2353 ALIGN is maximum alignment we can assume. */
2355 void
2356 store_by_pieces (to, len, constfun, constfundata, align)
2357 rtx to;
2358 unsigned HOST_WIDE_INT len;
2359 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2360 PTR constfundata;
2361 unsigned int align;
2363 struct store_by_pieces data;
2365 if (! MOVE_BY_PIECES_P (len, align))
2366 abort ();
2367 to = protect_from_queue (to, 1);
2368 data.constfun = constfun;
2369 data.constfundata = constfundata;
2370 data.len = len;
2371 data.to = to;
2372 store_by_pieces_1 (&data, align);
2375 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2376 rtx with BLKmode). The caller must pass TO through protect_from_queue
2377 before calling. ALIGN is maximum alignment we can assume. */
2379 static void
2380 clear_by_pieces (to, len, align)
2381 rtx to;
2382 unsigned HOST_WIDE_INT len;
2383 unsigned int align;
2385 struct store_by_pieces data;
2387 data.constfun = clear_by_pieces_1;
2388 data.constfundata = NULL;
2389 data.len = len;
2390 data.to = to;
2391 store_by_pieces_1 (&data, align);
2394 /* Callback routine for clear_by_pieces.
2395 Return const0_rtx unconditionally. */
2397 static rtx
2398 clear_by_pieces_1 (data, offset, mode)
2399 PTR data ATTRIBUTE_UNUSED;
2400 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2401 enum machine_mode mode ATTRIBUTE_UNUSED;
2403 return const0_rtx;
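/* Conceptual sketch, not part of the original file: store_by_pieces and
   clear_by_pieces share one engine and differ only in the constfun callback
   that produces the constant for each (offset, mode) pair; clear_by_pieces_1
   above simply returns zero every time.  The plain-C analogue below shows
   the same callback contract (hypothetical names, no rtl, byte order
   ignored in this model).  */

typedef unsigned long piece_constfun_model (void *data, unsigned long offset,
                                            unsigned int size);

static unsigned long
zero_piece_model (void *data, unsigned long offset, unsigned int size)
{
  (void) data; (void) offset; (void) size;
  return 0;   /* analogue of clear_by_pieces_1 */
}

static void
model_store_by_pieces (unsigned char *to, unsigned long len,
                       piece_constfun_model *constfun, void *data)
{
  unsigned long offset = 0;

  while (len > 0)
    {
      unsigned int size = len >= sizeof (unsigned long)
                          ? (unsigned int) sizeof (unsigned long) : 1;
      unsigned long cst = constfun (data, offset, size);
      unsigned int i;

      for (i = 0; i < size; i++)             /* one "move insn" per piece */
        to[offset + i] = (unsigned char) (cst >> (i * 8));

      offset += size;
      len -= size;
    }
}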
2406 /* Subroutine of clear_by_pieces and store_by_pieces.
2407 Generate several move instructions to store LEN bytes of block TO. (A MEM
2408 rtx with BLKmode). The caller must pass TO through protect_from_queue
2409 before calling. ALIGN is maximum alignment we can assume. */
2411 static void
2412 store_by_pieces_1 (data, align)
2413 struct store_by_pieces *data;
2414 unsigned int align;
2416 rtx to_addr = XEXP (data->to, 0);
2417 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2418 enum machine_mode mode = VOIDmode, tmode;
2419 enum insn_code icode;
2421 data->offset = 0;
2422 data->to_addr = to_addr;
2423 data->autinc_to
2424 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2425 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2427 data->explicit_inc_to = 0;
2428 data->reverse
2429 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2430 if (data->reverse)
2431 data->offset = data->len;
2433 /* If storing requires more than two move insns,
2434 copy addresses to registers (to make displacements shorter)
2435 and use post-increment if available. */
2436 if (!data->autinc_to
2437 && move_by_pieces_ninsns (data->len, align) > 2)
2439 /* Determine the main mode we'll be using. */
2440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2442 if (GET_MODE_SIZE (tmode) < max_size)
2443 mode = tmode;
2445 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2447 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2448 data->autinc_to = 1;
2449 data->explicit_inc_to = -1;
2452 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2453 && ! data->autinc_to)
2455 data->to_addr = copy_addr_to_reg (to_addr);
2456 data->autinc_to = 1;
2457 data->explicit_inc_to = 1;
2460 if ( !data->autinc_to && CONSTANT_P (to_addr))
2461 data->to_addr = copy_addr_to_reg (to_addr);
2464 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2465 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2466 align = MOVE_MAX * BITS_PER_UNIT;
2468 /* First store what we can in the largest integer mode, then go to
2469 successively smaller modes. */
2471 while (max_size > 1)
2473 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2474 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2475 if (GET_MODE_SIZE (tmode) < max_size)
2476 mode = tmode;
2478 if (mode == VOIDmode)
2479 break;
2481 icode = mov_optab->handlers[(int) mode].insn_code;
2482 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2483 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2485 max_size = GET_MODE_SIZE (mode);
2488 /* The code above should have handled everything. */
2489 if (data->len != 0)
2490 abort ();
2493 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2494 with move instructions for mode MODE. GENFUN is the gen_... function
2495 to make a move insn for that mode. DATA has all the other info. */
2497 static void
2498 store_by_pieces_2 (genfun, mode, data)
2499 rtx (*genfun) PARAMS ((rtx, ...));
2500 enum machine_mode mode;
2501 struct store_by_pieces *data;
2503 unsigned int size = GET_MODE_SIZE (mode);
2504 rtx to1, cst;
2506 while (data->len >= size)
2508 if (data->reverse)
2509 data->offset -= size;
2511 if (data->autinc_to)
2513 to1 = gen_rtx_MEM (mode, data->to_addr);
2514 MEM_COPY_ATTRIBUTES (to1, data->to);
2516 else
2517 to1 = change_address (data->to, mode,
2518 plus_constant (data->to_addr, data->offset));
2520 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2521 emit_insn (gen_add2_insn (data->to_addr,
2522 GEN_INT (-(HOST_WIDE_INT) size)));
2524 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2525 emit_insn ((*genfun) (to1, cst));
2527 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2528 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2530 if (! data->reverse)
2531 data->offset += size;
2533 data->len -= size;
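/* Illustrative sketch, not part of the original file: the offset bookkeeping
   of store_by_pieces_2.  When the destination address uses a pre-decrement
   style (data->reverse), the offset starts at the total length and each piece
   first subtracts its size; otherwise the offset starts at zero and advances
   after the store.  The helper below records the store offsets in order and
   is hypothetical.  */

static int
model_piece_offsets (unsigned long len, unsigned long size, int reverse,
                     unsigned long *offsets, int max_offsets)
{
  unsigned long offset = reverse ? len : 0;
  int n = 0;

  while (len >= size && n < max_offsets)
    {
      if (reverse)
        offset -= size;            /* analogue of the pre-decrement branch */

      offsets[n++] = offset;       /* where this piece is stored */

      if (!reverse)
        offset += size;            /* analogue of the post-increment branch */

      len -= size;
    }

  /* For len == 8, size == 4: forward order is 0, 4; reverse order is 4, 0.  */
  return n;
}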
2537 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2538 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2540 If we call a function that returns the length of the block, return it. */
2543 clear_storage (object, size, align)
2544 rtx object;
2545 rtx size;
2546 unsigned int align;
2548 #ifdef TARGET_MEM_FUNCTIONS
2549 static tree fn;
2550 tree call_expr, arg_list;
2551 #endif
2552 rtx retval = 0;
2554 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2555 just move a zero. Otherwise, do this a piece at a time. */
2556 if (GET_MODE (object) != BLKmode
2557 && GET_CODE (size) == CONST_INT
2558 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2559 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2560 else
2562 object = protect_from_queue (object, 1);
2563 size = protect_from_queue (size, 0);
2565 if (GET_CODE (size) == CONST_INT
2566 && MOVE_BY_PIECES_P (INTVAL (size), align))
2567 clear_by_pieces (object, INTVAL (size), align);
2568 else
2570 /* Try the most limited insn first, because there's no point
2571 including more than one in the machine description unless
2572 the more limited one has some advantage. */
2574 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2575 enum machine_mode mode;
2577 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2578 mode = GET_MODE_WIDER_MODE (mode))
2580 enum insn_code code = clrstr_optab[(int) mode];
2581 insn_operand_predicate_fn pred;
2583 if (code != CODE_FOR_nothing
2584 /* We don't need MODE to be narrower than
2585 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2586 the mode mask, as it is returned by the macro, it will
2587 definitely be less than the actual mode mask. */
2588 && ((GET_CODE (size) == CONST_INT
2589 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2590 <= (GET_MODE_MASK (mode) >> 1)))
2591 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2592 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2593 || (*pred) (object, BLKmode))
2594 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2595 || (*pred) (opalign, VOIDmode)))
2597 rtx op1;
2598 rtx last = get_last_insn ();
2599 rtx pat;
2601 op1 = convert_to_mode (mode, size, 1);
2602 pred = insn_data[(int) code].operand[1].predicate;
2603 if (pred != 0 && ! (*pred) (op1, mode))
2604 op1 = copy_to_mode_reg (mode, op1);
2606 pat = GEN_FCN ((int) code) (object, op1, opalign);
2607 if (pat)
2609 emit_insn (pat);
2610 return 0;
2612 else
2613 delete_insns_since (last);
2617 /* OBJECT or SIZE may have been passed through protect_from_queue.
2619 It is unsafe to save the value generated by protect_from_queue
2620 and reuse it later. Consider what happens if emit_queue is
2621 called before the return value from protect_from_queue is used.
2623 Expansion of the CALL_EXPR below will call emit_queue before
2624 we are finished emitting RTL for argument setup. So if we are
2625 not careful we could get the wrong value for an argument.
2627 To avoid this problem we go ahead and emit code to copy OBJECT
2628 and SIZE into new pseudos. We can then place those new pseudos
2629 into an RTL_EXPR and use them later, even after a call to
2630 emit_queue.
2632 Note this is not strictly needed for library calls since they
2633 do not call emit_queue before loading their arguments. However,
2634 we may need to have library calls call emit_queue in the future
2635 since failing to do so could cause problems for targets which
2636 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2637 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2639 #ifdef TARGET_MEM_FUNCTIONS
2640 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2641 #else
2642 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2643 TREE_UNSIGNED (integer_type_node));
2644 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2645 #endif
2647 #ifdef TARGET_MEM_FUNCTIONS
2648 /* It is incorrect to use the libcall calling conventions to call
2649 memset in this context.
2651 This could be a user call to memset and the user may wish to
2652 examine the return value from memset.
2654 For targets where libcalls and normal calls have different
2655 conventions for returning pointers, we could end up generating
2656 incorrect code.
2658 So instead of using a libcall sequence we build up a suitable
2659 CALL_EXPR and expand the call in the normal fashion. */
2660 if (fn == NULL_TREE)
2662 tree fntype;
2664 /* This was copied from except.c, I don't know if all this is
2665 necessary in this context or not. */
2666 fn = get_identifier ("memset");
2667 fntype = build_pointer_type (void_type_node);
2668 fntype = build_function_type (fntype, NULL_TREE);
2669 fn = build_decl (FUNCTION_DECL, fn, fntype);
2670 ggc_add_tree_root (&fn, 1);
2671 DECL_EXTERNAL (fn) = 1;
2672 TREE_PUBLIC (fn) = 1;
2673 DECL_ARTIFICIAL (fn) = 1;
2674 make_decl_rtl (fn, NULL);
2675 assemble_external (fn);
2678 /* We need to make an argument list for the function call.
2680 memset has three arguments, the first is a void * address, the
2681 second an integer with the initialization value, the last is a
2682 size_t byte count for the copy. */
2683 arg_list
2684 = build_tree_list (NULL_TREE,
2685 make_tree (build_pointer_type (void_type_node),
2686 object));
2687 TREE_CHAIN (arg_list)
2688 = build_tree_list (NULL_TREE,
2689 make_tree (integer_type_node, const0_rtx));
2690 TREE_CHAIN (TREE_CHAIN (arg_list))
2691 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2692 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2694 /* Now we have to build up the CALL_EXPR itself. */
2695 call_expr = build1 (ADDR_EXPR,
2696 build_pointer_type (TREE_TYPE (fn)), fn);
2697 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2698 call_expr, arg_list, NULL_TREE);
2699 TREE_SIDE_EFFECTS (call_expr) = 1;
2701 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2702 #else
2703 emit_library_call (bzero_libfunc, LCT_NORMAL,
2704 VOIDmode, 2, object, Pmode, size,
2705 TYPE_MODE (integer_type_node));
2706 #endif
2710 return retval;
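/* Conceptual sketch, not part of the original file: clear_storage picks one
   of three strategies in order of preference -- a single move of zero for a
   non-BLKmode object, inline stores for small constant sizes, and otherwise a
   clrstr pattern or a call to memset (or bzero without TARGET_MEM_FUNCTIONS).
   The plain-C analogue below uses a hypothetical threshold in place of
   MOVE_BY_PIECES_P and no rtl.  */

#include <string.h>

#define MODEL_CLEAR_BY_PIECES_MAX 16   /* stand-in for MOVE_BY_PIECES_P */

static void
model_clear_storage (void *object, unsigned long size)
{
  if (size <= MODEL_CLEAR_BY_PIECES_MAX)
    {
      /* "By pieces": the compiler would emit a handful of stores inline.  */
      unsigned char *p = (unsigned char *) object;
      unsigned long i;

      for (i = 0; i < size; i++)
        p[i] = 0;
    }
  else
    /* Larger or variable sizes fall back to the library call.  */
    memset (object, 0, size);
}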
2713 /* Generate code to copy Y into X.
2714 Both Y and X must have the same mode, except that
2715 Y can be a constant with VOIDmode.
2716 This mode cannot be BLKmode; use emit_block_move for that.
2718 Return the last instruction emitted. */
2721 emit_move_insn (x, y)
2722 rtx x, y;
2724 enum machine_mode mode = GET_MODE (x);
2725 rtx y_cst = NULL_RTX;
2726 rtx last_insn;
2728 x = protect_from_queue (x, 1);
2729 y = protect_from_queue (y, 0);
2731 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2732 abort ();
2734 /* Never force constant_p_rtx to memory. */
2735 if (GET_CODE (y) == CONSTANT_P_RTX)
2737 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2739 y_cst = y;
2740 y = force_const_mem (mode, y);
2743 /* If X or Y are memory references, verify that their addresses are valid
2744 for the machine. */
2745 if (GET_CODE (x) == MEM
2746 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2747 && ! push_operand (x, GET_MODE (x)))
2748 || (flag_force_addr
2749 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2750 x = change_address (x, VOIDmode, XEXP (x, 0));
2752 if (GET_CODE (y) == MEM
2753 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2754 || (flag_force_addr
2755 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2756 y = change_address (y, VOIDmode, XEXP (y, 0));
2758 if (mode == BLKmode)
2759 abort ();
2761 last_insn = emit_move_insn_1 (x, y);
2763 if (y_cst && GET_CODE (x) == REG)
2764 REG_NOTES (last_insn)
2765 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2767 return last_insn;
2770 /* Low level part of emit_move_insn.
2771 Called just like emit_move_insn, but assumes X and Y
2772 are basically valid. */
2775 emit_move_insn_1 (x, y)
2776 rtx x, y;
2778 enum machine_mode mode = GET_MODE (x);
2779 enum machine_mode submode;
2780 enum mode_class class = GET_MODE_CLASS (mode);
2781 unsigned int i;
2783 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2784 abort ();
2786 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2787 return
2788 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2790 /* Expand complex moves by moving real part and imag part, if possible. */
2791 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2792 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2793 * BITS_PER_UNIT),
2794 (class == MODE_COMPLEX_INT
2795 ? MODE_INT : MODE_FLOAT),
2797 && (mov_optab->handlers[(int) submode].insn_code
2798 != CODE_FOR_nothing))
2800 /* Don't split destination if it is a stack push. */
2801 int stack = push_operand (x, GET_MODE (x));
2803 #ifdef PUSH_ROUNDING
2804 /* In case we output to the stack, but the size is smaller than the machine can
2805 push exactly, we need to use move instructions. */
2806 if (stack
2807 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2809 rtx temp;
2810 int offset1, offset2;
2812 /* Do not use anti_adjust_stack, since we don't want to update
2813 stack_pointer_delta. */
2814 temp = expand_binop (Pmode,
2815 #ifdef STACK_GROWS_DOWNWARD
2816 sub_optab,
2817 #else
2818 add_optab,
2819 #endif
2820 stack_pointer_rtx,
2821 GEN_INT
2822 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2823 stack_pointer_rtx,
2825 OPTAB_LIB_WIDEN);
2826 if (temp != stack_pointer_rtx)
2827 emit_move_insn (stack_pointer_rtx, temp);
2828 #ifdef STACK_GROWS_DOWNWARD
2829 offset1 = 0;
2830 offset2 = GET_MODE_SIZE (submode);
2831 #else
2832 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2833 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2834 + GET_MODE_SIZE (submode));
2835 #endif
2836 emit_move_insn (change_address (x, submode,
2837 gen_rtx_PLUS (Pmode,
2838 stack_pointer_rtx,
2839 GEN_INT (offset1))),
2840 gen_realpart (submode, y));
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2843 stack_pointer_rtx,
2844 GEN_INT (offset2))),
2845 gen_imagpart (submode, y));
2847 else
2848 #endif
2849 /* If this is a stack push, push the highpart first, so it
2850 will be in the argument order.
2852 In that case, change_address is used only to convert
2853 the mode, not to change the address. */
2854 if (stack)
2856 /* Note that the real part always precedes the imag part in memory
2857 regardless of machine's endianness. */
2858 #ifdef STACK_GROWS_DOWNWARD
2859 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2860 (gen_rtx_MEM (submode, XEXP (x, 0)),
2861 gen_imagpart (submode, y)));
2862 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2863 (gen_rtx_MEM (submode, XEXP (x, 0)),
2864 gen_realpart (submode, y)));
2865 #else
2866 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2867 (gen_rtx_MEM (submode, XEXP (x, 0)),
2868 gen_realpart (submode, y)));
2869 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2870 (gen_rtx_MEM (submode, XEXP (x, 0)),
2871 gen_imagpart (submode, y)));
2872 #endif
2874 else
2876 rtx realpart_x, realpart_y;
2877 rtx imagpart_x, imagpart_y;
2879 /* If this is a complex value with each part being smaller than a
2880 word, the usual calling sequence will likely pack the pieces into
2881 a single register. Unfortunately, SUBREG of hard registers only
2882 deals in terms of words, so we have a problem converting input
2883 arguments to the CONCAT of two registers that is used elsewhere
2884 for complex values. If this is before reload, we can copy it into
2885 memory and reload. FIXME, we should see about using extract and
2886 insert on integer registers, but complex short and complex char
2887 variables should be rarely used. */
2888 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2889 && (reload_in_progress | reload_completed) == 0)
2891 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2892 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2894 if (packed_dest_p || packed_src_p)
2896 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2897 ? MODE_FLOAT : MODE_INT);
2899 enum machine_mode reg_mode
2900 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2902 if (reg_mode != BLKmode)
2904 rtx mem = assign_stack_temp (reg_mode,
2905 GET_MODE_SIZE (mode), 0);
2906 rtx cmem = change_address (mem, mode, NULL_RTX);
2908 cfun->cannot_inline
2909 = N_("function using short complex types cannot be inline");
2911 if (packed_dest_p)
2913 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2914 emit_move_insn_1 (cmem, y);
2915 return emit_move_insn_1 (sreg, mem);
2917 else
2919 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2920 emit_move_insn_1 (mem, sreg);
2921 return emit_move_insn_1 (x, cmem);
2927 realpart_x = gen_realpart (submode, x);
2928 realpart_y = gen_realpart (submode, y);
2929 imagpart_x = gen_imagpart (submode, x);
2930 imagpart_y = gen_imagpart (submode, y);
2932 /* Show the output dies here. This is necessary for SUBREGs
2933 of pseudos since we cannot track their lifetimes correctly;
2934 hard regs shouldn't appear here except as return values.
2935 We never want to emit such a clobber after reload. */
2936 if (x != y
2937 && ! (reload_in_progress || reload_completed)
2938 && (GET_CODE (realpart_x) == SUBREG
2939 || GET_CODE (imagpart_x) == SUBREG))
2941 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2944 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2945 (realpart_x, realpart_y));
2946 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2947 (imagpart_x, imagpart_y));
2950 return get_last_insn ();
2953 /* This will handle any multi-word mode that lacks a move_insn pattern.
2954 However, you will get better code if you define such patterns,
2955 even if they must turn into multiple assembler instructions. */
2956 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2958 rtx last_insn = 0;
2959 rtx seq, inner;
2960 int need_clobber;
2962 #ifdef PUSH_ROUNDING
2964 /* If X is a push on the stack, do the push now and replace
2965 X with a reference to the stack pointer. */
2966 if (push_operand (x, GET_MODE (x)))
2968 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2969 x = change_address (x, VOIDmode, stack_pointer_rtx);
2971 #endif
2973 /* If we are in reload, see if either operand is a MEM whose address
2974 is scheduled for replacement. */
2975 if (reload_in_progress && GET_CODE (x) == MEM
2976 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2978 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2980 MEM_COPY_ATTRIBUTES (new, x);
2981 x = new;
2983 if (reload_in_progress && GET_CODE (y) == MEM
2984 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2986 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2988 MEM_COPY_ATTRIBUTES (new, y);
2989 y = new;
2992 start_sequence ();
2994 need_clobber = 0;
2995 for (i = 0;
2996 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2997 i++)
2999 rtx xpart = operand_subword (x, i, 1, mode);
3000 rtx ypart = operand_subword (y, i, 1, mode);
3002 /* If we can't get a part of Y, put Y into memory if it is a
3003 constant. Otherwise, force it into a register. If we still
3004 can't get a part of Y, abort. */
3005 if (ypart == 0 && CONSTANT_P (y))
3007 y = force_const_mem (mode, y);
3008 ypart = operand_subword (y, i, 1, mode);
3010 else if (ypart == 0)
3011 ypart = operand_subword_force (y, i, mode);
3013 if (xpart == 0 || ypart == 0)
3014 abort ();
3016 need_clobber |= (GET_CODE (xpart) == SUBREG);
3018 last_insn = emit_move_insn (xpart, ypart);
3021 seq = gen_sequence ();
3022 end_sequence ();
3024 /* Show the output dies here. This is necessary for SUBREGs
3025 of pseudos since we cannot track their lifetimes correctly;
3026 hard regs shouldn't appear here except as return values.
3027 We never want to emit such a clobber after reload. */
3028 if (x != y
3029 && ! (reload_in_progress || reload_completed)
3030 && need_clobber != 0)
3032 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3035 emit_insn (seq);
3037 return last_insn;
3039 else
3040 abort ();
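/* Illustrative sketch, not part of the original file: the fallback at the end
   of emit_move_insn_1 moves a multi-word value one word at a time via
   operand_subword.  The plain-C analogue below copies a value word by word,
   the way the loop above emits one word_mode move per word; UNITS_PER_WORD is
   assumed to be 4 and the helper is hypothetical.  */

static void
model_multiword_move (unsigned char *x, const unsigned char *y,
                      unsigned long mode_size)
{
  const unsigned long word = 4;                   /* UNITS_PER_WORD assumed */
  unsigned long nwords = (mode_size + word - 1) / word;
  unsigned long i, j;

  for (i = 0; i < nwords; i++)
    /* Each outer iteration stands for one word-sized move insn.  */
    for (j = 0; j < word && i * word + j < mode_size; j++)
      x[i * word + j] = y[i * word + j];
}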
3043 /* Pushing data onto the stack. */
3045 /* Push a block of length SIZE (perhaps variable)
3046 and return an rtx to address the beginning of the block.
3047 Note that it is not possible for the value returned to be a QUEUED.
3048 The value may be virtual_outgoing_args_rtx.
3050 EXTRA is the number of bytes of padding to push in addition to SIZE.
3051 BELOW nonzero means this padding comes at low addresses;
3052 otherwise, the padding comes at high addresses. */
3055 push_block (size, extra, below)
3056 rtx size;
3057 int extra, below;
3059 register rtx temp;
3061 size = convert_modes (Pmode, ptr_mode, size, 1);
3062 if (CONSTANT_P (size))
3063 anti_adjust_stack (plus_constant (size, extra));
3064 else if (GET_CODE (size) == REG && extra == 0)
3065 anti_adjust_stack (size);
3066 else
3068 temp = copy_to_mode_reg (Pmode, size);
3069 if (extra != 0)
3070 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3071 temp, 0, OPTAB_LIB_WIDEN);
3072 anti_adjust_stack (temp);
3075 #ifndef STACK_GROWS_DOWNWARD
3076 #ifdef ARGS_GROW_DOWNWARD
3077 if (!ACCUMULATE_OUTGOING_ARGS)
3078 #else
3079 if (0)
3080 #endif
3081 #else
3082 if (1)
3083 #endif
3085 /* Return the lowest stack address when STACK or ARGS grow downward and
3086 we are not accumulating outgoing arguments (the c4x port uses such
3087 conventions). */
3088 temp = virtual_outgoing_args_rtx;
3089 if (extra != 0 && below)
3090 temp = plus_constant (temp, extra);
3092 else
3094 if (GET_CODE (size) == CONST_INT)
3095 temp = plus_constant (virtual_outgoing_args_rtx,
3096 -INTVAL (size) - (below ? 0 : extra));
3097 else if (extra != 0 && !below)
3098 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3099 negate_rtx (Pmode, plus_constant (size, extra)));
3100 else
3101 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3102 negate_rtx (Pmode, size));
3105 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
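/* Worked example, not part of the original file: the address arithmetic at
   the end of push_block, reduced to plain integers.  On a downward-growing
   stack the adjusted outgoing-args pointer already names the start of the new
   block (plus EXTRA when the padding goes below); on an upward-growing stack
   the block starts SIZE bytes (and possibly EXTRA) before it.  The helper is
   hypothetical and treats addresses as plain numbers.  */

static unsigned long
model_push_block_start (int stack_grows_downward, unsigned long outgoing_args,
                        unsigned long size, unsigned long extra, int below)
{
  if (stack_grows_downward)
    /* Mirrors the first branch above: temp = virtual_outgoing_args_rtx,
       plus EXTRA when the padding goes below.  */
    return outgoing_args + (below ? extra : 0);

  /* Mirrors: plus_constant (virtual_outgoing_args_rtx,
                             -INTVAL (size) - (below ? 0 : extra))  */
  return outgoing_args - size - (below ? 0 : extra);
}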
3109 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3110 block of SIZE bytes. */
3112 static rtx
3113 get_push_address (size)
3114 int size;
3116 register rtx temp;
3118 if (STACK_PUSH_CODE == POST_DEC)
3119 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3120 else if (STACK_PUSH_CODE == POST_INC)
3121 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3122 else
3123 temp = stack_pointer_rtx;
3125 return copy_to_reg (temp);
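/* Worked example, not part of the original file: with a post-decrement push
   the stack pointer is updated after the store, so the data that was just
   pushed begins at sp + size; with post-increment it begins at sp - size;
   with the pre-modify codes the stack pointer already points at it.  The
   helper below is hypothetical and treats the stack pointer as a number.  */

static unsigned long
model_push_address (int post_dec, int post_inc, unsigned long sp,
                    unsigned long size)
{
  if (post_dec)
    return sp + size;
  if (post_inc)
    return sp - size;
  return sp;
}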
3128 /* Emit single push insn. */
3129 static void
3130 emit_single_push_insn (mode, x, type)
3131 rtx x;
3132 enum machine_mode mode;
3133 tree type;
3135 #ifdef PUSH_ROUNDING
3136 rtx dest_addr;
3137 int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3138 rtx dest;
3140 if (GET_MODE_SIZE (mode) == rounded_size)
3141 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3142 else
3144 #ifdef STACK_GROWS_DOWNWARD
3145 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3146 GEN_INT (-rounded_size));
3147 #else
3148 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3149 GEN_INT (rounded_size));
3150 #endif
3151 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3154 dest = gen_rtx_MEM (mode, dest_addr);
3156 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3158 if (type != 0)
3160 set_mem_attributes (dest, type, 1);
3161 /* Function incoming arguments may overlap with sibling call
3162 outgoing arguments and we cannot allow reordering of reads
3163 from function arguments with stores to outgoing arguments
3164 of sibling calls. */
3165 MEM_ALIAS_SET (dest) = 0;
3167 emit_move_insn (dest, x);
3168 #else
3169 abort ();
3170 #endif
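/* Worked example, not part of the original file: PUSH_ROUNDING may widen a
   push.  On a target that rounds every push up to 4 bytes, pushing a 1-byte
   QImode value still moves the stack pointer by 4, and because the rounded
   size differs from the mode size, emit_single_push_insn uses a PRE_MODIFY
   address instead of the plain STACK_PUSH_CODE form.  The rounding rule below
   is only a sample.  */

static unsigned int
model_push_rounding (unsigned int mode_size)
{
  /* Sample PUSH_ROUNDING: round up to a multiple of 4.  */
  unsigned int rounded = (mode_size + 3) & ~3u;

  /* stack_pointer_delta advances by the rounded size, e.g. 1 -> 4, 6 -> 8.  */
  return rounded;
}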
3173 /* Generate code to push X onto the stack, assuming it has mode MODE and
3174 type TYPE.
3175 MODE is redundant except when X is a CONST_INT (since they don't
3176 carry mode info).
3177 SIZE is an rtx for the size of data to be copied (in bytes),
3178 needed only if X is BLKmode.
3180 ALIGN (in bits) is maximum alignment we can assume.
3182 If PARTIAL and REG are both nonzero, then copy that many of the first
3183 words of X into registers starting with REG, and push the rest of X.
3184 The amount of space pushed is decreased by PARTIAL words,
3185 rounded *down* to a multiple of PARM_BOUNDARY.
3186 REG must be a hard register in this case.
3187 If REG is zero but PARTIAL is not, take all other actions for an
3188 argument partially in registers, but do not actually load any
3189 registers.
3191 EXTRA is the amount in bytes of extra space to leave next to this arg.
3192 This is ignored if an argument block has already been allocated.
3194 On a machine that lacks real push insns, ARGS_ADDR is the address of
3195 the bottom of the argument block for this call. We use indexing off there
3196 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3197 argument block has not been preallocated.
3199 ARGS_SO_FAR is the size of args previously pushed for this call.
3201 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3202 for arguments passed in registers. If nonzero, it will be the number
3203 of bytes required. */
3205 void
3206 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3207 args_addr, args_so_far, reg_parm_stack_space,
3208 alignment_pad)
3209 register rtx x;
3210 enum machine_mode mode;
3211 tree type;
3212 rtx size;
3213 unsigned int align;
3214 int partial;
3215 rtx reg;
3216 int extra;
3217 rtx args_addr;
3218 rtx args_so_far;
3219 int reg_parm_stack_space;
3220 rtx alignment_pad;
3222 rtx xinner;
3223 enum direction stack_direction
3224 #ifdef STACK_GROWS_DOWNWARD
3225 = downward;
3226 #else
3227 = upward;
3228 #endif
3230 /* Decide where to pad the argument: `downward' for below,
3231 `upward' for above, or `none' for don't pad it.
3232 Default is below for small data on big-endian machines; else above. */
3233 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3235 /* Invert direction if stack is post-update. */
3236 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3237 if (where_pad != none)
3238 where_pad = (where_pad == downward ? upward : downward);
3240 xinner = x = protect_from_queue (x, 0);
3242 if (mode == BLKmode)
3244 /* Copy a block into the stack, entirely or partially. */
3246 register rtx temp;
3247 int used = partial * UNITS_PER_WORD;
3248 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3249 int skip;
3251 if (size == 0)
3252 abort ();
3254 used -= offset;
3256 /* USED is now the # of bytes we need not copy to the stack
3257 because registers will take care of them. */
3259 if (partial != 0)
3260 xinner = change_address (xinner, BLKmode,
3261 plus_constant (XEXP (xinner, 0), used));
3263 /* If the partial register-part of the arg counts in its stack size,
3264 skip the part of stack space corresponding to the registers.
3265 Otherwise, start copying to the beginning of the stack space,
3266 by setting SKIP to 0. */
3267 skip = (reg_parm_stack_space == 0) ? 0 : used;
3269 #ifdef PUSH_ROUNDING
3270 /* Do it with several push insns if that doesn't take lots of insns
3271 and if there is no difficulty with push insns that skip bytes
3272 on the stack for alignment purposes. */
3273 if (args_addr == 0
3274 && PUSH_ARGS
3275 && GET_CODE (size) == CONST_INT
3276 && skip == 0
3277 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3278 /* Here we avoid the case of a structure whose weak alignment
3279 forces many pushes of a small amount of data,
3280 and such small pushes do rounding that causes trouble. */
3281 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3282 || align >= BIGGEST_ALIGNMENT
3283 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3284 == (align / BITS_PER_UNIT)))
3285 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3287 /* Push padding now if padding above and stack grows down,
3288 or if padding below and stack grows up.
3289 But if space already allocated, this has already been done. */
3290 if (extra && args_addr == 0
3291 && where_pad != none && where_pad != stack_direction)
3292 anti_adjust_stack (GEN_INT (extra));
3294 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3296 if (current_function_check_memory_usage && ! in_check_memory_usage)
3298 rtx temp;
3300 in_check_memory_usage = 1;
3301 temp = get_push_address (INTVAL (size) - used);
3302 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3303 emit_library_call (chkr_copy_bitmap_libfunc,
3304 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3305 Pmode, XEXP (xinner, 0), Pmode,
3306 GEN_INT (INTVAL (size) - used),
3307 TYPE_MODE (sizetype));
3308 else
3309 emit_library_call (chkr_set_right_libfunc,
3310 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3311 Pmode, GEN_INT (INTVAL (size) - used),
3312 TYPE_MODE (sizetype),
3313 GEN_INT (MEMORY_USE_RW),
3314 TYPE_MODE (integer_type_node));
3315 in_check_memory_usage = 0;
3318 else
3319 #endif /* PUSH_ROUNDING */
3321 rtx target;
3323 /* Otherwise make space on the stack and copy the data
3324 to the address of that space. */
3326 /* Deduct words put into registers from the size we must copy. */
3327 if (partial != 0)
3329 if (GET_CODE (size) == CONST_INT)
3330 size = GEN_INT (INTVAL (size) - used);
3331 else
3332 size = expand_binop (GET_MODE (size), sub_optab, size,
3333 GEN_INT (used), NULL_RTX, 0,
3334 OPTAB_LIB_WIDEN);
3337 /* Get the address of the stack space.
3338 In this case, we do not deal with EXTRA separately.
3339 A single stack adjust will do. */
3340 if (! args_addr)
3342 temp = push_block (size, extra, where_pad == downward);
3343 extra = 0;
3345 else if (GET_CODE (args_so_far) == CONST_INT)
3346 temp = memory_address (BLKmode,
3347 plus_constant (args_addr,
3348 skip + INTVAL (args_so_far)));
3349 else
3350 temp = memory_address (BLKmode,
3351 plus_constant (gen_rtx_PLUS (Pmode,
3352 args_addr,
3353 args_so_far),
3354 skip));
3355 if (current_function_check_memory_usage && ! in_check_memory_usage)
3357 in_check_memory_usage = 1;
3358 target = copy_to_reg (temp);
3359 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3360 emit_library_call (chkr_copy_bitmap_libfunc,
3361 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3362 target, Pmode,
3363 XEXP (xinner, 0), Pmode,
3364 size, TYPE_MODE (sizetype));
3365 else
3366 emit_library_call (chkr_set_right_libfunc,
3367 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3368 target, Pmode,
3369 size, TYPE_MODE (sizetype),
3370 GEN_INT (MEMORY_USE_RW),
3371 TYPE_MODE (integer_type_node));
3372 in_check_memory_usage = 0;
3375 target = gen_rtx_MEM (BLKmode, temp);
3377 if (type != 0)
3379 set_mem_attributes (target, type, 1);
3380 /* Function incoming arguments may overlap with sibling call
3381 outgoing arguments and we cannot allow reordering of reads
3382 from function arguments with stores to outgoing arguments
3383 of sibling calls. */
3384 MEM_ALIAS_SET (target) = 0;
3387 /* TEMP is the address of the block. Copy the data there. */
3388 if (GET_CODE (size) == CONST_INT
3389 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3391 move_by_pieces (target, xinner, INTVAL (size), align);
3392 goto ret;
3394 else
3396 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3397 enum machine_mode mode;
3399 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3400 mode != VOIDmode;
3401 mode = GET_MODE_WIDER_MODE (mode))
3403 enum insn_code code = movstr_optab[(int) mode];
3404 insn_operand_predicate_fn pred;
3406 if (code != CODE_FOR_nothing
3407 && ((GET_CODE (size) == CONST_INT
3408 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3409 <= (GET_MODE_MASK (mode) >> 1)))
3410 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3411 && (!(pred = insn_data[(int) code].operand[0].predicate)
3412 || ((*pred) (target, BLKmode)))
3413 && (!(pred = insn_data[(int) code].operand[1].predicate)
3414 || ((*pred) (xinner, BLKmode)))
3415 && (!(pred = insn_data[(int) code].operand[3].predicate)
3416 || ((*pred) (opalign, VOIDmode))))
3418 rtx op2 = convert_to_mode (mode, size, 1);
3419 rtx last = get_last_insn ();
3420 rtx pat;
3422 pred = insn_data[(int) code].operand[2].predicate;
3423 if (pred != 0 && ! (*pred) (op2, mode))
3424 op2 = copy_to_mode_reg (mode, op2);
3426 pat = GEN_FCN ((int) code) (target, xinner,
3427 op2, opalign);
3428 if (pat)
3430 emit_insn (pat);
3431 goto ret;
3433 else
3434 delete_insns_since (last);
3439 if (!ACCUMULATE_OUTGOING_ARGS)
3441 /* If the source is referenced relative to the stack pointer,
3442 copy it to another register to stabilize it. We do not need
3443 to do this if we know that we won't be changing sp. */
3445 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3446 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3447 temp = copy_to_reg (temp);
3450 /* Make inhibit_defer_pop nonzero around the library call
3451 to force it to pop the bcopy-arguments right away. */
3452 NO_DEFER_POP;
3453 #ifdef TARGET_MEM_FUNCTIONS
3454 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3455 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3456 convert_to_mode (TYPE_MODE (sizetype),
3457 size, TREE_UNSIGNED (sizetype)),
3458 TYPE_MODE (sizetype));
3459 #else
3460 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3461 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3462 convert_to_mode (TYPE_MODE (integer_type_node),
3463 size,
3464 TREE_UNSIGNED (integer_type_node)),
3465 TYPE_MODE (integer_type_node));
3466 #endif
3467 OK_DEFER_POP;
3470 else if (partial > 0)
3472 /* Scalar partly in registers. */
3474 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3475 int i;
3476 int not_stack;
3477 /* # words of start of argument
3478 that we must make space for but need not store. */
3479 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3480 int args_offset = INTVAL (args_so_far);
3481 int skip;
3483 /* Push padding now if padding above and stack grows down,
3484 or if padding below and stack grows up.
3485 But if space already allocated, this has already been done. */
3486 if (extra && args_addr == 0
3487 && where_pad != none && where_pad != stack_direction)
3488 anti_adjust_stack (GEN_INT (extra));
3490 /* If we make space by pushing it, we might as well push
3491 the real data. Otherwise, we can leave OFFSET nonzero
3492 and leave the space uninitialized. */
3493 if (args_addr == 0)
3494 offset = 0;
3496 /* Now NOT_STACK gets the number of words that we don't need to
3497 allocate on the stack. */
3498 not_stack = partial - offset;
3500 /* If the partial register-part of the arg counts in its stack size,
3501 skip the part of stack space corresponding to the registers.
3502 Otherwise, start copying to the beginning of the stack space,
3503 by setting SKIP to 0. */
3504 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3506 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3507 x = validize_mem (force_const_mem (mode, x));
3509 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3510 SUBREGs of such registers are not allowed. */
3511 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3512 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3513 x = copy_to_reg (x);
3515 /* Loop over all the words allocated on the stack for this arg. */
3516 /* We can do it by words, because any scalar bigger than a word
3517 has a size that is a multiple of a word. */
3518 #ifndef PUSH_ARGS_REVERSED
3519 for (i = not_stack; i < size; i++)
3520 #else
3521 for (i = size - 1; i >= not_stack; i--)
3522 #endif
3523 if (i >= not_stack + offset)
3524 emit_push_insn (operand_subword_force (x, i, mode),
3525 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3526 0, args_addr,
3527 GEN_INT (args_offset + ((i - not_stack + skip)
3528 * UNITS_PER_WORD)),
3529 reg_parm_stack_space, alignment_pad);
3531 else
3533 rtx addr;
3534 rtx target = NULL_RTX;
3535 rtx dest;
3537 /* Push padding now if padding above and stack grows down,
3538 or if padding below and stack grows up.
3539 But if space already allocated, this has already been done. */
3540 if (extra && args_addr == 0
3541 && where_pad != none && where_pad != stack_direction)
3542 anti_adjust_stack (GEN_INT (extra));
3544 #ifdef PUSH_ROUNDING
3545 if (args_addr == 0 && PUSH_ARGS)
3546 emit_single_push_insn (mode, x, type);
3547 else
3548 #endif
3550 if (GET_CODE (args_so_far) == CONST_INT)
3551 addr
3552 = memory_address (mode,
3553 plus_constant (args_addr,
3554 INTVAL (args_so_far)));
3555 else
3556 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3557 args_so_far));
3558 target = addr;
3559 dest = gen_rtx_MEM (mode, addr);
3560 if (type != 0)
3562 set_mem_attributes (dest, type, 1);
3563 /* Function incoming arguments may overlap with sibling call
3564 outgoing arguments and we cannot allow reordering of reads
3565 from function arguments with stores to outgoing arguments
3566 of sibling calls. */
3567 MEM_ALIAS_SET (dest) = 0;
3570 emit_move_insn (dest, x);
3574 if (current_function_check_memory_usage && ! in_check_memory_usage)
3576 in_check_memory_usage = 1;
3577 if (target == 0)
3578 target = get_push_address (GET_MODE_SIZE (mode));
3580 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3581 emit_library_call (chkr_copy_bitmap_libfunc,
3582 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3583 Pmode, XEXP (x, 0), Pmode,
3584 GEN_INT (GET_MODE_SIZE (mode)),
3585 TYPE_MODE (sizetype));
3586 else
3587 emit_library_call (chkr_set_right_libfunc,
3588 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3589 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3590 TYPE_MODE (sizetype),
3591 GEN_INT (MEMORY_USE_RW),
3592 TYPE_MODE (integer_type_node));
3593 in_check_memory_usage = 0;
3597 ret:
3598 /* If part should go in registers, copy that part
3599 into the appropriate registers. Do this now, at the end,
3600 since mem-to-mem copies above may do function calls. */
3601 if (partial > 0 && reg != 0)
3603 /* Handle calls that pass values in multiple non-contiguous locations.
3604 The Irix 6 ABI has examples of this. */
3605 if (GET_CODE (reg) == PARALLEL)
3606 emit_group_load (reg, x, -1, align); /* ??? size? */
3607 else
3608 move_block_to_reg (REGNO (reg), x, partial, mode);
3611 if (extra && args_addr == 0 && where_pad == stack_direction)
3612 anti_adjust_stack (GEN_INT (extra));
3614 if (alignment_pad && args_addr == 0)
3615 anti_adjust_stack (alignment_pad);
3618 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3619 operations. */
3621 static rtx
3622 get_subtarget (x)
3623 rtx x;
3625 return ((x == 0
3626 /* Only registers can be subtargets. */
3627 || GET_CODE (x) != REG
3628 /* If the register is readonly, it can't be set more than once. */
3629 || RTX_UNCHANGING_P (x)
3630 /* Don't use hard regs to avoid extending their life. */
3631 || REGNO (x) < FIRST_PSEUDO_REGISTER
3632 /* Avoid subtargets inside loops,
3633 since they hide some invariant expressions. */
3634 || preserve_subexpressions_p ())
3635 ? 0 : x);
3638 /* Expand an assignment that stores the value of FROM into TO.
3639 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3640 (This may contain a QUEUED rtx;
3641 if the value is constant, this rtx is a constant.)
3642 Otherwise, the returned value is NULL_RTX.
3644 SUGGEST_REG is no longer actually used.
3645 It used to mean, copy the value through a register
3646 and return that register, if that is possible.
3647 We now use WANT_VALUE to decide whether to do this. */
3650 expand_assignment (to, from, want_value, suggest_reg)
3651 tree to, from;
3652 int want_value;
3653 int suggest_reg ATTRIBUTE_UNUSED;
3655 register rtx to_rtx = 0;
3656 rtx result;
3658 /* Don't crash if the lhs of the assignment was erroneous. */
3660 if (TREE_CODE (to) == ERROR_MARK)
3662 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3663 return want_value ? result : NULL_RTX;
3666 /* Assignment of a structure component needs special treatment
3667 if the structure component's rtx is not simply a MEM.
3668 Assignment of an array element at a constant index, and assignment of
3669 an array element in an unaligned packed structure field, have the same
3670 problem. */
3672 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3673 || TREE_CODE (to) == ARRAY_REF)
3675 enum machine_mode mode1;
3676 HOST_WIDE_INT bitsize, bitpos;
3677 tree offset;
3678 int unsignedp;
3679 int volatilep = 0;
3680 tree tem;
3681 unsigned int alignment;
3683 push_temp_slots ();
3684 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3685 &unsignedp, &volatilep, &alignment);
3687 /* If we are going to use store_bit_field and extract_bit_field,
3688 make sure to_rtx will be safe for multiple use. */
3690 if (mode1 == VOIDmode && want_value)
3691 tem = stabilize_reference (tem);
3693 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3694 if (offset != 0)
3696 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3698 if (GET_CODE (to_rtx) != MEM)
3699 abort ();
3701 if (GET_MODE (offset_rtx) != ptr_mode)
3703 #ifdef POINTERS_EXTEND_UNSIGNED
3704 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3705 #else
3706 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3707 #endif
3710 /* A constant address in TO_RTX can have VOIDmode, we must not try
3711 to call force_reg for that case. Avoid that case. */
3712 if (GET_CODE (to_rtx) == MEM
3713 && GET_MODE (to_rtx) == BLKmode
3714 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3715 && bitsize
3716 && (bitpos % bitsize) == 0
3717 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3718 && alignment == GET_MODE_ALIGNMENT (mode1))
3720 rtx temp = change_address (to_rtx, mode1,
3721 plus_constant (XEXP (to_rtx, 0),
3722 (bitpos /
3723 BITS_PER_UNIT)));
3724 if (GET_CODE (XEXP (temp, 0)) == REG)
3725 to_rtx = temp;
3726 else
3727 to_rtx = change_address (to_rtx, mode1,
3728 force_reg (GET_MODE (XEXP (temp, 0)),
3729 XEXP (temp, 0)));
3730 bitpos = 0;
3733 to_rtx = change_address (to_rtx, VOIDmode,
3734 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3735 force_reg (ptr_mode,
3736 offset_rtx)));
3739 if (volatilep)
3741 if (GET_CODE (to_rtx) == MEM)
3743 /* When the offset is zero, to_rtx is the address of the
3744 structure we are storing into, and hence may be shared.
3745 We must make a new MEM before setting the volatile bit. */
3746 if (offset == 0)
3747 to_rtx = copy_rtx (to_rtx);
3749 MEM_VOLATILE_P (to_rtx) = 1;
3751 #if 0 /* This was turned off because, when a field is volatile
3752 in an object which is not volatile, the object may be in a register,
3753 and then we would abort over here. */
3754 else
3755 abort ();
3756 #endif
3759 if (TREE_CODE (to) == COMPONENT_REF
3760 && TREE_READONLY (TREE_OPERAND (to, 1)))
3762 if (offset == 0)
3763 to_rtx = copy_rtx (to_rtx);
3765 RTX_UNCHANGING_P (to_rtx) = 1;
3768 /* Check the access. */
3769 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3771 rtx to_addr;
3772 int size;
3773 int best_mode_size;
3774 enum machine_mode best_mode;
3776 best_mode = get_best_mode (bitsize, bitpos,
3777 TYPE_ALIGN (TREE_TYPE (tem)),
3778 mode1, volatilep);
3779 if (best_mode == VOIDmode)
3780 best_mode = QImode;
3782 best_mode_size = GET_MODE_BITSIZE (best_mode);
3783 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3784 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3785 size *= GET_MODE_SIZE (best_mode);
3787 /* Check the access right of the pointer. */
3788 in_check_memory_usage = 1;
3789 if (size)
3790 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3791 VOIDmode, 3, to_addr, Pmode,
3792 GEN_INT (size), TYPE_MODE (sizetype),
3793 GEN_INT (MEMORY_USE_WO),
3794 TYPE_MODE (integer_type_node));
3795 in_check_memory_usage = 0;
3798 /* If this is a varying-length object, we must get the address of
3799 the source and do an explicit block move. */
3800 if (bitsize < 0)
3802 unsigned int from_align;
3803 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3804 rtx inner_to_rtx
3805 = change_address (to_rtx, VOIDmode,
3806 plus_constant (XEXP (to_rtx, 0),
3807 bitpos / BITS_PER_UNIT));
3809 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3810 MIN (alignment, from_align));
3811 free_temp_slots ();
3812 pop_temp_slots ();
3813 return to_rtx;
3815 else
3817 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3818 (want_value
3819 /* Spurious cast for HPUX compiler. */
3820 ? ((enum machine_mode)
3821 TYPE_MODE (TREE_TYPE (to)))
3822 : VOIDmode),
3823 unsignedp,
3824 alignment,
3825 int_size_in_bytes (TREE_TYPE (tem)),
3826 get_alias_set (to));
3828 preserve_temp_slots (result);
3829 free_temp_slots ();
3830 pop_temp_slots ();
3832 /* If the value is meaningful, convert RESULT to the proper mode.
3833 Otherwise, return nothing. */
3834 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3835 TYPE_MODE (TREE_TYPE (from)),
3836 result,
3837 TREE_UNSIGNED (TREE_TYPE (to)))
3838 : NULL_RTX);
3842 /* If the rhs is a function call and its value is not an aggregate,
3843 call the function before we start to compute the lhs.
3844 This is needed for correct code for cases such as
3845 val = setjmp (buf) on machines where reference to val
3846 requires loading up part of an address in a separate insn.
3848 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3849 since it might be a promoted variable where the zero- or sign- extension
3850 needs to be done. Handling this in the normal way is safe because no
3851 computation is done before the call. */
3852 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3853 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3854 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3855 && GET_CODE (DECL_RTL (to)) == REG))
3857 rtx value;
3859 push_temp_slots ();
3860 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3861 if (to_rtx == 0)
3862 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3864 /* Handle calls that return values in multiple non-contiguous locations.
3865 The Irix 6 ABI has examples of this. */
3866 if (GET_CODE (to_rtx) == PARALLEL)
3867 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3868 TYPE_ALIGN (TREE_TYPE (from)));
3869 else if (GET_MODE (to_rtx) == BLKmode)
3870 emit_block_move (to_rtx, value, expr_size (from),
3871 TYPE_ALIGN (TREE_TYPE (from)));
3872 else
3874 #ifdef POINTERS_EXTEND_UNSIGNED
3875 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3876 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3877 value = convert_memory_address (GET_MODE (to_rtx), value);
3878 #endif
3879 emit_move_insn (to_rtx, value);
3881 preserve_temp_slots (to_rtx);
3882 free_temp_slots ();
3883 pop_temp_slots ();
3884 return want_value ? to_rtx : NULL_RTX;
3887 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3888 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3890 if (to_rtx == 0)
3892 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3893 if (GET_CODE (to_rtx) == MEM)
3894 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3897 /* Don't move directly into a return register. */
3898 if (TREE_CODE (to) == RESULT_DECL
3899 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3901 rtx temp;
3903 push_temp_slots ();
3904 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3906 if (GET_CODE (to_rtx) == PARALLEL)
3907 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3908 TYPE_ALIGN (TREE_TYPE (from)));
3909 else
3910 emit_move_insn (to_rtx, temp);
3912 preserve_temp_slots (to_rtx);
3913 free_temp_slots ();
3914 pop_temp_slots ();
3915 return want_value ? to_rtx : NULL_RTX;
3918 /* In case we are returning the contents of an object which overlaps
3919 the place the value is being stored, use a safe function when copying
3920 a value through a pointer into a structure value return block. */
3921 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3922 && current_function_returns_struct
3923 && !current_function_returns_pcc_struct)
3925 rtx from_rtx, size;
3927 push_temp_slots ();
3928 size = expr_size (from);
3929 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3930 EXPAND_MEMORY_USE_DONT);
3932 /* Copy the access rights of the bitmap. */
3933 if (current_function_check_memory_usage)
3934 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3935 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3936 XEXP (from_rtx, 0), Pmode,
3937 convert_to_mode (TYPE_MODE (sizetype),
3938 size, TREE_UNSIGNED (sizetype)),
3939 TYPE_MODE (sizetype));
3941 #ifdef TARGET_MEM_FUNCTIONS
3942 emit_library_call (memmove_libfunc, LCT_NORMAL,
3943 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3944 XEXP (from_rtx, 0), Pmode,
3945 convert_to_mode (TYPE_MODE (sizetype),
3946 size, TREE_UNSIGNED (sizetype)),
3947 TYPE_MODE (sizetype));
3948 #else
3949 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3950 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3951 XEXP (to_rtx, 0), Pmode,
3952 convert_to_mode (TYPE_MODE (integer_type_node),
3953 size, TREE_UNSIGNED (integer_type_node)),
3954 TYPE_MODE (integer_type_node));
3955 #endif
3957 preserve_temp_slots (to_rtx);
3958 free_temp_slots ();
3959 pop_temp_slots ();
3960 return want_value ? to_rtx : NULL_RTX;
3963 /* Compute FROM and store the value in the rtx we got. */
3965 push_temp_slots ();
3966 result = store_expr (from, to_rtx, want_value);
3967 preserve_temp_slots (result);
3968 free_temp_slots ();
3969 pop_temp_slots ();
3970 return want_value ? result : NULL_RTX;
3973 /* Generate code for computing expression EXP,
3974 and storing the value into TARGET.
3975 TARGET may contain a QUEUED rtx.
3977 If WANT_VALUE is nonzero, return a copy of the value
3978 not in TARGET, so that we can be sure to use the proper
3979 value in a containing expression even if TARGET has something
3980 else stored in it. If possible, we copy the value through a pseudo
3981 and return that pseudo. Or, if the value is constant, we try to
3982 return the constant. In some cases, we return a pseudo
3983 copied *from* TARGET.
3985 If the mode is BLKmode then we may return TARGET itself.
3986 It turns out that in BLKmode it doesn't cause a problem,
3987 because C has no operators that could combine two different
3988 assignments into the same BLKmode object with different values
3989 with no sequence point. Will other languages need this to
3990 be more thorough?
3992 If WANT_VALUE is 0, we return NULL, to make sure
3993 to catch quickly any cases where the caller uses the value
3994 and fails to set WANT_VALUE. */
3996 rtx
3997 store_expr (exp, target, want_value)
3998 register tree exp;
3999 register rtx target;
4000 int want_value;
4002 register rtx temp;
4003 int dont_return_target = 0;
4004 int dont_store_target = 0;
4006 if (TREE_CODE (exp) == COMPOUND_EXPR)
4008 /* Perform first part of compound expression, then assign from second
4009 part. */
4010 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4011 emit_queue ();
4012 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4014 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4016 /* For conditional expression, get safe form of the target. Then
4017 test the condition, doing the appropriate assignment on either
4018 side. This avoids the creation of unnecessary temporaries.
4019 For non-BLKmode, it is more efficient not to do this. */
4021 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4023 emit_queue ();
4024 target = protect_from_queue (target, 1);
4026 do_pending_stack_adjust ();
4027 NO_DEFER_POP;
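/* Emit the two arms explicitly: fall through into the first arm, jump to
   LAB1 for the second, and rejoin at LAB2 afterwards.  */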
4028 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4029 start_cleanup_deferral ();
4030 store_expr (TREE_OPERAND (exp, 1), target, 0);
4031 end_cleanup_deferral ();
4032 emit_queue ();
4033 emit_jump_insn (gen_jump (lab2));
4034 emit_barrier ();
4035 emit_label (lab1);
4036 start_cleanup_deferral ();
4037 store_expr (TREE_OPERAND (exp, 2), target, 0);
4038 end_cleanup_deferral ();
4039 emit_queue ();
4040 emit_label (lab2);
4041 OK_DEFER_POP;
4043 return want_value ? target : NULL_RTX;
4045 else if (queued_subexp_p (target))
4046 /* If target contains a postincrement, let's not risk
4047 using it as the place to generate the rhs. */
4049 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4051 /* Expand EXP into a new pseudo. */
4052 temp = gen_reg_rtx (GET_MODE (target));
4053 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4055 else
4056 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4058 /* If target is volatile, ANSI requires accessing the value
4059 *from* the target, if it is accessed. So make that happen.
4060 In no case return the target itself. */
4061 if (! MEM_VOLATILE_P (target) && want_value)
4062 dont_return_target = 1;
4064 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4065 && GET_MODE (target) != BLKmode)
4066 /* If target is in memory and caller wants value in a register instead,
4067 arrange that. Pass TARGET as target for expand_expr so that,
4068 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4069 We know expand_expr will not use the target in that case.
4070 Don't do this if TARGET is volatile because we are supposed
4071 to write it and then read it. */
4073 temp = expand_expr (exp, target, GET_MODE (target), 0);
4074 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4076 /* If TEMP is already in the desired TARGET, only copy it from
4077 memory and don't store it there again. */
4078 if (temp == target
4079 || (rtx_equal_p (temp, target)
4080 && ! side_effects_p (temp) && ! side_effects_p (target)))
4081 dont_store_target = 1;
4082 temp = copy_to_reg (temp);
4084 dont_return_target = 1;
4086 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4087 /* If this is a scalar in a register that is stored in a wider mode
4088 than the declared mode, compute the result into its declared mode
4089 and then convert to the wider mode. Our value is the computed
4090 expression. */
4092 /* If we don't want a value, we can do the conversion inside EXP,
4093 which will often result in some optimizations. Do the conversion
4094 in two steps: first change the signedness, if needed, then
4095 the extend. But don't do this if the type of EXP is a subtype
4096 of something else since then the conversion might involve
4097 more than just converting modes. */
4098 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4099 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4101 if (TREE_UNSIGNED (TREE_TYPE (exp))
4102 != SUBREG_PROMOTED_UNSIGNED_P (target))
4103 exp
4104 = convert
4105 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4106 TREE_TYPE (exp)),
4107 exp);
4109 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4110 SUBREG_PROMOTED_UNSIGNED_P (target)),
4111 exp);
4114 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4116 /* If TEMP is a volatile MEM and we want a result value, make
4117 the access now so it gets done only once. Likewise if
4118 it contains TARGET. */
4119 if (GET_CODE (temp) == MEM && want_value
4120 && (MEM_VOLATILE_P (temp)
4121 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4122 temp = copy_to_reg (temp);
4124 /* If TEMP is a VOIDmode constant, use convert_modes to make
4125 sure that we properly convert it. */
4126 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4127 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4128 TYPE_MODE (TREE_TYPE (exp)), temp,
4129 SUBREG_PROMOTED_UNSIGNED_P (target));
4131 convert_move (SUBREG_REG (target), temp,
4132 SUBREG_PROMOTED_UNSIGNED_P (target));
4134 /* If we promoted a constant, change the mode back down to match
4135 target. Otherwise, the caller might get confused by a result whose
4136 mode is larger than expected. */
4138 if (want_value && GET_MODE (temp) != GET_MODE (target)
4139 && GET_MODE (temp) != VOIDmode)
4141 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4142 SUBREG_PROMOTED_VAR_P (temp) = 1;
4143 SUBREG_PROMOTED_UNSIGNED_P (temp)
4144 = SUBREG_PROMOTED_UNSIGNED_P (target);
4147 return want_value ? temp : NULL_RTX;
4149 else
4151 temp = expand_expr (exp, target, GET_MODE (target), 0);
4152 /* Return TARGET if it's a specified hardware register.
4153 If TARGET is a volatile mem ref, either return TARGET
4154 or return a reg copied *from* TARGET; ANSI requires this.
4156 Otherwise, if TEMP is not TARGET, return TEMP
4157 if it is constant (for efficiency),
4158 or if we really want the correct value. */
4159 if (!(target && GET_CODE (target) == REG
4160 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4161 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4162 && ! rtx_equal_p (temp, target)
4163 && (CONSTANT_P (temp) || want_value))
4164 dont_return_target = 1;
4167 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4168 the same as that of TARGET, adjust the constant. This is needed, for
4169 example, in case it is a CONST_DOUBLE and we want only a word-sized
4170 value. */
4171 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4172 && TREE_CODE (exp) != ERROR_MARK
4173 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4174 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4175 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
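/* When memory-usage checking is enabled and TARGET is in memory, tell the
   checker about the write: copy the access rights when TEMP is also in
   memory, otherwise just record the write access.  */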
4177 if (current_function_check_memory_usage
4178 && GET_CODE (target) == MEM
4179 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4181 in_check_memory_usage = 1;
4182 if (GET_CODE (temp) == MEM)
4183 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4184 VOIDmode, 3, XEXP (target, 0), Pmode,
4185 XEXP (temp, 0), Pmode,
4186 expr_size (exp), TYPE_MODE (sizetype));
4187 else
4188 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4189 VOIDmode, 3, XEXP (target, 0), Pmode,
4190 expr_size (exp), TYPE_MODE (sizetype),
4191 GEN_INT (MEMORY_USE_WO),
4192 TYPE_MODE (integer_type_node));
4193 in_check_memory_usage = 0;
4196 /* If value was not generated in the target, store it there.
4197 Convert the value to TARGET's type first if necessary. */
4198 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4199 one or both of them are volatile memory refs, we have to distinguish
4200 two cases:
4201 - expand_expr has used TARGET. In this case, we must not generate
4202 another copy. This can be detected by TARGET being equal according
4203 to == .
4204 - expand_expr has not used TARGET - that means that the source just
4205 happens to have the same RTX form. Since temp will have been created
4206 by expand_expr, it will compare unequal according to == .
4207 We must generate a copy in this case, to reach the correct number
4208 of volatile memory references. */
4210 if ((! rtx_equal_p (temp, target)
4211 || (temp != target && (side_effects_p (temp)
4212 || side_effects_p (target))))
4213 && TREE_CODE (exp) != ERROR_MARK
4214 && ! dont_store_target)
4216 target = protect_from_queue (target, 1);
4217 if (GET_MODE (temp) != GET_MODE (target)
4218 && GET_MODE (temp) != VOIDmode)
4220 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4221 if (dont_return_target)
4223 /* In this case, we will return TEMP,
4224 so make sure it has the proper mode.
4225 But don't forget to store the value into TARGET. */
4226 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4227 emit_move_insn (target, temp);
4229 else
4230 convert_move (target, temp, unsignedp);
4233 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4235 /* Handle copying a string constant into an array.
4236 The string constant may be shorter than the array.
4237 So copy just the string's actual length, and clear the rest. */
4238 rtx size;
4239 rtx addr;
4241 /* Get the size of the data type of the string,
4242 which is actually the size of the target. */
4243 size = expr_size (exp);
4244 if (GET_CODE (size) == CONST_INT
4245 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4246 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4247 else
4249 /* Compute the size of the data to copy from the string. */
4250 tree copy_size
4251 = size_binop (MIN_EXPR,
4252 make_tree (sizetype, size),
4253 size_int (TREE_STRING_LENGTH (exp)));
4254 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4255 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4256 VOIDmode, 0);
4257 rtx label = 0;
4259 /* Copy that much. */
4260 emit_block_move (target, temp, copy_size_rtx,
4261 TYPE_ALIGN (TREE_TYPE (exp)));
4263 /* Figure out how much is left in TARGET that we have to clear.
4264 Do all calculations in ptr_mode. */
4266 addr = XEXP (target, 0);
4267 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4269 if (GET_CODE (copy_size_rtx) == CONST_INT)
4271 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4272 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4273 align = MIN (align,
4274 (unsigned int) (BITS_PER_UNIT
4275 * (INTVAL (copy_size_rtx)
4276 & - INTVAL (copy_size_rtx))));
4278 else
4280 addr = force_reg (ptr_mode, addr);
4281 addr = expand_binop (ptr_mode, add_optab, addr,
4282 copy_size_rtx, NULL_RTX, 0,
4283 OPTAB_LIB_WIDEN);
4285 size = expand_binop (ptr_mode, sub_optab, size,
4286 copy_size_rtx, NULL_RTX, 0,
4287 OPTAB_LIB_WIDEN);
4289 align = BITS_PER_UNIT;
4290 label = gen_label_rtx ();
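/* If the remaining size turns out to be negative at run time (the string
   already filled the whole target), jump past the clearing code.  */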
4291 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4292 GET_MODE (size), 0, 0, label);
4294 align = MIN (align, expr_align (copy_size));
4296 if (size != const0_rtx)
4298 rtx dest = gen_rtx_MEM (BLKmode, addr);
4300 MEM_COPY_ATTRIBUTES (dest, target);
4302 /* Be sure we can write on ADDR. */
4303 in_check_memory_usage = 1;
4304 if (current_function_check_memory_usage)
4305 emit_library_call (chkr_check_addr_libfunc,
4306 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4307 addr, Pmode,
4308 size, TYPE_MODE (sizetype),
4309 GEN_INT (MEMORY_USE_WO),
4310 TYPE_MODE (integer_type_node));
4311 in_check_memory_usage = 0;
4312 clear_storage (dest, size, align);
4315 if (label)
4316 emit_label (label);
4319 /* Handle calls that return values in multiple non-contiguous locations.
4320 The Irix 6 ABI has examples of this. */
4321 else if (GET_CODE (target) == PARALLEL)
4322 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4323 TYPE_ALIGN (TREE_TYPE (exp)));
4324 else if (GET_MODE (temp) == BLKmode)
4325 emit_block_move (target, temp, expr_size (exp),
4326 TYPE_ALIGN (TREE_TYPE (exp)));
4327 else
4328 emit_move_insn (target, temp);
4331 /* If we don't want a value, return NULL_RTX. */
4332 if (! want_value)
4333 return NULL_RTX;
4335 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4336 ??? The latter test doesn't seem to make sense. */
4337 else if (dont_return_target && GET_CODE (temp) != MEM)
4338 return temp;
4340 /* Return TARGET itself if it is a hard register. */
4341 else if (want_value && GET_MODE (target) != BLKmode
4342 && ! (GET_CODE (target) == REG
4343 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4344 return copy_to_reg (target);
4346 else
4347 return target;
4350 /* Return 1 if EXP just contains zeros. */
4352 static int
4353 is_zeros_p (exp)
4354 tree exp;
4356 tree elt;
4358 switch (TREE_CODE (exp))
4360 case CONVERT_EXPR:
4361 case NOP_EXPR:
4362 case NON_LVALUE_EXPR:
4363 return is_zeros_p (TREE_OPERAND (exp, 0));
4365 case INTEGER_CST:
4366 return integer_zerop (exp);
4368 case COMPLEX_CST:
4369 return
4370 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4372 case REAL_CST:
4373 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4375 case CONSTRUCTOR:
4376 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4377 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4378 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4379 if (! is_zeros_p (TREE_VALUE (elt)))
4380 return 0;
4382 return 1;
4384 default:
4385 return 0;
4389 /* Return 1 if EXP contains mostly (3/4) zeros. */
4391 static int
4392 mostly_zeros_p (exp)
4393 tree exp;
4395 if (TREE_CODE (exp) == CONSTRUCTOR)
4397 int elts = 0, zeros = 0;
4398 tree elt = CONSTRUCTOR_ELTS (exp);
4399 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4401 /* If there are no ranges of true bits, it is all zero. */
4402 return elt == NULL_TREE;
4404 for (; elt; elt = TREE_CHAIN (elt))
4406 /* We do not handle the case where the index is a RANGE_EXPR,
4407 so the statistic will be somewhat inaccurate.
4408 We do make a more accurate count in store_constructor itself,
4409 so since this function is only used for nested array elements,
4410 this should be close enough. */
4411 if (mostly_zeros_p (TREE_VALUE (elt)))
4412 zeros++;
4413 elts++;
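/* Call it mostly zero when at least three quarters of the elements are.  */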
4416 return 4 * zeros >= 3 * elts;
4419 return is_zeros_p (exp);
4422 /* Helper function for store_constructor.
4423 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4424 TYPE is the type of the CONSTRUCTOR, not the element type.
4425 ALIGN and CLEARED are as for store_constructor.
4426 ALIAS_SET is the alias set to use for any stores.
4428 This provides a recursive shortcut back to store_constructor when it isn't
4429 necessary to go through store_field. This is so that we can pass through
4430 the cleared field to let store_constructor know that we may not have to
4431 clear a substructure if the outer structure has already been cleared. */
4433 static void
4434 store_constructor_field (target, bitsize, bitpos,
4435 mode, exp, type, align, cleared, alias_set)
4436 rtx target;
4437 unsigned HOST_WIDE_INT bitsize;
4438 HOST_WIDE_INT bitpos;
4439 enum machine_mode mode;
4440 tree exp, type;
4441 unsigned int align;
4442 int cleared;
4443 int alias_set;
4445 if (TREE_CODE (exp) == CONSTRUCTOR
4446 && bitpos % BITS_PER_UNIT == 0
4447 /* If we have a non-zero bitpos for a register target, then we just
4448 let store_field do the bitfield handling. This is unlikely to
4449 generate unnecessary clear instructions anyway. */
4450 && (bitpos == 0 || GET_CODE (target) == MEM))
4452 if (bitpos != 0)
4453 target
4454 = change_address (target,
4455 GET_MODE (target) == BLKmode
4456 || 0 != (bitpos
4457 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4458 ? BLKmode : VOIDmode,
4459 plus_constant (XEXP (target, 0),
4460 bitpos / BITS_PER_UNIT));
4463 /* Show the alignment may no longer be what it was and update the alias
4464 set, if required. */
4465 if (bitpos != 0)
4466 align = MIN (align, (unsigned int) bitpos & - bitpos);
4467 if (GET_CODE (target) == MEM)
4468 MEM_ALIAS_SET (target) = alias_set;
4470 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4472 else
4473 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4474 int_size_in_bytes (type), alias_set);
4477 /* Store the value of constructor EXP into the rtx TARGET.
4478 TARGET is either a REG or a MEM.
4479 ALIGN is the maximum known alignment for TARGET.
4480 CLEARED is true if TARGET is known to have been zero'd.
4481 SIZE is the number of bytes of TARGET we are allowed to modify: this
4482 may not be the same as the size of EXP if we are assigning to a field
4483 which has been packed to exclude padding bits. */
4485 static void
4486 store_constructor (exp, target, align, cleared, size)
4487 tree exp;
4488 rtx target;
4489 unsigned int align;
4490 int cleared;
4491 HOST_WIDE_INT size;
4493 tree type = TREE_TYPE (exp);
4494 #ifdef WORD_REGISTER_OPERATIONS
4495 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4496 #endif
4498 /* We know our target cannot conflict, since safe_from_p has been called. */
4499 #if 0
4500 /* Don't try copying piece by piece into a hard register
4501 since that is vulnerable to being clobbered by EXP.
4502 Instead, construct in a pseudo register and then copy it all. */
4503 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4505 rtx temp = gen_reg_rtx (GET_MODE (target));
4506 store_constructor (exp, temp, align, cleared, size);
4507 emit_move_insn (target, temp);
4508 return;
4510 #endif
4512 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4513 || TREE_CODE (type) == QUAL_UNION_TYPE)
4515 register tree elt;
4517 /* Inform later passes that the whole union value is dead. */
4518 if ((TREE_CODE (type) == UNION_TYPE
4519 || TREE_CODE (type) == QUAL_UNION_TYPE)
4520 && ! cleared)
4522 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4524 /* If the constructor is empty, clear the union. */
4525 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4526 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4529 /* If we are building a static constructor into a register,
4530 set the initial value as zero so we can fold the value into
4531 a constant. But if more than one register is involved,
4532 this probably loses. */
4533 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4534 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4536 if (! cleared)
4537 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4539 cleared = 1;
4542 /* If the constructor has fewer fields than the structure
4543 or if we are initializing the structure to mostly zeros,
4544 clear the whole structure first. Don't do this if TARGET is a
4545 register whose mode size isn't equal to SIZE since clear_storage
4546 can't handle this case. */
4547 else if (size > 0
4548 && ((list_length (CONSTRUCTOR_ELTS (exp))
4549 != fields_length (type))
4550 || mostly_zeros_p (exp))
4551 && (GET_CODE (target) != REG
4552 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4554 if (! cleared)
4555 clear_storage (target, GEN_INT (size), align);
4557 cleared = 1;
4559 else if (! cleared)
4560 /* Inform later passes that the old value is dead. */
4561 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4563 /* Store each element of the constructor into
4564 the corresponding field of TARGET. */
4566 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4568 register tree field = TREE_PURPOSE (elt);
4569 #ifdef WORD_REGISTER_OPERATIONS
4570 tree value = TREE_VALUE (elt);
4571 #endif
4572 register enum machine_mode mode;
4573 HOST_WIDE_INT bitsize;
4574 HOST_WIDE_INT bitpos = 0;
4575 int unsignedp;
4576 tree offset;
4577 rtx to_rtx = target;
4579 /* Just ignore missing fields.
4580 We cleared the whole structure, above,
4581 if any fields are missing. */
4582 if (field == 0)
4583 continue;
4585 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4586 continue;
4588 if (host_integerp (DECL_SIZE (field), 1))
4589 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4590 else
4591 bitsize = -1;
4593 unsignedp = TREE_UNSIGNED (field);
4594 mode = DECL_MODE (field);
4595 if (DECL_BIT_FIELD (field))
4596 mode = VOIDmode;
4598 offset = DECL_FIELD_OFFSET (field);
4599 if (host_integerp (offset, 0)
4600 && host_integerp (bit_position (field), 0))
4602 bitpos = int_bit_position (field);
4603 offset = 0;
4605 else
4606 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4608 if (offset)
4610 rtx offset_rtx;
4612 if (contains_placeholder_p (offset))
4613 offset = build (WITH_RECORD_EXPR, sizetype,
4614 offset, make_tree (TREE_TYPE (exp), target));
4616 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4617 if (GET_CODE (to_rtx) != MEM)
4618 abort ();
4620 if (GET_MODE (offset_rtx) != ptr_mode)
4622 #ifdef POINTERS_EXTEND_UNSIGNED
4623 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4624 #else
4625 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4626 #endif
4629 to_rtx
4630 = change_address (to_rtx, VOIDmode,
4631 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4632 force_reg (ptr_mode,
4633 offset_rtx)));
4634 align = DECL_OFFSET_ALIGN (field);
4637 if (TREE_READONLY (field))
4639 if (GET_CODE (to_rtx) == MEM)
4640 to_rtx = copy_rtx (to_rtx);
4642 RTX_UNCHANGING_P (to_rtx) = 1;
4645 #ifdef WORD_REGISTER_OPERATIONS
4646 /* If this initializes a field that is smaller than a word, at the
4647 start of a word, try to widen it to a full word.
4648 This special case allows us to output C++ member function
4649 initializations in a form that the optimizers can understand. */
4650 if (GET_CODE (target) == REG
4651 && bitsize < BITS_PER_WORD
4652 && bitpos % BITS_PER_WORD == 0
4653 && GET_MODE_CLASS (mode) == MODE_INT
4654 && TREE_CODE (value) == INTEGER_CST
4655 && exp_size >= 0
4656 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4658 tree type = TREE_TYPE (value);
4659 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4661 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4662 value = convert (type, value);
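/* On a big-endian target, shift the value into the most significant end of
   the word so the widened store leaves it at the original bit position.  */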
4664 if (BYTES_BIG_ENDIAN)
4665 value
4666 = fold (build (LSHIFT_EXPR, type, value,
4667 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4668 bitsize = BITS_PER_WORD;
4669 mode = word_mode;
4671 #endif
4672 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4673 TREE_VALUE (elt), type, align, cleared,
4674 (DECL_NONADDRESSABLE_P (field)
4675 && GET_CODE (to_rtx) == MEM)
4676 ? MEM_ALIAS_SET (to_rtx)
4677 : get_alias_set (TREE_TYPE (field)));
4680 else if (TREE_CODE (type) == ARRAY_TYPE)
4682 register tree elt;
4683 register int i;
4684 int need_to_clear;
4685 tree domain = TYPE_DOMAIN (type);
4686 tree elttype = TREE_TYPE (type);
4687 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4688 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4689 HOST_WIDE_INT minelt;
4690 HOST_WIDE_INT maxelt;
4692 /* If we have constant bounds for the range of the type, get them. */
4693 if (const_bounds_p)
4695 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4696 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4699 /* If the constructor has fewer elements than the array,
4700 clear the whole array first. Similarly if this is
4701 a static constructor of a non-BLKmode object. */
4702 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4703 need_to_clear = 1;
4704 else
4706 HOST_WIDE_INT count = 0, zero_count = 0;
4707 need_to_clear = ! const_bounds_p;
4709 /* This loop is a more accurate version of the loop in
4710 mostly_zeros_p (it handles RANGE_EXPR in an index).
4711 It is also needed to check for missing elements. */
4712 for (elt = CONSTRUCTOR_ELTS (exp);
4713 elt != NULL_TREE && ! need_to_clear;
4714 elt = TREE_CHAIN (elt))
4716 tree index = TREE_PURPOSE (elt);
4717 HOST_WIDE_INT this_node_count;
4719 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4721 tree lo_index = TREE_OPERAND (index, 0);
4722 tree hi_index = TREE_OPERAND (index, 1);
4724 if (! host_integerp (lo_index, 1)
4725 || ! host_integerp (hi_index, 1))
4727 need_to_clear = 1;
4728 break;
4731 this_node_count = (tree_low_cst (hi_index, 1)
4732 - tree_low_cst (lo_index, 1) + 1);
4734 else
4735 this_node_count = 1;
4737 count += this_node_count;
4738 if (mostly_zeros_p (TREE_VALUE (elt)))
4739 zero_count += this_node_count;
4742 /* Clear the entire array first if there are any missing elements,
4743 or if the incidence of zero elements is >= 75%. */
4744 if (! need_to_clear
4745 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4746 need_to_clear = 1;
4749 if (need_to_clear && size > 0)
4751 if (! cleared)
4752 clear_storage (target, GEN_INT (size), align);
4753 cleared = 1;
4755 else
4756 /* Inform later passes that the old value is dead. */
4757 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4759 /* Store each element of the constructor into
4760 the corresponding element of TARGET, determined
4761 by counting the elements. */
4762 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4763 elt;
4764 elt = TREE_CHAIN (elt), i++)
4766 register enum machine_mode mode;
4767 HOST_WIDE_INT bitsize;
4768 HOST_WIDE_INT bitpos;
4769 int unsignedp;
4770 tree value = TREE_VALUE (elt);
4771 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4772 tree index = TREE_PURPOSE (elt);
4773 rtx xtarget = target;
4775 if (cleared && is_zeros_p (value))
4776 continue;
4778 unsignedp = TREE_UNSIGNED (elttype);
4779 mode = TYPE_MODE (elttype);
4780 if (mode == BLKmode)
4781 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4782 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4783 : -1);
4784 else
4785 bitsize = GET_MODE_BITSIZE (mode);
4787 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4789 tree lo_index = TREE_OPERAND (index, 0);
4790 tree hi_index = TREE_OPERAND (index, 1);
4791 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4792 struct nesting *loop;
4793 HOST_WIDE_INT lo, hi, count;
4794 tree position;
4796 /* If the range is constant and "small", unroll the loop. */
4797 if (const_bounds_p
4798 && host_integerp (lo_index, 0)
4799 && host_integerp (hi_index, 0)
4800 && (lo = tree_low_cst (lo_index, 0),
4801 hi = tree_low_cst (hi_index, 0),
4802 count = hi - lo + 1,
4803 (GET_CODE (target) != MEM
4804 || count <= 2
4805 || (host_integerp (TYPE_SIZE (elttype), 1)
4806 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4807 <= 40 * 8)))))
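/* Here "small" means the target is not in memory, or there are at most two
   elements, or the elements have a constant size totalling no more than
   40 bytes.  */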
4809 lo -= minelt; hi -= minelt;
4810 for (; lo <= hi; lo++)
4812 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4813 store_constructor_field
4814 (target, bitsize, bitpos, mode, value, type, align,
4815 cleared,
4816 TYPE_NONALIASED_COMPONENT (type)
4817 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4820 else
4822 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4823 loop_top = gen_label_rtx ();
4824 loop_end = gen_label_rtx ();
4826 unsignedp = TREE_UNSIGNED (domain);
4828 index = build_decl (VAR_DECL, NULL_TREE, domain);
4830 index_r
4831 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4832 &unsignedp, 0));
4833 SET_DECL_RTL (index, index_r);
4834 if (TREE_CODE (value) == SAVE_EXPR
4835 && SAVE_EXPR_RTL (value) == 0)
4837 /* Make sure value gets expanded once before the
4838 loop. */
4839 expand_expr (value, const0_rtx, VOIDmode, 0);
4840 emit_queue ();
4842 store_expr (lo_index, index_r, 0);
4843 loop = expand_start_loop (0);
4845 /* Assign value to element index. */
4846 position
4847 = convert (ssizetype,
4848 fold (build (MINUS_EXPR, TREE_TYPE (index),
4849 index, TYPE_MIN_VALUE (domain))));
4850 position = size_binop (MULT_EXPR, position,
4851 convert (ssizetype,
4852 TYPE_SIZE_UNIT (elttype)));
4854 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4855 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4856 xtarget = change_address (target, mode, addr);
4857 if (TREE_CODE (value) == CONSTRUCTOR)
4858 store_constructor (value, xtarget, align, cleared,
4859 bitsize / BITS_PER_UNIT);
4860 else
4861 store_expr (value, xtarget, 0);
4863 expand_exit_loop_if_false (loop,
4864 build (LT_EXPR, integer_type_node,
4865 index, hi_index));
4867 expand_increment (build (PREINCREMENT_EXPR,
4868 TREE_TYPE (index),
4869 index, integer_one_node), 0, 0);
4870 expand_end_loop ();
4871 emit_label (loop_end);
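/* Either the index or the element size is not a compile-time constant:
   compute the element's offset as a tree expression and store through the
   resulting address.  */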
4874 else if ((index != 0 && ! host_integerp (index, 0))
4875 || ! host_integerp (TYPE_SIZE (elttype), 1))
4877 rtx pos_rtx, addr;
4878 tree position;
4880 if (index == 0)
4881 index = ssize_int (i);
4883 if (minelt)
4884 index = convert (ssizetype,
4885 fold (build (MINUS_EXPR, index,
4886 TYPE_MIN_VALUE (domain))));
4888 position = size_binop (MULT_EXPR, index,
4889 convert (ssizetype,
4890 TYPE_SIZE_UNIT (elttype)));
4891 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4892 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4893 xtarget = change_address (target, mode, addr);
4894 store_expr (value, xtarget, 0);
4896 else
4898 if (index != 0)
4899 bitpos = ((tree_low_cst (index, 0) - minelt)
4900 * tree_low_cst (TYPE_SIZE (elttype), 1));
4901 else
4902 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4904 store_constructor_field (target, bitsize, bitpos, mode, value,
4905 type, align, cleared,
4906 TYPE_NONALIASED_COMPONENT (type)
4907 && GET_CODE (target) == MEM
4908 ? MEM_ALIAS_SET (target) :
4909 get_alias_set (elttype));
4915 /* Set constructor assignments. */
4916 else if (TREE_CODE (type) == SET_TYPE)
4918 tree elt = CONSTRUCTOR_ELTS (exp);
4919 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4920 tree domain = TYPE_DOMAIN (type);
4921 tree domain_min, domain_max, bitlength;
4923 /* The default implementation strategy is to extract the constant
4924 parts of the constructor, use that to initialize the target,
4925 and then "or" in whatever non-constant ranges we need in addition.
4927 If a large set is all zero or all ones, it is
4928 probably better to set it using memset (if available) or bzero.
4929 Also, if a large set has just a single range, it may also be
4930 better to first clear the whole set (using
4931 bzero/memset), and then set the bits we want. */
4933 /* Check for all zeros. */
4934 if (elt == NULL_TREE && size > 0)
4936 if (!cleared)
4937 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4938 return;
4941 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4942 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4943 bitlength = size_binop (PLUS_EXPR,
4944 size_diffop (domain_max, domain_min),
4945 ssize_int (1));
4947 nbits = tree_low_cst (bitlength, 1);
4949 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4950 are "complicated" (more than one range), initialize (the
4951 constant parts) by copying from a constant. */
4952 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4953 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4955 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4956 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4957 char *bit_buffer = (char *) alloca (nbits);
4958 HOST_WIDE_INT word = 0;
4959 unsigned int bit_pos = 0;
4960 unsigned int ibit = 0;
4961 unsigned int offset = 0; /* In bytes from beginning of set. */
4963 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
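/* Walk the bit buffer, packing bits into words of SET_WORD_SIZE bits and
   flushing each completed word (and the final partial word) out to
   TARGET.  */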
4964 for (;;)
4966 if (bit_buffer[ibit])
4968 if (BYTES_BIG_ENDIAN)
4969 word |= (1 << (set_word_size - 1 - bit_pos));
4970 else
4971 word |= 1 << bit_pos;
4974 bit_pos++; ibit++;
4975 if (bit_pos >= set_word_size || ibit == nbits)
4977 if (word != 0 || ! cleared)
4979 rtx datum = GEN_INT (word);
4980 rtx to_rtx;
4982 /* The assumption here is that it is safe to use
4983 XEXP if the set is multi-word, but not if
4984 it's single-word. */
4985 if (GET_CODE (target) == MEM)
4987 to_rtx = plus_constant (XEXP (target, 0), offset);
4988 to_rtx = change_address (target, mode, to_rtx);
4990 else if (offset == 0)
4991 to_rtx = target;
4992 else
4993 abort ();
4994 emit_move_insn (to_rtx, datum);
4997 if (ibit == nbits)
4998 break;
4999 word = 0;
5000 bit_pos = 0;
5001 offset += set_word_size / BITS_PER_UNIT;
5005 else if (!cleared)
5006 /* Don't bother clearing storage if the set is all ones. */
5007 if (TREE_CHAIN (elt) != NULL_TREE
5008 || (TREE_PURPOSE (elt) == NULL_TREE
5009 ? nbits != 1
5010 : ( ! host_integerp (TREE_VALUE (elt), 0)
5011 || ! host_integerp (TREE_PURPOSE (elt), 0)
5012 || (tree_low_cst (TREE_VALUE (elt), 0)
5013 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5014 != (HOST_WIDE_INT) nbits))))
5015 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5017 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5019 /* Start of range of element or NULL. */
5020 tree startbit = TREE_PURPOSE (elt);
5021 /* End of range of element, or element value. */
5022 tree endbit = TREE_VALUE (elt);
5023 #ifdef TARGET_MEM_FUNCTIONS
5024 HOST_WIDE_INT startb, endb;
5025 #endif
5026 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5028 bitlength_rtx = expand_expr (bitlength,
5029 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5031 /* Handle non-range tuple element like [ expr ]. */
5032 if (startbit == NULL_TREE)
5034 startbit = save_expr (endbit);
5035 endbit = startbit;
5038 startbit = convert (sizetype, startbit);
5039 endbit = convert (sizetype, endbit);
5040 if (! integer_zerop (domain_min))
5042 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5043 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5045 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5046 EXPAND_CONST_ADDRESS);
5047 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5048 EXPAND_CONST_ADDRESS);
5050 if (REG_P (target))
5052 targetx
5053 = assign_temp
5054 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5055 TYPE_QUAL_CONST)),
5056 0, 1, 1);
5057 emit_move_insn (targetx, target);
5060 else if (GET_CODE (target) == MEM)
5061 targetx = target;
5062 else
5063 abort ();
5065 #ifdef TARGET_MEM_FUNCTIONS
5066 /* Optimization: If startbit and endbit are
5067 constants divisible by BITS_PER_UNIT,
5068 call memset instead. */
5069 if (TREE_CODE (startbit) == INTEGER_CST
5070 && TREE_CODE (endbit) == INTEGER_CST
5071 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5072 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5074 emit_library_call (memset_libfunc, LCT_NORMAL,
5075 VOIDmode, 3,
5076 plus_constant (XEXP (targetx, 0),
5077 startb / BITS_PER_UNIT),
5078 Pmode,
5079 constm1_rtx, TYPE_MODE (integer_type_node),
5080 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5081 TYPE_MODE (sizetype));
5083 else
5084 #endif
5085 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5086 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5087 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5088 startbit_rtx, TYPE_MODE (sizetype),
5089 endbit_rtx, TYPE_MODE (sizetype));
5091 if (REG_P (target))
5092 emit_move_insn (target, targetx);
5096 else
5097 abort ();
5100 /* Store the value of EXP (an expression tree)
5101 into a subfield of TARGET which has mode MODE and occupies
5102 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5103 If MODE is VOIDmode, it means that we are storing into a bit-field.
5105 If VALUE_MODE is VOIDmode, return nothing in particular.
5106 UNSIGNEDP is not used in this case.
5108 Otherwise, return an rtx for the value stored. This rtx
5109 has mode VALUE_MODE if that is convenient to do.
5110 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5112 ALIGN is the alignment that TARGET is known to have.
5113 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5115 ALIAS_SET is the alias set for the destination. This value will
5116 (in general) be different from that for TARGET, since TARGET is a
5117 reference to the containing structure. */
5119 static rtx
5120 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5121 unsignedp, align, total_size, alias_set)
5122 rtx target;
5123 HOST_WIDE_INT bitsize;
5124 HOST_WIDE_INT bitpos;
5125 enum machine_mode mode;
5126 tree exp;
5127 enum machine_mode value_mode;
5128 int unsignedp;
5129 unsigned int align;
5130 HOST_WIDE_INT total_size;
5131 int alias_set;
5133 HOST_WIDE_INT width_mask = 0;
5135 if (TREE_CODE (exp) == ERROR_MARK)
5136 return const0_rtx;
5138 /* If we have nothing to store, do nothing unless the expression has
5139 side-effects. */
5140 if (bitsize == 0)
5141 return expand_expr (exp, const0_rtx, VOIDmode, 0);
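/* WIDTH_MASK is a mask of the low BITSIZE bits; it is used near the end of
   this function to recover the stored value without re-reading the
   bit-field.  */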
5143 if (bitsize < HOST_BITS_PER_WIDE_INT)
5144 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5146 /* If we are storing into an unaligned field of an aligned union that is
5147 in a register, we may have the mode of TARGET being an integer mode but
5148 MODE == BLKmode. In that case, get an aligned object whose size and
5149 alignment are the same as TARGET and store TARGET into it (we can avoid
5150 the store if the field being stored is the entire width of TARGET). Then
5151 call ourselves recursively to store the field into a BLKmode version of
5152 that object. Finally, load from the object into TARGET. This is not
5153 very efficient in general, but should only be slightly more expensive
5154 than the otherwise-required unaligned accesses. Perhaps this can be
5155 cleaned up later. */
5157 if (mode == BLKmode
5158 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5160 rtx object
5161 = assign_temp
5162 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5163 TYPE_QUAL_CONST),
5164 0, 1, 1);
5165 rtx blk_object = copy_rtx (object);
5167 PUT_MODE (blk_object, BLKmode);
5169 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5170 emit_move_insn (object, target);
5172 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5173 align, total_size, alias_set);
5175 /* Even though we aren't returning target, we need to
5176 give it the updated value. */
5177 emit_move_insn (target, object);
5179 return blk_object;
5182 if (GET_CODE (target) == CONCAT)
5184 /* We're storing into a struct containing a single __complex. */
5186 if (bitpos != 0)
5187 abort ();
5188 return store_expr (exp, target, 0);
5191 /* If the structure is in a register or if the component
5192 is a bit field, we cannot use addressing to access it.
5193 Use bit-field techniques or SUBREG to store in it. */
5195 if (mode == VOIDmode
5196 || (mode != BLKmode && ! direct_store[(int) mode]
5197 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5198 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5199 || GET_CODE (target) == REG
5200 || GET_CODE (target) == SUBREG
5201 /* If the field isn't aligned enough to store as an ordinary memref,
5202 store it as a bit field. */
5203 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5204 && (align < GET_MODE_ALIGNMENT (mode)
5205 || bitpos % GET_MODE_ALIGNMENT (mode)))
5206 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5207 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5208 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5209 /* If the RHS and field are a constant size and the size of the
5210 RHS isn't the same size as the bitfield, we must use bitfield
5211 operations. */
5212 || (bitsize >= 0
5213 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5214 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5216 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5218 /* If BITSIZE is narrower than the size of the type of EXP
5219 we will be narrowing TEMP. Normally, what's wanted are the
5220 low-order bits. However, if EXP's type is a record and this is
5221 a big-endian machine, we want the upper BITSIZE bits. */
5222 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5223 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5224 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5225 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5226 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5227 - bitsize),
5228 temp, 1);
5230 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5231 MODE. */
5232 if (mode != VOIDmode && mode != BLKmode
5233 && mode != TYPE_MODE (TREE_TYPE (exp)))
5234 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5236 /* If the modes of TARGET and TEMP are both BLKmode, both
5237 must be in memory and BITPOS must be aligned on a byte
5238 boundary. If so, we simply do a block copy. */
5239 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5241 unsigned int exp_align = expr_align (exp);
5243 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5244 || bitpos % BITS_PER_UNIT != 0)
5245 abort ();
5247 target = change_address (target, VOIDmode,
5248 plus_constant (XEXP (target, 0),
5249 bitpos / BITS_PER_UNIT));
5251 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5252 align = MIN (exp_align, align);
5254 /* Find an alignment that is consistent with the bit position. */
5255 while ((bitpos % align) != 0)
5256 align >>= 1;
5258 emit_block_move (target, temp,
5259 bitsize == -1 ? expr_size (exp)
5260 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5261 / BITS_PER_UNIT),
5262 align);
5264 return value_mode == VOIDmode ? const0_rtx : target;
5267 /* Store the value in the bitfield. */
5268 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5269 if (value_mode != VOIDmode)
5271 /* The caller wants an rtx for the value. */
5272 /* If possible, avoid refetching from the bitfield itself. */
5273 if (width_mask != 0
5274 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5276 tree count;
5277 enum machine_mode tmode;
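/* For an unsigned field, just mask off the low BITSIZE bits; for a signed
   field, shift left and then arithmetic shift right to sign-extend the
   value.  */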
5279 if (unsignedp)
5280 return expand_and (temp,
5281 GEN_INT
5282 (trunc_int_for_mode
5283 (width_mask,
5284 GET_MODE (temp) == VOIDmode
5285 ? value_mode
5286 : GET_MODE (temp))), NULL_RTX);
5287 tmode = GET_MODE (temp);
5288 if (tmode == VOIDmode)
5289 tmode = value_mode;
5290 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5291 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5292 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5294 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5295 NULL_RTX, value_mode, 0, align,
5296 total_size);
5298 return const0_rtx;
5300 else
5302 rtx addr = XEXP (target, 0);
5303 rtx to_rtx;
5305 /* If a value is wanted, it must be the lhs;
5306 so make the address stable for multiple use. */
5308 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5309 && ! CONSTANT_ADDRESS_P (addr)
5310 /* A frame-pointer reference is already stable. */
5311 && ! (GET_CODE (addr) == PLUS
5312 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5313 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5314 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5315 addr = copy_to_reg (addr);
5317 /* Now build a reference to just the desired component. */
5319 to_rtx = copy_rtx (change_address (target, mode,
5320 plus_constant (addr,
5321 (bitpos
5322 / BITS_PER_UNIT))));
5323 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5324 MEM_ALIAS_SET (to_rtx) = alias_set;
5326 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5330 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5331 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5332 ARRAY_REFs and find the ultimate containing object, which we return.
5334 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5335 bit position, and *PUNSIGNEDP to the signedness of the field.
5336 If the position of the field is variable, we store a tree
5337 giving the variable offset (in units) in *POFFSET.
5338 This offset is in addition to the bit position.
5339 If the position is not variable, we store 0 in *POFFSET.
5340 We set *PALIGNMENT to the alignment of the address that will be
5341 computed. This is the alignment of the thing we return if *POFFSET
5342 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5344 If any of the extraction expressions is volatile,
5345 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5347 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5348 is a mode that can be used to access the field. In that case, *PBITSIZE
5349 is redundant.
5351 If the field describes a variable-sized object, *PMODE is set to
5352 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5353 this case, but the address of the object can be found. */
5355 tree
5356 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5357 punsignedp, pvolatilep, palignment)
5358 tree exp;
5359 HOST_WIDE_INT *pbitsize;
5360 HOST_WIDE_INT *pbitpos;
5361 tree *poffset;
5362 enum machine_mode *pmode;
5363 int *punsignedp;
5364 int *pvolatilep;
5365 unsigned int *palignment;
5367 tree size_tree = 0;
5368 enum machine_mode mode = VOIDmode;
5369 tree offset = size_zero_node;
5370 tree bit_offset = bitsize_zero_node;
5371 unsigned int alignment = BIGGEST_ALIGNMENT;
5372 tree tem;
5374 /* First get the mode, signedness, and size. We do this from just the
5375 outermost expression. */
5376 if (TREE_CODE (exp) == COMPONENT_REF)
5378 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5379 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5380 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5382 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5384 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5386 size_tree = TREE_OPERAND (exp, 1);
5387 *punsignedp = TREE_UNSIGNED (exp);
5389 else
5391 mode = TYPE_MODE (TREE_TYPE (exp));
5392 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5394 if (mode == BLKmode)
5395 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5396 else
5397 *pbitsize = GET_MODE_BITSIZE (mode);
5400 if (size_tree != 0)
5402 if (! host_integerp (size_tree, 1))
5403 mode = BLKmode, *pbitsize = -1;
5404 else
5405 *pbitsize = tree_low_cst (size_tree, 1);
5408 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5409 and find the ultimate containing object. */
5410 while (1)
5412 if (TREE_CODE (exp) == BIT_FIELD_REF)
5413 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5414 else if (TREE_CODE (exp) == COMPONENT_REF)
5416 tree field = TREE_OPERAND (exp, 1);
5417 tree this_offset = DECL_FIELD_OFFSET (field);
5419 /* If this field hasn't been filled in yet, don't go
5420 past it. This should only happen when folding expressions
5421 made during type construction. */
5422 if (this_offset == 0)
5423 break;
5424 else if (! TREE_CONSTANT (this_offset)
5425 && contains_placeholder_p (this_offset))
5426 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5428 offset = size_binop (PLUS_EXPR, offset, this_offset);
5429 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5430 DECL_FIELD_BIT_OFFSET (field));
5432 if (! host_integerp (offset, 0))
5433 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5436 else if (TREE_CODE (exp) == ARRAY_REF)
5438 tree index = TREE_OPERAND (exp, 1);
5439 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5440 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5441 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5443 /* We assume all arrays have sizes that are a multiple of a byte.
5444 First subtract the lower bound, if any, in the type of the
5445 index, then convert to sizetype and multiply by the size of the
5446 array element. */
5447 if (low_bound != 0 && ! integer_zerop (low_bound))
5448 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5449 index, low_bound));
5451 /* If the index has a self-referential type, pass it to a
5452 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5453 component to one. */
5454 if (! TREE_CONSTANT (index)
5455 && contains_placeholder_p (index))
5456 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5457 if (! TREE_CONSTANT (unit_size)
5458 && contains_placeholder_p (unit_size))
5459 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5460 TREE_OPERAND (exp, 0));
5462 offset = size_binop (PLUS_EXPR, offset,
5463 size_binop (MULT_EXPR,
5464 convert (sizetype, index),
5465 unit_size));
5468 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5469 && ! ((TREE_CODE (exp) == NOP_EXPR
5470 || TREE_CODE (exp) == CONVERT_EXPR)
5471 && (TYPE_MODE (TREE_TYPE (exp))
5472 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5473 break;
5475 /* If any reference in the chain is volatile, the effect is volatile. */
5476 if (TREE_THIS_VOLATILE (exp))
5477 *pvolatilep = 1;
5479 /* If the offset is non-constant already, then we can't assume any
5480 alignment more than the alignment here. */
5481 if (! TREE_CONSTANT (offset))
5482 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5484 exp = TREE_OPERAND (exp, 0);
5487 if (DECL_P (exp))
5488 alignment = MIN (alignment, DECL_ALIGN (exp));
5489 else if (TREE_TYPE (exp) != 0)
5490 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5492 /* If OFFSET is constant, see if we can return the whole thing as a
5493 constant bit position. Otherwise, split it up. */
5494 if (host_integerp (offset, 0)
5495 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5496 bitsize_unit_node))
5497 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5498 && host_integerp (tem, 0))
5499 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5500 else
5501 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5503 *pmode = mode;
5504 *palignment = alignment;
5505 return exp;
5508 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5510 static enum memory_use_mode
5511 get_memory_usage_from_modifier (modifier)
5512 enum expand_modifier modifier;
5514 switch (modifier)
5516 case EXPAND_NORMAL:
5517 case EXPAND_SUM:
5518 return MEMORY_USE_RO;
5519 break;
5520 case EXPAND_MEMORY_USE_WO:
5521 return MEMORY_USE_WO;
5522 break;
5523 case EXPAND_MEMORY_USE_RW:
5524 return MEMORY_USE_RW;
5525 break;
5526 case EXPAND_MEMORY_USE_DONT:
5527 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5528 MEMORY_USE_DONT, because they are modifiers to a call of
5529 expand_expr in the ADDR_EXPR case of expand_expr. */
5530 case EXPAND_CONST_ADDRESS:
5531 case EXPAND_INITIALIZER:
5532 return MEMORY_USE_DONT;
5533 case EXPAND_MEMORY_USE_BAD:
5534 default:
5535 abort ();
5539 /* Given an rtx VALUE that may contain additions and multiplications, return
5540 an equivalent value that just refers to a register, memory, or constant.
5541 This is done by generating instructions to perform the arithmetic and
5542 returning a pseudo-register containing the value.
5544 The returned value may be a REG, SUBREG, MEM or constant. */
5546 rtx
5547 force_operand (value, target)
5548 rtx value, target;
5550 register optab binoptab = 0;
5551 /* Use a temporary to force order of execution of calls to
5552 `force_operand'. */
5553 rtx tmp;
5554 register rtx op2;
5555 /* Use subtarget as the target for operand 0 of a binary operation. */
5556 register rtx subtarget = get_subtarget (target);
5558 /* Check for a PIC address load. */
5559 if (flag_pic
5560 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5561 && XEXP (value, 0) == pic_offset_table_rtx
5562 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5563 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5564 || GET_CODE (XEXP (value, 1)) == CONST))
5566 if (!subtarget)
5567 subtarget = gen_reg_rtx (GET_MODE (value));
5568 emit_move_insn (subtarget, value);
5569 return subtarget;
5572 if (GET_CODE (value) == PLUS)
5573 binoptab = add_optab;
5574 else if (GET_CODE (value) == MINUS)
5575 binoptab = sub_optab;
5576 else if (GET_CODE (value) == MULT)
5578 op2 = XEXP (value, 1);
5579 if (!CONSTANT_P (op2)
5580 && !(GET_CODE (op2) == REG && op2 != subtarget))
5581 subtarget = 0;
5582 tmp = force_operand (XEXP (value, 0), subtarget);
5583 return expand_mult (GET_MODE (value), tmp,
5584 force_operand (op2, NULL_RTX),
5585 target, 1);
5588 if (binoptab)
5590 op2 = XEXP (value, 1);
5591 if (!CONSTANT_P (op2)
5592 && !(GET_CODE (op2) == REG && op2 != subtarget))
5593 subtarget = 0;
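/* Rewrite subtraction of a constant as addition of its negation, so the
   addition path (including the virtual-register shortcut below) can handle
   it.  */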
5594 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5596 binoptab = add_optab;
5597 op2 = negate_rtx (GET_MODE (value), op2);
5600 /* Check for an addition with OP2 a constant integer and our first
5601 operand a PLUS of a virtual register and something else. In that
5602 case, we want to emit the sum of the virtual register and the
5603 constant first and then add the other value. This allows virtual
5604 register instantiation to simply modify the constant rather than
5605 creating another one around this addition. */
5606 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5607 && GET_CODE (XEXP (value, 0)) == PLUS
5608 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5609 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5610 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5612 rtx temp = expand_binop (GET_MODE (value), binoptab,
5613 XEXP (XEXP (value, 0), 0), op2,
5614 subtarget, 0, OPTAB_LIB_WIDEN);
5615 return expand_binop (GET_MODE (value), binoptab, temp,
5616 force_operand (XEXP (XEXP (value, 0), 1), 0),
5617 target, 0, OPTAB_LIB_WIDEN);
5620 tmp = force_operand (XEXP (value, 0), subtarget);
5621 return expand_binop (GET_MODE (value), binoptab, tmp,
5622 force_operand (op2, NULL_RTX),
5623 target, 0, OPTAB_LIB_WIDEN);
5624 /* We give UNSIGNEDP = 0 to expand_binop
5625 because the only operations we are expanding here are signed ones. */
5627 return value;
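/* Editorial sketch (not part of the original file): a typical use of
   force_operand.  Arithmetic RTL built up symbolically (for example by
   address computations) is flattened into a register, memory reference
   or constant before being handed to code that expects a simple operand.
   The wrapper below is hypothetical.  */
#if 0
static rtx
example_flatten (value)
     rtx value;
{
  /* Emits any needed arithmetic insns and returns a REG, SUBREG, MEM
     or constant equivalent to VALUE, as documented above.  */
  return force_operand (value, NULL_RTX);
}
#endif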
5630 /* Subroutine of expand_expr:
5631 save the non-copied parts (LIST) of an expr (LHS), and return a list
5632 which can restore these values to their previous values,
5633 should something modify their storage. */
5635 static tree
5636 save_noncopied_parts (lhs, list)
5637 tree lhs;
5638 tree list;
5640 tree tail;
5641 tree parts = 0;
5643 for (tail = list; tail; tail = TREE_CHAIN (tail))
5644 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5645 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5646 else
5648 tree part = TREE_VALUE (tail);
5649 tree part_type = TREE_TYPE (part);
5650 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5651 rtx target
5652 = assign_temp (build_qualified_type (part_type,
5653 (TYPE_QUALS (part_type)
5654 | TYPE_QUAL_CONST)),
5655 0, 1, 1);
5657 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5658 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5659 parts = tree_cons (to_be_saved,
5660 build (RTL_EXPR, part_type, NULL_TREE,
5661 (tree) target),
5662 parts);
5663 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5665 return parts;
5668 /* Subroutine of expand_expr:
5669 record the non-copied parts (LIST) of an expr (LHS), and return a list
5670 which specifies the initial values of these parts. */
5672 static tree
5673 init_noncopied_parts (lhs, list)
5674 tree lhs;
5675 tree list;
5677 tree tail;
5678 tree parts = 0;
5680 for (tail = list; tail; tail = TREE_CHAIN (tail))
5681 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5682 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5683 else if (TREE_PURPOSE (tail))
5685 tree part = TREE_VALUE (tail);
5686 tree part_type = TREE_TYPE (part);
5687 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5688 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5690 return parts;
5693 /* Subroutine of expand_expr: return nonzero iff there is no way that
5694 EXP can reference X, which is being modified. TOP_P is nonzero if this
5695 call is going to be used to determine whether we need a temporary
5696 for EXP, as opposed to a recursive call to this function.
5698 It is always safe for this routine to return zero since it merely
5699 searches for optimization opportunities. */
5701 int
5702 safe_from_p (x, exp, top_p)
5703 rtx x;
5704 tree exp;
5705 int top_p;
5707 rtx exp_rtl = 0;
5708 int i, nops;
5709 static tree save_expr_list;
5711 if (x == 0
5712 /* If EXP has varying size, we MUST use a target since we currently
5713 have no way of allocating temporaries of variable size
5714 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5715 So we assume here that something at a higher level has prevented a
5716 clash. This is somewhat bogus, but the best we can do. Only
5717 do this when X is BLKmode and when we are at the top level. */
5718 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5719 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5720 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5721 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5722 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5723 != INTEGER_CST)
5724 && GET_MODE (x) == BLKmode)
5725 /* If X is in the outgoing argument area, it is always safe. */
5726 || (GET_CODE (x) == MEM
5727 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5728 || (GET_CODE (XEXP (x, 0)) == PLUS
5729 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5730 return 1;
5732 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5733 find the underlying pseudo. */
5734 if (GET_CODE (x) == SUBREG)
5736 x = SUBREG_REG (x);
5737 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5738 return 0;
5741 /* A SAVE_EXPR might appear many times in the expression passed to the
5742 top-level safe_from_p call, and if it has a complex subexpression,
5743 examining it multiple times could result in a combinatorial explosion.
5744 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5745 with optimization took about 28 minutes to compile -- even though it was
5746 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5747 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5748 we have processed. Note that the only test of top_p was above. */
5750 if (top_p)
5752 int rtn;
5753 tree t;
5755 save_expr_list = 0;
5757 rtn = safe_from_p (x, exp, 0);
5759 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5760 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5762 return rtn;
5765 /* Now look at our tree code and possibly recurse. */
5766 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5768 case 'd':
5769 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5770 break;
5772 case 'c':
5773 return 1;
5775 case 'x':
5776 if (TREE_CODE (exp) == TREE_LIST)
5777 return ((TREE_VALUE (exp) == 0
5778 || safe_from_p (x, TREE_VALUE (exp), 0))
5779 && (TREE_CHAIN (exp) == 0
5780 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5781 else if (TREE_CODE (exp) == ERROR_MARK)
5782 return 1; /* An already-visited SAVE_EXPR? */
5783 else
5784 return 0;
5786 case '1':
5787 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5789 case '2':
5790 case '<':
5791 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5792 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5794 case 'e':
5795 case 'r':
5796 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5797 the expression. If it is set, we conflict iff we are that rtx or
5798 both are in memory. Otherwise, we check all operands of the
5799 expression recursively. */
5801 switch (TREE_CODE (exp))
5803 case ADDR_EXPR:
5804 return (staticp (TREE_OPERAND (exp, 0))
5805 || TREE_STATIC (exp)
5806 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5808 case INDIRECT_REF:
5809 if (GET_CODE (x) == MEM
5810 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5811 get_alias_set (exp)))
5812 return 0;
5813 break;
5815 case CALL_EXPR:
5816 /* Assume that the call will clobber all hard registers and
5817 all of memory. */
5818 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5819 || GET_CODE (x) == MEM)
5820 return 0;
5821 break;
5823 case RTL_EXPR:
5824 /* If a sequence exists, we would have to scan every instruction
5825 in the sequence to see if it was safe. This is probably not
5826 worthwhile. */
5827 if (RTL_EXPR_SEQUENCE (exp))
5828 return 0;
5830 exp_rtl = RTL_EXPR_RTL (exp);
5831 break;
5833 case WITH_CLEANUP_EXPR:
5834 exp_rtl = RTL_EXPR_RTL (exp);
5835 break;
5837 case CLEANUP_POINT_EXPR:
5838 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5840 case SAVE_EXPR:
5841 exp_rtl = SAVE_EXPR_RTL (exp);
5842 if (exp_rtl)
5843 break;
5845 /* If we've already scanned this, don't do it again. Otherwise,
5846 show we've scanned it and record for clearing the flag if we're
5847 going on. */
5848 if (TREE_PRIVATE (exp))
5849 return 1;
5851 TREE_PRIVATE (exp) = 1;
5852 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5854 TREE_PRIVATE (exp) = 0;
5855 return 0;
5858 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5859 return 1;
5861 case BIND_EXPR:
5862 /* The only operand we look at is operand 1. The rest aren't
5863 part of the expression. */
5864 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5866 case METHOD_CALL_EXPR:
5867 /* This takes an rtx argument, but shouldn't appear here. */
5868 abort ();
5870 default:
5871 break;
5874 /* If we have an rtx, we do not need to scan our operands. */
5875 if (exp_rtl)
5876 break;
5878 nops = first_rtl_op (TREE_CODE (exp));
5879 for (i = 0; i < nops; i++)
5880 if (TREE_OPERAND (exp, i) != 0
5881 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5882 return 0;
5884 /* If this is a language-specific tree code, it may require
5885 special handling. */
5886 if ((unsigned int) TREE_CODE (exp)
5887 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5888 && lang_safe_from_p
5889 && !(*lang_safe_from_p) (x, exp))
5890 return 0;
5893 /* If we have an rtl, find any enclosed object. Then see if we conflict
5894 with it. */
5895 if (exp_rtl)
5897 if (GET_CODE (exp_rtl) == SUBREG)
5899 exp_rtl = SUBREG_REG (exp_rtl);
5900 if (GET_CODE (exp_rtl) == REG
5901 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5902 return 0;
5905 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5906 are memory and they conflict. */
5907 return ! (rtx_equal_p (x, exp_rtl)
5908 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5909 && true_dependence (exp_rtl, GET_MODE (x), x,
5910 rtx_addr_varies_p)));
5913 /* If we reach here, it is safe. */
5914 return 1;
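/* Editorial sketch (not part of the original file): the usual top-level
   use of safe_from_p, as in the CONSTRUCTOR and union-conversion cases of
   expand_expr below -- TARGET is reused only when EXP provably cannot
   reference it.  The wrapper is hypothetical.  */
#if 0
static rtx
example_pick_target (target, exp, type)
     rtx target;
     tree exp;
     tree type;
{
  if (target == 0 || ! safe_from_p (target, exp, 1))
    target = assign_temp (type, 0, 1, 1);
  return target;
}
#endif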
5917 /* Subroutine of expand_expr: return nonzero iff EXP is an
5918 expression whose type is statically determinable. */
5920 static int
5921 fixed_type_p (exp)
5922 tree exp;
5924 if (TREE_CODE (exp) == PARM_DECL
5925 || TREE_CODE (exp) == VAR_DECL
5926 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5927 || TREE_CODE (exp) == COMPONENT_REF
5928 || TREE_CODE (exp) == ARRAY_REF)
5929 return 1;
5930 return 0;
5933 /* Subroutine of expand_expr: return rtx if EXP is a
5934 variable or parameter; else return 0. */
5936 static rtx
5937 var_rtx (exp)
5938 tree exp;
5940 STRIP_NOPS (exp);
5941 switch (TREE_CODE (exp))
5943 case PARM_DECL:
5944 case VAR_DECL:
5945 return DECL_RTL (exp);
5946 default:
5947 return 0;
5951 #ifdef MAX_INTEGER_COMPUTATION_MODE
5953 void
5954 check_max_integer_computation_mode (exp)
5955 tree exp;
5957 enum tree_code code;
5958 enum machine_mode mode;
5960 /* Strip any NOPs that don't change the mode. */
5961 STRIP_NOPS (exp);
5962 code = TREE_CODE (exp);
5964 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5965 if (code == NOP_EXPR
5966 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5967 return;
5969 /* First check the type of the overall operation. We need only look at
5970 unary, binary and relational operations. */
5971 if (TREE_CODE_CLASS (code) == '1'
5972 || TREE_CODE_CLASS (code) == '2'
5973 || TREE_CODE_CLASS (code) == '<')
5975 mode = TYPE_MODE (TREE_TYPE (exp));
5976 if (GET_MODE_CLASS (mode) == MODE_INT
5977 && mode > MAX_INTEGER_COMPUTATION_MODE)
5978 internal_error ("unsupported wide integer operation");
5981 /* Check operand of a unary op. */
5982 if (TREE_CODE_CLASS (code) == '1')
5984 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5985 if (GET_MODE_CLASS (mode) == MODE_INT
5986 && mode > MAX_INTEGER_COMPUTATION_MODE)
5987 internal_error ("unsupported wide integer operation");
5990 /* Check operands of a binary/comparison op. */
5991 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5993 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5994 if (GET_MODE_CLASS (mode) == MODE_INT
5995 && mode > MAX_INTEGER_COMPUTATION_MODE)
5996 internal_error ("unsupported wide integer operation");
5998 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5999 if (GET_MODE_CLASS (mode) == MODE_INT
6000 && mode > MAX_INTEGER_COMPUTATION_MODE)
6001 internal_error ("unsupported wide integer operation");
6004 #endif
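/* Editorial sketch (not part of the original file): the checks above are
   compiled only when the target's configuration header defines
   MAX_INTEGER_COMPUTATION_MODE; a hypothetical example follows.  Any
   unary, binary or relational operation on a wider integer mode is then
   rejected with the "unsupported wide integer operation" error.  */
#if 0
#define MAX_INTEGER_COMPUTATION_MODE DImode	/* hypothetical target setting */
#endif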
6006 /* expand_expr: generate code for computing expression EXP.
6007 An rtx for the computed value is returned. The value is never null.
6008 In the case of a void EXP, const0_rtx is returned.
6010 The value may be stored in TARGET if TARGET is nonzero.
6011 TARGET is just a suggestion; callers must assume that
6012 the rtx returned may not be the same as TARGET.
6014 If TARGET is CONST0_RTX, it means that the value will be ignored.
6016 If TMODE is not VOIDmode, it suggests generating the
6017 result in mode TMODE. But this is done only when convenient.
6018 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6019 TMODE is just a suggestion; callers must assume that
6020 the rtx returned may not have mode TMODE.
6022 Note that TARGET may have neither TMODE nor MODE. In that case, it
6023 probably will not be used.
6025 If MODIFIER is EXPAND_SUM then when EXP is an addition
6026 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6027 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6028 products as above, or REG or MEM, or constant.
6029 Ordinarily in such cases we would output mul or add instructions
6030 and then return a pseudo reg containing the sum.
6032 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6033 it also marks a label as absolutely required (it can't be dead).
6034 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6035 This is used for outputting expressions used in initializers.
6037 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6038 with a constant address even if that address is not normally legitimate.
6039 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
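/* Editorial sketch (not part of the original file): representative calls
   into expand_expr following the conventions documented above.  EXP and
   ADDR_OPERAND are placeholder trees.  */
#if 0
  /* Compute EXP anywhere convenient, in its natural mode.  */
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  /* Evaluate EXP for its side effects only; the value is ignored.  */
  expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Allow a (PLUS ...)/(MULT ...) nest to come back when expanding part
     of an address computation.  */
  rtx sum = expand_expr (addr_operand, NULL_RTX, VOIDmode, EXPAND_SUM);
#endif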
6041 rtx
6042 expand_expr (exp, target, tmode, modifier)
6043 register tree exp;
6044 rtx target;
6045 enum machine_mode tmode;
6046 enum expand_modifier modifier;
6048 register rtx op0, op1, temp;
6049 tree type = TREE_TYPE (exp);
6050 int unsignedp = TREE_UNSIGNED (type);
6051 register enum machine_mode mode;
6052 register enum tree_code code = TREE_CODE (exp);
6053 optab this_optab;
6054 rtx subtarget, original_target;
6055 int ignore;
6056 tree context;
6057 /* Used by check-memory-usage to make modifier read only. */
6058 enum expand_modifier ro_modifier;
6060 /* Handle ERROR_MARK before anybody tries to access its type. */
6061 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6063 op0 = CONST0_RTX (tmode);
6064 if (op0 != 0)
6065 return op0;
6066 return const0_rtx;
6069 mode = TYPE_MODE (type);
6070 /* Use subtarget as the target for operand 0 of a binary operation. */
6071 subtarget = get_subtarget (target);
6072 original_target = target;
6073 ignore = (target == const0_rtx
6074 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6075 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6076 || code == COND_EXPR)
6077 && TREE_CODE (type) == VOID_TYPE));
6079 /* Make a read-only version of the modifier. */
6080 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6081 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6082 ro_modifier = modifier;
6083 else
6084 ro_modifier = EXPAND_NORMAL;
6086 /* If we are going to ignore this result, we need only do something
6087 if there is a side-effect somewhere in the expression. If there
6088 is, short-circuit the most common cases here. Note that we must
6089 not call expand_expr with anything but const0_rtx in case this
6090 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6092 if (ignore)
6094 if (! TREE_SIDE_EFFECTS (exp))
6095 return const0_rtx;
6097 /* Ensure we reference a volatile object even if value is ignored, but
6098 don't do this if all we are doing is taking its address. */
6099 if (TREE_THIS_VOLATILE (exp)
6100 && TREE_CODE (exp) != FUNCTION_DECL
6101 && mode != VOIDmode && mode != BLKmode
6102 && modifier != EXPAND_CONST_ADDRESS)
6104 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6105 if (GET_CODE (temp) == MEM)
6106 temp = copy_to_reg (temp);
6107 return const0_rtx;
6110 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6111 || code == INDIRECT_REF || code == BUFFER_REF)
6112 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6113 VOIDmode, ro_modifier);
6114 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6115 || code == ARRAY_REF)
6117 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6118 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6119 return const0_rtx;
6121 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6122 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6123 /* If the second operand has no side effects, just evaluate
6124 the first. */
6125 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6126 VOIDmode, ro_modifier);
6127 else if (code == BIT_FIELD_REF)
6129 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6130 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6131 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6132 return const0_rtx;
6135 target = 0;
6138 #ifdef MAX_INTEGER_COMPUTATION_MODE
6139 /* Only check stuff here if the mode we want is different from the mode
6140 of the expression; if it's the same, check_max_integer_computation_mode
6141 will handle it. Do we really need to check this stuff at all? */
6143 if (target
6144 && GET_MODE (target) != mode
6145 && TREE_CODE (exp) != INTEGER_CST
6146 && TREE_CODE (exp) != PARM_DECL
6147 && TREE_CODE (exp) != ARRAY_REF
6148 && TREE_CODE (exp) != COMPONENT_REF
6149 && TREE_CODE (exp) != BIT_FIELD_REF
6150 && TREE_CODE (exp) != INDIRECT_REF
6151 && TREE_CODE (exp) != CALL_EXPR
6152 && TREE_CODE (exp) != VAR_DECL
6153 && TREE_CODE (exp) != RTL_EXPR)
6155 enum machine_mode mode = GET_MODE (target);
6157 if (GET_MODE_CLASS (mode) == MODE_INT
6158 && mode > MAX_INTEGER_COMPUTATION_MODE)
6159 internal_error ("unsupported wide integer operation");
6162 if (tmode != mode
6163 && TREE_CODE (exp) != INTEGER_CST
6164 && TREE_CODE (exp) != PARM_DECL
6165 && TREE_CODE (exp) != ARRAY_REF
6166 && TREE_CODE (exp) != COMPONENT_REF
6167 && TREE_CODE (exp) != BIT_FIELD_REF
6168 && TREE_CODE (exp) != INDIRECT_REF
6169 && TREE_CODE (exp) != VAR_DECL
6170 && TREE_CODE (exp) != CALL_EXPR
6171 && TREE_CODE (exp) != RTL_EXPR
6172 && GET_MODE_CLASS (tmode) == MODE_INT
6173 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6174 internal_error ("unsupported wide integer operation");
6176 check_max_integer_computation_mode (exp);
6177 #endif
6179 /* If will do cse, generate all results into pseudo registers
6180 since 1) that allows cse to find more things
6181 and 2) otherwise cse could produce an insn the machine
6182 cannot support. */
6184 if (! cse_not_expected && mode != BLKmode && target
6185 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6186 target = subtarget;
6188 switch (code)
6190 case LABEL_DECL:
6192 tree function = decl_function_context (exp);
6193 /* Handle using a label in a containing function. */
6194 if (function != current_function_decl
6195 && function != inline_function_decl && function != 0)
6197 struct function *p = find_function_data (function);
6198 p->expr->x_forced_labels
6199 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6200 p->expr->x_forced_labels);
6202 else
6204 if (modifier == EXPAND_INITIALIZER)
6205 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6206 label_rtx (exp),
6207 forced_labels);
6210 temp = gen_rtx_MEM (FUNCTION_MODE,
6211 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6212 if (function != current_function_decl
6213 && function != inline_function_decl && function != 0)
6214 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6215 return temp;
6218 case PARM_DECL:
6219 if (DECL_RTL (exp) == 0)
6221 error_with_decl (exp, "prior parameter's size depends on `%s'");
6222 return CONST0_RTX (mode);
6225 /* ... fall through ... */
6227 case VAR_DECL:
6228 /* If a static var's type was incomplete when the decl was written,
6229 but the type is complete now, lay out the decl now. */
6230 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6231 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6233 layout_decl (exp, 0);
6234 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6237 /* Although static-storage variables start off initialized, according to
6238 ANSI C, a memcpy could overwrite them with uninitialized values. So
6239 we check them too. This also lets us check for read-only variables
6240 accessed via a non-const declaration, in case it won't be detected
6241 any other way (e.g., in an embedded system or OS kernel without
6242 memory protection).
6244 Aggregates are not checked here; they're handled elsewhere. */
6245 if (cfun && current_function_check_memory_usage
6246 && code == VAR_DECL
6247 && GET_CODE (DECL_RTL (exp)) == MEM
6248 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6250 enum memory_use_mode memory_usage;
6251 memory_usage = get_memory_usage_from_modifier (modifier);
6253 in_check_memory_usage = 1;
6254 if (memory_usage != MEMORY_USE_DONT)
6255 emit_library_call (chkr_check_addr_libfunc,
6256 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6257 XEXP (DECL_RTL (exp), 0), Pmode,
6258 GEN_INT (int_size_in_bytes (type)),
6259 TYPE_MODE (sizetype),
6260 GEN_INT (memory_usage),
6261 TYPE_MODE (integer_type_node));
6262 in_check_memory_usage = 0;
6265 /* ... fall through ... */
6267 case FUNCTION_DECL:
6268 case RESULT_DECL:
6269 if (DECL_RTL (exp) == 0)
6270 abort ();
6272 /* Ensure the variable is marked as used even if it doesn't go through
6273 a parser. If it hasn't been used yet, write out an external
6274 definition. */
6275 if (! TREE_USED (exp))
6277 assemble_external (exp);
6278 TREE_USED (exp) = 1;
6281 /* Show we haven't gotten RTL for this yet. */
6282 temp = 0;
6284 /* Handle variables inherited from containing functions. */
6285 context = decl_function_context (exp);
6287 /* We treat inline_function_decl as an alias for the current function
6288 because that is the inline function whose vars, types, etc.
6289 are being merged into the current function.
6290 See expand_inline_function. */
6292 if (context != 0 && context != current_function_decl
6293 && context != inline_function_decl
6294 /* If var is static, we don't need a static chain to access it. */
6295 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6296 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6298 rtx addr;
6300 /* Mark as non-local and addressable. */
6301 DECL_NONLOCAL (exp) = 1;
6302 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6303 abort ();
6304 mark_addressable (exp);
6305 if (GET_CODE (DECL_RTL (exp)) != MEM)
6306 abort ();
6307 addr = XEXP (DECL_RTL (exp), 0);
6308 if (GET_CODE (addr) == MEM)
6309 addr = change_address (addr, Pmode,
6310 fix_lexical_addr (XEXP (addr, 0), exp));
6311 else
6312 addr = fix_lexical_addr (addr, exp);
6314 temp = change_address (DECL_RTL (exp), mode, addr);
6317 /* This is the case of an array whose size is to be determined
6318 from its initializer, while the initializer is still being parsed.
6319 See expand_decl. */
6321 else if (GET_CODE (DECL_RTL (exp)) == MEM
6322 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6323 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6324 XEXP (DECL_RTL (exp), 0));
6326 /* If DECL_RTL is memory, we are in the normal case and either
6327 the address is not valid or it is not a register and -fforce-addr
6328 is specified, get the address into a register. */
6330 else if (GET_CODE (DECL_RTL (exp)) == MEM
6331 && modifier != EXPAND_CONST_ADDRESS
6332 && modifier != EXPAND_SUM
6333 && modifier != EXPAND_INITIALIZER
6334 && (! memory_address_p (DECL_MODE (exp),
6335 XEXP (DECL_RTL (exp), 0))
6336 || (flag_force_addr
6337 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6338 temp = change_address (DECL_RTL (exp), VOIDmode,
6339 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6341 /* If we got something, return it. But first, set the alignment
6342 if the address is a register. */
6343 if (temp != 0)
6345 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6346 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6348 return temp;
6351 /* If the mode of DECL_RTL does not match that of the decl, it
6352 must be a promoted value. We return a SUBREG of the wanted mode,
6353 but mark it so that we know that it was already extended. */
6355 if (GET_CODE (DECL_RTL (exp)) == REG
6356 && GET_MODE (DECL_RTL (exp)) != mode)
6358 /* Get the signedness used for this variable. Ensure we get the
6359 same mode we got when the variable was declared. */
6360 if (GET_MODE (DECL_RTL (exp))
6361 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6362 abort ();
6364 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6365 SUBREG_PROMOTED_VAR_P (temp) = 1;
6366 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6367 return temp;
6370 return DECL_RTL (exp);
6372 case INTEGER_CST:
6373 return immed_double_const (TREE_INT_CST_LOW (exp),
6374 TREE_INT_CST_HIGH (exp), mode);
6376 case CONST_DECL:
6377 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6378 EXPAND_MEMORY_USE_BAD);
6380 case REAL_CST:
6381 /* If optimized, generate immediate CONST_DOUBLE
6382 which will be turned into memory by reload if necessary.
6384 We used to force a register so that loop.c could see it. But
6385 this does not allow gen_* patterns to perform optimizations with
6386 the constants. It also produces two insns in cases like "x = 1.0;".
6387 On most machines, floating-point constants are not permitted in
6388 many insns, so we'd end up copying it to a register in any case.
6390 Now, we do the copying in expand_binop, if appropriate. */
6391 return immed_real_const (exp);
6393 case COMPLEX_CST:
6394 case STRING_CST:
6395 if (! TREE_CST_RTL (exp))
6396 output_constant_def (exp, 1);
6398 /* TREE_CST_RTL probably contains a constant address.
6399 On RISC machines where a constant address isn't valid,
6400 make some insns to get that address into a register. */
6401 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6402 && modifier != EXPAND_CONST_ADDRESS
6403 && modifier != EXPAND_INITIALIZER
6404 && modifier != EXPAND_SUM
6405 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6406 || (flag_force_addr
6407 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6408 return change_address (TREE_CST_RTL (exp), VOIDmode,
6409 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6410 return TREE_CST_RTL (exp);
6412 case EXPR_WITH_FILE_LOCATION:
6414 rtx to_return;
6415 const char *saved_input_filename = input_filename;
6416 int saved_lineno = lineno;
6417 input_filename = EXPR_WFL_FILENAME (exp);
6418 lineno = EXPR_WFL_LINENO (exp);
6419 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6420 emit_line_note (input_filename, lineno);
6421 /* Possibly avoid switching back and forth here. */
6422 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6423 input_filename = saved_input_filename;
6424 lineno = saved_lineno;
6425 return to_return;
6428 case SAVE_EXPR:
6429 context = decl_function_context (exp);
6431 /* If this SAVE_EXPR was at global context, assume we are an
6432 initialization function and move it into our context. */
6433 if (context == 0)
6434 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6436 /* We treat inline_function_decl as an alias for the current function
6437 because that is the inline function whose vars, types, etc.
6438 are being merged into the current function.
6439 See expand_inline_function. */
6440 if (context == current_function_decl || context == inline_function_decl)
6441 context = 0;
6443 /* If this is non-local, handle it. */
6444 if (context)
6446 /* The following call just exists to abort if the context is
6447 not of a containing function. */
6448 find_function_data (context);
6450 temp = SAVE_EXPR_RTL (exp);
6451 if (temp && GET_CODE (temp) == REG)
6453 put_var_into_stack (exp);
6454 temp = SAVE_EXPR_RTL (exp);
6456 if (temp == 0 || GET_CODE (temp) != MEM)
6457 abort ();
6458 return change_address (temp, mode,
6459 fix_lexical_addr (XEXP (temp, 0), exp));
6461 if (SAVE_EXPR_RTL (exp) == 0)
6463 if (mode == VOIDmode)
6464 temp = const0_rtx;
6465 else
6466 temp = assign_temp (build_qualified_type (type,
6467 (TYPE_QUALS (type)
6468 | TYPE_QUAL_CONST)),
6469 3, 0, 0);
6471 SAVE_EXPR_RTL (exp) = temp;
6472 if (!optimize && GET_CODE (temp) == REG)
6473 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6474 save_expr_regs);
6476 /* If the mode of TEMP does not match that of the expression, it
6477 must be a promoted value. We pass store_expr a SUBREG of the
6478 wanted mode but mark it so that we know that it was already
6479 extended. Note that `unsignedp' was modified above in
6480 this case. */
6482 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6484 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6485 SUBREG_PROMOTED_VAR_P (temp) = 1;
6486 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6489 if (temp == const0_rtx)
6490 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6491 EXPAND_MEMORY_USE_BAD);
6492 else
6493 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6495 TREE_USED (exp) = 1;
6498 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6499 must be a promoted value. We return a SUBREG of the wanted mode,
6500 but mark it so that we know that it was already extended. */
6502 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6503 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6505 /* Compute the signedness and make the proper SUBREG. */
6506 promote_mode (type, mode, &unsignedp, 0);
6507 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6508 SUBREG_PROMOTED_VAR_P (temp) = 1;
6509 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6510 return temp;
6513 return SAVE_EXPR_RTL (exp);
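	  /* Editorial sketch (not part of the original file): how a front
	     end typically creates the SAVE_EXPRs handled above, so that an
	     operand is evaluated only once and later expansions reuse
	     SAVE_EXPR_RTL.  ARG is a hypothetical operand tree.  */
#if 0
	  tree once = save_expr (arg);
	  tree sum = build (PLUS_EXPR, TREE_TYPE (once), once, once);
#endif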
6515 case UNSAVE_EXPR:
6517 rtx temp;
6518 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6519 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6520 return temp;
6523 case PLACEHOLDER_EXPR:
6525 tree placeholder_expr;
6527 /* If there is an object on the head of the placeholder list,
6528 see if some object in it is of type TYPE or a pointer to it. For
6529 further information, see tree.def. */
6530 for (placeholder_expr = placeholder_list;
6531 placeholder_expr != 0;
6532 placeholder_expr = TREE_CHAIN (placeholder_expr))
6534 tree need_type = TYPE_MAIN_VARIANT (type);
6535 tree object = 0;
6536 tree old_list = placeholder_list;
6537 tree elt;
6539 /* Find the outermost reference that is of the type we want.
6540 If none, see if any object has a type that is a pointer to
6541 the type we want. */
6542 for (elt = TREE_PURPOSE (placeholder_expr);
6543 elt != 0 && object == 0;
6544 elt
6545 = ((TREE_CODE (elt) == COMPOUND_EXPR
6546 || TREE_CODE (elt) == COND_EXPR)
6547 ? TREE_OPERAND (elt, 1)
6548 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6549 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6550 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6551 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6552 ? TREE_OPERAND (elt, 0) : 0))
6553 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6554 object = elt;
6556 for (elt = TREE_PURPOSE (placeholder_expr);
6557 elt != 0 && object == 0;
6558 elt
6559 = ((TREE_CODE (elt) == COMPOUND_EXPR
6560 || TREE_CODE (elt) == COND_EXPR)
6561 ? TREE_OPERAND (elt, 1)
6562 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6563 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6564 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6565 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6566 ? TREE_OPERAND (elt, 0) : 0))
6567 if (POINTER_TYPE_P (TREE_TYPE (elt))
6568 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6569 == need_type))
6570 object = build1 (INDIRECT_REF, need_type, elt);
6572 if (object != 0)
6574 /* Expand this object skipping the list entries before
6575 it was found in case it is also a PLACEHOLDER_EXPR.
6576 In that case, we want to translate it using subsequent
6577 entries. */
6578 placeholder_list = TREE_CHAIN (placeholder_expr);
6579 temp = expand_expr (object, original_target, tmode,
6580 ro_modifier);
6581 placeholder_list = old_list;
6582 return temp;
6587 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6588 abort ();
6590 case WITH_RECORD_EXPR:
6591 /* Put the object on the placeholder list, expand our first operand,
6592 and pop the list. */
6593 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6594 placeholder_list);
6595 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6596 tmode, ro_modifier);
6597 placeholder_list = TREE_CHAIN (placeholder_list);
6598 return target;
6600 case GOTO_EXPR:
6601 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6602 expand_goto (TREE_OPERAND (exp, 0));
6603 else
6604 expand_computed_goto (TREE_OPERAND (exp, 0));
6605 return const0_rtx;
6607 case EXIT_EXPR:
6608 expand_exit_loop_if_false (NULL,
6609 invert_truthvalue (TREE_OPERAND (exp, 0)));
6610 return const0_rtx;
6612 case LABELED_BLOCK_EXPR:
6613 if (LABELED_BLOCK_BODY (exp))
6614 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6615 /* Should perhaps use expand_label, but this is simpler and safer. */
6616 do_pending_stack_adjust ();
6617 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6618 return const0_rtx;
6620 case EXIT_BLOCK_EXPR:
6621 if (EXIT_BLOCK_RETURN (exp))
6622 sorry ("returned value in block_exit_expr");
6623 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6624 return const0_rtx;
6626 case LOOP_EXPR:
6627 push_temp_slots ();
6628 expand_start_loop (1);
6629 expand_expr_stmt (TREE_OPERAND (exp, 0));
6630 expand_end_loop ();
6631 pop_temp_slots ();
6633 return const0_rtx;
6635 case BIND_EXPR:
6637 tree vars = TREE_OPERAND (exp, 0);
6638 int vars_need_expansion = 0;
6640 /* Need to open a binding contour here because
6641 if there are any cleanups they must be contained here. */
6642 expand_start_bindings (2);
6644 /* Mark the corresponding BLOCK for output in its proper place. */
6645 if (TREE_OPERAND (exp, 2) != 0
6646 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6647 insert_block (TREE_OPERAND (exp, 2));
6649 /* If VARS have not yet been expanded, expand them now. */
6650 while (vars)
6652 if (!DECL_RTL_SET_P (vars))
6654 vars_need_expansion = 1;
6655 expand_decl (vars);
6657 expand_decl_init (vars);
6658 vars = TREE_CHAIN (vars);
6661 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6663 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6665 return temp;
6668 case RTL_EXPR:
6669 if (RTL_EXPR_SEQUENCE (exp))
6671 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6672 abort ();
6673 emit_insns (RTL_EXPR_SEQUENCE (exp));
6674 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6676 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6677 free_temps_for_rtl_expr (exp);
6678 return RTL_EXPR_RTL (exp);
6680 case CONSTRUCTOR:
6681 /* If we don't need the result, just ensure we evaluate any
6682 subexpressions. */
6683 if (ignore)
6685 tree elt;
6686 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6687 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6688 EXPAND_MEMORY_USE_BAD);
6689 return const0_rtx;
6692 /* All elts simple constants => refer to a constant in memory. But
6693 if this is a non-BLKmode mode, let it store a field at a time
6694 since that should make a CONST_INT or CONST_DOUBLE when we
6695 fold. Likewise, if we have a target we can use, it is best to
6696 store directly into the target unless the type is large enough
6697 that memcpy will be used. If we are making an initializer and
6698 all operands are constant, put it in memory as well. */
6699 else if ((TREE_STATIC (exp)
6700 && ((mode == BLKmode
6701 && ! (target != 0 && safe_from_p (target, exp, 1)))
6702 || TREE_ADDRESSABLE (exp)
6703 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6704 && (! MOVE_BY_PIECES_P
6705 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6706 TYPE_ALIGN (type)))
6707 && ! mostly_zeros_p (exp))))
6708 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6710 rtx constructor = output_constant_def (exp, 1);
6712 if (modifier != EXPAND_CONST_ADDRESS
6713 && modifier != EXPAND_INITIALIZER
6714 && modifier != EXPAND_SUM
6715 && (! memory_address_p (GET_MODE (constructor),
6716 XEXP (constructor, 0))
6717 || (flag_force_addr
6718 && GET_CODE (XEXP (constructor, 0)) != REG)))
6719 constructor = change_address (constructor, VOIDmode,
6720 XEXP (constructor, 0));
6721 return constructor;
6723 else
6725 /* Handle calls that pass values in multiple non-contiguous
6726 locations. The Irix 6 ABI has examples of this. */
6727 if (target == 0 || ! safe_from_p (target, exp, 1)
6728 || GET_CODE (target) == PARALLEL)
6729 target
6730 = assign_temp (build_qualified_type (type,
6731 (TYPE_QUALS (type)
6732 | (TREE_READONLY (exp)
6733 * TYPE_QUAL_CONST))),
6734 TREE_ADDRESSABLE (exp), 1, 1);
6736 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6737 int_size_in_bytes (TREE_TYPE (exp)));
6738 return target;
6741 case INDIRECT_REF:
6743 tree exp1 = TREE_OPERAND (exp, 0);
6744 tree index;
6745 tree string = string_constant (exp1, &index);
6747 /* Try to optimize reads from const strings. */
6748 if (string
6749 && TREE_CODE (string) == STRING_CST
6750 && TREE_CODE (index) == INTEGER_CST
6751 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6752 && GET_MODE_CLASS (mode) == MODE_INT
6753 && GET_MODE_SIZE (mode) == 1
6754 && modifier != EXPAND_MEMORY_USE_WO)
6755 return
6756 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6758 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6759 op0 = memory_address (mode, op0);
6761 if (cfun && current_function_check_memory_usage
6762 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6764 enum memory_use_mode memory_usage;
6765 memory_usage = get_memory_usage_from_modifier (modifier);
6767 if (memory_usage != MEMORY_USE_DONT)
6769 in_check_memory_usage = 1;
6770 emit_library_call (chkr_check_addr_libfunc,
6771 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6772 Pmode, GEN_INT (int_size_in_bytes (type)),
6773 TYPE_MODE (sizetype),
6774 GEN_INT (memory_usage),
6775 TYPE_MODE (integer_type_node));
6776 in_check_memory_usage = 0;
6780 temp = gen_rtx_MEM (mode, op0);
6781 set_mem_attributes (temp, exp, 0);
6783 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6784 here, because, in C and C++, the fact that a location is accessed
6785 through a pointer to const does not mean that the value there can
6786 never change. Languages where it can never change should
6787 also set TREE_STATIC. */
6788 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6790 /* If we are writing to this object and its type is a record with
6791 readonly fields, we must mark it as readonly so it will
6792 conflict with readonly references to those fields. */
6793 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6794 RTX_UNCHANGING_P (temp) = 1;
6796 return temp;
6799 case ARRAY_REF:
6800 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6801 abort ();
6804 tree array = TREE_OPERAND (exp, 0);
6805 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6806 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6807 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6808 HOST_WIDE_INT i;
6810 /* Optimize the special-case of a zero lower bound.
6812 We convert the low_bound to sizetype to avoid some problems
6813 with constant folding. (E.g. suppose the lower bound is 1,
6814 and its mode is QI. Without the conversion, (ARRAY
6815 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6816 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6818 if (! integer_zerop (low_bound))
6819 index = size_diffop (index, convert (sizetype, low_bound));
6821 /* Fold an expression like: "foo"[2].
6822 This is not done in fold so it won't happen inside &.
6823 Don't fold if this is for wide characters since it's too
6824 difficult to do correctly and this is a very rare case. */
6826 if (TREE_CODE (array) == STRING_CST
6827 && TREE_CODE (index) == INTEGER_CST
6828 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6829 && GET_MODE_CLASS (mode) == MODE_INT
6830 && GET_MODE_SIZE (mode) == 1)
6831 return
6832 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6834 /* If this is a constant index into a constant array,
6835 just get the value from the array. Handle both the cases when
6836 we have an explicit constructor and when our operand is a variable
6837 that was declared const. */
6839 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6840 && TREE_CODE (index) == INTEGER_CST
6841 && 0 > compare_tree_int (index,
6842 list_length (CONSTRUCTOR_ELTS
6843 (TREE_OPERAND (exp, 0)))))
6845 tree elem;
6847 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6848 i = TREE_INT_CST_LOW (index);
6849 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6852 if (elem)
6853 return expand_expr (fold (TREE_VALUE (elem)), target,
6854 tmode, ro_modifier);
6857 else if (optimize >= 1
6858 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6859 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6860 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6862 if (TREE_CODE (index) == INTEGER_CST)
6864 tree init = DECL_INITIAL (array);
6866 if (TREE_CODE (init) == CONSTRUCTOR)
6868 tree elem;
6870 for (elem = CONSTRUCTOR_ELTS (init);
6871 (elem
6872 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6873 elem = TREE_CHAIN (elem))
6876 if (elem)
6877 return expand_expr (fold (TREE_VALUE (elem)), target,
6878 tmode, ro_modifier);
6880 else if (TREE_CODE (init) == STRING_CST
6881 && 0 > compare_tree_int (index,
6882 TREE_STRING_LENGTH (init)))
6884 tree type = TREE_TYPE (TREE_TYPE (init));
6885 enum machine_mode mode = TYPE_MODE (type);
6887 if (GET_MODE_CLASS (mode) == MODE_INT
6888 && GET_MODE_SIZE (mode) == 1)
6889 return (GEN_INT
6890 (TREE_STRING_POINTER
6891 (init)[TREE_INT_CST_LOW (index)]));
6896 /* Fall through. */
6898 case COMPONENT_REF:
6899 case BIT_FIELD_REF:
6900 /* If the operand is a CONSTRUCTOR, we can just extract the
6901 appropriate field if it is present. Don't do this if we have
6902 already written the data since we want to refer to that copy
6903 and varasm.c assumes that's what we'll do. */
6904 if (code != ARRAY_REF
6905 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6906 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6908 tree elt;
6910 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6911 elt = TREE_CHAIN (elt))
6912 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6913 /* We can normally use the value of the field in the
6914 CONSTRUCTOR. However, if this is a bitfield in
6915 an integral mode that we can fit in a HOST_WIDE_INT,
6916 we must mask only the number of bits in the bitfield,
6917 since this is done implicitly by the constructor. If
6918 the bitfield does not meet either of those conditions,
6919 we can't do this optimization. */
6920 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6921 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6922 == MODE_INT)
6923 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6924 <= HOST_BITS_PER_WIDE_INT))))
6926 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6927 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6929 HOST_WIDE_INT bitsize
6930 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6932 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6934 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6935 op0 = expand_and (op0, op1, target);
6937 else
6939 enum machine_mode imode
6940 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6941 tree count
6942 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6943 0);
6945 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6946 target, 0);
6947 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6948 target, 0);
6952 return op0;
6957 enum machine_mode mode1;
6958 HOST_WIDE_INT bitsize, bitpos;
6959 tree offset;
6960 int volatilep = 0;
6961 unsigned int alignment;
6962 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6963 &mode1, &unsignedp, &volatilep,
6964 &alignment);
6966 /* If we got back the original object, something is wrong. Perhaps
6967 we are evaluating an expression too early. In any event, don't
6968 infinitely recurse. */
6969 if (tem == exp)
6970 abort ();
6972 /* If TEM's type is a union of variable size, pass TARGET to the inner
6973 computation, since it will need a temporary and TARGET is known
6974 to be able to serve as one. This occurs in unchecked conversion in Ada. */
6976 op0 = expand_expr (tem,
6977 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6978 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6979 != INTEGER_CST)
6980 ? target : NULL_RTX),
6981 VOIDmode,
6982 (modifier == EXPAND_INITIALIZER
6983 || modifier == EXPAND_CONST_ADDRESS)
6984 ? modifier : EXPAND_NORMAL);
6986 /* If this is a constant, put it into a register if it is a
6987 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6988 if (CONSTANT_P (op0))
6990 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6991 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6992 && offset == 0)
6993 op0 = force_reg (mode, op0);
6994 else
6995 op0 = validize_mem (force_const_mem (mode, op0));
6998 if (offset != 0)
7000 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7002 /* If this object is in memory, put it into a register.
7003 This case can't occur in C, but can in Ada if we have
7004 unchecked conversion of an expression from a scalar type to
7005 an array or record type. */
7006 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7007 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7009 tree nt = build_qualified_type (TREE_TYPE (tem),
7010 (TYPE_QUALS (TREE_TYPE (tem))
7011 | TYPE_QUAL_CONST));
7012 rtx memloc = assign_temp (nt, 1, 1, 1);
7014 mark_temp_addr_taken (memloc);
7015 emit_move_insn (memloc, op0);
7016 op0 = memloc;
7019 if (GET_CODE (op0) != MEM)
7020 abort ();
7022 if (GET_MODE (offset_rtx) != ptr_mode)
7024 #ifdef POINTERS_EXTEND_UNSIGNED
7025 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7026 #else
7027 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7028 #endif
7031 /* A constant address in OP0 can have VOIDmode; we must not try
7032 to call force_reg in that case. */
7033 if (GET_CODE (op0) == MEM
7034 && GET_MODE (op0) == BLKmode
7035 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7036 && bitsize != 0
7037 && (bitpos % bitsize) == 0
7038 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7039 && alignment == GET_MODE_ALIGNMENT (mode1))
7041 rtx temp = change_address (op0, mode1,
7042 plus_constant (XEXP (op0, 0),
7043 (bitpos /
7044 BITS_PER_UNIT)));
7045 if (GET_CODE (XEXP (temp, 0)) == REG)
7046 op0 = temp;
7047 else
7048 op0 = change_address (op0, mode1,
7049 force_reg (GET_MODE (XEXP (temp, 0)),
7050 XEXP (temp, 0)));
7051 bitpos = 0;
7054 op0 = change_address (op0, VOIDmode,
7055 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7056 force_reg (ptr_mode,
7057 offset_rtx)));
7060 /* Don't forget about volatility even if this is a bitfield. */
7061 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7063 op0 = copy_rtx (op0);
7064 MEM_VOLATILE_P (op0) = 1;
7067 /* Check the access. */
7068 if (cfun != 0 && current_function_check_memory_usage
7069 && GET_CODE (op0) == MEM)
7071 enum memory_use_mode memory_usage;
7072 memory_usage = get_memory_usage_from_modifier (modifier);
7074 if (memory_usage != MEMORY_USE_DONT)
7076 rtx to;
7077 int size;
7079 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7080 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7082 /* Check the access right of the pointer. */
7083 in_check_memory_usage = 1;
7084 if (size > BITS_PER_UNIT)
7085 emit_library_call (chkr_check_addr_libfunc,
7086 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7087 Pmode, GEN_INT (size / BITS_PER_UNIT),
7088 TYPE_MODE (sizetype),
7089 GEN_INT (memory_usage),
7090 TYPE_MODE (integer_type_node));
7091 in_check_memory_usage = 0;
7095 /* In cases where an aligned union has an unaligned object
7096 as a field, we might be extracting a BLKmode value from
7097 an integer-mode (e.g., SImode) object. Handle this case
7098 by doing the extract into an object as wide as the field
7099 (which we know to be the width of a basic mode), then
7100 storing into memory, and changing the mode to BLKmode.
7101 If we ultimately want the address (EXPAND_CONST_ADDRESS or
7102 EXPAND_INITIALIZER), then we must not copy to a temporary. */
7103 if (mode1 == VOIDmode
7104 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7105 || (modifier != EXPAND_CONST_ADDRESS
7106 && modifier != EXPAND_INITIALIZER
7107 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
7108 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7109 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7110 /* If the field isn't aligned enough to fetch as a memref,
7111 fetch it as a bit field. */
7112 || (mode1 != BLKmode
7113 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7114 && ((TYPE_ALIGN (TREE_TYPE (tem))
7115 < GET_MODE_ALIGNMENT (mode))
7116 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7117 /* If the type and the field are a constant size and the
7118 size of the type isn't the same size as the bitfield,
7119 we must use bitfield operations. */
7120 || ((bitsize >= 0
7121 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7122 == INTEGER_CST)
7123 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7124 bitsize)))))
7125 || (modifier != EXPAND_CONST_ADDRESS
7126 && modifier != EXPAND_INITIALIZER
7127 && mode == BLKmode
7128 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7129 && (TYPE_ALIGN (type) > alignment
7130 || bitpos % TYPE_ALIGN (type) != 0)))
7132 enum machine_mode ext_mode = mode;
7134 if (ext_mode == BLKmode
7135 && ! (target != 0 && GET_CODE (op0) == MEM
7136 && GET_CODE (target) == MEM
7137 && bitpos % BITS_PER_UNIT == 0))
7138 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7140 if (ext_mode == BLKmode)
7142 /* In this case, BITPOS must start at a byte boundary and
7143 TARGET, if specified, must be a MEM. */
7144 if (GET_CODE (op0) != MEM
7145 || (target != 0 && GET_CODE (target) != MEM)
7146 || bitpos % BITS_PER_UNIT != 0)
7147 abort ();
7149 op0 = change_address (op0, VOIDmode,
7150 plus_constant (XEXP (op0, 0),
7151 bitpos / BITS_PER_UNIT));
7152 if (target == 0)
7153 target = assign_temp (type, 0, 1, 1);
7155 emit_block_move (target, op0,
7156 bitsize == -1 ? expr_size (exp)
7157 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7158 / BITS_PER_UNIT),
7159 BITS_PER_UNIT);
7161 return target;
7164 op0 = validize_mem (op0);
7166 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7167 mark_reg_pointer (XEXP (op0, 0), alignment);
7169 op0 = extract_bit_field (op0, bitsize, bitpos,
7170 unsignedp, target, ext_mode, ext_mode,
7171 alignment,
7172 int_size_in_bytes (TREE_TYPE (tem)));
7174 /* If the result is a record type and BITSIZE is narrower than
7175 the mode of OP0, an integral mode, and this is a big endian
7176 machine, we must put the field into the high-order bits. */
7177 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7178 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7179 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7180 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7181 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7182 - bitsize),
7183 op0, 1);
7185 if (mode == BLKmode)
7187 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7188 TYPE_QUAL_CONST);
7189 rtx new = assign_temp (nt, 0, 1, 1);
7191 emit_move_insn (new, op0);
7192 op0 = copy_rtx (new);
7193 PUT_MODE (op0, BLKmode);
7196 return op0;
7199 /* If the result is BLKmode, use that to access the object
7200 now as well. */
7201 if (mode == BLKmode)
7202 mode1 = BLKmode;
7204 /* Get a reference to just this component. */
7205 if (modifier == EXPAND_CONST_ADDRESS
7206 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7208 rtx new = gen_rtx_MEM (mode1,
7209 plus_constant (XEXP (op0, 0),
7210 (bitpos / BITS_PER_UNIT)));
7212 MEM_COPY_ATTRIBUTES (new, op0);
7213 op0 = new;
7215 else
7216 op0 = change_address (op0, mode1,
7217 plus_constant (XEXP (op0, 0),
7218 (bitpos / BITS_PER_UNIT)));
7220 set_mem_attributes (op0, exp, 0);
7221 if (GET_CODE (XEXP (op0, 0)) == REG)
7222 mark_reg_pointer (XEXP (op0, 0), alignment);
7224 MEM_VOLATILE_P (op0) |= volatilep;
7225 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7226 || modifier == EXPAND_CONST_ADDRESS
7227 || modifier == EXPAND_INITIALIZER)
7228 return op0;
7229 else if (target == 0)
7230 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7232 convert_move (target, op0, unsignedp);
7233 return target;
7236 /* Intended for a reference to a buffer of a file-object in Pascal.
7237 But it's not certain that a special tree code will really be
7238 necessary for these. INDIRECT_REF might work for them. */
7239 case BUFFER_REF:
7240 abort ();
7242 case IN_EXPR:
7244 /* Pascal set IN expression.
7246 Algorithm:
7247 rlo = set_low - (set_low%bits_per_word);
7248 the_word = set [ (index - rlo)/bits_per_word ];
7249 bit_index = index % bits_per_word;
7250 bitmask = 1 << bit_index;
7251 return !!(the_word & bitmask); */
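	/* Editorial sketch (not part of the original file): the algorithm
	   above written as ordinary C for a set stored as an array of
	   bytes, which is the granularity the expansion below uses
	   (BITS_PER_UNIT).  The function and parameter names are
	   hypothetical.  */
#if 0
	static int
	example_in_set (set, set_low, index)
	     const unsigned char *set;
	     int set_low, index;
	{
	  int bits_per_word = BITS_PER_UNIT;
	  int rlo = set_low - (set_low % bits_per_word);
	  unsigned char the_word = set[(index - rlo) / bits_per_word];
	  int bit_index = index % bits_per_word;
	  int bitmask = 1 << bit_index;
	  return !!(the_word & bitmask);
	}
#endif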
7253 tree set = TREE_OPERAND (exp, 0);
7254 tree index = TREE_OPERAND (exp, 1);
7255 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7256 tree set_type = TREE_TYPE (set);
7257 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7258 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7259 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7260 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7261 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7262 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7263 rtx setaddr = XEXP (setval, 0);
7264 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7265 rtx rlow;
7266 rtx diff, quo, rem, addr, bit, result;
7268 /* If domain is empty, answer is no. Likewise if index is constant
7269 and out of bounds. */
7270 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7271 && TREE_CODE (set_low_bound) == INTEGER_CST
7272 && tree_int_cst_lt (set_high_bound, set_low_bound))
7273 || (TREE_CODE (index) == INTEGER_CST
7274 && TREE_CODE (set_low_bound) == INTEGER_CST
7275 && tree_int_cst_lt (index, set_low_bound))
7276 || (TREE_CODE (set_high_bound) == INTEGER_CST
7277 && TREE_CODE (index) == INTEGER_CST
7278 && tree_int_cst_lt (set_high_bound, index))))
7279 return const0_rtx;
7281 if (target == 0)
7282 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7284 /* If we get here, we have to generate the code for both cases
7285 (in range and out of range). */
7287 op0 = gen_label_rtx ();
7288 op1 = gen_label_rtx ();
7290 if (! (GET_CODE (index_val) == CONST_INT
7291 && GET_CODE (lo_r) == CONST_INT))
7293 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7294 GET_MODE (index_val), iunsignedp, 0, op1);
7297 if (! (GET_CODE (index_val) == CONST_INT
7298 && GET_CODE (hi_r) == CONST_INT))
7300 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7301 GET_MODE (index_val), iunsignedp, 0, op1);
7304 /* Calculate the element number of bit zero in the first word
7305 of the set. */
7306 if (GET_CODE (lo_r) == CONST_INT)
7307 rlow = GEN_INT (INTVAL (lo_r)
7308 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7309 else
7310 rlow = expand_binop (index_mode, and_optab, lo_r,
7311 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7312 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7314 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7315 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7317 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7318 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7319 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7320 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7322 addr = memory_address (byte_mode,
7323 expand_binop (index_mode, add_optab, diff,
7324 setaddr, NULL_RTX, iunsignedp,
7325 OPTAB_LIB_WIDEN));
7327 /* Extract the bit we want to examine. */
7328 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7329 gen_rtx_MEM (byte_mode, addr),
7330 make_tree (TREE_TYPE (index), rem),
7331 NULL_RTX, 1);
7332 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7333 GET_MODE (target) == byte_mode ? target : 0,
7334 1, OPTAB_LIB_WIDEN);
7336 if (result != target)
7337 convert_move (target, result, 1);
7339 /* Output the code to handle the out-of-range case. */
7340 emit_jump (op0);
7341 emit_label (op1);
7342 emit_move_insn (target, const0_rtx);
7343 emit_label (op0);
7344 return target;
7347 case WITH_CLEANUP_EXPR:
7348 if (RTL_EXPR_RTL (exp) == 0)
7350 RTL_EXPR_RTL (exp)
7351 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7352 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7354 /* That's it for this cleanup. */
7355 TREE_OPERAND (exp, 2) = 0;
7357 return RTL_EXPR_RTL (exp);
7359 case CLEANUP_POINT_EXPR:
7361 /* Start a new binding layer that will keep track of all cleanup
7362 actions to be performed. */
7363 expand_start_bindings (2);
7365 target_temp_slot_level = temp_slot_level;
7367 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7368 /* If we're going to use this value, load it up now. */
7369 if (! ignore)
7370 op0 = force_not_mem (op0);
7371 preserve_temp_slots (op0);
7372 expand_end_bindings (NULL_TREE, 0, 0);
7374 return op0;
7376 case CALL_EXPR:
7377 /* Check for a built-in function. */
7378 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7379 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7380 == FUNCTION_DECL)
7381 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7383 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7384 == BUILT_IN_FRONTEND)
7385 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7386 else
7387 return expand_builtin (exp, target, subtarget, tmode, ignore);
7390 return expand_call (exp, target, ignore);
7392 case NON_LVALUE_EXPR:
7393 case NOP_EXPR:
7394 case CONVERT_EXPR:
7395 case REFERENCE_EXPR:
7396 if (TREE_OPERAND (exp, 0) == error_mark_node)
7397 return const0_rtx;
7399 if (TREE_CODE (type) == UNION_TYPE)
7401 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7403 /* If both input and output are BLKmode, this conversion
7404 isn't actually doing anything unless we need to make the
7405 alignment stricter. */
7406 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7407 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7408 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7409 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7410 modifier);
7412 if (target == 0)
7413 target = assign_temp (type, 0, 1, 1);
7415 if (GET_CODE (target) == MEM)
7416 /* Store data into beginning of memory target. */
7417 store_expr (TREE_OPERAND (exp, 0),
7418 change_address (target, TYPE_MODE (valtype), 0), 0);
7420 else if (GET_CODE (target) == REG)
7421 /* Store this field into a union of the proper type. */
7422 store_field (target,
7423 MIN ((int_size_in_bytes (TREE_TYPE
7424 (TREE_OPERAND (exp, 0)))
7425 * BITS_PER_UNIT),
7426 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7427 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7428 VOIDmode, 0, BITS_PER_UNIT,
7429 int_size_in_bytes (type), 0);
7430 else
7431 abort ();
7433 /* Return the entire union. */
7434 return target;
7437 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7439 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7440 ro_modifier);
7442 /* If the signedness of the conversion differs and OP0 is
7443 a promoted SUBREG, clear that indication since we now
7444 have to do the proper extension. */
7445 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7446 && GET_CODE (op0) == SUBREG)
7447 SUBREG_PROMOTED_VAR_P (op0) = 0;
7449 return op0;
7452 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7453 if (GET_MODE (op0) == mode)
7454 return op0;
7456 /* If OP0 is a constant, just convert it into the proper mode. */
7457 if (CONSTANT_P (op0))
7458 return
7459 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7460 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7462 if (modifier == EXPAND_INITIALIZER)
7463 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7465 if (target == 0)
7466 return
7467 convert_to_mode (mode, op0,
7468 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7469 else
7470 convert_move (target, op0,
7471 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7472 return target;
7474 case PLUS_EXPR:
7475 /* We come here from MINUS_EXPR when the second operand is a
7476 constant. */
7477 plus_expr:
7478 this_optab = ! unsignedp && flag_trapv
7479 && (GET_MODE_CLASS(mode) == MODE_INT)
7480 ? addv_optab : add_optab;
7482 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7483 something else, make sure we add the register to the constant and
7484 then to the other thing. This case can occur during strength
7485 reduction and doing it this way will produce better code if the
7486 frame pointer or argument pointer is eliminated.
7488 fold-const.c will ensure that the constant is always in the inner
7489 PLUS_EXPR, so the only case we need to do anything about is if
7490 sp, ap, or fp is our second argument, in which case we must swap
7491 the innermost first argument and our second argument. */
7493 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7494 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7495 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7496 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7497 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7498 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7500 tree t = TREE_OPERAND (exp, 1);
7502 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7503 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7506 /* If the result is to be ptr_mode and we are adding an integer to
7507 something, we might be forming a constant. So try to use
7508 plus_constant. If it produces a sum and we can't accept it,
7509 use force_operand. This allows P = &ARR[const] to generate
7510 efficient code on machines where a SYMBOL_REF is not a valid
7511 address.
7513 If this is an EXPAND_SUM call, always return the sum. */
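/* Illustrative sketch (not in the original comment): for P = &ARR[2] with
4-byte elements and ARR at a SYMBOL_REF, the sum below can fold into a single
address such as (const (plus (symbol_ref "ARR") (const_int 8))), which is why
plus_constant is tried before falling back to force_operand. */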
7514 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7515 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7517 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7518 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7519 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7521 rtx constant_part;
7523 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7524 EXPAND_SUM);
7525 /* Use immed_double_const to ensure that the constant is
7526 truncated according to the mode of OP1, then sign extended
7527 to a HOST_WIDE_INT. Using the constant directly can result
7528 in non-canonical RTL in a 64x32 cross compile. */
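/* Example, assuming a 64-bit host compiling for a 32-bit target: a low part
of 0xfffffffc for SImode must be represented as (const_int -4); canonical RTL
keeps CONST_INTs sign-extended from their mode, which immed_double_const
guarantees here. */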
7529 constant_part
7530 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7531 (HOST_WIDE_INT) 0,
7532 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7533 op1 = plus_constant (op1, INTVAL (constant_part));
7534 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7535 op1 = force_operand (op1, target);
7536 return op1;
7539 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7540 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7541 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7543 rtx constant_part;
7545 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7546 EXPAND_SUM);
7547 if (! CONSTANT_P (op0))
7549 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7550 VOIDmode, modifier);
7551 /* Don't go to both_summands if modifier
7552 says it's not right to return a PLUS. */
7553 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7554 goto binop2;
7555 goto both_summands;
7557 /* Use immed_double_const to ensure that the constant is
7558 truncated according to the mode of OP1, then sign extended
7559 to a HOST_WIDE_INT. Using the constant directly can result
7560 in non-canonical RTL in a 64x32 cross compile. */
7561 constant_part
7562 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7563 (HOST_WIDE_INT) 0,
7564 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7565 op0 = plus_constant (op0, INTVAL (constant_part));
7566 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7567 op0 = force_operand (op0, target);
7568 return op0;
7572 /* No sense saving up arithmetic to be done
7573 if it's all in the wrong mode to form part of an address.
7574 And force_operand won't know whether to sign-extend or
7575 zero-extend. */
7576 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7577 || mode != ptr_mode)
7578 goto binop;
7580 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7581 subtarget = 0;
7583 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7584 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7586 both_summands:
7587 /* Make sure any term that's a sum with a constant comes last. */
7588 if (GET_CODE (op0) == PLUS
7589 && CONSTANT_P (XEXP (op0, 1)))
7591 temp = op0;
7592 op0 = op1;
7593 op1 = temp;
7595 /* If adding to a sum including a constant,
7596 associate it to put the constant outside. */
7597 if (GET_CODE (op1) == PLUS
7598 && CONSTANT_P (XEXP (op1, 1)))
7600 rtx constant_term = const0_rtx;
7602 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7603 if (temp != 0)
7604 op0 = temp;
7605 /* Ensure that MULT comes first if there is one. */
7606 else if (GET_CODE (op0) == MULT)
7607 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7608 else
7609 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7611 /* Let's also eliminate constants from op0 if possible. */
7612 op0 = eliminate_constant_term (op0, &constant_term);
7614 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7615 their sum should be a constant. Form it into OP1, since the
7616 result we want will then be OP0 + OP1. */
7618 temp = simplify_binary_operation (PLUS, mode, constant_term,
7619 XEXP (op1, 1));
7620 if (temp != 0)
7621 op1 = temp;
7622 else
7623 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7626 /* Put a constant term last and put a multiplication first. */
7627 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7628 temp = op1, op1 = op0, op0 = temp;
7630 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7631 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7633 case MINUS_EXPR:
7634 /* For initializers, we are allowed to return a MINUS of two
7635 symbolic constants. Here we handle all cases when both operands
7636 are constant. */
7637 /* Handle difference of two symbolic constants,
7638 for the sake of an initializer. */
7639 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7640 && really_constant_p (TREE_OPERAND (exp, 0))
7641 && really_constant_p (TREE_OPERAND (exp, 1)))
7643 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7644 VOIDmode, ro_modifier);
7645 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7646 VOIDmode, ro_modifier);
7648 /* If the last operand is a CONST_INT, use plus_constant of
7649 the negated constant. Else make the MINUS. */
7650 if (GET_CODE (op1) == CONST_INT)
7651 return plus_constant (op0, - INTVAL (op1));
7652 else
7653 return gen_rtx_MINUS (mode, op0, op1);
7655 /* Convert A - const to A + (-const). */
7656 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7658 tree negated = fold (build1 (NEGATE_EXPR, type,
7659 TREE_OPERAND (exp, 1)));
7661 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7662 /* If we can't negate the constant in TYPE, leave it alone and
7663 expand_binop will negate it for us. We used to try to do it
7664 here in the signed version of TYPE, but that doesn't work
7665 on POINTER_TYPEs. */;
7666 else
7668 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7669 goto plus_expr;
7672 this_optab = ! unsignedp && flag_trapv
7673 && (GET_MODE_CLASS(mode) == MODE_INT)
7674 ? subv_optab : sub_optab;
7675 goto binop;
7677 case MULT_EXPR:
7678 /* If first operand is constant, swap them.
7679 Thus the following special case checks need only
7680 check the second operand. */
7681 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7683 register tree t1 = TREE_OPERAND (exp, 0);
7684 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7685 TREE_OPERAND (exp, 1) = t1;
7688 /* Attempt to return something suitable for generating an
7689 indexed address, for machines that support that. */
7691 if (modifier == EXPAND_SUM && mode == ptr_mode
7692 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7693 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7695 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7696 EXPAND_SUM);
7698 /* Apply distributive law if OP0 is x+c. */
7699 if (GET_CODE (op0) == PLUS
7700 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7701 return
7702 gen_rtx_PLUS
7703 (mode,
7704 gen_rtx_MULT
7705 (mode, XEXP (op0, 0),
7706 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7707 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7708 * INTVAL (XEXP (op0, 1))));
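/* Illustrative: if OP0 expanded to (plus X (const_int 4)) and the multiplier
is 3, the value returned above is (plus (mult X (const_int 3)) (const_int 12)),
keeping the constant outermost so it can fold into an address. */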
7710 if (GET_CODE (op0) != REG)
7711 op0 = force_operand (op0, NULL_RTX);
7712 if (GET_CODE (op0) != REG)
7713 op0 = copy_to_mode_reg (mode, op0);
7715 return
7716 gen_rtx_MULT (mode, op0,
7717 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7720 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7721 subtarget = 0;
7723 /* Check for multiplying things that have been extended
7724 from a narrower type. If this machine supports multiplying
7725 in that narrower type with a result in the desired type,
7726 do it that way, and avoid the explicit type-conversion. */
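/* Illustrative example, assuming the target defines a widening multiply
pattern such as mulhisi3: (int) (short) a * (int) (short) b can then be done
as a single HImode x HImode -> SImode multiply, avoiding the explicit
extensions of each operand. */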
7727 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7728 && TREE_CODE (type) == INTEGER_TYPE
7729 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7730 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7731 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7732 && int_fits_type_p (TREE_OPERAND (exp, 1),
7733 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7734 /* Don't use a widening multiply if a shift will do. */
7735 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7736 > HOST_BITS_PER_WIDE_INT)
7737 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7738 ||
7739 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7740 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7741 ==
7742 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7743 /* If both operands are extended, they must either both
7744 be zero-extended or both be sign-extended. */
7745 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7746 ==
7747 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7749 enum machine_mode innermode
7750 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7751 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7752 ? smul_widen_optab : umul_widen_optab);
7753 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7754 ? umul_widen_optab : smul_widen_optab);
7755 if (mode == GET_MODE_WIDER_MODE (innermode))
7757 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7759 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7760 NULL_RTX, VOIDmode, 0);
7761 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7762 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7763 VOIDmode, 0);
7764 else
7765 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7766 NULL_RTX, VOIDmode, 0);
7767 goto binop2;
7769 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7770 && innermode == word_mode)
7772 rtx htem;
7773 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7774 NULL_RTX, VOIDmode, 0);
7775 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7776 op1 = convert_modes (innermode, mode,
7777 expand_expr (TREE_OPERAND (exp, 1),
7778 NULL_RTX, VOIDmode, 0),
7779 unsignedp);
7780 else
7781 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7782 NULL_RTX, VOIDmode, 0);
7783 temp = expand_binop (mode, other_optab, op0, op1, target,
7784 unsignedp, OPTAB_LIB_WIDEN);
7785 htem = expand_mult_highpart_adjust (innermode,
7786 gen_highpart (innermode, temp),
7787 op0, op1,
7788 gen_highpart (innermode, temp),
7789 unsignedp);
7790 emit_move_insn (gen_highpart (innermode, temp), htem);
7791 return temp;
7795 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7796 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7797 return expand_mult (mode, op0, op1, target, unsignedp);
7799 case TRUNC_DIV_EXPR:
7800 case FLOOR_DIV_EXPR:
7801 case CEIL_DIV_EXPR:
7802 case ROUND_DIV_EXPR:
7803 case EXACT_DIV_EXPR:
7804 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7805 subtarget = 0;
7806 /* Possible optimization: compute the dividend with EXPAND_SUM
7807 then if the divisor is constant can optimize the case
7808 where some terms of the dividend have coeffs divisible by it. */
7809 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7810 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7811 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7813 case RDIV_EXPR:
7814 this_optab = flodiv_optab;
7815 goto binop;
7817 case TRUNC_MOD_EXPR:
7818 case FLOOR_MOD_EXPR:
7819 case CEIL_MOD_EXPR:
7820 case ROUND_MOD_EXPR:
7821 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7822 subtarget = 0;
7823 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7824 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7825 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7827 case FIX_ROUND_EXPR:
7828 case FIX_FLOOR_EXPR:
7829 case FIX_CEIL_EXPR:
7830 abort (); /* Not used for C. */
7832 case FIX_TRUNC_EXPR:
7833 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7834 if (target == 0)
7835 target = gen_reg_rtx (mode);
7836 expand_fix (target, op0, unsignedp);
7837 return target;
7839 case FLOAT_EXPR:
7840 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7841 if (target == 0)
7842 target = gen_reg_rtx (mode);
7843 /* expand_float can't figure out what to do if FROM has VOIDmode.
7844 So give it the correct mode. With -O, cse will optimize this. */
7845 if (GET_MODE (op0) == VOIDmode)
7846 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7847 op0);
7848 expand_float (target, op0,
7849 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7850 return target;
7852 case NEGATE_EXPR:
7853 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7854 temp = expand_unop (mode,
7855 ! unsignedp && flag_trapv
7856 && (GET_MODE_CLASS(mode) == MODE_INT)
7857 ? negv_optab : neg_optab, op0, target, 0);
7858 if (temp == 0)
7859 abort ();
7860 return temp;
7862 case ABS_EXPR:
7863 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7865 /* Handle complex values specially. */
7866 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7867 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7868 return expand_complex_abs (mode, op0, target, unsignedp);
7870 /* Unsigned abs is simply the operand. Testing here means we don't
7871 risk generating incorrect code below. */
7872 if (TREE_UNSIGNED (type))
7873 return op0;
7875 return expand_abs (mode, op0, target, unsignedp,
7876 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7878 case MAX_EXPR:
7879 case MIN_EXPR:
7880 target = original_target;
7881 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7882 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7883 || GET_MODE (target) != mode
7884 || (GET_CODE (target) == REG
7885 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7886 target = gen_reg_rtx (mode);
7887 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7888 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7890 /* First try to do it with a special MIN or MAX instruction.
7891 If that does not win, use a conditional jump to select the proper
7892 value. */
7893 this_optab = (TREE_UNSIGNED (type)
7894 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7895 : (code == MIN_EXPR ? smin_optab : smax_optab));
7897 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7898 OPTAB_WIDEN);
7899 if (temp != 0)
7900 return temp;
7902 /* At this point, a MEM target is no longer useful; we will get better
7903 code without it. */
7905 if (GET_CODE (target) == MEM)
7906 target = gen_reg_rtx (mode);
7908 if (target != op0)
7909 emit_move_insn (target, op0);
7911 op0 = gen_label_rtx ();
7913 /* If this mode is an integer too wide to compare properly,
7914 compare word by word. Rely on cse to optimize constant cases. */
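/* Illustrative: a DImode MIN/MAX on a 32-bit target with no direct DImode
comparison is handled below by comparing the high and low words separately
via do_jump_by_parts_greater_rtx. */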
7915 if (GET_MODE_CLASS (mode) == MODE_INT
7916 && ! can_compare_p (GE, mode, ccp_jump))
7918 if (code == MAX_EXPR)
7919 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7920 target, op1, NULL_RTX, op0);
7921 else
7922 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7923 op1, target, NULL_RTX, op0);
7925 else
7927 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7928 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7929 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7930 op0);
7932 emit_move_insn (target, op1);
7933 emit_label (op0);
7934 return target;
7936 case BIT_NOT_EXPR:
7937 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7938 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7939 if (temp == 0)
7940 abort ();
7941 return temp;
7943 case FFS_EXPR:
7944 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7945 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7946 if (temp == 0)
7947 abort ();
7948 return temp;
7950 /* ??? Can optimize bitwise operations with one arg constant.
7951 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7952 and (a bitwise1 b) bitwise2 b (etc)
7953 but that is probably not worthwhile. */
7955 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7956 boolean values when we want in all cases to compute both of them. In
7957 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7958 as actual zero-or-1 values and then bitwise anding. In cases where
7959 there cannot be any side effects, better code would be made by
7960 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7961 how to recognize those cases. */
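/* Illustrative: for a TRUTH_AND_EXPR of two comparisons, each comparison is
expanded to a 0-or-1 value and the results are bitwise anded via and_optab
below; a TRUTH_ANDIF_EXPR would instead branch around the second comparison. */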
7963 case TRUTH_AND_EXPR:
7964 case BIT_AND_EXPR:
7965 this_optab = and_optab;
7966 goto binop;
7968 case TRUTH_OR_EXPR:
7969 case BIT_IOR_EXPR:
7970 this_optab = ior_optab;
7971 goto binop;
7973 case TRUTH_XOR_EXPR:
7974 case BIT_XOR_EXPR:
7975 this_optab = xor_optab;
7976 goto binop;
7978 case LSHIFT_EXPR:
7979 case RSHIFT_EXPR:
7980 case LROTATE_EXPR:
7981 case RROTATE_EXPR:
7982 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7983 subtarget = 0;
7984 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7985 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7986 unsignedp);
7988 /* Could determine the answer when only additive constants differ. Also,
7989 the addition of one can be handled by changing the condition. */
7990 case LT_EXPR:
7991 case LE_EXPR:
7992 case GT_EXPR:
7993 case GE_EXPR:
7994 case EQ_EXPR:
7995 case NE_EXPR:
7996 case UNORDERED_EXPR:
7997 case ORDERED_EXPR:
7998 case UNLT_EXPR:
7999 case UNLE_EXPR:
8000 case UNGT_EXPR:
8001 case UNGE_EXPR:
8002 case UNEQ_EXPR:
8003 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8004 if (temp != 0)
8005 return temp;
8007 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8008 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8009 && original_target
8010 && GET_CODE (original_target) == REG
8011 && (GET_MODE (original_target)
8012 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8014 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8015 VOIDmode, 0);
8017 if (temp != original_target)
8018 temp = copy_to_reg (temp);
8020 op1 = gen_label_rtx ();
8021 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8022 GET_MODE (temp), unsignedp, 0, op1);
8023 emit_move_insn (temp, const1_rtx);
8024 emit_label (op1);
8025 return temp;
8028 /* If no set-flag instruction, must generate a conditional
8029 store into a temporary variable. Drop through
8030 and handle this like && and ||. */
8032 case TRUTH_ANDIF_EXPR:
8033 case TRUTH_ORIF_EXPR:
8034 if (! ignore
8035 && (target == 0 || ! safe_from_p (target, exp, 1)
8036 /* Make sure we don't have a hard reg (such as function's return
8037 value) live across basic blocks, if not optimizing. */
8038 || (!optimize && GET_CODE (target) == REG
8039 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8040 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8042 if (target)
8043 emit_clr_insn (target);
8045 op1 = gen_label_rtx ();
8046 jumpifnot (exp, op1);
8048 if (target)
8049 emit_0_to_1_insn (target);
8051 emit_label (op1);
8052 return ignore ? const0_rtx : target;
8054 case TRUTH_NOT_EXPR:
8055 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8056 /* The parser is careful to generate TRUTH_NOT_EXPR
8057 only with operands that are always zero or one. */
8058 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8059 target, 1, OPTAB_LIB_WIDEN);
8060 if (temp == 0)
8061 abort ();
8062 return temp;
8064 case COMPOUND_EXPR:
8065 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8066 emit_queue ();
8067 return expand_expr (TREE_OPERAND (exp, 1),
8068 (ignore ? const0_rtx : target),
8069 VOIDmode, 0);
8071 case COND_EXPR:
8072 /* If we would have a "singleton" (see below) were it not for a
8073 conversion in each arm, bring that conversion back out. */
8074 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8075 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8076 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8077 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8079 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8080 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8082 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8083 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8084 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8085 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8086 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8087 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8088 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8089 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8090 return expand_expr (build1 (NOP_EXPR, type,
8091 build (COND_EXPR, TREE_TYPE (iftrue),
8092 TREE_OPERAND (exp, 0),
8093 iftrue, iffalse)),
8094 target, tmode, modifier);
8098 /* Note that COND_EXPRs whose type is a structure or union
8099 are required to be constructed to contain assignments of
8100 a temporary variable, so that we can evaluate them here
8101 for side effect only. If type is void, we must do likewise. */
8103 /* If an arm of the branch requires a cleanup,
8104 only that cleanup is performed. */
8106 tree singleton = 0;
8107 tree binary_op = 0, unary_op = 0;
8109 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8110 convert it to our mode, if necessary. */
8111 if (integer_onep (TREE_OPERAND (exp, 1))
8112 && integer_zerop (TREE_OPERAND (exp, 2))
8113 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8115 if (ignore)
8117 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8118 ro_modifier);
8119 return const0_rtx;
8122 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8123 if (GET_MODE (op0) == mode)
8124 return op0;
8126 if (target == 0)
8127 target = gen_reg_rtx (mode);
8128 convert_move (target, op0, unsignedp);
8129 return target;
8132 /* Check for X ? A + B : A. If we have this, we can copy A to the
8133 output and conditionally add B. Similarly for unary operations.
8134 Don't do this if X has side-effects because those side effects
8135 might affect A or B and the "?" operation is a sequence point in
8136 ANSI. (operand_equal_p tests for side effects.) */
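/* Illustrative: for x ? a + b : a, the common operand a is copied to the
output unconditionally, and the addition of b is performed only on the branch
where x is true. */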
8138 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8139 && operand_equal_p (TREE_OPERAND (exp, 2),
8140 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8141 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8142 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8143 && operand_equal_p (TREE_OPERAND (exp, 1),
8144 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8145 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8146 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8147 && operand_equal_p (TREE_OPERAND (exp, 2),
8148 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8149 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8150 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8151 && operand_equal_p (TREE_OPERAND (exp, 1),
8152 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8153 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8155 /* If we are not to produce a result, we have no target. Otherwise,
8156 if a target was specified use it; it will not be used as an
8157 intermediate target unless it is safe. If no target, use a
8158 temporary. */
8160 if (ignore)
8161 temp = 0;
8162 else if (original_target
8163 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8164 || (singleton && GET_CODE (original_target) == REG
8165 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8166 && original_target == var_rtx (singleton)))
8167 && GET_MODE (original_target) == mode
8168 #ifdef HAVE_conditional_move
8169 && (! can_conditionally_move_p (mode)
8170 || GET_CODE (original_target) == REG
8171 || TREE_ADDRESSABLE (type))
8172 #endif
8173 && ! (GET_CODE (original_target) == MEM
8174 && MEM_VOLATILE_P (original_target)))
8175 temp = original_target;
8176 else if (TREE_ADDRESSABLE (type))
8177 abort ();
8178 else
8179 temp = assign_temp (type, 0, 0, 1);
8181 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8182 do the test of X as a store-flag operation, do this as
8183 A + ((X != 0) << log C). Similarly for other simple binary
8184 operators. Only do for C == 1 if BRANCH_COST is low. */
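/* Worked example (illustrative): for x != y ? a + 8 : a, C == 8 so log C == 3
and the value is computed branch-free as a + ((x != y) << 3) when the
comparison can be done as a store-flag operation. */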
8185 if (temp && singleton && binary_op
8186 && (TREE_CODE (binary_op) == PLUS_EXPR
8187 || TREE_CODE (binary_op) == MINUS_EXPR
8188 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8189 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8190 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8191 : integer_onep (TREE_OPERAND (binary_op, 1)))
8192 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8194 rtx result;
8195 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8196 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8197 ? addv_optab : add_optab)
8198 : TREE_CODE (binary_op) == MINUS_EXPR
8199 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8200 ? subv_optab : sub_optab)
8201 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8202 : xor_optab);
8204 /* If we had X ? A : A + 1, do this as A + (X == 0).
8206 We have to invert the truth value here and then put it
8207 back later if do_store_flag fails. We cannot simply copy
8208 TREE_OPERAND (exp, 0) to another variable and modify that
8209 because invert_truthvalue can modify the tree pointed to
8210 by its argument. */
8211 if (singleton == TREE_OPERAND (exp, 1))
8212 TREE_OPERAND (exp, 0)
8213 = invert_truthvalue (TREE_OPERAND (exp, 0));
8215 result = do_store_flag (TREE_OPERAND (exp, 0),
8216 (safe_from_p (temp, singleton, 1)
8217 ? temp : NULL_RTX),
8218 mode, BRANCH_COST <= 1);
8220 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8221 result = expand_shift (LSHIFT_EXPR, mode, result,
8222 build_int_2 (tree_log2
8223 (TREE_OPERAND
8224 (binary_op, 1)),
8225 0),
8226 (safe_from_p (temp, singleton, 1)
8227 ? temp : NULL_RTX), 0);
8229 if (result)
8231 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8232 return expand_binop (mode, boptab, op1, result, temp,
8233 unsignedp, OPTAB_LIB_WIDEN);
8235 else if (singleton == TREE_OPERAND (exp, 1))
8236 TREE_OPERAND (exp, 0)
8237 = invert_truthvalue (TREE_OPERAND (exp, 0));
8240 do_pending_stack_adjust ();
8241 NO_DEFER_POP;
8242 op0 = gen_label_rtx ();
8244 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8246 if (temp != 0)
8248 /* If the target conflicts with the other operand of the
8249 binary op, we can't use it. Also, we can't use the target
8250 if it is a hard register, because evaluating the condition
8251 might clobber it. */
8252 if ((binary_op
8253 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8254 || (GET_CODE (temp) == REG
8255 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8256 temp = gen_reg_rtx (mode);
8257 store_expr (singleton, temp, 0);
8259 else
8260 expand_expr (singleton,
8261 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8262 if (singleton == TREE_OPERAND (exp, 1))
8263 jumpif (TREE_OPERAND (exp, 0), op0);
8264 else
8265 jumpifnot (TREE_OPERAND (exp, 0), op0);
8267 start_cleanup_deferral ();
8268 if (binary_op && temp == 0)
8269 /* Just touch the other operand. */
8270 expand_expr (TREE_OPERAND (binary_op, 1),
8271 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8272 else if (binary_op)
8273 store_expr (build (TREE_CODE (binary_op), type,
8274 make_tree (type, temp),
8275 TREE_OPERAND (binary_op, 1)),
8276 temp, 0);
8277 else
8278 store_expr (build1 (TREE_CODE (unary_op), type,
8279 make_tree (type, temp)),
8280 temp, 0);
8281 op1 = op0;
8283 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8284 comparison operator. If we have one of these cases, set the
8285 output to A, branch on A (cse will merge these two references),
8286 then set the output to FOO. */
8287 else if (temp
8288 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8289 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8290 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8291 TREE_OPERAND (exp, 1), 0)
8292 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8293 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8294 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8296 if (GET_CODE (temp) == REG
8297 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8298 temp = gen_reg_rtx (mode);
8299 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8300 jumpif (TREE_OPERAND (exp, 0), op0);
8302 start_cleanup_deferral ();
8303 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8304 op1 = op0;
8306 else if (temp
8307 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8308 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8309 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8310 TREE_OPERAND (exp, 2), 0)
8311 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8312 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8313 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8315 if (GET_CODE (temp) == REG
8316 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8317 temp = gen_reg_rtx (mode);
8318 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8319 jumpifnot (TREE_OPERAND (exp, 0), op0);
8321 start_cleanup_deferral ();
8322 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8323 op1 = op0;
8325 else
8327 op1 = gen_label_rtx ();
8328 jumpifnot (TREE_OPERAND (exp, 0), op0);
8330 start_cleanup_deferral ();
8332 /* One branch of the cond can be void, if it never returns. For
8333 example A ? throw : E */
8334 if (temp != 0
8335 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8336 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8337 else
8338 expand_expr (TREE_OPERAND (exp, 1),
8339 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8340 end_cleanup_deferral ();
8341 emit_queue ();
8342 emit_jump_insn (gen_jump (op1));
8343 emit_barrier ();
8344 emit_label (op0);
8345 start_cleanup_deferral ();
8346 if (temp != 0
8347 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8348 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8349 else
8350 expand_expr (TREE_OPERAND (exp, 2),
8351 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8354 end_cleanup_deferral ();
8356 emit_queue ();
8357 emit_label (op1);
8358 OK_DEFER_POP;
8360 return temp;
8363 case TARGET_EXPR:
8365 /* Something needs to be initialized, but we didn't know
8366 where that thing was when building the tree. For example,
8367 it could be the return value of a function, or a parameter
8368 to a function which is laid down on the stack, or a temporary
8369 variable which must be passed by reference.
8371 We guarantee that the expression will either be constructed
8372 or copied into our original target. */
8374 tree slot = TREE_OPERAND (exp, 0);
8375 tree cleanups = NULL_TREE;
8376 tree exp1;
8378 if (TREE_CODE (slot) != VAR_DECL)
8379 abort ();
8381 if (! ignore)
8382 target = original_target;
8384 /* Set this here so that if we get a target that refers to a
8385 register variable that's already been used, put_reg_into_stack
8386 knows that it should fix up those uses. */
8387 TREE_USED (slot) = 1;
8389 if (target == 0)
8391 if (DECL_RTL_SET_P (slot))
8393 target = DECL_RTL (slot);
8394 /* If we have already expanded the slot, don't do
8395 it again. (mrs) */
8396 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8397 return target;
8399 else
8401 target = assign_temp (type, 2, 0, 1);
8402 /* All temp slots at this level must not conflict. */
8403 preserve_temp_slots (target);
8404 SET_DECL_RTL (slot, target);
8405 if (TREE_ADDRESSABLE (slot))
8406 put_var_into_stack (slot);
8408 /* Since SLOT is not known to the called function
8409 to belong to its stack frame, we must build an explicit
8410 cleanup. This case occurs when we must build up a reference
8411 to pass the reference as an argument. In this case,
8412 it is very likely that such a reference need not be
8413 built here. */
8415 if (TREE_OPERAND (exp, 2) == 0)
8416 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8417 cleanups = TREE_OPERAND (exp, 2);
8420 else
8422 /* This case does occur when expanding a parameter which
8423 needs to be constructed on the stack. The target
8424 is the actual stack address that we want to initialize.
8425 The function we call will perform the cleanup in this case. */
8427 /* If we have already assigned it space, use that space,
8428 not target that we were passed in, as our target
8429 parameter is only a hint. */
8430 if (DECL_RTL_SET_P (slot))
8432 target = DECL_RTL (slot);
8433 /* If we have already expanded the slot, don't do
8434 it again. (mrs) */
8435 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8436 return target;
8438 else
8440 SET_DECL_RTL (slot, target);
8441 /* If we must have an addressable slot, then make sure that
8442 the RTL that we just stored in slot is OK. */
8443 if (TREE_ADDRESSABLE (slot))
8444 put_var_into_stack (slot);
8448 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8449 /* Mark it as expanded. */
8450 TREE_OPERAND (exp, 1) = NULL_TREE;
8452 store_expr (exp1, target, 0);
8454 expand_decl_cleanup (NULL_TREE, cleanups);
8456 return target;
8459 case INIT_EXPR:
8461 tree lhs = TREE_OPERAND (exp, 0);
8462 tree rhs = TREE_OPERAND (exp, 1);
8463 tree noncopied_parts = 0;
8464 tree lhs_type = TREE_TYPE (lhs);
8466 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8467 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8468 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8469 TYPE_NONCOPIED_PARTS (lhs_type));
8470 while (noncopied_parts != 0)
8472 expand_assignment (TREE_VALUE (noncopied_parts),
8473 TREE_PURPOSE (noncopied_parts), 0, 0);
8474 noncopied_parts = TREE_CHAIN (noncopied_parts);
8476 return temp;
8479 case MODIFY_EXPR:
8481 /* If lhs is complex, expand calls in rhs before computing it.
8482 That's so we don't compute a pointer and save it over a call.
8483 If lhs is simple, compute it first so we can give it as a
8484 target if the rhs is just a call. This avoids an extra temp and copy
8485 and that prevents a partial-subsumption which makes bad code.
8486 Actually we could treat component_ref's of vars like vars. */
8488 tree lhs = TREE_OPERAND (exp, 0);
8489 tree rhs = TREE_OPERAND (exp, 1);
8490 tree noncopied_parts = 0;
8491 tree lhs_type = TREE_TYPE (lhs);
8493 temp = 0;
8495 /* Check for |= or &= of a bitfield of size one into another bitfield
8496 of size 1. In this case, (unless we need the result of the
8497 assignment) we can do this more efficiently with a
8498 test followed by an assignment, if necessary.
8500 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8501 things change so we do, this code should be enhanced to
8502 support it. */
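/* Illustrative: for s.a |= s.b with both fields one bit wide and the result
unused, this emits roughly "if (s.b) s.a = 1;" instead of a read-modify-write
of the destination bitfield. */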
8503 if (ignore
8504 && TREE_CODE (lhs) == COMPONENT_REF
8505 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8506 || TREE_CODE (rhs) == BIT_AND_EXPR)
8507 && TREE_OPERAND (rhs, 0) == lhs
8508 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8509 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8510 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8512 rtx label = gen_label_rtx ();
8514 do_jump (TREE_OPERAND (rhs, 1),
8515 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8516 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8517 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8518 (TREE_CODE (rhs) == BIT_IOR_EXPR
8519 ? integer_one_node
8520 : integer_zero_node)),
8521 0, 0);
8522 do_pending_stack_adjust ();
8523 emit_label (label);
8524 return const0_rtx;
8527 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8528 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8529 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8530 TYPE_NONCOPIED_PARTS (lhs_type));
8532 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8533 while (noncopied_parts != 0)
8535 expand_assignment (TREE_PURPOSE (noncopied_parts),
8536 TREE_VALUE (noncopied_parts), 0, 0);
8537 noncopied_parts = TREE_CHAIN (noncopied_parts);
8539 return temp;
8542 case RETURN_EXPR:
8543 if (!TREE_OPERAND (exp, 0))
8544 expand_null_return ();
8545 else
8546 expand_return (TREE_OPERAND (exp, 0));
8547 return const0_rtx;
8549 case PREINCREMENT_EXPR:
8550 case PREDECREMENT_EXPR:
8551 return expand_increment (exp, 0, ignore);
8553 case POSTINCREMENT_EXPR:
8554 case POSTDECREMENT_EXPR:
8555 /* Faster to treat as pre-increment if result is not used. */
8556 return expand_increment (exp, ! ignore, ignore);
8558 case ADDR_EXPR:
8559 /* If nonzero, TEMP will be set to the address of something that might
8560 be a MEM corresponding to a stack slot. */
8561 temp = 0;
8563 /* Are we taking the address of a nested function? */
8564 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8565 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8566 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8567 && ! TREE_STATIC (exp))
8569 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8570 op0 = force_operand (op0, target);
8572 /* If we are taking the address of something erroneous, just
8573 return a zero. */
8574 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8575 return const0_rtx;
8576 else
8578 /* We make sure to pass const0_rtx down if we came in with
8579 ignore set, to avoid doing the cleanups twice for something. */
8580 op0 = expand_expr (TREE_OPERAND (exp, 0),
8581 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8582 (modifier == EXPAND_INITIALIZER
8583 ? modifier : EXPAND_CONST_ADDRESS));
8585 /* If we are going to ignore the result, OP0 will have been set
8586 to const0_rtx, so just return it. Don't get confused and
8587 think we are taking the address of the constant. */
8588 if (ignore)
8589 return op0;
8591 op0 = protect_from_queue (op0, 0);
8593 /* We would like the object in memory. If it is a constant, we can
8594 have it be statically allocated into memory. For a non-constant,
8595 we need to allocate some memory and store the value into it. */
8597 if (CONSTANT_P (op0))
8598 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8599 op0);
8600 else if (GET_CODE (op0) == MEM)
8602 mark_temp_addr_taken (op0);
8603 temp = XEXP (op0, 0);
8606 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8607 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8608 || GET_CODE (op0) == PARALLEL)
8610 /* If this object is in a register, it must not
8611 be BLKmode. */
8612 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8613 tree nt = build_qualified_type (inner_type,
8614 (TYPE_QUALS (inner_type)
8615 | TYPE_QUAL_CONST));
8616 rtx memloc = assign_temp (nt, 1, 1, 1);
8618 mark_temp_addr_taken (memloc);
8619 if (GET_CODE (op0) == PARALLEL)
8620 /* Handle calls that pass values in multiple non-contiguous
8621 locations. The Irix 6 ABI has examples of this. */
8622 emit_group_store (memloc, op0,
8623 int_size_in_bytes (inner_type),
8624 TYPE_ALIGN (inner_type));
8625 else
8626 emit_move_insn (memloc, op0);
8627 op0 = memloc;
8630 if (GET_CODE (op0) != MEM)
8631 abort ();
8633 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8635 temp = XEXP (op0, 0);
8636 #ifdef POINTERS_EXTEND_UNSIGNED
8637 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8638 && mode == ptr_mode)
8639 temp = convert_memory_address (ptr_mode, temp);
8640 #endif
8641 return temp;
8644 op0 = force_operand (XEXP (op0, 0), target);
8647 if (flag_force_addr && GET_CODE (op0) != REG)
8648 op0 = force_reg (Pmode, op0);
8650 if (GET_CODE (op0) == REG
8651 && ! REG_USERVAR_P (op0))
8652 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8654 /* If we might have had a temp slot, add an equivalent address
8655 for it. */
8656 if (temp != 0)
8657 update_temp_slot_address (temp, op0);
8659 #ifdef POINTERS_EXTEND_UNSIGNED
8660 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8661 && mode == ptr_mode)
8662 op0 = convert_memory_address (ptr_mode, op0);
8663 #endif
8665 return op0;
8667 case ENTRY_VALUE_EXPR:
8668 abort ();
8670 /* COMPLEX type for Extended Pascal & Fortran */
8671 case COMPLEX_EXPR:
8673 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8674 rtx insns;
8676 /* Get the rtx code of the operands. */
8677 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8678 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8680 if (! target)
8681 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8683 start_sequence ();
8685 /* Move the real (op0) and imaginary (op1) parts to their location. */
8686 emit_move_insn (gen_realpart (mode, target), op0);
8687 emit_move_insn (gen_imagpart (mode, target), op1);
8689 insns = get_insns ();
8690 end_sequence ();
8692 /* Complex construction should appear as a single unit. */
8693 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8694 each with a separate pseudo as destination.
8695 It's not correct for flow to treat them as a unit. */
8696 if (GET_CODE (target) != CONCAT)
8697 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8698 else
8699 emit_insns (insns);
8701 return target;
8704 case REALPART_EXPR:
8705 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8706 return gen_realpart (mode, op0);
8708 case IMAGPART_EXPR:
8709 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8710 return gen_imagpart (mode, op0);
8712 case CONJ_EXPR:
8714 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8715 rtx imag_t;
8716 rtx insns;
8718 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8720 if (! target)
8721 target = gen_reg_rtx (mode);
8723 start_sequence ();
8725 /* Store the realpart and the negated imagpart to target. */
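/* The conjugate of a + b*i is a - b*i, so only the imaginary part needs to
be negated. */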
8726 emit_move_insn (gen_realpart (partmode, target),
8727 gen_realpart (partmode, op0));
8729 imag_t = gen_imagpart (partmode, target);
8730 temp = expand_unop (partmode,
8731 ! unsignedp && flag_trapv
8732 && (GET_MODE_CLASS(partmode) == MODE_INT)
8733 ? negv_optab : neg_optab,
8734 gen_imagpart (partmode, op0), imag_t, 0);
8735 if (temp != imag_t)
8736 emit_move_insn (imag_t, temp);
8738 insns = get_insns ();
8739 end_sequence ();
8741 /* Conjugate should appear as a single unit.
8742 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8743 each with a separate pseudo as destination.
8744 It's not correct for flow to treat them as a unit. */
8745 if (GET_CODE (target) != CONCAT)
8746 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8747 else
8748 emit_insns (insns);
8750 return target;
8753 case TRY_CATCH_EXPR:
8755 tree handler = TREE_OPERAND (exp, 1);
8757 expand_eh_region_start ();
8759 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8761 expand_eh_region_end_cleanup (handler);
8763 return op0;
8766 case TRY_FINALLY_EXPR:
8768 tree try_block = TREE_OPERAND (exp, 0);
8769 tree finally_block = TREE_OPERAND (exp, 1);
8770 rtx finally_label = gen_label_rtx ();
8771 rtx done_label = gen_label_rtx ();
8772 rtx return_link = gen_reg_rtx (Pmode);
8773 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8774 (tree) finally_label, (tree) return_link);
8775 TREE_SIDE_EFFECTS (cleanup) = 1;
8777 /* Start a new binding layer that will keep track of all cleanup
8778 actions to be performed. */
8779 expand_start_bindings (2);
8781 target_temp_slot_level = temp_slot_level;
8783 expand_decl_cleanup (NULL_TREE, cleanup);
8784 op0 = expand_expr (try_block, target, tmode, modifier);
8786 preserve_temp_slots (op0);
8787 expand_end_bindings (NULL_TREE, 0, 0);
8788 emit_jump (done_label);
8789 emit_label (finally_label);
8790 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8791 emit_indirect_jump (return_link);
8792 emit_label (done_label);
8793 return op0;
8796 case GOTO_SUBROUTINE_EXPR:
8798 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8799 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8800 rtx return_address = gen_label_rtx ();
8801 emit_move_insn (return_link,
8802 gen_rtx_LABEL_REF (Pmode, return_address));
8803 emit_jump (subr);
8804 emit_label (return_address);
8805 return const0_rtx;
8808 case VA_ARG_EXPR:
8809 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8811 case EXC_PTR_EXPR:
8812 return get_exception_pointer ();
8814 default:
8815 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8818 /* Here to do an ordinary binary operator, generating an instruction
8819 from the optab already placed in `this_optab'. */
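/* Illustrative: BIT_AND_EXPR, for instance, reaches this label with
this_optab set to and_optab; expand_binop then emits the instruction or
falls back to a library call via OPTAB_LIB_WIDEN. */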
8820 binop:
8821 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8822 subtarget = 0;
8823 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8824 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8825 binop2:
8826 temp = expand_binop (mode, this_optab, op0, op1, target,
8827 unsignedp, OPTAB_LIB_WIDEN);
8828 if (temp == 0)
8829 abort ();
8830 return temp;
8833 /* Similar to expand_expr, except that we don't specify a target, target
8834 mode, or modifier and we return the alignment of the inner type. This is
8835 used in cases where it is not necessary to align the result to the
8836 alignment of its type as long as we know the alignment of the result, for
8837 example for comparisons of BLKmode values. */
8839 static rtx
8840 expand_expr_unaligned (exp, palign)
8841 register tree exp;
8842 unsigned int *palign;
8844 register rtx op0;
8845 tree type = TREE_TYPE (exp);
8846 register enum machine_mode mode = TYPE_MODE (type);
8848 /* Default the alignment we return to that of the type. */
8849 *palign = TYPE_ALIGN (type);
8851 /* The only cases in which we do anything special is if the resulting mode
8852 is BLKmode. */
8853 if (mode != BLKmode)
8854 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8856 switch (TREE_CODE (exp))
8858 case CONVERT_EXPR:
8859 case NOP_EXPR:
8860 case NON_LVALUE_EXPR:
8861 /* Conversions between BLKmode values don't change the underlying
8862 alignment or value. */
8863 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8864 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8865 break;
8867 case ARRAY_REF:
8868 /* Much of the code for this case is copied directly from expand_expr.
8869 We need to duplicate it here because we will do something different
8870 in the fall-through case, so we need to handle the same exceptions
8871 it does. */
8873 tree array = TREE_OPERAND (exp, 0);
8874 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8875 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8876 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8877 HOST_WIDE_INT i;
8879 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8880 abort ();
8882 /* Optimize the special-case of a zero lower bound.
8884 We convert the low_bound to sizetype to avoid some problems
8885 with constant folding. (E.g. suppose the lower bound is 1,
8886 and its mode is QI. Without the conversion, (ARRAY
8887 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8888 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8890 if (! integer_zerop (low_bound))
8891 index = size_diffop (index, convert (sizetype, low_bound));
8893 /* If this is a constant index into a constant array,
8894 just get the value from the array. Handle both the cases when
8895 we have an explicit constructor and when our operand is a variable
8896 that was declared const. */
8898 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8899 && host_integerp (index, 0)
8900 && 0 > compare_tree_int (index,
8901 list_length (CONSTRUCTOR_ELTS
8902 (TREE_OPERAND (exp, 0)))))
8904 tree elem;
8906 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8907 i = tree_low_cst (index, 0);
8908 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8909 ;
8911 if (elem)
8912 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8915 else if (optimize >= 1
8916 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8917 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8918 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8920 if (TREE_CODE (index) == INTEGER_CST)
8922 tree init = DECL_INITIAL (array);
8924 if (TREE_CODE (init) == CONSTRUCTOR)
8926 tree elem;
8928 for (elem = CONSTRUCTOR_ELTS (init);
8929 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8930 elem = TREE_CHAIN (elem))
8931 ;
8933 if (elem)
8934 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8935 palign);
8940 /* Fall through. */
8942 case COMPONENT_REF:
8943 case BIT_FIELD_REF:
8944 /* If the operand is a CONSTRUCTOR, we can just extract the
8945 appropriate field if it is present. Don't do this if we have
8946 already written the data since we want to refer to that copy
8947 and varasm.c assumes that's what we'll do. */
8948 if (TREE_CODE (exp) != ARRAY_REF
8949 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8950 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8952 tree elt;
8954 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8955 elt = TREE_CHAIN (elt))
8956 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8957 /* Note that unlike the case in expand_expr, we know this is
8958 BLKmode and hence not an integer. */
8959 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8963 enum machine_mode mode1;
8964 HOST_WIDE_INT bitsize, bitpos;
8965 tree offset;
8966 int volatilep = 0;
8967 unsigned int alignment;
8968 int unsignedp;
8969 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8970 &mode1, &unsignedp, &volatilep,
8971 &alignment);
8973 /* If we got back the original object, something is wrong. Perhaps
8974 we are evaluating an expression too early. In any event, don't
8975 infinitely recurse. */
8976 if (tem == exp)
8977 abort ();
8979 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8981 /* If this is a constant, put it into a register if it is a
8982 legitimate constant and OFFSET is 0 and memory if it isn't. */
8983 if (CONSTANT_P (op0))
8985 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8987 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8988 && offset == 0)
8989 op0 = force_reg (inner_mode, op0);
8990 else
8991 op0 = validize_mem (force_const_mem (inner_mode, op0));
8994 if (offset != 0)
8996 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8998 /* If this object is in a register, put it into memory.
8999 This case can't occur in C, but can in Ada if we have
9000 unchecked conversion of an expression from a scalar type to
9001 an array or record type. */
9002 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9003 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9005 tree nt = build_qualified_type (TREE_TYPE (tem),
9006 (TYPE_QUALS (TREE_TYPE (tem))
9007 | TYPE_QUAL_CONST));
9008 rtx memloc = assign_temp (nt, 1, 1, 1);
9010 mark_temp_addr_taken (memloc);
9011 emit_move_insn (memloc, op0);
9012 op0 = memloc;
9015 if (GET_CODE (op0) != MEM)
9016 abort ();
9018 if (GET_MODE (offset_rtx) != ptr_mode)
9020 #ifdef POINTERS_EXTEND_UNSIGNED
9021 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9022 #else
9023 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9024 #endif
9027 op0 = change_address (op0, VOIDmode,
9028 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9029 force_reg (ptr_mode,
9030 offset_rtx)));
9033 /* Don't forget about volatility even if this is a bitfield. */
9034 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9036 op0 = copy_rtx (op0);
9037 MEM_VOLATILE_P (op0) = 1;
9040 /* Check the access. */
9041 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9043 rtx to;
9044 int size;
9046 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9047 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9049 /* Check the access right of the pointer. */
9050 in_check_memory_usage = 1;
9051 if (size > BITS_PER_UNIT)
9052 emit_library_call (chkr_check_addr_libfunc,
9053 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9054 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9055 TYPE_MODE (sizetype),
9056 GEN_INT (MEMORY_USE_RO),
9057 TYPE_MODE (integer_type_node));
9058 in_check_memory_usage = 0;
9061 /* In cases where an aligned union has an unaligned object
9062 as a field, we might be extracting a BLKmode value from
9063 an integer-mode (e.g., SImode) object. Handle this case
9064 by doing the extract into an object as wide as the field
9065 (which we know to be the width of a basic mode), then
9066 storing into memory, and changing the mode to BLKmode.
9067 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9068 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9069 if (mode1 == VOIDmode
9070 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9071 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9072 && (TYPE_ALIGN (type) > alignment
9073 || bitpos % TYPE_ALIGN (type) != 0)))
9075 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9077 if (ext_mode == BLKmode)
9079 /* In this case, BITPOS must start at a byte boundary. */
9080 if (GET_CODE (op0) != MEM
9081 || bitpos % BITS_PER_UNIT != 0)
9082 abort ();
9084 op0 = change_address (op0, VOIDmode,
9085 plus_constant (XEXP (op0, 0),
9086 bitpos / BITS_PER_UNIT));
9088 else
9090 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9091 TYPE_QUAL_CONST);
9092 rtx new = assign_temp (nt, 0, 1, 1);
9094 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9095 unsignedp, NULL_RTX, ext_mode,
9096 ext_mode, alignment,
9097 int_size_in_bytes (TREE_TYPE (tem)));
9099 /* If the result is a record type and BITSIZE is narrower than
9100 the mode of OP0, an integral mode, and this is a big endian
9101 machine, we must put the field into the high-order bits. */
9102 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9103 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9104 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9105 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9106 size_int (GET_MODE_BITSIZE
9107 (GET_MODE (op0))
9108 - bitsize),
9109 op0, 1);
9111 emit_move_insn (new, op0);
9112 op0 = copy_rtx (new);
9113 PUT_MODE (op0, BLKmode);
9116 else
9117 /* Get a reference to just this component. */
9118 op0 = change_address (op0, mode1,
9119 plus_constant (XEXP (op0, 0),
9120 (bitpos / BITS_PER_UNIT)));
9122 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9124 /* Adjust the alignment in case the bit position is not
9125 a multiple of the alignment of the inner object. */
9126 while (bitpos % alignment != 0)
9127 alignment >>= 1;
9129 if (GET_CODE (XEXP (op0, 0)) == REG)
9130 mark_reg_pointer (XEXP (op0, 0), alignment);
9132 MEM_IN_STRUCT_P (op0) = 1;
9133 MEM_VOLATILE_P (op0) |= volatilep;
9135 *palign = alignment;
9136 return op0;
9139 default:
9140 break;
9144 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9147 /* Return the tree node if ARG corresponds to a string constant or zero
9148 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9149 in bytes within the string that ARG is accessing. The type of the
9150 offset will be `sizetype'. */
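/* A hedged illustration (not part of the compiled code): for an ARG of the
   form (ADDR_EXPR (STRING_CST "hello")) this returns the STRING_CST with
   *PTR_OFFSET set to zero, while for (PLUS_EXPR (ADDR_EXPR (STRING_CST
   "hello")) 2) it returns the same STRING_CST with *PTR_OFFSET set to 2;
   any other shape yields zero.  */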
9152 tree
9153 string_constant (arg, ptr_offset)
9154 tree arg;
9155 tree *ptr_offset;
9157 STRIP_NOPS (arg);
9159 if (TREE_CODE (arg) == ADDR_EXPR
9160 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9162 *ptr_offset = size_zero_node;
9163 return TREE_OPERAND (arg, 0);
9165 else if (TREE_CODE (arg) == PLUS_EXPR)
9167 tree arg0 = TREE_OPERAND (arg, 0);
9168 tree arg1 = TREE_OPERAND (arg, 1);
9170 STRIP_NOPS (arg0);
9171 STRIP_NOPS (arg1);
9173 if (TREE_CODE (arg0) == ADDR_EXPR
9174 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9176 *ptr_offset = convert (sizetype, arg1);
9177 return TREE_OPERAND (arg0, 0);
9179 else if (TREE_CODE (arg1) == ADDR_EXPR
9180 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9182 *ptr_offset = convert (sizetype, arg0);
9183 return TREE_OPERAND (arg1, 0);
9187 return 0;
9190 /* Expand code for a post- or pre- increment or decrement
9191 and return the RTX for the result.
9192 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
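/* A hedged sketch of the intended mapping: for the C expression `i++' the
   front end hands us a POSTINCREMENT_EXPR and POST is 1, so the rtx we
   return holds the value of `i' before the increment; for `++i' POST is 0
   and the rtx returned is the freshly incremented value.  */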
9194 static rtx
9195 expand_increment (exp, post, ignore)
9196 register tree exp;
9197 int post, ignore;
9199 register rtx op0, op1;
9200 register rtx temp, value;
9201 register tree incremented = TREE_OPERAND (exp, 0);
9202 optab this_optab = add_optab;
9203 int icode;
9204 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9205 int op0_is_copy = 0;
9206 int single_insn = 0;
9207 /* 1 means we can't store into OP0 directly,
9208 because it is a subreg narrower than a word,
9209 and we don't dare clobber the rest of the word. */
9210 int bad_subreg = 0;
9212 /* Stabilize any component ref that might need to be
9213 evaluated more than once below. */
9214 if (!post
9215 || TREE_CODE (incremented) == BIT_FIELD_REF
9216 || (TREE_CODE (incremented) == COMPONENT_REF
9217 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9218 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9219 incremented = stabilize_reference (incremented);
9220 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9221 ones into save exprs so that they don't accidentally get evaluated
9222 more than once by the code below. */
9223 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9224 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9225 incremented = save_expr (incremented);
9227 /* Compute the operands as RTX.
9228 Note whether OP0 is the actual lvalue or a copy of it:
9229 I believe it is a copy iff it is a register or subreg
9230 and insns were generated in computing it. */
9232 temp = get_last_insn ();
9233 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9235 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9236 in place but instead must do sign- or zero-extension during assignment,
9237 so we copy it into a new register and let the code below use it as
9238 a copy.
9240 Note that we can safely modify this SUBREG since it is known not to be
9241 shared (it was made by the expand_expr call above). */
9243 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9245 if (post)
9246 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9247 else
9248 bad_subreg = 1;
9250 else if (GET_CODE (op0) == SUBREG
9251 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9253 /* We cannot increment this SUBREG in place. If we are
9254 post-incrementing, get a copy of the old value. Otherwise,
9255 just mark that we cannot increment in place. */
9256 if (post)
9257 op0 = copy_to_reg (op0);
9258 else
9259 bad_subreg = 1;
9262 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9263 && temp != get_last_insn ());
9264 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9265 EXPAND_MEMORY_USE_BAD);
9267 /* Decide whether incrementing or decrementing. */
9268 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9269 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9270 this_optab = sub_optab;
9272 /* Convert decrement by a constant into a negative increment. */
9273 if (this_optab == sub_optab
9274 && GET_CODE (op1) == CONST_INT)
9276 op1 = GEN_INT (-INTVAL (op1));
9277 this_optab = add_optab;
9280 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9281 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9283 /* For a preincrement, see if we can do this with a single instruction. */
9284 if (!post)
9286 icode = (int) this_optab->handlers[(int) mode].insn_code;
9287 if (icode != (int) CODE_FOR_nothing
9288 /* Make sure that OP0 is valid for operands 0 and 1
9289 of the insn we want to queue. */
9290 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9291 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9292 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9293 single_insn = 1;
9296 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9297 then we cannot just increment OP0. We must therefore contrive to
9298 increment the original value. Then, for postincrement, we can return
9299 OP0 since it is a copy of the old value. For preincrement, expand here
9300 unless we can do it with a single insn.
9302 Likewise if storing directly into OP0 would clobber high bits
9303 we need to preserve (bad_subreg). */
9304 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9306 /* This is the easiest way to increment the value wherever it is.
9307 Problems with multiple evaluation of INCREMENTED are prevented
9308 because either (1) it is a component_ref or preincrement,
9309 in which case it was stabilized above, or (2) it is an array_ref
9310 with constant index in an array in a register, which is
9311 safe to reevaluate. */
9312 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9313 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9314 ? MINUS_EXPR : PLUS_EXPR),
9315 TREE_TYPE (exp),
9316 incremented,
9317 TREE_OPERAND (exp, 1));
9319 while (TREE_CODE (incremented) == NOP_EXPR
9320 || TREE_CODE (incremented) == CONVERT_EXPR)
9322 newexp = convert (TREE_TYPE (incremented), newexp);
9323 incremented = TREE_OPERAND (incremented, 0);
9326 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9327 return post ? op0 : temp;
9330 if (post)
9332 /* We have a true reference to the value in OP0.
9333 If there is an insn to add or subtract in this mode, queue it.
9334 Queueing the increment insn avoids the register shuffling
9335 that often results if we must increment now and first save
9336 the old value for subsequent use. */
9338 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9339 op0 = stabilize (op0);
9340 #endif
9342 icode = (int) this_optab->handlers[(int) mode].insn_code;
9343 if (icode != (int) CODE_FOR_nothing
9344 /* Make sure that OP0 is valid for operands 0 and 1
9345 of the insn we want to queue. */
9346 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9347 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9349 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9350 op1 = force_reg (mode, op1);
9352 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9354 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9356 rtx addr = (general_operand (XEXP (op0, 0), mode)
9357 ? force_reg (Pmode, XEXP (op0, 0))
9358 : copy_to_reg (XEXP (op0, 0)));
9359 rtx temp, result;
9361 op0 = change_address (op0, VOIDmode, addr);
9362 temp = force_reg (GET_MODE (op0), op0);
9363 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9364 op1 = force_reg (mode, op1);
9366 /* The increment queue is LIFO, so we have to `queue'
9367 the instructions in reverse order. */
9368 enqueue_insn (op0, gen_move_insn (op0, temp));
9369 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9370 return result;
9374 /* Preincrement, or we can't increment with one simple insn. */
9375 if (post)
9376 /* Save a copy of the value before inc or dec, to return it later. */
9377 temp = value = copy_to_reg (op0);
9378 else
9379 /* Arrange to return the incremented value. */
9380 /* Copy the rtx because expand_binop will protect from the queue,
9381 and the results of that would be invalid for us to return
9382 if our caller does emit_queue before using our result. */
9383 temp = copy_rtx (value = op0);
9385 /* Increment however we can. */
9386 op1 = expand_binop (mode, this_optab, value, op1,
9387 current_function_check_memory_usage ? NULL_RTX : op0,
9388 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9389 /* Make sure the value is stored into OP0. */
9390 if (op1 != op0)
9391 emit_move_insn (op0, op1);
9393 return temp;
9396 /* At the start of a function, record that we have no previously-pushed
9397 arguments waiting to be popped. */
9399 void
9400 init_pending_stack_adjust ()
9402 pending_stack_adjust = 0;
9405 /* When exiting from a function, if safe, clear out any pending stack adjust
9406 so the adjustment won't get done.
9408 Note, if the current function calls alloca, then it must have a
9409 frame pointer regardless of the value of flag_omit_frame_pointer. */
9411 void
9412 clear_pending_stack_adjust ()
9414 #ifdef EXIT_IGNORE_STACK
9415 if (optimize > 0
9416 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9417 && EXIT_IGNORE_STACK
9418 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9419 && ! flag_inline_functions)
9421 stack_pointer_delta -= pending_stack_adjust,
9422 pending_stack_adjust = 0;
9424 #endif
9427 /* Pop any previously-pushed arguments that have not been popped yet. */
9429 void
9430 do_pending_stack_adjust ()
9432 if (inhibit_defer_pop == 0)
9434 if (pending_stack_adjust != 0)
9435 adjust_stack (GEN_INT (pending_stack_adjust));
9436 pending_stack_adjust = 0;
9440 /* Expand conditional expressions. */
9442 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9443 LABEL is an rtx of code CODE_LABEL, in this function and all the
9444 functions here. */
9446 void
9447 jumpifnot (exp, label)
9448 tree exp;
9449 rtx label;
9451 do_jump (exp, label, NULL_RTX);
9454 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9456 void
9457 jumpif (exp, label)
9458 tree exp;
9459 rtx label;
9461 do_jump (exp, NULL_RTX, label);
9464 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9465 the result is zero, or IF_TRUE_LABEL if the result is one.
9466 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9467 meaning fall through in that case.
9469 do_jump always does any pending stack adjust except when it does not
9470 actually perform a jump. An example where there is no jump
9471 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9473 This function is responsible for optimizing cases such as
9474 &&, || and comparison operators in EXP. */
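/* A hedged sketch of the && optimization referred to above: for the C
   statement `if (a && b) body;' the effect is roughly

	jumpifnot (a, else_label);
	jumpifnot (b, else_label);
	... code for body ...
     else_label:

   so no boolean value for `a && b' is ever materialized.  */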
9476 void
9477 do_jump (exp, if_false_label, if_true_label)
9478 tree exp;
9479 rtx if_false_label, if_true_label;
9481 register enum tree_code code = TREE_CODE (exp);
9482 /* Some cases need to create a label to jump to
9483 in order to properly fall through.
9484 These cases set DROP_THROUGH_LABEL nonzero. */
9485 rtx drop_through_label = 0;
9486 rtx temp;
9487 int i;
9488 tree type;
9489 enum machine_mode mode;
9491 #ifdef MAX_INTEGER_COMPUTATION_MODE
9492 check_max_integer_computation_mode (exp);
9493 #endif
9495 emit_queue ();
9497 switch (code)
9499 case ERROR_MARK:
9500 break;
9502 case INTEGER_CST:
9503 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9504 if (temp)
9505 emit_jump (temp);
9506 break;
9508 #if 0
9509 /* This is not true with #pragma weak */
9510 case ADDR_EXPR:
9511 /* The address of something can never be zero. */
9512 if (if_true_label)
9513 emit_jump (if_true_label);
9514 break;
9515 #endif
9517 case NOP_EXPR:
9518 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9519 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9520 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9521 goto normal;
9522 case CONVERT_EXPR:
9523 /* If we are narrowing the operand, we have to do the compare in the
9524 narrower mode. */
9525 if ((TYPE_PRECISION (TREE_TYPE (exp))
9526 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9527 goto normal;
9528 case NON_LVALUE_EXPR:
9529 case REFERENCE_EXPR:
9530 case ABS_EXPR:
9531 case NEGATE_EXPR:
9532 case LROTATE_EXPR:
9533 case RROTATE_EXPR:
9534 /* These cannot change zero->non-zero or vice versa. */
9535 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9536 break;
9538 case WITH_RECORD_EXPR:
9539 /* Put the object on the placeholder list, recurse through our first
9540 operand, and pop the list. */
9541 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9542 placeholder_list);
9543 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9544 placeholder_list = TREE_CHAIN (placeholder_list);
9545 break;
9547 #if 0
9548 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9549 a test and can be longer if the test is eliminated. */
9550 case PLUS_EXPR:
9551 /* Reduce to minus. */
9552 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9553 TREE_OPERAND (exp, 0),
9554 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9555 TREE_OPERAND (exp, 1))));
9556 /* Process as MINUS. */
9557 #endif
9559 case MINUS_EXPR:
9560 /* Non-zero iff operands of minus differ. */
9561 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9562 TREE_OPERAND (exp, 0),
9563 TREE_OPERAND (exp, 1)),
9564 NE, NE, if_false_label, if_true_label);
9565 break;
9567 case BIT_AND_EXPR:
9568 /* If we are AND'ing with a small constant, do this comparison in the
9569 smallest type that fits. If the machine doesn't have comparisons
9570 that small, it will be converted back to the wider comparison.
9571 This helps if we are testing the sign bit of a narrower object.
9572 combine can't do this for us because it can't know whether a
9573 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
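/* Hedged example of the narrowing described above: testing `(x & 0x80) != 0'
   for an int X can be done as a QImode comparison, since bit 7 fits in an
   8-bit unsigned type; the convert call below arranges exactly that when the
   target has a usable QImode compare insn.  */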
9575 if (! SLOW_BYTE_ACCESS
9576 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9577 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9578 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9579 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9580 && (type = type_for_mode (mode, 1)) != 0
9581 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9582 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9583 != CODE_FOR_nothing))
9585 do_jump (convert (type, exp), if_false_label, if_true_label);
9586 break;
9588 goto normal;
9590 case TRUTH_NOT_EXPR:
9591 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9592 break;
9594 case TRUTH_ANDIF_EXPR:
9595 if (if_false_label == 0)
9596 if_false_label = drop_through_label = gen_label_rtx ();
9597 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9598 start_cleanup_deferral ();
9599 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9600 end_cleanup_deferral ();
9601 break;
9603 case TRUTH_ORIF_EXPR:
9604 if (if_true_label == 0)
9605 if_true_label = drop_through_label = gen_label_rtx ();
9606 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9607 start_cleanup_deferral ();
9608 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9609 end_cleanup_deferral ();
9610 break;
9612 case COMPOUND_EXPR:
9613 push_temp_slots ();
9614 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9615 preserve_temp_slots (NULL_RTX);
9616 free_temp_slots ();
9617 pop_temp_slots ();
9618 emit_queue ();
9619 do_pending_stack_adjust ();
9620 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9621 break;
9623 case COMPONENT_REF:
9624 case BIT_FIELD_REF:
9625 case ARRAY_REF:
9627 HOST_WIDE_INT bitsize, bitpos;
9628 int unsignedp;
9629 enum machine_mode mode;
9630 tree type;
9631 tree offset;
9632 int volatilep = 0;
9633 unsigned int alignment;
9635 /* Get description of this reference. We don't actually care
9636 about the underlying object here. */
9637 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9638 &unsignedp, &volatilep, &alignment);
9640 type = type_for_size (bitsize, unsignedp);
9641 if (! SLOW_BYTE_ACCESS
9642 && type != 0 && bitsize >= 0
9643 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9644 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9645 != CODE_FOR_nothing))
9647 do_jump (convert (type, exp), if_false_label, if_true_label);
9648 break;
9650 goto normal;
9653 case COND_EXPR:
9654 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9655 if (integer_onep (TREE_OPERAND (exp, 1))
9656 && integer_zerop (TREE_OPERAND (exp, 2)))
9657 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9659 else if (integer_zerop (TREE_OPERAND (exp, 1))
9660 && integer_onep (TREE_OPERAND (exp, 2)))
9661 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9663 else
9665 register rtx label1 = gen_label_rtx ();
9666 drop_through_label = gen_label_rtx ();
9668 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9670 start_cleanup_deferral ();
9671 /* Now the THEN-expression. */
9672 do_jump (TREE_OPERAND (exp, 1),
9673 if_false_label ? if_false_label : drop_through_label,
9674 if_true_label ? if_true_label : drop_through_label);
9675 /* In case the do_jump just above never jumps. */
9676 do_pending_stack_adjust ();
9677 emit_label (label1);
9679 /* Now the ELSE-expression. */
9680 do_jump (TREE_OPERAND (exp, 2),
9681 if_false_label ? if_false_label : drop_through_label,
9682 if_true_label ? if_true_label : drop_through_label);
9683 end_cleanup_deferral ();
9685 break;
9687 case EQ_EXPR:
9689 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9691 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9692 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9694 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9695 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9696 do_jump
9697 (fold
9698 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9699 fold (build (EQ_EXPR, TREE_TYPE (exp),
9700 fold (build1 (REALPART_EXPR,
9701 TREE_TYPE (inner_type),
9702 exp0)),
9703 fold (build1 (REALPART_EXPR,
9704 TREE_TYPE (inner_type),
9705 exp1)))),
9706 fold (build (EQ_EXPR, TREE_TYPE (exp),
9707 fold (build1 (IMAGPART_EXPR,
9708 TREE_TYPE (inner_type),
9709 exp0)),
9710 fold (build1 (IMAGPART_EXPR,
9711 TREE_TYPE (inner_type),
9712 exp1)))))),
9713 if_false_label, if_true_label);
9716 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9717 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9719 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9720 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9721 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9722 else
9723 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9724 break;
9727 case NE_EXPR:
9729 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9731 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9732 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9734 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9735 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9736 do_jump
9737 (fold
9738 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9739 fold (build (NE_EXPR, TREE_TYPE (exp),
9740 fold (build1 (REALPART_EXPR,
9741 TREE_TYPE (inner_type),
9742 exp0)),
9743 fold (build1 (REALPART_EXPR,
9744 TREE_TYPE (inner_type),
9745 exp1)))),
9746 fold (build (NE_EXPR, TREE_TYPE (exp),
9747 fold (build1 (IMAGPART_EXPR,
9748 TREE_TYPE (inner_type),
9749 exp0)),
9750 fold (build1 (IMAGPART_EXPR,
9751 TREE_TYPE (inner_type),
9752 exp1)))))),
9753 if_false_label, if_true_label);
9756 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9757 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9759 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9760 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9761 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9762 else
9763 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9764 break;
9767 case LT_EXPR:
9768 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9769 if (GET_MODE_CLASS (mode) == MODE_INT
9770 && ! can_compare_p (LT, mode, ccp_jump))
9771 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9772 else
9773 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9774 break;
9776 case LE_EXPR:
9777 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9778 if (GET_MODE_CLASS (mode) == MODE_INT
9779 && ! can_compare_p (LE, mode, ccp_jump))
9780 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9781 else
9782 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9783 break;
9785 case GT_EXPR:
9786 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9787 if (GET_MODE_CLASS (mode) == MODE_INT
9788 && ! can_compare_p (GT, mode, ccp_jump))
9789 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9790 else
9791 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9792 break;
9794 case GE_EXPR:
9795 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9796 if (GET_MODE_CLASS (mode) == MODE_INT
9797 && ! can_compare_p (GE, mode, ccp_jump))
9798 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9799 else
9800 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9801 break;
9803 case UNORDERED_EXPR:
9804 case ORDERED_EXPR:
9806 enum rtx_code cmp, rcmp;
9807 int do_rev;
9809 if (code == UNORDERED_EXPR)
9810 cmp = UNORDERED, rcmp = ORDERED;
9811 else
9812 cmp = ORDERED, rcmp = UNORDERED;
9813 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9815 do_rev = 0;
9816 if (! can_compare_p (cmp, mode, ccp_jump)
9817 && (can_compare_p (rcmp, mode, ccp_jump)
9818 /* If the target doesn't provide either UNORDERED or ORDERED
9819 comparisons, canonicalize on UNORDERED for the library. */
9820 || rcmp == UNORDERED))
9821 do_rev = 1;
9823 if (! do_rev)
9824 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9825 else
9826 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9828 break;
9831 enum rtx_code rcode1;
9832 enum tree_code tcode2;
9834 case UNLT_EXPR:
9835 rcode1 = UNLT;
9836 tcode2 = LT_EXPR;
9837 goto unordered_bcc;
9838 case UNLE_EXPR:
9839 rcode1 = UNLE;
9840 tcode2 = LE_EXPR;
9841 goto unordered_bcc;
9842 case UNGT_EXPR:
9843 rcode1 = UNGT;
9844 tcode2 = GT_EXPR;
9845 goto unordered_bcc;
9846 case UNGE_EXPR:
9847 rcode1 = UNGE;
9848 tcode2 = GE_EXPR;
9849 goto unordered_bcc;
9850 case UNEQ_EXPR:
9851 rcode1 = UNEQ;
9852 tcode2 = EQ_EXPR;
9853 goto unordered_bcc;
9855 unordered_bcc:
9856 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9857 if (can_compare_p (rcode1, mode, ccp_jump))
9858 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9859 if_true_label);
9860 else
9862 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9863 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9864 tree cmp0, cmp1;
9866 /* If the target doesn't support combined unordered
9867 compares, decompose into UNORDERED + comparison. */
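/* E.g. (hedged) UNLT_EXPR (a, b) is rebuilt below as the equivalent of
   UNORDERED_EXPR (a, b) || LT_EXPR (a, b), joined with TRUTH_ORIF_EXPR.  */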
9868 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9869 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9870 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9871 do_jump (exp, if_false_label, if_true_label);
9874 break;
9876 default:
9877 normal:
9878 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9879 #if 0
9880 /* This is not needed any more and causes poor code since it causes
9881 comparisons and tests from non-SI objects to have different code
9882 sequences. */
9883 /* Copy to register to avoid generating bad insns by cse
9884 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9885 if (!cse_not_expected && GET_CODE (temp) == MEM)
9886 temp = copy_to_reg (temp);
9887 #endif
9888 do_pending_stack_adjust ();
9889 /* Do any postincrements in the expression that was tested. */
9890 emit_queue ();
9892 if (GET_CODE (temp) == CONST_INT
9893 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9894 || GET_CODE (temp) == LABEL_REF)
9896 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9897 if (target)
9898 emit_jump (target);
9900 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9901 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9902 /* Note swapping the labels gives us not-equal. */
9903 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9904 else if (GET_MODE (temp) != VOIDmode)
9905 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9906 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9907 GET_MODE (temp), NULL_RTX, 0,
9908 if_false_label, if_true_label);
9909 else
9910 abort ();
9913 if (drop_through_label)
9915 /* If do_jump produces code that might be jumped around,
9916 do any stack adjusts from that code, before the place
9917 where control merges in. */
9918 do_pending_stack_adjust ();
9919 emit_label (drop_through_label);
9923 /* Given a comparison expression EXP for values too wide to be compared
9924 with one insn, test the comparison and jump to the appropriate label.
9925 The code of EXP is ignored; we always test GT if SWAP is 0,
9926 and LT if SWAP is 1. */
9928 static void
9929 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9930 tree exp;
9931 int swap;
9932 rtx if_false_label, if_true_label;
9934 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9935 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9936 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9937 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9939 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9942 /* Compare OP0 with OP1, word at a time, in mode MODE.
9943 UNSIGNEDP says to do unsigned comparison.
9944 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
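/* Hedged sketch for a double-word comparison on a 32-bit target: the
   high-order words are compared first; `greater' jumps straight to
   IF_TRUE_LABEL, `not equal' (hence less) jumps to IF_FALSE_LABEL, and only
   when the high words are equal are the low-order words compared, always as
   unsigned.  */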
9946 void
9947 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9948 enum machine_mode mode;
9949 int unsignedp;
9950 rtx op0, op1;
9951 rtx if_false_label, if_true_label;
9953 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9954 rtx drop_through_label = 0;
9955 int i;
9957 if (! if_true_label || ! if_false_label)
9958 drop_through_label = gen_label_rtx ();
9959 if (! if_true_label)
9960 if_true_label = drop_through_label;
9961 if (! if_false_label)
9962 if_false_label = drop_through_label;
9964 /* Compare a word at a time, high order first. */
9965 for (i = 0; i < nwords; i++)
9967 rtx op0_word, op1_word;
9969 if (WORDS_BIG_ENDIAN)
9971 op0_word = operand_subword_force (op0, i, mode);
9972 op1_word = operand_subword_force (op1, i, mode);
9974 else
9976 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9977 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9980 /* All but high-order word must be compared as unsigned. */
9981 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9982 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9983 NULL_RTX, if_true_label);
9985 /* Consider lower words only if these are equal. */
9986 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9987 NULL_RTX, 0, NULL_RTX, if_false_label);
9990 if (if_false_label)
9991 emit_jump (if_false_label);
9992 if (drop_through_label)
9993 emit_label (drop_through_label);
9996 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9997 with one insn, test the comparison and jump to the appropriate label. */
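/* Hedged sketch: each pair of corresponding words is compared in turn; the
   first mismatch jumps to IF_FALSE_LABEL, and if every word matches, control
   falls through to an unconditional jump to IF_TRUE_LABEL.  */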
9999 static void
10000 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10001 tree exp;
10002 rtx if_false_label, if_true_label;
10004 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10005 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10006 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10007 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10008 int i;
10009 rtx drop_through_label = 0;
10011 if (! if_false_label)
10012 drop_through_label = if_false_label = gen_label_rtx ();
10014 for (i = 0; i < nwords; i++)
10015 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10016 operand_subword_force (op1, i, mode),
10017 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10018 word_mode, NULL_RTX, 0, if_false_label,
10019 NULL_RTX);
10021 if (if_true_label)
10022 emit_jump (if_true_label);
10023 if (drop_through_label)
10024 emit_label (drop_through_label);
10027 /* Jump according to whether OP0 is 0.
10028 We assume that OP0 has an integer mode that is too wide
10029 for the available compare insns. */
10031 void
10032 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10033 rtx op0;
10034 rtx if_false_label, if_true_label;
10036 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10037 rtx part;
10038 int i;
10039 rtx drop_through_label = 0;
10041 /* The fastest way of doing this comparison on almost any machine is to
10042 "or" all the words and compare the result. If all have to be loaded
10043 from memory and this is a very wide item, it's possible this may
10044 be slower, but that's highly unlikely. */
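/* Hedged sketch for a double-word OP0 on a 32-bit target:

	part = word_0 | word_1;
	if (part == 0) goto if_true_label;
	goto if_false_label;

   i.e. one IOR and a single word_mode comparison against zero.  */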
10046 part = gen_reg_rtx (word_mode);
10047 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10048 for (i = 1; i < nwords && part != 0; i++)
10049 part = expand_binop (word_mode, ior_optab, part,
10050 operand_subword_force (op0, i, GET_MODE (op0)),
10051 part, 1, OPTAB_WIDEN);
10053 if (part != 0)
10055 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10056 NULL_RTX, 0, if_false_label, if_true_label);
10058 return;
10061 /* If we couldn't do the "or" simply, do this with a series of compares. */
10062 if (! if_false_label)
10063 drop_through_label = if_false_label = gen_label_rtx ();
10065 for (i = 0; i < nwords; i++)
10066 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10067 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10068 if_false_label, NULL_RTX);
10070 if (if_true_label)
10071 emit_jump (if_true_label);
10073 if (drop_through_label)
10074 emit_label (drop_through_label);
10077 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10078 (including code to compute the values to be compared)
10079 and set (CC0) according to the result.
10080 The decision as to signed or unsigned comparison must be made by the caller.
10082 We force a stack adjustment unless there are currently
10083 things pushed on the stack that aren't yet used.
10085 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10086 compared.
10088 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10089 size of MODE should be used. */
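/* Hedged sketch of the result: after emitting the compare insn this returns
   an rtx of the form (CODE (cc0) (const_int 0)), e.g. (lt (cc0) (const_int 0)),
   describing the condition just set; if both operands are constant, the
   folded comparison result may be returned directly instead.  */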
10091 rtx
10092 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10093 register rtx op0, op1;
10094 enum rtx_code code;
10095 int unsignedp;
10096 enum machine_mode mode;
10097 rtx size;
10098 unsigned int align;
10100 rtx tem;
10102 /* If one operand is constant, make it the second one. Only do this
10103 if the other operand is not constant as well. */
10105 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10106 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10108 tem = op0;
10109 op0 = op1;
10110 op1 = tem;
10111 code = swap_condition (code);
10114 if (flag_force_mem)
10116 op0 = force_not_mem (op0);
10117 op1 = force_not_mem (op1);
10120 do_pending_stack_adjust ();
10122 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10123 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10124 return tem;
10126 #if 0
10127 /* There's no need to do this now that combine.c can eliminate lots of
10128 sign extensions. This can be less efficient in certain cases on other
10129 machines. */
10131 /* If this is a signed equality comparison, we can do it as an
10132 unsigned comparison since zero-extension is cheaper than sign
10133 extension and comparisons with zero are done as unsigned. This is
10134 the case even on machines that can do fast sign extension, since
10135 zero-extension is easier to combine with other operations than
10136 sign-extension is. If we are comparing against a constant, we must
10137 convert it to what it would look like unsigned. */
10138 if ((code == EQ || code == NE) && ! unsignedp
10139 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10141 if (GET_CODE (op1) == CONST_INT
10142 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10143 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10144 unsignedp = 1;
10146 #endif
10148 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10150 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10153 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10154 The decision as to signed or unsigned comparison must be made by the caller.
10156 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10157 compared.
10159 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10160 size of MODE should be used. */
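/* Hedged usage sketch: a call such as

	do_compare_rtx_and_jump (x, y, EQ, 0, SImode, NULL_RTX, 0,
				 lab, NULL_RTX);

   with no true label is (for non-float modes) turned into a single branch to
   LAB taken when X != Y, i.e. the condition is reversed rather than emitting
   a jump around a jump.  */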
10162 void
10163 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10164 if_false_label, if_true_label)
10165 register rtx op0, op1;
10166 enum rtx_code code;
10167 int unsignedp;
10168 enum machine_mode mode;
10169 rtx size;
10170 unsigned int align;
10171 rtx if_false_label, if_true_label;
10173 rtx tem;
10174 int dummy_true_label = 0;
10176 /* Reverse the comparison if that is safe and we want to jump if it is
10177 false. */
10178 if (! if_true_label && ! FLOAT_MODE_P (mode))
10180 if_true_label = if_false_label;
10181 if_false_label = 0;
10182 code = reverse_condition (code);
10185 /* If one operand is constant, make it the second one. Only do this
10186 if the other operand is not constant as well. */
10188 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10189 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10191 tem = op0;
10192 op0 = op1;
10193 op1 = tem;
10194 code = swap_condition (code);
10197 if (flag_force_mem)
10199 op0 = force_not_mem (op0);
10200 op1 = force_not_mem (op1);
10203 do_pending_stack_adjust ();
10205 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10206 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10208 if (tem == const_true_rtx)
10210 if (if_true_label)
10211 emit_jump (if_true_label);
10213 else
10215 if (if_false_label)
10216 emit_jump (if_false_label);
10218 return;
10221 #if 0
10222 /* There's no need to do this now that combine.c can eliminate lots of
10223 sign extensions. This can be less efficient in certain cases on other
10224 machines. */
10226 /* If this is a signed equality comparison, we can do it as an
10227 unsigned comparison since zero-extension is cheaper than sign
10228 extension and comparisons with zero are done as unsigned. This is
10229 the case even on machines that can do fast sign extension, since
10230 zero-extension is easier to combine with other operations than
10231 sign-extension is. If we are comparing against a constant, we must
10232 convert it to what it would look like unsigned. */
10233 if ((code == EQ || code == NE) && ! unsignedp
10234 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10236 if (GET_CODE (op1) == CONST_INT
10237 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10238 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10239 unsignedp = 1;
10241 #endif
10243 if (! if_true_label)
10245 dummy_true_label = 1;
10246 if_true_label = gen_label_rtx ();
10249 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10250 if_true_label);
10252 if (if_false_label)
10253 emit_jump (if_false_label);
10254 if (dummy_true_label)
10255 emit_label (if_true_label);
10258 /* Generate code for a comparison expression EXP (including code to compute
10259 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10260 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10261 generated code will drop through.
10262 SIGNED_CODE should be the rtx operation for this comparison for
10263 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10265 We force a stack adjustment unless there are currently
10266 things pushed on the stack that aren't yet used. */
10268 static void
10269 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10270 if_true_label)
10271 register tree exp;
10272 enum rtx_code signed_code, unsigned_code;
10273 rtx if_false_label, if_true_label;
10275 unsigned int align0, align1;
10276 register rtx op0, op1;
10277 register tree type;
10278 register enum machine_mode mode;
10279 int unsignedp;
10280 enum rtx_code code;
10282 /* Don't crash if the comparison was erroneous. */
10283 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10284 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10285 return;
10287 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10288 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10289 return;
10291 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10292 mode = TYPE_MODE (type);
10293 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10294 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10295 || (GET_MODE_BITSIZE (mode)
10296 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10297 1)))))))
10299 /* op0 might have been replaced by a promoted constant, in which
10300 case the type of the second argument should be used. */
10301 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10302 mode = TYPE_MODE (type);
10304 unsignedp = TREE_UNSIGNED (type);
10305 code = unsignedp ? unsigned_code : signed_code;
10307 #ifdef HAVE_canonicalize_funcptr_for_compare
10308 /* If function pointers need to be "canonicalized" before they can
10309 be reliably compared, then canonicalize them. */
10310 if (HAVE_canonicalize_funcptr_for_compare
10311 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10312 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10313 == FUNCTION_TYPE))
10315 rtx new_op0 = gen_reg_rtx (mode);
10317 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10318 op0 = new_op0;
10321 if (HAVE_canonicalize_funcptr_for_compare
10322 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10323 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10324 == FUNCTION_TYPE))
10326 rtx new_op1 = gen_reg_rtx (mode);
10328 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10329 op1 = new_op1;
10331 #endif
10333 /* Do any postincrements in the expression that was tested. */
10334 emit_queue ();
10336 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10337 ((mode == BLKmode)
10338 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10339 MIN (align0, align1),
10340 if_false_label, if_true_label);
10343 /* Generate code to calculate EXP using a store-flag instruction
10344 and return an rtx for the result. EXP is either a comparison
10345 or a TRUTH_NOT_EXPR whose operand is a comparison.
10347 If TARGET is nonzero, store the result there if convenient.
10349 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10350 cheap.
10352 Return zero if there is no suitable set-flag instruction
10353 available on this machine.
10355 Once expand_expr has been called on the arguments of the comparison,
10356 we are committed to doing the store flag, since it is not safe to
10357 re-evaluate the expression. We emit the store-flag insn by calling
10358 emit_store_flag, but only expand the arguments if we have a reason
10359 to believe that emit_store_flag will be successful. If we think that
10360 it will, but it isn't, we have to simulate the store-flag with a
10361 set/jump/set sequence. */
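/* Hedged sketch of the two outcomes: for `r = (a < b)' we either get a single
   store-flag (scc) insn from emit_store_flag, or, if that fails, the
   simulated sequence

	r = 1;
	if (a < b) goto L;
	r = 0;
     L:

   emitted by the set/compare/jump/set code at the end of this function.  */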
10363 static rtx
10364 do_store_flag (exp, target, mode, only_cheap)
10365 tree exp;
10366 rtx target;
10367 enum machine_mode mode;
10368 int only_cheap;
10370 enum rtx_code code;
10371 tree arg0, arg1, type;
10372 tree tem;
10373 enum machine_mode operand_mode;
10374 int invert = 0;
10375 int unsignedp;
10376 rtx op0, op1;
10377 enum insn_code icode;
10378 rtx subtarget = target;
10379 rtx result, label;
10381 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10382 result at the end. We can't simply invert the test since it would
10383 have already been inverted if it were valid. This case occurs for
10384 some floating-point comparisons. */
10386 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10387 invert = 1, exp = TREE_OPERAND (exp, 0);
10389 arg0 = TREE_OPERAND (exp, 0);
10390 arg1 = TREE_OPERAND (exp, 1);
10392 /* Don't crash if the comparison was erroneous. */
10393 if (arg0 == error_mark_node || arg1 == error_mark_node)
10394 return const0_rtx;
10396 type = TREE_TYPE (arg0);
10397 operand_mode = TYPE_MODE (type);
10398 unsignedp = TREE_UNSIGNED (type);
10400 /* We won't bother with BLKmode store-flag operations because it would mean
10401 passing a lot of information to emit_store_flag. */
10402 if (operand_mode == BLKmode)
10403 return 0;
10405 /* We won't bother with store-flag operations involving function pointers
10406 when function pointers must be canonicalized before comparisons. */
10407 #ifdef HAVE_canonicalize_funcptr_for_compare
10408 if (HAVE_canonicalize_funcptr_for_compare
10409 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10410 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10411 == FUNCTION_TYPE))
10412 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10413 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10414 == FUNCTION_TYPE))))
10415 return 0;
10416 #endif
10418 STRIP_NOPS (arg0);
10419 STRIP_NOPS (arg1);
10421 /* Get the rtx comparison code to use. We know that EXP is a comparison
10422 operation of some type. Some comparisons against 1 and -1 can be
10423 converted to comparisons with zero. Do so here so that the tests
10424 below will be aware that we have a comparison with zero. These
10425 tests will not catch constants in the first operand, but constants
10426 are rarely passed as the first operand. */
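/* Hedged examples of the conversions just mentioned: `x < 1' becomes
   `x <= 0' (using LEU when X is unsigned), and for signed X `x > -1'
   becomes `x >= 0'; both leave us comparing against zero, as the cases
   below expect.  */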
10428 switch (TREE_CODE (exp))
10430 case EQ_EXPR:
10431 code = EQ;
10432 break;
10433 case NE_EXPR:
10434 code = NE;
10435 break;
10436 case LT_EXPR:
10437 if (integer_onep (arg1))
10438 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10439 else
10440 code = unsignedp ? LTU : LT;
10441 break;
10442 case LE_EXPR:
10443 if (! unsignedp && integer_all_onesp (arg1))
10444 arg1 = integer_zero_node, code = LT;
10445 else
10446 code = unsignedp ? LEU : LE;
10447 break;
10448 case GT_EXPR:
10449 if (! unsignedp && integer_all_onesp (arg1))
10450 arg1 = integer_zero_node, code = GE;
10451 else
10452 code = unsignedp ? GTU : GT;
10453 break;
10454 case GE_EXPR:
10455 if (integer_onep (arg1))
10456 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10457 else
10458 code = unsignedp ? GEU : GE;
10459 break;
10461 case UNORDERED_EXPR:
10462 code = UNORDERED;
10463 break;
10464 case ORDERED_EXPR:
10465 code = ORDERED;
10466 break;
10467 case UNLT_EXPR:
10468 code = UNLT;
10469 break;
10470 case UNLE_EXPR:
10471 code = UNLE;
10472 break;
10473 case UNGT_EXPR:
10474 code = UNGT;
10475 break;
10476 case UNGE_EXPR:
10477 code = UNGE;
10478 break;
10479 case UNEQ_EXPR:
10480 code = UNEQ;
10481 break;
10483 default:
10484 abort ();
10487 /* Put a constant second. */
10488 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10490 tem = arg0; arg0 = arg1; arg1 = tem;
10491 code = swap_condition (code);
10494 /* If this is an equality or inequality test of a single bit, we can
10495 do this by shifting the bit being tested to the low-order bit and
10496 masking the result with the constant 1. If the condition was EQ,
10497 we xor it with 1. This does not require an scc insn and is faster
10498 than an scc insn even if we have it. */
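/* Hedged example of the transformation described above: `(x & 8) != 0'
   becomes `(x >> 3) & 1', while `(x & 8) == 0' becomes `((x >> 3) ^ 1) & 1';
   when the bit tested is the sign bit, the final AND can be dropped.  */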
10500 if ((code == NE || code == EQ)
10501 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10502 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10504 tree inner = TREE_OPERAND (arg0, 0);
10505 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10506 int ops_unsignedp;
10508 /* If INNER is a right shift of a constant and it plus BITNUM does
10509 not overflow, adjust BITNUM and INNER. */
10511 if (TREE_CODE (inner) == RSHIFT_EXPR
10512 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10513 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10514 && bitnum < TYPE_PRECISION (type)
10515 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10516 bitnum - TYPE_PRECISION (type)))
10518 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10519 inner = TREE_OPERAND (inner, 0);
10522 /* If we are going to be able to omit the AND below, we must do our
10523 operations as unsigned. If we must use the AND, we have a choice.
10524 Normally unsigned is faster, but for some machines signed is. */
10525 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10526 #ifdef LOAD_EXTEND_OP
10527 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10528 #else
10530 #endif
10533 if (! get_subtarget (subtarget)
10534 || GET_MODE (subtarget) != operand_mode
10535 || ! safe_from_p (subtarget, inner, 1))
10536 subtarget = 0;
10538 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10540 if (bitnum != 0)
10541 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10542 size_int (bitnum), subtarget, ops_unsignedp);
10544 if (GET_MODE (op0) != mode)
10545 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10547 if ((code == EQ && ! invert) || (code == NE && invert))
10548 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10549 ops_unsignedp, OPTAB_LIB_WIDEN);
10551 /* Put the AND last so it can combine with more things. */
10552 if (bitnum != TYPE_PRECISION (type) - 1)
10553 op0 = expand_and (op0, const1_rtx, subtarget);
10555 return op0;
10558 /* Now see if we are likely to be able to do this. Return if not. */
10559 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10560 return 0;
10562 icode = setcc_gen_code[(int) code];
10563 if (icode == CODE_FOR_nothing
10564 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10566 /* We can only do this if it is one of the special cases that
10567 can be handled without an scc insn. */
10568 if ((code == LT && integer_zerop (arg1))
10569 || (! only_cheap && code == GE && integer_zerop (arg1)))
10571 else if (BRANCH_COST >= 0
10572 && ! only_cheap && (code == NE || code == EQ)
10573 && TREE_CODE (type) != REAL_TYPE
10574 && ((abs_optab->handlers[(int) operand_mode].insn_code
10575 != CODE_FOR_nothing)
10576 || (ffs_optab->handlers[(int) operand_mode].insn_code
10577 != CODE_FOR_nothing)))
10579 else
10580 return 0;
10583 if (! get_subtarget (target)
10584 || GET_MODE (subtarget) != operand_mode
10585 || ! safe_from_p (subtarget, arg1, 1))
10586 subtarget = 0;
10588 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10589 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10591 if (target == 0)
10592 target = gen_reg_rtx (mode);
10594 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10595 because, if emit_store_flag does anything it will succeed and
10596 OP0 and OP1 will not be used subsequently. */
10598 result = emit_store_flag (target, code,
10599 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10600 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10601 operand_mode, unsignedp, 1);
10603 if (result)
10605 if (invert)
10606 result = expand_binop (mode, xor_optab, result, const1_rtx,
10607 result, 0, OPTAB_LIB_WIDEN);
10608 return result;
10611 /* If this failed, we have to do this with set/compare/jump/set code. */
10612 if (GET_CODE (target) != REG
10613 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10614 target = gen_reg_rtx (GET_MODE (target));
10616 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10617 result = compare_from_rtx (op0, op1, code, unsignedp,
10618 operand_mode, NULL_RTX, 0);
10619 if (GET_CODE (result) == CONST_INT)
10620 return (((result == const0_rtx && ! invert)
10621 || (result != const0_rtx && invert))
10622 ? const0_rtx : const1_rtx);
10624 label = gen_label_rtx ();
10625 if (bcc_gen_fctn[(int) code] == 0)
10626 abort ();
10628 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10629 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10630 emit_label (label);
10632 return target;
10635 /* Generate a tablejump instruction (used for switch statements). */
10637 #ifdef HAVE_tablejump
10639 /* INDEX is the value being switched on, with the lowest value
10640 in the table already subtracted.
10641 MODE is its expected mode (needed if INDEX is constant).
10642 RANGE is the length of the jump table.
10643 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10645 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10646 index value is out of range. */
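/* Hedged sketch of the code emitted below for a non-PIC, absolute case
   vector:

	if ((unsigned) index > range) goto default_label;
	pc = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));

   i.e. one unsigned bounds check followed by an indirect jump through the
   dispatch table.  */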
10648 void
10649 do_tablejump (index, mode, range, table_label, default_label)
10650 rtx index, range, table_label, default_label;
10651 enum machine_mode mode;
10653 register rtx temp, vector;
10655 /* Do an unsigned comparison (in the proper mode) between the index
10656 expression and the value which represents the length of the range.
10657 Since we just finished subtracting the lower bound of the range
10658 from the index expression, this comparison allows us to simultaneously
10659 check that the original index expression value is both greater than
10660 or equal to the minimum value of the range and less than or equal to
10661 the maximum value of the range. */
10663 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10664 0, default_label);
10666 /* If index is in range, it must fit in Pmode.
10667 Convert to Pmode so we can index with it. */
10668 if (mode != Pmode)
10669 index = convert_to_mode (Pmode, index, 1);
10671 /* Don't let a MEM slip through, because then INDEX that comes
10672 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10673 and break_out_memory_refs will go to work on it and mess it up. */
10674 #ifdef PIC_CASE_VECTOR_ADDRESS
10675 if (flag_pic && GET_CODE (index) != REG)
10676 index = copy_to_mode_reg (Pmode, index);
10677 #endif
10679 /* If flag_force_addr were to affect this address
10680 it could interfere with the tricky assumptions made
10681 about addresses that contain label-refs,
10682 which may be valid only very near the tablejump itself. */
10683 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10684 GET_MODE_SIZE, because this indicates how large insns are. The other
10685 uses should all be Pmode, because they are addresses. This code
10686 could fail if addresses and insns are not the same size. */
10687 index = gen_rtx_PLUS (Pmode,
10688 gen_rtx_MULT (Pmode, index,
10689 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10690 gen_rtx_LABEL_REF (Pmode, table_label));
10691 #ifdef PIC_CASE_VECTOR_ADDRESS
10692 if (flag_pic)
10693 index = PIC_CASE_VECTOR_ADDRESS (index);
10694 else
10695 #endif
10696 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10697 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10698 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10699 RTX_UNCHANGING_P (vector) = 1;
10700 convert_move (temp, vector, 0);
10702 emit_jump_insn (gen_tablejump (temp, table_label));
10704 /* If we are generating PIC code or if the table is PC-relative, the
10705 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10706 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10707 emit_barrier ();
10710 #endif /* HAVE_tablejump */