1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "intl.h"
43 #include "tm_p.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
51 #ifdef PUSH_ROUNDING
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
55 #endif
57 #endif
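/* For illustration (a hedged sketch of the preprocessor test above): on a
   target where STACK_GROWS_DOWNWARD is defined but ARGS_GROW_DOWNWARD is
   not, `defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)'
   compares 1 with 0, so PUSH_ARGS_REVERSED is defined and arguments are
   processed from last to first; when both or neither are defined, the
   stack and the argument area grow the same way and the macro is left
   undefined.  */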
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
62 #else
63 #define STACK_PUSH_CODE PRE_INC
64 #endif
65 #endif
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
70 #endif
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
74 codes that safe_from_p needs to know about. Since safe_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
78 parameter. */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
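/* For illustration, a front end might install the hook above roughly as
   follows (a hypothetical sketch; the tree code and the function name are
   invented for the example):

      static int
      lang_specific_safe_from_p (x, exp)
           rtx x;
           tree exp;
      {
        if (TREE_CODE (exp) == LANG_SPECIFIC_EXPR)
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
        return 1;
      }

      ...
      lang_safe_from_p = lang_specific_safe_from_p;

   Note that the nested call passes `0' for the TOP_P parameter, as
   required by the comment above.  */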
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* Don't check memory usage, since code is being emitted to check a memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
100 {
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
112 };
114 /* This structure is used by store_by_pieces to describe the clear to
115 be performed. */
117 struct store_by_pieces
118 {
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
126 PTR constfundata;
127 int reverse;
128 };
130 extern struct obstack permanent_obstack;
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
155 int));
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
157 HOST_WIDE_INT));
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
184 /* If a memory-to-memory move would take MOVE_RATIO or more simple
185 move-instruction sequences, we will do a movstr or libcall instead. */
187 #ifndef MOVE_RATIO
188 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
189 #define MOVE_RATIO 2
190 #else
191 /* If we are optimizing for space (-Os), cut down the default move ratio. */
192 #define MOVE_RATIO (optimize_size ? 3 : 15)
193 #endif
194 #endif
196 /* This macro is used to determine whether move_by_pieces should be called
197 to perform a structure copy. */
198 #ifndef MOVE_BY_PIECES_P
199 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
201 #endif
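/* A worked example of the heuristic above (a sketch, assuming a target
   that defines neither MOVE_RATIO nor any movstr pattern, with a 32-bit
   word, MOVE_MAX == 4 and alignment expressed in bits): copying 32 bytes
   at word alignment takes 32/4 = 8 single-word moves, so
   MOVE_BY_PIECES_P (32, 32) is true while MOVE_RATIO is 15 and the copy
   is expanded inline; with -Os the ratio drops to 3, the test fails, and
   emit_block_move falls back to a library call.  */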
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
206 /* This array records the insn_code of insns to perform block clears. */
207 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
209 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
211 #ifndef SLOW_UNALIGNED_ACCESS
212 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
213 #endif
215 /* This is run once per compilation to set up which modes can be used
216 directly in memory and to initialize the block move optab. */
218 void
219 init_expr_once ()
221 rtx insn, pat;
222 enum machine_mode mode;
223 int num_clobbers;
224 rtx mem, mem1;
226 start_sequence ();
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
235 pat = PATTERN (insn);
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
240 int regno;
241 rtx reg;
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
253 regno++)
255 if (! HARD_REGNO_MODE_OK (regno, mode))
256 continue;
258 reg = gen_rtx_REG (mode, regno);
260 SET_SRC (pat) = mem;
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = reg;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
282 end_sequence ();
285 /* This is run at the start of compiling a function. */
287 void
288 init_expr ()
290 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
292 pending_chain = 0;
293 pending_stack_adjust = 0;
294 stack_pointer_delta = 0;
295 inhibit_defer_pop = 0;
296 saveregs_value = 0;
297 apply_args_value = 0;
298 forced_labels = 0;
301 void
302 mark_expr_status (p)
303 struct expr_status *p;
305 if (p == NULL)
306 return;
308 ggc_mark_rtx (p->x_saveregs_value);
309 ggc_mark_rtx (p->x_apply_args_value);
310 ggc_mark_rtx (p->x_forced_labels);
313 void
314 free_expr_status (f)
315 struct function *f;
317 free (f->expr);
318 f->expr = NULL;
321 /* Small sanity check that the queue is empty at the end of a function. */
323 void
324 finish_expr_for_function ()
326 if (pending_chain)
327 abort ();
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
333 /* Queue up to increment (or change) VAR later. BODY says how:
334 BODY should be the same thing you would pass to emit_insn
335 to increment right away. It will go to emit_insn later on.
337 The value is a QUEUED expression to be used in place of VAR
338 where you want to guarantee the pre-incrementation value of VAR. */
340 static rtx
341 enqueue_insn (var, body)
342 rtx var, body;
344 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
345 body, pending_chain);
346 return pending_chain;
349 /* Use protect_from_queue to convert a QUEUED expression
350 into something that you can put immediately into an instruction.
351 If the queued incrementation has not happened yet,
352 protect_from_queue returns the variable itself.
353 If the incrementation has happened, protect_from_queue returns a temp
354 that contains a copy of the old value of the variable.
356 Any time an rtx which might possibly be a QUEUED is to be put
357 into an instruction, it must be passed through protect_from_queue first.
358 QUEUED expressions are not meaningful in instructions.
360 Do not pass a value through protect_from_queue and then hold
361 on to it for a while before putting it in an instruction!
362 If the queue is flushed in between, incorrect code will result. */
364 rtx
365 protect_from_queue (x, modify)
366 register rtx x;
367 int modify;
369 register RTX_CODE code = GET_CODE (x);
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain == 0)
374 return x;
375 #endif
377 if (code != QUEUED)
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382 of mode BLKmode. Don't modify X in place since it might be
383 shared. */
384 if (code == MEM && GET_MODE (x) != BLKmode
385 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
387 register rtx y = XEXP (x, 0);
388 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
390 MEM_COPY_ATTRIBUTES (new, x);
392 if (QUEUED_INSN (y))
394 register rtx temp = gen_reg_rtx (GET_MODE (new));
395 emit_insn_before (gen_move_insn (temp, new),
396 QUEUED_INSN (y));
397 return temp;
399 return new;
401 /* Otherwise, recursively protect the subexpressions of all
402 the kinds of rtx's that can contain a QUEUED. */
403 if (code == MEM)
405 rtx tem = protect_from_queue (XEXP (x, 0), 0);
406 if (tem != XEXP (x, 0))
408 x = copy_rtx (x);
409 XEXP (x, 0) = tem;
412 else if (code == PLUS || code == MULT)
414 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
415 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
416 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
418 x = copy_rtx (x);
419 XEXP (x, 0) = new0;
420 XEXP (x, 1) = new1;
423 return x;
425 /* If the increment has not happened, use the variable itself. */
426 if (QUEUED_INSN (x) == 0)
427 return QUEUED_VAR (x);
428 /* If the increment has happened and a pre-increment copy exists,
429 use that copy. */
430 if (QUEUED_COPY (x) != 0)
431 return QUEUED_COPY (x);
432 /* The increment has happened but we haven't set up a pre-increment copy.
433 Set one up now, and use it. */
434 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
435 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
436 QUEUED_INSN (x));
437 return QUEUED_COPY (x);
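/* An illustrative sketch of how the queue machinery is used (hypothetical
   code, for exposition only; the real callers are expand_increment and
   the expansion routines below).  To expand a post-increment, the
   increment is queued and the variable is read through protect_from_queue:

      rtx var = ...rtx for the variable being post-incremented...;
      rtx body = gen_move_insn (var, plus_constant (var, 1));
      rtx q = enqueue_insn (var, body);
      ...
      rtx val = protect_from_queue (q, 0);
      ...VAL may now be put into an insn; it is VAR itself while the
         increment is still pending, or a copy of the old value of VAR
         once the increment has been emitted...
      emit_queue ();  */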
440 /* Return nonzero if X contains a QUEUED expression:
441 if it contains anything that will be altered by a queued increment.
442 We handle only combinations of MEM, PLUS, MINUS and MULT operators
443 since memory addresses generally contain only those. */
445 int
446 queued_subexp_p (x)
447 rtx x;
449 register enum rtx_code code = GET_CODE (x);
450 switch (code)
452 case QUEUED:
453 return 1;
454 case MEM:
455 return queued_subexp_p (XEXP (x, 0));
456 case MULT:
457 case PLUS:
458 case MINUS:
459 return (queued_subexp_p (XEXP (x, 0))
460 || queued_subexp_p (XEXP (x, 1)));
461 default:
462 return 0;
466 /* Perform all the pending incrementations. */
468 void
469 emit_queue ()
471 register rtx p;
472 while ((p = pending_chain))
474 rtx body = QUEUED_BODY (p);
476 if (GET_CODE (body) == SEQUENCE)
478 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
479 emit_insn (QUEUED_BODY (p));
481 else
482 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
483 pending_chain = QUEUED_NEXT (p);
487 /* Copy data from FROM to TO, where the machine modes are not the same.
488 Both modes may be integer, or both may be floating.
489 UNSIGNEDP should be nonzero if FROM is an unsigned type.
490 This causes zero-extension instead of sign-extension. */
492 void
493 convert_move (to, from, unsignedp)
494 register rtx to, from;
495 int unsignedp;
497 enum machine_mode to_mode = GET_MODE (to);
498 enum machine_mode from_mode = GET_MODE (from);
499 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
500 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
501 enum insn_code code;
502 rtx libcall;
504 /* rtx code for making an equivalent value. */
505 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
507 to = protect_from_queue (to, 1);
508 from = protect_from_queue (from, 0);
510 if (to_real != from_real)
511 abort ();
513 /* If FROM is a SUBREG that indicates that we have already done at least
514 the required extension, strip it. We don't handle such SUBREGs as
515 TO here. */
517 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
518 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
519 >= GET_MODE_SIZE (to_mode))
520 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
521 from = gen_lowpart (to_mode, from), from_mode = to_mode;
523 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
524 abort ();
526 if (to_mode == from_mode
527 || (from_mode == VOIDmode && CONSTANT_P (from)))
529 emit_move_insn (to, from);
530 return;
533 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
535 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
536 abort ();
538 if (VECTOR_MODE_P (to_mode))
539 from = gen_rtx_SUBREG (to_mode, from, 0);
540 else
541 to = gen_rtx_SUBREG (from_mode, to, 0);
543 emit_move_insn (to, from);
544 return;
547 if (to_real != from_real)
548 abort ();
550 if (to_real)
552 rtx value, insns;
554 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
556 /* Try converting directly if the insn is supported. */
557 if ((code = can_extend_p (to_mode, from_mode, 0))
558 != CODE_FOR_nothing)
560 emit_unop_insn (code, to, from, UNKNOWN);
561 return;
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
569 return;
571 #endif
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
576 return;
578 #endif
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
583 return;
585 #endif
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
590 return;
592 #endif
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
597 return;
599 #endif
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
603 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
604 return;
606 #endif
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
611 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
612 return;
614 #endif
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
618 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
619 return;
621 #endif
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
625 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
626 return;
628 #endif
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
632 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
633 return;
635 #endif
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
639 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
640 return;
642 #endif
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
647 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
648 return;
650 #endif
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
654 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
655 return;
657 #endif
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
661 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
662 return;
664 #endif
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
668 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
669 return;
671 #endif
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
676 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
677 return;
679 #endif
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
683 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
684 return;
686 #endif
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
690 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
691 return;
693 #endif
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
697 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
698 return;
700 #endif
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
704 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
705 return;
707 #endif
709 libcall = (rtx) 0;
710 switch (from_mode)
712 case SFmode:
713 switch (to_mode)
715 case DFmode:
716 libcall = extendsfdf2_libfunc;
717 break;
719 case XFmode:
720 libcall = extendsfxf2_libfunc;
721 break;
723 case TFmode:
724 libcall = extendsftf2_libfunc;
725 break;
727 default:
728 break;
730 break;
732 case DFmode:
733 switch (to_mode)
735 case SFmode:
736 libcall = truncdfsf2_libfunc;
737 break;
739 case XFmode:
740 libcall = extenddfxf2_libfunc;
741 break;
743 case TFmode:
744 libcall = extenddftf2_libfunc;
745 break;
747 default:
748 break;
750 break;
752 case XFmode:
753 switch (to_mode)
755 case SFmode:
756 libcall = truncxfsf2_libfunc;
757 break;
759 case DFmode:
760 libcall = truncxfdf2_libfunc;
761 break;
763 default:
764 break;
766 break;
768 case TFmode:
769 switch (to_mode)
771 case SFmode:
772 libcall = trunctfsf2_libfunc;
773 break;
775 case DFmode:
776 libcall = trunctfdf2_libfunc;
777 break;
779 default:
780 break;
782 break;
784 default:
785 break;
788 if (libcall == (rtx) 0)
789 /* This conversion is not implemented yet. */
790 abort ();
792 start_sequence ();
793 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
794 1, from, from_mode);
795 insns = get_insns ();
796 end_sequence ();
797 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
798 from));
799 return;
802 /* Now both modes are integers. */
804 /* Handle expanding beyond a word. */
805 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
806 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
808 rtx insns;
809 rtx lowpart;
810 rtx fill_value;
811 rtx lowfrom;
812 int i;
813 enum machine_mode lowpart_mode;
814 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
816 /* Try converting directly if the insn is supported. */
817 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
818 != CODE_FOR_nothing)
820 /* If FROM is a SUBREG, put it into a register. Do this
821 so that we always generate the same set of insns for
822 better cse'ing; if an intermediate assignment occurred,
823 we won't be doing the operation directly on the SUBREG. */
824 if (optimize > 0 && GET_CODE (from) == SUBREG)
825 from = force_reg (from_mode, from);
826 emit_unop_insn (code, to, from, equiv_code);
827 return;
829 /* Next, try converting via full word. */
830 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
831 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
832 != CODE_FOR_nothing))
834 if (GET_CODE (to) == REG)
835 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
836 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
837 emit_unop_insn (code, to,
838 gen_lowpart (word_mode, to), equiv_code);
839 return;
842 /* No special multiword conversion insn; do it by hand. */
843 start_sequence ();
845 /* Since we will turn this into a no conflict block, we must ensure
846 that the source does not overlap the target. */
848 if (reg_overlap_mentioned_p (to, from))
849 from = force_reg (from_mode, from);
851 /* Get a copy of FROM widened to a word, if necessary. */
852 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
853 lowpart_mode = word_mode;
854 else
855 lowpart_mode = from_mode;
857 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
859 lowpart = gen_lowpart (lowpart_mode, to);
860 emit_move_insn (lowpart, lowfrom);
862 /* Compute the value to put in each remaining word. */
863 if (unsignedp)
864 fill_value = const0_rtx;
865 else
867 #ifdef HAVE_slt
868 if (HAVE_slt
869 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
870 && STORE_FLAG_VALUE == -1)
872 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
873 lowpart_mode, 0, 0);
874 fill_value = gen_reg_rtx (word_mode);
875 emit_insn (gen_slt (fill_value));
877 else
878 #endif
880 fill_value
881 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
882 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
883 NULL_RTX, 0);
884 fill_value = convert_to_mode (word_mode, fill_value, 1);
888 /* Fill the remaining words. */
889 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
891 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
892 rtx subword = operand_subword (to, index, 1, to_mode);
894 if (subword == 0)
895 abort ();
897 if (fill_value != subword)
898 emit_move_insn (subword, fill_value);
901 insns = get_insns ();
902 end_sequence ();
904 emit_no_conflict_block (insns, to, from, NULL_RTX,
905 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
906 return;
909 /* Truncating multi-word to a word or less. */
910 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
911 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
913 if (!((GET_CODE (from) == MEM
914 && ! MEM_VOLATILE_P (from)
915 && direct_load[(int) to_mode]
916 && ! mode_dependent_address_p (XEXP (from, 0)))
917 || GET_CODE (from) == REG
918 || GET_CODE (from) == SUBREG))
919 from = force_reg (from_mode, from);
920 convert_move (to, gen_lowpart (word_mode, from), 0);
921 return;
924 /* Handle pointer conversion. */ /* SPEE 900220. */
925 if (to_mode == PQImode)
927 if (from_mode != QImode)
928 from = convert_to_mode (QImode, from, unsignedp);
930 #ifdef HAVE_truncqipqi2
931 if (HAVE_truncqipqi2)
933 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
934 return;
936 #endif /* HAVE_truncqipqi2 */
937 abort ();
940 if (from_mode == PQImode)
942 if (to_mode != QImode)
944 from = convert_to_mode (QImode, from, unsignedp);
945 from_mode = QImode;
947 else
949 #ifdef HAVE_extendpqiqi2
950 if (HAVE_extendpqiqi2)
952 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
953 return;
955 #endif /* HAVE_extendpqiqi2 */
956 abort ();
960 if (to_mode == PSImode)
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
969 return;
971 #endif /* HAVE_truncsipsi2 */
972 abort ();
975 if (from_mode == PSImode)
977 if (to_mode != SImode)
979 from = convert_to_mode (SImode, from, unsignedp);
980 from_mode = SImode;
982 else
984 #ifdef HAVE_extendpsisi2
985 if (! unsignedp && HAVE_extendpsisi2)
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
988 return;
990 #endif /* HAVE_extendpsisi2 */
991 #ifdef HAVE_zero_extendpsisi2
992 if (unsignedp && HAVE_zero_extendpsisi2)
994 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
995 return;
997 #endif /* HAVE_zero_extendpsisi2 */
998 abort ();
1002 if (to_mode == PDImode)
1004 if (from_mode != DImode)
1005 from = convert_to_mode (DImode, from, unsignedp);
1007 #ifdef HAVE_truncdipdi2
1008 if (HAVE_truncdipdi2)
1010 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1011 return;
1013 #endif /* HAVE_truncdipdi2 */
1014 abort ();
1017 if (from_mode == PDImode)
1019 if (to_mode != DImode)
1021 from = convert_to_mode (DImode, from, unsignedp);
1022 from_mode = DImode;
1024 else
1026 #ifdef HAVE_extendpdidi2
1027 if (HAVE_extendpdidi2)
1029 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1030 return;
1032 #endif /* HAVE_extendpdidi2 */
1033 abort ();
1037 /* Now follow all the conversions between integers
1038 no more than a word long. */
1040 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1041 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1042 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1043 GET_MODE_BITSIZE (from_mode)))
1045 if (!((GET_CODE (from) == MEM
1046 && ! MEM_VOLATILE_P (from)
1047 && direct_load[(int) to_mode]
1048 && ! mode_dependent_address_p (XEXP (from, 0)))
1049 || GET_CODE (from) == REG
1050 || GET_CODE (from) == SUBREG))
1051 from = force_reg (from_mode, from);
1052 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1053 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1054 from = copy_to_reg (from);
1055 emit_move_insn (to, gen_lowpart (to_mode, from));
1056 return;
1059 /* Handle extension. */
1060 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1062 /* Convert directly if that works. */
1063 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1064 != CODE_FOR_nothing)
1066 emit_unop_insn (code, to, from, equiv_code);
1067 return;
1069 else
1071 enum machine_mode intermediate;
1072 rtx tmp;
1073 tree shift_amount;
1075 /* Search for a mode to convert via. */
1076 for (intermediate = from_mode; intermediate != VOIDmode;
1077 intermediate = GET_MODE_WIDER_MODE (intermediate))
1078 if (((can_extend_p (to_mode, intermediate, unsignedp)
1079 != CODE_FOR_nothing)
1080 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1081 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1082 GET_MODE_BITSIZE (intermediate))))
1083 && (can_extend_p (intermediate, from_mode, unsignedp)
1084 != CODE_FOR_nothing))
1086 convert_move (to, convert_to_mode (intermediate, from,
1087 unsignedp), unsignedp);
1088 return;
1091 /* No suitable intermediate mode.
1092 Generate what we need with shifts. */
1093 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1094 - GET_MODE_BITSIZE (from_mode), 0);
1095 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1096 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1097 to, unsignedp);
1098 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1099 to, unsignedp);
1100 if (tmp != to)
1101 emit_move_insn (to, tmp);
1102 return;
1106 /* Support special truncate insns for certain modes. */
1108 if (from_mode == DImode && to_mode == SImode)
1110 #ifdef HAVE_truncdisi2
1111 if (HAVE_truncdisi2)
1113 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1114 return;
1116 #endif
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1118 return;
1121 if (from_mode == DImode && to_mode == HImode)
1123 #ifdef HAVE_truncdihi2
1124 if (HAVE_truncdihi2)
1126 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1127 return;
1129 #endif
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1131 return;
1134 if (from_mode == DImode && to_mode == QImode)
1136 #ifdef HAVE_truncdiqi2
1137 if (HAVE_truncdiqi2)
1139 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1140 return;
1142 #endif
1143 convert_move (to, force_reg (from_mode, from), unsignedp);
1144 return;
1147 if (from_mode == SImode && to_mode == HImode)
1149 #ifdef HAVE_truncsihi2
1150 if (HAVE_truncsihi2)
1152 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1153 return;
1155 #endif
1156 convert_move (to, force_reg (from_mode, from), unsignedp);
1157 return;
1160 if (from_mode == SImode && to_mode == QImode)
1162 #ifdef HAVE_truncsiqi2
1163 if (HAVE_truncsiqi2)
1165 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1166 return;
1168 #endif
1169 convert_move (to, force_reg (from_mode, from), unsignedp);
1170 return;
1173 if (from_mode == HImode && to_mode == QImode)
1175 #ifdef HAVE_trunchiqi2
1176 if (HAVE_trunchiqi2)
1178 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1179 return;
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1186 if (from_mode == TImode && to_mode == DImode)
1188 #ifdef HAVE_trunctidi2
1189 if (HAVE_trunctidi2)
1191 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1192 return;
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1199 if (from_mode == TImode && to_mode == SImode)
1201 #ifdef HAVE_trunctisi2
1202 if (HAVE_trunctisi2)
1204 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1205 return;
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1212 if (from_mode == TImode && to_mode == HImode)
1214 #ifdef HAVE_trunctihi2
1215 if (HAVE_trunctihi2)
1217 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1218 return;
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1225 if (from_mode == TImode && to_mode == QImode)
1227 #ifdef HAVE_trunctiqi2
1228 if (HAVE_trunctiqi2)
1230 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1231 return;
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1238 /* Handle truncation of volatile memrefs, and so on;
1239 the things that couldn't be truncated directly,
1240 and for which there was no special instruction. */
1241 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1243 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1244 emit_move_insn (to, temp);
1245 return;
1248 /* Mode combination is not recognized. */
1249 abort ();
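/* For illustration (a hypothetical sketch, not taken from a real caller):
   widening a QImode value into an SImode register with zero extension
   would look roughly like

      rtx byte_reg = gen_reg_rtx (QImode);
      rtx word_reg = gen_reg_rtx (SImode);
      ...
      convert_move (word_reg, byte_reg, 1);

   where the final argument (UNSIGNEDP == 1) selects zero extension;
   passing 0 would request sign extension instead.  */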
1252 /* Return an rtx for a value that would result
1253 from converting X to mode MODE.
1254 Both X and MODE may be floating, or both integer.
1255 UNSIGNEDP is nonzero if X is an unsigned value.
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1259 This function *must not* call protect_from_queue
1260 except when putting X into an insn (in which case convert_move does it). */
1262 rtx
1263 convert_to_mode (mode, x, unsignedp)
1264 enum machine_mode mode;
1265 rtx x;
1266 int unsignedp;
1268 return convert_modes (mode, VOIDmode, x, unsignedp);
1271 /* Return an rtx for a value that would result
1272 from converting X from mode OLDMODE to mode MODE.
1273 Both modes may be floating, or both integer.
1274 UNSIGNEDP is nonzero if X is an unsigned value.
1276 This can be done by referring to a part of X in place
1277 or by copying to a new temporary with conversion.
1279 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1281 This function *must not* call protect_from_queue
1282 except when putting X into an insn (in which case convert_move does it). */
1284 rtx
1285 convert_modes (mode, oldmode, x, unsignedp)
1286 enum machine_mode mode, oldmode;
1287 rtx x;
1288 int unsignedp;
1290 register rtx temp;
1292 /* If FROM is a SUBREG that indicates that we have already done at least
1293 the required extension, strip it. */
1295 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1296 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1297 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1298 x = gen_lowpart (mode, x);
1300 if (GET_MODE (x) != VOIDmode)
1301 oldmode = GET_MODE (x);
1303 if (mode == oldmode)
1304 return x;
1306 /* There is one case that we must handle specially: If we are converting
1307 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1308 we are to interpret the constant as unsigned, gen_lowpart will do
1309 the wrong thing if the constant appears negative. What we want to do is
1310 make the high-order word of the constant zero, not all ones. */
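/* A worked example of the case just described (assuming a 32-bit
   HOST_WIDE_INT and a 64-bit MODE): converting the CONST_INT -1, to be
   interpreted as an unsigned 32-bit value, must produce a constant whose
   low-order word is all ones and whose high-order word is zero
   (0x00000000ffffffff).  gen_lowpart would sign-extend and set both
   words to all ones, so the code below builds the value explicitly with
   immed_double_const (val, 0, mode).  */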
1312 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1313 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1314 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1316 HOST_WIDE_INT val = INTVAL (x);
1318 if (oldmode != VOIDmode
1319 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1321 int width = GET_MODE_BITSIZE (oldmode);
1323 /* We need to zero extend VAL. */
1324 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1327 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1330 /* We can do this with a gen_lowpart if both desired and current modes
1331 are integer, and this is either a constant integer, a register, or a
1332 non-volatile MEM. Except for the constant case where MODE is no
1333 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1335 if ((GET_CODE (x) == CONST_INT
1336 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1337 || (GET_MODE_CLASS (mode) == MODE_INT
1338 && GET_MODE_CLASS (oldmode) == MODE_INT
1339 && (GET_CODE (x) == CONST_DOUBLE
1340 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1341 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1342 && direct_load[(int) mode])
1343 || (GET_CODE (x) == REG
1344 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1345 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1347 /* ??? If we don't know OLDMODE, we have to assume here that
1348 X does not need sign- or zero-extension. This may not be
1349 the case, but it's the best we can do. */
1350 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1351 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1353 HOST_WIDE_INT val = INTVAL (x);
1354 int width = GET_MODE_BITSIZE (oldmode);
1356 /* We must sign or zero-extend in this case. Start by
1357 zero-extending, then sign extend if we need to. */
1358 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1359 if (! unsignedp
1360 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1361 val |= (HOST_WIDE_INT) (-1) << width;
1363 return GEN_INT (trunc_int_for_mode (val, mode));
1366 return gen_lowpart (mode, x);
1369 temp = gen_reg_rtx (mode);
1370 convert_move (temp, x, unsignedp);
1371 return temp;
1374 /* This macro is used to determine what the largest unit size that
1375 move_by_pieces can use is. */
1377 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1378 move efficiently, as opposed to MOVE_MAX which is the maximum
1379 number of bytes we can move with a single instruction. */
1381 #ifndef MOVE_MAX_PIECES
1382 #define MOVE_MAX_PIECES MOVE_MAX
1383 #endif
1385 /* Generate several move instructions to copy LEN bytes
1386 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1387 The caller must pass FROM and TO
1388 through protect_from_queue before calling.
1390 When TO is NULL, the emit_single_push_insn is used to push the
1391 FROM to stack.
1393 ALIGN is maximum alignment we can assume. */
1395 void
1396 move_by_pieces (to, from, len, align)
1397 rtx to, from;
1398 unsigned HOST_WIDE_INT len;
1399 unsigned int align;
1401 struct move_by_pieces data;
1402 rtx to_addr, from_addr = XEXP (from, 0);
1403 unsigned int max_size = MOVE_MAX_PIECES + 1;
1404 enum machine_mode mode = VOIDmode, tmode;
1405 enum insn_code icode;
1407 data.offset = 0;
1408 data.from_addr = from_addr;
1409 if (to)
1411 to_addr = XEXP (to, 0);
1412 data.to = to;
1413 data.autinc_to
1414 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1415 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1416 data.reverse
1417 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1419 else
1421 to_addr = NULL_RTX;
1422 data.to = NULL_RTX;
1423 data.autinc_to = 1;
1424 #ifdef STACK_GROWS_DOWNWARD
1425 data.reverse = 1;
1426 #else
1427 data.reverse = 0;
1428 #endif
1430 data.to_addr = to_addr;
1431 data.from = from;
1432 data.autinc_from
1433 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1434 || GET_CODE (from_addr) == POST_INC
1435 || GET_CODE (from_addr) == POST_DEC);
1437 data.explicit_inc_from = 0;
1438 data.explicit_inc_to = 0;
1439 if (data.reverse) data.offset = len;
1440 data.len = len;
1442 /* If copying requires more than two move insns,
1443 copy addresses to registers (to make displacements shorter)
1444 and use post-increment if available. */
1445 if (!(data.autinc_from && data.autinc_to)
1446 && move_by_pieces_ninsns (len, align) > 2)
1448 /* Find the mode of the largest move... */
1449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1451 if (GET_MODE_SIZE (tmode) < max_size)
1452 mode = tmode;
1454 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1456 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1457 data.autinc_from = 1;
1458 data.explicit_inc_from = -1;
1460 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1462 data.from_addr = copy_addr_to_reg (from_addr);
1463 data.autinc_from = 1;
1464 data.explicit_inc_from = 1;
1466 if (!data.autinc_from && CONSTANT_P (from_addr))
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1470 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1471 data.autinc_to = 1;
1472 data.explicit_inc_to = -1;
1474 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1476 data.to_addr = copy_addr_to_reg (to_addr);
1477 data.autinc_to = 1;
1478 data.explicit_inc_to = 1;
1480 if (!data.autinc_to && CONSTANT_P (to_addr))
1481 data.to_addr = copy_addr_to_reg (to_addr);
1484 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1485 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1486 align = MOVE_MAX * BITS_PER_UNIT;
1488 /* First move what we can in the largest integer mode, then go to
1489 successively smaller modes. */
1491 while (max_size > 1)
1493 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1494 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1495 if (GET_MODE_SIZE (tmode) < max_size)
1496 mode = tmode;
1498 if (mode == VOIDmode)
1499 break;
1501 icode = mov_optab->handlers[(int) mode].insn_code;
1502 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1503 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1505 max_size = GET_MODE_SIZE (mode);
1508 /* The code above should have handled everything. */
1509 if (data.len > 0)
1510 abort ();
1513 /* Return number of insns required to move L bytes by pieces.
1514 ALIGN (in bits) is the maximum alignment we can assume. */
1516 static unsigned HOST_WIDE_INT
1517 move_by_pieces_ninsns (l, align)
1518 unsigned HOST_WIDE_INT l;
1519 unsigned int align;
1521 unsigned HOST_WIDE_INT n_insns = 0;
1522 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1524 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1525 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1526 align = MOVE_MAX * BITS_PER_UNIT;
1528 while (max_size > 1)
1530 enum machine_mode mode = VOIDmode, tmode;
1531 enum insn_code icode;
1533 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1534 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1535 if (GET_MODE_SIZE (tmode) < max_size)
1536 mode = tmode;
1538 if (mode == VOIDmode)
1539 break;
1541 icode = mov_optab->handlers[(int) mode].insn_code;
1542 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1543 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1545 max_size = GET_MODE_SIZE (mode);
1548 if (l)
1549 abort ();
1550 return n_insns;
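/* A worked example of the decomposition computed above and performed by
   move_by_pieces (a sketch, assuming a 32-bit target with MOVE_MAX == 4
   and alignment given in bits): a 7-byte block at word alignment is
   handled as one SImode move (4 bytes), then one HImode move (2 bytes),
   then one QImode move (1 byte), so move_by_pieces_ninsns (7, 32)
   returns 3.  */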
1553 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1554 with move instructions for mode MODE. GENFUN is the gen_... function
1555 to make a move insn for that mode. DATA has all the other info. */
1557 static void
1558 move_by_pieces_1 (genfun, mode, data)
1559 rtx (*genfun) PARAMS ((rtx, ...));
1560 enum machine_mode mode;
1561 struct move_by_pieces *data;
1563 unsigned int size = GET_MODE_SIZE (mode);
1564 rtx to1, from1;
1566 while (data->len >= size)
1568 if (data->reverse)
1569 data->offset -= size;
1571 if (data->to)
1573 if (data->autinc_to)
1575 to1 = gen_rtx_MEM (mode, data->to_addr);
1576 MEM_COPY_ATTRIBUTES (to1, data->to);
1578 else
1579 to1 = change_address (data->to, mode,
1580 plus_constant (data->to_addr, data->offset));
1583 if (data->autinc_from)
1585 from1 = gen_rtx_MEM (mode, data->from_addr);
1586 MEM_COPY_ATTRIBUTES (from1, data->from);
1588 else
1589 from1 = change_address (data->from, mode,
1590 plus_constant (data->from_addr, data->offset));
1592 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1593 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1594 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1595 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1597 if (data->to)
1598 emit_insn ((*genfun) (to1, from1));
1599 else
1600 emit_single_push_insn (mode, from1, NULL);
1602 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1603 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1604 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1605 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1607 if (! data->reverse)
1608 data->offset += size;
1610 data->len -= size;
1614 /* Emit code to move a block Y to a block X.
1615 This may be done with string-move instructions,
1616 with multiple scalar move instructions, or with a library call.
1618 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1619 with mode BLKmode.
1620 SIZE is an rtx that says how long they are.
1621 ALIGN is the maximum alignment we can assume they have.
1623 Return the address of the new block, if memcpy is called and returns it,
1624 0 otherwise. */
1626 rtx
1627 emit_block_move (x, y, size, align)
1628 rtx x, y;
1629 rtx size;
1630 unsigned int align;
1632 rtx retval = 0;
1633 #ifdef TARGET_MEM_FUNCTIONS
1634 static tree fn;
1635 tree call_expr, arg_list;
1636 #endif
1638 if (GET_MODE (x) != BLKmode)
1639 abort ();
1641 if (GET_MODE (y) != BLKmode)
1642 abort ();
1644 x = protect_from_queue (x, 1);
1645 y = protect_from_queue (y, 0);
1646 size = protect_from_queue (size, 0);
1648 if (GET_CODE (x) != MEM)
1649 abort ();
1650 if (GET_CODE (y) != MEM)
1651 abort ();
1652 if (size == 0)
1653 abort ();
1655 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1656 move_by_pieces (x, y, INTVAL (size), align);
1657 else
1659 /* Try the most limited insn first, because there's no point
1660 including more than one in the machine description unless
1661 the more limited one has some advantage. */
1663 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1664 enum machine_mode mode;
1666 /* Since this is a move insn, we don't care about volatility. */
1667 volatile_ok = 1;
1669 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1670 mode = GET_MODE_WIDER_MODE (mode))
1672 enum insn_code code = movstr_optab[(int) mode];
1673 insn_operand_predicate_fn pred;
1675 if (code != CODE_FOR_nothing
1676 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1677 here because if SIZE is less than the mode mask, as it is
1678 returned by the macro, it will definitely be less than the
1679 actual mode mask. */
1680 && ((GET_CODE (size) == CONST_INT
1681 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1682 <= (GET_MODE_MASK (mode) >> 1)))
1683 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1684 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1685 || (*pred) (x, BLKmode))
1686 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1687 || (*pred) (y, BLKmode))
1688 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1689 || (*pred) (opalign, VOIDmode)))
1691 rtx op2;
1692 rtx last = get_last_insn ();
1693 rtx pat;
1695 op2 = convert_to_mode (mode, size, 1);
1696 pred = insn_data[(int) code].operand[2].predicate;
1697 if (pred != 0 && ! (*pred) (op2, mode))
1698 op2 = copy_to_mode_reg (mode, op2);
1700 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1701 if (pat)
1703 emit_insn (pat);
1704 volatile_ok = 0;
1705 return 0;
1707 else
1708 delete_insns_since (last);
1712 volatile_ok = 0;
1714 /* X, Y, or SIZE may have been passed through protect_from_queue.
1716 It is unsafe to save the value generated by protect_from_queue
1717 and reuse it later. Consider what happens if emit_queue is
1718 called before the return value from protect_from_queue is used.
1720 Expansion of the CALL_EXPR below will call emit_queue before
1721 we are finished emitting RTL for argument setup. So if we are
1722 not careful we could get the wrong value for an argument.
1724 To avoid this problem we go ahead and emit code to copy X, Y &
1725 SIZE into new pseudos. We can then place those new pseudos
1726 into an RTL_EXPR and use them later, even after a call to
1727 emit_queue.
1729 Note this is not strictly needed for library calls since they
1730 do not call emit_queue before loading their arguments. However,
1731 we may need to have library calls call emit_queue in the future
1732 since failing to do so could cause problems for targets which
1733 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1734 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1735 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1737 #ifdef TARGET_MEM_FUNCTIONS
1738 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1739 #else
1740 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1741 TREE_UNSIGNED (integer_type_node));
1742 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1743 #endif
1745 #ifdef TARGET_MEM_FUNCTIONS
1746 /* It is incorrect to use the libcall calling conventions to call
1747 memcpy in this context.
1749 This could be a user call to memcpy and the user may wish to
1750 examine the return value from memcpy.
1752 For targets where libcalls and normal calls have different conventions
1753 for returning pointers, we could end up generating incorrect code.
1755 So instead of using a libcall sequence we build up a suitable
1756 CALL_EXPR and expand the call in the normal fashion. */
1757 if (fn == NULL_TREE)
1759 tree fntype;
1761 /* This was copied from except.c, I don't know if all this is
1762 necessary in this context or not. */
1763 fn = get_identifier ("memcpy");
1764 fntype = build_pointer_type (void_type_node);
1765 fntype = build_function_type (fntype, NULL_TREE);
1766 fn = build_decl (FUNCTION_DECL, fn, fntype);
1767 ggc_add_tree_root (&fn, 1);
1768 DECL_EXTERNAL (fn) = 1;
1769 TREE_PUBLIC (fn) = 1;
1770 DECL_ARTIFICIAL (fn) = 1;
1771 make_decl_rtl (fn, NULL);
1772 assemble_external (fn);
1775 /* We need to make an argument list for the function call.
1777 memcpy has three arguments, the first two are void * addresses and
1778 the last is a size_t byte count for the copy. */
1779 arg_list
1780 = build_tree_list (NULL_TREE,
1781 make_tree (build_pointer_type (void_type_node), x));
1782 TREE_CHAIN (arg_list)
1783 = build_tree_list (NULL_TREE,
1784 make_tree (build_pointer_type (void_type_node), y));
1785 TREE_CHAIN (TREE_CHAIN (arg_list))
1786 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1787 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1789 /* Now we have to build up the CALL_EXPR itself. */
1790 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1791 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1792 call_expr, arg_list, NULL_TREE);
1793 TREE_SIDE_EFFECTS (call_expr) = 1;
1795 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1796 #else
1797 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1798 VOIDmode, 3, y, Pmode, x, Pmode,
1799 convert_to_mode (TYPE_MODE (integer_type_node), size,
1800 TREE_UNSIGNED (integer_type_node)),
1801 TYPE_MODE (integer_type_node));
1802 #endif
1805 return retval;
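/* An illustrative sketch of a typical call (hypothetical code, for
   exposition only): copying a 12-byte structure between two word-aligned
   BLKmode MEMs might be expanded as

      emit_block_move (dst_mem, src_mem, GEN_INT (12), 32);

   where the last argument is the alignment in bits.  Assuming the default
   MOVE_RATIO on a 32-bit target, MOVE_BY_PIECES_P accepts this size and
   the copy becomes three SImode moves; a larger or unknown size would
   instead use a movstr pattern or the memcpy/bcopy call emitted above,
   whose result is what this function returns (or 0 if no call was
   made).  */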
1808 /* Copy all or part of a value X into registers starting at REGNO.
1809 The number of registers to be filled is NREGS. */
1811 void
1812 move_block_to_reg (regno, x, nregs, mode)
1813 int regno;
1814 rtx x;
1815 int nregs;
1816 enum machine_mode mode;
1818 int i;
1819 #ifdef HAVE_load_multiple
1820 rtx pat;
1821 rtx last;
1822 #endif
1824 if (nregs == 0)
1825 return;
1827 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1828 x = validize_mem (force_const_mem (mode, x));
1830 /* See if the machine can do this with a load multiple insn. */
1831 #ifdef HAVE_load_multiple
1832 if (HAVE_load_multiple)
1834 last = get_last_insn ();
1835 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1836 GEN_INT (nregs));
1837 if (pat)
1839 emit_insn (pat);
1840 return;
1842 else
1843 delete_insns_since (last);
1845 #endif
1847 for (i = 0; i < nregs; i++)
1848 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1849 operand_subword_force (x, i, mode));
1852 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853 The number of registers to be filled is NREGS. SIZE indicates the number
1854 of bytes in the object X. */
1856 void
1857 move_block_from_reg (regno, x, nregs, size)
1858 int regno;
1859 rtx x;
1860 int nregs;
1861 int size;
1863 int i;
1864 #ifdef HAVE_store_multiple
1865 rtx pat;
1866 rtx last;
1867 #endif
1868 enum machine_mode mode;
1870 if (nregs == 0)
1871 return;
1873 /* If SIZE is that of a mode no bigger than a word, just use that
1874 mode's store operation. */
1875 if (size <= UNITS_PER_WORD
1876 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1878 emit_move_insn (change_address (x, mode, NULL),
1879 gen_rtx_REG (mode, regno));
1880 return;
1883 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884 to the left before storing to memory. Note that the previous test
1885 doesn't handle all cases (e.g. SIZE == 3). */
1886 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1888 rtx tem = operand_subword (x, 0, 1, BLKmode);
1889 rtx shift;
1891 if (tem == 0)
1892 abort ();
1894 shift = expand_shift (LSHIFT_EXPR, word_mode,
1895 gen_rtx_REG (word_mode, regno),
1896 build_int_2 ((UNITS_PER_WORD - size)
1897 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1898 emit_move_insn (tem, shift);
1899 return;
1902 /* See if the machine can do this with a store multiple insn. */
1903 #ifdef HAVE_store_multiple
1904 if (HAVE_store_multiple)
1906 last = get_last_insn ();
1907 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1908 GEN_INT (nregs));
1909 if (pat)
1911 emit_insn (pat);
1912 return;
1914 else
1915 delete_insns_since (last);
1917 #endif
1919 for (i = 0; i < nregs; i++)
1921 rtx tem = operand_subword (x, i, 1, BLKmode);
1923 if (tem == 0)
1924 abort ();
1926 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1930 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1931 registers represented by a PARALLEL. SSIZE represents the total size of
1932 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1933 SRC in bits. */
1934 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1935 the balance will be in what would be the low-order memory addresses, i.e.
1936 left justified for big endian, right justified for little endian. This
1937 happens to be true for the targets currently using this support. If this
1938 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1939 would be needed. */
1941 void
1942 emit_group_load (dst, orig_src, ssize, align)
1943 rtx dst, orig_src;
1944 unsigned int align;
1945 int ssize;
1947 rtx *tmps, src;
1948 int start, i;
1950 if (GET_CODE (dst) != PARALLEL)
1951 abort ();
1953 /* Check for a NULL entry, used to indicate that the parameter goes
1954 both on the stack and in registers. */
1955 if (XEXP (XVECEXP (dst, 0, 0), 0))
1956 start = 0;
1957 else
1958 start = 1;
1960 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1962 /* If we won't be loading directly from memory, protect the real source
1963 from strange tricks we might play. */
1964 src = orig_src;
1965 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1967 if (GET_MODE (src) == VOIDmode)
1968 src = gen_reg_rtx (GET_MODE (dst));
1969 else
1970 src = gen_reg_rtx (GET_MODE (orig_src));
1971 emit_move_insn (src, orig_src);
1974 /* Process the pieces. */
1975 for (i = start; i < XVECLEN (dst, 0); i++)
1977 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1978 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1979 unsigned int bytelen = GET_MODE_SIZE (mode);
1980 int shift = 0;
1982 /* Handle trailing fragments that run over the size of the struct. */
1983 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1985 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1986 bytelen = ssize - bytepos;
1987 if (bytelen <= 0)
1988 abort ();
1991 /* Optimize the access just a bit. */
1992 if (GET_CODE (src) == MEM
1993 && align >= GET_MODE_ALIGNMENT (mode)
1994 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1995 && bytelen == GET_MODE_SIZE (mode))
1997 tmps[i] = gen_reg_rtx (mode);
1998 emit_move_insn (tmps[i],
1999 change_address (src, mode,
2000 plus_constant (XEXP (src, 0),
2001 bytepos)));
2003 else if (GET_CODE (src) == CONCAT)
2005 if (bytepos == 0
2006 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2007 tmps[i] = XEXP (src, 0);
2008 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2009 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2010 tmps[i] = XEXP (src, 1);
2011 else
2012 abort ();
2014 else if ((CONSTANT_P (src)
2015 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2016 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2017 tmps[i] = src;
2018 else
2019 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2020 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2021 mode, mode, align, ssize);
2023 if (BYTES_BIG_ENDIAN && shift)
2024 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2025 tmps[i], 0, OPTAB_WIDEN);
2028 emit_queue ();
2030 /* Copy the extracted pieces into the proper (probable) hard regs. */
2031 for (i = start; i < XVECLEN (dst, 0); i++)
2032 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
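/* An illustrative sketch, not taken from any particular caller: a routine
   such as expand_call could describe a value returned in two consecutive
   word-sized hard registers and scatter a two-word memory block into them.
   The register numbers and SRC_MEM are invented for the example; SRC_MEM
   stands for a BLKmode MEM of 2 * UNITS_PER_WORD bytes known to be
   word-aligned.

     rtx dst
       = gen_rtx_PARALLEL (VOIDmode,
			   gen_rtvec (2,
				      gen_rtx_EXPR_LIST (VOIDmode,
							 gen_rtx_REG (word_mode, 3),
							 GEN_INT (0)),
				      gen_rtx_EXPR_LIST (VOIDmode,
							 gen_rtx_REG (word_mode, 4),
							 GEN_INT (UNITS_PER_WORD))));
     emit_group_load (dst, src_mem, 2 * UNITS_PER_WORD, BITS_PER_WORD);

   The first word of SRC_MEM ends up in hard register 3 and the second in
   hard register 4.  */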
2035 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2036 registers represented by a PARALLEL. SSIZE represents the total size of
2037 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2039 void
2040 emit_group_store (orig_dst, src, ssize, align)
2041 rtx orig_dst, src;
2042 int ssize;
2043 unsigned int align;
2045 rtx *tmps, dst;
2046 int start, i;
2048 if (GET_CODE (src) != PARALLEL)
2049 abort ();
2051 /* Check for a NULL entry, used to indicate that the parameter goes
2052 both on the stack and in registers. */
2053 if (XEXP (XVECEXP (src, 0, 0), 0))
2054 start = 0;
2055 else
2056 start = 1;
2058 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2060 /* Copy the (probable) hard regs into pseudos. */
2061 for (i = start; i < XVECLEN (src, 0); i++)
2063 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2064 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2065 emit_move_insn (tmps[i], reg);
2067 emit_queue ();
2069 /* If we won't be storing directly into memory, protect the real destination
2070 from strange tricks we might play. */
2071 dst = orig_dst;
2072 if (GET_CODE (dst) == PARALLEL)
2074 rtx temp;
2076 /* We can get a PARALLEL dst if there is a conditional expression in
2077 a return statement. In that case, the dst and src are the same,
2078 so no action is necessary. */
2079 if (rtx_equal_p (dst, src))
2080 return;
2082 /* It is unclear if we can ever reach here, but we may as well handle
2083 it. Allocate a temporary, and split this into a store/load to/from
2084 the temporary. */
2086 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2087 emit_group_store (temp, src, ssize, align);
2088 emit_group_load (dst, temp, ssize, align);
2089 return;
2091 else if (GET_CODE (dst) != MEM)
2093 dst = gen_reg_rtx (GET_MODE (orig_dst));
2094 /* Make life a bit easier for combine. */
2095 emit_move_insn (dst, const0_rtx);
2098 /* Process the pieces. */
2099 for (i = start; i < XVECLEN (src, 0); i++)
2101 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2102 enum machine_mode mode = GET_MODE (tmps[i]);
2103 unsigned int bytelen = GET_MODE_SIZE (mode);
2105 /* Handle trailing fragments that run over the size of the struct. */
2106 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2108 if (BYTES_BIG_ENDIAN)
2110 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2111 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2112 tmps[i], 0, OPTAB_WIDEN);
2114 bytelen = ssize - bytepos;
2117 /* Optimize the access just a bit. */
2118 if (GET_CODE (dst) == MEM
2119 && align >= GET_MODE_ALIGNMENT (mode)
2120 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2121 && bytelen == GET_MODE_SIZE (mode))
2122 emit_move_insn (change_address (dst, mode,
2123 plus_constant (XEXP (dst, 0),
2124 bytepos)),
2125 tmps[i]);
2126 else
2127 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2128 mode, tmps[i], align, ssize);
2131 emit_queue ();
2133 /* Copy from the pseudo into the (probable) hard reg. */
2134 if (GET_CODE (dst) == REG)
2135 emit_move_insn (orig_dst, dst);
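/* An illustrative sketch, assuming the same invented PARALLEL as in the
   emit_group_load example above: emit_group_store goes the other way and
   gathers the registers back into memory.  DST_MEM stands for a BLKmode
   MEM of at least 2 * UNITS_PER_WORD bytes.

     emit_group_store (dst_mem, dst, 2 * UNITS_PER_WORD, BITS_PER_WORD);
*/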
2138 /* Generate code to copy a BLKmode object of TYPE out of a
2139 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2140 is null, a stack temporary is created. TGTBLK is returned.
2142 The primary purpose of this routine is to handle functions
2143 that return BLKmode structures in registers. Some machines
2144 (the PA for example) want to return all small structures
2145 in registers regardless of the structure's alignment. */
rtx
2148 copy_blkmode_from_reg (tgtblk, srcreg, type)
2149 rtx tgtblk;
2150 rtx srcreg;
2151 tree type;
2153 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2154 rtx src = NULL, dst = NULL;
2155 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2156 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2158 if (tgtblk == 0)
2160 tgtblk = assign_temp (build_qualified_type (type,
2161 (TYPE_QUALS (type)
2162 | TYPE_QUAL_CONST)),
2163 0, 1, 1);
2164 preserve_temp_slots (tgtblk);
2167 /* This code assumes srcreg is at least a full word. If it isn't,
2168 copy it into a new pseudo which is a full word. */
2169 if (GET_MODE (srcreg) != BLKmode
2170 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2171 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2173 /* Structures whose size is not a multiple of a word are aligned
2174 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2175 machine, this means we must skip the empty high order bytes when
2176 calculating the bit offset. */
2177 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2178 big_endian_correction
2179 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2181 /* Copy the structure BITSIZE bits at a time.
2183 We could probably emit more efficient code for machines which do not use
2184 strict alignment, but it doesn't seem worth the effort at the current
2185 time. */
2186 for (bitpos = 0, xbitpos = big_endian_correction;
2187 bitpos < bytes * BITS_PER_UNIT;
2188 bitpos += bitsize, xbitpos += bitsize)
2190 /* We need a new source operand each time xbitpos is on a
2191 word boundary and when xbitpos == big_endian_correction
2192 (the first time through). */
2193 if (xbitpos % BITS_PER_WORD == 0
2194 || xbitpos == big_endian_correction)
2195 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2197 /* We need a new destination operand each time bitpos is on
2198 a word boundary. */
2199 if (bitpos % BITS_PER_WORD == 0)
2200 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2202 /* Use xbitpos for the source extraction (right justified) and
2203 bitpos for the destination store (left justified). */
2204 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2205 extract_bit_field (src, bitsize,
2206 xbitpos % BITS_PER_WORD, 1,
2207 NULL_RTX, word_mode, word_mode,
2208 bitsize, BITS_PER_WORD),
2209 bitsize, BITS_PER_WORD);
2212 return tgtblk;
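/* An illustrative sketch, not a quote of any caller: expand_call uses this
   routine along these lines when a function returns a small BLKmode
   aggregate in a register.  VALREG and TYPE stand for the return-value
   register and the aggregate's tree type; TARGET is whatever rtx the
   caller has in hand, possibly 0.

     target = copy_blkmode_from_reg (target, valreg, type);

   Passing 0 for TGTBLK makes the routine allocate a stack temporary of
   TYPE and return it.  */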
2215 /* Add a USE expression for REG to the (possibly empty) list pointed
2216 to by CALL_FUSAGE. REG must denote a hard register. */
2218 void
2219 use_reg (call_fusage, reg)
2220 rtx *call_fusage, reg;
2222 if (GET_CODE (reg) != REG
2223 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2224 abort ();
2226 *call_fusage
2227 = gen_rtx_EXPR_LIST (VOIDmode,
2228 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2231 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2232 starting at REGNO. All of these registers must be hard registers. */
2234 void
2235 use_regs (call_fusage, regno, nregs)
2236 rtx *call_fusage;
2237 int regno;
2238 int nregs;
2240 int i;
2242 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2243 abort ();
2245 for (i = 0; i < nregs; i++)
2246 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2249 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2250 PARALLEL REGS. This is for calls that pass values in multiple
2251 non-contiguous locations. The Irix 6 ABI has examples of this. */
2253 void
2254 use_group_regs (call_fusage, regs)
2255 rtx *call_fusage;
2256 rtx regs;
2258 int i;
2260 for (i = 0; i < XVECLEN (regs, 0); i++)
2262 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2264 /* A NULL entry means the parameter goes both on the stack and in
2265 registers. This can also be a MEM for targets that pass values
2266 partially on the stack and partially in registers. */
2267 if (reg != 0 && GET_CODE (reg) == REG)
2268 use_reg (call_fusage, reg);
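/* An illustrative sketch with invented register numbers: callers in
   calls.c accumulate a CALL_INSN_FUNCTION_USAGE list with these helpers
   while loading argument registers, roughly as

     rtx call_fusage = 0;
     use_reg (&call_fusage, gen_rtx_REG (Pmode, 2));
     use_regs (&call_fusage, 3, 2);

   which records hard register 2 and the pair starting at hard register 3
   as used by the call; CALL_FUSAGE is then attached to the call insn.  */

/* Determine whether the LEN bytes generated by CONSTFUN can be stored
   to memory using several move instructions.  CONSTFUNDATA is a pointer
   which will be passed as argument in every CONSTFUN call.  ALIGN is the
   maximum alignment we can assume.  Return nonzero if a call to
   store_by_pieces should succeed.  */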
int
2274 can_store_by_pieces (len, constfun, constfundata, align)
2275 unsigned HOST_WIDE_INT len;
2276 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2277 PTR constfundata;
2278 unsigned int align;
2280 unsigned HOST_WIDE_INT max_size, l;
2281 HOST_WIDE_INT offset = 0;
2282 enum machine_mode mode, tmode;
2283 enum insn_code icode;
2284 int reverse;
2285 rtx cst;
2287 if (! MOVE_BY_PIECES_P (len, align))
2288 return 0;
2290 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2291 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2292 align = MOVE_MAX * BITS_PER_UNIT;
2294 /* We would first store what we can in the largest integer mode, then go to
2295 successively smaller modes. */
2297 for (reverse = 0;
2298 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2299 reverse++)
2301 l = len;
2302 mode = VOIDmode;
2303 max_size = MOVE_MAX_PIECES + 1;
2304 while (max_size > 1)
2306 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2307 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2308 if (GET_MODE_SIZE (tmode) < max_size)
2309 mode = tmode;
2311 if (mode == VOIDmode)
2312 break;
2314 icode = mov_optab->handlers[(int) mode].insn_code;
2315 if (icode != CODE_FOR_nothing
2316 && align >= GET_MODE_ALIGNMENT (mode))
2318 unsigned int size = GET_MODE_SIZE (mode);
2320 while (l >= size)
2322 if (reverse)
2323 offset -= size;
2325 cst = (*constfun) (constfundata, offset, mode);
2326 if (!LEGITIMATE_CONSTANT_P (cst))
2327 return 0;
2329 if (!reverse)
2330 offset += size;
2332 l -= size;
2336 max_size = GET_MODE_SIZE (mode);
2339 /* The code above should have handled everything. */
2340 if (l != 0)
2341 abort ();
2344 return 1;
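/* An illustrative sketch of the CONSTFUN protocol shared by
   can_store_by_pieces and store_by_pieces: the callback receives the
   opaque CONSTFUNDATA pointer, a byte OFFSET into the block and the MODE
   being stored, and must return a constant rtx of that mode.  The trivial
   callback below, modelled on clear_by_pieces_1 further down, always
   yields zero; its name is invented for the example.

     static rtx
     zero_constfun (data, offset, mode)
	  PTR data ATTRIBUTE_UNUSED;
	  HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
	  enum machine_mode mode ATTRIBUTE_UNUSED;
     {
       return const0_rtx;
     }
*/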
2347 /* Generate several move instructions to store LEN bytes generated by
2348 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2349 pointer which will be passed as argument in every CONSTFUN call.
2350 ALIGN is maximum alignment we can assume. */
2352 void
2353 store_by_pieces (to, len, constfun, constfundata, align)
2354 rtx to;
2355 unsigned HOST_WIDE_INT len;
2356 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2357 PTR constfundata;
2358 unsigned int align;
2360 struct store_by_pieces data;
2362 if (! MOVE_BY_PIECES_P (len, align))
2363 abort ();
2364 to = protect_from_queue (to, 1);
2365 data.constfun = constfun;
2366 data.constfundata = constfundata;
2367 data.len = len;
2368 data.to = to;
2369 store_by_pieces_1 (&data, align);
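/* An illustrative sketch, using the invented ZERO_CONSTFUN callback from
   the comment above: a caller first asks can_store_by_pieces whether the
   by-pieces expansion is legal and worthwhile, then commits to it.  TO is
   assumed to be a BLKmode MEM of LEN bytes.

     if (can_store_by_pieces (len, zero_constfun, (PTR) 0, align))
       store_by_pieces (to, len, zero_constfun, (PTR) 0, align);
*/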
2372 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2373 rtx with BLKmode). The caller must pass TO through protect_from_queue
2374 before calling. ALIGN is maximum alignment we can assume. */
2376 static void
2377 clear_by_pieces (to, len, align)
2378 rtx to;
2379 unsigned HOST_WIDE_INT len;
2380 unsigned int align;
2382 struct store_by_pieces data;
2384 data.constfun = clear_by_pieces_1;
2385 data.constfundata = NULL;
2386 data.len = len;
2387 data.to = to;
2388 store_by_pieces_1 (&data, align);
2391 /* Callback routine for clear_by_pieces.
2392 Return const0_rtx unconditionally. */
2394 static rtx
2395 clear_by_pieces_1 (data, offset, mode)
2396 PTR data ATTRIBUTE_UNUSED;
2397 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2398 enum machine_mode mode ATTRIBUTE_UNUSED;
2400 return const0_rtx;
2403 /* Subroutine of clear_by_pieces and store_by_pieces.
2404 Generate several move instructions to store LEN bytes of block TO. (A MEM
2405 rtx with BLKmode). The caller must pass TO through protect_from_queue
2406 before calling. ALIGN is maximum alignment we can assume. */
2408 static void
2409 store_by_pieces_1 (data, align)
2410 struct store_by_pieces *data;
2411 unsigned int align;
2413 rtx to_addr = XEXP (data->to, 0);
2414 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2415 enum machine_mode mode = VOIDmode, tmode;
2416 enum insn_code icode;
2418 data->offset = 0;
2419 data->to_addr = to_addr;
2420 data->autinc_to
2421 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2422 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2424 data->explicit_inc_to = 0;
2425 data->reverse
2426 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2427 if (data->reverse)
2428 data->offset = data->len;
2430 /* If storing requires more than two move insns,
2431 copy addresses to registers (to make displacements shorter)
2432 and use post-increment if available. */
2433 if (!data->autinc_to
2434 && move_by_pieces_ninsns (data->len, align) > 2)
2436 /* Determine the main mode we'll be using. */
2437 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2438 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2439 if (GET_MODE_SIZE (tmode) < max_size)
2440 mode = tmode;
2442 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2444 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2445 data->autinc_to = 1;
2446 data->explicit_inc_to = -1;
2449 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2450 && ! data->autinc_to)
2452 data->to_addr = copy_addr_to_reg (to_addr);
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = 1;
2457 if ( !data->autinc_to && CONSTANT_P (to_addr))
2458 data->to_addr = copy_addr_to_reg (to_addr);
2461 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2462 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2463 align = MOVE_MAX * BITS_PER_UNIT;
2465 /* First store what we can in the largest integer mode, then go to
2466 successively smaller modes. */
2468 while (max_size > 1)
2470 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2471 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2472 if (GET_MODE_SIZE (tmode) < max_size)
2473 mode = tmode;
2475 if (mode == VOIDmode)
2476 break;
2478 icode = mov_optab->handlers[(int) mode].insn_code;
2479 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2480 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2482 max_size = GET_MODE_SIZE (mode);
2485 /* The code above should have handled everything. */
2486 if (data->len != 0)
2487 abort ();
2490 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2491 with move instructions for mode MODE. GENFUN is the gen_... function
2492 to make a move insn for that mode. DATA has all the other info. */
2494 static void
2495 store_by_pieces_2 (genfun, mode, data)
2496 rtx (*genfun) PARAMS ((rtx, ...));
2497 enum machine_mode mode;
2498 struct store_by_pieces *data;
2500 unsigned int size = GET_MODE_SIZE (mode);
2501 rtx to1, cst;
2503 while (data->len >= size)
2505 if (data->reverse)
2506 data->offset -= size;
2508 if (data->autinc_to)
2510 to1 = gen_rtx_MEM (mode, data->to_addr);
2511 MEM_COPY_ATTRIBUTES (to1, data->to);
2513 else
2514 to1 = change_address (data->to, mode,
2515 plus_constant (data->to_addr, data->offset));
2517 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2518 emit_insn (gen_add2_insn (data->to_addr,
2519 GEN_INT (-(HOST_WIDE_INT) size)));
2521 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2522 emit_insn ((*genfun) (to1, cst));
2524 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2525 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2527 if (! data->reverse)
2528 data->offset += size;
2530 data->len -= size;
2534 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2535 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2537 If we call a function that returns the length of the block, return it. */
rtx
2540 clear_storage (object, size, align)
2541 rtx object;
2542 rtx size;
2543 unsigned int align;
2545 #ifdef TARGET_MEM_FUNCTIONS
2546 static tree fn;
2547 tree call_expr, arg_list;
2548 #endif
2549 rtx retval = 0;
2551 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2552 just move a zero. Otherwise, do this a piece at a time. */
2553 if (GET_MODE (object) != BLKmode
2554 && GET_CODE (size) == CONST_INT
2555 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2556 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2557 else
2559 object = protect_from_queue (object, 1);
2560 size = protect_from_queue (size, 0);
2562 if (GET_CODE (size) == CONST_INT
2563 && MOVE_BY_PIECES_P (INTVAL (size), align))
2564 clear_by_pieces (object, INTVAL (size), align);
2565 else
2567 /* Try the most limited insn first, because there's no point
2568 including more than one in the machine description unless
2569 the more limited one has some advantage. */
2571 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2572 enum machine_mode mode;
2574 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2575 mode = GET_MODE_WIDER_MODE (mode))
2577 enum insn_code code = clrstr_optab[(int) mode];
2578 insn_operand_predicate_fn pred;
2580 if (code != CODE_FOR_nothing
2581 /* We don't need MODE to be narrower than
2582 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2583 the mode mask, as it is returned by the macro, it will
2584 definitely be less than the actual mode mask. */
2585 && ((GET_CODE (size) == CONST_INT
2586 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2587 <= (GET_MODE_MASK (mode) >> 1)))
2588 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2589 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2590 || (*pred) (object, BLKmode))
2591 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2592 || (*pred) (opalign, VOIDmode)))
2594 rtx op1;
2595 rtx last = get_last_insn ();
2596 rtx pat;
2598 op1 = convert_to_mode (mode, size, 1);
2599 pred = insn_data[(int) code].operand[1].predicate;
2600 if (pred != 0 && ! (*pred) (op1, mode))
2601 op1 = copy_to_mode_reg (mode, op1);
2603 pat = GEN_FCN ((int) code) (object, op1, opalign);
2604 if (pat)
2606 emit_insn (pat);
2607 return 0;
2609 else
2610 delete_insns_since (last);
2614 /* OBJECT or SIZE may have been passed through protect_from_queue.
2616 It is unsafe to save the value generated by protect_from_queue
2617 and reuse it later. Consider what happens if emit_queue is
2618 called before the return value from protect_from_queue is used.
2620 Expansion of the CALL_EXPR below will call emit_queue before
2621 we are finished emitting RTL for argument setup. So if we are
2622 not careful we could get the wrong value for an argument.
2624 To avoid this problem we go ahead and emit code to copy OBJECT
2625 and SIZE into new pseudos. We can then place those new pseudos
2626 into an RTL_EXPR and use them later, even after a call to
2627 emit_queue.
2629 Note this is not strictly needed for library calls since they
2630 do not call emit_queue before loading their arguments. However,
2631 we may need to have library calls call emit_queue in the future
2632 since failing to do so could cause problems for targets which
2633 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2634 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2636 #ifdef TARGET_MEM_FUNCTIONS
2637 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2638 #else
2639 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2640 TREE_UNSIGNED (integer_type_node));
2641 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2642 #endif
2644 #ifdef TARGET_MEM_FUNCTIONS
2645 /* It is incorrect to use the libcall calling conventions to call
2646 memset in this context.
2648 This could be a user call to memset and the user may wish to
2649 examine the return value from memset.
2651 For targets where libcalls and normal calls have different
2652 conventions for returning pointers, we could end up generating
2653 incorrect code.
2655 So instead of using a libcall sequence we build up a suitable
2656 CALL_EXPR and expand the call in the normal fashion. */
2657 if (fn == NULL_TREE)
2659 tree fntype;
2661 /* This was copied from except.c, I don't know if all this is
2662 necessary in this context or not. */
2663 fn = get_identifier ("memset");
2664 fntype = build_pointer_type (void_type_node);
2665 fntype = build_function_type (fntype, NULL_TREE);
2666 fn = build_decl (FUNCTION_DECL, fn, fntype);
2667 ggc_add_tree_root (&fn, 1);
2668 DECL_EXTERNAL (fn) = 1;
2669 TREE_PUBLIC (fn) = 1;
2670 DECL_ARTIFICIAL (fn) = 1;
2671 make_decl_rtl (fn, NULL);
2672 assemble_external (fn);
2675 /* We need to make an argument list for the function call.
2677 memset has three arguments: the first is a void * address, the
2678 second an integer with the initialization value, the last is a
2679 size_t byte count for the copy. */
2680 arg_list
2681 = build_tree_list (NULL_TREE,
2682 make_tree (build_pointer_type (void_type_node),
2683 object));
2684 TREE_CHAIN (arg_list)
2685 = build_tree_list (NULL_TREE,
2686 make_tree (integer_type_node, const0_rtx));
2687 TREE_CHAIN (TREE_CHAIN (arg_list))
2688 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2689 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2691 /* Now we have to build up the CALL_EXPR itself. */
2692 call_expr = build1 (ADDR_EXPR,
2693 build_pointer_type (TREE_TYPE (fn)), fn);
2694 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2695 call_expr, arg_list, NULL_TREE);
2696 TREE_SIDE_EFFECTS (call_expr) = 1;
2698 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2699 #else
2700 emit_library_call (bzero_libfunc, LCT_NORMAL,
2701 VOIDmode, 2, object, Pmode, size,
2702 TYPE_MODE (integer_type_node));
2703 #endif
2707 return retval;
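/* An illustrative sketch, not a quote of any particular caller: an
   aggregate object held in a BLKmode MEM could be zeroed with

     clear_storage (target, GEN_INT (int_size_in_bytes (type)),
		    TYPE_ALIGN (type));

   where TARGET and TYPE stand for the object's rtx and tree type.  With a
   small constant SIZE this becomes clear_by_pieces; otherwise a clrstr
   pattern or the memset/bzero call built above is used.  */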
2710 /* Generate code to copy Y into X.
2711 Both Y and X must have the same mode, except that
2712 Y can be a constant with VOIDmode.
2713 This mode cannot be BLKmode; use emit_block_move for that.
2715 Return the last instruction emitted. */
rtx
2718 emit_move_insn (x, y)
2719 rtx x, y;
2721 enum machine_mode mode = GET_MODE (x);
2722 rtx y_cst = NULL_RTX;
2723 rtx last_insn;
2725 x = protect_from_queue (x, 1);
2726 y = protect_from_queue (y, 0);
2728 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2729 abort ();
2731 /* Never force constant_p_rtx to memory. */
2732 if (GET_CODE (y) == CONSTANT_P_RTX)
2734 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2736 y_cst = y;
2737 y = force_const_mem (mode, y);
2740 /* If X or Y are memory references, verify that their addresses are valid
2741 for the machine. */
2742 if (GET_CODE (x) == MEM
2743 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2744 && ! push_operand (x, GET_MODE (x)))
2745 || (flag_force_addr
2746 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2747 x = change_address (x, VOIDmode, XEXP (x, 0));
2749 if (GET_CODE (y) == MEM
2750 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2751 || (flag_force_addr
2752 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2753 y = change_address (y, VOIDmode, XEXP (y, 0));
2755 if (mode == BLKmode)
2756 abort ();
2758 last_insn = emit_move_insn_1 (x, y);
2760 if (y_cst && GET_CODE (x) == REG)
2761 REG_NOTES (last_insn)
2762 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2764 return last_insn;
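/* An illustrative sketch: the usual way to copy one value to another is
   simply

     emit_move_insn (target, source);

   with TARGET and SOURCE sharing a non-BLK mode (SOURCE may also be a
   VOIDmode constant).  For instance, an invented pseudo can be loaded
   with a constant by

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));
*/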
2767 /* Low level part of emit_move_insn.
2768 Called just like emit_move_insn, but assumes X and Y
2769 are basically valid. */
rtx
2772 emit_move_insn_1 (x, y)
2773 rtx x, y;
2775 enum machine_mode mode = GET_MODE (x);
2776 enum machine_mode submode;
2777 enum mode_class class = GET_MODE_CLASS (mode);
2778 unsigned int i;
2780 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2781 abort ();
2783 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2784 return
2785 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2787 /* Expand complex moves by moving real part and imag part, if possible. */
2788 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2789 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2790 * BITS_PER_UNIT),
2791 (class == MODE_COMPLEX_INT
2792 ? MODE_INT : MODE_FLOAT),
2794 && (mov_optab->handlers[(int) submode].insn_code
2795 != CODE_FOR_nothing))
2797 /* Don't split destination if it is a stack push. */
2798 int stack = push_operand (x, GET_MODE (x));
2800 #ifdef PUSH_ROUNDING
2801 /* In case we output to the stack, but the size is smaller than the
2802 machine can push exactly, we need to use move instructions. */
2803 if (stack
2804 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2806 rtx temp;
2807 int offset1, offset2;
2809 /* Do not use anti_adjust_stack, since we don't want to update
2810 stack_pointer_delta. */
2811 temp = expand_binop (Pmode,
2812 #ifdef STACK_GROWS_DOWNWARD
2813 sub_optab,
2814 #else
2815 add_optab,
2816 #endif
2817 stack_pointer_rtx,
2818 GEN_INT
2819 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2820 stack_pointer_rtx,
2822 OPTAB_LIB_WIDEN);
2823 if (temp != stack_pointer_rtx)
2824 emit_move_insn (stack_pointer_rtx, temp);
2825 #ifdef STACK_GROWS_DOWNWARD
2826 offset1 = 0;
2827 offset2 = GET_MODE_SIZE (submode);
2828 #else
2829 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2830 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2831 + GET_MODE_SIZE (submode));
2832 #endif
2833 emit_move_insn (change_address (x, submode,
2834 gen_rtx_PLUS (Pmode,
2835 stack_pointer_rtx,
2836 GEN_INT (offset1))),
2837 gen_realpart (submode, y));
2838 emit_move_insn (change_address (x, submode,
2839 gen_rtx_PLUS (Pmode,
2840 stack_pointer_rtx,
2841 GEN_INT (offset2))),
2842 gen_imagpart (submode, y));
2844 else
2845 #endif
2846 /* If this is a stack push, push the highpart first, so it
2847 will be in the argument order.
2849 In that case, change_address is used only to convert
2850 the mode, not to change the address. */
2851 if (stack)
2853 /* Note that the real part always precedes the imag part in memory
2854 regardless of machine's endianness. */
2855 #ifdef STACK_GROWS_DOWNWARD
2856 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2857 (gen_rtx_MEM (submode, XEXP (x, 0)),
2858 gen_imagpart (submode, y)));
2859 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2860 (gen_rtx_MEM (submode, XEXP (x, 0)),
2861 gen_realpart (submode, y)));
2862 #else
2863 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2864 (gen_rtx_MEM (submode, XEXP (x, 0)),
2865 gen_realpart (submode, y)));
2866 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2867 (gen_rtx_MEM (submode, XEXP (x, 0)),
2868 gen_imagpart (submode, y)));
2869 #endif
2871 else
2873 rtx realpart_x, realpart_y;
2874 rtx imagpart_x, imagpart_y;
2876 /* If this is a complex value with each part being smaller than a
2877 word, the usual calling sequence will likely pack the pieces into
2878 a single register. Unfortunately, SUBREG of hard registers only
2879 deals in terms of words, so we have a problem converting input
2880 arguments to the CONCAT of two registers that is used elsewhere
2881 for complex values. If this is before reload, we can copy it into
2882 memory and reload. FIXME, we should see about using extract and
2883 insert on integer registers, but complex short and complex char
2884 variables should be rarely used. */
2885 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2886 && (reload_in_progress | reload_completed) == 0)
2888 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2889 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2891 if (packed_dest_p || packed_src_p)
2893 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2894 ? MODE_FLOAT : MODE_INT);
2896 enum machine_mode reg_mode
2897 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2899 if (reg_mode != BLKmode)
2901 rtx mem = assign_stack_temp (reg_mode,
2902 GET_MODE_SIZE (mode), 0);
2903 rtx cmem = change_address (mem, mode, NULL_RTX);
2905 cfun->cannot_inline
2906 = N_("function using short complex types cannot be inline");
2908 if (packed_dest_p)
2910 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2911 emit_move_insn_1 (cmem, y);
2912 return emit_move_insn_1 (sreg, mem);
2914 else
2916 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2917 emit_move_insn_1 (mem, sreg);
2918 return emit_move_insn_1 (x, cmem);
2924 realpart_x = gen_realpart (submode, x);
2925 realpart_y = gen_realpart (submode, y);
2926 imagpart_x = gen_imagpart (submode, x);
2927 imagpart_y = gen_imagpart (submode, y);
2929 /* Show the output dies here. This is necessary for SUBREGs
2930 of pseudos since we cannot track their lifetimes correctly;
2931 hard regs shouldn't appear here except as return values.
2932 We never want to emit such a clobber after reload. */
2933 if (x != y
2934 && ! (reload_in_progress || reload_completed)
2935 && (GET_CODE (realpart_x) == SUBREG
2936 || GET_CODE (imagpart_x) == SUBREG))
2938 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2941 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2942 (realpart_x, realpart_y));
2943 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2944 (imagpart_x, imagpart_y));
2947 return get_last_insn ();
2950 /* This will handle any multi-word mode that lacks a move_insn pattern.
2951 However, you will get better code if you define such patterns,
2952 even if they must turn into multiple assembler instructions. */
2953 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2955 rtx last_insn = 0;
2956 rtx seq, inner;
2957 int need_clobber;
2959 #ifdef PUSH_ROUNDING
2961 /* If X is a push on the stack, do the push now and replace
2962 X with a reference to the stack pointer. */
2963 if (push_operand (x, GET_MODE (x)))
2965 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2966 x = change_address (x, VOIDmode, stack_pointer_rtx);
2968 #endif
2970 /* If we are in reload, see if either operand is a MEM whose address
2971 is scheduled for replacement. */
2972 if (reload_in_progress && GET_CODE (x) == MEM
2973 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2975 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2977 MEM_COPY_ATTRIBUTES (new, x);
2978 x = new;
2980 if (reload_in_progress && GET_CODE (y) == MEM
2981 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2983 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2985 MEM_COPY_ATTRIBUTES (new, y);
2986 y = new;
2989 start_sequence ();
2991 need_clobber = 0;
2992 for (i = 0;
2993 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2994 i++)
2996 rtx xpart = operand_subword (x, i, 1, mode);
2997 rtx ypart = operand_subword (y, i, 1, mode);
2999 /* If we can't get a part of Y, put Y into memory if it is a
3000 constant. Otherwise, force it into a register. If we still
3001 can't get a part of Y, abort. */
3002 if (ypart == 0 && CONSTANT_P (y))
3004 y = force_const_mem (mode, y);
3005 ypart = operand_subword (y, i, 1, mode);
3007 else if (ypart == 0)
3008 ypart = operand_subword_force (y, i, mode);
3010 if (xpart == 0 || ypart == 0)
3011 abort ();
3013 need_clobber |= (GET_CODE (xpart) == SUBREG);
3015 last_insn = emit_move_insn (xpart, ypart);
3018 seq = gen_sequence ();
3019 end_sequence ();
3021 /* Show the output dies here. This is necessary for SUBREGs
3022 of pseudos since we cannot track their lifetimes correctly;
3023 hard regs shouldn't appear here except as return values.
3024 We never want to emit such a clobber after reload. */
3025 if (x != y
3026 && ! (reload_in_progress || reload_completed)
3027 && need_clobber != 0)
3029 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3032 emit_insn (seq);
3034 return last_insn;
3036 else
3037 abort ();
3040 /* Pushing data onto the stack. */
3042 /* Push a block of length SIZE (perhaps variable)
3043 and return an rtx to address the beginning of the block.
3044 Note that it is not possible for the value returned to be a QUEUED.
3045 The value may be virtual_outgoing_args_rtx.
3047 EXTRA is the number of bytes of padding to push in addition to SIZE.
3048 BELOW nonzero means this padding comes at low addresses;
3049 otherwise, the padding comes at high addresses. */
rtx
3052 push_block (size, extra, below)
3053 rtx size;
3054 int extra, below;
3056 register rtx temp;
3058 size = convert_modes (Pmode, ptr_mode, size, 1);
3059 if (CONSTANT_P (size))
3060 anti_adjust_stack (plus_constant (size, extra));
3061 else if (GET_CODE (size) == REG && extra == 0)
3062 anti_adjust_stack (size);
3063 else
3065 temp = copy_to_mode_reg (Pmode, size);
3066 if (extra != 0)
3067 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3068 temp, 0, OPTAB_LIB_WIDEN);
3069 anti_adjust_stack (temp);
3072 #ifndef STACK_GROWS_DOWNWARD
3073 #ifdef ARGS_GROW_DOWNWARD
3074 if (!ACCUMULATE_OUTGOING_ARGS)
3075 #else
3076 if (0)
3077 #endif
3078 #else
3079 if (1)
3080 #endif
3082 /* Return the lowest stack address when STACK or ARGS grow downward and
3083 we are not accumulating outgoing arguments (the c4x port uses such
3084 conventions). */
3085 temp = virtual_outgoing_args_rtx;
3086 if (extra != 0 && below)
3087 temp = plus_constant (temp, extra);
3089 else
3091 if (GET_CODE (size) == CONST_INT)
3092 temp = plus_constant (virtual_outgoing_args_rtx,
3093 -INTVAL (size) - (below ? 0 : extra));
3094 else if (extra != 0 && !below)
3095 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3096 negate_rtx (Pmode, plus_constant (size, extra)));
3097 else
3098 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3099 negate_rtx (Pmode, size));
3102 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
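/* An illustrative sketch mirroring the call made from emit_push_insn
   below:

     temp = push_block (size, extra, where_pad == downward);

   reserves SIZE plus EXTRA bytes of stack and yields the address of the
   new block; BELOW only chooses on which side of the block the EXTRA
   padding ends up.  */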
3106 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3107 block of SIZE bytes. */
3109 static rtx
3110 get_push_address (size)
3111 int size;
3113 register rtx temp;
3115 if (STACK_PUSH_CODE == POST_DEC)
3116 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3117 else if (STACK_PUSH_CODE == POST_INC)
3118 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3119 else
3120 temp = stack_pointer_rtx;
3122 return copy_to_reg (temp);
3125 /* Emit single push insn. */
3126 static void
3127 emit_single_push_insn (mode, x, type)
3128 rtx x;
3129 enum machine_mode mode;
3130 tree type;
3132 #ifdef PUSH_ROUNDING
3133 rtx dest_addr;
3134 int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3135 rtx dest;
3137 if (GET_MODE_SIZE (mode) == rounded_size)
3138 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3139 else
3141 #ifdef STACK_GROWS_DOWNWARD
3142 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3143 GEN_INT (-rounded_size));
3144 #else
3145 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3146 GEN_INT (rounded_size));
3147 #endif
3148 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3151 dest = gen_rtx_MEM (mode, dest_addr);
3153 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3155 if (type != 0)
3157 set_mem_attributes (dest, type, 1);
3158 /* Function incoming arguments may overlap with sibling call
3159 outgoing arguments and we cannot allow reordering of reads
3160 from function arguments with stores to outgoing arguments
3161 of sibling calls. */
3162 MEM_ALIAS_SET (dest) = 0;
3164 emit_move_insn (dest, x);
3165 #else
3166 abort();
3167 #endif
3170 /* Generate code to push X onto the stack, assuming it has mode MODE and
3171 type TYPE.
3172 MODE is redundant except when X is a CONST_INT (since they don't
3173 carry mode info).
3174 SIZE is an rtx for the size of data to be copied (in bytes),
3175 needed only if X is BLKmode.
3177 ALIGN is maximum alignment we can assume.
3179 If PARTIAL and REG are both nonzero, then copy that many of the first
3180 words of X into registers starting with REG, and push the rest of X.
3181 The amount of space pushed is decreased by PARTIAL words,
3182 rounded *down* to a multiple of PARM_BOUNDARY.
3183 REG must be a hard register in this case.
3184 If REG is zero but PARTIAL is not, take all other actions for an
3185 argument partially in registers, but do not actually load any
3186 registers.
3188 EXTRA is the amount in bytes of extra space to leave next to this arg.
3189 This is ignored if an argument block has already been allocated.
3191 On a machine that lacks real push insns, ARGS_ADDR is the address of
3192 the bottom of the argument block for this call. We use indexing off there
3193 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3194 argument block has not been preallocated.
3196 ARGS_SO_FAR is the size of args previously pushed for this call.
3198 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3199 for arguments passed in registers. If nonzero, it will be the number
3200 of bytes required. */
3202 void
3203 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3204 args_addr, args_so_far, reg_parm_stack_space,
3205 alignment_pad)
3206 register rtx x;
3207 enum machine_mode mode;
3208 tree type;
3209 rtx size;
3210 unsigned int align;
3211 int partial;
3212 rtx reg;
3213 int extra;
3214 rtx args_addr;
3215 rtx args_so_far;
3216 int reg_parm_stack_space;
3217 rtx alignment_pad;
3219 rtx xinner;
3220 enum direction stack_direction
3221 #ifdef STACK_GROWS_DOWNWARD
3222 = downward;
3223 #else
3224 = upward;
3225 #endif
3227 /* Decide where to pad the argument: `downward' for below,
3228 `upward' for above, or `none' for don't pad it.
3229 Default is below for small data on big-endian machines; else above. */
3230 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3232 /* Invert direction if stack is post-update. */
3233 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3234 if (where_pad != none)
3235 where_pad = (where_pad == downward ? upward : downward);
3237 xinner = x = protect_from_queue (x, 0);
3239 if (mode == BLKmode)
3241 /* Copy a block into the stack, entirely or partially. */
3243 register rtx temp;
3244 int used = partial * UNITS_PER_WORD;
3245 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3246 int skip;
3248 if (size == 0)
3249 abort ();
3251 used -= offset;
3253 /* USED is now the # of bytes we need not copy to the stack
3254 because registers will take care of them. */
3256 if (partial != 0)
3257 xinner = change_address (xinner, BLKmode,
3258 plus_constant (XEXP (xinner, 0), used));
3260 /* If the partial register-part of the arg counts in its stack size,
3261 skip the part of stack space corresponding to the registers.
3262 Otherwise, start copying to the beginning of the stack space,
3263 by setting SKIP to 0. */
3264 skip = (reg_parm_stack_space == 0) ? 0 : used;
3266 #ifdef PUSH_ROUNDING
3267 /* Do it with several push insns if that doesn't take lots of insns
3268 and if there is no difficulty with push insns that skip bytes
3269 on the stack for alignment purposes. */
3270 if (args_addr == 0
3271 && PUSH_ARGS
3272 && GET_CODE (size) == CONST_INT
3273 && skip == 0
3274 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3275 /* Here we avoid the case of a structure whose weak alignment
3276 forces many pushes of a small amount of data,
3277 and such small pushes do rounding that causes trouble. */
3278 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3279 || align >= BIGGEST_ALIGNMENT
3280 || PUSH_ROUNDING (align) == align)
3281 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3283 /* Push padding now if padding above and stack grows down,
3284 or if padding below and stack grows up.
3285 But if space already allocated, this has already been done. */
3286 if (extra && args_addr == 0
3287 && where_pad != none && where_pad != stack_direction)
3288 anti_adjust_stack (GEN_INT (extra));
3290 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3292 if (current_function_check_memory_usage && ! in_check_memory_usage)
3294 rtx temp;
3296 in_check_memory_usage = 1;
3297 temp = get_push_address (INTVAL (size) - used);
3298 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3299 emit_library_call (chkr_copy_bitmap_libfunc,
3300 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3301 Pmode, XEXP (xinner, 0), Pmode,
3302 GEN_INT (INTVAL (size) - used),
3303 TYPE_MODE (sizetype));
3304 else
3305 emit_library_call (chkr_set_right_libfunc,
3306 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3307 Pmode, GEN_INT (INTVAL (size) - used),
3308 TYPE_MODE (sizetype),
3309 GEN_INT (MEMORY_USE_RW),
3310 TYPE_MODE (integer_type_node));
3311 in_check_memory_usage = 0;
3314 else
3315 #endif /* PUSH_ROUNDING */
3317 rtx target;
3319 /* Otherwise make space on the stack and copy the data
3320 to the address of that space. */
3322 /* Deduct words put into registers from the size we must copy. */
3323 if (partial != 0)
3325 if (GET_CODE (size) == CONST_INT)
3326 size = GEN_INT (INTVAL (size) - used);
3327 else
3328 size = expand_binop (GET_MODE (size), sub_optab, size,
3329 GEN_INT (used), NULL_RTX, 0,
3330 OPTAB_LIB_WIDEN);
3333 /* Get the address of the stack space.
3334 In this case, we do not deal with EXTRA separately.
3335 A single stack adjust will do. */
3336 if (! args_addr)
3338 temp = push_block (size, extra, where_pad == downward);
3339 extra = 0;
3341 else if (GET_CODE (args_so_far) == CONST_INT)
3342 temp = memory_address (BLKmode,
3343 plus_constant (args_addr,
3344 skip + INTVAL (args_so_far)));
3345 else
3346 temp = memory_address (BLKmode,
3347 plus_constant (gen_rtx_PLUS (Pmode,
3348 args_addr,
3349 args_so_far),
3350 skip));
3351 if (current_function_check_memory_usage && ! in_check_memory_usage)
3353 in_check_memory_usage = 1;
3354 target = copy_to_reg (temp);
3355 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3356 emit_library_call (chkr_copy_bitmap_libfunc,
3357 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3358 target, Pmode,
3359 XEXP (xinner, 0), Pmode,
3360 size, TYPE_MODE (sizetype));
3361 else
3362 emit_library_call (chkr_set_right_libfunc,
3363 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3364 target, Pmode,
3365 size, TYPE_MODE (sizetype),
3366 GEN_INT (MEMORY_USE_RW),
3367 TYPE_MODE (integer_type_node));
3368 in_check_memory_usage = 0;
3371 target = gen_rtx_MEM (BLKmode, temp);
3373 if (type != 0)
3375 set_mem_attributes (target, type, 1);
3376 /* Function incoming arguments may overlap with sibling call
3377 outgoing arguments and we cannot allow reordering of reads
3378 from function arguments with stores to outgoing arguments
3379 of sibling calls. */
3380 MEM_ALIAS_SET (target) = 0;
3383 /* TEMP is the address of the block. Copy the data there. */
3384 if (GET_CODE (size) == CONST_INT
3385 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3387 move_by_pieces (target, xinner, INTVAL (size), align);
3388 goto ret;
3390 else
3392 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3393 enum machine_mode mode;
3395 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3396 mode != VOIDmode;
3397 mode = GET_MODE_WIDER_MODE (mode))
3399 enum insn_code code = movstr_optab[(int) mode];
3400 insn_operand_predicate_fn pred;
3402 if (code != CODE_FOR_nothing
3403 && ((GET_CODE (size) == CONST_INT
3404 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3405 <= (GET_MODE_MASK (mode) >> 1)))
3406 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3407 && (!(pred = insn_data[(int) code].operand[0].predicate)
3408 || ((*pred) (target, BLKmode)))
3409 && (!(pred = insn_data[(int) code].operand[1].predicate)
3410 || ((*pred) (xinner, BLKmode)))
3411 && (!(pred = insn_data[(int) code].operand[3].predicate)
3412 || ((*pred) (opalign, VOIDmode))))
3414 rtx op2 = convert_to_mode (mode, size, 1);
3415 rtx last = get_last_insn ();
3416 rtx pat;
3418 pred = insn_data[(int) code].operand[2].predicate;
3419 if (pred != 0 && ! (*pred) (op2, mode))
3420 op2 = copy_to_mode_reg (mode, op2);
3422 pat = GEN_FCN ((int) code) (target, xinner,
3423 op2, opalign);
3424 if (pat)
3426 emit_insn (pat);
3427 goto ret;
3429 else
3430 delete_insns_since (last);
3435 if (!ACCUMULATE_OUTGOING_ARGS)
3437 /* If the source is referenced relative to the stack pointer,
3438 copy it to another register to stabilize it. We do not need
3439 to do this if we know that we won't be changing sp. */
3441 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3442 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3443 temp = copy_to_reg (temp);
3446 /* Make inhibit_defer_pop nonzero around the library call
3447 to force it to pop the bcopy-arguments right away. */
3448 NO_DEFER_POP;
3449 #ifdef TARGET_MEM_FUNCTIONS
3450 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3451 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3452 convert_to_mode (TYPE_MODE (sizetype),
3453 size, TREE_UNSIGNED (sizetype)),
3454 TYPE_MODE (sizetype));
3455 #else
3456 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3457 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3458 convert_to_mode (TYPE_MODE (integer_type_node),
3459 size,
3460 TREE_UNSIGNED (integer_type_node)),
3461 TYPE_MODE (integer_type_node));
3462 #endif
3463 OK_DEFER_POP;
3466 else if (partial > 0)
3468 /* Scalar partly in registers. */
3470 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3471 int i;
3472 int not_stack;
3473 /* # words of start of argument
3474 that we must make space for but need not store. */
3475 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3476 int args_offset = INTVAL (args_so_far);
3477 int skip;
3479 /* Push padding now if padding above and stack grows down,
3480 or if padding below and stack grows up.
3481 But if space already allocated, this has already been done. */
3482 if (extra && args_addr == 0
3483 && where_pad != none && where_pad != stack_direction)
3484 anti_adjust_stack (GEN_INT (extra));
3486 /* If we make space by pushing it, we might as well push
3487 the real data. Otherwise, we can leave OFFSET nonzero
3488 and leave the space uninitialized. */
3489 if (args_addr == 0)
3490 offset = 0;
3492 /* Now NOT_STACK gets the number of words that we don't need to
3493 allocate on the stack. */
3494 not_stack = partial - offset;
3496 /* If the partial register-part of the arg counts in its stack size,
3497 skip the part of stack space corresponding to the registers.
3498 Otherwise, start copying to the beginning of the stack space,
3499 by setting SKIP to 0. */
3500 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3502 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3503 x = validize_mem (force_const_mem (mode, x));
3505 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3506 SUBREGs of such registers are not allowed. */
3507 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3508 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3509 x = copy_to_reg (x);
3511 /* Loop over all the words allocated on the stack for this arg. */
3512 /* We can do it by words, because any scalar bigger than a word
3513 has a size a multiple of a word. */
3514 #ifndef PUSH_ARGS_REVERSED
3515 for (i = not_stack; i < size; i++)
3516 #else
3517 for (i = size - 1; i >= not_stack; i--)
3518 #endif
3519 if (i >= not_stack + offset)
3520 emit_push_insn (operand_subword_force (x, i, mode),
3521 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3522 0, args_addr,
3523 GEN_INT (args_offset + ((i - not_stack + skip)
3524 * UNITS_PER_WORD)),
3525 reg_parm_stack_space, alignment_pad);
3527 else
3529 rtx addr;
3530 rtx target = NULL_RTX;
3531 rtx dest;
3533 /* Push padding now if padding above and stack grows down,
3534 or if padding below and stack grows up.
3535 But if space already allocated, this has already been done. */
3536 if (extra && args_addr == 0
3537 && where_pad != none && where_pad != stack_direction)
3538 anti_adjust_stack (GEN_INT (extra));
3540 #ifdef PUSH_ROUNDING
3541 if (args_addr == 0 && PUSH_ARGS)
3542 emit_single_push_insn (mode, x, type);
3543 else
3544 #endif
3546 if (GET_CODE (args_so_far) == CONST_INT)
3547 addr
3548 = memory_address (mode,
3549 plus_constant (args_addr,
3550 INTVAL (args_so_far)));
3551 else
3552 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3553 args_so_far));
3554 target = addr;
3555 dest = gen_rtx_MEM (mode, addr);
3556 if (type != 0)
3558 set_mem_attributes (dest, type, 1);
3559 /* Function incoming arguments may overlap with sibling call
3560 outgoing arguments and we cannot allow reordering of reads
3561 from function arguments with stores to outgoing arguments
3562 of sibling calls. */
3563 MEM_ALIAS_SET (dest) = 0;
3566 emit_move_insn (dest, x);
3570 if (current_function_check_memory_usage && ! in_check_memory_usage)
3572 in_check_memory_usage = 1;
3573 if (target == 0)
3574 target = get_push_address (GET_MODE_SIZE (mode));
3576 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3577 emit_library_call (chkr_copy_bitmap_libfunc,
3578 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3579 Pmode, XEXP (x, 0), Pmode,
3580 GEN_INT (GET_MODE_SIZE (mode)),
3581 TYPE_MODE (sizetype));
3582 else
3583 emit_library_call (chkr_set_right_libfunc,
3584 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3585 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3586 TYPE_MODE (sizetype),
3587 GEN_INT (MEMORY_USE_RW),
3588 TYPE_MODE (integer_type_node));
3589 in_check_memory_usage = 0;
3593 ret:
3594 /* If part should go in registers, copy that part
3595 into the appropriate registers. Do this now, at the end,
3596 since mem-to-mem copies above may do function calls. */
3597 if (partial > 0 && reg != 0)
3599 /* Handle calls that pass values in multiple non-contiguous locations.
3600 The Irix 6 ABI has examples of this. */
3601 if (GET_CODE (reg) == PARALLEL)
3602 emit_group_load (reg, x, -1, align); /* ??? size? */
3603 else
3604 move_block_to_reg (REGNO (reg), x, partial, mode);
3607 if (extra && args_addr == 0 && where_pad == stack_direction)
3608 anti_adjust_stack (GEN_INT (extra));
3610 if (alignment_pad && args_addr == 0)
3611 anti_adjust_stack (alignment_pad);
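/* An illustrative sketch with placeholder names: calls.c drives
   emit_push_insn once per outgoing argument, roughly as

     emit_push_insn (arg_rtx, arg_mode, arg_type, size_rtx, align,
		     partial, reg, extra, args_addr, args_so_far,
		     reg_parm_stack_space, alignment_pad);

   where every lowercase name stands for per-argument data the caller has
   already computed.  When PARTIAL is nonzero, the first PARTIAL words end
   up in REG (via the emit_group_load/move_block_to_reg calls above) and
   only the remainder is pushed.  */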
3614 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3615 operations. */
3617 static rtx
3618 get_subtarget (x)
3619 rtx x;
3621 return ((x == 0
3622 /* Only registers can be subtargets. */
3623 || GET_CODE (x) != REG
3624 /* If the register is readonly, it can't be set more than once. */
3625 || RTX_UNCHANGING_P (x)
3626 /* Don't use hard regs to avoid extending their life. */
3627 || REGNO (x) < FIRST_PSEUDO_REGISTER
3628 /* Avoid subtargets inside loops,
3629 since they hide some invariant expressions. */
3630 || preserve_subexpressions_p ())
3631 ? 0 : x);
3634 /* Expand an assignment that stores the value of FROM into TO.
3635 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3636 (This may contain a QUEUED rtx;
3637 if the value is constant, this rtx is a constant.)
3638 Otherwise, the returned value is NULL_RTX.
3640 SUGGEST_REG is no longer actually used.
3641 It used to mean, copy the value through a register
3642 and return that register, if that is possible.
3643 We now use WANT_VALUE to decide whether to do this. */
rtx
3646 expand_assignment (to, from, want_value, suggest_reg)
3647 tree to, from;
3648 int want_value;
3649 int suggest_reg ATTRIBUTE_UNUSED;
3651 register rtx to_rtx = 0;
3652 rtx result;
3654 /* Don't crash if the lhs of the assignment was erroneous. */
3656 if (TREE_CODE (to) == ERROR_MARK)
3658 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3659 return want_value ? result : NULL_RTX;
3662 /* Assignment of a structure component needs special treatment
3663 if the structure component's rtx is not simply a MEM.
3664 Assignment of an array element at a constant index, and assignment of
3665 an array element in an unaligned packed structure field, has the same
3666 problem. */
3668 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3669 || TREE_CODE (to) == ARRAY_REF)
3671 enum machine_mode mode1;
3672 HOST_WIDE_INT bitsize, bitpos;
3673 tree offset;
3674 int unsignedp;
3675 int volatilep = 0;
3676 tree tem;
3677 unsigned int alignment;
3679 push_temp_slots ();
3680 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3681 &unsignedp, &volatilep, &alignment);
3683 /* If we are going to use store_bit_field and extract_bit_field,
3684 make sure to_rtx will be safe for multiple use. */
3686 if (mode1 == VOIDmode && want_value)
3687 tem = stabilize_reference (tem);
3689 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3690 if (offset != 0)
3692 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3694 if (GET_CODE (to_rtx) != MEM)
3695 abort ();
3697 if (GET_MODE (offset_rtx) != ptr_mode)
3699 #ifdef POINTERS_EXTEND_UNSIGNED
3700 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3701 #else
3702 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3703 #endif
3706 /* A constant address in TO_RTX can have VOIDmode, we must not try
3707 to call force_reg for that case. Avoid that case. */
3708 if (GET_CODE (to_rtx) == MEM
3709 && GET_MODE (to_rtx) == BLKmode
3710 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3711 && bitsize
3712 && (bitpos % bitsize) == 0
3713 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3714 && alignment == GET_MODE_ALIGNMENT (mode1))
3716 rtx temp = change_address (to_rtx, mode1,
3717 plus_constant (XEXP (to_rtx, 0),
3718 (bitpos /
3719 BITS_PER_UNIT)));
3720 if (GET_CODE (XEXP (temp, 0)) == REG)
3721 to_rtx = temp;
3722 else
3723 to_rtx = change_address (to_rtx, mode1,
3724 force_reg (GET_MODE (XEXP (temp, 0)),
3725 XEXP (temp, 0)));
3726 bitpos = 0;
3729 to_rtx = change_address (to_rtx, VOIDmode,
3730 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3731 force_reg (ptr_mode,
3732 offset_rtx)));
3735 if (volatilep)
3737 if (GET_CODE (to_rtx) == MEM)
3739 /* When the offset is zero, to_rtx is the address of the
3740 structure we are storing into, and hence may be shared.
3741 We must make a new MEM before setting the volatile bit. */
3742 if (offset == 0)
3743 to_rtx = copy_rtx (to_rtx);
3745 MEM_VOLATILE_P (to_rtx) = 1;
3747 #if 0 /* This was turned off because, when a field is volatile
3748 in an object which is not volatile, the object may be in a register,
3749 and then we would abort over here. */
3750 else
3751 abort ();
3752 #endif
3755 if (TREE_CODE (to) == COMPONENT_REF
3756 && TREE_READONLY (TREE_OPERAND (to, 1)))
3758 if (offset == 0)
3759 to_rtx = copy_rtx (to_rtx);
3761 RTX_UNCHANGING_P (to_rtx) = 1;
3764 /* Check the access. */
3765 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3767 rtx to_addr;
3768 int size;
3769 int best_mode_size;
3770 enum machine_mode best_mode;
3772 best_mode = get_best_mode (bitsize, bitpos,
3773 TYPE_ALIGN (TREE_TYPE (tem)),
3774 mode1, volatilep);
3775 if (best_mode == VOIDmode)
3776 best_mode = QImode;
3778 best_mode_size = GET_MODE_BITSIZE (best_mode);
3779 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3780 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3781 size *= GET_MODE_SIZE (best_mode);
3783 /* Check the access right of the pointer. */
3784 in_check_memory_usage = 1;
3785 if (size)
3786 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3787 VOIDmode, 3, to_addr, Pmode,
3788 GEN_INT (size), TYPE_MODE (sizetype),
3789 GEN_INT (MEMORY_USE_WO),
3790 TYPE_MODE (integer_type_node));
3791 in_check_memory_usage = 0;
3794 /* If this is a varying-length object, we must get the address of
3795 the source and do an explicit block move. */
3796 if (bitsize < 0)
3798 unsigned int from_align;
3799 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3800 rtx inner_to_rtx
3801 = change_address (to_rtx, VOIDmode,
3802 plus_constant (XEXP (to_rtx, 0),
3803 bitpos / BITS_PER_UNIT));
3805 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3806 MIN (alignment, from_align));
3807 free_temp_slots ();
3808 pop_temp_slots ();
3809 return to_rtx;
3811 else
3813 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3814 (want_value
3815 /* Spurious cast for HPUX compiler. */
3816 ? ((enum machine_mode)
3817 TYPE_MODE (TREE_TYPE (to)))
3818 : VOIDmode),
3819 unsignedp,
3820 alignment,
3821 int_size_in_bytes (TREE_TYPE (tem)),
3822 get_alias_set (to));
3824 preserve_temp_slots (result);
3825 free_temp_slots ();
3826 pop_temp_slots ();
3828 /* If the value is meaningful, convert RESULT to the proper mode.
3829 Otherwise, return nothing. */
3830 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3831 TYPE_MODE (TREE_TYPE (from)),
3832 result,
3833 TREE_UNSIGNED (TREE_TYPE (to)))
3834 : NULL_RTX);
3838 /* If the rhs is a function call and its value is not an aggregate,
3839 call the function before we start to compute the lhs.
3840 This is needed for correct code for cases such as
3841 val = setjmp (buf) on machines where reference to val
3842 requires loading up part of an address in a separate insn.
3844 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3845 since it might be a promoted variable where the zero- or sign- extension
3846 needs to be done. Handling this in the normal way is safe because no
3847 computation is done before the call. */
3848 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3849 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3850 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3851 && GET_CODE (DECL_RTL (to)) == REG))
3853 rtx value;
3855 push_temp_slots ();
3856 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3857 if (to_rtx == 0)
3858 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3860 /* Handle calls that return values in multiple non-contiguous locations.
3861 The Irix 6 ABI has examples of this. */
3862 if (GET_CODE (to_rtx) == PARALLEL)
3863 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3864 TYPE_ALIGN (TREE_TYPE (from)));
3865 else if (GET_MODE (to_rtx) == BLKmode)
3866 emit_block_move (to_rtx, value, expr_size (from),
3867 TYPE_ALIGN (TREE_TYPE (from)));
3868 else
3870 #ifdef POINTERS_EXTEND_UNSIGNED
3871 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3872 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3873 value = convert_memory_address (GET_MODE (to_rtx), value);
3874 #endif
3875 emit_move_insn (to_rtx, value);
3877 preserve_temp_slots (to_rtx);
3878 free_temp_slots ();
3879 pop_temp_slots ();
3880 return want_value ? to_rtx : NULL_RTX;
3883 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3884 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3886 if (to_rtx == 0)
3888 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3889 if (GET_CODE (to_rtx) == MEM)
3890 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3893 /* Don't move directly into a return register. */
3894 if (TREE_CODE (to) == RESULT_DECL
3895 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3897 rtx temp;
3899 push_temp_slots ();
3900 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3902 if (GET_CODE (to_rtx) == PARALLEL)
3903 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3904 TYPE_ALIGN (TREE_TYPE (from)));
3905 else
3906 emit_move_insn (to_rtx, temp);
3908 preserve_temp_slots (to_rtx);
3909 free_temp_slots ();
3910 pop_temp_slots ();
3911 return want_value ? to_rtx : NULL_RTX;
3914 /* In case we are returning the contents of an object which overlaps
3915 the place the value is being stored, use a safe function when copying
3916 a value through a pointer into a structure value return block. */
3917 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3918 && current_function_returns_struct
3919 && !current_function_returns_pcc_struct)
3921 rtx from_rtx, size;
3923 push_temp_slots ();
3924 size = expr_size (from);
3925 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3926 EXPAND_MEMORY_USE_DONT);
3928 /* Copy the rights of the bitmap. */
3929 if (current_function_check_memory_usage)
3930 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3931 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3932 XEXP (from_rtx, 0), Pmode,
3933 convert_to_mode (TYPE_MODE (sizetype),
3934 size, TREE_UNSIGNED (sizetype)),
3935 TYPE_MODE (sizetype));
3937 #ifdef TARGET_MEM_FUNCTIONS
3938 emit_library_call (memmove_libfunc, LCT_NORMAL,
3939 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3940 XEXP (from_rtx, 0), Pmode,
3941 convert_to_mode (TYPE_MODE (sizetype),
3942 size, TREE_UNSIGNED (sizetype)),
3943 TYPE_MODE (sizetype));
3944 #else
3945 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3946 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3947 XEXP (to_rtx, 0), Pmode,
3948 convert_to_mode (TYPE_MODE (integer_type_node),
3949 size, TREE_UNSIGNED (integer_type_node)),
3950 TYPE_MODE (integer_type_node));
3951 #endif
3953 preserve_temp_slots (to_rtx);
3954 free_temp_slots ();
3955 pop_temp_slots ();
3956 return want_value ? to_rtx : NULL_RTX;
3959 /* Compute FROM and store the value in the rtx we got. */
3961 push_temp_slots ();
3962 result = store_expr (from, to_rtx, want_value);
3963 preserve_temp_slots (result);
3964 free_temp_slots ();
3965 pop_temp_slots ();
3966 return want_value ? result : NULL_RTX;
3969 /* Generate code for computing expression EXP,
3970 and storing the value into TARGET.
3971 TARGET may contain a QUEUED rtx.
3973 If WANT_VALUE is nonzero, return a copy of the value
3974 not in TARGET, so that we can be sure to use the proper
3975 value in a containing expression even if TARGET has something
3976 else stored in it. If possible, we copy the value through a pseudo
3977 and return that pseudo. Or, if the value is constant, we try to
3978 return the constant. In some cases, we return a pseudo
3979 copied *from* TARGET.
3981 If the mode is BLKmode then we may return TARGET itself.
3982 It turns out that in BLKmode it doesn't cause a problem,
3983 because C has no operators that could combine two different
3984 assignments into the same BLKmode object with different values
3985 with no sequence point. Will other languages need this to
3986 be more thorough?
3988 If WANT_VALUE is 0, we return NULL, to make sure
3989 to catch quickly any cases where the caller uses the value
3990 and fails to set WANT_VALUE. */
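/* As an illustration (not from the original comment): for a nested
   assignment such as

	a = (b = c);

   the outer assignment needs the value of the inner one, so the inner
   store is typically performed with WANT_VALUE nonzero and hands back a
   pseudo (or the constant) holding the stored value, rather than the
   possibly-volatile target B itself.  */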
3993 store_expr (exp, target, want_value)
3994 register tree exp;
3995 register rtx target;
3996 int want_value;
3998 register rtx temp;
3999 int dont_return_target = 0;
4000 int dont_store_target = 0;
4002 if (TREE_CODE (exp) == COMPOUND_EXPR)
4004 /* Perform first part of compound expression, then assign from second
4005 part. */
4006 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4007 emit_queue ();
4008 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4010 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4012 /* For conditional expression, get safe form of the target. Then
4013 test the condition, doing the appropriate assignment on either
4014 side. This avoids the creation of unnecessary temporaries.
4015 For non-BLKmode, it is more efficient not to do this. */
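/* A sketch of the code shape emitted below, assuming a BLKmode
   assignment such as "s = flag ? s1 : s2":

	if (! flag) goto lab1;
	<store s1 into TARGET>; goto lab2;
     lab1:
	<store s2 into TARGET>;
     lab2:

   Each arm is stored directly into TARGET; no temporary aggregate is
   created.  */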
4017 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4019 emit_queue ();
4020 target = protect_from_queue (target, 1);
4022 do_pending_stack_adjust ();
4023 NO_DEFER_POP;
4024 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4025 start_cleanup_deferral ();
4026 store_expr (TREE_OPERAND (exp, 1), target, 0);
4027 end_cleanup_deferral ();
4028 emit_queue ();
4029 emit_jump_insn (gen_jump (lab2));
4030 emit_barrier ();
4031 emit_label (lab1);
4032 start_cleanup_deferral ();
4033 store_expr (TREE_OPERAND (exp, 2), target, 0);
4034 end_cleanup_deferral ();
4035 emit_queue ();
4036 emit_label (lab2);
4037 OK_DEFER_POP;
4039 return want_value ? target : NULL_RTX;
4041 else if (queued_subexp_p (target))
4042 /* If target contains a postincrement, let's not risk
4043 using it as the place to generate the rhs. */
4045 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4047 /* Expand EXP into a new pseudo. */
4048 temp = gen_reg_rtx (GET_MODE (target));
4049 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4051 else
4052 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4054 /* If target is volatile, ANSI requires accessing the value
4055 *from* the target, if it is accessed. So make that happen.
4056 In no case return the target itself. */
4057 if (! MEM_VOLATILE_P (target) && want_value)
4058 dont_return_target = 1;
4060 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4061 && GET_MODE (target) != BLKmode)
4062 /* If target is in memory and caller wants value in a register instead,
4063 arrange that. Pass TARGET as target for expand_expr so that,
4064 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4065 We know expand_expr will not use the target in that case.
4066 Don't do this if TARGET is volatile because we are supposed
4067 to write it and then read it. */
4069 temp = expand_expr (exp, target, GET_MODE (target), 0);
4070 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4072 /* If TEMP is already in the desired TARGET, only copy it from
4073 memory and don't store it there again. */
4074 if (temp == target
4075 || (rtx_equal_p (temp, target)
4076 && ! side_effects_p (temp) && ! side_effects_p (target)))
4077 dont_store_target = 1;
4078 temp = copy_to_reg (temp);
4080 dont_return_target = 1;
4082 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4083 /* If this is a scalar in a register that is stored in a wider mode
4084 than the declared mode, compute the result into its declared mode
4085 and then convert to the wider mode. Our value is the computed
4086 expression. */
4088 /* If we don't want a value, we can do the conversion inside EXP,
4089 which will often result in some optimizations. Do the conversion
4090 in two steps: first change the signedness, if needed, then
4091 the extend. But don't do this if the type of EXP is a subtype
4092 of something else since then the conversion might involve
4093 more than just converting modes. */
4094 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4095 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4097 if (TREE_UNSIGNED (TREE_TYPE (exp))
4098 != SUBREG_PROMOTED_UNSIGNED_P (target))
4100 = convert
4101 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4102 TREE_TYPE (exp)),
4103 exp);
4105 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4106 SUBREG_PROMOTED_UNSIGNED_P (target)),
4107 exp);
4110 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4112 /* If TEMP is a volatile MEM and we want a result value, make
4113 the access now so it gets done only once. Likewise if
4114 it contains TARGET. */
4115 if (GET_CODE (temp) == MEM && want_value
4116 && (MEM_VOLATILE_P (temp)
4117 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4118 temp = copy_to_reg (temp);
4120 /* If TEMP is a VOIDmode constant, use convert_modes to make
4121 sure that we properly convert it. */
4122 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4123 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4124 TYPE_MODE (TREE_TYPE (exp)), temp,
4125 SUBREG_PROMOTED_UNSIGNED_P (target));
4127 convert_move (SUBREG_REG (target), temp,
4128 SUBREG_PROMOTED_UNSIGNED_P (target));
4130 /* If we promoted a constant, change the mode back down to match
4131 target. Otherwise, the caller might get confused by a result whose
4132 mode is larger than expected. */
4134 if (want_value && GET_MODE (temp) != GET_MODE (target)
4135 && GET_MODE (temp) != VOIDmode)
4137 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4138 SUBREG_PROMOTED_VAR_P (temp) = 1;
4139 SUBREG_PROMOTED_UNSIGNED_P (temp)
4140 = SUBREG_PROMOTED_UNSIGNED_P (target);
4143 return want_value ? temp : NULL_RTX;
4145 else
4147 temp = expand_expr (exp, target, GET_MODE (target), 0);
4148 /* Return TARGET if it's a specified hardware register.
4149 If TARGET is a volatile mem ref, either return TARGET
4150 or return a reg copied *from* TARGET; ANSI requires this.
4152 Otherwise, if TEMP is not TARGET, return TEMP
4153 if it is constant (for efficiency),
4154 or if we really want the correct value. */
4155 if (!(target && GET_CODE (target) == REG
4156 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4157 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4158 && ! rtx_equal_p (temp, target)
4159 && (CONSTANT_P (temp) || want_value))
4160 dont_return_target = 1;
4163 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4164 the same as that of TARGET, adjust the constant. This is needed, for
4165 example, in case it is a CONST_DOUBLE and we want only a word-sized
4166 value. */
4167 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4168 && TREE_CODE (exp) != ERROR_MARK
4169 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4170 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4171 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4173 if (current_function_check_memory_usage
4174 && GET_CODE (target) == MEM
4175 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4177 in_check_memory_usage = 1;
4178 if (GET_CODE (temp) == MEM)
4179 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4180 VOIDmode, 3, XEXP (target, 0), Pmode,
4181 XEXP (temp, 0), Pmode,
4182 expr_size (exp), TYPE_MODE (sizetype));
4183 else
4184 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4185 VOIDmode, 3, XEXP (target, 0), Pmode,
4186 expr_size (exp), TYPE_MODE (sizetype),
4187 GEN_INT (MEMORY_USE_WO),
4188 TYPE_MODE (integer_type_node));
4189 in_check_memory_usage = 0;
4192 /* If value was not generated in the target, store it there.
4193 Convert the value to TARGET's type first if necessary. */
4194 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4195 one or both of them are volatile memory refs, we have to distinguish
4196 two cases:
4197 - expand_expr has used TARGET. In this case, we must not generate
4198 another copy. This can be detected by TARGET being equal according
4199 to == .
4200 - expand_expr has not used TARGET - that means that the source just
4201 happens to have the same RTX form. Since temp will have been created
4202 by expand_expr, it will compare unequal according to == .
4203 We must generate a copy in this case, to reach the correct number
4204 of volatile memory references. */
4206 if ((! rtx_equal_p (temp, target)
4207 || (temp != target && (side_effects_p (temp)
4208 || side_effects_p (target))))
4209 && TREE_CODE (exp) != ERROR_MARK
4210 && ! dont_store_target)
4212 target = protect_from_queue (target, 1);
4213 if (GET_MODE (temp) != GET_MODE (target)
4214 && GET_MODE (temp) != VOIDmode)
4216 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4217 if (dont_return_target)
4219 /* In this case, we will return TEMP,
4220 so make sure it has the proper mode.
4221 But don't forget to store the value into TARGET. */
4222 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4223 emit_move_insn (target, temp);
4225 else
4226 convert_move (target, temp, unsignedp);
4229 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4231 /* Handle copying a string constant into an array.
4232 The string constant may be shorter than the array.
4233 So copy just the string's actual length, and clear the rest. */
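/* For instance, for

	char buf[10] = "hi";

   the string constant occupies 3 bytes (including the trailing NUL),
   so 3 bytes are copied and the remaining 7 are cleared below.  */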
4234 rtx size;
4235 rtx addr;
4237 /* Get the size of the data type of the string,
4238 which is actually the size of the target. */
4239 size = expr_size (exp);
4240 if (GET_CODE (size) == CONST_INT
4241 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4242 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4243 else
4245 /* Compute the size of the data to copy from the string. */
4246 tree copy_size
4247 = size_binop (MIN_EXPR,
4248 make_tree (sizetype, size),
4249 size_int (TREE_STRING_LENGTH (exp)));
4250 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4251 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4252 VOIDmode, 0);
4253 rtx label = 0;
4255 /* Copy that much. */
4256 emit_block_move (target, temp, copy_size_rtx,
4257 TYPE_ALIGN (TREE_TYPE (exp)));
4259 /* Figure out how much is left in TARGET that we have to clear.
4260 Do all calculations in ptr_mode. */
4262 addr = XEXP (target, 0);
4263 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4265 if (GET_CODE (copy_size_rtx) == CONST_INT)
4267 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4268 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4269 align = MIN (align,
4270 (unsigned int) (BITS_PER_UNIT
4271 * (INTVAL (copy_size_rtx)
4272 & - INTVAL (copy_size_rtx))));
4274 else
4276 addr = force_reg (ptr_mode, addr);
4277 addr = expand_binop (ptr_mode, add_optab, addr,
4278 copy_size_rtx, NULL_RTX, 0,
4279 OPTAB_LIB_WIDEN);
4281 size = expand_binop (ptr_mode, sub_optab, size,
4282 copy_size_rtx, NULL_RTX, 0,
4283 OPTAB_LIB_WIDEN);
4285 align = BITS_PER_UNIT;
4286 label = gen_label_rtx ();
4287 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4288 GET_MODE (size), 0, 0, label);
4290 align = MIN (align, expr_align (copy_size));
4292 if (size != const0_rtx)
4294 rtx dest = gen_rtx_MEM (BLKmode, addr);
4296 MEM_COPY_ATTRIBUTES (dest, target);
4298 /* Be sure we can write on ADDR. */
4299 in_check_memory_usage = 1;
4300 if (current_function_check_memory_usage)
4301 emit_library_call (chkr_check_addr_libfunc,
4302 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4303 addr, Pmode,
4304 size, TYPE_MODE (sizetype),
4305 GEN_INT (MEMORY_USE_WO),
4306 TYPE_MODE (integer_type_node));
4307 in_check_memory_usage = 0;
4308 clear_storage (dest, size, align);
4311 if (label)
4312 emit_label (label);
4315 /* Handle calls that return values in multiple non-contiguous locations.
4316 The Irix 6 ABI has examples of this. */
4317 else if (GET_CODE (target) == PARALLEL)
4318 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4319 TYPE_ALIGN (TREE_TYPE (exp)));
4320 else if (GET_MODE (temp) == BLKmode)
4321 emit_block_move (target, temp, expr_size (exp),
4322 TYPE_ALIGN (TREE_TYPE (exp)));
4323 else
4324 emit_move_insn (target, temp);
4327 /* If we don't want a value, return NULL_RTX. */
4328 if (! want_value)
4329 return NULL_RTX;
4331 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4332 ??? The latter test doesn't seem to make sense. */
4333 else if (dont_return_target && GET_CODE (temp) != MEM)
4334 return temp;
4336 /* Return TARGET itself if it is a hard register. */
4337 else if (want_value && GET_MODE (target) != BLKmode
4338 && ! (GET_CODE (target) == REG
4339 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4340 return copy_to_reg (target);
4342 else
4343 return target;
4346 /* Return 1 if EXP just contains zeros. */
4348 static int
4349 is_zeros_p (exp)
4350 tree exp;
4352 tree elt;
4354 switch (TREE_CODE (exp))
4356 case CONVERT_EXPR:
4357 case NOP_EXPR:
4358 case NON_LVALUE_EXPR:
4359 return is_zeros_p (TREE_OPERAND (exp, 0));
4361 case INTEGER_CST:
4362 return integer_zerop (exp);
4364 case COMPLEX_CST:
4365 return
4366 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4368 case REAL_CST:
4369 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4371 case CONSTRUCTOR:
4372 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4373 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4374 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4375 if (! is_zeros_p (TREE_VALUE (elt)))
4376 return 0;
4378 return 1;
4380 default:
4381 return 0;
4385 /* Return 1 if EXP contains mostly (3/4) zeros. */
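/* E.g. the constructor for "int v[4] = {0, 0, 0, 5}" has three zero
   elements out of four, so this returns 1, while "{0, 5, 5, 5}"
   gives 0.  */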
4387 static int
4388 mostly_zeros_p (exp)
4389 tree exp;
4391 if (TREE_CODE (exp) == CONSTRUCTOR)
4393 int elts = 0, zeros = 0;
4394 tree elt = CONSTRUCTOR_ELTS (exp);
4395 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4397 /* If there are no ranges of true bits, it is all zero. */
4398 return elt == NULL_TREE;
4400 for (; elt; elt = TREE_CHAIN (elt))
4402 /* We do not handle the case where the index is a RANGE_EXPR,
4403 so the statistic will be somewhat inaccurate.
4404 We do make a more accurate count in store_constructor itself,
4405 and since this function is only used for nested array elements,
4406 this should be close enough. */
4407 if (mostly_zeros_p (TREE_VALUE (elt)))
4408 zeros++;
4409 elts++;
4412 return 4 * zeros >= 3 * elts;
4415 return is_zeros_p (exp);
4418 /* Helper function for store_constructor.
4419 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4420 TYPE is the type of the CONSTRUCTOR, not the element type.
4421 ALIGN and CLEARED are as for store_constructor.
4422 ALIAS_SET is the alias set to use for any stores.
4424 This provides a recursive shortcut back to store_constructor when it isn't
4425 necessary to go through store_field. This is so that we can pass through
4426 the cleared field to let store_constructor know that we may not have to
4427 clear a substructure if the outer structure has already been cleared. */
4429 static void
4430 store_constructor_field (target, bitsize, bitpos,
4431 mode, exp, type, align, cleared, alias_set)
4432 rtx target;
4433 unsigned HOST_WIDE_INT bitsize;
4434 HOST_WIDE_INT bitpos;
4435 enum machine_mode mode;
4436 tree exp, type;
4437 unsigned int align;
4438 int cleared;
4439 int alias_set;
4441 if (TREE_CODE (exp) == CONSTRUCTOR
4442 && bitpos % BITS_PER_UNIT == 0
4443 /* If we have a non-zero bitpos for a register target, then we just
4444 let store_field do the bitfield handling. This is unlikely to
4445 generate unnecessary clear instructions anyway. */
4446 && (bitpos == 0 || GET_CODE (target) == MEM))
4448 if (bitpos != 0)
4449 target
4450 = change_address (target,
4451 GET_MODE (target) == BLKmode
4452 || 0 != (bitpos
4453 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4454 ? BLKmode : VOIDmode,
4455 plus_constant (XEXP (target, 0),
4456 bitpos / BITS_PER_UNIT));
4459 /* Show the alignment may no longer be what it was and update the alias
4460 set, if required. */
4461 if (bitpos != 0)
4462 align = MIN (align, (unsigned int) bitpos & - bitpos);
4463 if (GET_CODE (target) == MEM)
4464 MEM_ALIAS_SET (target) = alias_set;
4466 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4468 else
4469 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4470 int_size_in_bytes (type), alias_set);
4473 /* Store the value of constructor EXP into the rtx TARGET.
4474 TARGET is either a REG or a MEM.
4475 ALIGN is the maximum known alignment for TARGET.
4476 CLEARED is true if TARGET is known to have been zeroed.
4477 SIZE is the number of bytes of TARGET we are allowed to modify: this
4478 may not be the same as the size of EXP if we are assigning to a field
4479 which has been packed to exclude padding bits. */
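/* For example, for

	struct { int a, b, c; } s = { 1 };

   the constructor names fewer fields than the structure has, so TARGET
   is normally cleared as a whole first and only the single explicit
   element is then stored.  */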
4481 static void
4482 store_constructor (exp, target, align, cleared, size)
4483 tree exp;
4484 rtx target;
4485 unsigned int align;
4486 int cleared;
4487 HOST_WIDE_INT size;
4489 tree type = TREE_TYPE (exp);
4490 #ifdef WORD_REGISTER_OPERATIONS
4491 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4492 #endif
4494 /* We know our target cannot conflict, since safe_from_p has been called. */
4495 #if 0
4496 /* Don't try copying piece by piece into a hard register
4497 since that is vulnerable to being clobbered by EXP.
4498 Instead, construct in a pseudo register and then copy it all. */
4499 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4501 rtx temp = gen_reg_rtx (GET_MODE (target));
4502 store_constructor (exp, temp, align, cleared, size);
4503 emit_move_insn (target, temp);
4504 return;
4506 #endif
4508 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4509 || TREE_CODE (type) == QUAL_UNION_TYPE)
4511 register tree elt;
4513 /* Inform later passes that the whole union value is dead. */
4514 if ((TREE_CODE (type) == UNION_TYPE
4515 || TREE_CODE (type) == QUAL_UNION_TYPE)
4516 && ! cleared)
4518 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4520 /* If the constructor is empty, clear the union. */
4521 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4522 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4525 /* If we are building a static constructor into a register,
4526 set the initial value as zero so we can fold the value into
4527 a constant. But if more than one register is involved,
4528 this probably loses. */
4529 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4530 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4532 if (! cleared)
4533 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4535 cleared = 1;
4538 /* If the constructor has fewer fields than the structure
4539 or if we are initializing the structure to mostly zeros,
4540 clear the whole structure first. Don't do this if TARGET is a
4541 register whose mode size isn't equal to SIZE, since clear_storage
4542 can't handle this case.
4543 else if (size > 0
4544 && ((list_length (CONSTRUCTOR_ELTS (exp))
4545 != fields_length (type))
4546 || mostly_zeros_p (exp))
4547 && (GET_CODE (target) != REG
4548 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4550 if (! cleared)
4551 clear_storage (target, GEN_INT (size), align);
4553 cleared = 1;
4555 else if (! cleared)
4556 /* Inform later passes that the old value is dead. */
4557 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4559 /* Store each element of the constructor into
4560 the corresponding field of TARGET. */
4562 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4564 register tree field = TREE_PURPOSE (elt);
4565 #ifdef WORD_REGISTER_OPERATIONS
4566 tree value = TREE_VALUE (elt);
4567 #endif
4568 register enum machine_mode mode;
4569 HOST_WIDE_INT bitsize;
4570 HOST_WIDE_INT bitpos = 0;
4571 int unsignedp;
4572 tree offset;
4573 rtx to_rtx = target;
4575 /* Just ignore missing fields.
4576 We cleared the whole structure, above,
4577 if any fields are missing. */
4578 if (field == 0)
4579 continue;
4581 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4582 continue;
4584 if (host_integerp (DECL_SIZE (field), 1))
4585 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4586 else
4587 bitsize = -1;
4589 unsignedp = TREE_UNSIGNED (field);
4590 mode = DECL_MODE (field);
4591 if (DECL_BIT_FIELD (field))
4592 mode = VOIDmode;
4594 offset = DECL_FIELD_OFFSET (field);
4595 if (host_integerp (offset, 0)
4596 && host_integerp (bit_position (field), 0))
4598 bitpos = int_bit_position (field);
4599 offset = 0;
4601 else
4602 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4604 if (offset)
4606 rtx offset_rtx;
4608 if (contains_placeholder_p (offset))
4609 offset = build (WITH_RECORD_EXPR, sizetype,
4610 offset, make_tree (TREE_TYPE (exp), target));
4612 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4613 if (GET_CODE (to_rtx) != MEM)
4614 abort ();
4616 if (GET_MODE (offset_rtx) != ptr_mode)
4618 #ifdef POINTERS_EXTEND_UNSIGNED
4619 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4620 #else
4621 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4622 #endif
4625 to_rtx
4626 = change_address (to_rtx, VOIDmode,
4627 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4628 force_reg (ptr_mode,
4629 offset_rtx)));
4630 align = DECL_OFFSET_ALIGN (field);
4633 if (TREE_READONLY (field))
4635 if (GET_CODE (to_rtx) == MEM)
4636 to_rtx = copy_rtx (to_rtx);
4638 RTX_UNCHANGING_P (to_rtx) = 1;
4641 #ifdef WORD_REGISTER_OPERATIONS
4642 /* If this initializes a field that is smaller than a word, at the
4643 start of a word, try to widen it to a full word.
4644 This special case allows us to output C++ member function
4645 initializations in a form that the optimizers can understand. */
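/* E.g. a 16-bit field at bit 0 of a 32-bit word, initialized with the
   constant 3, is rewritten here as a word_mode constant (3, or 3 << 16
   on a big-endian target) so that one full-word store is emitted.
   Illustrative only; the shift depends on BITS_PER_WORD and
   BYTES_BIG_ENDIAN.  */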
4646 if (GET_CODE (target) == REG
4647 && bitsize < BITS_PER_WORD
4648 && bitpos % BITS_PER_WORD == 0
4649 && GET_MODE_CLASS (mode) == MODE_INT
4650 && TREE_CODE (value) == INTEGER_CST
4651 && exp_size >= 0
4652 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4654 tree type = TREE_TYPE (value);
4655 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4657 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4658 value = convert (type, value);
4660 if (BYTES_BIG_ENDIAN)
4661 value
4662 = fold (build (LSHIFT_EXPR, type, value,
4663 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4664 bitsize = BITS_PER_WORD;
4665 mode = word_mode;
4667 #endif
4668 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4669 TREE_VALUE (elt), type, align, cleared,
4670 (DECL_NONADDRESSABLE_P (field)
4671 && GET_CODE (to_rtx) == MEM)
4672 ? MEM_ALIAS_SET (to_rtx)
4673 : get_alias_set (TREE_TYPE (field)));
4676 else if (TREE_CODE (type) == ARRAY_TYPE)
4678 register tree elt;
4679 register int i;
4680 int need_to_clear;
4681 tree domain = TYPE_DOMAIN (type);
4682 tree elttype = TREE_TYPE (type);
4683 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4684 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4685 HOST_WIDE_INT minelt;
4686 HOST_WIDE_INT maxelt;
4688 /* If we have constant bounds for the range of the type, get them. */
4689 if (const_bounds_p)
4691 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4692 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4695 /* If the constructor has fewer elements than the array,
4696 clear the whole array first. Similarly if this is
4697 a static constructor of a non-BLKmode object. */
4698 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4699 need_to_clear = 1;
4700 else
4702 HOST_WIDE_INT count = 0, zero_count = 0;
4703 need_to_clear = ! const_bounds_p;
4705 /* This loop is a more accurate version of the loop in
4706 mostly_zeros_p (it handles RANGE_EXPR in an index).
4707 It is also needed to check for missing elements. */
4708 for (elt = CONSTRUCTOR_ELTS (exp);
4709 elt != NULL_TREE && ! need_to_clear;
4710 elt = TREE_CHAIN (elt))
4712 tree index = TREE_PURPOSE (elt);
4713 HOST_WIDE_INT this_node_count;
4715 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4717 tree lo_index = TREE_OPERAND (index, 0);
4718 tree hi_index = TREE_OPERAND (index, 1);
4720 if (! host_integerp (lo_index, 1)
4721 || ! host_integerp (hi_index, 1))
4723 need_to_clear = 1;
4724 break;
4727 this_node_count = (tree_low_cst (hi_index, 1)
4728 - tree_low_cst (lo_index, 1) + 1);
4730 else
4731 this_node_count = 1;
4733 count += this_node_count;
4734 if (mostly_zeros_p (TREE_VALUE (elt)))
4735 zero_count += this_node_count;
4738 /* Clear the entire array first if there are any missing elements,
4739 or if the incidence of zero elements is >= 75%. */
4740 if (! need_to_clear
4741 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4742 need_to_clear = 1;
4745 if (need_to_clear && size > 0)
4747 if (! cleared)
4748 clear_storage (target, GEN_INT (size), align);
4749 cleared = 1;
4751 else
4752 /* Inform later passes that the old value is dead. */
4753 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4755 /* Store each element of the constructor into
4756 the corresponding element of TARGET, determined
4757 by counting the elements. */
4758 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4759 elt;
4760 elt = TREE_CHAIN (elt), i++)
4762 register enum machine_mode mode;
4763 HOST_WIDE_INT bitsize;
4764 HOST_WIDE_INT bitpos;
4765 int unsignedp;
4766 tree value = TREE_VALUE (elt);
4767 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4768 tree index = TREE_PURPOSE (elt);
4769 rtx xtarget = target;
4771 if (cleared && is_zeros_p (value))
4772 continue;
4774 unsignedp = TREE_UNSIGNED (elttype);
4775 mode = TYPE_MODE (elttype);
4776 if (mode == BLKmode)
4777 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4778 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4779 : -1);
4780 else
4781 bitsize = GET_MODE_BITSIZE (mode);
4783 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4785 tree lo_index = TREE_OPERAND (index, 0);
4786 tree hi_index = TREE_OPERAND (index, 1);
4787 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4788 struct nesting *loop;
4789 HOST_WIDE_INT lo, hi, count;
4790 tree position;
4792 /* If the range is constant and "small", unroll the loop. */
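/* For instance, a GNU C range designator such as

	int v[16] = { [0 ... 3] = 7 };

   is one source of RANGE_EXPR indexes here.  With constant, small
   bounds the four element stores are emitted directly; otherwise the
   runtime loop in the else-branch below is built.  */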
4793 if (const_bounds_p
4794 && host_integerp (lo_index, 0)
4795 && host_integerp (hi_index, 0)
4796 && (lo = tree_low_cst (lo_index, 0),
4797 hi = tree_low_cst (hi_index, 0),
4798 count = hi - lo + 1,
4799 (GET_CODE (target) != MEM
4800 || count <= 2
4801 || (host_integerp (TYPE_SIZE (elttype), 1)
4802 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4803 <= 40 * 8)))))
4805 lo -= minelt; hi -= minelt;
4806 for (; lo <= hi; lo++)
4808 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4809 store_constructor_field
4810 (target, bitsize, bitpos, mode, value, type, align,
4811 cleared,
4812 TYPE_NONALIASED_COMPONENT (type)
4813 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4816 else
4818 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4819 loop_top = gen_label_rtx ();
4820 loop_end = gen_label_rtx ();
4822 unsignedp = TREE_UNSIGNED (domain);
4824 index = build_decl (VAR_DECL, NULL_TREE, domain);
4826 index_r
4827 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4828 &unsignedp, 0));
4829 SET_DECL_RTL (index, index_r);
4830 if (TREE_CODE (value) == SAVE_EXPR
4831 && SAVE_EXPR_RTL (value) == 0)
4833 /* Make sure value gets expanded once before the
4834 loop. */
4835 expand_expr (value, const0_rtx, VOIDmode, 0);
4836 emit_queue ();
4838 store_expr (lo_index, index_r, 0);
4839 loop = expand_start_loop (0);
4841 /* Assign value to element index. */
4842 position
4843 = convert (ssizetype,
4844 fold (build (MINUS_EXPR, TREE_TYPE (index),
4845 index, TYPE_MIN_VALUE (domain))));
4846 position = size_binop (MULT_EXPR, position,
4847 convert (ssizetype,
4848 TYPE_SIZE_UNIT (elttype)));
4850 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4851 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4852 xtarget = change_address (target, mode, addr);
4853 if (TREE_CODE (value) == CONSTRUCTOR)
4854 store_constructor (value, xtarget, align, cleared,
4855 bitsize / BITS_PER_UNIT);
4856 else
4857 store_expr (value, xtarget, 0);
4859 expand_exit_loop_if_false (loop,
4860 build (LT_EXPR, integer_type_node,
4861 index, hi_index));
4863 expand_increment (build (PREINCREMENT_EXPR,
4864 TREE_TYPE (index),
4865 index, integer_one_node), 0, 0);
4866 expand_end_loop ();
4867 emit_label (loop_end);
4870 else if ((index != 0 && ! host_integerp (index, 0))
4871 || ! host_integerp (TYPE_SIZE (elttype), 1))
4873 rtx pos_rtx, addr;
4874 tree position;
4876 if (index == 0)
4877 index = ssize_int (1);
4879 if (minelt)
4880 index = convert (ssizetype,
4881 fold (build (MINUS_EXPR, index,
4882 TYPE_MIN_VALUE (domain))));
4884 position = size_binop (MULT_EXPR, index,
4885 convert (ssizetype,
4886 TYPE_SIZE_UNIT (elttype)));
4887 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4888 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4889 xtarget = change_address (target, mode, addr);
4890 store_expr (value, xtarget, 0);
4892 else
4894 if (index != 0)
4895 bitpos = ((tree_low_cst (index, 0) - minelt)
4896 * tree_low_cst (TYPE_SIZE (elttype), 1));
4897 else
4898 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4900 store_constructor_field (target, bitsize, bitpos, mode, value,
4901 type, align, cleared,
4902 TYPE_NONALIASED_COMPONENT (type)
4903 && GET_CODE (target) == MEM
4904 ? MEM_ALIAS_SET (target) :
4905 get_alias_set (elttype));
4911 /* Set constructor assignments. */
4912 else if (TREE_CODE (type) == SET_TYPE)
4914 tree elt = CONSTRUCTOR_ELTS (exp);
4915 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4916 tree domain = TYPE_DOMAIN (type);
4917 tree domain_min, domain_max, bitlength;
4919 /* The default implementation strategy is to extract the constant
4920 parts of the constructor, use that to initialize the target,
4921 and then "or" in whatever non-constant ranges we need in addition.
4923 If a large set is all zero or all ones, it is
4924 probably better to set it using memset (if available) or bzero.
4925 Also, if a large set has just a single range, it may also be
4926 better to first clear the whole set (using
4927 bzero/memset), and then set the bits we want. */
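/* SET_TYPE constructors do not arise from C; presumably they come from
   front ends with first-class set types (e.g. CHILL or Pascal "SET OF"
   values).  This is an assumption about the callers, not something
   stated here.  */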
4929 /* Check for all zeros. */
4930 if (elt == NULL_TREE && size > 0)
4932 if (!cleared)
4933 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4934 return;
4937 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4938 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4939 bitlength = size_binop (PLUS_EXPR,
4940 size_diffop (domain_max, domain_min),
4941 ssize_int (1));
4943 nbits = tree_low_cst (bitlength, 1);
4945 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4946 are "complicated" (more than one range), initialize (the
4947 constant parts) by copying from a constant. */
4948 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4949 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4951 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4952 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4953 char *bit_buffer = (char *) alloca (nbits);
4954 HOST_WIDE_INT word = 0;
4955 unsigned int bit_pos = 0;
4956 unsigned int ibit = 0;
4957 unsigned int offset = 0; /* In bytes from beginning of set. */
4959 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4960 for (;;)
4962 if (bit_buffer[ibit])
4964 if (BYTES_BIG_ENDIAN)
4965 word |= (1 << (set_word_size - 1 - bit_pos));
4966 else
4967 word |= 1 << bit_pos;
4970 bit_pos++; ibit++;
4971 if (bit_pos >= set_word_size || ibit == nbits)
4973 if (word != 0 || ! cleared)
4975 rtx datum = GEN_INT (word);
4976 rtx to_rtx;
4978 /* The assumption here is that it is safe to use
4979 XEXP if the set is multi-word, but not if
4980 it's single-word. */
4981 if (GET_CODE (target) == MEM)
4983 to_rtx = plus_constant (XEXP (target, 0), offset);
4984 to_rtx = change_address (target, mode, to_rtx);
4986 else if (offset == 0)
4987 to_rtx = target;
4988 else
4989 abort ();
4990 emit_move_insn (to_rtx, datum);
4993 if (ibit == nbits)
4994 break;
4995 word = 0;
4996 bit_pos = 0;
4997 offset += set_word_size / BITS_PER_UNIT;
5001 else if (!cleared)
5002 /* Don't bother clearing storage if the set is all ones. */
5003 if (TREE_CHAIN (elt) != NULL_TREE
5004 || (TREE_PURPOSE (elt) == NULL_TREE
5005 ? nbits != 1
5006 : ( ! host_integerp (TREE_VALUE (elt), 0)
5007 || ! host_integerp (TREE_PURPOSE (elt), 0)
5008 || (tree_low_cst (TREE_VALUE (elt), 0)
5009 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5010 != (HOST_WIDE_INT) nbits))))
5011 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5013 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5015 /* Start of range of element or NULL. */
5016 tree startbit = TREE_PURPOSE (elt);
5017 /* End of range of element, or element value. */
5018 tree endbit = TREE_VALUE (elt);
5019 #ifdef TARGET_MEM_FUNCTIONS
5020 HOST_WIDE_INT startb, endb;
5021 #endif
5022 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5024 bitlength_rtx = expand_expr (bitlength,
5025 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5027 /* Handle non-range tuple element like [ expr ]. */
5028 if (startbit == NULL_TREE)
5030 startbit = save_expr (endbit);
5031 endbit = startbit;
5034 startbit = convert (sizetype, startbit);
5035 endbit = convert (sizetype, endbit);
5036 if (! integer_zerop (domain_min))
5038 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5039 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5041 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5042 EXPAND_CONST_ADDRESS);
5043 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5044 EXPAND_CONST_ADDRESS);
5046 if (REG_P (target))
5048 targetx
5049 = assign_temp
5050 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5051 TYPE_QUAL_CONST)),
5052 0, 1, 1);
5053 emit_move_insn (targetx, target);
5056 else if (GET_CODE (target) == MEM)
5057 targetx = target;
5058 else
5059 abort ();
5061 #ifdef TARGET_MEM_FUNCTIONS
5062 /* Optimization: If startbit and endbit are
5063 constants divisible by BITS_PER_UNIT,
5064 call memset instead. */
5065 if (TREE_CODE (startbit) == INTEGER_CST
5066 && TREE_CODE (endbit) == INTEGER_CST
5067 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5068 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5070 emit_library_call (memset_libfunc, LCT_NORMAL,
5071 VOIDmode, 3,
5072 plus_constant (XEXP (targetx, 0),
5073 startb / BITS_PER_UNIT),
5074 Pmode,
5075 constm1_rtx, TYPE_MODE (integer_type_node),
5076 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5077 TYPE_MODE (sizetype));
5079 else
5080 #endif
5081 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5082 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5083 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5084 startbit_rtx, TYPE_MODE (sizetype),
5085 endbit_rtx, TYPE_MODE (sizetype));
5087 if (REG_P (target))
5088 emit_move_insn (target, targetx);
5092 else
5093 abort ();
5096 /* Store the value of EXP (an expression tree)
5097 into a subfield of TARGET which has mode MODE and occupies
5098 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5099 If MODE is VOIDmode, it means that we are storing into a bit-field.
5101 If VALUE_MODE is VOIDmode, return nothing in particular.
5102 UNSIGNEDP is not used in this case.
5104 Otherwise, return an rtx for the value stored. This rtx
5105 has mode VALUE_MODE if that is convenient to do.
5106 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5108 ALIGN is the alignment that TARGET is known to have.
5109 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5111 ALIAS_SET is the alias set for the destination. This value will
5112 (in general) be different from that for TARGET, since TARGET is a
5113 reference to the containing structure. */
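/* For example, for a bit-field store such as

	struct { unsigned f : 3; } x;  x.f = v;

   this is reached with BITSIZE == 3, BITPOS == 0 and MODE == VOIDmode,
   so the value goes through store_bit_field rather than an ordinary
   memory reference.  (Illustrative values; layout is target-dependent.)  */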
5115 static rtx
5116 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5117 unsignedp, align, total_size, alias_set)
5118 rtx target;
5119 HOST_WIDE_INT bitsize;
5120 HOST_WIDE_INT bitpos;
5121 enum machine_mode mode;
5122 tree exp;
5123 enum machine_mode value_mode;
5124 int unsignedp;
5125 unsigned int align;
5126 HOST_WIDE_INT total_size;
5127 int alias_set;
5129 HOST_WIDE_INT width_mask = 0;
5131 if (TREE_CODE (exp) == ERROR_MARK)
5132 return const0_rtx;
5134 /* If we have nothing to store, do nothing unless the expression has
5135 side-effects. */
5136 if (bitsize == 0)
5137 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5139 if (bitsize < HOST_BITS_PER_WIDE_INT)
5140 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5142 /* If we are storing into an unaligned field of an aligned union that is
5143 in a register, we may have the mode of TARGET being an integer mode but
5144 MODE == BLKmode. In that case, get an aligned object whose size and
5145 alignment are the same as TARGET and store TARGET into it (we can avoid
5146 the store if the field being stored is the entire width of TARGET). Then
5147 call ourselves recursively to store the field into a BLKmode version of
5148 that object. Finally, load from the object into TARGET. This is not
5149 very efficient in general, but should only be slightly more expensive
5150 than the otherwise-required unaligned accesses. Perhaps this can be
5151 cleaned up later. */
5153 if (mode == BLKmode
5154 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5156 rtx object
5157 = assign_temp
5158 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5159 TYPE_QUAL_CONST),
5160 0, 1, 1);
5161 rtx blk_object = copy_rtx (object);
5163 PUT_MODE (blk_object, BLKmode);
5165 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5166 emit_move_insn (object, target);
5168 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5169 align, total_size, alias_set);
5171 /* Even though we aren't returning target, we need to
5172 give it the updated value. */
5173 emit_move_insn (target, object);
5175 return blk_object;
5178 if (GET_CODE (target) == CONCAT)
5180 /* We're storing into a struct containing a single __complex. */
5182 if (bitpos != 0)
5183 abort ();
5184 return store_expr (exp, target, 0);
5187 /* If the structure is in a register or if the component
5188 is a bit field, we cannot use addressing to access it.
5189 Use bit-field techniques or SUBREG to store in it. */
5191 if (mode == VOIDmode
5192 || (mode != BLKmode && ! direct_store[(int) mode]
5193 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5194 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5195 || GET_CODE (target) == REG
5196 || GET_CODE (target) == SUBREG
5197 /* If the field isn't aligned enough to store as an ordinary memref,
5198 store it as a bit field. */
5199 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5200 && (align < GET_MODE_ALIGNMENT (mode)
5201 || bitpos % GET_MODE_ALIGNMENT (mode)))
5202 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5203 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5204 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5205 /* If the RHS and field are a constant size and the size of the
5206 RHS isn't the same size as the bitfield, we must use bitfield
5207 operations. */
5208 || (bitsize >= 0
5209 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5210 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5212 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5214 /* If BITSIZE is narrower than the size of the type of EXP
5215 we will be narrowing TEMP. Normally, what's wanted are the
5216 low-order bits. However, if EXP's type is a record and this is
5217 a big-endian machine, we want the upper BITSIZE bits. */
5218 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5219 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5220 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5221 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5222 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5223 - bitsize),
5224 temp, 1);
5226 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5227 MODE. */
5228 if (mode != VOIDmode && mode != BLKmode
5229 && mode != TYPE_MODE (TREE_TYPE (exp)))
5230 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5232 /* If the modes of TARGET and TEMP are both BLKmode, both
5233 must be in memory and BITPOS must be aligned on a byte
5234 boundary. If so, we simply do a block copy. */
5235 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5237 unsigned int exp_align = expr_align (exp);
5239 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5240 || bitpos % BITS_PER_UNIT != 0)
5241 abort ();
5243 target = change_address (target, VOIDmode,
5244 plus_constant (XEXP (target, 0),
5245 bitpos / BITS_PER_UNIT));
5247 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5248 align = MIN (exp_align, align);
5250 /* Find an alignment that is consistent with the bit position. */
5251 while ((bitpos % align) != 0)
5252 align >>= 1;
5254 emit_block_move (target, temp,
5255 bitsize == -1 ? expr_size (exp)
5256 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5257 / BITS_PER_UNIT),
5258 align);
5260 return value_mode == VOIDmode ? const0_rtx : target;
5263 /* Store the value in the bitfield. */
5264 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5265 if (value_mode != VOIDmode)
5267 /* The caller wants an rtx for the value. */
5268 /* If possible, avoid refetching from the bitfield itself. */
5269 if (width_mask != 0
5270 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5272 tree count;
5273 enum machine_mode tmode;
5275 if (unsignedp)
5276 return expand_and (temp,
5277 GEN_INT
5278 (trunc_int_for_mode
5279 (width_mask,
5280 GET_MODE (temp) == VOIDmode
5281 ? value_mode
5282 : GET_MODE (temp))), NULL_RTX);
5283 tmode = GET_MODE (temp);
5284 if (tmode == VOIDmode)
5285 tmode = value_mode;
5286 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5287 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5288 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5290 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5291 NULL_RTX, value_mode, 0, align,
5292 total_size);
5294 return const0_rtx;
5296 else
5298 rtx addr = XEXP (target, 0);
5299 rtx to_rtx;
5301 /* If a value is wanted, it must be the lhs;
5302 so make the address stable for multiple use. */
5304 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5305 && ! CONSTANT_ADDRESS_P (addr)
5306 /* A frame-pointer reference is already stable. */
5307 && ! (GET_CODE (addr) == PLUS
5308 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5309 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5310 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5311 addr = copy_to_reg (addr);
5313 /* Now build a reference to just the desired component. */
5315 to_rtx = copy_rtx (change_address (target, mode,
5316 plus_constant (addr,
5317 (bitpos
5318 / BITS_PER_UNIT))));
5319 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5320 MEM_ALIAS_SET (to_rtx) = alias_set;
5322 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5326 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5327 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5328 ARRAY_REFs and find the ultimate containing object, which we return.
5330 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5331 bit position, and *PUNSIGNEDP to the signedness of the field.
5332 If the position of the field is variable, we store a tree
5333 giving the variable offset (in units) in *POFFSET.
5334 This offset is in addition to the bit position.
5335 If the position is not variable, we store 0 in *POFFSET.
5336 We set *PALIGNMENT to the alignment of the address that will be
5337 computed. This is the alignment of the thing we return if *POFFSET
5338 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5340 If any of the extraction expressions is volatile,
5341 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5343 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5344 is a mode that can be used to access the field. In that case, *PBITSIZE
5345 is redundant.
5347 If the field describes a variable-sized object, *PMODE is set to
5348 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5349 this case, but the address of the object can be found. */
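/* As an illustration, assuming a typical 32-bit layout: for

	struct { int a; int b; } s;  ... s.b ...

   this returns the VAR_DECL for `s' with *PBITSIZE = 32, *PBITPOS = 32,
   *POFFSET = 0 and *PMODE = SImode.  For an ARRAY_REF with a variable
   index, the variable part of the byte offset is returned in *POFFSET
   instead.  */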
5351 tree
5352 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5353 punsignedp, pvolatilep, palignment)
5354 tree exp;
5355 HOST_WIDE_INT *pbitsize;
5356 HOST_WIDE_INT *pbitpos;
5357 tree *poffset;
5358 enum machine_mode *pmode;
5359 int *punsignedp;
5360 int *pvolatilep;
5361 unsigned int *palignment;
5363 tree size_tree = 0;
5364 enum machine_mode mode = VOIDmode;
5365 tree offset = size_zero_node;
5366 tree bit_offset = bitsize_zero_node;
5367 unsigned int alignment = BIGGEST_ALIGNMENT;
5368 tree tem;
5370 /* First get the mode, signedness, and size. We do this from just the
5371 outermost expression. */
5372 if (TREE_CODE (exp) == COMPONENT_REF)
5374 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5375 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5376 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5378 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5380 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5382 size_tree = TREE_OPERAND (exp, 1);
5383 *punsignedp = TREE_UNSIGNED (exp);
5385 else
5387 mode = TYPE_MODE (TREE_TYPE (exp));
5388 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5390 if (mode == BLKmode)
5391 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5392 else
5393 *pbitsize = GET_MODE_BITSIZE (mode);
5396 if (size_tree != 0)
5398 if (! host_integerp (size_tree, 1))
5399 mode = BLKmode, *pbitsize = -1;
5400 else
5401 *pbitsize = tree_low_cst (size_tree, 1);
5404 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5405 and find the ultimate containing object. */
5406 while (1)
5408 if (TREE_CODE (exp) == BIT_FIELD_REF)
5409 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5410 else if (TREE_CODE (exp) == COMPONENT_REF)
5412 tree field = TREE_OPERAND (exp, 1);
5413 tree this_offset = DECL_FIELD_OFFSET (field);
5415 /* If this field hasn't been filled in yet, don't go
5416 past it. This should only happen when folding expressions
5417 made during type construction. */
5418 if (this_offset == 0)
5419 break;
5420 else if (! TREE_CONSTANT (this_offset)
5421 && contains_placeholder_p (this_offset))
5422 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5424 offset = size_binop (PLUS_EXPR, offset, this_offset);
5425 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5426 DECL_FIELD_BIT_OFFSET (field));
5428 if (! host_integerp (offset, 0))
5429 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5432 else if (TREE_CODE (exp) == ARRAY_REF)
5434 tree index = TREE_OPERAND (exp, 1);
5435 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5436 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5437 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5439 /* We assume all arrays have sizes that are a multiple of a byte.
5440 First subtract the lower bound, if any, in the type of the
5441 index, then convert to sizetype and multiply by the size of the
5442 array element. */
5443 if (low_bound != 0 && ! integer_zerop (low_bound))
5444 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5445 index, low_bound));
5447 /* If the index has a self-referential type, pass it to a
5448 WITH_RECORD_EXPR; if the component size is self-referential, pass
5449 our component to one. */
5450 if (! TREE_CONSTANT (index)
5451 && contains_placeholder_p (index))
5452 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5453 if (! TREE_CONSTANT (unit_size)
5454 && contains_placeholder_p (unit_size))
5455 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5456 TREE_OPERAND (exp, 0));
5458 offset = size_binop (PLUS_EXPR, offset,
5459 size_binop (MULT_EXPR,
5460 convert (sizetype, index),
5461 unit_size));
5464 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5465 && ! ((TREE_CODE (exp) == NOP_EXPR
5466 || TREE_CODE (exp) == CONVERT_EXPR)
5467 && (TYPE_MODE (TREE_TYPE (exp))
5468 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5469 break;
5471 /* If any reference in the chain is volatile, the effect is volatile. */
5472 if (TREE_THIS_VOLATILE (exp))
5473 *pvolatilep = 1;
5475 /* If the offset is non-constant already, then we can't assume any
5476 alignment more than the alignment here. */
5477 if (! TREE_CONSTANT (offset))
5478 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5480 exp = TREE_OPERAND (exp, 0);
5483 if (DECL_P (exp))
5484 alignment = MIN (alignment, DECL_ALIGN (exp));
5485 else if (TREE_TYPE (exp) != 0)
5486 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5488 /* If OFFSET is constant, see if we can return the whole thing as a
5489 constant bit position. Otherwise, split it up. */
5490 if (host_integerp (offset, 0)
5491 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5492 bitsize_unit_node))
5493 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5494 && host_integerp (tem, 0))
5495 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5496 else
5497 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5499 *pmode = mode;
5500 *palignment = alignment;
5501 return exp;
5504 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5506 static enum memory_use_mode
5507 get_memory_usage_from_modifier (modifier)
5508 enum expand_modifier modifier;
5510 switch (modifier)
5512 case EXPAND_NORMAL:
5513 case EXPAND_SUM:
5514 return MEMORY_USE_RO;
5515 break;
5516 case EXPAND_MEMORY_USE_WO:
5517 return MEMORY_USE_WO;
5518 break;
5519 case EXPAND_MEMORY_USE_RW:
5520 return MEMORY_USE_RW;
5521 break;
5522 case EXPAND_MEMORY_USE_DONT:
5523 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5524 MEMORY_USE_DONT, because they are modifiers to a call of
5525 expand_expr in the ADDR_EXPR case of expand_expr. */
5526 case EXPAND_CONST_ADDRESS:
5527 case EXPAND_INITIALIZER:
5528 return MEMORY_USE_DONT;
5529 case EXPAND_MEMORY_USE_BAD:
5530 default:
5531 abort ();
5535 /* Given an rtx VALUE that may contain additions and multiplications, return
5536 an equivalent value that just refers to a register, memory, or constant.
5537 This is done by generating instructions to perform the arithmetic and
5538 returning a pseudo-register containing the value.
5540 The returned value may be a REG, SUBREG, MEM or constant. */
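/* A sketch: given VALUE = (plus:SI (reg:SI 100) (const_int 8)),
   force_operand emits an add and returns a register (possibly TARGET)
   holding the sum, so the caller ends up with a plain operand.
   Illustrative only.  */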
5543 force_operand (value, target)
5544 rtx value, target;
5546 register optab binoptab = 0;
5547 /* Use a temporary to force order of execution of calls to
5548 `force_operand'. */
5549 rtx tmp;
5550 register rtx op2;
5551 /* Use subtarget as the target for operand 0 of a binary operation. */
5552 register rtx subtarget = get_subtarget (target);
5554 /* Check for a PIC address load. */
5555 if (flag_pic
5556 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5557 && XEXP (value, 0) == pic_offset_table_rtx
5558 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5559 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5560 || GET_CODE (XEXP (value, 1)) == CONST))
5562 if (!subtarget)
5563 subtarget = gen_reg_rtx (GET_MODE (value));
5564 emit_move_insn (subtarget, value);
5565 return subtarget;
5568 if (GET_CODE (value) == PLUS)
5569 binoptab = add_optab;
5570 else if (GET_CODE (value) == MINUS)
5571 binoptab = sub_optab;
5572 else if (GET_CODE (value) == MULT)
5574 op2 = XEXP (value, 1);
5575 if (!CONSTANT_P (op2)
5576 && !(GET_CODE (op2) == REG && op2 != subtarget))
5577 subtarget = 0;
5578 tmp = force_operand (XEXP (value, 0), subtarget);
5579 return expand_mult (GET_MODE (value), tmp,
5580 force_operand (op2, NULL_RTX),
5581 target, 1);
5584 if (binoptab)
5586 op2 = XEXP (value, 1);
5587 if (!CONSTANT_P (op2)
5588 && !(GET_CODE (op2) == REG && op2 != subtarget))
5589 subtarget = 0;
5590 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5592 binoptab = add_optab;
5593 op2 = negate_rtx (GET_MODE (value), op2);
5596 /* Check for an addition with OP2 a constant integer and our first
5597 operand a PLUS of a virtual register and something else. In that
5598 case, we want to emit the sum of the virtual register and the
5599 constant first and then add the other value. This allows virtual
5600 register instantiation to simply modify the constant rather than
5601 creating another one around this addition. */
5602 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5603 && GET_CODE (XEXP (value, 0)) == PLUS
5604 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5605 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5606 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5608 rtx temp = expand_binop (GET_MODE (value), binoptab,
5609 XEXP (XEXP (value, 0), 0), op2,
5610 subtarget, 0, OPTAB_LIB_WIDEN);
5611 return expand_binop (GET_MODE (value), binoptab, temp,
5612 force_operand (XEXP (XEXP (value, 0), 1), 0),
5613 target, 0, OPTAB_LIB_WIDEN);
5616 tmp = force_operand (XEXP (value, 0), subtarget);
5617 return expand_binop (GET_MODE (value), binoptab, tmp,
5618 force_operand (op2, NULL_RTX),
5619 target, 0, OPTAB_LIB_WIDEN);
5620 /* We give UNSIGNEDP = 0 to expand_binop
5621 because the only operations we are expanding here are signed ones. */
5623 return value;
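/* An illustrative sketch of how force_operand is typically used; it is
   not part of the original file, and `base_reg' is a hypothetical pseudo
   register.  A caller holding a composite address such as
   (plus (reg) (const_int 8)) that must become a single operand might
   write, roughly:

       rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (8));
       rtx op = force_operand (addr, NULL_RTX);

   The call emits any add insn that is needed and hands back a REG,
   SUBREG, MEM or constant usable directly as an operand.  */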
5626 /* Subroutine of expand_expr:
5627 save the non-copied parts (LIST) of an expr (LHS), and return a list
5628 which can restore these values to their previous values,
5629 should something modify their storage. */
5631 static tree
5632 save_noncopied_parts (lhs, list)
5633 tree lhs;
5634 tree list;
5636 tree tail;
5637 tree parts = 0;
5639 for (tail = list; tail; tail = TREE_CHAIN (tail))
5640 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5641 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5642 else
5644 tree part = TREE_VALUE (tail);
5645 tree part_type = TREE_TYPE (part);
5646 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5647 rtx target
5648 = assign_temp (build_qualified_type (part_type,
5649 (TYPE_QUALS (part_type)
5650 | TYPE_QUAL_CONST)),
5651 0, 1, 1);
5653 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5654 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5655 parts = tree_cons (to_be_saved,
5656 build (RTL_EXPR, part_type, NULL_TREE,
5657 (tree) target),
5658 parts);
5659 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5661 return parts;
5664 /* Subroutine of expand_expr:
5665 record the non-copied parts (LIST) of an expr (LHS), and return a list
5666 which specifies the initial values of these parts. */
5668 static tree
5669 init_noncopied_parts (lhs, list)
5670 tree lhs;
5671 tree list;
5673 tree tail;
5674 tree parts = 0;
5676 for (tail = list; tail; tail = TREE_CHAIN (tail))
5677 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5678 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5679 else if (TREE_PURPOSE (tail))
5681 tree part = TREE_VALUE (tail);
5682 tree part_type = TREE_TYPE (part);
5683 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5684 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5686 return parts;
5689 /* Subroutine of expand_expr: return nonzero iff there is no way that
5690 EXP can reference X, which is being modified. TOP_P is nonzero if this
5691 call is going to be used to determine whether we need a temporary
5692 for EXP, as opposed to a recursive call to this function.
5694 It is always safe for this routine to return zero since it merely
5695 searches for optimization opportunities. */
5697 int
5698 safe_from_p (x, exp, top_p)
5699 rtx x;
5700 tree exp;
5701 int top_p;
5703 rtx exp_rtl = 0;
5704 int i, nops;
5705 static tree save_expr_list;
5707 if (x == 0
5708 /* If EXP has varying size, we MUST use a target since we currently
5709 have no way of allocating temporaries of variable size
5710 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5711 So we assume here that something at a higher level has prevented a
5712 clash. This is somewhat bogus, but the best we can do. Only
5713 do this when X is BLKmode and when we are at the top level. */
5714 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5715 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5716 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5717 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5718 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5719 != INTEGER_CST)
5720 && GET_MODE (x) == BLKmode)
5721 /* If X is in the outgoing argument area, it is always safe. */
5722 || (GET_CODE (x) == MEM
5723 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5724 || (GET_CODE (XEXP (x, 0)) == PLUS
5725 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5726 return 1;
5728 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5729 find the underlying pseudo. */
5730 if (GET_CODE (x) == SUBREG)
5732 x = SUBREG_REG (x);
5733 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5734 return 0;
5737 /* A SAVE_EXPR might appear many times in the expression passed to the
5738 top-level safe_from_p call, and if it has a complex subexpression,
5739 examining it multiple times could result in a combinatorial explosion.
5740 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5741 with optimization took about 28 minutes to compile -- even though it was
5742 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5743 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5744 we have processed. Note that the only test of top_p was above. */
5746 if (top_p)
5748 int rtn;
5749 tree t;
5751 save_expr_list = 0;
5753 rtn = safe_from_p (x, exp, 0);
5755 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5756 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5758 return rtn;
5761 /* Now look at our tree code and possibly recurse. */
5762 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5764 case 'd':
5765 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5766 break;
5768 case 'c':
5769 return 1;
5771 case 'x':
5772 if (TREE_CODE (exp) == TREE_LIST)
5773 return ((TREE_VALUE (exp) == 0
5774 || safe_from_p (x, TREE_VALUE (exp), 0))
5775 && (TREE_CHAIN (exp) == 0
5776 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5777 else if (TREE_CODE (exp) == ERROR_MARK)
5778 return 1; /* An already-visited SAVE_EXPR? */
5779 else
5780 return 0;
5782 case '1':
5783 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5785 case '2':
5786 case '<':
5787 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5788 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5790 case 'e':
5791 case 'r':
5792 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5793 the expression. If it is set, we conflict iff we are that rtx or
5794 both are in memory. Otherwise, we check all operands of the
5795 expression recursively. */
5797 switch (TREE_CODE (exp))
5799 case ADDR_EXPR:
5800 return (staticp (TREE_OPERAND (exp, 0))
5801 || TREE_STATIC (exp)
5802 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5804 case INDIRECT_REF:
5805 if (GET_CODE (x) == MEM
5806 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5807 get_alias_set (exp)))
5808 return 0;
5809 break;
5811 case CALL_EXPR:
5812 /* Assume that the call will clobber all hard registers and
5813 all of memory. */
5814 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5815 || GET_CODE (x) == MEM)
5816 return 0;
5817 break;
5819 case RTL_EXPR:
5820 /* If a sequence exists, we would have to scan every instruction
5821 in the sequence to see if it was safe. This is probably not
5822 worthwhile. */
5823 if (RTL_EXPR_SEQUENCE (exp))
5824 return 0;
5826 exp_rtl = RTL_EXPR_RTL (exp);
5827 break;
5829 case WITH_CLEANUP_EXPR:
5830 exp_rtl = RTL_EXPR_RTL (exp);
5831 break;
5833 case CLEANUP_POINT_EXPR:
5834 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5836 case SAVE_EXPR:
5837 exp_rtl = SAVE_EXPR_RTL (exp);
5838 if (exp_rtl)
5839 break;
5841 /* If we've already scanned this, don't do it again. Otherwise,
5842 show we've scanned it and record for clearing the flag if we're
5843 going on. */
5844 if (TREE_PRIVATE (exp))
5845 return 1;
5847 TREE_PRIVATE (exp) = 1;
5848 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5850 TREE_PRIVATE (exp) = 0;
5851 return 0;
5854 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5855 return 1;
5857 case BIND_EXPR:
5858 /* The only operand we look at is operand 1. The rest aren't
5859 part of the expression. */
5860 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5862 case METHOD_CALL_EXPR:
5863 /* This takes an rtx argument, but shouldn't appear here. */
5864 abort ();
5866 default:
5867 break;
5870 /* If we have an rtx, we do not need to scan our operands. */
5871 if (exp_rtl)
5872 break;
5874 nops = first_rtl_op (TREE_CODE (exp));
5875 for (i = 0; i < nops; i++)
5876 if (TREE_OPERAND (exp, i) != 0
5877 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5878 return 0;
5880 /* If this is a language-specific tree code, it may require
5881 special handling. */
5882 if ((unsigned int) TREE_CODE (exp)
5883 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5884 && lang_safe_from_p
5885 && !(*lang_safe_from_p) (x, exp))
5886 return 0;
5889 /* If we have an rtl, find any enclosed object. Then see if we conflict
5890 with it. */
5891 if (exp_rtl)
5893 if (GET_CODE (exp_rtl) == SUBREG)
5895 exp_rtl = SUBREG_REG (exp_rtl);
5896 if (GET_CODE (exp_rtl) == REG
5897 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5898 return 0;
5901 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5902 are memory and they conflict. */
5903 return ! (rtx_equal_p (x, exp_rtl)
5904 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5905 && true_dependence (exp_rtl, GET_MODE (x), x,
5906 rtx_addr_varies_p)));
5909 /* If we reach here, it is safe. */
5910 return 1;
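/* An illustrative sketch of how safe_from_p is typically consulted;
   `rhs' here is a hypothetical tree operand of a caller, not a name
   from this file.  When expanding a binary operation, a caller may
   reuse TARGET for the first operand only if the second operand
   cannot reference it:

       if (target == 0 || ! safe_from_p (target, rhs, 1))
         target = gen_reg_rtx (mode);

   A zero return merely forces a fresh temporary, so the check errs
   on the safe side.  */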
5913 /* Subroutine of expand_expr: return nonzero iff EXP is an
5914 expression whose type is statically determinable. */
5916 static int
5917 fixed_type_p (exp)
5918 tree exp;
5920 if (TREE_CODE (exp) == PARM_DECL
5921 || TREE_CODE (exp) == VAR_DECL
5922 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5923 || TREE_CODE (exp) == COMPONENT_REF
5924 || TREE_CODE (exp) == ARRAY_REF)
5925 return 1;
5926 return 0;
5929 /* Subroutine of expand_expr: return rtx if EXP is a
5930 variable or parameter; else return 0. */
5932 static rtx
5933 var_rtx (exp)
5934 tree exp;
5936 STRIP_NOPS (exp);
5937 switch (TREE_CODE (exp))
5939 case PARM_DECL:
5940 case VAR_DECL:
5941 return DECL_RTL (exp);
5942 default:
5943 return 0;
5947 #ifdef MAX_INTEGER_COMPUTATION_MODE
5949 void
5950 check_max_integer_computation_mode (exp)
5951 tree exp;
5953 enum tree_code code;
5954 enum machine_mode mode;
5956 /* Strip any NOPs that don't change the mode. */
5957 STRIP_NOPS (exp);
5958 code = TREE_CODE (exp);
5960 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5961 if (code == NOP_EXPR
5962 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5963 return;
5965 /* First check the type of the overall operation. We need only look at
5966 unary, binary and relational operations. */
5967 if (TREE_CODE_CLASS (code) == '1'
5968 || TREE_CODE_CLASS (code) == '2'
5969 || TREE_CODE_CLASS (code) == '<')
5971 mode = TYPE_MODE (TREE_TYPE (exp));
5972 if (GET_MODE_CLASS (mode) == MODE_INT
5973 && mode > MAX_INTEGER_COMPUTATION_MODE)
5974 internal_error ("unsupported wide integer operation");
5977 /* Check operand of a unary op. */
5978 if (TREE_CODE_CLASS (code) == '1')
5980 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5981 if (GET_MODE_CLASS (mode) == MODE_INT
5982 && mode > MAX_INTEGER_COMPUTATION_MODE)
5983 internal_error ("unsupported wide integer operation");
5986 /* Check operands of a binary/comparison op. */
5987 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5989 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5990 if (GET_MODE_CLASS (mode) == MODE_INT
5991 && mode > MAX_INTEGER_COMPUTATION_MODE)
5992 internal_error ("unsupported wide integer operation");
5994 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5995 if (GET_MODE_CLASS (mode) == MODE_INT
5996 && mode > MAX_INTEGER_COMPUTATION_MODE)
5997 internal_error ("unsupported wide integer operation");
6000 #endif
6002 /* expand_expr: generate code for computing expression EXP.
6003 An rtx for the computed value is returned. The value is never null.
6004 In the case of a void EXP, const0_rtx is returned.
6006 The value may be stored in TARGET if TARGET is nonzero.
6007 TARGET is just a suggestion; callers must assume that
6008 the rtx returned may not be the same as TARGET.
6010 If TARGET is CONST0_RTX, it means that the value will be ignored.
6012 If TMODE is not VOIDmode, it suggests generating the
6013 result in mode TMODE. But this is done only when convenient.
6014 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6015 TMODE is just a suggestion; callers must assume that
6016 the rtx returned may not have mode TMODE.
6018 Note that TARGET may have neither TMODE nor MODE. In that case, it
6019 probably will not be used.
6021 If MODIFIER is EXPAND_SUM then when EXP is an addition
6022 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6023 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6024 products as above, or REG or MEM, or constant.
6025 Ordinarily in such cases we would output mul or add instructions
6026 and then return a pseudo reg containing the sum.
6028 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6029 it also marks a label as absolutely required (it can't be dead).
6030 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6031 This is used for outputting expressions used in initializers.
6033 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6034 with a constant address even if that address is not normally legitimate.
6035 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
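/* An illustrative sketch of a common call pattern (the variable names
   are hypothetical, not taken from this file): a caller that simply
   wants the value of a tree node computed somewhere convenient writes

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   passing NULL_RTX and VOIDmode because TARGET and TMODE are only
   suggestions, and then checks where VAL actually ended up.  */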
6037 rtx
6038 expand_expr (exp, target, tmode, modifier)
6039 register tree exp;
6040 rtx target;
6041 enum machine_mode tmode;
6042 enum expand_modifier modifier;
6044 register rtx op0, op1, temp;
6045 tree type = TREE_TYPE (exp);
6046 int unsignedp = TREE_UNSIGNED (type);
6047 register enum machine_mode mode;
6048 register enum tree_code code = TREE_CODE (exp);
6049 optab this_optab;
6050 rtx subtarget, original_target;
6051 int ignore;
6052 tree context;
6053 /* Used by check-memory-usage to make modifier read only. */
6054 enum expand_modifier ro_modifier;
6056 /* Handle ERROR_MARK before anybody tries to access its type. */
6057 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6059 op0 = CONST0_RTX (tmode);
6060 if (op0 != 0)
6061 return op0;
6062 return const0_rtx;
6065 mode = TYPE_MODE (type);
6066 /* Use subtarget as the target for operand 0 of a binary operation. */
6067 subtarget = get_subtarget (target);
6068 original_target = target;
6069 ignore = (target == const0_rtx
6070 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6071 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6072 || code == COND_EXPR)
6073 && TREE_CODE (type) == VOID_TYPE));
6075 /* Make a read-only version of the modifier. */
6076 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6077 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6078 ro_modifier = modifier;
6079 else
6080 ro_modifier = EXPAND_NORMAL;
6082 /* If we are going to ignore this result, we need only do something
6083 if there is a side-effect somewhere in the expression. If there
6084 is, short-circuit the most common cases here. Note that we must
6085 not call expand_expr with anything but const0_rtx in case this
6086 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6088 if (ignore)
6090 if (! TREE_SIDE_EFFECTS (exp))
6091 return const0_rtx;
6093 /* Ensure we reference a volatile object even if value is ignored, but
6094 don't do this if all we are doing is taking its address. */
6095 if (TREE_THIS_VOLATILE (exp)
6096 && TREE_CODE (exp) != FUNCTION_DECL
6097 && mode != VOIDmode && mode != BLKmode
6098 && modifier != EXPAND_CONST_ADDRESS)
6100 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6101 if (GET_CODE (temp) == MEM)
6102 temp = copy_to_reg (temp);
6103 return const0_rtx;
6106 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6107 || code == INDIRECT_REF || code == BUFFER_REF)
6108 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6109 VOIDmode, ro_modifier);
6110 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6111 || code == ARRAY_REF)
6113 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6114 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6115 return const0_rtx;
6117 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6118 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6119 /* If the second operand has no side effects, just evaluate
6120 the first. */
6121 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6122 VOIDmode, ro_modifier);
6123 else if (code == BIT_FIELD_REF)
6125 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6126 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6127 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6128 return const0_rtx;
6131 target = 0;
6134 #ifdef MAX_INTEGER_COMPUTATION_MODE
6135 /* Only check stuff here if the mode we want is different from the mode
6136 of the expression; if it's the same, check_max_integer_computation_mode
6137 will handle it. Do we really need to check this stuff at all? */
6139 if (target
6140 && GET_MODE (target) != mode
6141 && TREE_CODE (exp) != INTEGER_CST
6142 && TREE_CODE (exp) != PARM_DECL
6143 && TREE_CODE (exp) != ARRAY_REF
6144 && TREE_CODE (exp) != COMPONENT_REF
6145 && TREE_CODE (exp) != BIT_FIELD_REF
6146 && TREE_CODE (exp) != INDIRECT_REF
6147 && TREE_CODE (exp) != CALL_EXPR
6148 && TREE_CODE (exp) != VAR_DECL
6149 && TREE_CODE (exp) != RTL_EXPR)
6151 enum machine_mode mode = GET_MODE (target);
6153 if (GET_MODE_CLASS (mode) == MODE_INT
6154 && mode > MAX_INTEGER_COMPUTATION_MODE)
6155 internal_error ("unsupported wide integer operation");
6158 if (tmode != mode
6159 && TREE_CODE (exp) != INTEGER_CST
6160 && TREE_CODE (exp) != PARM_DECL
6161 && TREE_CODE (exp) != ARRAY_REF
6162 && TREE_CODE (exp) != COMPONENT_REF
6163 && TREE_CODE (exp) != BIT_FIELD_REF
6164 && TREE_CODE (exp) != INDIRECT_REF
6165 && TREE_CODE (exp) != VAR_DECL
6166 && TREE_CODE (exp) != CALL_EXPR
6167 && TREE_CODE (exp) != RTL_EXPR
6168 && GET_MODE_CLASS (tmode) == MODE_INT
6169 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6170 internal_error ("unsupported wide integer operation");
6172 check_max_integer_computation_mode (exp);
6173 #endif
6175 /* If will do cse, generate all results into pseudo registers
6176 since 1) that allows cse to find more things
6177 and 2) otherwise cse could produce an insn the machine
6178 cannot support. */
6180 if (! cse_not_expected && mode != BLKmode && target
6181 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6182 target = subtarget;
6184 switch (code)
6186 case LABEL_DECL:
6188 tree function = decl_function_context (exp);
6189 /* Handle using a label in a containing function. */
6190 if (function != current_function_decl
6191 && function != inline_function_decl && function != 0)
6193 struct function *p = find_function_data (function);
6194 p->expr->x_forced_labels
6195 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6196 p->expr->x_forced_labels);
6198 else
6200 if (modifier == EXPAND_INITIALIZER)
6201 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6202 label_rtx (exp),
6203 forced_labels);
6206 temp = gen_rtx_MEM (FUNCTION_MODE,
6207 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6208 if (function != current_function_decl
6209 && function != inline_function_decl && function != 0)
6210 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6211 return temp;
6214 case PARM_DECL:
6215 if (DECL_RTL (exp) == 0)
6217 error_with_decl (exp, "prior parameter's size depends on `%s'");
6218 return CONST0_RTX (mode);
6221 /* ... fall through ... */
6223 case VAR_DECL:
6224 /* If a static var's type was incomplete when the decl was written,
6225 but the type is complete now, lay out the decl now. */
6226 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6227 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6229 layout_decl (exp, 0);
6230 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6233 /* Although static-storage variables start off initialized, according to
6234 ANSI C, a memcpy could overwrite them with uninitialized values. So
6235 we check them too. This also lets us check for read-only variables
6236 accessed via a non-const declaration, in case it won't be detected
6237 any other way (e.g., in an embedded system or OS kernel without
6238 memory protection).
6240 Aggregates are not checked here; they're handled elsewhere. */
6241 if (cfun && current_function_check_memory_usage
6242 && code == VAR_DECL
6243 && GET_CODE (DECL_RTL (exp)) == MEM
6244 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6246 enum memory_use_mode memory_usage;
6247 memory_usage = get_memory_usage_from_modifier (modifier);
6249 in_check_memory_usage = 1;
6250 if (memory_usage != MEMORY_USE_DONT)
6251 emit_library_call (chkr_check_addr_libfunc,
6252 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6253 XEXP (DECL_RTL (exp), 0), Pmode,
6254 GEN_INT (int_size_in_bytes (type)),
6255 TYPE_MODE (sizetype),
6256 GEN_INT (memory_usage),
6257 TYPE_MODE (integer_type_node));
6258 in_check_memory_usage = 0;
6261 /* ... fall through ... */
6263 case FUNCTION_DECL:
6264 case RESULT_DECL:
6265 if (DECL_RTL (exp) == 0)
6266 abort ();
6268 /* Ensure variable marked as used even if it doesn't go through
6269 a parser. If it hasn't been used yet, write out an external
6270 definition. */
6271 if (! TREE_USED (exp))
6273 assemble_external (exp);
6274 TREE_USED (exp) = 1;
6277 /* Show we haven't gotten RTL for this yet. */
6278 temp = 0;
6280 /* Handle variables inherited from containing functions. */
6281 context = decl_function_context (exp);
6283 /* We treat inline_function_decl as an alias for the current function
6284 because that is the inline function whose vars, types, etc.
6285 are being merged into the current function.
6286 See expand_inline_function. */
6288 if (context != 0 && context != current_function_decl
6289 && context != inline_function_decl
6290 /* If var is static, we don't need a static chain to access it. */
6291 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6292 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6294 rtx addr;
6296 /* Mark as non-local and addressable. */
6297 DECL_NONLOCAL (exp) = 1;
6298 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6299 abort ();
6300 mark_addressable (exp);
6301 if (GET_CODE (DECL_RTL (exp)) != MEM)
6302 abort ();
6303 addr = XEXP (DECL_RTL (exp), 0);
6304 if (GET_CODE (addr) == MEM)
6305 addr = change_address (addr, Pmode,
6306 fix_lexical_addr (XEXP (addr, 0), exp));
6307 else
6308 addr = fix_lexical_addr (addr, exp);
6310 temp = change_address (DECL_RTL (exp), mode, addr);
6313 /* This is the case of an array whose size is to be determined
6314 from its initializer, while the initializer is still being parsed.
6315 See expand_decl. */
6317 else if (GET_CODE (DECL_RTL (exp)) == MEM
6318 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6319 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6320 XEXP (DECL_RTL (exp), 0));
6322 /* If DECL_RTL is memory, we are in the normal case and either
6323 the address is not valid or it is not a register and -fforce-addr
6324 is specified, get the address into a register. */
6326 else if (GET_CODE (DECL_RTL (exp)) == MEM
6327 && modifier != EXPAND_CONST_ADDRESS
6328 && modifier != EXPAND_SUM
6329 && modifier != EXPAND_INITIALIZER
6330 && (! memory_address_p (DECL_MODE (exp),
6331 XEXP (DECL_RTL (exp), 0))
6332 || (flag_force_addr
6333 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6334 temp = change_address (DECL_RTL (exp), VOIDmode,
6335 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6337 /* If we got something, return it. But first, set the alignment
6338 if the address is a register. */
6339 if (temp != 0)
6341 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6342 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6344 return temp;
6347 /* If the mode of DECL_RTL does not match that of the decl, it
6348 must be a promoted value. We return a SUBREG of the wanted mode,
6349 but mark it so that we know that it was already extended. */
6351 if (GET_CODE (DECL_RTL (exp)) == REG
6352 && GET_MODE (DECL_RTL (exp)) != mode)
6354 /* Get the signedness used for this variable. Ensure we get the
6355 same mode we got when the variable was declared. */
6356 if (GET_MODE (DECL_RTL (exp))
6357 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6358 abort ();
6360 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6361 SUBREG_PROMOTED_VAR_P (temp) = 1;
6362 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6363 return temp;
6366 return DECL_RTL (exp);
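/* A concrete reading of the promoted-value case above, assuming a
   target whose PROMOTE_MODE widens SImode variables to DImode
   registers: DECL_RTL is then a DImode REG while the decl's type mode
   is SImode, so the code hands back (subreg:SI (reg:DI ...) ...) with
   SUBREG_PROMOTED_VAR_P set, telling later consumers that the value
   is already sign- or zero-extended and need not be extended again.  */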
6368 case INTEGER_CST:
6369 return immed_double_const (TREE_INT_CST_LOW (exp),
6370 TREE_INT_CST_HIGH (exp), mode);
6372 case CONST_DECL:
6373 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6374 EXPAND_MEMORY_USE_BAD);
6376 case REAL_CST:
6377 /* If optimized, generate immediate CONST_DOUBLE
6378 which will be turned into memory by reload if necessary.
6380 We used to force a register so that loop.c could see it. But
6381 this does not allow gen_* patterns to perform optimizations with
6382 the constants. It also produces two insns in cases like "x = 1.0;".
6383 On most machines, floating-point constants are not permitted in
6384 many insns, so we'd end up copying it to a register in any case.
6386 Now, we do the copying in expand_binop, if appropriate. */
6387 return immed_real_const (exp);
6389 case COMPLEX_CST:
6390 case STRING_CST:
6391 if (! TREE_CST_RTL (exp))
6392 output_constant_def (exp, 1);
6394 /* TREE_CST_RTL probably contains a constant address.
6395 On RISC machines where a constant address isn't valid,
6396 make some insns to get that address into a register. */
6397 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6398 && modifier != EXPAND_CONST_ADDRESS
6399 && modifier != EXPAND_INITIALIZER
6400 && modifier != EXPAND_SUM
6401 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6402 || (flag_force_addr
6403 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6404 return change_address (TREE_CST_RTL (exp), VOIDmode,
6405 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6406 return TREE_CST_RTL (exp);
6408 case EXPR_WITH_FILE_LOCATION:
6410 rtx to_return;
6411 const char *saved_input_filename = input_filename;
6412 int saved_lineno = lineno;
6413 input_filename = EXPR_WFL_FILENAME (exp);
6414 lineno = EXPR_WFL_LINENO (exp);
6415 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6416 emit_line_note (input_filename, lineno);
6417 /* Possibly avoid switching back and forth here. */
6418 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6419 input_filename = saved_input_filename;
6420 lineno = saved_lineno;
6421 return to_return;
6424 case SAVE_EXPR:
6425 context = decl_function_context (exp);
6427 /* If this SAVE_EXPR was at global context, assume we are an
6428 initialization function and move it into our context. */
6429 if (context == 0)
6430 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6432 /* We treat inline_function_decl as an alias for the current function
6433 because that is the inline function whose vars, types, etc.
6434 are being merged into the current function.
6435 See expand_inline_function. */
6436 if (context == current_function_decl || context == inline_function_decl)
6437 context = 0;
6439 /* If this is non-local, handle it. */
6440 if (context)
6442 /* The following call just exists to abort if the context is
6443 not of a containing function. */
6444 find_function_data (context);
6446 temp = SAVE_EXPR_RTL (exp);
6447 if (temp && GET_CODE (temp) == REG)
6449 put_var_into_stack (exp);
6450 temp = SAVE_EXPR_RTL (exp);
6452 if (temp == 0 || GET_CODE (temp) != MEM)
6453 abort ();
6454 return change_address (temp, mode,
6455 fix_lexical_addr (XEXP (temp, 0), exp));
6457 if (SAVE_EXPR_RTL (exp) == 0)
6459 if (mode == VOIDmode)
6460 temp = const0_rtx;
6461 else
6462 temp = assign_temp (build_qualified_type (type,
6463 (TYPE_QUALS (type)
6464 | TYPE_QUAL_CONST)),
6465 3, 0, 0);
6467 SAVE_EXPR_RTL (exp) = temp;
6468 if (!optimize && GET_CODE (temp) == REG)
6469 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6470 save_expr_regs);
6472 /* If the mode of TEMP does not match that of the expression, it
6473 must be a promoted value. We pass store_expr a SUBREG of the
6474 wanted mode but mark it so that we know that it was already
6475 extended. Note that `unsignedp' was modified above in
6476 this case. */
6478 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6480 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6481 SUBREG_PROMOTED_VAR_P (temp) = 1;
6482 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6485 if (temp == const0_rtx)
6486 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6487 EXPAND_MEMORY_USE_BAD);
6488 else
6489 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6491 TREE_USED (exp) = 1;
6494 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6495 must be a promoted value. We return a SUBREG of the wanted mode,
6496 but mark it so that we know that it was already extended. */
6498 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6499 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6501 /* Compute the signedness and make the proper SUBREG. */
6502 promote_mode (type, mode, &unsignedp, 0);
6503 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6504 SUBREG_PROMOTED_VAR_P (temp) = 1;
6505 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6506 return temp;
6509 return SAVE_EXPR_RTL (exp);
6511 case UNSAVE_EXPR:
6513 rtx temp;
6514 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6515 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6516 return temp;
6519 case PLACEHOLDER_EXPR:
6521 tree placeholder_expr;
6523 /* If there is an object on the head of the placeholder list,
6524 see if some object in it is of type TYPE or a pointer to it. For
6525 further information, see tree.def. */
6526 for (placeholder_expr = placeholder_list;
6527 placeholder_expr != 0;
6528 placeholder_expr = TREE_CHAIN (placeholder_expr))
6530 tree need_type = TYPE_MAIN_VARIANT (type);
6531 tree object = 0;
6532 tree old_list = placeholder_list;
6533 tree elt;
6535 /* Find the outermost reference that is of the type we want.
6536 If none, see if any object has a type that is a pointer to
6537 the type we want. */
6538 for (elt = TREE_PURPOSE (placeholder_expr);
6539 elt != 0 && object == 0;
6541 = ((TREE_CODE (elt) == COMPOUND_EXPR
6542 || TREE_CODE (elt) == COND_EXPR)
6543 ? TREE_OPERAND (elt, 1)
6544 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6545 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6546 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6547 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6548 ? TREE_OPERAND (elt, 0) : 0))
6549 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6550 object = elt;
6552 for (elt = TREE_PURPOSE (placeholder_expr);
6553 elt != 0 && object == 0;
6555 = ((TREE_CODE (elt) == COMPOUND_EXPR
6556 || TREE_CODE (elt) == COND_EXPR)
6557 ? TREE_OPERAND (elt, 1)
6558 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6559 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6560 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6561 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6562 ? TREE_OPERAND (elt, 0) : 0))
6563 if (POINTER_TYPE_P (TREE_TYPE (elt))
6564 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6565 == need_type))
6566 object = build1 (INDIRECT_REF, need_type, elt);
6568 if (object != 0)
6570 /* Expand this object skipping the list entries before
6571 it was found in case it is also a PLACEHOLDER_EXPR.
6572 In that case, we want to translate it using subsequent
6573 entries. */
6574 placeholder_list = TREE_CHAIN (placeholder_expr);
6575 temp = expand_expr (object, original_target, tmode,
6576 ro_modifier);
6577 placeholder_list = old_list;
6578 return temp;
6583 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6584 abort ();
6586 case WITH_RECORD_EXPR:
6587 /* Put the object on the placeholder list, expand our first operand,
6588 and pop the list. */
6589 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6590 placeholder_list);
6591 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6592 tmode, ro_modifier);
6593 placeholder_list = TREE_CHAIN (placeholder_list);
6594 return target;
6596 case GOTO_EXPR:
6597 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6598 expand_goto (TREE_OPERAND (exp, 0));
6599 else
6600 expand_computed_goto (TREE_OPERAND (exp, 0));
6601 return const0_rtx;
6603 case EXIT_EXPR:
6604 expand_exit_loop_if_false (NULL,
6605 invert_truthvalue (TREE_OPERAND (exp, 0)));
6606 return const0_rtx;
6608 case LABELED_BLOCK_EXPR:
6609 if (LABELED_BLOCK_BODY (exp))
6610 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6611 /* Should perhaps use expand_label, but this is simpler and safer. */
6612 do_pending_stack_adjust ();
6613 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6614 return const0_rtx;
6616 case EXIT_BLOCK_EXPR:
6617 if (EXIT_BLOCK_RETURN (exp))
6618 sorry ("returned value in block_exit_expr");
6619 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6620 return const0_rtx;
6622 case LOOP_EXPR:
6623 push_temp_slots ();
6624 expand_start_loop (1);
6625 expand_expr_stmt (TREE_OPERAND (exp, 0));
6626 expand_end_loop ();
6627 pop_temp_slots ();
6629 return const0_rtx;
6631 case BIND_EXPR:
6633 tree vars = TREE_OPERAND (exp, 0);
6634 int vars_need_expansion = 0;
6636 /* Need to open a binding contour here because
6637 if there are any cleanups they must be contained here. */
6638 expand_start_bindings (2);
6640 /* Mark the corresponding BLOCK for output in its proper place. */
6641 if (TREE_OPERAND (exp, 2) != 0
6642 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6643 insert_block (TREE_OPERAND (exp, 2));
6645 /* If VARS have not yet been expanded, expand them now. */
6646 while (vars)
6648 if (!DECL_RTL_SET_P (vars))
6650 vars_need_expansion = 1;
6651 expand_decl (vars);
6653 expand_decl_init (vars);
6654 vars = TREE_CHAIN (vars);
6657 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6659 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6661 return temp;
6664 case RTL_EXPR:
6665 if (RTL_EXPR_SEQUENCE (exp))
6667 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6668 abort ();
6669 emit_insns (RTL_EXPR_SEQUENCE (exp));
6670 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6672 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6673 free_temps_for_rtl_expr (exp);
6674 return RTL_EXPR_RTL (exp);
6676 case CONSTRUCTOR:
6677 /* If we don't need the result, just ensure we evaluate any
6678 subexpressions. */
6679 if (ignore)
6681 tree elt;
6682 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6683 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6684 EXPAND_MEMORY_USE_BAD);
6685 return const0_rtx;
6688 /* All elts simple constants => refer to a constant in memory. But
6689 if this is a non-BLKmode mode, let it store a field at a time
6690 since that should make a CONST_INT or CONST_DOUBLE when we
6691 fold. Likewise, if we have a target we can use, it is best to
6692 store directly into the target unless the type is large enough
6693 that memcpy will be used. If we are making an initializer and
6694 all operands are constant, put it in memory as well. */
6695 else if ((TREE_STATIC (exp)
6696 && ((mode == BLKmode
6697 && ! (target != 0 && safe_from_p (target, exp, 1)))
6698 || TREE_ADDRESSABLE (exp)
6699 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6700 && (! MOVE_BY_PIECES_P
6701 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6702 TYPE_ALIGN (type)))
6703 && ! mostly_zeros_p (exp))))
6704 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6706 rtx constructor = output_constant_def (exp, 1);
6708 if (modifier != EXPAND_CONST_ADDRESS
6709 && modifier != EXPAND_INITIALIZER
6710 && modifier != EXPAND_SUM
6711 && (! memory_address_p (GET_MODE (constructor),
6712 XEXP (constructor, 0))
6713 || (flag_force_addr
6714 && GET_CODE (XEXP (constructor, 0)) != REG)))
6715 constructor = change_address (constructor, VOIDmode,
6716 XEXP (constructor, 0));
6717 return constructor;
6719 else
6721 /* Handle calls that pass values in multiple non-contiguous
6722 locations. The Irix 6 ABI has examples of this. */
6723 if (target == 0 || ! safe_from_p (target, exp, 1)
6724 || GET_CODE (target) == PARALLEL)
6725 target
6726 = assign_temp (build_qualified_type (type,
6727 (TYPE_QUALS (type)
6728 | (TREE_READONLY (exp)
6729 * TYPE_QUAL_CONST))),
6730 TREE_ADDRESSABLE (exp), 1, 1);
6732 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6733 int_size_in_bytes (TREE_TYPE (exp)));
6734 return target;
6737 case INDIRECT_REF:
6739 tree exp1 = TREE_OPERAND (exp, 0);
6740 tree index;
6741 tree string = string_constant (exp1, &index);
6743 /* Try to optimize reads from const strings. */
6744 if (string
6745 && TREE_CODE (string) == STRING_CST
6746 && TREE_CODE (index) == INTEGER_CST
6747 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6748 && GET_MODE_CLASS (mode) == MODE_INT
6749 && GET_MODE_SIZE (mode) == 1
6750 && modifier != EXPAND_MEMORY_USE_WO)
6751 return
6752 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6754 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6755 op0 = memory_address (mode, op0);
6757 if (cfun && current_function_check_memory_usage
6758 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6760 enum memory_use_mode memory_usage;
6761 memory_usage = get_memory_usage_from_modifier (modifier);
6763 if (memory_usage != MEMORY_USE_DONT)
6765 in_check_memory_usage = 1;
6766 emit_library_call (chkr_check_addr_libfunc,
6767 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6768 Pmode, GEN_INT (int_size_in_bytes (type)),
6769 TYPE_MODE (sizetype),
6770 GEN_INT (memory_usage),
6771 TYPE_MODE (integer_type_node));
6772 in_check_memory_usage = 0;
6776 temp = gen_rtx_MEM (mode, op0);
6777 set_mem_attributes (temp, exp, 0);
6779 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6780 here, because, in C and C++, the fact that a location is accessed
6781 through a pointer to const does not mean that the value there can
6782 never change. Languages where it can never change should
6783 also set TREE_STATIC. */
6784 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6786 /* If we are writing to this object and its type is a record with
6787 readonly fields, we must mark it as readonly so it will
6788 conflict with readonly references to those fields. */
6789 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6790 RTX_UNCHANGING_P (temp) = 1;
6792 return temp;
6795 case ARRAY_REF:
6796 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6797 abort ();
6800 tree array = TREE_OPERAND (exp, 0);
6801 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6802 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6803 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6804 HOST_WIDE_INT i;
6806 /* Optimize the special-case of a zero lower bound.
6808 We convert the low_bound to sizetype to avoid some problems
6809 with constant folding. (E.g. suppose the lower bound is 1,
6810 and its mode is QI. Without the conversion, (ARRAY
6811 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6812 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6814 if (! integer_zerop (low_bound))
6815 index = size_diffop (index, convert (sizetype, low_bound));
6817 /* Fold an expression like: "foo"[2].
6818 This is not done in fold so it won't happen inside &.
6819 Don't fold if this is for wide characters since it's too
6820 difficult to do correctly and this is a very rare case. */
6822 if (TREE_CODE (array) == STRING_CST
6823 && TREE_CODE (index) == INTEGER_CST
6824 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6825 && GET_MODE_CLASS (mode) == MODE_INT
6826 && GET_MODE_SIZE (mode) == 1)
6827 return
6828 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6830 /* If this is a constant index into a constant array,
6831 just get the value from the array. Handle both the cases when
6832 we have an explicit constructor and when our operand is a variable
6833 that was declared const. */
6835 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6836 && TREE_CODE (index) == INTEGER_CST
6837 && 0 > compare_tree_int (index,
6838 list_length (CONSTRUCTOR_ELTS
6839 (TREE_OPERAND (exp, 0)))))
6841 tree elem;
6843 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6844 i = TREE_INT_CST_LOW (index);
6845 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6848 if (elem)
6849 return expand_expr (fold (TREE_VALUE (elem)), target,
6850 tmode, ro_modifier);
6853 else if (optimize >= 1
6854 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6855 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6856 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6858 if (TREE_CODE (index) == INTEGER_CST)
6860 tree init = DECL_INITIAL (array);
6862 if (TREE_CODE (init) == CONSTRUCTOR)
6864 tree elem;
6866 for (elem = CONSTRUCTOR_ELTS (init);
6867 (elem
6868 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6869 elem = TREE_CHAIN (elem))
6872 if (elem)
6873 return expand_expr (fold (TREE_VALUE (elem)), target,
6874 tmode, ro_modifier);
6876 else if (TREE_CODE (init) == STRING_CST
6877 && 0 > compare_tree_int (index,
6878 TREE_STRING_LENGTH (init)))
6880 tree type = TREE_TYPE (TREE_TYPE (init));
6881 enum machine_mode mode = TYPE_MODE (type);
6883 if (GET_MODE_CLASS (mode) == MODE_INT
6884 && GET_MODE_SIZE (mode) == 1)
6885 return (GEN_INT
6886 (TREE_STRING_POINTER
6887 (init)[TREE_INT_CST_LOW (index)]));
6892 /* Fall through. */
6894 case COMPONENT_REF:
6895 case BIT_FIELD_REF:
6896 /* If the operand is a CONSTRUCTOR, we can just extract the
6897 appropriate field if it is present. Don't do this if we have
6898 already written the data since we want to refer to that copy
6899 and varasm.c assumes that's what we'll do. */
6900 if (code != ARRAY_REF
6901 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6902 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6904 tree elt;
6906 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6907 elt = TREE_CHAIN (elt))
6908 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6909 /* We can normally use the value of the field in the
6910 CONSTRUCTOR. However, if this is a bitfield in
6911 an integral mode that we can fit in a HOST_WIDE_INT,
6912 we must mask only the number of bits in the bitfield,
6913 since this is done implicitly by the constructor. If
6914 the bitfield does not meet either of those conditions,
6915 we can't do this optimization. */
6916 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6917 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6918 == MODE_INT)
6919 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6920 <= HOST_BITS_PER_WIDE_INT))))
6922 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6923 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6925 HOST_WIDE_INT bitsize
6926 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6928 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6930 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6931 op0 = expand_and (op0, op1, target);
6933 else
6935 enum machine_mode imode
6936 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6937 tree count
6938 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6941 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6942 target, 0);
6943 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6944 target, 0);
6948 return op0;
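/* A small worked illustration of the masking just above (hypothetical
   numbers): for a 3-bit field whose stored bits are 101, the unsigned
   path ANDs with (1 << 3) - 1 = 7 and yields 5; the signed path, with
   an 8-bit QImode field type, shifts left by 8 - 3 = 5 and then
   arithmetically right by 5, yielding the sign-extended value -3.  */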
6953 enum machine_mode mode1;
6954 HOST_WIDE_INT bitsize, bitpos;
6955 tree offset;
6956 int volatilep = 0;
6957 unsigned int alignment;
6958 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6959 &mode1, &unsignedp, &volatilep,
6960 &alignment);
6962 /* If we got back the original object, something is wrong. Perhaps
6963 we are evaluating an expression too early. In any event, don't
6964 infinitely recurse. */
6965 if (tem == exp)
6966 abort ();
6968 /* If TEM's type is a union of variable size, pass TARGET to the inner
6969 computation, since it will need a temporary and TARGET is known
6970 to suffice. This occurs in unchecked conversion in Ada. */
6972 op0 = expand_expr (tem,
6973 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6974 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6975 != INTEGER_CST)
6976 ? target : NULL_RTX),
6977 VOIDmode,
6978 (modifier == EXPAND_INITIALIZER
6979 || modifier == EXPAND_CONST_ADDRESS)
6980 ? modifier : EXPAND_NORMAL);
6982 /* If this is a constant, put it into a register if it is a
6983 legitimate constant and OFFSET is 0 and memory if it isn't. */
6984 if (CONSTANT_P (op0))
6986 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6987 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6988 && offset == 0)
6989 op0 = force_reg (mode, op0);
6990 else
6991 op0 = validize_mem (force_const_mem (mode, op0));
6994 if (offset != 0)
6996 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6998 /* If this object is in memory, put it into a register.
6999 This case can't occur in C, but can in Ada if we have
7000 unchecked conversion of an expression from a scalar type to
7001 an array or record type. */
7002 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7003 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7005 tree nt = build_qualified_type (TREE_TYPE (tem),
7006 (TYPE_QUALS (TREE_TYPE (tem))
7007 | TYPE_QUAL_CONST));
7008 rtx memloc = assign_temp (nt, 1, 1, 1);
7010 mark_temp_addr_taken (memloc);
7011 emit_move_insn (memloc, op0);
7012 op0 = memloc;
7015 if (GET_CODE (op0) != MEM)
7016 abort ();
7018 if (GET_MODE (offset_rtx) != ptr_mode)
7020 #ifdef POINTERS_EXTEND_UNSIGNED
7021 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7022 #else
7023 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7024 #endif
7027 /* A constant address in OP0 can have VOIDmode; we must not try
7028 to call force_reg for that case. Avoid that case. */
7029 if (GET_CODE (op0) == MEM
7030 && GET_MODE (op0) == BLKmode
7031 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7032 && bitsize != 0
7033 && (bitpos % bitsize) == 0
7034 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7035 && alignment == GET_MODE_ALIGNMENT (mode1))
7037 rtx temp = change_address (op0, mode1,
7038 plus_constant (XEXP (op0, 0),
7039 (bitpos /
7040 BITS_PER_UNIT)));
7041 if (GET_CODE (XEXP (temp, 0)) == REG)
7042 op0 = temp;
7043 else
7044 op0 = change_address (op0, mode1,
7045 force_reg (GET_MODE (XEXP (temp, 0)),
7046 XEXP (temp, 0)));
7047 bitpos = 0;
7050 op0 = change_address (op0, VOIDmode,
7051 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7052 force_reg (ptr_mode,
7053 offset_rtx)));
7056 /* Don't forget about volatility even if this is a bitfield. */
7057 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7059 op0 = copy_rtx (op0);
7060 MEM_VOLATILE_P (op0) = 1;
7063 /* Check the access. */
7064 if (cfun != 0 && current_function_check_memory_usage
7065 && GET_CODE (op0) == MEM)
7067 enum memory_use_mode memory_usage;
7068 memory_usage = get_memory_usage_from_modifier (modifier);
7070 if (memory_usage != MEMORY_USE_DONT)
7072 rtx to;
7073 int size;
7075 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7076 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7078 /* Check the access right of the pointer. */
7079 in_check_memory_usage = 1;
7080 if (size > BITS_PER_UNIT)
7081 emit_library_call (chkr_check_addr_libfunc,
7082 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7083 Pmode, GEN_INT (size / BITS_PER_UNIT),
7084 TYPE_MODE (sizetype),
7085 GEN_INT (memory_usage),
7086 TYPE_MODE (integer_type_node));
7087 in_check_memory_usage = 0;
7091 /* In cases where an aligned union has an unaligned object
7092 as a field, we might be extracting a BLKmode value from
7093 an integer-mode (e.g., SImode) object. Handle this case
7094 by doing the extract into an object as wide as the field
7095 (which we know to be the width of a basic mode), then
7096 storing into memory, and changing the mode to BLKmode.
7097 If we ultimately want the address (EXPAND_CONST_ADDRESS or
7098 EXPAND_INITIALIZER), then we must not copy to a temporary. */
7099 if (mode1 == VOIDmode
7100 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7101 || (modifier != EXPAND_CONST_ADDRESS
7102 && modifier != EXPAND_INITIALIZER
7103 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
7104 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7105 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7106 /* If the field isn't aligned enough to fetch as a memref,
7107 fetch it as a bit field. */
7108 || (mode1 != BLKmode
7109 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7110 && ((TYPE_ALIGN (TREE_TYPE (tem))
7111 < GET_MODE_ALIGNMENT (mode))
7112 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7113 /* If the type and the field are a constant size and the
7114 size of the type isn't the same size as the bitfield,
7115 we must use bitfield operations. */
7116 || ((bitsize >= 0
7117 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7118 == INTEGER_CST)
7119 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7120 bitsize)))))
7121 || (modifier != EXPAND_CONST_ADDRESS
7122 && modifier != EXPAND_INITIALIZER
7123 && mode == BLKmode
7124 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7125 && (TYPE_ALIGN (type) > alignment
7126 || bitpos % TYPE_ALIGN (type) != 0)))
7128 enum machine_mode ext_mode = mode;
7130 if (ext_mode == BLKmode
7131 && ! (target != 0 && GET_CODE (op0) == MEM
7132 && GET_CODE (target) == MEM
7133 && bitpos % BITS_PER_UNIT == 0))
7134 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7136 if (ext_mode == BLKmode)
7138 /* In this case, BITPOS must start at a byte boundary and
7139 TARGET, if specified, must be a MEM. */
7140 if (GET_CODE (op0) != MEM
7141 || (target != 0 && GET_CODE (target) != MEM)
7142 || bitpos % BITS_PER_UNIT != 0)
7143 abort ();
7145 op0 = change_address (op0, VOIDmode,
7146 plus_constant (XEXP (op0, 0),
7147 bitpos / BITS_PER_UNIT));
7148 if (target == 0)
7149 target = assign_temp (type, 0, 1, 1);
7151 emit_block_move (target, op0,
7152 bitsize == -1 ? expr_size (exp)
7153 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7154 / BITS_PER_UNIT),
7155 BITS_PER_UNIT);
7157 return target;
7160 op0 = validize_mem (op0);
7162 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7163 mark_reg_pointer (XEXP (op0, 0), alignment);
7165 op0 = extract_bit_field (op0, bitsize, bitpos,
7166 unsignedp, target, ext_mode, ext_mode,
7167 alignment,
7168 int_size_in_bytes (TREE_TYPE (tem)));
7170 /* If the result is a record type and BITSIZE is narrower than
7171 the mode of OP0, an integral mode, and this is a big endian
7172 machine, we must put the field into the high-order bits. */
7173 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7174 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7175 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7176 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7177 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7178 - bitsize),
7179 op0, 1);
7181 if (mode == BLKmode)
7183 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7184 TYPE_QUAL_CONST);
7185 rtx new = assign_temp (nt, 0, 1, 1);
7187 emit_move_insn (new, op0);
7188 op0 = copy_rtx (new);
7189 PUT_MODE (op0, BLKmode);
7192 return op0;
7195 /* If the result is BLKmode, use that to access the object
7196 now as well. */
7197 if (mode == BLKmode)
7198 mode1 = BLKmode;
7200 /* Get a reference to just this component. */
7201 if (modifier == EXPAND_CONST_ADDRESS
7202 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7204 rtx new = gen_rtx_MEM (mode1,
7205 plus_constant (XEXP (op0, 0),
7206 (bitpos / BITS_PER_UNIT)));
7208 MEM_COPY_ATTRIBUTES (new, op0);
7209 op0 = new;
7211 else
7212 op0 = change_address (op0, mode1,
7213 plus_constant (XEXP (op0, 0),
7214 (bitpos / BITS_PER_UNIT)));
7216 set_mem_attributes (op0, exp, 0);
7217 if (GET_CODE (XEXP (op0, 0)) == REG)
7218 mark_reg_pointer (XEXP (op0, 0), alignment);
7220 MEM_VOLATILE_P (op0) |= volatilep;
7221 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7222 || modifier == EXPAND_CONST_ADDRESS
7223 || modifier == EXPAND_INITIALIZER)
7224 return op0;
7225 else if (target == 0)
7226 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7228 convert_move (target, op0, unsignedp);
7229 return target;
7232 /* Intended for a reference to a buffer of a file-object in Pascal.
7233 But it's not certain that a special tree code will really be
7234 necessary for these. INDIRECT_REF might work for them. */
7235 case BUFFER_REF:
7236 abort ();
7238 case IN_EXPR:
7240 /* Pascal set IN expression.
7242 Algorithm:
7243 rlo = set_low - (set_low%bits_per_word);
7244 the_word = set [ (index - rlo)/bits_per_word ];
7245 bit_index = index % bits_per_word;
7246 bitmask = 1 << bit_index;
7247 return !!(the_word & bitmask); */
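/* A hypothetical trace of the algorithm above with bits_per_word = 8,
   set_low = 3 and index = 10: rlo = 3 - (3 % 8) = 0, the_word =
   set[(10 - 0) / 8] = set[1], bit_index = 10 % 8 = 2, bitmask =
   1 << 2 = 4, so the result is nonzero exactly when bit 2 of the
   second byte of the set is set.  */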
7249 tree set = TREE_OPERAND (exp, 0);
7250 tree index = TREE_OPERAND (exp, 1);
7251 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7252 tree set_type = TREE_TYPE (set);
7253 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7254 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7255 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7256 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7257 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7258 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7259 rtx setaddr = XEXP (setval, 0);
7260 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7261 rtx rlow;
7262 rtx diff, quo, rem, addr, bit, result;
7264 /* If domain is empty, answer is no. Likewise if index is constant
7265 and out of bounds. */
7266 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7267 && TREE_CODE (set_low_bound) == INTEGER_CST
7268 && tree_int_cst_lt (set_high_bound, set_low_bound))
7269 || (TREE_CODE (index) == INTEGER_CST
7270 && TREE_CODE (set_low_bound) == INTEGER_CST
7271 && tree_int_cst_lt (index, set_low_bound))
7272 || (TREE_CODE (set_high_bound) == INTEGER_CST
7273 && TREE_CODE (index) == INTEGER_CST
7274 && tree_int_cst_lt (set_high_bound, index))))
7275 return const0_rtx;
7277 if (target == 0)
7278 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7280 /* If we get here, we have to generate the code for both cases
7281 (in range and out of range). */
7283 op0 = gen_label_rtx ();
7284 op1 = gen_label_rtx ();
7286 if (! (GET_CODE (index_val) == CONST_INT
7287 && GET_CODE (lo_r) == CONST_INT))
7289 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7290 GET_MODE (index_val), iunsignedp, 0, op1);
7293 if (! (GET_CODE (index_val) == CONST_INT
7294 && GET_CODE (hi_r) == CONST_INT))
7296 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7297 GET_MODE (index_val), iunsignedp, 0, op1);
7300 /* Calculate the element number of bit zero in the first word
7301 of the set. */
7302 if (GET_CODE (lo_r) == CONST_INT)
7303 rlow = GEN_INT (INTVAL (lo_r)
7304 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7305 else
7306 rlow = expand_binop (index_mode, and_optab, lo_r,
7307 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7308 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7310 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7311 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7313 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7314 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7315 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7316 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7318 addr = memory_address (byte_mode,
7319 expand_binop (index_mode, add_optab, diff,
7320 setaddr, NULL_RTX, iunsignedp,
7321 OPTAB_LIB_WIDEN));
7323 /* Extract the bit we want to examine. */
7324 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7325 gen_rtx_MEM (byte_mode, addr),
7326 make_tree (TREE_TYPE (index), rem),
7327 NULL_RTX, 1);
7328 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7329 GET_MODE (target) == byte_mode ? target : 0,
7330 1, OPTAB_LIB_WIDEN);
7332 if (result != target)
7333 convert_move (target, result, 1);
7335 /* Output the code to handle the out-of-range case. */
7336 emit_jump (op0);
7337 emit_label (op1);
7338 emit_move_insn (target, const0_rtx);
7339 emit_label (op0);
7340 return target;
7343 case WITH_CLEANUP_EXPR:
7344 if (RTL_EXPR_RTL (exp) == 0)
7346 RTL_EXPR_RTL (exp)
7347 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7348 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7350 /* That's it for this cleanup. */
7351 TREE_OPERAND (exp, 2) = 0;
7353 return RTL_EXPR_RTL (exp);
7355 case CLEANUP_POINT_EXPR:
7357 /* Start a new binding layer that will keep track of all cleanup
7358 actions to be performed. */
7359 expand_start_bindings (2);
7361 target_temp_slot_level = temp_slot_level;
7363 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7364 /* If we're going to use this value, load it up now. */
7365 if (! ignore)
7366 op0 = force_not_mem (op0);
7367 preserve_temp_slots (op0);
7368 expand_end_bindings (NULL_TREE, 0, 0);
7370 return op0;
7372 case CALL_EXPR:
7373 /* Check for a built-in function. */
7374 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7375 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7376 == FUNCTION_DECL)
7377 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7379 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7380 == BUILT_IN_FRONTEND)
7381 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7382 else
7383 return expand_builtin (exp, target, subtarget, tmode, ignore);
7386 return expand_call (exp, target, ignore);
7388 case NON_LVALUE_EXPR:
7389 case NOP_EXPR:
7390 case CONVERT_EXPR:
7391 case REFERENCE_EXPR:
7392 if (TREE_OPERAND (exp, 0) == error_mark_node)
7393 return const0_rtx;
7395 if (TREE_CODE (type) == UNION_TYPE)
7397 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7399 /* If both input and output are BLKmode, this conversion
7400 isn't actually doing anything unless we need to make the
7401 alignment stricter. */
7402 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7403 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7404 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7405 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7406 modifier);
7408 if (target == 0)
7409 target = assign_temp (type, 0, 1, 1);
7411 if (GET_CODE (target) == MEM)
7412 /* Store data into beginning of memory target. */
7413 store_expr (TREE_OPERAND (exp, 0),
7414 change_address (target, TYPE_MODE (valtype), 0), 0);
7416 else if (GET_CODE (target) == REG)
7417 /* Store this field into a union of the proper type. */
7418 store_field (target,
7419 MIN ((int_size_in_bytes (TREE_TYPE
7420 (TREE_OPERAND (exp, 0)))
7421 * BITS_PER_UNIT),
7422 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7423 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7424 VOIDmode, 0, BITS_PER_UNIT,
7425 int_size_in_bytes (type), 0);
7426 else
7427 abort ();
7429 /* Return the entire union. */
7430 return target;
7433 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7435 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7436 ro_modifier);
7438 /* If the signedness of the conversion differs and OP0 is
7439 a promoted SUBREG, clear that indication since we now
7440 have to do the proper extension. */
7441 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7442 && GET_CODE (op0) == SUBREG)
7443 SUBREG_PROMOTED_VAR_P (op0) = 0;
7445 return op0;
7448 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7449 if (GET_MODE (op0) == mode)
7450 return op0;
7452 /* If OP0 is a constant, just convert it into the proper mode. */
7453 if (CONSTANT_P (op0))
7454 return
7455 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7456 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7458 if (modifier == EXPAND_INITIALIZER)
7459 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7461 if (target == 0)
7462 return
7463 convert_to_mode (mode, op0,
7464 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7465 else
7466 convert_move (target, op0,
7467 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7468 return target;
7470 case PLUS_EXPR:
7471 /* We come here from MINUS_EXPR when the second operand is a
7472 constant. */
7473 plus_expr:
7474 this_optab = ! unsignedp && flag_trapv
7475 && (GET_MODE_CLASS(mode) == MODE_INT)
7476 ? addv_optab : add_optab;
7478 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7479 something else, make sure we add the register to the constant and
7480 then to the other thing. This case can occur during strength
7481 reduction and doing it this way will produce better code if the
7482 frame pointer or argument pointer is eliminated.
7484 fold-const.c will ensure that the constant is always in the inner
7485 PLUS_EXPR, so the only case we need to do anything about is if
7486 sp, ap, or fp is our second argument, in which case we must swap
7487 the innermost first argument and our second argument. */
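/* Illustrative sketch, not part of the original sources: if strength
   reduction leaves us with the tree (X + 4) + FP, the swap below turns
   it into (FP + 4) + X, so that FP + 4 can still be folded into a
   single address once the frame pointer is eliminated.  */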
7489 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7490 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7491 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7492 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7493 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7494 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7496 tree t = TREE_OPERAND (exp, 1);
7498 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7499 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7502 /* If the result is to be ptr_mode and we are adding an integer to
7503 something, we might be forming a constant. So try to use
7504 plus_constant. If it produces a sum and we can't accept it,
7505 use force_operand. This allows P = &ARR[const] to generate
7506 efficient code on machines where a SYMBOL_REF is not a valid
7507 address.
7509 If this is an EXPAND_SUM call, always return the sum. */
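/* Illustrative example, not part of the original sources:

       static int arr[10];
       int *p = &arr[3];

   Here the address is ARR plus a compile-time constant, and
   plus_constant lets the value be emitted as a single SYMBOL_REF plus
   offset instead of run-time pointer arithmetic.  */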
7510 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7511 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7513 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7514 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7515 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7517 rtx constant_part;
7519 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7520 EXPAND_SUM);
7521 /* Use immed_double_const to ensure that the constant is
7522 truncated according to the mode of OP1, then sign extended
7523 to a HOST_WIDE_INT. Using the constant directly can result
7524 in non-canonical RTL in a 64x32 cross compile. */
7525 constant_part
7526 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7527 (HOST_WIDE_INT) 0,
7528 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7529 op1 = plus_constant (op1, INTVAL (constant_part));
7530 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7531 op1 = force_operand (op1, target);
7532 return op1;
7535 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7536 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7537 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7539 rtx constant_part;
7541 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7542 EXPAND_SUM);
7543 if (! CONSTANT_P (op0))
7545 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7546 VOIDmode, modifier);
7547 /* Don't go to both_summands if modifier
7548 says it's not right to return a PLUS. */
7549 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7550 goto binop2;
7551 goto both_summands;
7553 /* Use immed_double_const to ensure that the constant is
7554 truncated according to the mode of OP0, then sign extended
7555 to a HOST_WIDE_INT. Using the constant directly can result
7556 in non-canonical RTL in a 64x32 cross compile. */
7557 constant_part
7558 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7559 (HOST_WIDE_INT) 0,
7560 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7561 op0 = plus_constant (op0, INTVAL (constant_part));
7562 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7563 op0 = force_operand (op0, target);
7564 return op0;
7568 /* No sense saving up arithmetic to be done
7569 if it's all in the wrong mode to form part of an address.
7570 And force_operand won't know whether to sign-extend or
7571 zero-extend. */
7572 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7573 || mode != ptr_mode)
7574 goto binop;
7576 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7577 subtarget = 0;
7579 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7580 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7582 both_summands:
7583 /* Make sure any term that's a sum with a constant comes last. */
7584 if (GET_CODE (op0) == PLUS
7585 && CONSTANT_P (XEXP (op0, 1)))
7587 temp = op0;
7588 op0 = op1;
7589 op1 = temp;
7591 /* If adding to a sum including a constant,
7592 associate it to put the constant outside. */
7593 if (GET_CODE (op1) == PLUS
7594 && CONSTANT_P (XEXP (op1, 1)))
7596 rtx constant_term = const0_rtx;
7598 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7599 if (temp != 0)
7600 op0 = temp;
7601 /* Ensure that MULT comes first if there is one. */
7602 else if (GET_CODE (op0) == MULT)
7603 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7604 else
7605 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7607 /* Let's also eliminate constants from op0 if possible. */
7608 op0 = eliminate_constant_term (op0, &constant_term);
7610 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7611 their sum should be a constant. Form it into OP1, since the
7612 result we want will then be OP0 + OP1. */
7614 temp = simplify_binary_operation (PLUS, mode, constant_term,
7615 XEXP (op1, 1));
7616 if (temp != 0)
7617 op1 = temp;
7618 else
7619 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7622 /* Put a constant term last and put a multiplication first. */
7623 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7624 temp = op1, op1 = op0, op0 = temp;
7626 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7627 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7629 case MINUS_EXPR:
7630 /* For initializers, we are allowed to return a MINUS of two
7631 symbolic constants. Here we handle all cases when both operands
7632 are constant. */
7633 /* Handle difference of two symbolic constants,
7634 for the sake of an initializer. */
7635 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7636 && really_constant_p (TREE_OPERAND (exp, 0))
7637 && really_constant_p (TREE_OPERAND (exp, 1)))
7639 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7640 VOIDmode, ro_modifier);
7641 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7642 VOIDmode, ro_modifier);
7644 /* If the last operand is a CONST_INT, use plus_constant of
7645 the negated constant. Else make the MINUS. */
7646 if (GET_CODE (op1) == CONST_INT)
7647 return plus_constant (op0, - INTVAL (op1));
7648 else
7649 return gen_rtx_MINUS (mode, op0, op1);
7651 /* Convert A - const to A + (-const). */
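/* For instance (illustrative only), "i - 4" for a signed int I is
   rewritten here as "i + (-4)", so the PLUS_EXPR code above
   (plus_constant, the EXPAND_SUM handling) applies to it as well.  */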
7652 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7654 tree negated = fold (build1 (NEGATE_EXPR, type,
7655 TREE_OPERAND (exp, 1)));
7657 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7658 /* If we can't negate the constant in TYPE, leave it alone and
7659 expand_binop will negate it for us. We used to try to do it
7660 here in the signed version of TYPE, but that doesn't work
7661 on POINTER_TYPEs. */;
7662 else
7664 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7665 goto plus_expr;
7668 this_optab = ! unsignedp && flag_trapv
7669 && (GET_MODE_CLASS(mode) == MODE_INT)
7670 ? subv_optab : sub_optab;
7671 goto binop;
7673 case MULT_EXPR:
7674 /* If first operand is constant, swap them.
7675 Thus the following special case checks need only
7676 check the second operand. */
7677 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7679 register tree t1 = TREE_OPERAND (exp, 0);
7680 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7681 TREE_OPERAND (exp, 1) = t1;
7684 /* Attempt to return something suitable for generating an
7685 indexed address, for machines that support that. */
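/* Illustrative example, not part of the original sources: for
   a[i + 2] with 4-byte elements, the address tree (i + 2) * 4 is
   returned below as (i * 4) + 8, which matches base + index*scale +
   displacement addressing on machines that provide it.  */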
7687 if (modifier == EXPAND_SUM && mode == ptr_mode
7688 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7689 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7691 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7692 EXPAND_SUM);
7694 /* Apply distributive law if OP0 is x+c. */
7695 if (GET_CODE (op0) == PLUS
7696 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7697 return
7698 gen_rtx_PLUS
7699 (mode,
7700 gen_rtx_MULT
7701 (mode, XEXP (op0, 0),
7702 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7703 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7704 * INTVAL (XEXP (op0, 1))));
7706 if (GET_CODE (op0) != REG)
7707 op0 = force_operand (op0, NULL_RTX);
7708 if (GET_CODE (op0) != REG)
7709 op0 = copy_to_mode_reg (mode, op0);
7711 return
7712 gen_rtx_MULT (mode, op0,
7713 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7716 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7717 subtarget = 0;
7719 /* Check for multiplying things that have been extended
7720 from a narrower type. If this machine supports multiplying
7721 in that narrower type with a result in the desired type,
7722 do it that way, and avoid the explicit type-conversion. */
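/* Illustrative example, not part of the original sources: on a machine
   with a 32x32 -> 64 bit multiply instruction,

       long long ll = (long long) i * j;

   with I and J of type int is compiled with the widening multiply
   directly, instead of extending both operands to the wider mode
   first.  */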
7723 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7724 && TREE_CODE (type) == INTEGER_TYPE
7725 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7726 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7727 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7728 && int_fits_type_p (TREE_OPERAND (exp, 1),
7729 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7730 /* Don't use a widening multiply if a shift will do. */
7731 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7732 > HOST_BITS_PER_WIDE_INT)
7733 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7735 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7736 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7738 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7739 /* If both operands are extended, they must either both
7740 be zero-extended or both be sign-extended. */
7741 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7743 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7745 enum machine_mode innermode
7746 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7747 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7748 ? smul_widen_optab : umul_widen_optab);
7749 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7750 ? umul_widen_optab : smul_widen_optab);
7751 if (mode == GET_MODE_WIDER_MODE (innermode))
7753 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7755 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7756 NULL_RTX, VOIDmode, 0);
7757 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7758 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7759 VOIDmode, 0);
7760 else
7761 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7762 NULL_RTX, VOIDmode, 0);
7763 goto binop2;
7765 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7766 && innermode == word_mode)
7768 rtx htem;
7769 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7770 NULL_RTX, VOIDmode, 0);
7771 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7772 op1 = convert_modes (innermode, mode,
7773 expand_expr (TREE_OPERAND (exp, 1),
7774 NULL_RTX, VOIDmode, 0),
7775 unsignedp);
7776 else
7777 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7778 NULL_RTX, VOIDmode, 0);
7779 temp = expand_binop (mode, other_optab, op0, op1, target,
7780 unsignedp, OPTAB_LIB_WIDEN);
7781 htem = expand_mult_highpart_adjust (innermode,
7782 gen_highpart (innermode, temp),
7783 op0, op1,
7784 gen_highpart (innermode, temp),
7785 unsignedp);
7786 emit_move_insn (gen_highpart (innermode, temp), htem);
7787 return temp;
7791 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7792 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7793 return expand_mult (mode, op0, op1, target, unsignedp);
7795 case TRUNC_DIV_EXPR:
7796 case FLOOR_DIV_EXPR:
7797 case CEIL_DIV_EXPR:
7798 case ROUND_DIV_EXPR:
7799 case EXACT_DIV_EXPR:
7800 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7801 subtarget = 0;
7802 /* Possible optimization: compute the dividend with EXPAND_SUM
7803 then if the divisor is constant can optimize the case
7804 where some terms of the dividend have coeffs divisible by it. */
7805 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7806 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7807 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7809 case RDIV_EXPR:
7810 this_optab = flodiv_optab;
7811 goto binop;
7813 case TRUNC_MOD_EXPR:
7814 case FLOOR_MOD_EXPR:
7815 case CEIL_MOD_EXPR:
7816 case ROUND_MOD_EXPR:
7817 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7818 subtarget = 0;
7819 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7820 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7821 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7823 case FIX_ROUND_EXPR:
7824 case FIX_FLOOR_EXPR:
7825 case FIX_CEIL_EXPR:
7826 abort (); /* Not used for C. */
7828 case FIX_TRUNC_EXPR:
7829 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7830 if (target == 0)
7831 target = gen_reg_rtx (mode);
7832 expand_fix (target, op0, unsignedp);
7833 return target;
7835 case FLOAT_EXPR:
7836 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7837 if (target == 0)
7838 target = gen_reg_rtx (mode);
7839 /* expand_float can't figure out what to do if FROM has VOIDmode.
7840 So give it the correct mode. With -O, cse will optimize this. */
7841 if (GET_MODE (op0) == VOIDmode)
7842 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7843 op0);
7844 expand_float (target, op0,
7845 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7846 return target;
7848 case NEGATE_EXPR:
7849 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7850 temp = expand_unop (mode,
7851 ! unsignedp && flag_trapv
7852 && (GET_MODE_CLASS(mode) == MODE_INT)
7853 ? negv_optab : neg_optab, op0, target, 0);
7854 if (temp == 0)
7855 abort ();
7856 return temp;
7858 case ABS_EXPR:
7859 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7861 /* Handle complex values specially. */
7862 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7863 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7864 return expand_complex_abs (mode, op0, target, unsignedp);
7866 /* Unsigned abs is simply the operand. Testing here means we don't
7867 risk generating incorrect code below. */
7868 if (TREE_UNSIGNED (type))
7869 return op0;
7871 return expand_abs (mode, op0, target, unsignedp,
7872 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7874 case MAX_EXPR:
7875 case MIN_EXPR:
7876 target = original_target;
7877 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7878 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7879 || GET_MODE (target) != mode
7880 || (GET_CODE (target) == REG
7881 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7882 target = gen_reg_rtx (mode);
7883 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7884 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7886 /* First try to do it with a special MIN or MAX instruction.
7887 If that does not win, use a conditional jump to select the proper
7888 value. */
7889 this_optab = (TREE_UNSIGNED (type)
7890 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7891 : (code == MIN_EXPR ? smin_optab : smax_optab));
7893 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7894 OPTAB_WIDEN);
7895 if (temp != 0)
7896 return temp;
7898 /* At this point, a MEM target is no longer useful; we will get better
7899 code without it. */
7901 if (GET_CODE (target) == MEM)
7902 target = gen_reg_rtx (mode);
7904 if (target != op0)
7905 emit_move_insn (target, op0);
7907 op0 = gen_label_rtx ();
7909 /* If this mode is an integer too wide to compare properly,
7910 compare word by word. Rely on cse to optimize constant cases. */
7911 if (GET_MODE_CLASS (mode) == MODE_INT
7912 && ! can_compare_p (GE, mode, ccp_jump))
7914 if (code == MAX_EXPR)
7915 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7916 target, op1, NULL_RTX, op0);
7917 else
7918 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7919 op1, target, NULL_RTX, op0);
7921 else
7923 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7924 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7925 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7926 op0);
7928 emit_move_insn (target, op1);
7929 emit_label (op0);
7930 return target;
7932 case BIT_NOT_EXPR:
7933 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7934 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7935 if (temp == 0)
7936 abort ();
7937 return temp;
7939 case FFS_EXPR:
7940 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7941 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7942 if (temp == 0)
7943 abort ();
7944 return temp;
7946 /* ??? Can optimize bitwise operations with one arg constant.
7947 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7948 and (a bitwise1 b) bitwise2 b (etc)
7949 but that is probably not worthwhile. */
7951 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7952 boolean values when we want in all cases to compute both of them. In
7953 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7954 as actual zero-or-1 values and then bitwise anding. In cases where
7955 there cannot be any side effects, better code would be made by
7956 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7957 how to recognize those cases. */
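/* Illustrative sketch, not part of the original sources: if a front end
   hands us "a && b" as a TRUTH_AND_EXPR because neither operand has
   side effects, the code below evaluates both operands as zero-or-one
   values and combines them as

       result = (a != 0) & (b != 0);

   whereas a TRUTH_ANDIF_EXPR must skip B whenever A is zero.  */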
7959 case TRUTH_AND_EXPR:
7960 case BIT_AND_EXPR:
7961 this_optab = and_optab;
7962 goto binop;
7964 case TRUTH_OR_EXPR:
7965 case BIT_IOR_EXPR:
7966 this_optab = ior_optab;
7967 goto binop;
7969 case TRUTH_XOR_EXPR:
7970 case BIT_XOR_EXPR:
7971 this_optab = xor_optab;
7972 goto binop;
7974 case LSHIFT_EXPR:
7975 case RSHIFT_EXPR:
7976 case LROTATE_EXPR:
7977 case RROTATE_EXPR:
7978 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7979 subtarget = 0;
7980 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7981 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7982 unsignedp);
7984 /* Could determine the answer when only additive constants differ. Also,
7985 the addition of one can be handled by changing the condition. */
7986 case LT_EXPR:
7987 case LE_EXPR:
7988 case GT_EXPR:
7989 case GE_EXPR:
7990 case EQ_EXPR:
7991 case NE_EXPR:
7992 case UNORDERED_EXPR:
7993 case ORDERED_EXPR:
7994 case UNLT_EXPR:
7995 case UNLE_EXPR:
7996 case UNGT_EXPR:
7997 case UNGE_EXPR:
7998 case UNEQ_EXPR:
7999 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8000 if (temp != 0)
8001 return temp;
8003 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8004 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8005 && original_target
8006 && GET_CODE (original_target) == REG
8007 && (GET_MODE (original_target)
8008 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8010 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8011 VOIDmode, 0);
8013 if (temp != original_target)
8014 temp = copy_to_reg (temp);
8016 op1 = gen_label_rtx ();
8017 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8018 GET_MODE (temp), unsignedp, 0, op1);
8019 emit_move_insn (temp, const1_rtx);
8020 emit_label (op1);
8021 return temp;
8024 /* If no set-flag instruction, must generate a conditional
8025 store into a temporary variable. Drop through
8026 and handle this like && and ||. */
8028 case TRUTH_ANDIF_EXPR:
8029 case TRUTH_ORIF_EXPR:
8030 if (! ignore
8031 && (target == 0 || ! safe_from_p (target, exp, 1)
8032 /* Make sure we don't have a hard reg (such as function's return
8033 value) live across basic blocks, if not optimizing. */
8034 || (!optimize && GET_CODE (target) == REG
8035 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8036 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8038 if (target)
8039 emit_clr_insn (target);
8041 op1 = gen_label_rtx ();
8042 jumpifnot (exp, op1);
8044 if (target)
8045 emit_0_to_1_insn (target);
8047 emit_label (op1);
8048 return ignore ? const0_rtx : target;
8050 case TRUTH_NOT_EXPR:
8051 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8052 /* The parser is careful to generate TRUTH_NOT_EXPR
8053 only with operands that are always zero or one. */
8054 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8055 target, 1, OPTAB_LIB_WIDEN);
8056 if (temp == 0)
8057 abort ();
8058 return temp;
8060 case COMPOUND_EXPR:
8061 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8062 emit_queue ();
8063 return expand_expr (TREE_OPERAND (exp, 1),
8064 (ignore ? const0_rtx : target),
8065 VOIDmode, 0);
8067 case COND_EXPR:
8068 /* If we would have a "singleton" (see below) were it not for a
8069 conversion in each arm, bring that conversion back out. */
8070 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8071 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8072 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8073 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8075 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8076 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8078 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8079 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8080 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8081 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8082 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8083 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8084 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8085 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8086 return expand_expr (build1 (NOP_EXPR, type,
8087 build (COND_EXPR, TREE_TYPE (iftrue),
8088 TREE_OPERAND (exp, 0),
8089 iftrue, iffalse)),
8090 target, tmode, modifier);
8094 /* Note that COND_EXPRs whose type is a structure or union
8095 are required to be constructed to contain assignments of
8096 a temporary variable, so that we can evaluate them here
8097 for side effect only. If type is void, we must do likewise. */
8099 /* If an arm of the branch requires a cleanup,
8100 only that cleanup is performed. */
8102 tree singleton = 0;
8103 tree binary_op = 0, unary_op = 0;
8105 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8106 convert it to our mode, if necessary. */
8107 if (integer_onep (TREE_OPERAND (exp, 1))
8108 && integer_zerop (TREE_OPERAND (exp, 2))
8109 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8111 if (ignore)
8113 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8114 ro_modifier);
8115 return const0_rtx;
8118 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8119 if (GET_MODE (op0) == mode)
8120 return op0;
8122 if (target == 0)
8123 target = gen_reg_rtx (mode);
8124 convert_move (target, op0, unsignedp);
8125 return target;
8128 /* Check for X ? A + B : A. If we have this, we can copy A to the
8129 output and conditionally add B. Similarly for unary operations.
8130 Don't do this if X has side-effects because those side effects
8131 might affect A or B and the "?" operation is a sequence point in
8132 ANSI. (operand_equal_p tests for side effects.) */
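/* Illustrative example, not part of the original sources:

       y = cond ? x + 8 : x;

   can be expanded as "copy X to the target, then conditionally add 8",
   provided COND has no side effects; the same idea applies to unary
   operators, as in "cond ? -x : x".  */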
8134 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8135 && operand_equal_p (TREE_OPERAND (exp, 2),
8136 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8137 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8138 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8139 && operand_equal_p (TREE_OPERAND (exp, 1),
8140 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8141 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8142 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8143 && operand_equal_p (TREE_OPERAND (exp, 2),
8144 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8145 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8146 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8147 && operand_equal_p (TREE_OPERAND (exp, 1),
8148 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8149 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8151 /* If we are not to produce a result, we have no target. Otherwise,
8152 if a target was specified use it; it will not be used as an
8153 intermediate target unless it is safe. If no target, use a
8154 temporary. */
8156 if (ignore)
8157 temp = 0;
8158 else if (original_target
8159 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8160 || (singleton && GET_CODE (original_target) == REG
8161 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8162 && original_target == var_rtx (singleton)))
8163 && GET_MODE (original_target) == mode
8164 #ifdef HAVE_conditional_move
8165 && (! can_conditionally_move_p (mode)
8166 || GET_CODE (original_target) == REG
8167 || TREE_ADDRESSABLE (type))
8168 #endif
8169 && ! (GET_CODE (original_target) == MEM
8170 && MEM_VOLATILE_P (original_target)))
8171 temp = original_target;
8172 else if (TREE_ADDRESSABLE (type))
8173 abort ();
8174 else
8175 temp = assign_temp (type, 0, 0, 1);
8177 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8178 do the test of X as a store-flag operation, do this as
8179 A + ((X != 0) << log C). Similarly for other simple binary
8180 operators. Only do for C == 1 if BRANCH_COST is low. */
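/* Illustrative example, not part of the original sources: with C == 4,

       y = (a > b) ? x + 4 : x;

   becomes, when the comparison can be done as a store-flag operation,

       y = x + ((a > b) << 2);

   which needs no branches at all.  */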
8181 if (temp && singleton && binary_op
8182 && (TREE_CODE (binary_op) == PLUS_EXPR
8183 || TREE_CODE (binary_op) == MINUS_EXPR
8184 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8185 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8186 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8187 : integer_onep (TREE_OPERAND (binary_op, 1)))
8188 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8190 rtx result;
8191 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8192 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8193 ? addv_optab : add_optab)
8194 : TREE_CODE (binary_op) == MINUS_EXPR
8195 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8196 ? subv_optab : sub_optab)
8197 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8198 : xor_optab);
8200 /* If we had X ? A : A + 1, do this as A + (X == 0).
8202 We have to invert the truth value here and then put it
8203 back later if do_store_flag fails. We cannot simply copy
8204 TREE_OPERAND (exp, 0) to another variable and modify that
8205 because invert_truthvalue can modify the tree pointed to
8206 by its argument. */
8207 if (singleton == TREE_OPERAND (exp, 1))
8208 TREE_OPERAND (exp, 0)
8209 = invert_truthvalue (TREE_OPERAND (exp, 0));
8211 result = do_store_flag (TREE_OPERAND (exp, 0),
8212 (safe_from_p (temp, singleton, 1)
8213 ? temp : NULL_RTX),
8214 mode, BRANCH_COST <= 1);
8216 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8217 result = expand_shift (LSHIFT_EXPR, mode, result,
8218 build_int_2 (tree_log2
8219 (TREE_OPERAND
8220 (binary_op, 1)),
8222 (safe_from_p (temp, singleton, 1)
8223 ? temp : NULL_RTX), 0);
8225 if (result)
8227 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8228 return expand_binop (mode, boptab, op1, result, temp,
8229 unsignedp, OPTAB_LIB_WIDEN);
8231 else if (singleton == TREE_OPERAND (exp, 1))
8232 TREE_OPERAND (exp, 0)
8233 = invert_truthvalue (TREE_OPERAND (exp, 0));
8236 do_pending_stack_adjust ();
8237 NO_DEFER_POP;
8238 op0 = gen_label_rtx ();
8240 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8242 if (temp != 0)
8244 /* If the target conflicts with the other operand of the
8245 binary op, we can't use it. Also, we can't use the target
8246 if it is a hard register, because evaluating the condition
8247 might clobber it. */
8248 if ((binary_op
8249 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8250 || (GET_CODE (temp) == REG
8251 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8252 temp = gen_reg_rtx (mode);
8253 store_expr (singleton, temp, 0);
8255 else
8256 expand_expr (singleton,
8257 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8258 if (singleton == TREE_OPERAND (exp, 1))
8259 jumpif (TREE_OPERAND (exp, 0), op0);
8260 else
8261 jumpifnot (TREE_OPERAND (exp, 0), op0);
8263 start_cleanup_deferral ();
8264 if (binary_op && temp == 0)
8265 /* Just touch the other operand. */
8266 expand_expr (TREE_OPERAND (binary_op, 1),
8267 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8268 else if (binary_op)
8269 store_expr (build (TREE_CODE (binary_op), type,
8270 make_tree (type, temp),
8271 TREE_OPERAND (binary_op, 1)),
8272 temp, 0);
8273 else
8274 store_expr (build1 (TREE_CODE (unary_op), type,
8275 make_tree (type, temp)),
8276 temp, 0);
8277 op1 = op0;
8279 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8280 comparison operator. If we have one of these cases, set the
8281 output to A, branch on A (cse will merge these two references),
8282 then set the output to FOO. */
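/* Illustrative example, not part of the original sources: for

       y = (x != 0) ? x : some_default;

   we store X into the target, branch on X itself, and only store
   SOME_DEFAULT on the fall-through path.  */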
8283 else if (temp
8284 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8285 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8286 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8287 TREE_OPERAND (exp, 1), 0)
8288 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8289 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8290 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8292 if (GET_CODE (temp) == REG
8293 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8294 temp = gen_reg_rtx (mode);
8295 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8296 jumpif (TREE_OPERAND (exp, 0), op0);
8298 start_cleanup_deferral ();
8299 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8300 op1 = op0;
8302 else if (temp
8303 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8304 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8305 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8306 TREE_OPERAND (exp, 2), 0)
8307 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8308 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8309 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8311 if (GET_CODE (temp) == REG
8312 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8313 temp = gen_reg_rtx (mode);
8314 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8315 jumpifnot (TREE_OPERAND (exp, 0), op0);
8317 start_cleanup_deferral ();
8318 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8319 op1 = op0;
8321 else
8323 op1 = gen_label_rtx ();
8324 jumpifnot (TREE_OPERAND (exp, 0), op0);
8326 start_cleanup_deferral ();
8328 /* One branch of the cond can be void, if it never returns. For
8329 example A ? throw : E */
8330 if (temp != 0
8331 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8332 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8333 else
8334 expand_expr (TREE_OPERAND (exp, 1),
8335 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8336 end_cleanup_deferral ();
8337 emit_queue ();
8338 emit_jump_insn (gen_jump (op1));
8339 emit_barrier ();
8340 emit_label (op0);
8341 start_cleanup_deferral ();
8342 if (temp != 0
8343 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8344 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8345 else
8346 expand_expr (TREE_OPERAND (exp, 2),
8347 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8350 end_cleanup_deferral ();
8352 emit_queue ();
8353 emit_label (op1);
8354 OK_DEFER_POP;
8356 return temp;
8359 case TARGET_EXPR:
8361 /* Something needs to be initialized, but we didn't know
8362 where that thing was when building the tree. For example,
8363 it could be the return value of a function, or a parameter
8364 to a function which lays down in the stack, or a temporary
8365 variable which must be passed by reference.
8367 We guarantee that the expression will either be constructed
8368 or copied into our original target. */
8370 tree slot = TREE_OPERAND (exp, 0);
8371 tree cleanups = NULL_TREE;
8372 tree exp1;
8374 if (TREE_CODE (slot) != VAR_DECL)
8375 abort ();
8377 if (! ignore)
8378 target = original_target;
8380 /* Set this here so that if we get a target that refers to a
8381 register variable that's already been used, put_reg_into_stack
8382 knows that it should fix up those uses. */
8383 TREE_USED (slot) = 1;
8385 if (target == 0)
8387 if (DECL_RTL_SET_P (slot))
8389 target = DECL_RTL (slot);
8390 /* If we have already expanded the slot, don't do
8391 it again. (mrs) */
8392 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8393 return target;
8395 else
8397 target = assign_temp (type, 2, 0, 1);
8398 /* All temp slots at this level must not conflict. */
8399 preserve_temp_slots (target);
8400 SET_DECL_RTL (slot, target);
8401 if (TREE_ADDRESSABLE (slot))
8402 put_var_into_stack (slot);
8404 /* Since SLOT is not known to the called function
8405 to belong to its stack frame, we must build an explicit
8406 cleanup. This case occurs when we must build up a reference
8407 to pass the reference as an argument. In this case,
8408 it is very likely that such a reference need not be
8409 built here. */
8411 if (TREE_OPERAND (exp, 2) == 0)
8412 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8413 cleanups = TREE_OPERAND (exp, 2);
8416 else
8418 /* This case does occur when expanding a parameter which
8419 needs to be constructed on the stack. The target
8420 is the actual stack address that we want to initialize.
8421 The function we call will perform the cleanup in this case. */
8423 /* If we have already assigned it space, use that space,
8424 not target that we were passed in, as our target
8425 parameter is only a hint. */
8426 if (DECL_RTL_SET_P (slot))
8428 target = DECL_RTL (slot);
8429 /* If we have already expanded the slot, don't do
8430 it again. (mrs) */
8431 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8432 return target;
8434 else
8436 SET_DECL_RTL (slot, target);
8437 /* If we must have an addressable slot, then make sure that
8438 the RTL that we just stored in slot is OK. */
8439 if (TREE_ADDRESSABLE (slot))
8440 put_var_into_stack (slot);
8444 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8445 /* Mark it as expanded. */
8446 TREE_OPERAND (exp, 1) = NULL_TREE;
8448 store_expr (exp1, target, 0);
8450 expand_decl_cleanup (NULL_TREE, cleanups);
8452 return target;
8455 case INIT_EXPR:
8457 tree lhs = TREE_OPERAND (exp, 0);
8458 tree rhs = TREE_OPERAND (exp, 1);
8459 tree noncopied_parts = 0;
8460 tree lhs_type = TREE_TYPE (lhs);
8462 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8463 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8464 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8465 TYPE_NONCOPIED_PARTS (lhs_type));
8466 while (noncopied_parts != 0)
8468 expand_assignment (TREE_VALUE (noncopied_parts),
8469 TREE_PURPOSE (noncopied_parts), 0, 0);
8470 noncopied_parts = TREE_CHAIN (noncopied_parts);
8472 return temp;
8475 case MODIFY_EXPR:
8477 /* If lhs is complex, expand calls in rhs before computing it.
8478 That's so we don't compute a pointer and save it over a call.
8479 If lhs is simple, compute it first so we can give it as a
8480 target if the rhs is just a call. This avoids an extra temp and copy
8481 and that prevents a partial-subsumption which makes bad code.
8482 Actually we could treat component_ref's of vars like vars. */
8484 tree lhs = TREE_OPERAND (exp, 0);
8485 tree rhs = TREE_OPERAND (exp, 1);
8486 tree noncopied_parts = 0;
8487 tree lhs_type = TREE_TYPE (lhs);
8489 temp = 0;
8491 /* Check for |= or &= of a bitfield of size one into another bitfield
8492 of size 1. In this case, (unless we need the result of the
8493 assignment) we can do this more efficiently with a
8494 test followed by an assignment, if necessary.
8496 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8497 things change so we do, this code should be enhanced to
8498 support it. */
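/* Illustrative example, not part of the original sources: given

       struct s { unsigned int a : 1, b : 1; } *p;
       p->a |= p->b;

   and an unused result, we can emit the equivalent of
   "if (p->b) p->a = 1;" instead of reading, or-ing and rewriting the
   A bit-field.  */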
8499 if (ignore
8500 && TREE_CODE (lhs) == COMPONENT_REF
8501 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8502 || TREE_CODE (rhs) == BIT_AND_EXPR)
8503 && TREE_OPERAND (rhs, 0) == lhs
8504 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8505 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8506 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8508 rtx label = gen_label_rtx ();
8510 do_jump (TREE_OPERAND (rhs, 1),
8511 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8512 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8513 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8514 (TREE_CODE (rhs) == BIT_IOR_EXPR
8515 ? integer_one_node
8516 : integer_zero_node)),
8517 0, 0);
8518 do_pending_stack_adjust ();
8519 emit_label (label);
8520 return const0_rtx;
8523 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8524 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8525 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8526 TYPE_NONCOPIED_PARTS (lhs_type));
8528 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8529 while (noncopied_parts != 0)
8531 expand_assignment (TREE_PURPOSE (noncopied_parts),
8532 TREE_VALUE (noncopied_parts), 0, 0);
8533 noncopied_parts = TREE_CHAIN (noncopied_parts);
8535 return temp;
8538 case RETURN_EXPR:
8539 if (!TREE_OPERAND (exp, 0))
8540 expand_null_return ();
8541 else
8542 expand_return (TREE_OPERAND (exp, 0));
8543 return const0_rtx;
8545 case PREINCREMENT_EXPR:
8546 case PREDECREMENT_EXPR:
8547 return expand_increment (exp, 0, ignore);
8549 case POSTINCREMENT_EXPR:
8550 case POSTDECREMENT_EXPR:
8551 /* Faster to treat as pre-increment if result is not used. */
8552 return expand_increment (exp, ! ignore, ignore);
8554 case ADDR_EXPR:
8555 /* If nonzero, TEMP will be set to the address of something that might
8556 be a MEM corresponding to a stack slot. */
8557 temp = 0;
8559 /* Are we taking the address of a nested function? */
8560 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8561 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8562 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8563 && ! TREE_STATIC (exp))
8565 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8566 op0 = force_operand (op0, target);
8568 /* If we are taking the address of something erroneous, just
8569 return a zero. */
8570 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8571 return const0_rtx;
8572 else
8574 /* We make sure to pass const0_rtx down if we came in with
8575 ignore set, to avoid doing the cleanups twice for something. */
8576 op0 = expand_expr (TREE_OPERAND (exp, 0),
8577 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8578 (modifier == EXPAND_INITIALIZER
8579 ? modifier : EXPAND_CONST_ADDRESS));
8581 /* If we are going to ignore the result, OP0 will have been set
8582 to const0_rtx, so just return it. Don't get confused and
8583 think we are taking the address of the constant. */
8584 if (ignore)
8585 return op0;
8587 op0 = protect_from_queue (op0, 0);
8589 /* We would like the object in memory. If it is a constant, we can
8590 have it be statically allocated into memory. For a non-constant,
8591 we need to allocate some memory and store the value into it. */
8593 if (CONSTANT_P (op0))
8594 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8595 op0);
8596 else if (GET_CODE (op0) == MEM)
8598 mark_temp_addr_taken (op0);
8599 temp = XEXP (op0, 0);
8602 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8603 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8604 || GET_CODE (op0) == PARALLEL)
8606 /* If this object is in a register, it must not
8607 be BLKmode. */
8608 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8609 tree nt = build_qualified_type (inner_type,
8610 (TYPE_QUALS (inner_type)
8611 | TYPE_QUAL_CONST));
8612 rtx memloc = assign_temp (nt, 1, 1, 1);
8614 mark_temp_addr_taken (memloc);
8615 if (GET_CODE (op0) == PARALLEL)
8616 /* Handle calls that pass values in multiple non-contiguous
8617 locations. The Irix 6 ABI has examples of this. */
8618 emit_group_store (memloc, op0,
8619 int_size_in_bytes (inner_type),
8620 TYPE_ALIGN (inner_type));
8621 else
8622 emit_move_insn (memloc, op0);
8623 op0 = memloc;
8626 if (GET_CODE (op0) != MEM)
8627 abort ();
8629 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8631 temp = XEXP (op0, 0);
8632 #ifdef POINTERS_EXTEND_UNSIGNED
8633 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8634 && mode == ptr_mode)
8635 temp = convert_memory_address (ptr_mode, temp);
8636 #endif
8637 return temp;
8640 op0 = force_operand (XEXP (op0, 0), target);
8643 if (flag_force_addr && GET_CODE (op0) != REG)
8644 op0 = force_reg (Pmode, op0);
8646 if (GET_CODE (op0) == REG
8647 && ! REG_USERVAR_P (op0))
8648 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8650 /* If we might have had a temp slot, add an equivalent address
8651 for it. */
8652 if (temp != 0)
8653 update_temp_slot_address (temp, op0);
8655 #ifdef POINTERS_EXTEND_UNSIGNED
8656 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8657 && mode == ptr_mode)
8658 op0 = convert_memory_address (ptr_mode, op0);
8659 #endif
8661 return op0;
8663 case ENTRY_VALUE_EXPR:
8664 abort ();
8666 /* COMPLEX type for Extended Pascal & Fortran */
8667 case COMPLEX_EXPR:
8669 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8670 rtx insns;
8672 /* Get the rtx code of the operands. */
8673 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8674 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8676 if (! target)
8677 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8679 start_sequence ();
8681 /* Move the real (op0) and imaginary (op1) parts to their location. */
8682 emit_move_insn (gen_realpart (mode, target), op0);
8683 emit_move_insn (gen_imagpart (mode, target), op1);
8685 insns = get_insns ();
8686 end_sequence ();
8688 /* Complex construction should appear as a single unit. */
8689 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8690 each with a separate pseudo as destination.
8691 It's not correct for flow to treat them as a unit. */
8692 if (GET_CODE (target) != CONCAT)
8693 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8694 else
8695 emit_insns (insns);
8697 return target;
8700 case REALPART_EXPR:
8701 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8702 return gen_realpart (mode, op0);
8704 case IMAGPART_EXPR:
8705 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8706 return gen_imagpart (mode, op0);
8708 case CONJ_EXPR:
8710 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8711 rtx imag_t;
8712 rtx insns;
8714 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8716 if (! target)
8717 target = gen_reg_rtx (mode);
8719 start_sequence ();
8721 /* Store the realpart and the negated imagpart to target. */
8722 emit_move_insn (gen_realpart (partmode, target),
8723 gen_realpart (partmode, op0));
8725 imag_t = gen_imagpart (partmode, target);
8726 temp = expand_unop (partmode,
8727 ! unsignedp && flag_trapv
8728 && (GET_MODE_CLASS(partmode) == MODE_INT)
8729 ? negv_optab : neg_optab,
8730 gen_imagpart (partmode, op0), imag_t, 0);
8731 if (temp != imag_t)
8732 emit_move_insn (imag_t, temp);
8734 insns = get_insns ();
8735 end_sequence ();
8737 /* Conjugate should appear as a single unit.
8738 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8739 each with a separate pseudo as destination.
8740 It's not correct for flow to treat them as a unit. */
8741 if (GET_CODE (target) != CONCAT)
8742 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8743 else
8744 emit_insns (insns);
8746 return target;
8749 case TRY_CATCH_EXPR:
8751 tree handler = TREE_OPERAND (exp, 1);
8753 expand_eh_region_start ();
8755 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8757 expand_eh_region_end_cleanup (handler);
8759 return op0;
8762 case TRY_FINALLY_EXPR:
8764 tree try_block = TREE_OPERAND (exp, 0);
8765 tree finally_block = TREE_OPERAND (exp, 1);
8766 rtx finally_label = gen_label_rtx ();
8767 rtx done_label = gen_label_rtx ();
8768 rtx return_link = gen_reg_rtx (Pmode);
8769 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8770 (tree) finally_label, (tree) return_link);
8771 TREE_SIDE_EFFECTS (cleanup) = 1;
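/* Rough sketch of the layout emitted below (illustrative only): the
   cleanup expanded at the end of the binding contour does

       return_link = <label just after the jump>; goto finally_label;

   then we jump to DONE_LABEL; the finally block itself ends with an
   indirect jump back through RETURN_LINK, so it can be shared by the
   normal path and by any other path that runs the cleanup.  */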
8773 /* Start a new binding layer that will keep track of all cleanup
8774 actions to be performed. */
8775 expand_start_bindings (2);
8777 target_temp_slot_level = temp_slot_level;
8779 expand_decl_cleanup (NULL_TREE, cleanup);
8780 op0 = expand_expr (try_block, target, tmode, modifier);
8782 preserve_temp_slots (op0);
8783 expand_end_bindings (NULL_TREE, 0, 0);
8784 emit_jump (done_label);
8785 emit_label (finally_label);
8786 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8787 emit_indirect_jump (return_link);
8788 emit_label (done_label);
8789 return op0;
8792 case GOTO_SUBROUTINE_EXPR:
8794 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8795 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8796 rtx return_address = gen_label_rtx ();
8797 emit_move_insn (return_link,
8798 gen_rtx_LABEL_REF (Pmode, return_address));
8799 emit_jump (subr);
8800 emit_label (return_address);
8801 return const0_rtx;
8804 case VA_ARG_EXPR:
8805 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8807 case EXC_PTR_EXPR:
8808 return get_exception_pointer ();
8810 default:
8811 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8814 /* Here to do an ordinary binary operator, generating an instruction
8815 from the optab already placed in `this_optab'. */
8816 binop:
8817 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8818 subtarget = 0;
8819 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8820 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8821 binop2:
8822 temp = expand_binop (mode, this_optab, op0, op1, target,
8823 unsignedp, OPTAB_LIB_WIDEN);
8824 if (temp == 0)
8825 abort ();
8826 return temp;
8829 /* Similar to expand_expr, except that we don't specify a target, target
8830 mode, or modifier and we return the alignment of the inner type. This is
8831 used in cases where it is not necessary to align the result to the
8832 alignment of its type as long as we know the alignment of the result, for
8833 example for comparisons of BLKmode values. */
8835 static rtx
8836 expand_expr_unaligned (exp, palign)
8837 register tree exp;
8838 unsigned int *palign;
8840 register rtx op0;
8841 tree type = TREE_TYPE (exp);
8842 register enum machine_mode mode = TYPE_MODE (type);
8844 /* Default the alignment we return to that of the type. */
8845 *palign = TYPE_ALIGN (type);
8847 /* The only case in which we do anything special is if the resulting mode
8848 is BLKmode. */
8849 if (mode != BLKmode)
8850 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8852 switch (TREE_CODE (exp))
8854 case CONVERT_EXPR:
8855 case NOP_EXPR:
8856 case NON_LVALUE_EXPR:
8857 /* Conversions between BLKmode values don't change the underlying
8858 alignment or value. */
8859 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8860 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8861 break;
8863 case ARRAY_REF:
8864 /* Much of the code for this case is copied directly from expand_expr.
8865 We need to duplicate it here because we will do something different
8866 in the fall-through case, so we need to handle the same exceptions
8867 it does. */
8869 tree array = TREE_OPERAND (exp, 0);
8870 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8871 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8872 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8873 HOST_WIDE_INT i;
8875 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8876 abort ();
8878 /* Optimize the special-case of a zero lower bound.
8880 We convert the low_bound to sizetype to avoid some problems
8881 with constant folding. (E.g. suppose the lower bound is 1,
8882 and its mode is QI. Without the conversion, (ARRAY
8883 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8884 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8886 if (! integer_zerop (low_bound))
8887 index = size_diffop (index, convert (sizetype, low_bound));
8889 /* If this is a constant index into a constant array,
8890 just get the value from the array. Handle both the cases when
8891 we have an explicit constructor and when our operand is a variable
8892 that was declared const. */
8894 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8895 && host_integerp (index, 0)
8896 && 0 > compare_tree_int (index,
8897 list_length (CONSTRUCTOR_ELTS
8898 (TREE_OPERAND (exp, 0)))))
8900 tree elem;
8902 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8903 i = tree_low_cst (index, 0);
8904 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8907 if (elem)
8908 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8911 else if (optimize >= 1
8912 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8913 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8914 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8916 if (TREE_CODE (index) == INTEGER_CST)
8918 tree init = DECL_INITIAL (array);
8920 if (TREE_CODE (init) == CONSTRUCTOR)
8922 tree elem;
8924 for (elem = CONSTRUCTOR_ELTS (init);
8925 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8926 elem = TREE_CHAIN (elem))
8929 if (elem)
8930 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8931 palign);
8936 /* Fall through. */
8938 case COMPONENT_REF:
8939 case BIT_FIELD_REF:
8940 /* If the operand is a CONSTRUCTOR, we can just extract the
8941 appropriate field if it is present. Don't do this if we have
8942 already written the data since we want to refer to that copy
8943 and varasm.c assumes that's what we'll do. */
8944 if (TREE_CODE (exp) != ARRAY_REF
8945 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8946 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8948 tree elt;
8950 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8951 elt = TREE_CHAIN (elt))
8952 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8953 /* Note that unlike the case in expand_expr, we know this is
8954 BLKmode and hence not an integer. */
8955 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8959 enum machine_mode mode1;
8960 HOST_WIDE_INT bitsize, bitpos;
8961 tree offset;
8962 int volatilep = 0;
8963 unsigned int alignment;
8964 int unsignedp;
8965 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8966 &mode1, &unsignedp, &volatilep,
8967 &alignment);
8969 /* If we got back the original object, something is wrong. Perhaps
8970 we are evaluating an expression too early. In any event, don't
8971 infinitely recurse. */
8972 if (tem == exp)
8973 abort ();
8975 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8977 /* If this is a constant, put it into a register if it is a
8978 legitimate constant and OFFSET is 0; otherwise put it in memory. */
8979 if (CONSTANT_P (op0))
8981 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8983 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8984 && offset == 0)
8985 op0 = force_reg (inner_mode, op0);
8986 else
8987 op0 = validize_mem (force_const_mem (inner_mode, op0));
8990 if (offset != 0)
8992 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8994 /* If this object is in a register, put it into memory.
8995 This case can't occur in C, but can in Ada if we have
8996 unchecked conversion of an expression from a scalar type to
8997 an array or record type. */
8998 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8999 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9001 tree nt = build_qualified_type (TREE_TYPE (tem),
9002 (TYPE_QUALS (TREE_TYPE (tem))
9003 | TYPE_QUAL_CONST));
9004 rtx memloc = assign_temp (nt, 1, 1, 1);
9006 mark_temp_addr_taken (memloc);
9007 emit_move_insn (memloc, op0);
9008 op0 = memloc;
9011 if (GET_CODE (op0) != MEM)
9012 abort ();
9014 if (GET_MODE (offset_rtx) != ptr_mode)
9016 #ifdef POINTERS_EXTEND_UNSIGNED
9017 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9018 #else
9019 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9020 #endif
9023 op0 = change_address (op0, VOIDmode,
9024 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9025 force_reg (ptr_mode,
9026 offset_rtx)));
9029 /* Don't forget about volatility even if this is a bitfield. */
9030 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9032 op0 = copy_rtx (op0);
9033 MEM_VOLATILE_P (op0) = 1;
9036 /* Check the access. */
9037 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9039 rtx to;
9040 int size;
9042 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9043 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9045 /* Check the access right of the pointer. */
9046 in_check_memory_usage = 1;
9047 if (size > BITS_PER_UNIT)
9048 emit_library_call (chkr_check_addr_libfunc,
9049 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9050 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9051 TYPE_MODE (sizetype),
9052 GEN_INT (MEMORY_USE_RO),
9053 TYPE_MODE (integer_type_node));
9054 in_check_memory_usage = 0;
9057 /* In cases where an aligned union has an unaligned object
9058 as a field, we might be extracting a BLKmode value from
9059 an integer-mode (e.g., SImode) object. Handle this case
9060 by doing the extract into an object as wide as the field
9061 (which we know to be the width of a basic mode), then
9062 storing into memory, and changing the mode to BLKmode.
9063 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9064 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9065 if (mode1 == VOIDmode
9066 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9067 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9068 && (TYPE_ALIGN (type) > alignment
9069 || bitpos % TYPE_ALIGN (type) != 0)))
9071 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9073 if (ext_mode == BLKmode)
9075 /* In this case, BITPOS must start at a byte boundary. */
9076 if (GET_CODE (op0) != MEM
9077 || bitpos % BITS_PER_UNIT != 0)
9078 abort ();
9080 op0 = change_address (op0, VOIDmode,
9081 plus_constant (XEXP (op0, 0),
9082 bitpos / BITS_PER_UNIT));
9084 else
9086 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9087 TYPE_QUAL_CONST);
9088 rtx new = assign_temp (nt, 0, 1, 1);
9090 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9091 unsignedp, NULL_RTX, ext_mode,
9092 ext_mode, alignment,
9093 int_size_in_bytes (TREE_TYPE (tem)));
9095 /* If the result is a record type and BITSIZE is narrower than
9096 the mode of OP0, an integral mode, and this is a big endian
9097 machine, we must put the field into the high-order bits. */
9098 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9099 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9100 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9101 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9102 size_int (GET_MODE_BITSIZE
9103 (GET_MODE (op0))
9104 - bitsize),
9105 op0, 1);
9107 emit_move_insn (new, op0);
9108 op0 = copy_rtx (new);
9109 PUT_MODE (op0, BLKmode);
9112 else
9113 /* Get a reference to just this component. */
9114 op0 = change_address (op0, mode1,
9115 plus_constant (XEXP (op0, 0),
9116 (bitpos / BITS_PER_UNIT)));
9118 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9120 /* Adjust the alignment in case the bit position is not
9121 a multiple of the alignment of the inner object. */
9122 while (bitpos % alignment != 0)
9123 alignment >>= 1;
9125 if (GET_CODE (XEXP (op0, 0)) == REG)
9126 mark_reg_pointer (XEXP (op0, 0), alignment);
9128 MEM_IN_STRUCT_P (op0) = 1;
9129 MEM_VOLATILE_P (op0) |= volatilep;
9131 *palign = alignment;
9132 return op0;
9135 default:
9136 break;
9140 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9143 /* Return the tree node if ARG corresponds to a string constant or zero
9144 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9145 in bytes within the string that ARG is accessing. The type of the
9146 offset will be `sizetype'. */
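/* For instance (an illustrative sketch, not an exhaustive list of the
   forms handled): in a call such as

     strlen ("hello" + 2)

   the argument typically reaches this function as a PLUS_EXPR of the
   address of the STRING_CST "hello" and the constant 2, so the
   STRING_CST is returned and *PTR_OFFSET is set to 2; a plain "hello"
   argument is the ADDR_EXPR case, with *PTR_OFFSET set to zero.  */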
9148 tree
9149 string_constant (arg, ptr_offset)
9150 tree arg;
9151 tree *ptr_offset;
9153 STRIP_NOPS (arg);
9155 if (TREE_CODE (arg) == ADDR_EXPR
9156 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9158 *ptr_offset = size_zero_node;
9159 return TREE_OPERAND (arg, 0);
9161 else if (TREE_CODE (arg) == PLUS_EXPR)
9163 tree arg0 = TREE_OPERAND (arg, 0);
9164 tree arg1 = TREE_OPERAND (arg, 1);
9166 STRIP_NOPS (arg0);
9167 STRIP_NOPS (arg1);
9169 if (TREE_CODE (arg0) == ADDR_EXPR
9170 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9172 *ptr_offset = convert (sizetype, arg1);
9173 return TREE_OPERAND (arg0, 0);
9175 else if (TREE_CODE (arg1) == ADDR_EXPR
9176 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9178 *ptr_offset = convert (sizetype, arg0);
9179 return TREE_OPERAND (arg1, 0);
9183 return 0;
9186 /* Expand code for a post- or pre-increment or decrement
9187 and return the RTX for the result.
9188 POST is 1 for postincrement/decrement and 0 for preincrement/decrement. */
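/* As a rough illustration of the POST distinction:

     int a = 5, x, y;
     x = a++;
     y = ++a;

   After the first assignment x is 5 and a is 6: the old value is the
   result, and the increment itself may be queued and only emitted at
   the next emit_queue.  After the second, y and a are both 7: the new
   value is the result, so the increment must be performed before the
   value is used.  */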
9190 static rtx
9191 expand_increment (exp, post, ignore)
9192 register tree exp;
9193 int post, ignore;
9195 register rtx op0, op1;
9196 register rtx temp, value;
9197 register tree incremented = TREE_OPERAND (exp, 0);
9198 optab this_optab = add_optab;
9199 int icode;
9200 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9201 int op0_is_copy = 0;
9202 int single_insn = 0;
9203 /* 1 means we can't store into OP0 directly,
9204 because it is a subreg narrower than a word,
9205 and we don't dare clobber the rest of the word. */
9206 int bad_subreg = 0;
9208 /* Stabilize any component ref that might need to be
9209 evaluated more than once below. */
9210 if (!post
9211 || TREE_CODE (incremented) == BIT_FIELD_REF
9212 || (TREE_CODE (incremented) == COMPONENT_REF
9213 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9214 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9215 incremented = stabilize_reference (incremented);
9216 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9217 ones into save exprs so that they don't accidentally get evaluated
9218 more than once by the code below. */
9219 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9220 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9221 incremented = save_expr (incremented);
9223 /* Compute the operands as RTX.
9224 Note whether OP0 is the actual lvalue or a copy of it:
9225 I believe it is a copy iff it is a register or subreg
9226 and insns were generated in computing it. */
9228 temp = get_last_insn ();
9229 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9231 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9232 in place but instead must do sign- or zero-extension during assignment,
9233 so we copy it into a new register and let the code below use it as
9234 a copy.
9236 Note that we can safely modify this SUBREG since it is known not to be
9237 shared (it was made by the expand_expr call above). */
9239 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9241 if (post)
9242 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9243 else
9244 bad_subreg = 1;
9246 else if (GET_CODE (op0) == SUBREG
9247 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9249 /* We cannot increment this SUBREG in place. If we are
9250 post-incrementing, get a copy of the old value. Otherwise,
9251 just mark that we cannot increment in place. */
9252 if (post)
9253 op0 = copy_to_reg (op0);
9254 else
9255 bad_subreg = 1;
9258 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9259 && temp != get_last_insn ());
9260 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9261 EXPAND_MEMORY_USE_BAD);
9263 /* Decide whether incrementing or decrementing. */
9264 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9265 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9266 this_optab = sub_optab;
9268 /* Convert decrement by a constant into a negative increment. */
9269 if (this_optab == sub_optab
9270 && GET_CODE (op1) == CONST_INT)
9272 op1 = GEN_INT (-INTVAL (op1));
9273 this_optab = add_optab;
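/* For example (illustrative only): for `x--' the constant 1 arrives
   here as OP1; it is negated so the operation becomes an add of -1,
   and only add_optab (or addv_optab just below) need be consulted.  */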
9276 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9277 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9279 /* For a preincrement, see if we can do this with a single instruction. */
9280 if (!post)
9282 icode = (int) this_optab->handlers[(int) mode].insn_code;
9283 if (icode != (int) CODE_FOR_nothing
9284 /* Make sure that OP0 is valid for operands 0 and 1
9285 of the insn we want to queue. */
9286 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9287 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9288 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9289 single_insn = 1;
9292 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9293 then we cannot just increment OP0. We must therefore contrive to
9294 increment the original value. Then, for postincrement, we can return
9295 OP0 since it is a copy of the old value. For preincrement, expand here
9296 unless we can do it with a single insn.
9298 Likewise if storing directly into OP0 would clobber high bits
9299 we need to preserve (bad_subreg). */
9300 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9302 /* This is the easiest way to increment the value wherever it is.
9303 Problems with multiple evaluation of INCREMENTED are prevented
9304 because either (1) it is a component_ref or preincrement,
9305 in which case it was stabilized above, or (2) it is an array_ref
9306 with constant index in an array in a register, which is
9307 safe to reevaluate. */
9308 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9309 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9310 ? MINUS_EXPR : PLUS_EXPR),
9311 TREE_TYPE (exp),
9312 incremented,
9313 TREE_OPERAND (exp, 1));
9315 while (TREE_CODE (incremented) == NOP_EXPR
9316 || TREE_CODE (incremented) == CONVERT_EXPR)
9318 newexp = convert (TREE_TYPE (incremented), newexp);
9319 incremented = TREE_OPERAND (incremented, 0);
9322 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9323 return post ? op0 : temp;
9326 if (post)
9328 /* We have a true reference to the value in OP0.
9329 If there is an insn to add or subtract in this mode, queue it.
9330 Queueing the increment insn avoids the register shuffling
9331 that often results if we must increment now and first save
9332 the old value for subsequent use. */
9334 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9335 op0 = stabilize (op0);
9336 #endif
9338 icode = (int) this_optab->handlers[(int) mode].insn_code;
9339 if (icode != (int) CODE_FOR_nothing
9340 /* Make sure that OP0 is valid for operands 0 and 1
9341 of the insn we want to queue. */
9342 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9343 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9345 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9346 op1 = force_reg (mode, op1);
9348 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9350 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9352 rtx addr = (general_operand (XEXP (op0, 0), mode)
9353 ? force_reg (Pmode, XEXP (op0, 0))
9354 : copy_to_reg (XEXP (op0, 0)));
9355 rtx temp, result;
9357 op0 = change_address (op0, VOIDmode, addr);
9358 temp = force_reg (GET_MODE (op0), op0);
9359 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9360 op1 = force_reg (mode, op1);
9362 /* The increment queue is LIFO, thus we have to `queue'
9363 the instructions in reverse order. */
9364 enqueue_insn (op0, gen_move_insn (op0, temp));
9365 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9366 return result;
9370 /* Preincrement, or we can't increment with one simple insn. */
9371 if (post)
9372 /* Save a copy of the value before inc or dec, to return it later. */
9373 temp = value = copy_to_reg (op0);
9374 else
9375 /* Arrange to return the incremented value. */
9376 /* Copy the rtx because expand_binop will protect from the queue,
9377 and the results of that would be invalid for us to return
9378 if our caller does emit_queue before using our result. */
9379 temp = copy_rtx (value = op0);
9381 /* Increment however we can. */
9382 op1 = expand_binop (mode, this_optab, value, op1,
9383 current_function_check_memory_usage ? NULL_RTX : op0,
9384 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9385 /* Make sure the value is stored into OP0. */
9386 if (op1 != op0)
9387 emit_move_insn (op0, op1);
9389 return temp;
9392 /* At the start of a function, record that we have no previously-pushed
9393 arguments waiting to be popped. */
9395 void
9396 init_pending_stack_adjust ()
9398 pending_stack_adjust = 0;
9401 /* When exiting from a function, if safe, clear out any pending stack adjust
9402 so the adjustment won't get done.
9404 Note, if the current function calls alloca, then it must have a
9405 frame pointer regardless of the value of flag_omit_frame_pointer. */
9407 void
9408 clear_pending_stack_adjust ()
9410 #ifdef EXIT_IGNORE_STACK
9411 if (optimize > 0
9412 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9413 && EXIT_IGNORE_STACK
9414 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9415 && ! flag_inline_functions)
9417 stack_pointer_delta -= pending_stack_adjust,
9418 pending_stack_adjust = 0;
9420 #endif
9423 /* Pop any previously-pushed arguments that have not been popped yet. */
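/* A simplified sketch of why this is useful: with popping deferred,
   code such as

     foo (1);
     bar (2);
     baz (3);

   can leave the pushed argument words of all three calls on the stack;
   their sizes accumulate in pending_stack_adjust, and the single
   adjust_stack emitted here removes them all at once instead of one
   stack adjustment per call.  */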
9425 void
9426 do_pending_stack_adjust ()
9428 if (inhibit_defer_pop == 0)
9430 if (pending_stack_adjust != 0)
9431 adjust_stack (GEN_INT (pending_stack_adjust));
9432 pending_stack_adjust = 0;
9436 /* Expand conditional expressions. */
9438 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9439 LABEL is an rtx of code CODE_LABEL, in this function and all the
9440 functions here. */
9442 void
9443 jumpifnot (exp, label)
9444 tree exp;
9445 rtx label;
9447 do_jump (exp, label, NULL_RTX);
9450 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9452 void
9453 jumpif (exp, label)
9454 tree exp;
9455 rtx label;
9457 do_jump (exp, NULL_RTX, label);
9460 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9461 the result is zero, or IF_TRUE_LABEL if the result is one.
9462 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9463 meaning fall through in that case.
9465 do_jump always does any pending stack adjust except when it does not
9466 actually perform a jump. An example where there is no jump
9467 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9469 This function is responsible for optimizing cases such as
9470 &&, || and comparison operators in EXP. */
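/* As a rough illustration of the short-circuit handling done here: for

     if (a && b)
       f ();

   do_jump is called on the TRUTH_ANDIF_EXPR with the label that
   follows the `if' as IF_FALSE_LABEL; it first jumps there if `a' is
   zero and only then evaluates and tests `b', so `b' is never
   evaluated when `a' is false.  */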
9472 void
9473 do_jump (exp, if_false_label, if_true_label)
9474 tree exp;
9475 rtx if_false_label, if_true_label;
9477 register enum tree_code code = TREE_CODE (exp);
9478 /* Some cases need to create a label to jump to
9479 in order to properly fall through.
9480 These cases set DROP_THROUGH_LABEL nonzero. */
9481 rtx drop_through_label = 0;
9482 rtx temp;
9483 int i;
9484 tree type;
9485 enum machine_mode mode;
9487 #ifdef MAX_INTEGER_COMPUTATION_MODE
9488 check_max_integer_computation_mode (exp);
9489 #endif
9491 emit_queue ();
9493 switch (code)
9495 case ERROR_MARK:
9496 break;
9498 case INTEGER_CST:
9499 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9500 if (temp)
9501 emit_jump (temp);
9502 break;
9504 #if 0
9505 /* This is not true with #pragma weak */
9506 case ADDR_EXPR:
9507 /* The address of something can never be zero. */
9508 if (if_true_label)
9509 emit_jump (if_true_label);
9510 break;
9511 #endif
9513 case NOP_EXPR:
9514 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9515 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9516 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9517 goto normal;
9518 case CONVERT_EXPR:
9519 /* If we are narrowing the operand, we have to do the compare in the
9520 narrower mode. */
9521 if ((TYPE_PRECISION (TREE_TYPE (exp))
9522 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9523 goto normal;
9524 case NON_LVALUE_EXPR:
9525 case REFERENCE_EXPR:
9526 case ABS_EXPR:
9527 case NEGATE_EXPR:
9528 case LROTATE_EXPR:
9529 case RROTATE_EXPR:
9530 /* These cannot change zero->non-zero or vice versa. */
9531 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9532 break;
9534 case WITH_RECORD_EXPR:
9535 /* Put the object on the placeholder list, recurse through our first
9536 operand, and pop the list. */
9537 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9538 placeholder_list);
9539 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9540 placeholder_list = TREE_CHAIN (placeholder_list);
9541 break;
9543 #if 0
9544 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9545 a test and can be longer if the test is eliminated. */
9546 case PLUS_EXPR:
9547 /* Reduce to minus. */
9548 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9549 TREE_OPERAND (exp, 0),
9550 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9551 TREE_OPERAND (exp, 1))));
9552 /* Process as MINUS. */
9553 #endif
9555 case MINUS_EXPR:
9556 /* Non-zero iff operands of minus differ. */
9557 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9558 TREE_OPERAND (exp, 0),
9559 TREE_OPERAND (exp, 1)),
9560 NE, NE, if_false_label, if_true_label);
9561 break;
9563 case BIT_AND_EXPR:
9564 /* If we are AND'ing with a small constant, do this comparison in the
9565 smallest type that fits. If the machine doesn't have comparisons
9566 that small, it will be converted back to the wider comparison.
9567 This helps if we are testing the sign bit of a narrower object.
9568 combine can't do this for us because it can't know whether a
9569 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
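/* For instance (illustrative only): with `int x', a test such as

     if (x & 0x80)
       ...

   uses a mask whose highest set bit is bit 7, so when the target has a
   QImode compare the test can be narrowed to an 8-bit comparison; in
   particular a sign-bit test of a narrower field becomes a cheap byte
   compare instead of a full-width one.  */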
9571 if (! SLOW_BYTE_ACCESS
9572 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9573 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9574 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9575 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9576 && (type = type_for_mode (mode, 1)) != 0
9577 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9578 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9579 != CODE_FOR_nothing))
9581 do_jump (convert (type, exp), if_false_label, if_true_label);
9582 break;
9584 goto normal;
9586 case TRUTH_NOT_EXPR:
9587 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9588 break;
9590 case TRUTH_ANDIF_EXPR:
9591 if (if_false_label == 0)
9592 if_false_label = drop_through_label = gen_label_rtx ();
9593 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9594 start_cleanup_deferral ();
9595 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9596 end_cleanup_deferral ();
9597 break;
9599 case TRUTH_ORIF_EXPR:
9600 if (if_true_label == 0)
9601 if_true_label = drop_through_label = gen_label_rtx ();
9602 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9603 start_cleanup_deferral ();
9604 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9605 end_cleanup_deferral ();
9606 break;
9608 case COMPOUND_EXPR:
9609 push_temp_slots ();
9610 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9611 preserve_temp_slots (NULL_RTX);
9612 free_temp_slots ();
9613 pop_temp_slots ();
9614 emit_queue ();
9615 do_pending_stack_adjust ();
9616 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9617 break;
9619 case COMPONENT_REF:
9620 case BIT_FIELD_REF:
9621 case ARRAY_REF:
9623 HOST_WIDE_INT bitsize, bitpos;
9624 int unsignedp;
9625 enum machine_mode mode;
9626 tree type;
9627 tree offset;
9628 int volatilep = 0;
9629 unsigned int alignment;
9631 /* Get description of this reference. We don't actually care
9632 about the underlying object here. */
9633 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9634 &unsignedp, &volatilep, &alignment);
9636 type = type_for_size (bitsize, unsignedp);
9637 if (! SLOW_BYTE_ACCESS
9638 && type != 0 && bitsize >= 0
9639 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9640 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9641 != CODE_FOR_nothing))
9643 do_jump (convert (type, exp), if_false_label, if_true_label);
9644 break;
9646 goto normal;
9649 case COND_EXPR:
9650 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9651 if (integer_onep (TREE_OPERAND (exp, 1))
9652 && integer_zerop (TREE_OPERAND (exp, 2)))
9653 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9655 else if (integer_zerop (TREE_OPERAND (exp, 1))
9656 && integer_onep (TREE_OPERAND (exp, 2)))
9657 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9659 else
9661 register rtx label1 = gen_label_rtx ();
9662 drop_through_label = gen_label_rtx ();
9664 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9666 start_cleanup_deferral ();
9667 /* Now the THEN-expression. */
9668 do_jump (TREE_OPERAND (exp, 1),
9669 if_false_label ? if_false_label : drop_through_label,
9670 if_true_label ? if_true_label : drop_through_label);
9671 /* In case the do_jump just above never jumps. */
9672 do_pending_stack_adjust ();
9673 emit_label (label1);
9675 /* Now the ELSE-expression. */
9676 do_jump (TREE_OPERAND (exp, 2),
9677 if_false_label ? if_false_label : drop_through_label,
9678 if_true_label ? if_true_label : drop_through_label);
9679 end_cleanup_deferral ();
9681 break;
9683 case EQ_EXPR:
9685 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9687 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9688 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9690 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9691 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9692 do_jump
9693 (fold
9694 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9695 fold (build (EQ_EXPR, TREE_TYPE (exp),
9696 fold (build1 (REALPART_EXPR,
9697 TREE_TYPE (inner_type),
9698 exp0)),
9699 fold (build1 (REALPART_EXPR,
9700 TREE_TYPE (inner_type),
9701 exp1)))),
9702 fold (build (EQ_EXPR, TREE_TYPE (exp),
9703 fold (build1 (IMAGPART_EXPR,
9704 TREE_TYPE (inner_type),
9705 exp0)),
9706 fold (build1 (IMAGPART_EXPR,
9707 TREE_TYPE (inner_type),
9708 exp1)))))),
9709 if_false_label, if_true_label);
9712 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9713 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9715 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9716 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9717 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9718 else
9719 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9720 break;
9723 case NE_EXPR:
9725 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9727 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9728 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9730 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9731 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9732 do_jump
9733 (fold
9734 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9735 fold (build (NE_EXPR, TREE_TYPE (exp),
9736 fold (build1 (REALPART_EXPR,
9737 TREE_TYPE (inner_type),
9738 exp0)),
9739 fold (build1 (REALPART_EXPR,
9740 TREE_TYPE (inner_type),
9741 exp1)))),
9742 fold (build (NE_EXPR, TREE_TYPE (exp),
9743 fold (build1 (IMAGPART_EXPR,
9744 TREE_TYPE (inner_type),
9745 exp0)),
9746 fold (build1 (IMAGPART_EXPR,
9747 TREE_TYPE (inner_type),
9748 exp1)))))),
9749 if_false_label, if_true_label);
9752 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9753 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9755 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9756 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9757 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9758 else
9759 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9760 break;
9763 case LT_EXPR:
9764 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9765 if (GET_MODE_CLASS (mode) == MODE_INT
9766 && ! can_compare_p (LT, mode, ccp_jump))
9767 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9768 else
9769 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9770 break;
9772 case LE_EXPR:
9773 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9774 if (GET_MODE_CLASS (mode) == MODE_INT
9775 && ! can_compare_p (LE, mode, ccp_jump))
9776 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9777 else
9778 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9779 break;
9781 case GT_EXPR:
9782 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9783 if (GET_MODE_CLASS (mode) == MODE_INT
9784 && ! can_compare_p (GT, mode, ccp_jump))
9785 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9786 else
9787 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9788 break;
9790 case GE_EXPR:
9791 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9792 if (GET_MODE_CLASS (mode) == MODE_INT
9793 && ! can_compare_p (GE, mode, ccp_jump))
9794 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9795 else
9796 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9797 break;
9799 case UNORDERED_EXPR:
9800 case ORDERED_EXPR:
9802 enum rtx_code cmp, rcmp;
9803 int do_rev;
9805 if (code == UNORDERED_EXPR)
9806 cmp = UNORDERED, rcmp = ORDERED;
9807 else
9808 cmp = ORDERED, rcmp = UNORDERED;
9809 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9811 do_rev = 0;
9812 if (! can_compare_p (cmp, mode, ccp_jump)
9813 && (can_compare_p (rcmp, mode, ccp_jump)
9814 /* If the target doesn't provide either UNORDERED or ORDERED
9815 comparisons, canonicalize on UNORDERED for the library. */
9816 || rcmp == UNORDERED))
9817 do_rev = 1;
9819 if (! do_rev)
9820 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9821 else
9822 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9824 break;
9827 enum rtx_code rcode1;
9828 enum tree_code tcode2;
9830 case UNLT_EXPR:
9831 rcode1 = UNLT;
9832 tcode2 = LT_EXPR;
9833 goto unordered_bcc;
9834 case UNLE_EXPR:
9835 rcode1 = UNLE;
9836 tcode2 = LE_EXPR;
9837 goto unordered_bcc;
9838 case UNGT_EXPR:
9839 rcode1 = UNGT;
9840 tcode2 = GT_EXPR;
9841 goto unordered_bcc;
9842 case UNGE_EXPR:
9843 rcode1 = UNGE;
9844 tcode2 = GE_EXPR;
9845 goto unordered_bcc;
9846 case UNEQ_EXPR:
9847 rcode1 = UNEQ;
9848 tcode2 = EQ_EXPR;
9849 goto unordered_bcc;
9851 unordered_bcc:
9852 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9853 if (can_compare_p (rcode1, mode, ccp_jump))
9854 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9855 if_true_label);
9856 else
9858 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9859 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9860 tree cmp0, cmp1;
9862 /* If the target doesn't support combined unordered
9863 compares, decompose into UNORDERED + comparison. */
9864 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9865 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9866 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9867 do_jump (exp, if_false_label, if_true_label);
9870 break;
9872 default:
9873 normal:
9874 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9875 #if 0
9876 /* This is not needed any more and causes poor code since it causes
9877 comparisons and tests from non-SI objects to have different code
9878 sequences. */
9879 /* Copy to register to avoid generating bad insns by cse
9880 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9881 if (!cse_not_expected && GET_CODE (temp) == MEM)
9882 temp = copy_to_reg (temp);
9883 #endif
9884 do_pending_stack_adjust ();
9885 /* Do any postincrements in the expression that was tested. */
9886 emit_queue ();
9888 if (GET_CODE (temp) == CONST_INT
9889 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9890 || GET_CODE (temp) == LABEL_REF)
9892 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9893 if (target)
9894 emit_jump (target);
9896 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9897 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9898 /* Note swapping the labels gives us not-equal. */
9899 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9900 else if (GET_MODE (temp) != VOIDmode)
9901 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9902 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9903 GET_MODE (temp), NULL_RTX, 0,
9904 if_false_label, if_true_label);
9905 else
9906 abort ();
9909 if (drop_through_label)
9911 /* If do_jump produces code that might be jumped around,
9912 do any stack adjusts from that code, before the place
9913 where control merges in. */
9914 do_pending_stack_adjust ();
9915 emit_label (drop_through_label);
9919 /* Given a comparison expression EXP for values too wide to be compared
9920 with one insn, test the comparison and jump to the appropriate label.
9921 The code of EXP is ignored; we always test GT if SWAP is 0,
9922 and LT if SWAP is 1. */
9924 static void
9925 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9926 tree exp;
9927 int swap;
9928 rtx if_false_label, if_true_label;
9930 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9931 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9932 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9933 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9935 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9938 /* Compare OP0 with OP1, a word at a time, in mode MODE.
9939 UNSIGNEDP says to do unsigned comparison.
9940 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
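/* A sketch of the sequence produced for a DImode comparison on a
   32-bit target (nwords == 2): the high words are compared first, with
   GT (signed or unsigned as requested) jumping to IF_TRUE_LABEL and
   then with NE jumping to IF_FALSE_LABEL; only when the high words are
   equal does the loop go on to the low words, which are always
   compared unsigned.  */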
9942 void
9943 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9944 enum machine_mode mode;
9945 int unsignedp;
9946 rtx op0, op1;
9947 rtx if_false_label, if_true_label;
9949 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9950 rtx drop_through_label = 0;
9951 int i;
9953 if (! if_true_label || ! if_false_label)
9954 drop_through_label = gen_label_rtx ();
9955 if (! if_true_label)
9956 if_true_label = drop_through_label;
9957 if (! if_false_label)
9958 if_false_label = drop_through_label;
9960 /* Compare a word at a time, high order first. */
9961 for (i = 0; i < nwords; i++)
9963 rtx op0_word, op1_word;
9965 if (WORDS_BIG_ENDIAN)
9967 op0_word = operand_subword_force (op0, i, mode);
9968 op1_word = operand_subword_force (op1, i, mode);
9970 else
9972 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9973 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9976 /* All but the high-order word must be compared as unsigned. */
9977 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9978 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9979 NULL_RTX, if_true_label);
9981 /* Consider lower words only if these are equal. */
9982 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9983 NULL_RTX, 0, NULL_RTX, if_false_label);
9986 if (if_false_label)
9987 emit_jump (if_false_label);
9988 if (drop_through_label)
9989 emit_label (drop_through_label);
9992 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9993 with one insn, test the comparison and jump to the appropriate label. */
9995 static void
9996 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9997 tree exp;
9998 rtx if_false_label, if_true_label;
10000 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10001 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10002 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10003 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10004 int i;
10005 rtx drop_through_label = 0;
10007 if (! if_false_label)
10008 drop_through_label = if_false_label = gen_label_rtx ();
10010 for (i = 0; i < nwords; i++)
10011 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10012 operand_subword_force (op1, i, mode),
10013 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10014 word_mode, NULL_RTX, 0, if_false_label,
10015 NULL_RTX);
10017 if (if_true_label)
10018 emit_jump (if_true_label);
10019 if (drop_through_label)
10020 emit_label (drop_through_label);
10023 /* Jump according to whether OP0 is 0.
10024 We assume that OP0 has an integer mode that is too wide
10025 for the available compare insns. */
10027 void
10028 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10029 rtx op0;
10030 rtx if_false_label, if_true_label;
10032 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10033 rtx part;
10034 int i;
10035 rtx drop_through_label = 0;
10037 /* The fastest way of doing this comparison on almost any machine is to
10038 "or" all the words and compare the result. If all have to be loaded
10039 from memory and this is a very wide item, it's possible this may
10040 be slower, but that's highly unlikely. */
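/* For a DImode OP0 on a 32-bit target the fast path below amounts to
   roughly

     part = low_word | high_word;
     if (part == 0) goto if_true_label;  else goto if_false_label;

   where a null label simply means fall through.  If the widening IOR
   cannot be done, the word-by-word loop further down is used
   instead.  */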
10042 part = gen_reg_rtx (word_mode);
10043 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10044 for (i = 1; i < nwords && part != 0; i++)
10045 part = expand_binop (word_mode, ior_optab, part,
10046 operand_subword_force (op0, i, GET_MODE (op0)),
10047 part, 1, OPTAB_WIDEN);
10049 if (part != 0)
10051 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10052 NULL_RTX, 0, if_false_label, if_true_label);
10054 return;
10057 /* If we couldn't do the "or" simply, do this with a series of compares. */
10058 if (! if_false_label)
10059 drop_through_label = if_false_label = gen_label_rtx ();
10061 for (i = 0; i < nwords; i++)
10062 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10063 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10064 if_false_label, NULL_RTX);
10066 if (if_true_label)
10067 emit_jump (if_true_label);
10069 if (drop_through_label)
10070 emit_label (drop_through_label);
10073 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10074 (including code to compute the values to be compared)
10075 and set (CC0) according to the result.
10076 The decision as to signed or unsigned comparison must be made by the caller.
10078 We force a stack adjustment unless there are currently
10079 things pushed on the stack that aren't yet used.
10081 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10082 compared.
10084 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10085 size of MODE should be used. */
10087 rtx
10088 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10089 register rtx op0, op1;
10090 enum rtx_code code;
10091 int unsignedp;
10092 enum machine_mode mode;
10093 rtx size;
10094 unsigned int align;
10096 rtx tem;
10098 /* If one operand is constant, make it the second one. Only do this
10099 if the other operand is not constant as well. */
10101 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10102 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10104 tem = op0;
10105 op0 = op1;
10106 op1 = tem;
10107 code = swap_condition (code);
10110 if (flag_force_mem)
10112 op0 = force_not_mem (op0);
10113 op1 = force_not_mem (op1);
10116 do_pending_stack_adjust ();
10118 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10119 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10120 return tem;
10122 #if 0
10123 /* There's no need to do this now that combine.c can eliminate lots of
10124 sign extensions. This can be less efficient in certain cases on other
10125 machines. */
10127 /* If this is a signed equality comparison, we can do it as an
10128 unsigned comparison since zero-extension is cheaper than sign
10129 extension and comparisons with zero are done as unsigned. This is
10130 the case even on machines that can do fast sign extension, since
10131 zero-extension is easier to combine with other operations than
10132 sign-extension is. If we are comparing against a constant, we must
10133 convert it to what it would look like unsigned. */
10134 if ((code == EQ || code == NE) && ! unsignedp
10135 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10137 if (GET_CODE (op1) == CONST_INT
10138 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10139 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10140 unsignedp = 1;
10142 #endif
10144 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10146 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10149 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10150 The decision as to signed or unsigned comparison must be made by the caller.
10152 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10153 compared.
10155 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10156 size of MODE should be used. */
10158 void
10159 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10160 if_false_label, if_true_label)
10161 register rtx op0, op1;
10162 enum rtx_code code;
10163 int unsignedp;
10164 enum machine_mode mode;
10165 rtx size;
10166 unsigned int align;
10167 rtx if_false_label, if_true_label;
10169 rtx tem;
10170 int dummy_true_label = 0;
10172 /* Reverse the comparison if that is safe and we want to jump if it is
10173 false. */
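/* For example (illustrative only): with only IF_FALSE_LABEL set, an
   integer `a < b' is rewritten here as `a >= b' jumping to that label,
   so no dummy true label is needed.  Floating-point modes are excluded
   because reversing a comparison is not safe when the operands may be
   unordered (NaNs).  */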
10174 if (! if_true_label && ! FLOAT_MODE_P (mode))
10176 if_true_label = if_false_label;
10177 if_false_label = 0;
10178 code = reverse_condition (code);
10181 /* If one operand is constant, make it the second one. Only do this
10182 if the other operand is not constant as well. */
10184 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10185 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10187 tem = op0;
10188 op0 = op1;
10189 op1 = tem;
10190 code = swap_condition (code);
10193 if (flag_force_mem)
10195 op0 = force_not_mem (op0);
10196 op1 = force_not_mem (op1);
10199 do_pending_stack_adjust ();
10201 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10202 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10204 if (tem == const_true_rtx)
10206 if (if_true_label)
10207 emit_jump (if_true_label);
10209 else
10211 if (if_false_label)
10212 emit_jump (if_false_label);
10214 return;
10217 #if 0
10218 /* There's no need to do this now that combine.c can eliminate lots of
10219 sign extensions. This can be less efficient in certain cases on other
10220 machines. */
10222 /* If this is a signed equality comparison, we can do it as an
10223 unsigned comparison since zero-extension is cheaper than sign
10224 extension and comparisons with zero are done as unsigned. This is
10225 the case even on machines that can do fast sign extension, since
10226 zero-extension is easier to combine with other operations than
10227 sign-extension is. If we are comparing against a constant, we must
10228 convert it to what it would look like unsigned. */
10229 if ((code == EQ || code == NE) && ! unsignedp
10230 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10232 if (GET_CODE (op1) == CONST_INT
10233 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10234 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10235 unsignedp = 1;
10237 #endif
10239 if (! if_true_label)
10241 dummy_true_label = 1;
10242 if_true_label = gen_label_rtx ();
10245 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10246 if_true_label);
10248 if (if_false_label)
10249 emit_jump (if_false_label);
10250 if (dummy_true_label)
10251 emit_label (if_true_label);
10254 /* Generate code for a comparison expression EXP (including code to compute
10255 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10256 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10257 generated code will drop through.
10258 SIGNED_CODE should be the rtx operation for this comparison for
10259 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10261 We force a stack adjustment unless there are currently
10262 things pushed on the stack that aren't yet used. */
10264 static void
10265 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10266 if_true_label)
10267 register tree exp;
10268 enum rtx_code signed_code, unsigned_code;
10269 rtx if_false_label, if_true_label;
10271 unsigned int align0, align1;
10272 register rtx op0, op1;
10273 register tree type;
10274 register enum machine_mode mode;
10275 int unsignedp;
10276 enum rtx_code code;
10278 /* Don't crash if the comparison was erroneous. */
10279 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10280 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10281 return;
10283 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10284 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10285 return;
10287 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10288 mode = TYPE_MODE (type);
10289 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10290 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10291 || (GET_MODE_BITSIZE (mode)
10292 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10293 1)))))))
10295 /* op0 might have been replaced by a promoted constant, in which
10296 case the type of the second argument should be used. */
10297 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10298 mode = TYPE_MODE (type);
10300 unsignedp = TREE_UNSIGNED (type);
10301 code = unsignedp ? unsigned_code : signed_code;
10303 #ifdef HAVE_canonicalize_funcptr_for_compare
10304 /* If function pointers need to be "canonicalized" before they can
10305 be reliably compared, then canonicalize them. */
10306 if (HAVE_canonicalize_funcptr_for_compare
10307 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10308 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10309 == FUNCTION_TYPE))
10311 rtx new_op0 = gen_reg_rtx (mode);
10313 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10314 op0 = new_op0;
10317 if (HAVE_canonicalize_funcptr_for_compare
10318 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10319 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10320 == FUNCTION_TYPE))
10322 rtx new_op1 = gen_reg_rtx (mode);
10324 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10325 op1 = new_op1;
10327 #endif
10329 /* Do any postincrements in the expression that was tested. */
10330 emit_queue ();
10332 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10333 ((mode == BLKmode)
10334 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10335 MIN (align0, align1),
10336 if_false_label, if_true_label);
10339 /* Generate code to calculate EXP using a store-flag instruction
10340 and return an rtx for the result. EXP is either a comparison
10341 or a TRUTH_NOT_EXPR whose operand is a comparison.
10343 If TARGET is nonzero, store the result there if convenient.
10345 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10346 cheap.
10348 Return zero if there is no suitable set-flag instruction
10349 available on this machine.
10351 Once expand_expr has been called on the arguments of the comparison,
10352 we are committed to doing the store flag, since it is not safe to
10353 re-evaluate the expression. We emit the store-flag insn by calling
10354 emit_store_flag, but only expand the arguments if we have a reason
10355 to believe that emit_store_flag will be successful. If we think that
10356 it will, but it isn't, we have to simulate the store-flag with a
10357 set/jump/set sequence. */
10359 static rtx
10360 do_store_flag (exp, target, mode, only_cheap)
10361 tree exp;
10362 rtx target;
10363 enum machine_mode mode;
10364 int only_cheap;
10366 enum rtx_code code;
10367 tree arg0, arg1, type;
10368 tree tem;
10369 enum machine_mode operand_mode;
10370 int invert = 0;
10371 int unsignedp;
10372 rtx op0, op1;
10373 enum insn_code icode;
10374 rtx subtarget = target;
10375 rtx result, label;
10377 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10378 result at the end. We can't simply invert the test since it would
10379 have already been inverted if it were valid. This case occurs for
10380 some floating-point comparisons. */
10382 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10383 invert = 1, exp = TREE_OPERAND (exp, 0);
10385 arg0 = TREE_OPERAND (exp, 0);
10386 arg1 = TREE_OPERAND (exp, 1);
10388 /* Don't crash if the comparison was erroneous. */
10389 if (arg0 == error_mark_node || arg1 == error_mark_node)
10390 return const0_rtx;
10392 type = TREE_TYPE (arg0);
10393 operand_mode = TYPE_MODE (type);
10394 unsignedp = TREE_UNSIGNED (type);
10396 /* We won't bother with BLKmode store-flag operations because it would mean
10397 passing a lot of information to emit_store_flag. */
10398 if (operand_mode == BLKmode)
10399 return 0;
10401 /* We won't bother with store-flag operations involving function pointers
10402 when function pointers must be canonicalized before comparisons. */
10403 #ifdef HAVE_canonicalize_funcptr_for_compare
10404 if (HAVE_canonicalize_funcptr_for_compare
10405 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10406 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10407 == FUNCTION_TYPE))
10408 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10409 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10410 == FUNCTION_TYPE))))
10411 return 0;
10412 #endif
10414 STRIP_NOPS (arg0);
10415 STRIP_NOPS (arg1);
10417 /* Get the rtx comparison code to use. We know that EXP is a comparison
10418 operation of some type. Some comparisons against 1 and -1 can be
10419 converted to comparisons with zero. Do so here so that the tests
10420 below will be aware that we have a comparison with zero. These
10421 tests will not catch constants in the first operand, but constants
10422 are rarely passed as the first operand. */
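/* For example: `x >= 1' is treated below as `x > 0', and a signed
   `x <= -1' as `x < 0', so the later checks that look only for
   comparisons against zero also catch these forms.  */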
10424 switch (TREE_CODE (exp))
10426 case EQ_EXPR:
10427 code = EQ;
10428 break;
10429 case NE_EXPR:
10430 code = NE;
10431 break;
10432 case LT_EXPR:
10433 if (integer_onep (arg1))
10434 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10435 else
10436 code = unsignedp ? LTU : LT;
10437 break;
10438 case LE_EXPR:
10439 if (! unsignedp && integer_all_onesp (arg1))
10440 arg1 = integer_zero_node, code = LT;
10441 else
10442 code = unsignedp ? LEU : LE;
10443 break;
10444 case GT_EXPR:
10445 if (! unsignedp && integer_all_onesp (arg1))
10446 arg1 = integer_zero_node, code = GE;
10447 else
10448 code = unsignedp ? GTU : GT;
10449 break;
10450 case GE_EXPR:
10451 if (integer_onep (arg1))
10452 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10453 else
10454 code = unsignedp ? GEU : GE;
10455 break;
10457 case UNORDERED_EXPR:
10458 code = UNORDERED;
10459 break;
10460 case ORDERED_EXPR:
10461 code = ORDERED;
10462 break;
10463 case UNLT_EXPR:
10464 code = UNLT;
10465 break;
10466 case UNLE_EXPR:
10467 code = UNLE;
10468 break;
10469 case UNGT_EXPR:
10470 code = UNGT;
10471 break;
10472 case UNGE_EXPR:
10473 code = UNGE;
10474 break;
10475 case UNEQ_EXPR:
10476 code = UNEQ;
10477 break;
10479 default:
10480 abort ();
10483 /* Put a constant second. */
10484 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10486 tem = arg0; arg0 = arg1; arg1 = tem;
10487 code = swap_condition (code);
10490 /* If this is an equality or inequality test of a single bit, we can
10491 do this by shifting the bit being tested to the low-order bit and
10492 masking the result with the constant 1. If the condition was EQ,
10493 we xor it with 1. This does not require an scc insn and is faster
10494 than an scc insn even if we have it. */
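/* For example (illustrative only): with `int x', the expression

     (x & 8) != 0

   tests only bit 3 and can be computed as `(x >> 3) & 1' with no scc
   instruction; the EQ form additionally XORs the shifted value with 1.
   The shift, the optional XOR and the final AND are emitted below.  */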
10496 if ((code == NE || code == EQ)
10497 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10498 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10500 tree inner = TREE_OPERAND (arg0, 0);
10501 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10502 int ops_unsignedp;
10504 /* If INNER is a right shift of a constant and it plus BITNUM does
10505 not overflow, adjust BITNUM and INNER. */
10507 if (TREE_CODE (inner) == RSHIFT_EXPR
10508 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10509 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10510 && bitnum < TYPE_PRECISION (type)
10511 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10512 bitnum - TYPE_PRECISION (type)))
10514 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10515 inner = TREE_OPERAND (inner, 0);
10518 /* If we are going to be able to omit the AND below, we must do our
10519 operations as unsigned. If we must use the AND, we have a choice.
10520 Normally unsigned is faster, but for some machines signed is. */
10521 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10522 #ifdef LOAD_EXTEND_OP
10523 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10524 #else
10526 #endif
10529 if (! get_subtarget (subtarget)
10530 || GET_MODE (subtarget) != operand_mode
10531 || ! safe_from_p (subtarget, inner, 1))
10532 subtarget = 0;
10534 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10536 if (bitnum != 0)
10537 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10538 size_int (bitnum), subtarget, ops_unsignedp);
10540 if (GET_MODE (op0) != mode)
10541 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10543 if ((code == EQ && ! invert) || (code == NE && invert))
10544 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10545 ops_unsignedp, OPTAB_LIB_WIDEN);
10547 /* Put the AND last so it can combine with more things. */
10548 if (bitnum != TYPE_PRECISION (type) - 1)
10549 op0 = expand_and (op0, const1_rtx, subtarget);
10551 return op0;
10554 /* Now see if we are likely to be able to do this. Return if not. */
10555 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10556 return 0;
10558 icode = setcc_gen_code[(int) code];
10559 if (icode == CODE_FOR_nothing
10560 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10562 /* We can only do this if it is one of the special cases that
10563 can be handled without an scc insn. */
10564 if ((code == LT && integer_zerop (arg1))
10565 || (! only_cheap && code == GE && integer_zerop (arg1)))
10567 else if (BRANCH_COST >= 0
10568 && ! only_cheap && (code == NE || code == EQ)
10569 && TREE_CODE (type) != REAL_TYPE
10570 && ((abs_optab->handlers[(int) operand_mode].insn_code
10571 != CODE_FOR_nothing)
10572 || (ffs_optab->handlers[(int) operand_mode].insn_code
10573 != CODE_FOR_nothing)))
10575 else
10576 return 0;
10579 if (! get_subtarget (target)
10580 || GET_MODE (subtarget) != operand_mode
10581 || ! safe_from_p (subtarget, arg1, 1))
10582 subtarget = 0;
10584 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10585 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10587 if (target == 0)
10588 target = gen_reg_rtx (mode);
10590 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10591 because, if emit_store_flag does anything, it will succeed and
10592 OP0 and OP1 will not be used subsequently. */
10594 result = emit_store_flag (target, code,
10595 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10596 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10597 operand_mode, unsignedp, 1);
10599 if (result)
10601 if (invert)
10602 result = expand_binop (mode, xor_optab, result, const1_rtx,
10603 result, 0, OPTAB_LIB_WIDEN);
10604 return result;
10607 /* If this failed, we have to do this with set/compare/jump/set code. */
10608 if (GET_CODE (target) != REG
10609 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10610 target = gen_reg_rtx (GET_MODE (target));
10612 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10613 result = compare_from_rtx (op0, op1, code, unsignedp,
10614 operand_mode, NULL_RTX, 0);
10615 if (GET_CODE (result) == CONST_INT)
10616 return (((result == const0_rtx && ! invert)
10617 || (result != const0_rtx && invert))
10618 ? const0_rtx : const1_rtx);
10620 label = gen_label_rtx ();
10621 if (bcc_gen_fctn[(int) code] == 0)
10622 abort ();
10624 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10625 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10626 emit_label (label);
10628 return target;
10631 /* Generate a tablejump instruction (used for switch statements). */
10633 #ifdef HAVE_tablejump
10635 /* INDEX is the value being switched on, with the lowest value
10636 in the table already subtracted.
10637 MODE is its expected mode (needed if INDEX is constant).
10638 RANGE is the length of the jump table.
10639 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10641 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10642 index value is out of range. */
10644 void
10645 do_tablejump (index, mode, range, table_label, default_label)
10646 rtx index, range, table_label, default_label;
10647 enum machine_mode mode;
10649 register rtx temp, vector;
10651 /* Do an unsigned comparison (in the proper mode) between the index
10652 expression and the value which represents the length of the range.
10653 Since we just finished subtracting the lower bound of the range
10654 from the index expression, this comparison allows us to simultaneously
10655 check that the original index expression value is both greater than
10656 or equal to the minimum value of the range and less than or equal to
10657 the maximum value of the range. */
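/* For instance (illustrative only): for a switch whose case labels run
   from 5 to 10, the caller passes INDEX = i - 5 and RANGE = 5; the
   single unsigned comparison `(unsigned) (i - 5) > 5' branches to
   DEFAULT_LABEL both when i < 5 (the subtraction wraps around to a
   large unsigned value) and when i > 10.  */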
10659 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10660 0, default_label);
10662 /* If index is in range, it must fit in Pmode.
10663 Convert to Pmode so we can index with it. */
10664 if (mode != Pmode)
10665 index = convert_to_mode (Pmode, index, 1);
10667 /* Don't let a MEM slip through, because then INDEX that comes
10668 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10669 and break_out_memory_refs will go to work on it and mess it up. */
10670 #ifdef PIC_CASE_VECTOR_ADDRESS
10671 if (flag_pic && GET_CODE (index) != REG)
10672 index = copy_to_mode_reg (Pmode, index);
10673 #endif
10675 /* If flag_force_addr were to affect this address
10676 it could interfere with the tricky assumptions made
10677 about addresses that contain label-refs,
10678 which may be valid only very near the tablejump itself. */
10679 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10680 GET_MODE_SIZE, because this indicates how large insns are. The other
10681 uses should all be Pmode, because they are addresses. This code
10682 could fail if addresses and insns are not the same size. */
10683 index = gen_rtx_PLUS (Pmode,
10684 gen_rtx_MULT (Pmode, index,
10685 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10686 gen_rtx_LABEL_REF (Pmode, table_label));
10687 #ifdef PIC_CASE_VECTOR_ADDRESS
10688 if (flag_pic)
10689 index = PIC_CASE_VECTOR_ADDRESS (index);
10690 else
10691 #endif
10692 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10693 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10694 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10695 RTX_UNCHANGING_P (vector) = 1;
10696 convert_move (temp, vector, 0);
10698 emit_jump_insn (gen_tablejump (temp, table_label));
10700 /* If we are generating PIC code or if the table is PC-relative, the
10701 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10702 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10703 emit_barrier ();
10706 #endif /* HAVE_tablejump */